Compare commits

..

No commits in common. "ea07d74457146495817b5c5ff6d8582aab7026ff" and "8a2a1895762b9f67307b8c383c5c66d93c6ec81b" have entirely different histories.

21 changed files with 103 additions and 2414 deletions

View file

@ -6,10 +6,8 @@ use clap::{Parser, Subcommand};
use crate::diff;
use crate::ignore::IgnoreRules;
use crate::inspect;
use crate::modify;
use crate::refs;
use crate::repo::Repository;
use crate::stash;
use crate::tracking;
#[derive(Parser)]
@ -399,14 +397,7 @@ pub fn dispatch(cli: Cli) {
}
}
Command::Merge { target } => {
let repo = open_repo_or_exit();
match modify::merge_branch(&repo, &target) {
Ok(id) => println!("merged {target} -> {}", &id.0[..id.0.len().min(12)]),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
}
println!("arc merge: {target} (not yet implemented)");
}
Command::Show { target } => {
let repo = open_repo_or_exit();
@ -429,23 +420,13 @@ pub fn dispatch(cli: Cli) {
}
}
Command::Revert { target } => {
let repo = open_repo_or_exit();
match modify::revert(&repo, &target) {
Ok(id) => println!("reverted -> {}", &id.0[..id.0.len().min(12)]),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
}
println!("arc revert: {target} (not yet implemented)");
}
Command::Reset { files } => {
let repo = open_repo_or_exit();
match modify::reset(&repo, &files) {
Ok(msg) => println!("{msg}"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
if files.is_empty() {
println!("arc reset: all (not yet implemented)");
} else {
println!("arc reset: {} (not yet implemented)", files.join(", "));
}
}
Command::Push { remote } => {
@ -529,66 +510,28 @@ pub fn dispatch(cli: Cli) {
},
}
}
Command::Stash { command } => {
let repo = open_repo_or_exit();
match command {
StashCommand::Create { name } => match stash::create(&repo, &name) {
Ok(()) => println!("stash '{name}' created"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::Use { name } => match stash::use_stash(&repo, &name) {
Ok(()) => println!("switched to stash '{name}'"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::Push => match stash::push(&repo) {
Ok(msg) => println!("{msg}"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::Pop => match stash::pop(&repo) {
Ok(msg) => println!("{msg}"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::Rm { name } => match stash::rm(&repo, &name) {
Ok(()) => println!("stash '{name}' removed"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::List => match stash::list(&repo) {
Ok(output) => println!("{output}"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
Command::Stash { command } => match command {
StashCommand::Create { name } => {
println!("arc stash create: {name} (not yet implemented)");
}
}
StashCommand::Use { name } => {
println!("arc stash use: {name} (not yet implemented)");
}
StashCommand::Push => {
println!("arc stash push (not yet implemented)");
}
StashCommand::Pop => {
println!("arc stash pop (not yet implemented)");
}
StashCommand::Rm { name } => {
println!("arc stash rm: {name} (not yet implemented)");
}
StashCommand::List => {
println!("arc stash list (not yet implemented)");
}
},
Command::Graft { target, onto } => {
let repo = open_repo_or_exit();
match modify::graft(&repo, &target, &onto) {
Ok(ids) => {
for id in &ids {
println!("grafted {}", &id.0[..id.0.len().min(12)]);
}
}
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
}
println!("arc graft: {target} onto {onto} (not yet implemented)");
}
Command::Config { command } => match command {
ConfigCommand::Set { global, key, value } => {
@ -647,5 +590,5 @@ fn run_diff(repo: &Repository) -> crate::error::Result<String> {
let worktree = tracking::scan_worktree(repo, &ignore)?;
let changes = tracking::detect_changes(&committed, &worktree);
Ok(diff::render_diff(&committed, &changes))
Ok(diff::render_diff(&committed, &worktree, &changes))
}

View file

@ -1,7 +1,7 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use std::path::PathBuf;
use crate::error::{ArcError, Result};
use crate::repo::Repository;
@ -54,7 +54,7 @@ impl Config {
Self::load_from(&path)
}
fn load_from(path: &Path) -> Result<Option<Config>> {
fn load_from(path: &PathBuf) -> Result<Option<Config>> {
if !path.exists() {
return Ok(None);
}
@ -63,7 +63,7 @@ impl Config {
Ok(Some(config))
}
pub fn save_to(&self, path: &Path) -> Result<()> {
pub fn save_to(&self, path: &PathBuf) -> Result<()> {
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
@ -128,12 +128,6 @@ impl Config {
}
}
pub fn load_effective(repo: &crate::repo::Repository) -> EffectiveConfig {
let local = Config::load_local(repo).ok().flatten();
let global = Config::load_global().ok().flatten();
Config::effective(local, global)
}
impl ArcError {
pub fn invalid_path(msg: impl Into<String>) -> Self {
Self::InvalidPath(msg.into())

View file

@ -1,7 +1,7 @@
use crate::model::{FileChange, FileChangeKind, FileContentDelta};
use crate::tracking::FileTree;
pub fn render_diff(committed: &FileTree, changes: &[FileChange]) -> String {
pub fn render_diff(committed: &FileTree, _worktree: &FileTree, changes: &[FileChange]) -> String {
let mut output = String::new();
for change in changes {

View file

@ -22,17 +22,6 @@ pub enum ArcError {
TagAlreadyExists(String),
CannotRemoveActiveMark(String),
DirtyWorktree,
InvalidRefName(String),
ClockError,
HashError(String),
MergeConflicts(Vec<String>),
NoMergeBase(String),
StashAlreadyExists(String),
StashNotFound(String),
NoActiveStash,
NothingToStash,
StashEmpty(String),
StashBaseMismatch,
}
impl fmt::Display for ArcError {
@ -64,23 +53,6 @@ impl fmt::Display for ArcError {
f,
"uncommitted changes in worktree; commit or reset before switching"
),
Self::InvalidRefName(n) => write!(f, "invalid ref name: {n}"),
Self::ClockError => write!(f, "system clock error: time before unix epoch"),
Self::HashError(msg) => write!(f, "hash computation error: {msg}"),
Self::MergeConflicts(files) => {
write!(f, "merge conflicts in: {}", files.join(", "))
}
Self::NoMergeBase(name) => {
write!(f, "no common ancestor found for merge with: {name}")
}
Self::StashAlreadyExists(n) => write!(f, "stash already exists: {n}"),
Self::StashNotFound(n) => write!(f, "stash not found: {n}"),
Self::NoActiveStash => write!(f, "no active stash"),
Self::NothingToStash => write!(f, "nothing to stash, working tree clean"),
Self::StashEmpty(n) => write!(f, "stash is empty: {n}"),
Self::StashBaseMismatch => {
write!(f, "stash base does not match current HEAD")
}
}
}
}

View file

@ -63,7 +63,7 @@ pub fn show(repo: &Repository, target: &str) -> Result<String> {
tracking::materialize_committed_tree(repo, &c.parents[0])?
};
let diff_output = diff::render_diff(&parent_tree, &obj.delta.changes);
let diff_output = diff::render_diff(&parent_tree, &BTreeMap::new(), &obj.delta.changes);
if !diff_output.is_empty() {
output.push_str(&diff_output);
}
@ -225,13 +225,13 @@ fn days_to_ymd(days_since_epoch: i64) -> (i64, u32, u32) {
}
#[derive(Debug)]
pub enum DiffOp {
enum DiffOp {
Equal(usize, usize),
Insert(usize),
Delete(usize),
Delete(()),
}
pub fn myers_diff(old: &[String], new: &[String]) -> Vec<DiffOp> {
fn myers_diff(old: &[String], new: &[String]) -> Vec<DiffOp> {
let n = old.len();
let m = new.len();
@ -239,7 +239,7 @@ pub fn myers_diff(old: &[String], new: &[String]) -> Vec<DiffOp> {
return (0..m).map(DiffOp::Insert).collect();
}
if m == 0 {
return (0..n).map(DiffOp::Delete).collect();
return (0..n).map(|_| DiffOp::Delete(())).collect();
}
let max_d = n + m;
@ -311,7 +311,7 @@ pub fn myers_diff(old: &[String], new: &[String]) -> Vec<DiffOp> {
ops.push(DiffOp::Insert(y as usize));
} else {
x -= 1;
ops.push(DiffOp::Delete(x as usize));
ops.push(DiffOp::Delete(()));
}
}
}

View file

@ -4,13 +4,10 @@ pub mod diff;
pub mod error;
pub mod ignore;
pub mod inspect;
pub mod merge;
pub mod model;
pub mod modify;
pub mod refs;
pub mod repo;
pub mod resolve;
pub mod stash;
pub mod store;
pub mod tracking;

View file

@ -1,272 +0,0 @@
use std::collections::BTreeSet;
use crate::inspect::{DiffOp, myers_diff};
use crate::tracking::FileTree;
pub struct MergeOutcome {
pub tree: FileTree,
pub conflicts: Vec<String>,
}
pub fn three_way_merge(base: &FileTree, ours: &FileTree, theirs: &FileTree) -> MergeOutcome {
let all_paths: BTreeSet<&String> = base
.keys()
.chain(ours.keys())
.chain(theirs.keys())
.collect();
let mut tree = FileTree::new();
let mut conflicts = Vec::new();
for path in all_paths {
let b = base.get(path);
let o = ours.get(path);
let t = theirs.get(path);
match (b, o, t) {
(_, Some(ov), Some(tv)) if ov == tv => {
tree.insert(path.clone(), ov.clone());
}
(Some(bv), Some(ov), Some(tv)) if bv == ov => {
tree.insert(path.clone(), tv.clone());
}
(Some(bv), Some(ov), Some(tv)) if bv == tv => {
tree.insert(path.clone(), ov.clone());
}
(Some(bv), Some(ov), Some(tv)) => match merge_file_content(path, ov, tv, bv) {
FileMerge::Clean(bytes) => {
tree.insert(path.clone(), bytes);
}
FileMerge::Conflict(bytes) => {
tree.insert(path.clone(), bytes);
conflicts.push(path.clone());
}
},
(None, None, Some(tv)) => {
tree.insert(path.clone(), tv.clone());
}
(None, Some(ov), None) => {
tree.insert(path.clone(), ov.clone());
}
(None, Some(ov), Some(tv)) => match merge_file_content(path, ov, tv, &[]) {
FileMerge::Clean(bytes) => {
tree.insert(path.clone(), bytes);
}
FileMerge::Conflict(bytes) => {
tree.insert(path.clone(), bytes);
conflicts.push(path.clone());
}
},
(Some(bv), None, Some(tv)) if bv == tv => {}
(Some(bv), Some(ov), None) if bv == ov => {}
(Some(_), None, Some(_tv)) => {
conflicts.push(path.clone());
}
(Some(_), Some(ov), None) => {
conflicts.push(path.clone());
tree.insert(path.clone(), ov.clone());
}
(Some(_), None, None) => {}
(None, None, None) => {}
}
}
MergeOutcome { tree, conflicts }
}
enum FileMerge {
Clean(Vec<u8>),
Conflict(Vec<u8>),
}
fn merge_file_content(_path: &str, ours: &[u8], theirs: &[u8], base: &[u8]) -> FileMerge {
let (Ok(base_text), Ok(ours_text), Ok(theirs_text)) = (
std::str::from_utf8(base),
std::str::from_utf8(ours),
std::str::from_utf8(theirs),
) else {
return FileMerge::Conflict(ours.to_vec());
};
let base_lines: Vec<String> = base_text.lines().map(String::from).collect();
let ours_lines: Vec<String> = ours_text.lines().map(String::from).collect();
let theirs_lines: Vec<String> = theirs_text.lines().map(String::from).collect();
let edits_ours = build_edit_map(&base_lines, &ours_lines);
let edits_theirs = build_edit_map(&base_lines, &theirs_lines);
let mut result = Vec::new();
let mut has_conflict = false;
let mut i = 0;
while i < base_lines.len() || edits_ours.has_tail_insert(i) || edits_theirs.has_tail_insert(i) {
let o_edit = edits_ours.get(i);
let t_edit = edits_theirs.get(i);
match (o_edit, t_edit) {
(None, None) => {
if i < base_lines.len() {
result.push(base_lines[i].clone());
}
i += 1;
}
(Some(chunk_o), None) => {
for line in &chunk_o.replacement {
result.push(line.clone());
}
i += chunk_o.delete_count.max(1);
}
(None, Some(chunk_t)) => {
for line in &chunk_t.replacement {
result.push(line.clone());
}
i += chunk_t.delete_count.max(1);
}
(Some(chunk_o), Some(chunk_t)) => {
if chunk_o.replacement == chunk_t.replacement
&& chunk_o.delete_count == chunk_t.delete_count
{
for line in &chunk_o.replacement {
result.push(line.clone());
}
} else {
has_conflict = true;
result.push("<<<<<<< ours".to_string());
for line in &chunk_o.replacement {
result.push(line.clone());
}
result.push("=======".to_string());
for line in &chunk_t.replacement {
result.push(line.clone());
}
result.push(">>>>>>> theirs".to_string());
}
i += chunk_o.delete_count.max(chunk_t.delete_count).max(1);
}
}
}
let mut text = result.join("\n");
if !text.is_empty() {
text.push('\n');
}
if has_conflict {
FileMerge::Conflict(text.into_bytes())
} else {
FileMerge::Clean(text.into_bytes())
}
}
#[derive(Debug, Clone)]
struct EditChunk {
replacement: Vec<String>,
delete_count: usize,
}
struct EditMap {
chunks: std::collections::BTreeMap<usize, EditChunk>,
tail_inserts: std::collections::BTreeMap<usize, Vec<String>>,
}
impl EditMap {
fn get(&self, base_idx: usize) -> Option<EditChunk> {
if let Some(chunk) = self.chunks.get(&base_idx) {
return Some(chunk.clone());
}
if let Some(inserts) = self.tail_inserts.get(&base_idx) {
return Some(EditChunk {
replacement: inserts.clone(),
delete_count: 0,
});
}
None
}
fn has_tail_insert(&self, base_idx: usize) -> bool {
self.tail_inserts.contains_key(&base_idx)
}
}
fn build_edit_map(base: &[String], modified: &[String]) -> EditMap {
let ops = myers_diff(base, modified);
let mut chunks: std::collections::BTreeMap<usize, EditChunk> =
std::collections::BTreeMap::new();
let mut tail_inserts: std::collections::BTreeMap<usize, Vec<String>> =
std::collections::BTreeMap::new();
let mut i = 0;
while i < ops.len() {
match &ops[i] {
DiffOp::Equal(_, _) => {
i += 1;
}
DiffOp::Delete(base_idx) => {
let start = *base_idx;
let mut delete_count = 0;
let mut replacement = Vec::new();
while i < ops.len() {
match &ops[i] {
DiffOp::Delete(_) => {
delete_count += 1;
i += 1;
}
DiffOp::Insert(new_idx) => {
replacement.push(modified[*new_idx].clone());
i += 1;
}
_ => break,
}
}
chunks.insert(
start,
EditChunk {
replacement,
delete_count,
},
);
}
DiffOp::Insert(_) => {
let mut inserts = Vec::new();
while i < ops.len() {
if let DiffOp::Insert(idx) = &ops[i] {
inserts.push(modified[*idx].clone());
i += 1;
} else {
break;
}
}
let base_pos = if i < ops.len() {
match &ops[i] {
DiffOp::Equal(bi, _) => *bi,
DiffOp::Delete(bi) => *bi,
_ => base.len(),
}
} else {
base.len()
};
if base_pos < base.len() {
let chunk = chunks.entry(base_pos).or_insert(EditChunk {
replacement: Vec::new(),
delete_count: 0,
});
let mut combined = inserts;
combined.append(&mut chunk.replacement);
chunk.replacement = combined;
} else {
tail_inserts.entry(base_pos).or_default().extend(inserts);
}
}
}
}
EditMap {
chunks,
tail_inserts,
}
}

View file

@ -8,30 +8,6 @@ pub struct CommitId(pub String);
#[serde(transparent)]
pub struct DeltaId(pub String);
impl std::fmt::Display for CommitId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}
impl std::fmt::Display for DeltaId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}
impl AsRef<str> for CommitId {
fn as_ref(&self) -> &str {
&self.0
}
}
impl AsRef<str> for DeltaId {
fn as_ref(&self) -> &str {
&self.0
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Commit {
pub id: CommitId,

View file

@ -1,361 +0,0 @@
use std::collections::{BTreeMap, HashSet};
use std::fs;
use crate::error::{ArcError, Result};
use crate::ignore::IgnoreRules;
use crate::merge;
use crate::model::{CommitId, Delta, FileChangeKind, Head, RefTarget};
use crate::refs;
use crate::repo::Repository;
use crate::resolve;
use crate::store::{self, CommitObject};
use crate::tracking::{self, FileTree};
pub fn reset(repo: &Repository, files: &[String]) -> Result<String> {
let head_commit = tracking::resolve_head_commit(repo)?;
let ignore = IgnoreRules::load(&repo.workdir);
let committed = match &head_commit {
Some(id) => tracking::materialize_committed_tree(repo, id)?,
None => BTreeMap::new(),
};
let worktree = tracking::scan_worktree(repo, &ignore)?;
let changes = tracking::detect_changes(&committed, &worktree);
if changes.is_empty() {
return Ok("nothing to reset, working tree clean".to_string());
}
let filter: Option<HashSet<&str>> = if files.is_empty() {
None
} else {
Some(files.iter().map(|s| s.as_str()).collect())
};
let mut reset_count = 0usize;
for change in &changes {
if let Some(ref f) = filter
&& !f.contains(change.path.as_str())
{
continue;
}
crate::repo::validate_repo_path(&change.path)?;
let abs = repo.workdir.join(&change.path);
match &change.kind {
FileChangeKind::Add { .. } => {
if abs.exists() {
fs::remove_file(&abs)?;
}
}
FileChangeKind::Modify { .. } | FileChangeKind::Delete => {
if let Some(content) = committed.get(&change.path) {
if let Some(parent) = abs.parent() {
fs::create_dir_all(parent)?;
}
fs::write(&abs, content)?;
}
}
FileChangeKind::Rename { from } => {
if abs.exists() {
fs::remove_file(&abs)?;
}
if let Some(content) = committed.get(from) {
let from_abs = repo.workdir.join(from);
if let Some(parent) = from_abs.parent() {
fs::create_dir_all(parent)?;
}
fs::write(&from_abs, content)?;
}
}
}
reset_count += 1;
}
refs::remove_empty_dirs(&repo.workdir)?;
if reset_count == 0 {
Ok("no matching files to reset".to_string())
} else {
Ok(format!("reset {reset_count} file(s)"))
}
}
pub fn revert(repo: &Repository, target: &str) -> Result<CommitId> {
require_clean_worktree(repo)?;
let head_id = tracking::resolve_head_commit(repo)?.ok_or(ArcError::NoCommitsYet)?;
let commits = resolve_commit_or_range(repo, target)?;
let mut current_tree = tracking::materialize_committed_tree(repo, &head_id)?;
for obj in commits.iter().rev() {
let parent_tree = if obj.commit.parents.is_empty() {
BTreeMap::new()
} else {
tracking::materialize_committed_tree(repo, &obj.commit.parents[0])?
};
let commit_tree = tracking::materialize_committed_tree(repo, &obj.commit.id)?;
let outcome = merge::three_way_merge(&commit_tree, &current_tree, &parent_tree);
if !outcome.conflicts.is_empty() {
write_tree_to_worktree(repo, &outcome.tree)?;
return Err(ArcError::MergeConflicts(outcome.conflicts));
}
current_tree = outcome.tree;
}
write_tree_to_worktree(repo, &current_tree)?;
let short_target = if target.len() > 12 {
&target[..12]
} else {
target
};
let message = format!("revert {short_target}");
commit_tree(repo, &message, vec![head_id], &current_tree)
}
pub fn merge_branch(repo: &Repository, target: &str) -> Result<CommitId> {
require_clean_worktree(repo)?;
let ours_id = tracking::resolve_head_commit(repo)?.ok_or(ArcError::NoCommitsYet)?;
let theirs_id = resolve::resolve_target(repo, target)?;
if ours_id == theirs_id {
return Err(ArcError::NothingToCommit);
}
let base_id = find_merge_base(repo, &ours_id, &theirs_id)?;
let base_tree = match &base_id {
Some(id) => tracking::materialize_committed_tree(repo, id)?,
None => BTreeMap::new(),
};
let ours_tree = tracking::materialize_committed_tree(repo, &ours_id)?;
let theirs_tree = tracking::materialize_committed_tree(repo, &theirs_id)?;
let outcome = merge::three_way_merge(&base_tree, &ours_tree, &theirs_tree);
write_tree_to_worktree(repo, &outcome.tree)?;
if !outcome.conflicts.is_empty() {
return Err(ArcError::MergeConflicts(outcome.conflicts));
}
let message = format!("merge {target}");
commit_tree(repo, &message, vec![ours_id, theirs_id], &outcome.tree)
}
pub fn graft(repo: &Repository, target: &str, onto: &str) -> Result<Vec<CommitId>> {
require_clean_worktree(repo)?;
let source_commits = resolve_commit_or_range(repo, target)?;
let onto_id = resolve::resolve_target(repo, onto)?;
let is_bookmark = if crate::repo::validate_ref_name(onto).is_ok() {
repo.bookmarks_dir().join(onto).exists()
} else {
false
};
let mut current_tip = onto_id.clone();
let mut current_tree = tracking::materialize_committed_tree(repo, &current_tip)?;
let mut new_ids = Vec::new();
for obj in &source_commits {
let parent_tree = if obj.commit.parents.is_empty() {
BTreeMap::new()
} else {
tracking::materialize_committed_tree(repo, &obj.commit.parents[0])?
};
let commit_tree = tracking::materialize_committed_tree(repo, &obj.commit.id)?;
let outcome = merge::three_way_merge(&parent_tree, &current_tree, &commit_tree);
if !outcome.conflicts.is_empty() {
write_tree_to_worktree(repo, &outcome.tree)?;
return Err(ArcError::MergeConflicts(outcome.conflicts));
}
let short_id = &obj.commit.id.0[..obj.commit.id.0.len().min(12)];
let message = format!("graft {short_id}: {}", obj.commit.message);
let new_id = commit_tree_internal(repo, &message, vec![current_tip], &outcome.tree)?;
current_tip = new_id.clone();
current_tree = outcome.tree;
new_ids.push(new_id);
}
if is_bookmark {
let bookmark_path = repo.bookmarks_dir().join(onto);
let ref_target = RefTarget {
commit: Some(current_tip.clone()),
};
let ref_yaml = serde_yaml::to_string(&ref_target)?;
fs::write(&bookmark_path, ref_yaml)?;
let head = repo.load_head()?;
if let Head::Attached { bookmark, .. } = &head
&& bookmark == onto
{
repo.save_head(&Head::Attached {
bookmark: bookmark.clone(),
commit: current_tip.clone(),
})?;
}
} else {
repo.save_head(&Head::Detached {
commit: current_tip,
})?;
}
write_tree_to_worktree(repo, &current_tree)?;
Ok(new_ids)
}
fn require_clean_worktree(repo: &Repository) -> Result<()> {
let (report, _) = tracking::status(repo)?;
if !report.is_clean() {
return Err(ArcError::DirtyWorktree);
}
Ok(())
}
fn resolve_commit_or_range(repo: &Repository, spec: &str) -> Result<Vec<CommitObject>> {
if spec.contains("..") {
let resolved = resolve::parse_and_resolve_range(repo, Some(spec))?;
Ok(resolved.chain[resolved.start_idx..].to_vec())
} else {
let id = resolve::resolve_target(repo, spec)?;
let obj = store::read_commit_object(repo, &id)?;
Ok(vec![obj])
}
}
fn find_merge_base(
repo: &Repository,
ours: &CommitId,
theirs: &CommitId,
) -> Result<Option<CommitId>> {
let mut ours_ancestors = HashSet::new();
collect_ancestors(repo, ours, &mut ours_ancestors)?;
ours_ancestors.insert(ours.0.clone());
let mut queue = vec![theirs.clone()];
let mut visited = HashSet::new();
while let Some(id) = queue.pop() {
if ours_ancestors.contains(&id.0) {
return Ok(Some(id));
}
if !visited.insert(id.0.clone()) {
continue;
}
let obj = store::read_commit_object(repo, &id)?;
for parent in &obj.commit.parents {
queue.push(parent.clone());
}
}
Ok(None)
}
fn collect_ancestors(
repo: &Repository,
id: &CommitId,
ancestors: &mut HashSet<String>,
) -> Result<()> {
let obj = store::read_commit_object(repo, id)?;
for parent in &obj.commit.parents {
if ancestors.insert(parent.0.clone()) {
collect_ancestors(repo, parent, ancestors)?;
}
}
Ok(())
}
fn write_tree_to_worktree(repo: &Repository, tree: &FileTree) -> Result<()> {
let ignore = IgnoreRules::load(&repo.workdir);
let current = tracking::scan_worktree(repo, &ignore)?;
refs::clean_tracked_files(repo, &current)?;
refs::write_tree(repo, tree)?;
Ok(())
}
fn commit_tree(
repo: &Repository,
message: &str,
parents: Vec<CommitId>,
tree: &FileTree,
) -> Result<CommitId> {
let id = commit_tree_internal(repo, message, parents, tree)?;
Ok(id)
}
fn commit_tree_internal(
repo: &Repository,
message: &str,
parents: Vec<CommitId>,
new_tree: &FileTree,
) -> Result<CommitId> {
let parent_tree = if parents.is_empty() {
BTreeMap::new()
} else {
tracking::materialize_committed_tree(repo, &parents[0])?
};
let changes = tracking::detect_changes(&parent_tree, new_tree);
if changes.is_empty() {
return Err(ArcError::NothingToCommit);
}
let delta_id = store::compute_delta_id(&parents.first().cloned(), &changes)?;
let delta = Delta {
id: delta_id.clone(),
base: parents.first().cloned(),
changes,
};
let config = crate::config::load_effective(repo);
let author = match (config.user_name, config.user_email) {
(Some(name), Some(email)) => Some(crate::model::Signature { name, email }),
_ => None,
};
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.map_err(|_| ArcError::ClockError)?
.as_secs() as i64;
let commit_id = store::compute_commit_id(&parents, &delta_id, message, &author, timestamp)?;
let commit_obj = crate::model::Commit {
id: commit_id.clone(),
parents: parents.clone(),
delta: delta_id,
message: message.to_string(),
author,
timestamp,
};
let obj = CommitObject {
commit: commit_obj,
delta,
};
store::write_commit_object(repo, &obj)?;
let head = repo.load_head()?;
crate::refs::update_refs_after_commit(repo, &head, &commit_id)?;
Ok(commit_id)
}

View file

@ -44,14 +44,12 @@ fn short_id(id: &CommitId) -> &str {
}
pub fn mark_add(repo: &Repository, name: &str, commit: Option<&str>) -> Result<CommitId> {
crate::repo::validate_ref_name(name)?;
let id = resolve_commit_or_head(repo, commit)?;
write_ref_target(&repo.bookmarks_dir().join(name), &id)?;
Ok(id)
}
pub fn mark_rm(repo: &Repository, name: &str) -> Result<()> {
crate::repo::validate_ref_name(name)?;
let path = repo.bookmarks_dir().join(name);
if !path.exists() {
return Err(ArcError::BookmarkNotFound(name.to_string()));
@ -97,8 +95,6 @@ pub fn mark_list(repo: &Repository) -> Result<String> {
}
pub fn mark_rename(repo: &Repository, name: &str, new_name: &str) -> Result<()> {
crate::repo::validate_ref_name(name)?;
crate::repo::validate_ref_name(new_name)?;
let old_path = repo.bookmarks_dir().join(name);
if !old_path.exists() {
return Err(ArcError::BookmarkNotFound(name.to_string()));
@ -129,7 +125,6 @@ pub fn mark_rename(repo: &Repository, name: &str, new_name: &str) -> Result<()>
}
pub fn tag_add(repo: &Repository, name: &str, commit: Option<&str>) -> Result<CommitId> {
crate::repo::validate_ref_name(name)?;
let path = repo.tags_dir().join(name);
if path.exists() {
return Err(ArcError::TagAlreadyExists(name.to_string()));
@ -140,7 +135,6 @@ pub fn tag_add(repo: &Repository, name: &str, commit: Option<&str>) -> Result<Co
}
pub fn tag_rm(repo: &Repository, name: &str) -> Result<()> {
crate::repo::validate_ref_name(name)?;
let path = repo.tags_dir().join(name);
if !path.exists() {
return Err(ArcError::TagNotFound(name.to_string()));
@ -188,18 +182,19 @@ pub fn switch(repo: &Repository, target: &str) -> Result<String> {
return Err(ArcError::DirtyWorktree);
}
let valid_ref = crate::repo::validate_ref_name(target).is_ok();
let bookmark_path = repo.bookmarks_dir().join(target);
let tag_path = repo.tags_dir().join(target);
let (new_head, message) = if valid_ref && repo.bookmarks_dir().join(target).exists() {
let ref_target = read_ref_target(&repo.bookmarks_dir().join(target))?;
let (new_head, message) = if bookmark_path.exists() {
let ref_target = read_ref_target(&bookmark_path)?;
let commit = ref_target.commit.ok_or(ArcError::NoCommitsYet)?;
let head = Head::Attached {
bookmark: target.to_string(),
commit,
};
(head, format!("switched to bookmark '{target}'"))
} else if valid_ref && repo.tags_dir().join(target).exists() {
let ref_target = read_ref_target(&repo.tags_dir().join(target))?;
} else if tag_path.exists() {
let ref_target = read_ref_target(&tag_path)?;
let commit = ref_target.commit.ok_or(ArcError::NoCommitsYet)?;
let head = Head::Detached { commit };
(head, format!("switched to tag '{target}'"))
@ -226,9 +221,8 @@ pub fn switch(repo: &Repository, target: &str) -> Result<String> {
Ok(message)
}
pub fn clean_tracked_files(repo: &Repository, tree: &tracking::FileTree) -> Result<()> {
fn clean_tracked_files(repo: &Repository, tree: &tracking::FileTree) -> Result<()> {
for path in tree.keys() {
crate::repo::validate_repo_path(path)?;
let abs = repo.workdir.join(path);
if abs.exists() {
fs::remove_file(&abs)?;
@ -239,7 +233,7 @@ pub fn clean_tracked_files(repo: &Repository, tree: &tracking::FileTree) -> Resu
Ok(())
}
pub fn remove_empty_dirs(dir: &std::path::Path) -> Result<()> {
fn remove_empty_dirs(dir: &std::path::Path) -> Result<()> {
let entries = match fs::read_dir(dir) {
Ok(e) => e,
Err(_) => return Ok(()),
@ -263,9 +257,8 @@ pub fn remove_empty_dirs(dir: &std::path::Path) -> Result<()> {
Ok(())
}
pub fn write_tree(repo: &Repository, tree: &tracking::FileTree) -> Result<()> {
fn write_tree(repo: &Repository, tree: &tracking::FileTree) -> Result<()> {
for (path, bytes) in tree {
crate::repo::validate_repo_path(path)?;
let abs = repo.workdir.join(path);
if let Some(parent) = abs.parent() {
fs::create_dir_all(parent)?;
@ -274,33 +267,3 @@ pub fn write_tree(repo: &Repository, tree: &tracking::FileTree) -> Result<()> {
}
Ok(())
}
pub fn update_refs_after_commit(
repo: &Repository,
head: &Head,
commit_id: &CommitId,
) -> Result<()> {
let ref_target = RefTarget {
commit: Some(commit_id.clone()),
};
let ref_yaml = serde_yaml::to_string(&ref_target)?;
match head {
Head::Unborn { bookmark } | Head::Attached { bookmark, .. } => {
fs::write(repo.bookmarks_dir().join(bookmark), &ref_yaml)?;
let new_head = Head::Attached {
bookmark: bookmark.clone(),
commit: commit_id.clone(),
};
repo.save_head(&new_head)?;
}
Head::Detached { .. } => {
let new_head = Head::Detached {
commit: commit_id.clone(),
};
repo.save_head(&new_head)?;
}
}
Ok(())
}

View file

@ -110,42 +110,3 @@ impl Repository {
Ok(())
}
}
pub fn validate_ref_name(name: &str) -> Result<()> {
use std::path::{Component, Path};
if name.is_empty() {
return Err(ArcError::InvalidRefName(name.to_string()));
}
let p = Path::new(name);
let mut comps = p.components();
match (comps.next(), comps.next()) {
(Some(Component::Normal(_)), None) => {}
_ => return Err(ArcError::InvalidRefName(name.to_string())),
}
if name.starts_with('.') || name.contains('\0') {
return Err(ArcError::InvalidRefName(name.to_string()));
}
Ok(())
}
pub fn validate_repo_path(p: &str) -> Result<()> {
use std::path::{Component, Path};
if p.is_empty() || p.contains('\0') {
return Err(ArcError::InvalidPath(format!("invalid repo path: {p}")));
}
let path = Path::new(p);
for c in path.components() {
match c {
Component::Normal(_) | Component::CurDir => {}
_ => return Err(ArcError::InvalidPath(format!("invalid repo path: {p}"))),
}
}
Ok(())
}

View file

@ -11,23 +11,21 @@ pub fn resolve_target(repo: &Repository, target: &str) -> Result<CommitId> {
return tracking::resolve_head_commit(repo)?.ok_or(ArcError::NoCommitsYet);
}
if crate::repo::validate_ref_name(target).is_ok() {
let bookmark_path = repo.bookmarks_dir().join(target);
if bookmark_path.exists() {
let contents = fs::read_to_string(&bookmark_path)?;
let ref_target: RefTarget = serde_yaml::from_str(&contents)?;
if let Some(id) = ref_target.commit {
return Ok(id);
}
let bookmark_path = repo.bookmarks_dir().join(target);
if bookmark_path.exists() {
let contents = fs::read_to_string(&bookmark_path)?;
let ref_target: RefTarget = serde_yaml::from_str(&contents)?;
if let Some(id) = ref_target.commit {
return Ok(id);
}
}
let tag_path = repo.tags_dir().join(target);
if tag_path.exists() {
let contents = fs::read_to_string(&tag_path)?;
let ref_target: RefTarget = serde_yaml::from_str(&contents)?;
if let Some(id) = ref_target.commit {
return Ok(id);
}
let tag_path = repo.tags_dir().join(target);
if tag_path.exists() {
let contents = fs::read_to_string(&tag_path)?;
let ref_target: RefTarget = serde_yaml::from_str(&contents)?;
if let Some(id) = ref_target.commit {
return Ok(id);
}
}
@ -53,13 +51,7 @@ fn resolve_commit_prefix(repo: &Repository, prefix: &str) -> Result<CommitId> {
match matches.len() {
0 => Err(ArcError::UnknownRevision(prefix.to_string())),
1 => {
let id = matches
.into_iter()
.next()
.ok_or_else(|| ArcError::UnknownRevision(prefix.to_string()))?;
Ok(id)
}
1 => Ok(matches.into_iter().next().unwrap()),
_ => Err(ArcError::AmbiguousPrefix(prefix.to_string())),
}
}

View file

@ -1,330 +0,0 @@
use std::collections::BTreeMap;
use std::fs;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use serde::{Deserialize, Serialize};
use crate::error::{ArcError, Result};
use crate::ignore::IgnoreRules;
use crate::model::{CommitId, FileChangeKind};
use crate::refs;
use crate::repo::{self, Repository};
use crate::tracking;
/// Persistent state tracking the currently active stash.
///
/// Persisted as YAML in the repository's stashes directory; `stash push`
/// and `stash pop` operate on whichever stash is recorded here.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StashState {
    /// Name of the currently active stash, or `None` when no stash is active.
    pub active: Option<String>,
}
/// A named stash file containing a stack of entries.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StashFile {
    /// LIFO stack: `push` appends, `pop` removes the last element.
    pub entries: Vec<StashEntry>,
}
/// A single stash entry representing a snapshot of dirty changes.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StashEntry {
    /// HEAD commit the worktree was based on at push time (`None` for an
    /// unborn HEAD); `pop` refuses to apply if HEAD has since moved.
    pub base: Option<CommitId>,
    /// Unix timestamp (seconds) recorded at push time.
    pub timestamp: i64,
    /// The file changes captured by this entry.
    pub changes: Vec<StashChange>,
}
/// A single file change within a stash entry.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StashChange {
    /// Repository-relative path of the affected file.
    pub path: String,
    /// What kind of change was recorded.
    pub kind: StashChangeKind,
    /// Full file bytes for Add/Modify when the detected delta carried full
    /// content; `None` otherwise — in that case `pop` writes nothing back.
    pub content: Option<Vec<u8>>,
}
/// The kind of change recorded in a stash.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
pub enum StashChangeKind {
    Add,
    Modify,
    Delete,
}
/// Directory that holds the per-name stash files.
fn stash_named_dir(repo: &Repository) -> PathBuf {
    let base = repo.stashes_dir();
    base.join("named")
}
/// File recording which stash is currently active.
fn stash_state_path(repo: &Repository) -> PathBuf {
    let base = repo.stashes_dir();
    base.join("state.yml")
}
/// Backing YAML file for the stash called `name`.
fn stash_file_path(repo: &Repository, name: &str) -> PathBuf {
    let file_name = format!("{name}.yml");
    stash_named_dir(repo).join(file_name)
}
/// Load the stash state, treating a missing state file as "no active stash".
fn load_state(repo: &Repository) -> Result<StashState> {
    let state_path = stash_state_path(repo);
    if state_path.exists() {
        let raw = fs::read_to_string(&state_path)?;
        Ok(serde_yaml::from_str(&raw)?)
    } else {
        Ok(StashState { active: None })
    }
}
/// Persist the stash state as YAML.
fn save_state(repo: &Repository, state: &StashState) -> Result<()> {
    let rendered = serde_yaml::to_string(state)?;
    fs::write(stash_state_path(repo), rendered)?;
    Ok(())
}
/// Read and deserialize the stash file for `name`.
fn load_stash_file(repo: &Repository, name: &str) -> Result<StashFile> {
    let raw = fs::read_to_string(stash_file_path(repo, name))?;
    let parsed: StashFile = serde_yaml::from_str(&raw)?;
    Ok(parsed)
}
/// Serialize and write the stash file for `name`.
fn save_stash_file(repo: &Repository, name: &str, file: &StashFile) -> Result<()> {
    let rendered = serde_yaml::to_string(file)?;
    fs::write(stash_file_path(repo, name), rendered)?;
    Ok(())
}
/// Create a new named stash and set it as active.
///
/// Fails if `name` is not a valid ref name or a stash with that name
/// already exists.
pub fn create(repo: &Repository, name: &str) -> Result<()> {
    repo::validate_ref_name(name)?;
    fs::create_dir_all(stash_named_dir(repo))?;
    if stash_file_path(repo, name).exists() {
        return Err(ArcError::StashAlreadyExists(name.to_string()));
    }
    // Start with an empty entry stack, then record this stash as active.
    let empty = StashFile { entries: vec![] };
    save_stash_file(repo, name, &empty)?;
    save_state(
        repo,
        &StashState {
            active: Some(name.to_string()),
        },
    )?;
    Ok(())
}
/// Switch the active stash to an existing named stash.
///
/// Fails if `name` is invalid or no stash file with that name exists.
pub fn use_stash(repo: &Repository, name: &str) -> Result<()> {
    repo::validate_ref_name(name)?;
    if !stash_file_path(repo, name).exists() {
        return Err(ArcError::StashNotFound(name.to_string()));
    }
    let next = StashState {
        active: Some(name.to_string()),
    };
    save_state(repo, &next)
}
/// Push current dirty changes onto the active stash and reset the worktree.
///
/// Errors when no stash is active or the worktree is clean. On success the
/// worktree is rewritten back to the committed tree (modifications and
/// deletions restored, newly added files removed) and a message reporting
/// the number of stashed changes is returned.
pub fn push(repo: &Repository) -> Result<String> {
    let state = load_state(repo)?;
    let name = state.active.ok_or(ArcError::NoActiveStash)?;
    // Re-validate the name loaded from disk before using it in a path.
    repo::validate_ref_name(&name)?;
    let ignore = IgnoreRules::load(&repo.workdir);
    let head_commit = tracking::resolve_head_commit(repo)?;
    // Committed tree is empty for an unborn HEAD.
    let committed = match &head_commit {
        Some(id) => tracking::materialize_committed_tree(repo, id)?,
        None => BTreeMap::new(),
    };
    let worktree = tracking::scan_worktree(repo, &ignore)?;
    let changes = tracking::detect_changes(&committed, &worktree);
    if changes.is_empty() {
        return Err(ArcError::NothingToStash);
    }
    // Convert detected changes to stash records; remember added paths so
    // they can be deleted from the worktree afterwards.
    let mut stash_changes = Vec::new();
    let mut added_paths = Vec::new();
    for change in &changes {
        let (kind, content) = match &change.kind {
            FileChangeKind::Add { content: c } => {
                added_paths.push(change.path.clone());
                // Only Full deltas carry bytes; other variants stash None.
                let bytes = match c {
                    crate::model::FileContentDelta::Full { bytes } => Some(bytes.clone()),
                    _ => None,
                };
                (StashChangeKind::Add, bytes)
            }
            FileChangeKind::Modify { content: c } => {
                let bytes = match c {
                    crate::model::FileContentDelta::Full { bytes } => Some(bytes.clone()),
                    _ => None,
                };
                (StashChangeKind::Modify, bytes)
            }
            FileChangeKind::Delete => (StashChangeKind::Delete, None),
            // NOTE(review): a Rename is stashed as Add with no content, so
            // its bytes are not captured and pop cannot recreate the file —
            // looks lossy; confirm whether renames can reach this path.
            FileChangeKind::Rename { .. } => (StashChangeKind::Add, None),
        };
        stash_changes.push(StashChange {
            path: change.path.clone(),
            kind,
            content,
        });
    }
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_err(|_| ArcError::ClockError)?
        .as_secs() as i64;
    let entry = StashEntry {
        base: head_commit.clone(),
        timestamp,
        changes: stash_changes,
    };
    // Persist the stash entry BEFORE touching the worktree, so an error
    // below cannot lose the recorded changes.
    let mut stash_file = load_stash_file(repo, &name)?;
    stash_file.entries.push(entry);
    save_stash_file(repo, &name, &stash_file)?;
    // Reset the worktree: restore tracked files, remove added ones.
    refs::clean_tracked_files(repo, &committed)?;
    for path in &added_paths {
        repo::validate_repo_path(path)?;
        let abs = repo.workdir.join(path);
        if abs.exists() {
            fs::remove_file(&abs)?;
        }
    }
    refs::write_tree(repo, &committed)?;
    refs::remove_empty_dirs(&repo.workdir)?;
    let n = changes.len();
    Ok(format!("pushed {n} change(s) to stash '{name}'"))
}
/// Pop the most recent entry from the active stash and apply it to the worktree.
///
/// Requires a clean worktree and that HEAD still points at the commit the
/// entry was stashed on top of; otherwise the stash is left untouched.
pub fn pop(repo: &Repository) -> Result<String> {
    let state = load_state(repo)?;
    let name = state.active.ok_or(ArcError::NoActiveStash)?;
    repo::validate_ref_name(&name)?;
    // Refuse to replay onto uncommitted changes.
    let (report, _) = tracking::status(repo)?;
    if !report.is_clean() {
        return Err(ArcError::DirtyWorktree);
    }
    let mut stash_file = load_stash_file(repo, &name)?;
    if stash_file.entries.is_empty() {
        return Err(ArcError::StashEmpty(name.clone()));
    }
    let entry = stash_file
        .entries
        .pop()
        .ok_or_else(|| ArcError::StashEmpty(name.clone()))?;
    // The entry only applies cleanly on the commit it was taken from.
    // (The in-memory pop above is discarded on this error path — the
    // stash file on disk is only rewritten after a successful apply.)
    let head_commit = tracking::resolve_head_commit(repo)?;
    if entry.base != head_commit {
        return Err(ArcError::StashBaseMismatch);
    }
    let n = entry.changes.len();
    for change in &entry.changes {
        match change.kind {
            StashChangeKind::Add | StashChangeKind::Modify => {
                repo::validate_repo_path(&change.path)?;
                let abs = repo.workdir.join(&change.path);
                if let Some(parent) = abs.parent() {
                    fs::create_dir_all(parent)?;
                }
                // Entries stashed without full bytes (content == None)
                // are silently skipped here.
                if let Some(bytes) = &change.content {
                    fs::write(&abs, bytes)?;
                }
            }
            StashChangeKind::Delete => {
                // The stash recorded a deletion: delete the restored file again.
                repo::validate_repo_path(&change.path)?;
                let abs = repo.workdir.join(&change.path);
                if abs.exists() {
                    fs::remove_file(&abs)?;
                }
            }
        }
    }
    refs::remove_empty_dirs(&repo.workdir)?;
    // Persist the stash with the applied entry removed.
    save_stash_file(repo, &name, &stash_file)?;
    Ok(format!("popped {n} change(s) from stash '{name}'"))
}
/// Remove a named stash. If it was active, deactivate it.
pub fn rm(repo: &Repository, name: &str) -> Result<()> {
repo::validate_ref_name(name)?;
let path = stash_file_path(repo, name);
if !path.exists() {
return Err(ArcError::StashNotFound(name.to_string()));
}
fs::remove_file(&path)?;
let mut state = load_state(repo)?;
if state.active.as_deref() == Some(name) {
state.active = None;
save_state(repo, &state)?;
}
Ok(())
}
/// List all named stashes, marking the active one.
///
/// Returns "no stashes" when the stash directory is absent or empty,
/// otherwise one line per stash with its entry count, sorted by name.
pub fn list(repo: &Repository) -> Result<String> {
    let state = load_state(repo)?;
    let active = state.active.as_deref();
    let named_dir = stash_named_dir(repo);
    if !named_dir.exists() {
        return Ok("no stashes".to_string());
    }
    // Collect stash names from the *.yml files in the named directory.
    let mut names: Vec<String> = Vec::new();
    for dir_entry in fs::read_dir(&named_dir)? {
        let dir_entry = dir_entry?;
        if !dir_entry.file_type()?.is_file() {
            continue;
        }
        let file_name = dir_entry.file_name().to_string_lossy().to_string();
        if let Some(stem) = file_name.strip_suffix(".yml") {
            names.push(stem.to_string());
        }
    }
    names.sort();
    if names.is_empty() {
        return Ok("no stashes".to_string());
    }
    let mut lines = Vec::with_capacity(names.len());
    for name in &names {
        let count = load_stash_file(repo, name)?.entries.len();
        let prefix = if active == Some(name.as_str()) {
            "* "
        } else {
            " "
        };
        lines.push(format!("{prefix}{name} ({count} entries)"));
    }
    Ok(lines.join("\n"))
}

View file

@ -67,11 +67,10 @@ struct CommitForHash<'a> {
pub timestamp: i64,
}
pub fn compute_delta_id(base: &Option<CommitId>, changes: &[FileChange]) -> Result<DeltaId> {
pub fn compute_delta_id(base: &Option<CommitId>, changes: &[FileChange]) -> DeltaId {
let hashable = DeltaForHash { base, changes };
let bytes = rmp_serde::to_vec(&hashable)
.map_err(|e| crate::error::ArcError::HashError(e.to_string()))?;
Ok(DeltaId(sha256_hex(&bytes)))
let bytes = rmp_serde::to_vec(&hashable).expect("delta hash serialization failed");
DeltaId(sha256_hex(&bytes))
}
pub fn compute_commit_id(
@ -80,7 +79,7 @@ pub fn compute_commit_id(
message: &str,
author: &Option<Signature>,
timestamp: i64,
) -> Result<CommitId> {
) -> CommitId {
let hashable = CommitForHash {
parents,
delta,
@ -88,7 +87,6 @@ pub fn compute_commit_id(
author,
timestamp,
};
let bytes = rmp_serde::to_vec(&hashable)
.map_err(|e| crate::error::ArcError::HashError(e.to_string()))?;
Ok(CommitId(sha256_hex(&bytes)))
let bytes = rmp_serde::to_vec(&hashable).expect("commit hash serialization failed");
CommitId(sha256_hex(&bytes))
}

View file

@ -179,14 +179,14 @@ pub fn commit(repo: &Repository, message: &str) -> Result<CommitId> {
None => vec![],
};
let delta_id = store::compute_delta_id(&head_commit, &changes)?;
let delta_id = store::compute_delta_id(&head_commit, &changes);
let delta = Delta {
id: delta_id.clone(),
base: head_commit.clone(),
changes,
};
let config = crate::config::load_effective(repo);
let config = load_effective_config(repo);
let author = match (config.user_name, config.user_email) {
(Some(name), Some(email)) => Some(Signature { name, email }),
_ => None,
@ -194,10 +194,10 @@ pub fn commit(repo: &Repository, message: &str) -> Result<CommitId> {
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.map_err(|_| ArcError::ClockError)?
.expect("system clock error")
.as_secs() as i64;
let commit_id = store::compute_commit_id(&parents, &delta_id, message, &author, timestamp)?;
let commit_id = store::compute_commit_id(&parents, &delta_id, message, &author, timestamp);
let commit_obj = crate::model::Commit {
id: commit_id.clone(),
@ -214,11 +214,43 @@ pub fn commit(repo: &Repository, message: &str) -> Result<CommitId> {
};
store::write_commit_object(repo, &obj)?;
crate::refs::update_refs_after_commit(repo, &head, &commit_id)?;
update_refs_after_commit(repo, &head, &commit_id)?;
Ok(commit_id)
}
fn load_effective_config(repo: &Repository) -> crate::config::EffectiveConfig {
let local = crate::config::Config::load_local(repo).ok().flatten();
let global = crate::config::Config::load_global().ok().flatten();
crate::config::Config::effective(local, global)
}
/// Advance refs and HEAD to point at a freshly written commit.
///
/// Unborn/attached HEAD: the bookmark's ref file is rewritten to the new
/// commit and HEAD becomes attached to that bookmark at the new commit.
/// Detached HEAD: only the detached commit is advanced; no ref is written.
fn update_refs_after_commit(repo: &Repository, head: &Head, commit_id: &CommitId) -> Result<()> {
    let ref_target = crate::model::RefTarget {
        commit: Some(commit_id.clone()),
    };
    let ref_yaml = serde_yaml::to_string(&ref_target)?;
    match head {
        Head::Unborn { bookmark } | Head::Attached { bookmark, .. } => {
            // Point the bookmark at the new commit, then attach HEAD to it
            // (an unborn HEAD becomes attached by its first commit).
            fs::write(repo.bookmarks_dir().join(bookmark), &ref_yaml)?;
            let new_head = Head::Attached {
                bookmark: bookmark.clone(),
                commit: commit_id.clone(),
            };
            repo.save_head(&new_head)?;
        }
        Head::Detached { .. } => {
            let new_head = Head::Detached {
                commit: commit_id.clone(),
            };
            repo.save_head(&new_head)?;
        }
    }
    Ok(())
}
use std::fmt;
pub struct StatusReport {

View file

@ -75,15 +75,8 @@ fn tag_list_subcommand_succeeds() {
#[test]
fn stash_list_subcommand_succeeds() {
let dir = TempDir::new().unwrap();
arc_cmd()
.arg("init")
.current_dir(dir.path())
.output()
.expect("failed to init");
let output = arc_cmd()
.args(["stash", "list"])
.current_dir(dir.path())
.output()
.expect("failed to run arc");
assert!(output.status.success());

View file

@ -1,207 +0,0 @@
use std::process::Command;
use tempfile::TempDir;
/// Handle to the `arc` binary built for this test run.
fn arc_cmd() -> Command {
    Command::new(env!("CARGO_BIN_EXE_arc"))
}
/// Create a temporary directory and initialize an arc repository in it.
fn init_repo() -> TempDir {
    let dir = TempDir::new().unwrap();
    arc_cmd()
        .arg("init")
        .current_dir(dir.path())
        .output()
        .expect("failed to init");
    dir
}
/// Write `content` to `name`, commit it with `msg`, and return the commit
/// id parsed from the "committed <id>" output line.
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) -> String {
    std::fs::write(dir.path().join(name), content).unwrap();
    let output = arc_cmd()
        .args(["commit", msg])
        .current_dir(dir.path())
        .output()
        .expect("failed to commit");
    // Surface stderr on failure so a broken commit is diagnosable
    // (same assertion style as the stash integration tests).
    assert!(
        output.status.success(),
        "commit failed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    let stdout = String::from_utf8_lossy(&output.stdout);
    stdout
        .trim()
        .strip_prefix("committed ")
        .unwrap_or_else(|| panic!("unexpected commit output: {stdout:?}"))
        .to_string()
}
// Grafting a commit from another bookmark applies its file to the worktree.
#[test]
fn graft_single_commit_onto_bookmark() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let cherry = commit_file(&dir, "cherry.txt", "cherry\n", "cherry pick me");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["graft", &cherry, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("grafted"));
    assert!(dir.path().join("cherry.txt").exists());
    let content = std::fs::read_to_string(dir.path().join("cherry.txt")).unwrap();
    assert_eq!(content, "cherry\n");
}
// A graft records a new commit on the target (visible in the log).
#[test]
fn graft_creates_new_commit() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let cherry = commit_file(&dir, "cherry.txt", "cherry\n", "cherry");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["graft", &cherry, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let log_output = arc_cmd()
        .args(["log"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let log_stdout = String::from_utf8_lossy(&log_output.stdout);
    assert!(log_stdout.contains("graft"));
}
// Grafting must be refused while the worktree has uncommitted changes.
#[test]
fn graft_fails_with_dirty_worktree() {
    let dir = init_repo();
    let id = commit_file(&dir, "a.txt", "a\n", "first");
    std::fs::write(dir.path().join("dirty.txt"), "dirty\n").unwrap();
    let output = arc_cmd()
        .args(["graft", &id, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(!output.status.success());
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(stderr.contains("uncommitted changes"));
}
// The grafted commit copies changes — the source commit stays addressable.
#[test]
fn graft_preserves_original_commits() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let cherry = commit_file(&dir, "cherry.txt", "cherry\n", "cherry");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["graft", &cherry, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let show_output = arc_cmd()
        .args(["show", &cherry])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(show_output.status.success());
    let show_stdout = String::from_utf8_lossy(&show_output.stdout);
    assert!(show_stdout.contains("cherry"));
}
// A 12-character id prefix is accepted as the graft source.
#[test]
fn graft_with_commit_prefix() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let cherry = commit_file(&dir, "cherry.txt", "cherry\n", "cherry");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let short = &cherry[..12];
    let output = arc_cmd()
        .args(["graft", short, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
}

View file

@ -1,264 +0,0 @@
use std::process::Command;
use tempfile::TempDir;
/// Handle to the `arc` binary built for this test run.
fn arc_cmd() -> Command {
    Command::new(env!("CARGO_BIN_EXE_arc"))
}
/// Create a temporary directory and initialize an arc repository in it.
fn init_repo() -> TempDir {
    let dir = TempDir::new().unwrap();
    arc_cmd()
        .arg("init")
        .current_dir(dir.path())
        .output()
        .expect("failed to init");
    dir
}
/// Write `content` to `name`, commit it with `msg`, and return the commit
/// id parsed from the "committed <id>" output line.
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) -> String {
    std::fs::write(dir.path().join(name), content).unwrap();
    let output = arc_cmd()
        .args(["commit", msg])
        .current_dir(dir.path())
        .output()
        .expect("failed to commit");
    assert!(output.status.success());
    String::from_utf8_lossy(&output.stdout)
        .trim()
        .strip_prefix("committed ")
        .unwrap()
        .to_string()
}
// Merging a diverged branch brings both sides' files into the worktree.
#[test]
fn merge_diverged_branches() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base commit");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "main-file.txt", "main\n", "main change");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "feature-file.txt", "feature\n", "feature change");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("merged feature"));
    assert!(dir.path().join("base.txt").exists());
    assert!(dir.path().join("main-file.txt").exists());
    assert!(dir.path().join("feature-file.txt").exists());
}
// A merge commit records both branch tips as parents (checked via `show`).
#[test]
fn merge_creates_commit_with_two_parents() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "a.txt", "main\n", "main work");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "b.txt", "feature\n", "feature work");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    let merged_line = stdout.trim();
    assert!(merged_line.contains("merged"));
    let show_output = arc_cmd()
        .args(["show", "HEAD"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let show_stdout = String::from_utf8_lossy(&show_output.stdout);
    assert!(show_stdout.contains("parent"));
}
// Merge must be refused while the worktree has uncommitted changes.
#[test]
fn merge_fails_with_dirty_worktree() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "b.txt", "b\n", "main work");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "c.txt", "c\n", "feature work");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(dir.path().join("dirty.txt"), "dirty\n").unwrap();
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(!output.status.success());
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(stderr.contains("uncommitted changes"));
}
// Non-overlapping edits to the same file (opposite ends) merge cleanly.
#[test]
fn merge_same_file_no_conflict() {
    let dir = init_repo();
    std::fs::write(dir.path().join("a.txt"), "line1\nline2\nline3\n").unwrap();
    arc_cmd()
        .args(["commit", "base"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(dir.path().join("a.txt"), "line1\nline2\nline3\nmain-line\n").unwrap();
    arc_cmd()
        .args(["commit", "main edit"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(
        dir.path().join("a.txt"),
        "feature-line\nline1\nline2\nline3\n",
    )
    .unwrap();
    arc_cmd()
        .args(["commit", "feature edit"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
}
// Conflicting edits to the same line must fail with a conflict error.
#[test]
fn merge_conflict_reports_error() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "original\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(dir.path().join("a.txt"), "main version\n").unwrap();
    arc_cmd()
        .args(["commit", "main change"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(dir.path().join("a.txt"), "feature version\n").unwrap();
    arc_cmd()
        .args(["commit", "feature change"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(!output.status.success());
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(stderr.contains("conflict"));
}

View file

@ -1,151 +0,0 @@
use std::process::Command;
use tempfile::TempDir;
/// Handle to the `arc` binary built for this test run.
fn arc_cmd() -> Command {
    Command::new(env!("CARGO_BIN_EXE_arc"))
}
/// Create a temporary directory and initialize an arc repository in it.
fn init_repo() -> TempDir {
    let dir = TempDir::new().unwrap();
    arc_cmd()
        .arg("init")
        .current_dir(dir.path())
        .output()
        .expect("failed to init");
    dir
}
/// Write `content` to `name`, commit it with `msg`, and return the commit
/// id parsed from the "committed <id>" output line.
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) -> String {
    std::fs::write(dir.path().join(name), content).unwrap();
    let output = arc_cmd()
        .args(["commit", msg])
        .current_dir(dir.path())
        .output()
        .expect("failed to commit");
    assert!(output.status.success());
    String::from_utf8_lossy(&output.stdout)
        .trim()
        .strip_prefix("committed ")
        .unwrap()
        .to_string()
}
// `reset` with no args restores a modified file to its committed content.
#[test]
fn reset_all_restores_modified_file() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "original\n", "first");
    std::fs::write(dir.path().join("a.txt"), "modified\n").unwrap();
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("reset"));
    let content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(content, "original\n");
}
// `reset` removes files added since the last commit.
#[test]
fn reset_all_removes_added_file() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    std::fs::write(dir.path().join("new.txt"), "new\n").unwrap();
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    assert!(!dir.path().join("new.txt").exists());
}
// `reset` restores files deleted since the last commit.
#[test]
fn reset_all_restores_deleted_file() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    std::fs::remove_file(dir.path().join("a.txt")).unwrap();
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(content, "a\n");
}
// `reset <file>` touches only the named file, leaving other changes alone.
#[test]
fn reset_specific_file_only() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    commit_file(&dir, "b.txt", "b\n", "second");
    std::fs::write(dir.path().join("a.txt"), "changed-a\n").unwrap();
    std::fs::write(dir.path().join("b.txt"), "changed-b\n").unwrap();
    let output = arc_cmd()
        .args(["reset", "a.txt"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let a_content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(a_content, "a\n");
    let b_content = std::fs::read_to_string(dir.path().join("b.txt")).unwrap();
    assert_eq!(b_content, "changed-b\n");
}
// `reset` on a clean worktree reports there is nothing to do.
#[test]
fn reset_clean_worktree() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("nothing to reset"));
}
// `reset` undoes a mix of modify + delete + add in one pass.
#[test]
fn reset_multiple_changes() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    commit_file(&dir, "b.txt", "b\n", "second");
    std::fs::write(dir.path().join("a.txt"), "changed\n").unwrap();
    std::fs::remove_file(dir.path().join("b.txt")).unwrap();
    std::fs::write(dir.path().join("c.txt"), "new\n").unwrap();
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let a = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(a, "a\n");
    assert!(dir.path().join("b.txt").exists());
    assert!(!dir.path().join("c.txt").exists());
}

View file

@ -1,159 +0,0 @@
use std::process::Command;
use tempfile::TempDir;
/// Handle to the `arc` binary built for this test run.
fn arc_cmd() -> Command {
    Command::new(env!("CARGO_BIN_EXE_arc"))
}
/// Create a temporary directory and initialize an arc repository in it.
fn init_repo() -> TempDir {
    let dir = TempDir::new().unwrap();
    arc_cmd()
        .arg("init")
        .current_dir(dir.path())
        .output()
        .expect("failed to init");
    dir
}
/// Write `content` to `name`, commit it with `msg`, and return the commit
/// id parsed from the "committed <id>" output line.
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) -> String {
    std::fs::write(dir.path().join(name), content).unwrap();
    let output = arc_cmd()
        .args(["commit", msg])
        .current_dir(dir.path())
        .output()
        .expect("failed to commit");
    assert!(output.status.success());
    String::from_utf8_lossy(&output.stdout)
        .trim()
        .strip_prefix("committed ")
        .unwrap()
        .to_string()
}
// Reverting a commit restores the file content from before that commit.
#[test]
fn revert_single_commit() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "original\n", "first");
    let id2 = commit_file(&dir, "a.txt", "changed\n", "second");
    let output = arc_cmd()
        .args(["revert", &id2])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("reverted"));
    let content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(content, "original\n");
}
// Reverting records a new commit (visible in the log) rather than rewriting history.
#[test]
fn revert_creates_new_commit() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "v1\n", "first");
    let id2 = commit_file(&dir, "a.txt", "v2\n", "second");
    let output = arc_cmd()
        .args(["revert", &id2])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let log_output = arc_cmd()
        .args(["log"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let log_stdout = String::from_utf8_lossy(&log_output.stdout);
    assert!(log_stdout.contains("revert"));
}
// Reverting a commit that added a file removes that file.
#[test]
fn revert_file_addition() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    let id2 = commit_file(&dir, "b.txt", "b\n", "add b");
    let output = arc_cmd()
        .args(["revert", &id2])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    assert!(!dir.path().join("b.txt").exists());
}
// Reverting a commit that deleted a file restores it.
#[test]
fn revert_file_deletion() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    commit_file(&dir, "b.txt", "b\n", "add b");
    std::fs::remove_file(dir.path().join("b.txt")).unwrap();
    let id3 = {
        let output = arc_cmd()
            .args(["commit", "delete b"])
            .current_dir(dir.path())
            .output()
            .expect("failed");
        assert!(output.status.success());
        String::from_utf8_lossy(&output.stdout)
            .trim()
            .strip_prefix("committed ")
            .unwrap()
            .to_string()
    };
    let output = arc_cmd()
        .args(["revert", &id3])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let content = std::fs::read_to_string(dir.path().join("b.txt")).unwrap();
    assert_eq!(content, "b\n");
}
// Revert must be refused while the worktree has uncommitted changes.
#[test]
fn revert_fails_with_dirty_worktree() {
    let dir = init_repo();
    let id = commit_file(&dir, "a.txt", "a\n", "first");
    std::fs::write(dir.path().join("a.txt"), "dirty\n").unwrap();
    let output = arc_cmd()
        .args(["revert", &id])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(!output.status.success());
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(stderr.contains("uncommitted changes"));
}
// A 12-character id prefix is accepted as the revert target.
#[test]
fn revert_with_prefix() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "v1\n", "first");
    let id2 = commit_file(&dir, "a.txt", "v2\n", "second");
    let short = &id2[..12];
    let output = arc_cmd()
        .args(["revert", short])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(content, "v1\n");
}

View file

@ -1,388 +0,0 @@
use std::process::Command;
use tempfile::TempDir;
fn arc_cmd() -> Command {
Command::new(env!("CARGO_BIN_EXE_arc"))
}
fn init_repo() -> TempDir {
let dir = TempDir::new().unwrap();
arc_cmd()
.arg("init")
.current_dir(dir.path())
.output()
.expect("failed to init");
dir
}
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) {
std::fs::write(dir.path().join(name), content).unwrap();
let output = arc_cmd()
.args(["commit", msg])
.current_dir(dir.path())
.output()
.expect("failed to commit");
assert!(
output.status.success(),
"commit failed: {}",
String::from_utf8_lossy(&output.stderr)
);
}
fn run_ok(dir: &TempDir, args: &[&str]) -> String {
let output = arc_cmd()
.args(args)
.current_dir(dir.path())
.output()
.expect("failed to run");
assert!(
output.status.success(),
"command {:?} failed: {}",
args,
String::from_utf8_lossy(&output.stderr)
);
String::from_utf8_lossy(&output.stdout).trim().to_string()
}
fn run_fail(dir: &TempDir, args: &[&str]) -> String {
let output = arc_cmd()
.args(args)
.current_dir(dir.path())
.output()
.expect("failed to run");
assert!(
!output.status.success(),
"command {:?} should have failed but succeeded: {}",
args,
String::from_utf8_lossy(&output.stdout)
);
String::from_utf8_lossy(&output.stderr).trim().to_string()
}
#[test]
fn stash_create_creates_stash() {
let dir = init_repo();
let stdout = run_ok(&dir, &["stash", "create", "wip"]);
assert!(stdout.contains("stash 'wip' created"));
let stash_path = dir.path().join(".arc/stashes/named/wip.yml");
assert!(stash_path.exists());
}
#[test]
fn stash_create_sets_active() {
let dir = init_repo();
run_ok(&dir, &["stash", "create", "wip"]);
let state = std::fs::read_to_string(dir.path().join(".arc/stashes/state.yml")).unwrap();
assert!(state.contains("wip"));
}
#[test]
fn stash_create_fails_if_exists() {
let dir = init_repo();
run_ok(&dir, &["stash", "create", "wip"]);
let stderr = run_fail(&dir, &["stash", "create", "wip"]);
assert!(stderr.contains("stash already exists"));
}
#[test]
fn stash_create_fails_invalid_name() {
let dir = init_repo();
let stderr = run_fail(&dir, &["stash", "create", "../escape"]);
assert!(stderr.contains("invalid ref name"));
}
#[test]
fn stash_use_switches_active() {
let dir = init_repo();
run_ok(&dir, &["stash", "create", "first"]);
run_ok(&dir, &["stash", "create", "second"]);
run_ok(&dir, &["stash", "use", "first"]);
let state = std::fs::read_to_string(dir.path().join(".arc/stashes/state.yml")).unwrap();
assert!(state.contains("first"));
}
#[test]
fn stash_use_fails_nonexistent() {
let dir = init_repo();
let stderr = run_fail(&dir, &["stash", "use", "nope"]);
assert!(stderr.contains("stash not found"));
}
#[test]
fn stash_push_saves_and_resets() {
let dir = init_repo();
commit_file(&dir, "a.txt", "hello\n", "initial");
run_ok(&dir, &["stash", "create", "wip"]);
std::fs::write(dir.path().join("a.txt"), "modified\n").unwrap();
let stdout = run_ok(&dir, &["stash", "push"]);
assert!(stdout.contains("pushed"));
assert!(stdout.contains("change(s)"));
let content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
assert_eq!(content, "hello\n");
}
#[test]
fn stash_push_handles_added_files() {
let dir = init_repo();
commit_file(&dir, "a.txt", "hello\n", "initial");
run_ok(&dir, &["stash", "create", "wip"]);
std::fs::write(dir.path().join("new.txt"), "added\n").unwrap();
run_ok(&dir, &["stash", "push"]);
assert!(!dir.path().join("new.txt").exists());
}
#[test]
fn stash_push_handles_deleted_files() {
let dir = init_repo();
commit_file(&dir, "a.txt", "hello\n", "initial");
run_ok(&dir, &["stash", "create", "wip"]);
std::fs::remove_file(dir.path().join("a.txt")).unwrap();
run_ok(&dir, &["stash", "push"]);
let content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
assert_eq!(content, "hello\n");
}
#[test]
fn stash_push_fails_no_active() {
let dir = init_repo();
commit_file(&dir, "a.txt", "hello\n", "initial");
std::fs::write(dir.path().join("a.txt"), "changed\n").unwrap();
let stderr = run_fail(&dir, &["stash", "push"]);
assert!(stderr.contains("no active stash"));
}
#[test]
fn stash_push_fails_clean_worktree() {
let dir = init_repo();
commit_file(&dir, "a.txt", "hello\n", "initial");
run_ok(&dir, &["stash", "create", "wip"]);
let stderr = run_fail(&dir, &["stash", "push"]);
assert!(stderr.contains("nothing to stash"));
}
#[test]
fn stash_pop_restores_changes() {
    // A push followed by a pop round-trips the modified content.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("a.txt"), "modified\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    let out = run_ok(&repo, &["stash", "pop"]);
    assert!(out.contains("popped"));
    assert_eq!(
        std::fs::read_to_string(repo.path().join("a.txt")).unwrap(),
        "modified\n"
    );
}
#[test]
fn stash_pop_restores_added_files() {
    // A stashed new file is absent after push and reappears (with its
    // content) after pop.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    let added = repo.path().join("new.txt");
    std::fs::write(&added, "added\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    assert!(!added.exists());
    run_ok(&repo, &["stash", "pop"]);
    assert_eq!(std::fs::read_to_string(&added).unwrap(), "added\n");
}
#[test]
fn stash_pop_restores_deleted_files() {
    // A stashed deletion is re-applied by pop: the file is gone again.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    let tracked = repo.path().join("a.txt");
    std::fs::remove_file(&tracked).unwrap();
    run_ok(&repo, &["stash", "push"]);
    run_ok(&repo, &["stash", "pop"]);
    assert!(!tracked.exists());
}
#[test]
fn stash_pop_fails_no_active() {
    // Popping in a fresh repository with no stash selected is an error.
    let repo = init_repo();
    let err = run_fail(&repo, &["stash", "pop"]);
    assert!(err.contains("no active stash"));
}
#[test]
fn stash_pop_fails_empty_stash() {
    // An active stash with no pushed entries cannot be popped.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    let err = run_fail(&repo, &["stash", "pop"]);
    assert!(err.contains("stash is empty"));
}
#[test]
fn stash_pop_fails_dirty_worktree() {
    // Pop refuses to clobber local edits made after the push.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    let tracked = repo.path().join("a.txt");
    std::fs::write(&tracked, "modified\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    std::fs::write(&tracked, "dirty\n").unwrap();
    let err = run_fail(&repo, &["stash", "pop"]);
    assert!(err.contains("uncommitted changes"));
}
#[test]
fn stash_pop_fails_base_mismatch() {
    // A commit made after the push moves HEAD away from the stash's base
    // commit, so pop must be rejected.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("a.txt"), "modified\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    commit_file(&repo, "b.txt", "new file\n", "second commit");
    let err = run_fail(&repo, &["stash", "pop"]);
    assert!(err.contains("stash base does not match"));
}
#[test]
fn stash_rm_removes_stash() {
    // `stash rm` reports success and deletes the backing file on disk.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    let out = run_ok(&repo, &["stash", "rm", "wip"]);
    assert!(out.contains("stash 'wip' removed"));
    assert!(!repo.path().join(".arc/stashes/named/wip.yml").exists());
}
#[test]
fn stash_rm_clears_active_if_removed() {
    // Removing the active stash must clear the active marker in state.yml.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    run_ok(&repo, &["stash", "rm", "wip"]);
    let state_file = repo.path().join(".arc/stashes/state.yml");
    let state = std::fs::read_to_string(state_file).unwrap();
    assert!(state.contains("null") || !state.contains("wip"));
}
#[test]
fn stash_rm_fails_nonexistent() {
    // Removing a stash name that was never created is an error.
    let repo = init_repo();
    let err = run_fail(&repo, &["stash", "rm", "nope"]);
    assert!(err.contains("stash not found"));
}
#[test]
fn stash_list_shows_stashes() {
    // Every created stash name appears in the listing.
    let repo = init_repo();
    for name in ["alpha", "beta"] {
        run_ok(&repo, &["stash", "create", name]);
    }
    let listing = run_ok(&repo, &["stash", "list"]);
    assert!(listing.contains("alpha"));
    assert!(listing.contains("beta"));
}
#[test]
fn stash_list_marks_active() {
    // Only the most recently created stash carries the `*` active marker;
    // the other one is still listed, unmarked.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "alpha"]);
    run_ok(&repo, &["stash", "create", "beta"]);
    let listing = run_ok(&repo, &["stash", "list"]);
    assert!(listing.contains("* beta"));
    assert!(!listing.contains("* alpha"));
    assert!(listing
        .lines()
        .any(|line| line.trim_start().starts_with("alpha")));
}
#[test]
fn stash_list_sorted() {
    // The listing is sorted by name, not by creation order.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "zebra"]);
    run_ok(&repo, &["stash", "create", "alpha"]);
    let listing = run_ok(&repo, &["stash", "list"]);
    let pos_alpha = listing.find("alpha").unwrap();
    let pos_zebra = listing.find("zebra").unwrap();
    assert!(pos_alpha < pos_zebra);
}
#[test]
fn stash_list_shows_entry_count() {
    // The listing shows the per-stash entry count, which grows after a push.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    let before = run_ok(&repo, &["stash", "list"]);
    assert!(before.contains("0 entries"));
    std::fs::write(repo.path().join("a.txt"), "modified\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    let after = run_ok(&repo, &["stash", "list"]);
    assert!(after.contains("1 entries"));
}
#[test]
fn stash_list_empty() {
    // A repository with no stashes reports that explicitly.
    let repo = init_repo();
    let listing = run_ok(&repo, &["stash", "list"]);
    assert!(listing.contains("no stashes"));
}
#[test]
fn stash_push_pop_multiple() {
    // Entries behave as a stack: pops restore the most recent push first.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "v1\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    let tracked = repo.path().join("a.txt");
    std::fs::write(&tracked, "v2\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    std::fs::write(&tracked, "v3\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    run_ok(&repo, &["stash", "pop"]);
    assert_eq!(std::fs::read_to_string(&tracked).unwrap(), "v3\n");
    // Discard the restored change so the second pop sees a clean worktree.
    run_ok(&repo, &["reset"]);
    run_ok(&repo, &["stash", "pop"]);
    assert_eq!(std::fs::read_to_string(&tracked).unwrap(), "v2\n");
}
#[test]
fn stash_push_on_unborn() {
    // Push/pop also works before the first commit (unborn HEAD).
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    let added = repo.path().join("new.txt");
    std::fs::write(&added, "content\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    assert!(!added.exists());
    run_ok(&repo, &["stash", "pop"]);
    assert_eq!(std::fs::read_to_string(&added).unwrap(), "content\n");
}