Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
203 changes: 189 additions & 14 deletions crates/east-cli/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,16 @@ enum Commands {
revision: Option<String>,
},
/// Update (fetch/checkout) all projects in the workspace.
Update,
Update {
/// Force checkout even if the working tree has uncommitted changes.
/// When project names are given, only those projects are force-checked-out.
/// Without names, force applies to all projects.
#[arg(short, long)]
force: bool,
/// Project names to force checkout (only used with --force).
#[arg(requires = "force")]
projects: Vec<String>,
},
/// List all projects in the workspace.
List,
/// Show status of all projects in the workspace.
Expand Down Expand Up @@ -125,7 +134,7 @@ fn main() -> miette::Result<()> {
async fn run(cli: Cli) -> miette::Result<()> {
match cli.command {
Commands::Init { manifest, revision } => cmd_init(&manifest, revision.as_deref()).await,
Commands::Update => cmd_update().await,
Commands::Update { force, projects } => cmd_update(force, &projects).await,
Commands::List => cmd_list(),
Commands::Status => cmd_status().await,
Commands::Manifest { resolve } => {
Expand Down Expand Up @@ -185,17 +194,22 @@ async fn cmd_init(manifest_source: &str, revision: Option<&str>) -> miette::Resu
info!("initialized east workspace at {}", cwd.display());

// Run update to clone all projects
do_update(&cwd).await
do_update(&cwd, false, &[]).await
}

async fn cmd_update() -> miette::Result<()> {
async fn cmd_update(force: bool, force_projects: &[String]) -> miette::Result<()> {
let ws = Workspace::discover(&std::env::current_dir().into_diagnostic()?)
.into_diagnostic()
.wrap_err("not inside an east workspace")?;
do_update(ws.root()).await
do_update(ws.root(), force, force_projects).await
}

async fn do_update(workspace_root: &Path) -> miette::Result<()> {
#[allow(clippy::too_many_lines)]
async fn do_update(
workspace_root: &Path,
force: bool,
force_projects: &[String],
) -> miette::Result<()> {
let manifest_path = workspace_root.join("east.yml");
let manifest = Manifest::resolve(&manifest_path)
.into_diagnostic()
Expand All @@ -207,25 +221,62 @@ async fn do_update(workspace_root: &Path) -> miette::Result<()> {
return Ok(());
}

// Validate that --force project names actually exist in the manifest
if !force_projects.is_empty() {
let known: std::collections::HashSet<&str> =
projects.iter().map(|p| p.name.as_str()).collect();
let mut unknown = Vec::new();
for name in force_projects {
if !known.contains(name.as_str()) {
unknown.push(name.as_str());
}
}
if !unknown.is_empty() {
bail!("unknown project(s) for --force: {}", unknown.join(", "));
}
}

let total = projects.len() as u64;
let mp = MultiProgress::new();
let style = ProgressStyle::default_spinner()
.template("{spinner:.green} {msg}")

// Top-level progress bar showing overall completion
let overall_style = ProgressStyle::default_bar()
.template("[{bar:30.cyan/dim}] {pos}/{len} {msg}")
.expect("valid template")
.progress_chars("##-");
let overall = mp.add(ProgressBar::new(total));
overall.set_style(overall_style);
overall.set_message("updating...");

// Style for per-task spinners (inserted below the overall bar)
let spinner_style = ProgressStyle::default_spinner()
.template(" {spinner:.green} {msg}")
.expect("valid template");

let semaphore = std::sync::Arc::new(Semaphore::new(MAX_CONCURRENT_GIT));
let overall = std::sync::Arc::new(overall);
let force_set: std::sync::Arc<std::collections::HashSet<String>> =
std::sync::Arc::new(force_projects.iter().cloned().collect());
let mut handles = Vec::new();

for project in &projects {
let project_path = workspace_root.join(project.effective_path());
let revision = manifest.project_revision(project).map(String::from);
let clone_url = manifest.project_clone_url(project).ok();
let project_name = project.name.clone();
let project_rel_path = project.effective_path().to_string();
let sem = semaphore.clone();
let pb = mp.add(ProgressBar::new_spinner());
pb.set_style(style.clone());
let overall = overall.clone();
let force_set = force_set.clone();
let mp_handle = mp.clone();
let spinner_style_clone = spinner_style.clone();

let handle = tokio::spawn(async move {
let _permit = sem.acquire().await.expect("semaphore closed");
// Add spinner only after acquiring permit to avoid empty lines
let pb = mp_handle.add(ProgressBar::new_spinner());
pb.set_style(spinner_style_clone);
pb.enable_steady_tick(std::time::Duration::from_millis(100));
pb.set_message(format!("{project_name}: starting..."));

// A directory may exist without being a git repo (e.g. parent
Expand All @@ -238,8 +289,22 @@ async fn do_update(workspace_root: &Path) -> miette::Result<()> {
pb.set_message(format!("{project_name}: fetching..."));
Git::fetch(&project_path).await?;
if let Some(rev) = &revision {
let dirty = Git::is_dirty(&project_path).await.unwrap_or(false);
let force_this =
force && (force_set.is_empty() || force_set.contains(&project_name));
if dirty && !force_this {
pb.finish_with_message(format!(
"{project_name} ({project_rel_path}): skipped checkout (uncommitted changes, use --force to override)"
));
overall.inc(1);
return Ok(());
}
pb.set_message(format!("{project_name}: checking out {rev}..."));
Git::checkout(&project_path, rev).await?;
if force_this {
Git::force_checkout(&project_path, rev).await?;
} else {
Git::checkout(&project_path, rev).await?;
}
}
Ok(())
} else if let Some(url) = &clone_url {
Expand All @@ -249,9 +314,24 @@ async fn do_update(workspace_root: &Path) -> miette::Result<()> {
pb.set_message(format!("{project_name}: initializing..."));
Git::init_and_fetch(url, &project_path, revision.as_deref()).await
} else {
// Clone
// Clone — fallback to init+fetch if the directory was
// created by a concurrent sibling clone in the meantime.
pb.set_message(format!("{project_name}: cloning..."));
Git::clone(url, &project_path, revision.as_deref()).await
let clone_result = Git::clone(url, &project_path, revision.as_deref()).await;
match &clone_result {
Err(east_vcs::error::VcsError::GitFailed { stderr, .. })
if (stderr.contains("already exists")
|| stderr.contains("File exists"))
&& project_path.exists()
&& !project_path.join(".git").exists() =>
{
// Directory was created by a concurrent sibling clone;
// fallback to init+fetch only for this specific case.
pb.set_message(format!("{project_name}: initializing (fallback)..."));
Git::init_and_fetch(url, &project_path, revision.as_deref()).await
}
_ => clone_result,
}
Comment on lines +320 to +334
Copy link

Copilot AI Apr 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The clone fallback triggers on any clone error as long as project_path.exists() afterward. Since git clone often creates the destination directory even when failing (e.g., auth/network failures), this can replace the original error with an init_and_fetch failure (e.g., remote origin already exists) and make debugging harder. Restrict the fallback to the specific “destination exists / not empty” failure mode (e.g., by inspecting the error’s stderr), or re-check that the destination is still a non-repo directory before falling back.

Copilot uses AI. Check for mistakes.
}
} else {
Err(east_vcs::error::VcsError::GitFailed {
Expand All @@ -260,10 +340,12 @@ async fn do_update(workspace_root: &Path) -> miette::Result<()> {
})
};

// Update UI: remove spinner on success, keep failure visible
match &result {
Ok(()) => pb.finish_with_message(format!("{project_name}: done")),
Ok(()) => pb.finish_and_clear(),
Err(e) => pb.finish_with_message(format!("{project_name}: FAILED ({e})")),
}
overall.inc(1);
result
});
handles.push((project.name.clone(), handle));
Expand All @@ -277,6 +359,11 @@ async fn do_update(workspace_root: &Path) -> miette::Result<()> {
Err(e) => errors.push(format!("{name}: task panicked: {e}")),
}
}
overall.finish_and_clear();

// Maintain .git/info/exclude for parent repos that contain child project paths.
// This prevents nested project directories from showing as untracked in the parent.
update_git_excludes(workspace_root, &projects);

if errors.is_empty() {
println!("updated {} projects", projects.len());
Expand All @@ -286,6 +373,94 @@ async fn do_update(workspace_root: &Path) -> miette::Result<()> {
}
}

/// For each project whose path is a prefix of another project's path (parent/child
/// relationship), add the child's relative path to the parent's `.git/info/exclude`.
///
/// This keeps nested project directories from showing up as untracked files in
/// the parent repository. All filesystem operations are deliberately
/// best-effort: a missing or read-only `.git` directory must not break
/// `east update`, so errors are ignored.
fn update_git_excludes(workspace_root: &Path, projects: &[&east_manifest::Project]) {
    use std::collections::BTreeMap;

    // Map each parent project path to the exclude entries for its nested
    // children (entries are anchored and directory-only: "/child/").
    let paths: Vec<String> = projects
        .iter()
        .map(|p| p.effective_path().to_string())
        .collect();
    let mut parent_children: BTreeMap<String, Vec<String>> = BTreeMap::new();
    for (i, parent_path) in paths.iter().enumerate() {
        // Hoisted out of the inner loop: the parent prefix is loop-invariant.
        let prefix = format!("{parent_path}/");
        for (j, child_path) in paths.iter().enumerate() {
            if i == j {
                continue;
            }
            if let Some(relative) = child_path.strip_prefix(&prefix) {
                parent_children
                    .entry(parent_path.clone())
                    .or_default()
                    .push(format!("/{relative}/"));
            }
        }
    }

    for (parent_path, children) in &parent_children {
        let git_dir = workspace_root.join(parent_path).join(".git");
        // A project directory may exist without being a git repo yet
        // (e.g. it has not been cloned); nothing to exclude in that case.
        if !git_dir.is_dir() {
            continue;
        }
        let info_dir = git_dir.join("info");
        // Best-effort: ignore I/O failures, the exclude file is a convenience.
        let _ = std::fs::create_dir_all(&info_dir);
        let exclude_path = info_dir.join("exclude");
        let existing = std::fs::read_to_string(&exclude_path).unwrap_or_default();
        let _ = std::fs::write(&exclude_path, render_exclude_content(&existing, children));
    }
}

/// Rewrite the content of an `info/exclude` file: strip any previous
/// east-managed block, then append a fresh block containing the (sorted,
/// deduplicated) `children` entries. Pure string transformation, extracted
/// from `update_git_excludes` so the rewrite logic is isolated from I/O.
fn render_exclude_content(existing: &str, children: &[String]) -> String {
    let marker = "# managed by east — do not edit this block";
    let end_marker = "# end east managed block";

    // Keep every line outside a previous east-managed block.
    let mut lines: Vec<&str> = Vec::new();
    let mut in_block = false;
    for line in existing.lines() {
        if line == marker {
            in_block = true;
        } else if line == end_marker {
            in_block = false;
        } else if !in_block {
            lines.push(line);
        }
    }

    // Trim trailing blank lines so the managed block is separated from the
    // user's content by exactly one blank line.
    while lines.last() == Some(&"") {
        lines.pop();
    }
    if !lines.is_empty() {
        lines.push("");
    }

    // Assemble the managed block with sorted, deduplicated entries so the
    // output is stable across runs regardless of manifest ordering.
    let mut sorted_children = children.to_vec();
    sorted_children.sort();
    sorted_children.dedup();
    let mut block = vec![marker.to_string()];
    block.extend(sorted_children);
    block.push(end_marker.to_string());

    let mut output = lines.join("\n");
    if !output.is_empty() {
        output.push('\n');
    }
    output.push_str(&block.join("\n"));
    output.push('\n');
    output
}

fn cmd_list() -> miette::Result<()> {
let ws = Workspace::discover(&std::env::current_dir().into_diagnostic()?)
.into_diagnostic()
Expand Down
94 changes: 94 additions & 0 deletions crates/east-cli/tests/cli_update.rs
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,100 @@ fn manifest_resolve_outputs_yaml() {
.stdout(predicate::str::contains("project-1"));
}

/// A dirty working tree must not be clobbered by a plain `update`:
/// the checkout is skipped while the command still succeeds overall.
#[test]
fn update_skips_dirty_project_checkout() {
    let (_fixture, workspace) = setup_multi_project_workspace(2);
    let root = workspace.path();

    // Introduce an uncommitted modification in project-0.
    fs::write(root.join("project-0/lib.rs"), "// modified\n").unwrap();

    // The skip notice (if any) goes to stderr via the progress bar; the
    // completion summary is printed on stdout.
    let mut cmd = AssertCmd::cargo_bin("east").unwrap();
    cmd.arg("update").current_dir(root);
    cmd.assert()
        .success()
        .stderr(predicate::str::contains("skipped checkout").or(predicate::str::is_empty()))
        .stdout(predicate::str::contains("updated 2 projects"));
}

/// `--force <name>` limits the destructive checkout to the named project;
/// other dirty projects keep their local modifications.
#[test]
fn update_force_specific_project() {
    let (_fixture, workspace) = setup_multi_project_workspace(2);
    let root = workspace.path();

    // Dirty both projects.
    for name in ["project-0", "project-1"] {
        fs::write(root.join(name).join("lib.rs"), "// modified\n").unwrap();
    }

    // Force-checkout only project-0.
    AssertCmd::cargo_bin("east")
        .unwrap()
        .args(["update", "--force", "project-0"])
        .current_dir(root)
        .assert()
        .success();

    // project-0 was restored; compare with `contains` because git may use
    // \r\n line endings on Windows.
    let restored = fs::read_to_string(root.join("project-0/lib.rs")).unwrap();
    assert!(
        restored.contains("// code for project-0"),
        "project-0 should be restored after force checkout"
    );

    // project-1's checkout was skipped, so its local edit survives.
    let untouched = fs::read_to_string(root.join("project-1/lib.rs")).unwrap();
    assert!(
        untouched.contains("// modified"),
        "project-1 should still have local modifications"
    );
}

/// Bare `--force` (no project names) force-checks-out every project,
/// discarding uncommitted changes across the whole workspace.
#[test]
fn update_force_all_projects() {
    let (_fixture, workspace) = setup_multi_project_workspace(2);
    let root = workspace.path();

    // Dirty both projects.
    for name in ["project-0", "project-1"] {
        fs::write(root.join(name).join("lib.rs"), "// modified\n").unwrap();
    }

    AssertCmd::cargo_bin("east")
        .unwrap()
        .args(["update", "--force"])
        .current_dir(root)
        .assert()
        .success();

    // Both files are back to committed content; compare with `contains`
    // because git may use \r\n line endings on Windows.
    let c0 = fs::read_to_string(root.join("project-0/lib.rs")).unwrap();
    let c1 = fs::read_to_string(root.join("project-1/lib.rs")).unwrap();
    assert!(
        c0.contains("// code for project-0"),
        "project-0 should be restored"
    );
    assert!(
        c1.contains("// code for project-1"),
        "project-1 should be restored"
    );
}

/// Naming a project that does not exist in the manifest with `--force`
/// must fail with a diagnostic naming the unknown project.
#[test]
fn update_force_unknown_project_fails() {
    let (_fixture, workspace) = setup_multi_project_workspace(1);

    let assert = AssertCmd::cargo_bin("east")
        .unwrap()
        .args(["update", "--force", "nonexistent"])
        .current_dir(workspace.path())
        .assert();

    assert.failure().stderr(predicate::str::contains(
        "unknown project(s) for --force: nonexistent",
    ));
}

#[test]
fn update_outside_workspace_fails() {
let dir = TempDir::new().unwrap();
Expand Down
Loading