diff --git a/cli/examples/custom-command/main.rs b/cli/examples/custom-command/main.rs index f1ba432ee2f..631c1fdc45f 100644 --- a/cli/examples/custom-command/main.rs +++ b/cli/examples/custom-command/main.rs @@ -19,6 +19,7 @@ use jj_cli::cli_util::CommandHelper; use jj_cli::cli_util::RevisionArg; use jj_cli::command_error::CommandError; use jj_cli::ui::Ui; +use pollster::FutureExt as _; #[derive(clap::Parser, Clone, Debug)] enum CustomCommand { @@ -47,7 +48,8 @@ fn run_custom_command( .repo_mut() .rewrite_commit(&commit) .set_description("Frobnicated!") - .write()?; + .write() + .block_on()?; tx.finish(ui, "frobnicate")?; writeln!( ui.status(), diff --git a/cli/src/cli_util.rs b/cli/src/cli_util.rs index 970c40a276a..9d883404dc5 100644 --- a/cli/src/cli_util.rs +++ b/cli/src/cli_util.rs @@ -469,7 +469,7 @@ impl CommandHelper { ) -> Result { let workspace = self.load_workspace()?; let op_head = self.resolve_operation(ui, workspace.repo_loader())?; - let repo = workspace.repo_loader().load_at(&op_head)?; + let repo = workspace.repo_loader().load_at(&op_head).block_on()?; let env = self.workspace_environment(ui, &workspace)?; revset_util::warn_unresolvable_trunk(ui, repo.as_ref(), &env.revset_parse_context())?; WorkspaceCommandHelper::new(ui, workspace, repo, env, self.is_at_head_operation()) @@ -530,9 +530,9 @@ impl CommandHelper { let workspace = self.load_workspace()?; let op_id = workspace.working_copy().operation_id(); - match workspace.repo_loader().load_operation(op_id) { + match workspace.repo_loader().load_operation(op_id).block_on() { Ok(op) => { - let repo = workspace.repo_loader().load_at(&op)?; + let repo = workspace.repo_loader().load_at(&op).block_on()?; let mut workspace_command = self.for_workable_repo(ui, workspace, repo)?; // Snapshot the current working copy on top of the last known working-copy @@ -556,7 +556,9 @@ impl CommandHelper { locked_ws.locked_wc(), &desired_wc_commit, &repo, - )? { + ) + .block_on()? 
+ { WorkingCopyFreshness::Fresh | WorkingCopyFreshness::Updated(_) => { writeln!( ui.status(), @@ -638,22 +640,22 @@ impl CommandHelper { repo_loader: &RepoLoader, ) -> Result { if let Some(op_str) = &self.data.global_args.at_operation { - Ok(op_walk::resolve_op_for_load(repo_loader, op_str)?) + Ok(op_walk::resolve_op_for_load(repo_loader, op_str).block_on()?) } else { op_heads_store::resolve_op_heads( repo_loader.op_heads_store().as_ref(), repo_loader.op_store(), - |op_heads| { + |op_heads| async { writeln!( ui.status(), "Concurrent modification detected, resolving automatically.", )?; - let base_repo = repo_loader.load_at(&op_heads[0])?; + let base_repo = repo_loader.load_at(&op_heads[0]).await?; // TODO: It may be helpful to print each operation we're merging here let mut tx = start_repo_transaction(&base_repo, &self.data.string_args); for other_op_head in op_heads.into_iter().skip(1) { - tx.merge_operation(other_op_head)?; - let num_rebased = tx.repo_mut().rebase_descendants()?; + tx.merge_operation(other_op_head).await?; + let num_rebased = tx.repo_mut().rebase_descendants().await?; if num_rebased > 0 { writeln!( ui.status(), @@ -663,12 +665,14 @@ impl CommandHelper { } } Ok(tx - .write("reconcile divergent operations")? + .write("reconcile divergent operations") + .await? .leave_unpublished() .operation() .clone()) }, ) + .block_on() } } @@ -1154,14 +1158,15 @@ impl WorkspaceCommandHelper { let new_git_head_commit = tx.repo().store().get_commit(new_git_head_id)?; let wc_commit = tx .repo_mut() - .check_out(workspace_name, &new_git_head_commit)?; + .check_out(workspace_name, &new_git_head_commit) + .block_on()?; let mut locked_ws = self.workspace.start_working_copy_mutation()?; // The working copy was presumably updated by the git command that updated // HEAD, so we just need to reset our working copy // state to it without updating working copy files. 
locked_ws.locked_wc().reset(&wc_commit).block_on()?; - tx.repo_mut().rebase_descendants()?; - self.user_repo = ReadonlyUserRepo::new(tx.commit("import git head")?); + tx.repo_mut().rebase_descendants().block_on()?; + self.user_repo = ReadonlyUserRepo::new(tx.commit("import git head").block_on()?); locked_ws.finish(self.user_repo.repo.op_id().clone())?; if old_git_head.is_present() { writeln!( @@ -1199,7 +1204,7 @@ impl WorkspaceCommandHelper { let mut tx = tx.into_inner(); // Rebase here to show slightly different status message. - let num_rebased = tx.repo_mut().rebase_descendants()?; + let num_rebased = tx.repo_mut().rebase_descendants().block_on()?; if num_rebased > 0 { writeln!( ui.status(), @@ -1279,7 +1284,8 @@ operation that was subsequently lost (or was at least unavailable when you ran what the parent commits are supposed to be. That means that the diff compared to the current parents may contain changes from multiple commits. ", - )?; + ) + .block_on()?; writeln!( ui.status(), @@ -1542,7 +1548,7 @@ to the current parents may contain changes from multiple commits. } pub fn resolve_single_op(&self, op_str: &str) -> Result { - op_walk::resolve_op_with_repo(self.repo(), op_str) + op_walk::resolve_op_with_repo(self.repo(), op_str).block_on() } /// Resolve a revset to a single revision. Return an error if the revset is @@ -1854,11 +1860,14 @@ to the current parents may contain changes from multiple commits. let old_op_id = locked_ws.locked_wc().old_operation_id().clone(); let (repo, wc_commit) = - match WorkingCopyFreshness::check_stale(locked_ws.locked_wc(), &wc_commit, &repo) { + match WorkingCopyFreshness::check_stale(locked_ws.locked_wc(), &wc_commit, &repo) + .block_on() + { Ok(WorkingCopyFreshness::Fresh) => (repo, wc_commit), Ok(WorkingCopyFreshness::Updated(wc_operation)) => { let repo = repo .reload_at(&wc_operation) + .block_on() .map_err(snapshot_command_error)?; let wc_commit = if let Some(wc_commit) = get_wc_commit(&repo)? 
{ wc_commit @@ -1923,6 +1932,7 @@ See https://jj-vcs.github.io/jj/latest/working-copy/#stale-working-copy \ .rewrite_commit(&wc_commit) .set_tree_id(new_tree_id) .write() + .block_on() .map_err(snapshot_command_error)?; mut_repo .set_wc_commit(workspace_name, commit.id().clone()) @@ -1931,6 +1941,7 @@ See https://jj-vcs.github.io/jj/latest/working-copy/#stale-working-copy \ // Rebase descendants let num_rebased = mut_repo .rebase_descendants() + .block_on() .map_err(snapshot_command_error)?; if num_rebased > 0 { writeln!( @@ -1950,6 +1961,7 @@ See https://jj-vcs.github.io/jj/latest/working-copy/#stale-working-copy \ let repo = tx .commit("snapshot working copy") + .block_on() .map_err(snapshot_command_error)?; self.user_repo = ReadonlyUserRepo::new(repo); } @@ -2032,7 +2044,7 @@ See https://jj-vcs.github.io/jj/latest/working-copy/#stale-working-copy \ writeln!(ui.status(), "Nothing changed.")?; return Ok(()); } - let num_rebased = tx.repo_mut().rebase_descendants()?; + let num_rebased = tx.repo_mut().rebase_descendants().block_on()?; if num_rebased > 0 { writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?; } @@ -2044,7 +2056,9 @@ See https://jj-vcs.github.io/jj/latest/working-copy/#stale-working-copy \ .is_some() { let wc_commit = tx.repo().store().get_commit(wc_commit_id)?; - tx.repo_mut().check_out(name.clone(), &wc_commit)?; + tx.repo_mut() + .check_out(name.clone(), &wc_commit) + .block_on()?; writeln!( ui.warning_default(), "The working-copy commit in workspace '{name}' became immutable, so a new \ @@ -2087,7 +2101,7 @@ See https://jj-vcs.github.io/jj/latest/working-copy/#stale-working-copy \ crate::git_util::print_git_export_stats(ui, &stats)?; } - self.user_repo = ReadonlyUserRepo::new(tx.commit(description)?); + self.user_repo = ReadonlyUserRepo::new(tx.commit(description).block_on()?); // Update working copy before reporting repo changes, so that // potential errors while reporting changes (broken pipe, etc) @@ -2410,7 +2424,7 @@ impl 
WorkspaceCommandTransaction<'_> { pub fn check_out(&mut self, commit: &Commit) -> Result { let name = self.helper.workspace_name().to_owned(); self.id_prefix_context.take(); // invalidate - self.tx.repo_mut().check_out(name, commit) + self.tx.repo_mut().check_out(name, commit).block_on() } pub fn edit(&mut self, commit: &Commit) -> Result<(), EditCommitError> { @@ -2845,6 +2859,7 @@ pub fn update_working_copy( // warning for most commands (but be an error for the checkout command) let stats = workspace .check_out(repo.op_id().clone(), old_tree_id.as_ref(), new_commit) + .block_on() .map_err(|err| { internal_error_with_message( format!("Failed to check out commit {}", new_commit.id().hex()), diff --git a/cli/src/commands/abandon.rs b/cli/src/commands/abandon.rs index 6ff1e18e89a..0293f4fe61e 100644 --- a/cli/src/commands/abandon.rs +++ b/cli/src/commands/abandon.rs @@ -22,6 +22,7 @@ use jj_lib::refs::diff_named_ref_targets; use jj_lib::repo::Repo as _; use jj_lib::revset::RevsetExpression; use jj_lib::rewrite::RewriteRefsOptions; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -107,23 +108,25 @@ pub(crate) fn cmd_abandon( delete_abandoned_bookmarks: !args.retain_bookmarks, }; let mut num_rebased = 0; - tx.repo_mut().transform_descendants_with_options( - to_abandon.iter().cloned().collect(), - &HashMap::new(), - &options, - async |rewriter| { - if to_abandon.contains(rewriter.old_commit().id()) { - rewriter.abandon(); - } else if args.restore_descendants { - rewriter.reparent().write()?; - num_rebased += 1; - } else { - rewriter.rebase().await?.write()?; - num_rebased += 1; - } - Ok(()) - }, - )?; + tx.repo_mut() + .transform_descendants_with_options( + to_abandon.iter().cloned().collect(), + &HashMap::new(), + &options, + async |rewriter| { + if to_abandon.contains(rewriter.old_commit().id()) { + rewriter.abandon(); + } else if args.restore_descendants { + rewriter.reparent().write().await?; + num_rebased += 1; + } else 
{ + rewriter.rebase().await?.write().await?; + num_rebased += 1; + } + Ok(()) + }, + ) + .block_on()?; let deleted_bookmarks = diff_named_ref_targets( tx.base_repo().view().local_bookmarks(), diff --git a/cli/src/commands/absorb.rs b/cli/src/commands/absorb.rs index 65c0f56c412..9f3783b66d8 100644 --- a/cli/src/commands/absorb.rs +++ b/cli/src/commands/absorb.rs @@ -98,7 +98,7 @@ pub(crate) fn cmd_absorb( workspace_command.check_rewritable(selected_trees.target_commits.keys())?; let mut tx = workspace_command.start_transaction(); - let stats = absorb_hunks(tx.repo_mut(), &source, selected_trees.target_commits)?; + let stats = absorb_hunks(tx.repo_mut(), &source, selected_trees.target_commits).block_on()?; if let Some(mut formatter) = ui.status_formatter() { if !stats.rewritten_destinations.is_empty() { diff --git a/cli/src/commands/commit.rs b/cli/src/commands/commit.rs index 3dd1430aabc..e23c2f6749a 100644 --- a/cli/src/commands/commit.rs +++ b/cli/src/commands/commit.rs @@ -18,6 +18,7 @@ use indoc::writedoc; use jj_lib::backend::Signature; use jj_lib::object_id::ObjectId as _; use jj_lib::repo::Repo as _; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -181,7 +182,7 @@ new working-copy commit. } else { let description = add_trailers(ui, &tx, &commit_builder)?; commit_builder.set_description(description); - let temp_commit = commit_builder.write_hidden()?; + let temp_commit = commit_builder.write_hidden().block_on()?; let intro = ""; let description = description_template(ui, &tx, intro, &temp_commit)?; let description = edit_description(&text_editor, &description)?; @@ -198,14 +199,15 @@ new working-copy commit. 
description }; commit_builder.set_description(description); - let new_commit = commit_builder.write(tx.repo_mut())?; + let new_commit = commit_builder.write(tx.repo_mut()).block_on()?; let workspace_names = tx.repo().view().workspaces_for_wc_commit_id(commit.id()); if !workspace_names.is_empty() { let new_wc_commit = tx .repo_mut() .new_commit(vec![new_commit.id().clone()], commit.tree_id().clone()) - .write()?; + .write() + .block_on()?; // Does nothing if there's no bookmarks to advance. tx.advance_bookmarks(advanceable_bookmarks, new_commit.id()); diff --git a/cli/src/commands/debug/index.rs b/cli/src/commands/debug/index.rs index 9d95143f2b6..5cebff31ec3 100644 --- a/cli/src/commands/debug/index.rs +++ b/cli/src/commands/debug/index.rs @@ -16,6 +16,7 @@ use std::fmt::Debug; use std::io::Write as _; use jj_lib::default_index::DefaultReadonlyIndex; +use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; @@ -40,6 +41,7 @@ pub fn cmd_debug_index( let index_store = repo_loader.index_store(); let index = index_store .get_index_at_op(&op, repo_loader.store()) + .block_on() .map_err(internal_error)?; if let Some(default_index) = index.downcast_ref::() { let stats = default_index.stats(); diff --git a/cli/src/commands/describe.rs b/cli/src/commands/describe.rs index 6ecbdee7944..ddcb70fa3f7 100644 --- a/cli/src/commands/describe.rs +++ b/cli/src/commands/describe.rs @@ -22,6 +22,7 @@ use itertools::Itertools as _; use jj_lib::backend::Signature; use jj_lib::commit::CommitIteratorExt as _; use jj_lib::object_id::ObjectId as _; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -208,7 +209,7 @@ pub(crate) fn cmd_describe( // can be discarded as soon as it's no longer the working copy. Adding a // trailer to an empty description would break that logic. 
if use_editor || !commit_builder.description().is_empty() { - let temp_commit = commit_builder.write_hidden()?; + let temp_commit = commit_builder.write_hidden().block_on()?; let new_description = add_trailers_with_template(&trailer_template, &temp_commit)?; commit_builder.set_description(new_description); } @@ -222,6 +223,7 @@ pub(crate) fn cmd_describe( .map(|(commit, commit_builder)| { commit_builder .write_hidden() + .block_on() .map(|temp_commit| (commit.id(), temp_commit)) }) .try_collect()?; @@ -288,26 +290,28 @@ pub(crate) fn cmd_describe( // `MutableRepo::transform_descendants` prevents us from rewriting the same // commit multiple times, and adding additional entries in the predecessor // chain. - tx.repo_mut().transform_descendants( - commit_builders.keys().map(|&id| id.clone()).collect(), - async |rewriter| { - let old_commit_id = rewriter.old_commit().id().clone(); - let commit_builder = rewriter.reparent(); - if let Some(temp_builder) = commit_builders.get(&old_commit_id) { - commit_builder - .set_description(temp_builder.description()) - .set_author(temp_builder.author().clone()) - // Copy back committer for consistency with author timestamp - .set_committer(temp_builder.committer().clone()) - .write()?; - num_described += 1; - } else { - commit_builder.write()?; - num_reparented += 1; - } - Ok(()) - }, - )?; + tx.repo_mut() + .transform_descendants( + commit_builders.keys().map(|&id| id.clone()).collect(), + async |rewriter| { + let old_commit_id = rewriter.old_commit().id().clone(); + let commit_builder = rewriter.reparent(); + if let Some(temp_builder) = commit_builders.get(&old_commit_id) { + commit_builder + .set_description(temp_builder.description()) + .set_author(temp_builder.author().clone()) + // Copy back committer for consistency with author timestamp + .set_committer(temp_builder.committer().clone()) + .write().await?; + num_described += 1; + } else { + commit_builder.write().await?; + num_reparented += 1; + } + Ok(()) + }, + ) + 
.block_on()?; if num_described > 1 { writeln!(ui.status(), "Updated {num_described} commits")?; } diff --git a/cli/src/commands/diffedit.rs b/cli/src/commands/diffedit.rs index 1313695daf5..7de7fad7c08 100644 --- a/cli/src/commands/diffedit.rs +++ b/cli/src/commands/diffedit.rs @@ -155,16 +155,17 @@ don't make any changes, then the operation will be aborted.", tx.repo_mut() .rewrite_commit(&target_commit) .set_tree_id(tree_id) - .write()?; + .write() + .block_on()?; // rebase_descendants early; otherwise `new_commit` would always have // a conflicted change id at this point. let (num_rebased, extra_msg) = if args.restore_descendants { ( - tx.repo_mut().reparent_descendants()?, + tx.repo_mut().reparent_descendants().block_on()?, " (while preserving their content)", ) } else { - (tx.repo_mut().rebase_descendants()?, "") + (tx.repo_mut().rebase_descendants().block_on()?, "") }; if let Some(mut formatter) = ui.status_formatter() && num_rebased > 0 diff --git a/cli/src/commands/duplicate.rs b/cli/src/commands/duplicate.rs index 93376cda83b..9208a7d4154 100644 --- a/cli/src/commands/duplicate.rs +++ b/cli/src/commands/duplicate.rs @@ -201,7 +201,7 @@ pub(crate) fn cmd_duplicate( ) .block_on()? } else { - duplicate_commits_onto_parents(tx.repo_mut(), &to_duplicate, &new_descs)? + duplicate_commits_onto_parents(tx.repo_mut(), &to_duplicate, &new_descs).block_on()? 
}; if let Some(mut formatter) = ui.status_formatter() { diff --git a/cli/src/commands/file/annotate.rs b/cli/src/commands/file/annotate.rs index 59a21856825..affdec8505b 100644 --- a/cli/src/commands/file/annotate.rs +++ b/cli/src/commands/file/annotate.rs @@ -19,6 +19,7 @@ use jj_lib::annotate::FileAnnotator; use jj_lib::annotate::LineOrigin; use jj_lib::repo::Repo; use jj_lib::revset::RevsetExpression; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -103,8 +104,10 @@ pub(crate) fn cmd_file_annotate( // Note that this is probably different from "--skip REVS", which won't // exclude the revisions, but will ignore diffs in those revisions as if // ancestor revisions had new content. - let mut annotator = FileAnnotator::from_commit(&starting_commit, &file_path)?; - annotator.compute(repo.as_ref(), &RevsetExpression::all())?; + let mut annotator = FileAnnotator::from_commit(&starting_commit, &file_path).block_on()?; + annotator + .compute(repo.as_ref(), &RevsetExpression::all()) + .block_on()?; let annotation = annotator.to_annotation(); render_file_annotation(repo.as_ref(), ui, &template, &annotation)?; diff --git a/cli/src/commands/file/chmod.rs b/cli/src/commands/file/chmod.rs index ee3a6621a5e..25917d7ac69 100644 --- a/cli/src/commands/file/chmod.rs +++ b/cli/src/commands/file/chmod.rs @@ -16,6 +16,7 @@ use clap_complete::ArgValueCompleter; use jj_lib::backend::TreeValue; use jj_lib::merged_tree::MergedTreeBuilder; use jj_lib::object_id::ObjectId as _; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -119,11 +120,12 @@ pub(crate) fn cmd_file_chmod( tree_builder.set_or_remove(repo_path, tree_value); } - let new_tree_id = tree_builder.write_tree(store)?; + let new_tree_id = tree_builder.write_tree(store).block_on()?; tx.repo_mut() .rewrite_commit(&commit) .set_tree_id(new_tree_id) - .write()?; + .write() + .block_on()?; tx.finish( ui, format!( diff --git 
a/cli/src/commands/file/track.rs b/cli/src/commands/file/track.rs index b05df1d0e64..d3299abf51e 100644 --- a/cli/src/commands/file/track.rs +++ b/cli/src/commands/file/track.rs @@ -60,11 +60,11 @@ pub(crate) fn cmd_file_track( let mut tx = workspace_command.start_transaction().into_inner(); let (mut locked_ws, _wc_commit) = workspace_command.start_working_copy_mutation()?; let (_tree_id, track_stats) = locked_ws.locked_wc().snapshot(&options).block_on()?; - let num_rebased = tx.repo_mut().rebase_descendants()?; + let num_rebased = tx.repo_mut().rebase_descendants().block_on()?; if num_rebased > 0 { writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?; } - let repo = tx.commit("track paths")?; + let repo = tx.commit("track paths").block_on()?; locked_ws.finish(repo.op_id().clone())?; print_track_snapshot_stats( ui, diff --git a/cli/src/commands/file/untrack.rs b/cli/src/commands/file/untrack.rs index c8cadc509c0..f425b27c406 100644 --- a/cli/src/commands/file/untrack.rs +++ b/cli/src/commands/file/untrack.rs @@ -71,12 +71,13 @@ pub(crate) fn cmd_file_untrack( for (path, _value) in wc_tree.entries_matching(matcher.as_ref()) { tree_builder.set_or_remove(path, Merge::absent()); } - let new_tree_id = tree_builder.write_tree(&store)?; + let new_tree_id = tree_builder.write_tree(&store).block_on()?; let new_commit = tx .repo_mut() .rewrite_commit(&wc_commit) .set_tree_id(new_tree_id) - .write()?; + .write() + .block_on()?; // Reset the working copy to the new commit locked_ws.locked_wc().reset(&new_commit).block_on()?; // Commit the working copy again so we can inform the user if paths couldn't be @@ -109,14 +110,14 @@ Make sure they're ignored, then try again.", locked_ws.locked_wc().reset(&new_commit).block_on()?; } } - let num_rebased = tx.repo_mut().rebase_descendants()?; + let num_rebased = tx.repo_mut().rebase_descendants().block_on()?; if num_rebased > 0 { writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?; } if 
working_copy_shared_with_git { export_working_copy_changes_to_git(ui, tx.repo_mut(), &wc_tree, &new_commit.tree()?)?; } - let repo = tx.commit("untrack paths")?; + let repo = tx.commit("untrack paths").block_on()?; locked_ws.finish(repo.op_id().clone())?; print_snapshot_stats(ui, &stats, workspace_command.env().path_converter())?; Ok(()) diff --git a/cli/src/commands/gerrit/upload.rs b/cli/src/commands/gerrit/upload.rs index 6408799a679..e8be6817deb 100644 --- a/cli/src/commands/gerrit/upload.rs +++ b/cli/src/commands/gerrit/upload.rs @@ -31,6 +31,7 @@ use jj_lib::settings::UserSettings; use jj_lib::store::Store; use jj_lib::trailer::Trailer; use jj_lib::trailer::parse_description_trailers; +use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; @@ -306,7 +307,8 @@ pub fn cmd_gerrit_upload( // two patchsets with the only difference being the timestamp. .set_committer(original_commit.committer().clone()) .set_author(original_commit.author().clone()) - .write()?; + .write() + .block_on()?; old_to_new.insert(original_commit.id().clone(), new_commit); } diff --git a/cli/src/commands/git/clone.rs b/cli/src/commands/git/clone.rs index 71f039b22eb..7dac0e9fdf6 100644 --- a/cli/src/commands/git/clone.rs +++ b/cli/src/commands/git/clone.rs @@ -28,6 +28,7 @@ use jj_lib::ref_name::RemoteNameBuf; use jj_lib::repo::Repo as _; use jj_lib::str_util::StringPattern; use jj_lib::workspace::Workspace; +use pollster::FutureExt as _; use super::write_repository_level_trunk_alias; use crate::cli_util::CommandHelper; @@ -272,8 +273,9 @@ fn configure_remote( )?; let op = workspace .repo_loader() - .load_operation(workspace_command.repo().op_id())?; - let repo = workspace.repo_loader().load_at(&op)?; + .load_operation(workspace_command.repo().op_id()) + .block_on()?; + let repo = workspace.repo_loader().load_at(&op).block_on()?; command.for_workable_repo(ui, workspace, repo) } diff --git a/cli/src/commands/git/init.rs 
b/cli/src/commands/git/init.rs index 51f1fab50f5..b71450c2df6 100644 --- a/cli/src/commands/git/init.rs +++ b/cli/src/commands/git/init.rs @@ -28,6 +28,7 @@ use jj_lib::repo::ReadonlyRepo; use jj_lib::repo::Repo as _; use jj_lib::view::View; use jj_lib::workspace::Workspace; +use pollster::FutureExt as _; use super::write_repository_level_trunk_alias; use crate::cli_util::CommandHelper; @@ -232,7 +233,7 @@ fn init_git_refs( let stats = git::export_refs(tx.repo_mut())?; print_git_export_stats(ui, &stats)?; } - let repo = tx.commit("import git refs")?; + let repo = tx.commit("import git refs").block_on()?; writeln!( ui.status(), "Done importing changes from the underlying Git repo." diff --git a/cli/src/commands/git/push.rs b/cli/src/commands/git/push.rs index 98c8e383114..cbe78ed6c68 100644 --- a/cli/src/commands/git/push.rs +++ b/cli/src/commands/git/push.rs @@ -47,6 +47,7 @@ use jj_lib::settings::UserSettings; use jj_lib::signing::SignBehavior; use jj_lib::str_util::StringPattern; use jj_lib::view::View; +use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; @@ -605,24 +606,23 @@ fn sign_commits_before_push( let commit_ids: IndexSet = commits_to_sign.iter().ids().cloned().collect(); let mut old_to_new_commits_map: HashMap = HashMap::new(); let mut num_rebased_descendants = 0; - tx.repo_mut().transform_descendants( - commit_ids.iter().cloned().collect_vec(), - async |rewriter| { + tx.repo_mut() + .transform_descendants(commit_ids.iter().cloned().collect_vec(), async |rewriter| { let old_commit_id = rewriter.old_commit().id().clone(); if commit_ids.contains(&old_commit_id) { let commit = rewriter .reparent() .set_sign_behavior(sign_behavior) - .write()?; + .write().await?; old_to_new_commits_map.insert(old_commit_id, commit.id().clone()); } else { num_rebased_descendants += 1; - let commit = rewriter.reparent().write()?; + let commit = rewriter.reparent().write().await?; old_to_new_commits_map.insert(old_commit_id, 
commit.id().clone()); } Ok(()) - }, - )?; + }) + .block_on()?; let bookmark_updates = bookmark_updates .into_iter() diff --git a/cli/src/commands/metaedit.rs b/cli/src/commands/metaedit.rs index b80cfb267b1..90483fb34bb 100644 --- a/cli/src/commands/metaedit.rs +++ b/cli/src/commands/metaedit.rs @@ -20,6 +20,7 @@ use jj_lib::backend::Timestamp; use jj_lib::commit::Commit; use jj_lib::object_id::ObjectId as _; use jj_lib::time_util::parse_datetime; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -210,15 +211,16 @@ pub(crate) fn cmd_metaedit( } if has_changes { - let new_commit = commit_builder.write()?; + let new_commit = commit_builder.write().await?; modified.push(new_commit); } } else if rewriter.parents_changed() { - rewriter.reparent().write()?; + rewriter.reparent().write().await?; num_reparented += 1; } Ok(()) - })?; + }) + .block_on()?; if !modified.is_empty() { writeln!(ui.status(), "Modified {} commits:", modified.len())?; if let Some(mut formatter) = ui.status_formatter() { diff --git a/cli/src/commands/new.rs b/cli/src/commands/new.rs index c14d5b16ed1..6d6c8248d99 100644 --- a/cli/src/commands/new.rs +++ b/cli/src/commands/new.rs @@ -200,7 +200,7 @@ pub(crate) fn cmd_new( description = add_trailers(ui, &tx, &commit_builder)?; } commit_builder.set_description(&description); - let new_commit = commit_builder.write(tx.repo_mut())?; + let new_commit = commit_builder.write(tx.repo_mut()).block_on()?; let child_commits: Vec<_> = child_commit_ids .iter() @@ -218,7 +218,7 @@ pub(crate) fn cmd_new( rebase_commit(tx.repo_mut(), child_commit, new_parent_ids).block_on()?; num_rebased += 1; } - num_rebased += tx.repo_mut().rebase_descendants()?; + num_rebased += tx.repo_mut().rebase_descendants().block_on()?; if args.no_edit { if let Some(mut formatter) = ui.status_formatter() { diff --git a/cli/src/commands/operation/abandon.rs b/cli/src/commands/operation/abandon.rs index d94ca96db9f..448c34cf3bf 100644 --- 
a/cli/src/commands/operation/abandon.rs +++ b/cli/src/commands/operation/abandon.rs @@ -65,12 +65,12 @@ pub fn cmd_op_abandon( if command.global_args().at_operation.is_some() { return Err(cli_error("--at-op is not respected")); } - let current_head_ops = op_walk::get_current_head_ops(op_store, op_heads_store.as_ref())?; - let resolve_op = |op_str| op_walk::resolve_op_at(op_store, ¤t_head_ops, op_str); + let current_head_ops = op_walk::get_current_head_ops(op_store, op_heads_store.as_ref()).block_on()?; + let resolve_op = |op_str| op_walk::resolve_op_at(op_store, ¤t_head_ops, op_str).block_on(); let (abandon_root_op, abandon_head_ops) = if let Some((root_op_str, head_op_str)) = args.operation.split_once("..") { let root_op = if root_op_str.is_empty() { - repo_loader.root_operation() + repo_loader.root_operation().block_on() } else { resolve_op(root_op_str)? }; @@ -111,7 +111,8 @@ pub fn cmd_op_abandon( &abandon_head_ops, ¤t_head_ops, &abandon_root_op, - )?; + ) + .block_on()?; assert_eq!( current_head_ops.len(), stats.new_head_ids.len(), diff --git a/cli/src/commands/operation/diff.rs b/cli/src/commands/operation/diff.rs index c299add6ef8..a40f42b4780 100644 --- a/cli/src/commands/operation/diff.rs +++ b/cli/src/commands/operation/diff.rs @@ -109,9 +109,11 @@ pub fn cmd_op_diff( let graph_style = GraphStyle::from_settings(settings)?; let with_content_format = LogContentFormat::new(ui, settings)?; - let merged_from_op = repo_loader.merge_operations(from_ops.clone(), None)?; - let from_repo = repo_loader.load_at(&merged_from_op)?; - let to_repo = repo_loader.load_at(&to_op)?; + let merged_from_op = repo_loader + .merge_operations(from_ops.clone(), None) + .block_on()?; + let from_repo = repo_loader.load_at(&merged_from_op).block_on()?; + let to_repo = repo_loader.load_at(&to_op).block_on()?; // Create a new transaction starting from `to_repo`. 
let mut tx = to_repo.start_transaction(); diff --git a/cli/src/commands/operation/log.rs b/cli/src/commands/operation/log.rs index c20850547ac..af953ff068b 100644 --- a/cli/src/commands/operation/log.rs +++ b/cli/src/commands/operation/log.rs @@ -22,6 +22,7 @@ use jj_lib::op_store::OpStoreError; use jj_lib::op_walk; use jj_lib::operation::Operation; use jj_lib::repo::RepoLoader; +use pollster::FutureExt as _; use super::diff::show_op_diff; use crate::cli_util::CommandHelper; @@ -151,9 +152,11 @@ fn do_op_log( op: &Operation, with_content_format: &LogContentFormat| { let parent_ops: Vec<_> = op.parents().try_collect()?; - let merged_parent_op = repo_loader.merge_operations(parent_ops.clone(), None)?; - let parent_repo = repo_loader.load_at(&merged_parent_op)?; - let repo = repo_loader.load_at(op)?; + let merged_parent_op = repo_loader + .merge_operations(parent_ops.clone(), None) + .block_on()?; + let parent_repo = repo_loader.load_at(&merged_parent_op).block_on()?; + let repo = repo_loader.load_at(op).block_on()?; let id_prefix_context = workspace_env.new_id_prefix_context(); let commit_summary_template = { diff --git a/cli/src/commands/operation/restore.rs b/cli/src/commands/operation/restore.rs index f6387f73b38..528e3e6cafb 100644 --- a/cli/src/commands/operation/restore.rs +++ b/cli/src/commands/operation/restore.rs @@ -14,6 +14,7 @@ use clap_complete::ArgValueCandidates; use jj_lib::object_id::ObjectId as _; +use pollster::FutureExt as _; use super::DEFAULT_REVERT_WHAT; use super::RevertWhatToRestore; @@ -53,7 +54,7 @@ pub fn cmd_op_restore( let target_op = workspace_command.resolve_single_op(&args.operation)?; let mut tx = workspace_command.start_transaction(); let new_view = view_with_desired_portions_restored( - target_op.view()?.store_view(), + target_op.view().block_on()?.store_view(), tx.base_repo().view().store_view(), &args.what, ); diff --git a/cli/src/commands/operation/revert.rs b/cli/src/commands/operation/revert.rs index 04d83f02b02..e1c048f3c3e 
100644 --- a/cli/src/commands/operation/revert.rs +++ b/cli/src/commands/operation/revert.rs @@ -17,6 +17,7 @@ use itertools::Itertools as _; use jj_lib::object_id::ObjectId as _; use jj_lib::operation::Operation; use jj_lib::repo::Repo as _; +use pollster::FutureExt as _; use super::DEFAULT_REVERT_WHAT; use super::RevertWhatToRestore; @@ -65,8 +66,8 @@ pub fn cmd_op_revert( let mut tx = workspace_command.start_transaction(); let repo_loader = tx.base_repo().loader(); - let bad_repo = repo_loader.load_at(&bad_op)?; - let parent_repo = repo_loader.load_at(&parent_of_bad_op)?; + let bad_repo = repo_loader.load_at(&bad_op).block_on()?; + let parent_repo = repo_loader.load_at(&parent_of_bad_op).block_on()?; tx.repo_mut().merge(&bad_repo, &parent_repo)?; let new_view = view_with_desired_portions_restored( tx.repo().view().store_view(), diff --git a/cli/src/commands/operation/show.rs b/cli/src/commands/operation/show.rs index ce027131a89..0d9ddb30af4 100644 --- a/cli/src/commands/operation/show.rs +++ b/cli/src/commands/operation/show.rs @@ -15,6 +15,7 @@ use clap_complete::ArgValueCandidates; use itertools::Itertools as _; use jj_lib::operation::Operation; +use pollster::FutureExt as _; use super::diff::show_op_diff; use crate::cli_util::CommandHelper; @@ -75,9 +76,11 @@ pub fn cmd_op_show( let settings = workspace_command.settings(); let op = workspace_command.resolve_single_op(&args.operation)?; let parent_ops: Vec<_> = op.parents().try_collect()?; - let merged_parent_op = repo_loader.merge_operations(parent_ops.clone(), None)?; - let parent_repo = repo_loader.load_at(&merged_parent_op)?; - let repo = repo_loader.load_at(&op)?; + let merged_parent_op = repo_loader + .merge_operations(parent_ops.clone(), None) + .block_on()?; + let parent_repo = repo_loader.load_at(&merged_parent_op).block_on()?; + let repo = repo_loader.load_at(&op).block_on()?; let id_prefix_context = workspace_env.new_id_prefix_context(); let commit_summary_template = { diff --git 
a/cli/src/commands/parallelize.rs b/cli/src/commands/parallelize.rs index 63d4519a17a..ec6f4df858c 100644 --- a/cli/src/commands/parallelize.rs +++ b/cli/src/commands/parallelize.rs @@ -20,6 +20,7 @@ use itertools::Itertools as _; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::commit::CommitIteratorExt as _; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -114,36 +115,38 @@ pub(crate) fn cmd_parallelize( new_child_parents.insert(commit.id().clone(), new_parents); } - tx.repo_mut().transform_descendants( - target_commits.iter().ids().cloned().collect_vec(), - async |mut rewriter| { - // Commits in the target set do not depend on each other but they still depend - // on other parents - if let Some(new_parents) = new_target_parents.get(rewriter.old_commit().id()) { - rewriter.set_new_rewritten_parents(new_parents); - } else if rewriter - .old_commit() - .parent_ids() - .iter() - .any(|id| new_child_parents.contains_key(id)) - { - let mut new_parents = vec![]; - for parent in rewriter.old_commit().parent_ids() { - if let Some(parents) = new_child_parents.get(parent) { - new_parents.extend(parents.iter().cloned()); - } else { - new_parents.push(parent.clone()); + tx.repo_mut() + .transform_descendants( + target_commits.iter().ids().cloned().collect_vec(), + async |mut rewriter| { + // Commits in the target set do not depend on each other but they still depend + // on other parents + if let Some(new_parents) = new_target_parents.get(rewriter.old_commit().id()) { + rewriter.set_new_rewritten_parents(new_parents); + } else if rewriter + .old_commit() + .parent_ids() + .iter() + .any(|id| new_child_parents.contains_key(id)) + { + let mut new_parents = vec![]; + for parent in rewriter.old_commit().parent_ids() { + if let Some(parents) = new_child_parents.get(parent) { + new_parents.extend(parents.iter().cloned()); + } else { + new_parents.push(parent.clone()); + } } + 
rewriter.set_new_rewritten_parents(&new_parents); } - rewriter.set_new_rewritten_parents(&new_parents); - } - if rewriter.parents_changed() { - let builder = rewriter.rebase().await?; - builder.write()?; - } - Ok(()) - }, - )?; + if rewriter.parents_changed() { + let builder = rewriter.rebase().await?; + builder.write().await?; + } + Ok(()) + }, + ) + .block_on()?; tx.finish(ui, format!("parallelize {} commits", target_commits.len())) } diff --git a/cli/src/commands/rebase.rs b/cli/src/commands/rebase.rs index d9c63e94057..3a45bc07faa 100644 --- a/cli/src/commands/rebase.rs +++ b/cli/src/commands/rebase.rs @@ -32,6 +32,7 @@ use jj_lib::rewrite::RebaseOptions; use jj_lib::rewrite::RewriteRefsOptions; use jj_lib::rewrite::compute_move_commits; use jj_lib::rewrite::find_duplicate_divergent_commits; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -392,10 +393,11 @@ pub(crate) fn cmd_rebase( }; let mut tx = workspace_command.start_transaction(); - let mut computed_move = compute_move_commits(tx.repo(), &loc)?; + let mut computed_move = compute_move_commits(tx.repo(), &loc).block_on()?; if !args.keep_divergent { let abandoned_divergent = - find_duplicate_divergent_commits(tx.repo(), &loc.new_parent_ids, &loc.target)?; + find_duplicate_divergent_commits(tx.repo(), &loc.new_parent_ids, &loc.target) + .block_on()?; computed_move.record_to_abandon(abandoned_divergent.iter().map(Commit::id).cloned()); if !abandoned_divergent.is_empty() && let Some(mut formatter) = ui.status_formatter() @@ -412,7 +414,9 @@ pub(crate) fn cmd_rebase( )?; } }; - let stats = computed_move.apply(tx.repo_mut(), &rebase_options)?; + let stats = computed_move + .apply(tx.repo_mut(), &rebase_options) + .block_on()?; print_move_commits_stats(ui, &stats)?; tx.finish(ui, tx_description(&loc.target))?; diff --git a/cli/src/commands/redo.rs b/cli/src/commands/redo.rs index 558db86271b..51dcc511a94 100644 --- a/cli/src/commands/redo.rs +++ 
b/cli/src/commands/redo.rs @@ -15,6 +15,7 @@ use itertools::Itertools as _; use jj_lib::object_id::ObjectId as _; use jj_lib::op_store::OperationId; +use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; @@ -104,7 +105,8 @@ pub fn cmd_redo(ui: &mut Ui, command: &CommandHelper, _: &RedoArgs) -> Result<() op_to_redo = workspace_command .repo() .loader() - .load_operation(&id_of_restored_op)?; + .load_operation(&id_of_restored_op) + .block_on()?; } if !op_to_redo @@ -143,12 +145,13 @@ pub fn cmd_redo(ui: &mut Ui, command: &CommandHelper, _: &RedoArgs) -> Result<() op_to_restore = workspace_command .repo() .loader() - .load_operation(&id_of_original_op)?; + .load_operation(&id_of_original_op) + .block_on()?; } let mut tx = workspace_command.start_transaction(); let new_view = view_with_desired_portions_restored( - op_to_restore.view()?.store_view(), + op_to_restore.view().block_on()?.store_view(), tx.base_repo().view().store_view(), &DEFAULT_REVERT_WHAT, ); diff --git a/cli/src/commands/resolve.rs b/cli/src/commands/resolve.rs index e88ecc5ed83..ea48cf10288 100644 --- a/cli/src/commands/resolve.rs +++ b/cli/src/commands/resolve.rs @@ -16,6 +16,7 @@ use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::object_id::ObjectId as _; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -124,7 +125,8 @@ pub(crate) fn cmd_resolve( .repo_mut() .rewrite_commit(&commit) .set_tree_id(new_tree_id) - .write()?; + .write() + .block_on()?; tx.finish( ui, format!("Resolve conflicts in commit {}", commit.id().hex()), diff --git a/cli/src/commands/restore.rs b/cli/src/commands/restore.rs index 2ad06842065..7047f830f3e 100644 --- a/cli/src/commands/restore.rs +++ b/cli/src/commands/restore.rs @@ -19,6 +19,7 @@ use clap_complete::ArgValueCompleter; use indoc::formatdoc; use itertools::Itertools as _; use jj_lib::object_id::ObjectId 
as _; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -171,16 +172,17 @@ pub(crate) fn cmd_restore( tx.repo_mut() .rewrite_commit(&to_commit) .set_tree_id(new_tree_id) - .write()?; + .write() + .block_on()?; // rebase_descendants early; otherwise the new commit would always have // a conflicted change id at this point. let (num_rebased, extra_msg) = if args.restore_descendants { ( - tx.repo_mut().reparent_descendants()?, + tx.repo_mut().reparent_descendants().block_on()?, " (while preserving their content)", ) } else { - (tx.repo_mut().rebase_descendants()?, "") + (tx.repo_mut().rebase_descendants().block_on()?, "") }; if let Some(mut formatter) = ui.status_formatter() && num_rebased > 0 diff --git a/cli/src/commands/revert.rs b/cli/src/commands/revert.rs index 875e7c500e4..6e775687af4 100644 --- a/cli/src/commands/revert.rs +++ b/cli/src/commands/revert.rs @@ -149,7 +149,8 @@ pub(crate) fn cmd_revert( .repo_mut() .new_commit(new_parent_ids, new_tree.id()) .set_description(new_commit_description) - .write()?; + .write() + .block_on()?; parent_ids = vec![new_commit.id().clone()]; reverted_commits.push(new_commit); new_base_tree = new_tree; @@ -180,9 +181,9 @@ pub(crate) fn cmd_revert( rewriter.set_new_parents(child_new_parent_ids.into_iter().collect()); } num_rebased += 1; - rewriter.rebase().await?.write()?; + rewriter.rebase().await?.write().await?; Ok(()) - })?; + }).block_on()?; if let Some(mut formatter) = ui.status_formatter() { writeln!( diff --git a/cli/src/commands/sign.rs b/cli/src/commands/sign.rs index 9e2266cadc2..8b0ce51a056 100644 --- a/cli/src/commands/sign.rs +++ b/cli/src/commands/sign.rs @@ -19,6 +19,7 @@ use jj_lib::commit::Commit; use jj_lib::commit::CommitIteratorExt as _; use jj_lib::repo::Repo as _; use jj_lib::signing::SignBehavior; +use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; @@ -87,30 +88,32 @@ pub fn cmd_sign(ui: &mut Ui, command: 
&CommandHelper, args: &SignArgs) -> Result let mut signed_commits = vec![]; let mut num_reparented = 0; - tx.repo_mut().transform_descendants( - to_sign.iter().ids().cloned().collect_vec(), - async |rewriter| { - let old_commit = rewriter.old_commit().clone(); - let mut commit_builder = rewriter.reparent(); - - if to_sign.contains(&old_commit) { - if let Some(key) = &args.key { - commit_builder = commit_builder.set_sign_key(key.clone()); + tx.repo_mut() + .transform_descendants( + to_sign.iter().ids().cloned().collect_vec(), + async |rewriter| { + let old_commit = rewriter.old_commit().clone(); + let mut commit_builder = rewriter.reparent(); + + if to_sign.contains(&old_commit) { + if let Some(key) = &args.key { + commit_builder = commit_builder.set_sign_key(key.clone()); + } + + let new_commit = commit_builder + .set_sign_behavior(SignBehavior::Force) + .write().await?; + + signed_commits.push(new_commit); + } else { + commit_builder.write().await?; + num_reparented += 1; } - let new_commit = commit_builder - .set_sign_behavior(SignBehavior::Force) - .write()?; - - signed_commits.push(new_commit); - } else { - commit_builder.write()?; - num_reparented += 1; - } - - Ok(()) - }, - )?; + Ok(()) + }, + ) + .block_on()?; if let Some(mut formatter) = ui.status_formatter() && !signed_commits.is_empty() diff --git a/cli/src/commands/simplify_parents.rs b/cli/src/commands/simplify_parents.rs index be90491fd93..e282473b24f 100644 --- a/cli/src/commands/simplify_parents.rs +++ b/cli/src/commands/simplify_parents.rs @@ -3,6 +3,7 @@ use std::collections::HashSet; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::BackendError; +use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; @@ -95,7 +96,7 @@ pub(crate) fn cmd_simplify_parents( let num_new_heads = rewriter.new_parents().len(); if rewriter.parents_changed() { - rewriter.reparent().write()?; + rewriter.reparent().write().await?; if 
num_new_heads < num_old_heads { simplified_commits += 1; @@ -105,7 +106,8 @@ pub(crate) fn cmd_simplify_parents( } } Ok(()) - })?; + }) + .block_on()?; if let Some(mut formatter) = ui.status_formatter() && simplified_commits > 0 diff --git a/cli/src/commands/split.rs b/cli/src/commands/split.rs index 8b1bf679a3c..f5b9fb4e971 100644 --- a/cli/src/commands/split.rs +++ b/cli/src/commands/split.rs @@ -246,13 +246,13 @@ pub(crate) fn cmd_split( } else { let new_description = add_trailers(ui, &tx, &commit_builder)?; commit_builder.set_description(new_description); - let temp_commit = commit_builder.write_hidden()?; + let temp_commit = commit_builder.write_hidden().block_on()?; let intro = "Enter a description for the selected changes."; let template = description_template(ui, &tx, intro, &temp_commit)?; edit_description(&text_editor, &template)? }; commit_builder.set_description(description); - commit_builder.write(tx.repo_mut())? + commit_builder.write(tx.repo_mut()).block_on()? }; // Create the second commit, which includes everything the user didn't @@ -294,13 +294,13 @@ pub(crate) fn cmd_split( } else { let new_description = add_trailers(ui, &tx, &commit_builder)?; commit_builder.set_description(new_description); - let temp_commit = commit_builder.write_hidden()?; + let temp_commit = commit_builder.write_hidden().block_on()?; let intro = "Enter a description for the remaining changes."; let template = description_template(ui, &tx, intro, &temp_commit)?; edit_description(&text_editor, &template)? }; commit_builder.set_description(description); - commit_builder.write(tx.repo_mut())? + commit_builder.write(tx.repo_mut()).block_on()? }; let (first_commit, second_commit, num_rebased) = if use_move_flags { @@ -311,9 +311,10 @@ pub(crate) fn cmd_split( second_commit, new_parent_ids, new_child_ids, - )? + ) + .block_on()? } else { - rewrite_descendants(&mut tx, &target, first_commit, second_commit, parallel)? 
+ rewrite_descendants(&mut tx, &target, first_commit, second_commit, parallel).block_on()? }; if let Some(mut formatter) = ui.status_formatter() { if num_rebased > 0 { @@ -329,8 +330,8 @@ pub(crate) fn cmd_split( Ok(()) } -fn move_first_commit( - tx: &mut WorkspaceCommandTransaction, +async fn move_first_commit( + tx: &mut WorkspaceCommandTransaction<'_>, target: &CommitWithSelection, mut first_commit: Commit, mut second_commit: Commit, @@ -342,10 +343,11 @@ fn move_first_commit( tx.repo_mut() .transform_descendants(vec![target.commit.id().clone()], async |rewriter| { let old_commit_id = rewriter.old_commit().id().clone(); - let new_commit = rewriter.rebase().await?.write()?; + let new_commit = rewriter.rebase().await?.write().await?; rewritten_commits.insert(old_commit_id, new_commit.id().clone()); Ok(()) - })?; + }) + .await?; let new_parent_ids: Vec<_> = new_parent_ids .iter() @@ -371,7 +373,8 @@ fn move_first_commit( }, simplify_ancestor_merge: false, }, - )?; + ) + .await?; // 1 for the transformation of the original commit to the second commit // that was inserted in rewritten_commits @@ -396,8 +399,8 @@ fn move_first_commit( Ok((first_commit, second_commit, num_rebased)) } -fn rewrite_descendants( - tx: &mut WorkspaceCommandTransaction, +async fn rewrite_descendants( + tx: &mut WorkspaceCommandTransaction<'_>, target: &CommitWithSelection, first_commit: Commit, second_commit: Commit, @@ -412,9 +415,8 @@ fn rewrite_descendants( .set_rewritten_commit(target.commit.id().clone(), second_commit.id().clone()); } let mut num_rebased = 0; - tx.repo_mut().transform_descendants( - vec![target.commit.id().clone()], - async |mut rewriter| { + tx.repo_mut() + .transform_descendants(vec![target.commit.id().clone()], async |mut rewriter| { num_rebased += 1; if parallel && legacy_bookmark_behavior { // The old_parent is the second commit due to the rewrite above. 
@@ -425,10 +427,10 @@ fn rewrite_descendants( } else { rewriter.replace_parent(first_commit.id(), [second_commit.id()]); } - rewriter.rebase().await?.write()?; + rewriter.rebase().await?.write().await?; Ok(()) - }, - )?; + }) + .await?; // Move the working copy commit (@) to the second commit for any workspaces // where the target commit is the working copy commit. for (name, working_copy_commit) in tx.base_repo().clone().view().wc_commit_ids() { diff --git a/cli/src/commands/squash.rs b/cli/src/commands/squash.rs index e6011a288b7..84ccccc1ee3 100644 --- a/cli/src/commands/squash.rs +++ b/cli/src/commands/squash.rs @@ -271,7 +271,8 @@ pub(crate) fn cmd_squash( let commit = tx .repo_mut() .new_commit(parent_ids.clone(), merged_tree.id()) - .write()?; + .write() + .block_on()?; let mut rewritten = HashMap::new(); tx.repo_mut() .transform_descendants(child_ids.clone(), async |mut rewriter| { @@ -289,11 +290,12 @@ pub(crate) fn cmd_squash( .collect(), ); } - let new_commit = rewriter.rebase().await?.write()?; + let new_commit = rewriter.rebase().await?.write().await?; rewritten.insert(old_commit_id, new_commit); num_rebased += 1; Ok(()) - })?; + }) + .block_on()?; for source in &mut *sources { if let Some(rewritten_source) = rewritten.remove(source.id()) { *source = rewritten_source; @@ -318,7 +320,9 @@ pub(crate) fn cmd_squash( &source_commits, &destination, args.keep_emptied, - )? { + ) + .block_on()? + { let mut commit_builder = squashed.commit_builder.detach(); let new_description = match description { SquashedDescription::Exact(description) => { @@ -356,7 +360,7 @@ pub(crate) fn cmd_squash( )?; // It's weird that commit.description() contains "JJ: " lines, but works. 
commit_builder.set_description(combined); - let temp_commit = commit_builder.write_hidden()?; + let temp_commit = commit_builder.write_hidden().block_on()?; let intro = "Enter a description for the combined commit."; let template = description_template(ui, &tx, intro, &temp_commit)?; edit_description(&text_editor, &template)? @@ -375,8 +379,8 @@ pub(crate) fn cmd_squash( .collect(), ); } - let commit = commit_builder.write(tx.repo_mut())?; - let num_rebased = tx.repo_mut().rebase_descendants()?; + let commit = commit_builder.write(tx.repo_mut()).block_on()?; + let num_rebased = tx.repo_mut().rebase_descendants().block_on()?; if let Some(mut formatter) = ui.status_formatter() { if insert_destination_commit { write!(formatter, "Created new commit ")?; diff --git a/cli/src/commands/undo.rs b/cli/src/commands/undo.rs index 92749084288..203b1ce3b2b 100644 --- a/cli/src/commands/undo.rs +++ b/cli/src/commands/undo.rs @@ -16,6 +16,7 @@ use clap_complete::ArgValueCandidates; use itertools::Itertools as _; use jj_lib::object_id::ObjectId as _; use jj_lib::op_store::OperationId; +use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; @@ -146,7 +147,8 @@ pub fn cmd_undo(ui: &mut Ui, command: &CommandHelper, args: &UndoArgs) -> Result op_to_undo = workspace_command .repo() .loader() - .load_operation(&id_of_restored_op)?; + .load_operation(&id_of_restored_op) + .block_on()?; } let mut op_to_restore = match op_to_undo.parents().at_most_one() { @@ -180,12 +182,13 @@ pub fn cmd_undo(ui: &mut Ui, command: &CommandHelper, args: &UndoArgs) -> Result op_to_restore = workspace_command .repo() .loader() - .load_operation(&id_of_original_op)?; + .load_operation(&id_of_original_op) + .block_on()?; } let mut tx = workspace_command.start_transaction(); let new_view = view_with_desired_portions_restored( - op_to_restore.view()?.store_view(), + op_to_restore.view().block_on()?.store_view(), tx.base_repo().view().store_view(), 
&DEFAULT_REVERT_WHAT, ); diff --git a/cli/src/commands/unsign.rs b/cli/src/commands/unsign.rs index 1918803a0ed..57d7ace4a58 100644 --- a/cli/src/commands/unsign.rs +++ b/cli/src/commands/unsign.rs @@ -18,6 +18,7 @@ use itertools::Itertools as _; use jj_lib::commit::Commit; use jj_lib::commit::CommitIteratorExt as _; use jj_lib::signing::SignBehavior; +use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; @@ -67,25 +68,27 @@ pub fn cmd_unsign( let mut unsigned_commits = vec![]; let mut num_reparented = 0; - tx.repo_mut().transform_descendants( - to_unsign.iter().ids().cloned().collect_vec(), - async |rewriter| { - let old_commit = rewriter.old_commit().clone(); - let commit_builder = rewriter.reparent(); - - if to_unsign.contains(&old_commit) { - let new_commit = commit_builder - .set_sign_behavior(SignBehavior::Drop) - .write()?; - - unsigned_commits.push(new_commit); - } else { - commit_builder.write()?; - num_reparented += 1; - } - Ok(()) - }, - )?; + tx.repo_mut() + .transform_descendants( + to_unsign.iter().ids().cloned().collect_vec(), + async |rewriter| { + let old_commit = rewriter.old_commit().clone(); + let commit_builder = rewriter.reparent(); + + if to_unsign.contains(&old_commit) { + let new_commit = commit_builder + .set_sign_behavior(SignBehavior::Drop) + .write().await?; + + unsigned_commits.push(new_commit); + } else { + commit_builder.write().await?; + num_reparented += 1; + } + Ok(()) + }, + ) + .block_on()?; if let Some(mut formatter) = ui.status_formatter() && !unsigned_commits.is_empty() diff --git a/cli/src/commands/workspace/add.rs b/cli/src/commands/workspace/add.rs index 28a26956c46..7f1f7b63949 100644 --- a/cli/src/commands/workspace/add.rs +++ b/cli/src/commands/workspace/add.rs @@ -189,7 +189,7 @@ pub fn cmd_workspace_add( let tree = merge_commit_trees(tx.repo(), &parents).block_on()?; let parent_ids = parents.iter().ids().cloned().collect_vec(); - let new_wc_commit = 
tx.repo_mut().new_commit(parent_ids, tree.id()).write()?; + let new_wc_commit = tx.repo_mut().new_commit(parent_ids, tree.id()).write().block_on()?; tx.edit(&new_wc_commit)?; tx.finish( diff --git a/cli/src/commands/workspace/rename.rs b/cli/src/commands/workspace/rename.rs index d7f9883c829..e43e7f09092 100644 --- a/cli/src/commands/workspace/rename.rs +++ b/cli/src/commands/workspace/rename.rs @@ -13,6 +13,7 @@ // limitations under the License. use jj_lib::ref_name::WorkspaceNameBuf; +use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; @@ -65,11 +66,13 @@ pub fn cmd_workspace_rename( tx.repo_mut() .rename_workspace(&old_name, new_name.to_owned())?; - let repo = tx.commit(format!( - "Renamed workspace '{old}' to '{new}'", - old = old_name.as_symbol(), - new = new_name.as_symbol() - ))?; + let repo = tx + .commit(format!( + "Renamed workspace '{old}' to '{new}'", + old = old_name.as_symbol(), + new = new_name.as_symbol() + )) + .block_on()?; locked_ws.finish(repo.op_id().clone())?; Ok(()) diff --git a/cli/src/description_util.rs b/cli/src/description_util.rs index bb10b741fc9..364e2635745 100644 --- a/cli/src/description_util.rs +++ b/cli/src/description_util.rs @@ -20,6 +20,7 @@ use jj_lib::file_util::PathError; use jj_lib::settings::UserSettings; use jj_lib::trailer::parse_description_trailers; use jj_lib::trailer::parse_trailers; +use pollster::FutureExt as _; use thiserror::Error; use crate::cli_util::WorkspaceCommandTransaction; @@ -343,7 +344,7 @@ pub fn combine_messages_for_editing( .chain(destination) .flat_map(|commit| parse_description_trailers(commit.description())) .collect(); - let commit = commit_builder.write_hidden()?; + let commit = commit_builder.write_hidden().block_on()?; let trailer_lines = template .format_plain_text(&commit) .into_string() @@ -432,7 +433,7 @@ pub fn add_trailers( commit_builder: &DetachedCommitBuilder, ) -> Result { if let Some(renderer) = parse_trailers_template(ui, tx)? 
{ - let commit = commit_builder.write_hidden()?; + let commit = commit_builder.write_hidden().block_on()?; add_trailers_with_template(&renderer, &commit) } else { Ok(commit_builder.description().to_owned()) diff --git a/cli/src/diff_util.rs b/cli/src/diff_util.rs index 706992bdb9b..6b145f63fb0 100644 --- a/cli/src/diff_util.rs +++ b/cli/src/diff_util.rs @@ -614,7 +614,7 @@ impl<'a> DiffRenderer<'a> { .simplify() }; let to_description = Merge::resolved(to_commit.description()); - let from_tree = rebase_to_dest_parent(self.repo, from_commits, to_commit)?; + let from_tree = rebase_to_dest_parent(self.repo, from_commits, to_commit).await?; let to_tree = to_commit.tree_async().await?; let copy_records = CopyRecords::default(); // TODO self.show_diff_commit_descriptions(*formatter, [&from_description, &to_description])?; diff --git a/cli/src/merge_tools/builtin.rs b/cli/src/merge_tools/builtin.rs index b6bdd157a85..80f4b1b1903 100644 --- a/cli/src/merge_tools/builtin.rs +++ b/cli/src/merge_tools/builtin.rs @@ -438,7 +438,7 @@ fn apply_diff_builtin( Ok(new_value) }, )?; - tree_builder.write_tree(store) + tree_builder.write_tree(store).block_on() } fn apply_changes( @@ -712,7 +712,7 @@ pub fn edit_merge_builtin( })) }, )?; - Ok(tree_builder.write_tree(store)?) + Ok(tree_builder.write_tree(store).block_on()?) 
} #[cfg(test)] diff --git a/cli/src/merge_tools/external.rs b/cli/src/merge_tools/external.rs index 2a9c25565d9..b2a8fc4ded4 100644 --- a/cli/src/merge_tools/external.rs +++ b/cli/src/merge_tools/external.rs @@ -371,7 +371,7 @@ pub fn run_mergetool_external( } } } - let new_tree = tree_builder.write_tree(tree.store())?; + let new_tree = tree_builder.write_tree(tree.store()).block_on()?; Ok((new_tree, partial_resolution_error)) } diff --git a/cli/src/merge_tools/mod.rs b/cli/src/merge_tools/mod.rs index 45edad7af76..498e6f1277a 100644 --- a/cli/src/merge_tools/mod.rs +++ b/cli/src/merge_tools/mod.rs @@ -480,7 +480,7 @@ fn pick_conflict_side( })); tree_builder.set_or_remove(merge_tool_file.repo_path.clone(), new_tree_value); } - tree_builder.write_tree(tree.store()) + tree_builder.write_tree(tree.store()).block_on() } #[cfg(test)] diff --git a/lib/src/absorb.rs b/lib/src/absorb.rs index 39f69f6cf4a..e77b4f34c50 100644 --- a/lib/src/absorb.rs +++ b/lib/src/absorb.rs @@ -23,7 +23,6 @@ use std::sync::Arc; use bstr::BString; use futures::StreamExt as _; use itertools::Itertools as _; -use pollster::FutureExt as _; use thiserror::Error; use crate::annotate::FileAnnotator; @@ -136,7 +135,7 @@ pub async fn split_hunks_to_trees( // Compute annotation of parent (= left) content to map right hunks let mut annotator = FileAnnotator::with_file_content(source.commit.id(), left_path, left_text.clone()); - annotator.compute(repo, destinations)?; + annotator.compute(repo, destinations).await?; let annotation = annotator.to_annotation(); let annotation_ranges = annotation .compact_line_ranges() @@ -285,7 +284,7 @@ pub struct AbsorbStats { /// Merges selected trees into the specified commits. Abandons the source commit /// if it becomes discardable. -pub fn absorb_hunks( +pub async fn absorb_hunks( repo: &mut MutableRepo, source: &AbsorbSource, mut selected_trees: HashMap, @@ -303,33 +302,35 @@ pub fn absorb_hunks( if commit_builder.is_discardable()? 
{ commit_builder.abandon(); } else { - rewritten_source = Some(commit_builder.write()?); + rewritten_source = Some(commit_builder.write().await?); num_rebased += 1; } return Ok(()); } let Some(tree_builder) = selected_trees.remove(rewriter.old_commit().id()) else { - rewriter.rebase().await?.write()?; + rewriter.rebase().await?.write().await?; num_rebased += 1; return Ok(()); }; // Merge hunks between source parent tree and selected tree - let selected_tree_id = tree_builder.write_tree(&store)?; + let selected_tree_id = tree_builder.write_tree(&store).await?; let commit_builder = rewriter.rebase().await?; let destination_tree = store.get_root_tree(commit_builder.tree_id())?; let selected_tree = store.get_root_tree(&selected_tree_id)?; let new_tree = destination_tree .merge(source.parent_tree.clone(), selected_tree) - .block_on()?; + .await?; let mut predecessors = commit_builder.predecessors().to_vec(); predecessors.push(source.commit.id().clone()); let new_commit = commit_builder .set_tree_id(new_tree.id()) .set_predecessors(predecessors) - .write()?; + .write() + .await?; rewritten_destinations.push(new_commit); Ok(()) - })?; + }) + .await?; Ok(AbsorbStats { rewritten_source, rewritten_destinations, diff --git a/lib/src/annotate.rs b/lib/src/annotate.rs index 46cc6e1fa76..08589542007 100644 --- a/lib/src/annotate.rs +++ b/lib/src/annotate.rs @@ -27,7 +27,6 @@ use std::sync::Arc; use bstr::BStr; use bstr::BString; use itertools::Itertools as _; -use pollster::FutureExt as _; use crate::backend::BackendError; use crate::backend::BackendResult; @@ -162,8 +161,11 @@ impl FileAnnotator { /// Initializes annotator for a specific file in the `starting_commit`. /// /// If the file is not found, the result would be empty. 
- pub fn from_commit(starting_commit: &Commit, file_path: &RepoPath) -> BackendResult { - let source = Source::load(starting_commit, file_path)?; + pub async fn from_commit( + starting_commit: &Commit, + file_path: &RepoPath, + ) -> BackendResult { + let source = Source::load(starting_commit, file_path).await?; Ok(Self::with_source(starting_commit.id(), file_path, source)) } @@ -213,12 +215,12 @@ impl FileAnnotator { /// The `domain` expression narrows the range of ancestors to search. It /// will be intersected as `domain & ::pending_commits & files(file_path)`. /// The `pending_commits` is assumed to be included in the `domain`. - pub fn compute( + pub async fn compute( &mut self, repo: &dyn Repo, domain: &Arc, ) -> Result<(), RevsetEvaluationError> { - process_commits(repo, &mut self.state, domain, &self.file_path) + process_commits(repo, &mut self.state, domain, &self.file_path).await } /// Remaining commit ids to visit from. @@ -266,9 +268,9 @@ impl Source { } } - fn load(commit: &Commit, file_path: &RepoPath) -> Result { + async fn load(commit: &Commit, file_path: &RepoPath) -> Result { let tree = commit.tree()?; - let text = get_file_contents(commit.store(), file_path, &tree).block_on()?; + let text = get_file_contents(commit.store(), file_path, &tree).await?; Ok(Self::new(text)) } @@ -293,7 +295,7 @@ pub struct LineOrigin { /// Starting from the source commits, compute changes at that commit relative to /// its direct parents, updating the mappings as we go. -fn process_commits( +async fn process_commits( repo: &dyn Repo, state: &mut AnnotationState, domain: &Arc, @@ -313,7 +315,7 @@ fn process_commits( state.num_unresolved_roots = 0; for node in revset.iter_graph() { let (commit_id, edge_list) = node?; - process_commit(repo, file_name, state, &commit_id, &edge_list)?; + process_commit(repo, file_name, state, &commit_id, &edge_list).await?; if state.commit_source_map.len() == state.num_unresolved_roots { // No more lines to propagate to ancestors. 
break; @@ -325,7 +327,7 @@ fn process_commits( /// For a given commit, for each parent, we compare the version in the parent /// tree with the current version, updating the mappings for any lines in /// common. If the parent doesn't have the file, we skip it. -fn process_commit( +async fn process_commit( repo: &dyn Repo, file_name: &RepoPath, state: &mut AnnotationState, @@ -341,8 +343,8 @@ fn process_commit( let parent_source = match state.commit_source_map.entry(parent_commit_id.clone()) { hash_map::Entry::Occupied(entry) => entry.into_mut(), hash_map::Entry::Vacant(entry) => { - let commit = repo.store().get_commit(entry.key())?; - entry.insert(Source::load(&commit, file_name)?) + let commit = repo.store().get_commit_async(entry.key()).await?; + entry.insert(Source::load(&commit, file_name).await?) } }; diff --git a/lib/src/commit_builder.rs b/lib/src/commit_builder.rs index 38a6da2935c..cdaa6767cff 100644 --- a/lib/src/commit_builder.rs +++ b/lib/src/commit_builder.rs @@ -16,8 +16,6 @@ use std::sync::Arc; -use pollster::FutureExt as _; - use crate::backend; use crate::backend::BackendError; use crate::backend::BackendResult; @@ -153,8 +151,8 @@ impl CommitBuilder<'_> { self } - pub fn write(self) -> BackendResult { - self.inner.write(self.mut_repo) + pub async fn write(self) -> BackendResult { + self.inner.write(self.mut_repo).await } /// Records the old commit as abandoned instead of writing new commit. This @@ -364,9 +362,9 @@ impl DetachedCommitBuilder { } /// Writes new commit and makes it visible in the `mut_repo`. - pub fn write(self, mut_repo: &mut MutableRepo) -> BackendResult { + pub async fn write(self, mut_repo: &mut MutableRepo) -> BackendResult { let predecessors = self.commit.predecessors.clone(); - let commit = write_to_store(&self.store, self.commit, &self.sign_settings)?; + let commit = write_to_store(&self.store, self.commit, &self.sign_settings).await?; // FIXME: Google's index.has_id() always returns true. 
if mut_repo.is_backed_by_default_index() && mut_repo.index().has_id(commit.id()) { // Recording existing commit as new would create cycle in @@ -388,8 +386,8 @@ impl DetachedCommitBuilder { /// /// This does not consume the builder, so you can reuse the current /// configuration to create another commit later. - pub fn write_hidden(&self) -> BackendResult { - write_to_store(&self.store, self.commit.clone(), &self.sign_settings) + pub async fn write_hidden(&self) -> BackendResult { + write_to_store(&self.store, self.commit.clone(), &self.sign_settings).await } /// Records the old commit as abandoned in the `mut_repo`. @@ -405,7 +403,7 @@ impl DetachedCommitBuilder { } } -fn write_to_store( +async fn write_to_store( store: &Arc, mut commit: backend::Commit, sign_settings: &SignSettings, @@ -420,5 +418,5 @@ fn write_to_store( store .write_commit(commit, should_sign.then_some(&mut &sign_fn)) - .block_on() + .await } diff --git a/lib/src/conflicts.rs b/lib/src/conflicts.rs index 4adf63037ca..b1124c37f99 100644 --- a/lib/src/conflicts.rs +++ b/lib/src/conflicts.rs @@ -23,10 +23,10 @@ use bstr::BString; use bstr::ByteSlice as _; use futures::Stream; use futures::StreamExt as _; +use futures::future::try_join_all; use futures::stream::BoxStream; use futures::try_join; use itertools::Itertools as _; -use pollster::FutureExt as _; use tokio::io::AsyncRead; use tokio::io::AsyncReadExt as _; @@ -993,13 +993,12 @@ pub async fn update_from_content( // Now write the new files contents we found by parsing the file with conflict // markers. 
- // TODO: Write these concurrently - let new_file_ids: Vec> = zip(contents.iter(), simplified_file_ids.iter()) - .map(|(content, file_id)| -> BackendResult> { + let write_futures = + zip(contents.iter(), simplified_file_ids.iter()).map(|(content, file_id)| async move { match file_id { Some(_) => { - let file_id = store.write_file(path, &mut content.as_slice()).block_on()?; - Ok(Some(file_id)) + let file_id = store.write_file(path, &mut content.as_slice()).await?; + Ok::<_, BackendError>(Some(file_id)) } None => { // The missing side of a conflict is still represented by @@ -1007,8 +1006,9 @@ pub async fn update_from_content( Ok(None) } } - }) - .try_collect()?; + }); + + let new_file_ids: Vec> = try_join_all(write_futures).await?; // If the conflict was simplified, expand the conflict to the original // number of sides. diff --git a/lib/src/default_index/store.rs b/lib/src/default_index/store.rs index 8077aa4a729..6edb3b1b87e 100644 --- a/lib/src/default_index/store.rs +++ b/lib/src/default_index/store.rs @@ -24,6 +24,7 @@ use std::path::PathBuf; use std::slice; use std::sync::Arc; +use async_trait::async_trait; use itertools::Itertools as _; use pollster::FutureExt as _; use prost::Message as _; @@ -288,7 +289,7 @@ impl DefaultIndexStore { for op in &ops_to_visit { for commit_id in itertools::chain( op.all_referenced_commit_ids(), - op.view()?.all_referenced_commit_ids(), + op.view().await?.all_referenced_commit_ids(), ) { if !historical_heads.contains_key(commit_id) { historical_heads.insert(commit_id.clone(), op.id().clone()); @@ -553,12 +554,13 @@ impl DefaultIndexStore { } } +#[async_trait(?Send)] impl IndexStore for DefaultIndexStore { fn name(&self) -> &str { Self::name() } - fn get_index_at_op( + async fn get_index_at_op( &self, op: &Operation, store: &Arc, @@ -571,7 +573,7 @@ impl IndexStore for DefaultIndexStore { Err(DefaultIndexStoreError::LoadAssociation(PathError { source: error, .. 
})) if error.kind() == io::ErrorKind::NotFound => { - self.build_index_at_operation(op, store).block_on() + self.build_index_at_operation(op, store).await } Err(DefaultIndexStoreError::LoadIndex(err)) if err.is_corrupt_or_not_found() => { // If the index was corrupt (maybe it was written in a different format), @@ -600,7 +602,7 @@ impl IndexStore for DefaultIndexStore { Ok(Box::new(index)) } - fn write_index( + async fn write_index( &self, index: Box, op: &Operation, diff --git a/lib/src/fix.rs b/lib/src/fix.rs index 9904f7a4cf3..4ee79e9b13d 100644 --- a/lib/src/fix.rs +++ b/lib/src/fix.rs @@ -275,59 +275,61 @@ pub async fn fix_files( // Substitute the fixed file IDs into all of the affected commits. Currently, // fixes cannot delete or rename files, change the executable bit, or modify // other parts of the commit like the description. - repo_mut.transform_descendants(root_commits, async |mut rewriter| { - // TODO: Build the trees in parallel before `transform_descendants()` and only - // keep the tree IDs in memory, so we can pass them to the rewriter. 
- let old_commit_id = rewriter.old_commit().id().clone(); - let repo_paths = commit_paths.get(&old_commit_id).unwrap(); - let old_tree = rewriter.old_commit().tree_async().await?; - let mut tree_builder = MergedTreeBuilder::new(old_tree.id().clone()); - let mut has_changes = false; - for repo_path in repo_paths { - let old_value = old_tree.path_value_async(repo_path).await?; - let new_value = old_value.map(|old_term| { - if let Some(TreeValue::File { - id, - executable, - copy_id, - }) = old_term - { - let file_to_fix = FileToFix { - file_id: id.clone(), - repo_path: repo_path.clone(), - }; - if let Some(new_id) = fixed_file_ids.get(&file_to_fix) { - return Some(TreeValue::File { - id: new_id.clone(), - executable: *executable, - copy_id: copy_id.clone(), - }); + repo_mut + .transform_descendants(root_commits, async |mut rewriter| { + // TODO: Build the trees in parallel before `transform_descendants()` and only + // keep the tree IDs in memory, so we can pass them to the rewriter. + let old_commit_id = rewriter.old_commit().id().clone(); + let repo_paths = commit_paths.get(&old_commit_id).unwrap(); + let old_tree = rewriter.old_commit().tree_async().await?; + let mut tree_builder = MergedTreeBuilder::new(old_tree.id().clone()); + let mut has_changes = false; + for repo_path in repo_paths { + let old_value = old_tree.path_value_async(repo_path).await?; + let new_value = old_value.map(|old_term| { + if let Some(TreeValue::File { + id, + executable, + copy_id, + }) = old_term + { + let file_to_fix = FileToFix { + file_id: id.clone(), + repo_path: repo_path.clone(), + }; + if let Some(new_id) = fixed_file_ids.get(&file_to_fix) { + return Some(TreeValue::File { + id: new_id.clone(), + executable: *executable, + copy_id: copy_id.clone(), + }); + } } + old_term.clone() + }); + if new_value != old_value { + tree_builder.set_or_remove(repo_path.clone(), new_value); + has_changes = true; } - old_term.clone() - }); - if new_value != old_value { - 
tree_builder.set_or_remove(repo_path.clone(), new_value); - has_changes = true; } - } - summary.num_checked_commits += 1; - if has_changes { - summary.num_fixed_commits += 1; - let new_tree = tree_builder.write_tree(rewriter.repo_mut().store())?; - let builder = rewriter.reparent(); - let new_commit = builder.set_tree_id(new_tree).write()?; - summary - .rewrites - .insert(old_commit_id, new_commit.id().clone()); - } else if rewriter.parents_changed() { - let new_commit = rewriter.reparent().write()?; - summary - .rewrites - .insert(old_commit_id, new_commit.id().clone()); - } - Ok(()) - })?; + summary.num_checked_commits += 1; + if has_changes { + summary.num_fixed_commits += 1; + let new_tree = tree_builder.write_tree(rewriter.repo_mut().store()).await?; + let builder = rewriter.reparent(); + let new_commit = builder.set_tree_id(new_tree).write().await?; + summary + .rewrites + .insert(old_commit_id, new_commit.id().clone()); + } else if rewriter.parents_changed() { + let new_commit = rewriter.reparent().write().await?; + summary + .rewrites + .insert(old_commit_id, new_commit.id().clone()); + } + Ok(()) + }) + .await?; tracing::debug!(?summary); Ok(summary) diff --git a/lib/src/index.rs b/lib/src/index.rs index 043b9b20cc6..a86bf4ee90c 100644 --- a/lib/src/index.rs +++ b/lib/src/index.rs @@ -18,6 +18,7 @@ use std::any::Any; use std::fmt::Debug; use std::sync::Arc; +use async_trait::async_trait; use thiserror::Error; use crate::backend::ChangeId; @@ -55,6 +56,7 @@ pub struct AllHeadsForGcUnsupported; /// Defines the interface for types that provide persistent storage for an /// index. +#[async_trait(?Send)] pub trait IndexStore: Any + Send + Sync + Debug { /// Returns a name representing the type of index that the `IndexStore` is /// compatible with. For example, the `IndexStore` for the default index @@ -62,7 +64,7 @@ pub trait IndexStore: Any + Send + Sync + Debug { fn name(&self) -> &str; /// Returns the index at the specified operation. 
- fn get_index_at_op( + async fn get_index_at_op( &self, op: &Operation, store: &Arc, @@ -70,7 +72,7 @@ pub trait IndexStore: Any + Send + Sync + Debug { /// Writes `index` to the index store and returns a read-only version of the /// index. - fn write_index( + async fn write_index( &self, index: Box, op: &Operation, diff --git a/lib/src/local_working_copy.rs b/lib/src/local_working_copy.rs index f00540ad464..0d1a89bcd72 100644 --- a/lib/src/local_working_copy.rs +++ b/lib/src/local_working_copy.rs @@ -1105,7 +1105,7 @@ impl TreeState { .merge_in(changed_file_states, &deleted_files); }); trace_span!("write tree").in_scope(|| -> Result<(), BackendError> { - let new_tree_id = tree_builder.write_tree(&self.store)?; + let new_tree_id = tree_builder.write_tree(&self.store).block_on()?; is_dirty |= new_tree_id != self.tree_id; self.tree_id = new_tree_id; Ok(()) diff --git a/lib/src/merged_tree.rs b/lib/src/merged_tree.rs index 87a411e66dd..19bd4851681 100644 --- a/lib/src/merged_tree.rs +++ b/lib/src/merged_tree.rs @@ -29,6 +29,7 @@ use futures::Stream; use futures::StreamExt as _; use futures::future::BoxFuture; use futures::future::try_join; +use futures::future::try_join_all; use futures::stream::BoxStream; use itertools::EitherOrBoth; use itertools::Itertools as _; @@ -978,20 +979,20 @@ impl MergedTreeBuilder { } /// Create new tree(s) from the base tree(s) and overrides. 
- pub fn write_tree(self, store: &Arc) -> BackendResult { + pub async fn write_tree(self, store: &Arc) -> BackendResult { let base_tree_ids = self.base_tree_id.as_merge().clone(); - let new_tree_ids = self.write_merged_trees(base_tree_ids, store)?; + let new_tree_ids = self.write_merged_trees(base_tree_ids, store).await?; match new_tree_ids.simplify().into_resolved() { Ok(single_tree_id) => Ok(MergedTreeId::resolved(single_tree_id)), Err(tree_id) => { let tree = store.get_root_tree(&MergedTreeId::new(tree_id))?; - let resolved = tree.resolve().block_on()?; + let resolved = tree.resolve().await?; Ok(resolved.id()) } } } - fn write_merged_trees( + async fn write_merged_trees( self, mut base_tree_ids: Merge, store: &Arc, @@ -1028,10 +1029,11 @@ impl MergedTreeBuilder { // TODO: This can be made more efficient. If there's a single resolved conflict // in `dir/file`, we shouldn't have to write the `dir/` and root trees more than // once. - let merge_builder: MergeBuilder = tree_builders + let futures: Vec<_> = tree_builders .into_iter() .map(|builder| builder.write_tree()) - .try_collect()?; - Ok(merge_builder.build()) + .collect(); + let merge = Merge::from_vec(try_join_all(futures).await?); + Ok(merge) } } diff --git a/lib/src/op_heads_store.rs b/lib/src/op_heads_store.rs index 8854b0c0982..1e6df6fe151 100644 --- a/lib/src/op_heads_store.rs +++ b/lib/src/op_heads_store.rs @@ -21,7 +21,6 @@ use std::sync::Arc; use async_trait::async_trait; use itertools::Itertools as _; -use pollster::FutureExt as _; use thiserror::Error; use crate::dag_walk; @@ -85,22 +84,23 @@ impl dyn OpHeadsStore { // lock. // // This routine is defined outside the trait because it must support generics. 
-pub fn resolve_op_heads( +pub async fn resolve_op_heads( op_heads_store: &dyn OpHeadsStore, op_store: &Arc, - resolver: impl FnOnce(Vec) -> Result, + resolver: impl FnOnce(Vec) -> Fut, ) -> Result where E: From + From + From, + Fut: Future>, { // This can be empty if the OpHeadsStore doesn't support atomic updates. // For example, all entries ahead of a readdir() pointer could be deleted by // another concurrent process. - let mut op_heads = op_heads_store.get_op_heads().block_on()?; + let mut op_heads = op_heads_store.get_op_heads().await?; if op_heads.len() == 1 { let operation_id = op_heads.pop().unwrap(); - let operation = op_store.read_operation(&operation_id).block_on()?; + let operation = op_store.read_operation(&operation_id).await?; return Ok(Operation::new(op_store.clone(), operation_id, operation)); } @@ -112,8 +112,8 @@ where // Note that the locking isn't necessary for correctness of merge; we take // the lock only to prevent other concurrent processes from doing the same // work (and producing another set of divergent heads). 
- let _lock = op_heads_store.lock().block_on()?; - let op_head_ids = op_heads_store.get_op_heads().block_on()?; + let _lock = op_heads_store.lock().await?; + let op_head_ids = op_heads_store.get_op_heads().await?; if op_head_ids.is_empty() { return Err(OpHeadResolutionError::NoHeads.into()); @@ -121,17 +121,16 @@ where if op_head_ids.len() == 1 { let op_head_id = op_head_ids[0].clone(); - let op_head = op_store.read_operation(&op_head_id).block_on()?; + let op_head = op_store.read_operation(&op_head_id).await?; return Ok(Operation::new(op_store.clone(), op_head_id, op_head)); } - let op_heads: Vec<_> = op_head_ids - .iter() - .map(|op_id: &OperationId| -> Result { - let data = op_store.read_operation(op_id).block_on()?; - Ok(Operation::new(op_store.clone(), op_id.clone(), data)) - }) - .try_collect()?; + let mut op_heads = vec![]; + for op_id in &op_head_ids { + let data = op_store.read_operation(op_id).await?; + op_heads.push(Operation::new(op_store.clone(), op_id.clone(), data)); + } + // Remove ancestors so we don't create merge operation with an operation and its // ancestor let op_head_ids_before: HashSet<_> = op_heads.iter().map(|op| op.id().clone()).collect(); @@ -152,16 +151,16 @@ where if let [op_head] = &*op_heads { op_heads_store .update_op_heads(&ancestor_op_heads, op_head.id()) - .block_on()?; + .await?; return Ok(op_head.clone()); } op_heads.sort_by_key(|op| op.metadata().time.end.timestamp); - let new_op = resolver(op_heads)?; + let new_op = resolver(op_heads).await?; let mut old_op_heads = ancestor_op_heads; old_op_heads.extend_from_slice(new_op.parent_ids()); op_heads_store .update_op_heads(&old_op_heads, new_op.id()) - .block_on()?; + .await?; Ok(new_op) } diff --git a/lib/src/op_walk.rs b/lib/src/op_walk.rs index b3ef025d4f5..013bb5d2ec4 100644 --- a/lib/src/op_walk.rs +++ b/lib/src/op_walk.rs @@ -21,7 +21,6 @@ use std::slice; use std::sync::Arc; use itertools::Itertools as _; -use pollster::FutureExt as _; use thiserror::Error; use 
crate::dag_walk; @@ -86,42 +85,43 @@ pub enum OpsetResolutionError { } /// Resolves operation set expression without loading a repo. -pub fn resolve_op_for_load( +pub async fn resolve_op_for_load( repo_loader: &RepoLoader, op_str: &str, ) -> Result { let op_store = repo_loader.op_store(); let op_heads_store = repo_loader.op_heads_store().as_ref(); - let get_current_op = || { - op_heads_store::resolve_op_heads(op_heads_store, op_store, |op_heads| { + let get_current_op = || async { + op_heads_store::resolve_op_heads(op_heads_store, op_store, |op_heads| async move { Err(OpsetResolutionError::MultipleOperations { expr: "@".to_owned(), candidates: op_heads.iter().map(|op| op.id().clone()).collect(), } .into()) }) + .await }; - let get_head_ops = || get_current_head_ops(op_store, op_heads_store); - resolve_single_op(op_store, get_current_op, get_head_ops, op_str) + let get_head_ops = async || get_current_head_ops(op_store, op_heads_store).await; + resolve_single_op(op_store, get_current_op, get_head_ops, op_str).await } /// Resolves operation set expression against the loaded repo. /// /// The "@" symbol will be resolved to the operation the repo was loaded at. -pub fn resolve_op_with_repo( +pub async fn resolve_op_with_repo( repo: &ReadonlyRepo, op_str: &str, ) -> Result { - resolve_op_at(repo.op_store(), slice::from_ref(repo.operation()), op_str) + resolve_op_at(repo.op_store(), slice::from_ref(repo.operation()), op_str).await } /// Resolves operation set expression at the given head operations. 
-pub fn resolve_op_at( +pub async fn resolve_op_at( op_store: &Arc, head_ops: &[Operation], op_str: &str, ) -> Result { - let get_current_op = || match head_ops { + let get_current_op = async || match head_ops { [head_op] => Ok(head_op.clone()), [] => Err(OpsetResolutionError::EmptyOperations("@".to_owned()).into()), _ => Err(OpsetResolutionError::MultipleOperations { @@ -130,24 +130,28 @@ pub fn resolve_op_at( } .into()), }; - let get_head_ops = || Ok(head_ops.to_vec()); - resolve_single_op(op_store, get_current_op, get_head_ops, op_str) + let get_head_ops = async || Ok(head_ops.to_vec()); + resolve_single_op(op_store, get_current_op, get_head_ops, op_str).await } /// Resolves operation set expression with the given "@" symbol resolution /// callbacks. -fn resolve_single_op( +async fn resolve_single_op( op_store: &Arc, - get_current_op: impl FnOnce() -> Result, - get_head_ops: impl FnOnce() -> Result, OpsetEvaluationError>, + get_current_op: impl AsyncFnOnce() -> Result, + get_head_ops: impl AsyncFnOnce() -> Result, OpsetEvaluationError>, op_str: &str, ) -> Result { let op_symbol = op_str.trim_end_matches(['-', '+']); let op_postfix = &op_str[op_symbol.len()..]; - let head_ops = op_postfix.contains('+').then(get_head_ops).transpose()?; + let head_ops: Option> = if op_postfix.contains('+') { + Some(get_head_ops().await?) 
+ } else { + None + }; let mut operation = match op_symbol { - "@" => get_current_op(), - s => resolve_single_op_from_store(op_store, s), + "@" => get_current_op().await, + s => resolve_single_op_from_store(op_store, s).await, }?; for (i, c) in op_postfix.chars().enumerate() { let mut neighbor_ops = match c { @@ -177,7 +181,7 @@ fn resolve_single_op( Ok(operation) } -fn resolve_single_op_from_store( +async fn resolve_single_op_from_store( op_store: &Arc, op_str: &str, ) -> Result { @@ -186,12 +190,12 @@ fn resolve_single_op_from_store( } let prefix = HexPrefix::try_from_hex(op_str) .ok_or_else(|| OpsetResolutionError::InvalidIdPrefix(op_str.to_owned()))?; - match op_store.resolve_operation_id_prefix(&prefix).block_on()? { + match op_store.resolve_operation_id_prefix(&prefix).await? { PrefixResolution::NoMatch => { Err(OpsetResolutionError::NoSuchOperation(op_str.to_owned()).into()) } PrefixResolution::SingleMatch(op_id) => { - let data = op_store.read_operation(&op_id).block_on()?; + let data = op_store.read_operation(&op_id).await?; Ok(Operation::new(op_store.clone(), op_id, data)) } PrefixResolution::AmbiguousMatch => { @@ -202,19 +206,18 @@ fn resolve_single_op_from_store( /// Loads the current head operations. The returned operations may contain /// redundant ones which are ancestors of the other heads. -pub fn get_current_head_ops( +pub async fn get_current_head_ops( op_store: &Arc, op_heads_store: &dyn OpHeadsStore, ) -> Result, OpsetEvaluationError> { - let mut head_ops: Vec<_> = op_heads_store - .get_op_heads() - .block_on()? 
- .into_iter() - .map(|id| -> OpStoreResult { - let data = op_store.read_operation(&id).block_on()?; - Ok(Operation::new(op_store.clone(), id, data)) - }) - .try_collect()?; + let op_ids = op_heads_store.get_op_heads().await?; + + let mut head_ops = Vec::with_capacity(op_ids.len()); + for id in op_ids { + let data = op_store.read_operation(&id).await?; + head_ops.push(Operation::new(op_store.clone(), id, data)); + } + // To stabilize output, sort in the same order as resolve_op_heads() head_ops.sort_by_key(|op| op.metadata().time.end.timestamp); Ok(head_ops) @@ -351,7 +354,7 @@ pub struct ReparentStats { /// If the source operation range `root_ops..head_ops` was empty, the /// `new_head_ids` will be `[dest_op.id()]`, meaning the `dest_op` is the head. // TODO: Find better place to host this function. It might be an OpStore method. -pub fn reparent_range( +pub async fn reparent_range( op_store: &dyn OpStore, root_ops: &[Operation], head_ops: &[Operation], @@ -377,7 +380,7 @@ pub fn reparent_range( .filter_map(|id| rewritten_ids.get(id).or_else(|| dest_once.take())) .cloned() .collect(); - let new_id = op_store.write_operation(&data).block_on()?; + let new_id = op_store.write_operation(&data).await?; rewritten_ids.insert(old_op.id().clone(), new_id); } diff --git a/lib/src/operation.rs b/lib/src/operation.rs index a1d7791d221..6a171763740 100644 --- a/lib/src/operation.rs +++ b/lib/src/operation.rs @@ -114,8 +114,8 @@ impl Operation { }) } - pub fn view(&self) -> OpStoreResult { - let data = self.op_store.read_view(&self.data.view_id).block_on()?; + pub async fn view(&self) -> OpStoreResult { + let data = self.op_store.read_view(&self.data.view_id).await?; Ok(View::new(data)) } diff --git a/lib/src/repo.rs b/lib/src/repo.rs index 83e1964a3e7..76835b5f650 100644 --- a/lib/src/repo.rs +++ b/lib/src/repo.rs @@ -25,9 +25,9 @@ use std::path::Path; use std::slice; use std::sync::Arc; +use futures::future::try_join_all; use itertools::Itertools as _; use 
once_cell::sync::OnceCell; -use pollster::FutureExt as _; use thiserror::Error; use tracing::instrument; @@ -187,15 +187,15 @@ impl ReadonlyRepo { } #[expect(clippy::too_many_arguments)] - pub fn init( + pub async fn init( settings: &UserSettings, repo_path: &Path, - backend_initializer: &BackendInitializer, + backend_initializer: &BackendInitializer<'_>, signer: Signer, - op_store_initializer: &OpStoreInitializer, - op_heads_store_initializer: &OpHeadsStoreInitializer, - index_store_initializer: &IndexStoreInitializer, - submodule_store_initializer: &SubmoduleStoreInitializer, + op_store_initializer: &OpStoreInitializer<'_>, + op_heads_store_initializer: &OpHeadsStoreInitializer<'_>, + index_store_initializer: &IndexStoreInitializer<'_>, + submodule_store_initializer: &SubmoduleStoreInitializer<'_>, ) -> Result, RepoInitError> { let repo_path = dunce::canonicalize(repo_path).context(repo_path)?; @@ -225,7 +225,7 @@ impl ReadonlyRepo { fs::write(&op_heads_type_path, op_heads_store.name()).context(&op_heads_type_path)?; op_heads_store .update_op_heads(&[], op_store.root_operation_id()) - .block_on()?; + .await?; let op_heads_store: Arc = Arc::from(op_heads_store); let index_path = repo_path.join("index"); @@ -252,12 +252,16 @@ impl ReadonlyRepo { submodule_store, }; - let root_operation = loader.root_operation(); - let root_view = root_operation.view().expect("failed to read root view"); + let root_operation = loader.root_operation().await; + let root_view = root_operation + .view() + .await + .expect("failed to read root view"); assert!(!root_view.heads().is_empty()); let index = loader .index_store .get_index_at_op(&root_operation, &loader.store) + .await // If the root op index couldn't be read, the index backend wouldn't // be initialized properly. 
.map_err(|err| BackendInitError(err.into()))?; @@ -316,13 +320,13 @@ impl ReadonlyRepo { Transaction::new(mut_repo, self.settings()) } - pub fn reload_at_head(&self) -> Result, RepoLoaderError> { - self.loader().load_at_head() + pub async fn reload_at_head(&self) -> Result, RepoLoaderError> { + self.loader().load_at_head().await } #[instrument] - pub fn reload_at(&self, operation: &Operation) -> Result, RepoLoaderError> { - self.loader().load_at(operation) + pub async fn reload_at(&self, operation: &Operation) -> Result, RepoLoaderError> { + self.loader().load_at(operation).await } } @@ -736,20 +740,21 @@ impl RepoLoader { &self.submodule_store } - pub fn load_at_head(&self) -> Result, RepoLoaderError> { + pub async fn load_at_head(&self) -> Result, RepoLoaderError> { let op = op_heads_store::resolve_op_heads( self.op_heads_store.as_ref(), &self.op_store, |op_heads| self.resolve_op_heads(op_heads), - )?; - let view = op.view()?; - self.finish_load(op, view) + ) + .await?; + let view = op.view().await?; + self.finish_load(op, view).await } #[instrument(skip(self))] - pub fn load_at(&self, op: &Operation) -> Result, RepoLoaderError> { - let view = op.view()?; - self.finish_load(op.clone(), view) + pub async fn load_at(&self, op: &Operation) -> Result, RepoLoaderError> { + let view = op.view().await?; + self.finish_load(op.clone(), view).await } pub fn create_from( @@ -772,20 +777,21 @@ impl RepoLoader { // load_operation() will be moved there. /// Returns the root operation. - pub fn root_operation(&self) -> Operation { + pub async fn root_operation(&self) -> Operation { self.load_operation(self.op_store.root_operation_id()) + .await .expect("failed to read root operation") } /// Loads the specified operation from the operation store. 
- pub fn load_operation(&self, id: &OperationId) -> OpStoreResult { - let data = self.op_store.read_operation(id).block_on()?; + pub async fn load_operation(&self, id: &OperationId) -> OpStoreResult { + let data = self.op_store.read_operation(id).await?; Ok(Operation::new(self.op_store.clone(), id.clone(), data)) } /// Merges the given `operations` into a single operation. Returns the root /// operation if the `operations` is empty. - pub fn merge_operations( + pub async fn merge_operations( &self, operations: Vec, tx_description: Option<&str>, @@ -793,20 +799,20 @@ impl RepoLoader { let num_operations = operations.len(); let mut operations = operations.into_iter(); let Some(base_op) = operations.next() else { - return Ok(self.root_operation()); + return Ok(self.root_operation().await); }; let final_op = if num_operations > 1 { - let base_repo = self.load_at(&base_op)?; + let base_repo = self.load_at(&base_op).await?; let mut tx = base_repo.start_transaction(); for other_op in operations { - tx.merge_operation(other_op)?; - tx.repo_mut().rebase_descendants()?; + tx.merge_operation(other_op).await?; + tx.repo_mut().rebase_descendants().await?; } let tx_description = tx_description.map_or_else( || format!("merge {num_operations} operations"), |tx_description| tx_description.to_string(), ); - let merged_repo = tx.write(tx_description)?.leave_unpublished(); + let merged_repo = tx.write(tx_description).await?.leave_unpublished(); merged_repo.operation().clone() } else { base_op @@ -815,17 +821,24 @@ impl RepoLoader { Ok(final_op) } - fn resolve_op_heads(&self, op_heads: Vec) -> Result { + async fn resolve_op_heads( + &self, + op_heads: Vec, + ) -> Result { assert!(!op_heads.is_empty()); self.merge_operations(op_heads, Some("reconcile divergent operations")) + .await } - fn finish_load( + async fn finish_load( &self, operation: Operation, view: View, ) -> Result, RepoLoaderError> { - let index = self.index_store.get_index_at_op(&operation, &self.store)?; + let index = 
self + .index_store + .get_index_at_op(&operation, &self.store) + .await?; let repo = ReadonlyRepo { loader: self.clone(), operation, @@ -1104,20 +1117,20 @@ impl MutableRepo { /// Updates bookmarks, working copies, and anonymous heads after rewriting /// and/or abandoning commits. - pub fn update_rewritten_references( + pub async fn update_rewritten_references( &mut self, options: &RewriteRefsOptions, ) -> BackendResult<()> { - self.update_all_references(options)?; + self.update_all_references(options).await?; self.update_heads() .map_err(|err| err.into_backend_error())?; Ok(()) } - fn update_all_references(&mut self, options: &RewriteRefsOptions) -> BackendResult<()> { + async fn update_all_references(&mut self, options: &RewriteRefsOptions) -> BackendResult<()> { let rewrite_mapping = self.resolve_rewrite_mapping_with(|_| true); self.update_local_bookmarks(&rewrite_mapping, options); - self.update_wc_commits(&rewrite_mapping)?; + self.update_wc_commits(&rewrite_mapping).await?; Ok(()) } @@ -1155,7 +1168,7 @@ impl MutableRepo { } } - fn update_wc_commits( + async fn update_wc_commits( &mut self, rewrite_mapping: &HashMap>, ) -> BackendResult<()> { @@ -1176,18 +1189,21 @@ impl MutableRepo { ); let new_wc_commit = if !abandoned_old_commit { // We arbitrarily pick a new working-copy commit among the candidates. - self.store().get_commit(&new_commit_ids[0])? + self.store().get_commit_async(&new_commit_ids[0]).await? 
} else if let Some(commit) = recreated_wc_commits.get(old_commit_id) { commit.clone() } else { - let new_commits: Vec<_> = new_commit_ids - .iter() - .map(|id| self.store().get_commit(id)) - .try_collect()?; - let merged_parents_tree = merge_commit_trees(self, &new_commits).block_on()?; + let new_commits: Vec<_> = try_join_all( + new_commit_ids + .iter() + .map(|id| self.store().get_commit_async(id)), + ) + .await?; + let merged_parents_tree = merge_commit_trees(self, &new_commits).await?; let commit = self .new_commit(new_commit_ids.clone(), merged_parents_tree.id().clone()) - .write()?; + .write() + .await?; recreated_wc_commits.insert(old_commit_id, commit.clone()); commit }; @@ -1294,13 +1310,14 @@ impl MutableRepo { /// adds new descendants, then the callback will not be called for those. /// Similarly, if the callback rewrites unrelated commits, then the callback /// will not be called for descendants of those commits. - pub fn transform_descendants( + pub async fn transform_descendants( &mut self, roots: Vec, callback: impl AsyncFnMut(CommitRewriter) -> BackendResult<()>, ) -> BackendResult<()> { let options = RewriteRefsOptions::default(); self.transform_descendants_with_options(roots, &HashMap::new(), &options, callback) + .await } /// Rewrite descendants of the given roots with options. @@ -1310,7 +1327,7 @@ impl MutableRepo { /// parents. /// /// See [`Self::transform_descendants()`] for details. - pub fn transform_descendants_with_options( + pub async fn transform_descendants_with_options( &mut self, roots: Vec, new_parents_map: &HashMap>, @@ -1319,6 +1336,7 @@ impl MutableRepo { ) -> BackendResult<()> { let descendants = self.find_descendants_for_rebase(roots)?; self.transform_commits(descendants, new_parents_map, options, callback) + .await } /// Rewrite the given commits in reverse topological order. 
@@ -1328,7 +1346,7 @@ impl MutableRepo { /// This function is similar to /// [`Self::transform_descendants_with_options()`], but only rewrites the /// `commits` provided, and does not rewrite their descendants. - pub fn transform_commits( + pub async fn transform_commits( &mut self, commits: Vec, new_parents_map: &HashMap>, @@ -1342,9 +1360,9 @@ impl MutableRepo { .map_or(old_commit.parent_ids(), |parent_ids| parent_ids); let new_parent_ids = self.new_parents(parent_ids); let rewriter = CommitRewriter::new(self, old_commit, new_parent_ids); - callback(rewriter).block_on()?; + callback(rewriter).await?; } - self.update_rewritten_references(options)?; + self.update_rewritten_references(options).await?; // Since we didn't necessarily visit all descendants of rewritten commits (e.g. // if they were rewritten in the callback), there can still be commits left to // rebase, so we don't clear `parent_mapping` here. @@ -1372,7 +1390,7 @@ impl MutableRepo { /// /// The `progress` callback will be invoked for each rebase operation with /// `(old_commit, rebased_commit)` as arguments. - pub fn rebase_descendants_with_options( + pub async fn rebase_descendants_with_options( &mut self, options: &RebaseOptions, mut progress: impl FnMut(Commit, RebasedCommit), @@ -1385,12 +1403,13 @@ impl MutableRepo { async |rewriter| { if rewriter.parents_changed() { let old_commit = rewriter.old_commit().clone(); - let rebased_commit = rebase_commit_with_options(rewriter, options)?; + let rebased_commit = rebase_commit_with_options(rewriter, options).await?; progress(old_commit, rebased_commit); } Ok(()) }, - )?; + ) + .await?; self.parent_mapping.clear(); Ok(()) } @@ -1404,12 +1423,13 @@ impl MutableRepo { /// All rebased descendant commits will be preserved even if they were /// emptied following the rebase operation. To customize the rebase /// behavior, use [`MutableRepo::rebase_descendants_with_options`]. 
- pub fn rebase_descendants(&mut self) -> BackendResult<usize> { + pub async fn rebase_descendants(&mut self) -> BackendResult<usize> { let options = RebaseOptions::default(); let mut num_rebased = 0; self.rebase_descendants_with_options(&options, |_old_commit, _rebased_commit| { num_rebased += 1; - })?; + }) + .await?; Ok(num_rebased) } @@ -1419,17 +1439,18 @@ impl MutableRepo { /// be recursively reparented onto the new version of their parents. /// The content of those descendants will remain untouched. /// Returns the number of reparented descendants. - pub fn reparent_descendants(&mut self) -> BackendResult<usize> { + pub async fn reparent_descendants(&mut self) -> BackendResult<usize> { let roots = self.parent_mapping.keys().cloned().collect_vec(); let mut num_reparented = 0; self.transform_descendants(roots, async |rewriter| { if rewriter.parents_changed() { let builder = rewriter.reparent(); - builder.write()?; + builder.write().await?; num_reparented += 1; } Ok(()) - })?; + }) + .await?; self.parent_mapping.clear(); Ok(num_reparented) } @@ -1490,14 +1511,15 @@ impl MutableRepo { self.view_mut().rename_workspace(old_name, new_name) } - pub fn check_out( + pub async fn check_out( &mut self, name: WorkspaceNameBuf, commit: &Commit, ) -> Result<Commit, CheckOutCommitError> { let wc_commit = self .new_commit(vec![commit.id().clone()], commit.tree_id().clone()) - .write()?; + .write() + .await?; self.edit(name, &wc_commit)?; Ok(wc_commit) } diff --git a/lib/src/revset.rs b/lib/src/revset.rs index 08fdac08be3..313502ad33a 100644 --- a/lib/src/revset.rs +++ b/lib/src/revset.rs @@ -25,6 +25,7 @@ use std::sync::Arc; use std::sync::LazyLock; use itertools::Itertools as _; +use pollster::FutureExt as _; use thiserror::Error; use crate::backend::BackendError; @@ -2468,7 +2469,7 @@ pub fn walk_revs<'index>( .evaluate(repo) } -fn reload_repo_at_operation( +async fn reload_repo_at_operation( repo: &dyn Repo, op_str: &str, ) -> Result<Arc<ReadonlyRepo>, RevsetResolutionError> { @@ -2477,15 +2478,19 @@ fn reload_repo_at_operation( // to the outer
repo. let base_repo = repo.base_repo(); let operation = op_walk::resolve_op_with_repo(base_repo, op_str) + .await .map_err(|err| RevsetResolutionError::Other(err.into()))?; - base_repo.reload_at(&operation).map_err(|err| match err { - RepoLoaderError::Backend(err) => RevsetResolutionError::Backend(err), - RepoLoaderError::IndexRead(_) - | RepoLoaderError::OpHeadResolution(_) - | RepoLoaderError::OpHeadsStoreError(_) - | RepoLoaderError::OpStore(_) - | RepoLoaderError::TransactionCommit(_) => RevsetResolutionError::Other(err.into()), - }) + base_repo + .reload_at(&operation) + .await + .map_err(|err| match err { + RepoLoaderError::Backend(err) => RevsetResolutionError::Backend(err), + RepoLoaderError::IndexRead(_) + | RepoLoaderError::OpHeadResolution(_) + | RepoLoaderError::OpHeadsStoreError(_) + | RepoLoaderError::OpStore(_) + | RepoLoaderError::TransactionCommit(_) => RevsetResolutionError::Other(err.into()), + }) } fn resolve_remote_bookmark( @@ -2921,7 +2926,7 @@ impl ExpressionStateFolder operation: &String, candidates: &UserRevsetExpression, ) -> Result, Self::Error> { - let repo = reload_repo_at_operation(self.repo(), operation)?; + let repo = reload_repo_at_operation(self.repo(), operation).block_on()?; self.repo_stack.push(repo); let candidates = self.fold_expression(candidates)?; let visible_heads = self.repo().view().heads().iter().cloned().collect(); diff --git a/lib/src/rewrite.rs b/lib/src/rewrite.rs index 25f9929fe4c..1c8a2afbd8b 100644 --- a/lib/src/rewrite.rs +++ b/lib/src/rewrite.rs @@ -25,7 +25,6 @@ use futures::try_join; use indexmap::IndexMap; use indexmap::IndexSet; use itertools::Itertools as _; -use pollster::FutureExt as _; use tracing::instrument; use crate::backend::BackendError; @@ -134,7 +133,7 @@ pub async fn restore_tree( let source_value = values?.before; tree_builder.set_or_remove(repo_path, source_value); } - tree_builder.write_tree(destination.store()) + tree_builder.write_tree(destination.store()).await } } @@ -145,7 +144,7 @@ 
pub async fn rebase_commit( ) -> BackendResult { let rewriter = CommitRewriter::new(mut_repo, old_commit, new_parents); let builder = rewriter.rebase().await?; - builder.write() + builder.write().await } /// Helps rewrite a commit. @@ -326,7 +325,7 @@ pub enum RebasedCommit { Abandoned { parent_id: CommitId }, } -pub fn rebase_commit_with_options( +pub async fn rebase_commit_with_options( mut rewriter: CommitRewriter<'_>, options: &RebaseOptions, ) -> BackendResult { @@ -344,11 +343,8 @@ pub fn rebase_commit_with_options( _ => None, }; let new_parents_len = rewriter.new_parents.len(); - if let Some(builder) = rewriter - .rebase_with_empty_behavior(options.empty) - .block_on()? - { - let new_commit = builder.write()?; + if let Some(builder) = rewriter.rebase_with_empty_behavior(options.empty).await? { + let new_commit = builder.write().await?; Ok(RebasedCommit::Rewritten(new_commit)) } else { assert_eq!(new_parents_len, 1); @@ -359,7 +355,7 @@ pub fn rebase_commit_with_options( } /// Moves changes from `sources` to the `destination` parent, returns new tree. 
-pub fn rebase_to_dest_parent( +pub async fn rebase_to_dest_parent( repo: &dyn Repo, sources: &[Commit], destination: &Commit, @@ -369,16 +365,17 @@ pub fn rebase_to_dest_parent( { return source.tree(); } - sources.iter().try_fold( - destination.parent_tree(repo)?, - |destination_tree, source| { - let source_parent_tree = source.parent_tree(repo)?; - let source_tree = source.tree()?; - destination_tree - .merge(source_parent_tree, source_tree) - .block_on() - }, - ) + + let mut destination_tree = destination.parent_tree(repo)?; + for source in sources { + let source_parent_tree = source.parent_tree(repo)?; + let source_tree = source.tree()?; + destination_tree = destination_tree + .merge(source_parent_tree, source_tree) + .await?; + } + + Ok(destination_tree) } #[derive(Clone, Copy, Default, PartialEq, Eq, Debug)] @@ -480,12 +477,12 @@ impl ComputedMoveCommits { self.to_abandon.extend(commit_ids); } - pub fn apply( + pub async fn apply( self, mut_repo: &mut MutableRepo, options: &RebaseOptions, ) -> BackendResult { - apply_move_commits(mut_repo, self, options) + apply_move_commits(mut_repo, self, options).await } } @@ -497,15 +494,18 @@ impl ComputedMoveCommits { /// heads of the commits in `targets`. This assumes that commits in `target` and /// `new_child_ids` can be rewritten, and there will be no cycles in the /// resulting graph. Commits in `target` should be in reverse topological order. -pub fn move_commits( +pub async fn move_commits( mut_repo: &mut MutableRepo, loc: &MoveCommitsLocation, options: &RebaseOptions, ) -> BackendResult { - compute_move_commits(mut_repo, loc)?.apply(mut_repo, options) + compute_move_commits(mut_repo, loc) + .await? 
+ .apply(mut_repo, options) + .await } -pub fn compute_move_commits( +pub async fn compute_move_commits( repo: &MutableRepo, loc: &MoveCommitsLocation, ) -> BackendResult { @@ -554,10 +554,12 @@ pub fn compute_move_commits( .try_collect() .map_err(|err| err.into_backend_error())?; - connected_target_commits = target_commit_ids - .iter() - .map(|id| repo.store().get_commit(id)) - .try_collect()?; + connected_target_commits = try_join_all( + target_commit_ids + .iter() + .map(|id| repo.store().get_commit_async(id)), + ) + .await?; // We don't have to compute the internal parents for the connected target set, // since the connected target set is the same as the target set. connected_target_commits_internal_parents = HashMap::new(); @@ -570,7 +572,7 @@ pub fn compute_move_commits( // ancestors which are not in the target set as parents. let mut target_commits_external_parents: HashMap> = HashMap::new(); for id in target_commit_ids.iter().rev() { - let commit = repo.store().get_commit(id)?; + let commit = repo.store().get_commit_async(id).await?; let mut new_parents = IndexSet::new(); for old_parent in commit.parent_ids() { if let Some(parents) = target_commits_external_parents.get(old_parent) { @@ -659,15 +661,17 @@ pub fn compute_move_commits( if let Some(children) = target_commit_external_descendants.get(id) { new_children.extend(children.iter().cloned()); } else { - new_children.push(repo.store().get_commit(id)?); + new_children.push(repo.store().get_commit_async(id).await?); } } new_children } else { - loc.new_child_ids - .iter() - .map(|id| repo.store().get_commit(id)) - .try_collect()? + try_join_all( + loc.new_child_ids + .iter() + .map(|id| repo.store().get_commit_async(id)), + ) + .await? 
}; // Compute the parents of the new children, which will include the heads of the @@ -793,7 +797,7 @@ pub fn compute_move_commits( }) } -fn apply_move_commits( +async fn apply_move_commits( mut_repo: &mut MutableRepo, commits: ComputedMoveCommits, options: &RebaseOptions, @@ -811,39 +815,42 @@ fn apply_move_commits( }; let mut rebased_commits: HashMap = HashMap::new(); - mut_repo.transform_commits( - commits.descendants, - &commits.commit_new_parents_map, - &options.rewrite_refs, - async |rewriter| { - let old_commit_id = rewriter.old_commit().id().clone(); - if commits.to_abandon.contains(&old_commit_id) { - rewriter.abandon(); - } else if rewriter.parents_changed() { - let is_target_commit = commits.target_commit_ids.contains(&old_commit_id); - let rebased_commit = rebase_commit_with_options( - rewriter, - if is_target_commit { - options + mut_repo + .transform_commits( + commits.descendants, + &commits.commit_new_parents_map, + &options.rewrite_refs, + async |rewriter| { + let old_commit_id = rewriter.old_commit().id().clone(); + if commits.to_abandon.contains(&old_commit_id) { + rewriter.abandon(); + } else if rewriter.parents_changed() { + let is_target_commit = commits.target_commit_ids.contains(&old_commit_id); + let rebased_commit = rebase_commit_with_options( + rewriter, + if is_target_commit { + options + } else { + rebase_descendant_options + }, + ) + .await?; + if let RebasedCommit::Abandoned { .. } = rebased_commit { + num_abandoned_empty += 1; + } else if is_target_commit { + num_rebased_targets += 1; } else { - rebase_descendant_options - }, - )?; - if let RebasedCommit::Abandoned { .. 
} = rebased_commit { - num_abandoned_empty += 1; - } else if is_target_commit { - num_rebased_targets += 1; + num_rebased_descendants += 1; + } + rebased_commits.insert(old_commit_id, rebased_commit); } else { - num_rebased_descendants += 1; + num_skipped_rebases += 1; } - rebased_commits.insert(old_commit_id, rebased_commit); - } else { - num_skipped_rebases += 1; - } - Ok(()) - }, - )?; + Ok(()) + }, + ) + .await?; Ok(MoveCommitsStats { num_rebased_targets, @@ -965,7 +972,10 @@ pub async fn duplicate_commits( if let Some(desc) = target_descriptions.get(original_commit_id) { new_commit_builder = new_commit_builder.set_description(desc); } - duplicated_old_to_new.insert(original_commit_id.clone(), new_commit_builder.write()?); + duplicated_old_to_new.insert( + original_commit_id.clone(), + new_commit_builder.write().await?, + ); } // Replace the original commit IDs in `target_head_ids` with the duplicated @@ -981,29 +991,31 @@ pub async fn duplicate_commits( // Rebase new children onto the target heads. let children_commit_ids_set: HashSet = children_commit_ids.iter().cloned().collect(); - mut_repo.transform_descendants(children_commit_ids.to_vec(), async |mut rewriter| { - if children_commit_ids_set.contains(rewriter.old_commit().id()) { - let mut child_new_parent_ids = IndexSet::new(); - for old_parent_id in rewriter.old_commit().parent_ids() { - // If the original parents of the new children are the new parents of - // `target_head_ids`, replace them with `target_head_ids` since we are - // "inserting" the target commits in between the new parents and the new - // children. 
- if parent_commit_ids.contains(old_parent_id) { - child_new_parent_ids.extend(target_head_ids.clone()); - } else { - child_new_parent_ids.insert(old_parent_id.clone()); + mut_repo + .transform_descendants(children_commit_ids.to_vec(), async |mut rewriter| { + if children_commit_ids_set.contains(rewriter.old_commit().id()) { + let mut child_new_parent_ids = IndexSet::new(); + for old_parent_id in rewriter.old_commit().parent_ids() { + // If the original parents of the new children are the new parents of + // `target_head_ids`, replace them with `target_head_ids` since we are + // "inserting" the target commits in between the new parents and the new + // children. + if parent_commit_ids.contains(old_parent_id) { + child_new_parent_ids.extend(target_head_ids.clone()); + } else { + child_new_parent_ids.insert(old_parent_id.clone()); + } } + // If not already present, add `target_head_ids` as parents of the new child + // commit. + child_new_parent_ids.extend(target_head_ids.clone()); + rewriter.set_new_parents(child_new_parent_ids.into_iter().collect()); } - // If not already present, add `target_head_ids` as parents of the new child - // commit. - child_new_parent_ids.extend(target_head_ids.clone()); - rewriter.set_new_parents(child_new_parent_ids.into_iter().collect()); - } - num_rebased += 1; - rewriter.rebase().await?.write()?; - Ok(()) - })?; + num_rebased += 1; + rewriter.rebase().await?.write().await?; + Ok(()) + }) + .await?; Ok(DuplicateCommitsStats { duplicated_commits: duplicated_old_to_new, @@ -1020,7 +1032,7 @@ pub async fn duplicate_commits( /// If `target_descriptions` is not empty, it will be consulted to retrieve the /// new descriptions of the target commits, falling back to the original if /// the map does not contain an entry for a given commit. 
-pub fn duplicate_commits_onto_parents( +pub async fn duplicate_commits_onto_parents( mut_repo: &mut MutableRepo, target_commits: &[CommitId], target_descriptions: &HashMap, @@ -1034,7 +1046,10 @@ pub fn duplicate_commits_onto_parents( // Topological order ensures that any parents of the original commit are // either not in `target_commits` or were already duplicated. for original_commit_id in target_commits.iter().rev() { - let original_commit = mut_repo.store().get_commit(original_commit_id)?; + let original_commit = mut_repo + .store() + .get_commit_async(original_commit_id) + .await?; let new_parent_ids = original_commit .parent_ids() .iter() @@ -1053,7 +1068,10 @@ pub fn duplicate_commits_onto_parents( if let Some(desc) = target_descriptions.get(original_commit_id) { new_commit_builder = new_commit_builder.set_description(desc); } - duplicated_old_to_new.insert(original_commit_id.clone(), new_commit_builder.write()?); + duplicated_old_to_new.insert( + original_commit_id.clone(), + new_commit_builder.write().await?, + ); } Ok(DuplicateCommitsStats { @@ -1150,7 +1168,7 @@ pub struct SquashedCommit<'repo> { /// Squash `sources` into `destination` and return a [`SquashedCommit`] for the /// resulting commit. Caller is responsible for setting the description and /// finishing the commit. -pub fn squash_commits<'repo>( +pub async fn squash_commits<'repo>( repo: &'repo mut MutableRepo, sources: &[CommitWithSelection], destination: &Commit, @@ -1195,10 +1213,11 @@ pub fn squash_commits<'repo>( source.commit.selected_tree.clone(), source.commit.parent_tree.clone(), ) - .block_on()?; + .await?; repo.rewrite_commit(&source.commit.commit) .set_tree_id(new_source_tree.id().clone()) - .write()?; + .write() + .await?; } } @@ -1220,7 +1239,8 @@ pub fn squash_commits<'repo>( RebasedCommit::Rewritten(commit) => commit, RebasedCommit::Abandoned { .. 
} => panic!("all commits should be kept"), }; - })?; + }) + .await?; } // Apply the selected changes onto the destination let mut destination_tree = rewritten_destination.tree()?; @@ -1230,7 +1250,7 @@ pub fn squash_commits<'repo>( source.commit.parent_tree.clone(), source.commit.selected_tree.clone(), ) - .block_on()?; + .await?; } let mut predecessors = vec![destination.id().clone()]; predecessors.extend( @@ -1252,16 +1272,20 @@ pub fn squash_commits<'repo>( /// Find divergent commits from the target that are already present with /// identical contents in the destination. These commits should be able to be /// safely abandoned. -pub fn find_duplicate_divergent_commits( +pub async fn find_duplicate_divergent_commits( repo: &dyn Repo, new_parent_ids: &[CommitId], target: &MoveCommitsTarget, ) -> BackendResult> { let target_commits: Vec = match target { - MoveCommitsTarget::Commits(commit_ids) => commit_ids - .iter() - .map(|commit_id| repo.store().get_commit(commit_id)) - .try_collect()?, + MoveCommitsTarget::Commits(commit_ids) => { + try_join_all( + commit_ids + .iter() + .map(|commit_id| repo.store().get_commit_async(commit_id)), + ) + .await? + } MoveCommitsTarget::Roots(root_ids) => RevsetExpression::commits(root_ids.clone()) .descendants() .evaluate(repo) @@ -1314,9 +1338,13 @@ pub fn find_duplicate_divergent_commits( continue; } - let ancestor_candidate = repo.store().get_commit(&ancestor_candidate_id)?; + let ancestor_candidate = repo + .store() + .get_commit_async(&ancestor_candidate_id) + .await?; let new_tree = - rebase_to_dest_parent(repo, slice::from_ref(target_commit), &ancestor_candidate)?; + rebase_to_dest_parent(repo, slice::from_ref(target_commit), &ancestor_candidate) + .await?; // Check whether the rebased commit would have the same tree as the existing // commit if they had the same parents. If so, we can skip this rebased commit. 
if new_tree.id() == *ancestor_candidate.tree_id() { diff --git a/lib/src/transaction.rs b/lib/src/transaction.rs index 2346e7e6030..f67f6740393 100644 --- a/lib/src/transaction.rs +++ b/lib/src/transaction.rs @@ -17,7 +17,6 @@ use std::sync::Arc; use itertools::Itertools as _; -use pollster::FutureExt as _; use thiserror::Error; use crate::backend::Timestamp; @@ -95,7 +94,7 @@ impl Transaction { &mut self.mut_repo } - pub fn merge_operation(&mut self, other_op: Operation) -> Result<(), RepoLoaderError> { + pub async fn merge_operation(&mut self, other_op: Operation) -> Result<(), RepoLoaderError> { let ancestor_op = dag_walk::closest_common_node_ok( self.parent_ops.iter().cloned().map(Ok), [Ok(other_op.clone())], @@ -104,8 +103,8 @@ impl Transaction { )? .unwrap(); let repo_loader = self.base_repo().loader(); - let base_repo = repo_loader.load_at(&ancestor_op)?; - let other_repo = repo_loader.load_at(&other_op)?; + let base_repo = repo_loader.load_at(&ancestor_op).await?; + let other_repo = repo_loader.load_at(&other_op).await?; self.parent_ops.push(other_op); let merged_repo = self.repo_mut(); merged_repo.merge(&base_repo, &other_repo)?; @@ -117,17 +116,17 @@ impl Transaction { } /// Writes the transaction to the operation store and publishes it. - pub fn commit( + pub async fn commit( self, description: impl Into, ) -> Result, TransactionCommitError> { - self.write(description)?.publish() + self.write(description).await?.publish().await } /// Writes the transaction to the operation store, but does not publish it. /// That means that a repo can be loaded at the operation, but the /// operation will not be seen when loading the repo at head. 
- pub fn write( + pub async fn write( mut self, description: impl Into<String>, ) -> Result<UnpublishedOperation, TransactionCommitError> { @@ -141,10 +140,7 @@ impl Transaction { let (mut_index, view, predecessors) = mut_repo.consume(); let operation = { - let view_id = base_repo - .op_store() - .write_view(view.store_view()) - .block_on()?; + let view_id = base_repo.op_store().write_view(view.store_view()).await?; self.op_metadata.description = description.into(); self.op_metadata.time.end = self.end_time.unwrap_or_else(Timestamp::now); let parents = self.parent_ops.iter().map(|op| op.id().clone()).collect(); @@ -157,11 +153,14 @@ impl Transaction { let new_op_id = base_repo .op_store() .write_operation(&store_operation) - .block_on()?; + .await?; Operation::new(base_repo.op_store().clone(), new_op_id, store_operation) }; - let index = base_repo.index_store().write_index(mut_index, &operation)?; + let index = base_repo + .index_store() + .write_index(mut_index, &operation) + .await?; let unpublished = UnpublishedOperation::new(base_repo.loader(), operation, view, index); Ok(unpublished) } @@ -221,11 +220,11 @@ impl UnpublishedOperation { self.repo.operation() } - pub fn publish(self) -> Result<Arc<ReadonlyRepo>, TransactionCommitError> { - let _lock = self.op_heads_store.lock().block_on()?; + pub async fn publish(self) -> Result<Arc<ReadonlyRepo>, TransactionCommitError> { + let _lock = self.op_heads_store.lock().await?; self.op_heads_store .update_op_heads(self.operation().parent_ids(), self.operation().id()) - .block_on()?; + .await?; Ok(self.repo) } diff --git a/lib/src/tree_builder.rs b/lib/src/tree_builder.rs index d7c55d65848..070a6086cbf 100644 --- a/lib/src/tree_builder.rs +++ b/lib/src/tree_builder.rs @@ -17,8 +17,6 @@ use std::collections::BTreeMap; use std::sync::Arc; -use pollster::FutureExt as _; - use crate::backend; use crate::backend::BackendResult; use crate::backend::TreeId; @@ -75,7 +73,7 @@ impl TreeBuilder { } } - pub fn write_tree(self) -> BackendResult<TreeId> { + pub async fn write_tree(self) -> BackendResult<TreeId> { if
self.overrides.is_empty() { return Ok(self.base_tree_id); } @@ -112,14 +110,14 @@ impl TreeBuilder { } else { let data = backend::Tree::from_sorted_entries(cur_entries.into_iter().collect()); - let tree = store.write_tree(&dir, data).block_on()?; + let tree = store.write_tree(&dir, data).await?; parent_entries.insert(basename.to_owned(), TreeValue::Tree(tree.id().clone())); } } else { // We're writing the root tree. Write it even if empty. Return its id. assert!(trees_to_write.is_empty()); let data = backend::Tree::from_sorted_entries(cur_entries.into_iter().collect()); - let written_tree = store.write_tree(&dir, data).block_on()?; + let written_tree = store.write_tree(&dir, data).await?; return Ok(written_tree.id().clone()); } } diff --git a/lib/src/working_copy.rs b/lib/src/working_copy.rs index 1e2dee5f6f3..5f84d00aa49 100644 --- a/lib/src/working_copy.rs +++ b/lib/src/working_copy.rs @@ -23,7 +23,6 @@ use std::sync::Arc; use async_trait::async_trait; use itertools::Itertools as _; -use pollster::FutureExt as _; use thiserror::Error; use tracing::instrument; @@ -357,7 +356,7 @@ impl WorkingCopyFreshness { /// Determine the freshness of the provided working copy relative to the /// target commit. #[instrument(skip_all)] - pub fn check_stale( + pub async fn check_stale( locked_wc: &dyn LockedWorkingCopy, wc_commit: &Commit, repo: &ReadonlyRepo, @@ -368,7 +367,10 @@ impl WorkingCopyFreshness { // The working copy isn't stale, and no need to reload the repo. Ok(Self::Fresh) } else { - let wc_operation = repo.loader().load_operation(locked_wc.old_operation_id())?; + let wc_operation = repo + .loader() + .load_operation(locked_wc.old_operation_id()) + .await?; let repo_operation = repo.operation(); let ancestor_op = dag_walk::closest_common_node_ok( [Ok(wc_operation.clone())], @@ -413,7 +415,7 @@ pub enum RecoverWorkspaceError { } /// Recover this workspace to its last known checkout. 
-pub fn create_and_check_out_recovery_commit( +pub async fn create_and_check_out_recovery_commit( locked_wc: &mut dyn LockedWorkingCopy, repo: &Arc, workspace_name: WorkspaceNameBuf, @@ -432,11 +434,12 @@ pub fn create_and_check_out_recovery_commit( let new_commit = repo_mut .new_commit(vec![commit_id.clone()], commit.tree_id().clone()) .set_description(description) - .write()?; + .write() + .await?; repo_mut.set_wc_commit(workspace_name, new_commit.id().clone())?; - let repo = tx.commit("recovery commit")?; - locked_wc.recover(&new_commit).block_on()?; + let repo = tx.commit("recovery commit").await?; + locked_wc.recover(&new_commit).await?; Ok((repo, new_commit)) } diff --git a/lib/src/workspace.rs b/lib/src/workspace.rs index ae19d14b3e7..3bea6e069b3 100644 --- a/lib/src/workspace.rs +++ b/lib/src/workspace.rs @@ -138,8 +138,11 @@ fn init_working_copy( let mut tx = repo.start_transaction(); tx.repo_mut() - .check_out(workspace_name.clone(), &repo.store().root_commit())?; - let repo = tx.commit(format!("add workspace '{}'", workspace_name.as_symbol()))?; + .check_out(workspace_name.clone(), &repo.store().root_commit()) + .block_on()?; + let repo = tx + .commit(format!("add workspace '{}'", workspace_name.as_symbol())) + .block_on()?; let working_copy = working_copy_factory.init_working_copy( repo.store().clone(), @@ -307,6 +310,7 @@ impl Workspace { index_store_initializer, submodule_store_initializer, ) + .block_on() .map_err(|repo_init_err| match repo_init_err { RepoInitError::Backend(err) => WorkspaceInitError::Backend(err), RepoInitError::OpHeadsStore(err) => WorkspaceInitError::OpHeadsStore(err), @@ -425,7 +429,7 @@ impl Workspace { }) } - pub fn check_out( + pub async fn check_out( &mut self, operation_id: OperationId, old_tree_id: Option<&MergedTreeId>, @@ -441,7 +445,7 @@ impl Workspace { { return Err(CheckoutError::ConcurrentCheckout); } - let stats = locked_ws.locked_wc().check_out(commit).block_on()?; + let stats = 
locked_ws.locked_wc().check_out(commit).await?; locked_ws .finish(operation_id) .map_err(|err| CheckoutError::Other { diff --git a/lib/tests/test_annotate.rs b/lib/tests/test_annotate.rs index dd1217cfb26..fb0f97d393a 100644 --- a/lib/tests/test_annotate.rs +++ b/lib/tests/test_annotate.rs @@ -30,6 +30,7 @@ use jj_lib::repo::Repo; use jj_lib::repo_path::RepoPath; use jj_lib::revset::ResolvedRevsetExpression; use jj_lib::revset::RevsetExpression; +use pollster::FutureExt as _; use testutils::TestRepo; use testutils::create_tree; use testutils::read_file; @@ -55,6 +56,7 @@ fn create_commit_fn( .set_committer(signature.clone()) .set_description(description) .write() + .block_on() .unwrap() } } @@ -70,8 +72,10 @@ fn annotate_within( domain: &Arc, file_path: &RepoPath, ) -> String { - let mut annotator = FileAnnotator::from_commit(commit, file_path).unwrap(); - annotator.compute(repo, domain).unwrap(); + let mut annotator = FileAnnotator::from_commit(commit, file_path) + .block_on() + .unwrap(); + annotator.compute(repo, domain).block_on().unwrap(); format_annotation(repo, &annotator.to_annotation()) } @@ -82,7 +86,10 @@ fn annotate_parent_tree(repo: &dyn Repo, commit: &Commit, file_path: &RepoPath) value => panic!("unexpected path value: {value:?}"), }; let mut annotator = FileAnnotator::with_file_content(commit.id(), file_path, text); - annotator.compute(repo, &RevsetExpression::all()).unwrap(); + annotator + .compute(repo, &RevsetExpression::all()) + .block_on() + .unwrap(); format_annotation(repo, &annotator.to_annotation()) } @@ -210,7 +217,9 @@ fn test_annotate_merge_simple() { "); // Calculate incrementally - let mut annotator = FileAnnotator::from_commit(&commit4, file_path).unwrap(); + let mut annotator = FileAnnotator::from_commit(&commit4, file_path) + .block_on() + .unwrap(); assert_eq!(annotator.pending_commits().collect_vec(), [commit4.id()]); insta::assert_snapshot!(format_annotation(tx.repo(), &annotator.to_annotation()), @r" commit4:1*: 2 @@ -226,6 
+235,7 @@ fn test_annotate_merge_simple() { commit2.id().clone(), ]), ) + .block_on() .unwrap(); assert_eq!(annotator.pending_commits().collect_vec(), [commit1.id()]); insta::assert_snapshot!(format_annotation(tx.repo(), &annotator.to_annotation()), @r" @@ -238,6 +248,7 @@ fn test_annotate_merge_simple() { tx.repo(), &RevsetExpression::commits(vec![commit1.id().clone()]), ) + .block_on() .unwrap(); assert!(annotator.pending_commits().next().is_none()); insta::assert_snapshot!(format_annotation(tx.repo(), &annotator.to_annotation()), @r" diff --git a/lib/tests/test_bad_locking.rs b/lib/tests/test_bad_locking.rs index 7063065b2aa..735548e51ce 100644 --- a/lib/tests/test_bad_locking.rs +++ b/lib/tests/test_bad_locking.rs @@ -113,7 +113,7 @@ fn test_bad_locking_children(backend: TestRepoBackend) { let mut tx = repo.start_transaction(); let initial = write_random_commit(tx.repo_mut()); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); // Simulate a write of a commit that happens on one machine let machine1_root = test_workspace.root_dir().join("machine1"); @@ -125,10 +125,14 @@ fn test_bad_locking_children(backend: TestRepoBackend) { &default_working_copy_factories(), ) .unwrap(); - let machine1_repo = machine1_workspace.repo_loader().load_at_head().unwrap(); + let machine1_repo = machine1_workspace + .repo_loader() + .load_at_head() + .block_on() + .unwrap(); let mut machine1_tx = machine1_repo.start_transaction(); let child1 = write_random_commit_with_parents(machine1_tx.repo_mut(), &[&initial]); - machine1_tx.commit("test").unwrap(); + machine1_tx.commit("test").block_on().unwrap(); // Simulate a write of a commit that happens on another machine let machine2_root = test_workspace.root_dir().join("machine2"); @@ -140,10 +144,14 @@ fn test_bad_locking_children(backend: TestRepoBackend) { &default_working_copy_factories(), ) .unwrap(); - let machine2_repo = machine2_workspace.repo_loader().load_at_head().unwrap(); + let machine2_repo = 
machine2_workspace + .repo_loader() + .load_at_head() + .block_on() + .unwrap(); let mut machine2_tx = machine2_repo.start_transaction(); let child2 = write_random_commit_with_parents(machine2_tx.repo_mut(), &[&initial]); - machine2_tx.commit("test").unwrap(); + machine2_tx.commit("test").block_on().unwrap(); // Simulate that the distributed file system now has received the changes from // both machines @@ -156,7 +164,11 @@ fn test_bad_locking_children(backend: TestRepoBackend) { &default_working_copy_factories(), ) .unwrap(); - let merged_repo = merged_workspace.repo_loader().load_at_head().unwrap(); + let merged_repo = merged_workspace + .repo_loader() + .load_at_head() + .block_on() + .unwrap(); assert!(merged_repo.view().heads().contains(child1.id())); assert!(merged_repo.view().heads().contains(child2.id())); let op_id = merged_repo.op_id().clone(); @@ -181,7 +193,7 @@ fn test_bad_locking_interrupted(backend: TestRepoBackend) { let mut tx = repo.start_transaction(); let initial = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Simulate a crash that resulted in the old op-head left in place. We simulate // it somewhat hackily by copying the .jj/op_heads/ directory before the @@ -192,7 +204,13 @@ fn test_bad_locking_interrupted(backend: TestRepoBackend) { copy_directory(&op_heads_dir, &backup_path); let mut tx = repo.start_transaction(); write_random_commit_with_parents(tx.repo_mut(), &[&initial]); - let op_id = tx.commit("test").unwrap().operation().id().clone(); + let op_id = tx + .commit("test") + .block_on() + .unwrap() + .operation() + .id() + .clone(); copy_directory(&backup_path, &op_heads_dir); // Reload the repo and check that only the new head is present. 
diff --git a/lib/tests/test_commit_builder.rs b/lib/tests/test_commit_builder.rs index 9dc5e22bd07..ada63dd0b71 100644 --- a/lib/tests/test_commit_builder.rs +++ b/lib/tests/test_commit_builder.rs @@ -115,8 +115,8 @@ fn test_initial(backend: TestRepoBackend) { assert_eq!(builder.change_id(), &change_id); assert_eq!(builder.author(), &author_signature); assert_eq!(builder.committer(), &committer_signature); - let commit = builder.write().unwrap(); - let repo = tx.commit("test").unwrap(); + let commit = builder.write().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let parents: Vec<_> = commit.parents().try_collect().unwrap(); assert_eq!(parents, vec![store.root_commit()]); @@ -161,8 +161,9 @@ fn test_rewrite(backend: TestRepoBackend) { .repo_mut() .new_commit(vec![store.root_commit_id().clone()], initial_tree.id()) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let rewritten_tree = create_tree( &repo, @@ -193,9 +194,10 @@ fn test_rewrite(backend: TestRepoBackend) { .rewrite_commit(&initial_commit) .set_tree_id(rewritten_tree.id().clone()) .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let parents: Vec<_> = rewritten_commit.parents().try_collect().unwrap(); assert_eq!(parents, vec![store.root_commit()]); assert_eq!( @@ -254,12 +256,13 @@ fn test_rewrite_update_missing_user(backend: TestRepoBackend) { repo.store().empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); assert_eq!(initial_commit.author().name, ""); assert_eq!(initial_commit.author().email, ""); assert_eq!(initial_commit.committer().name, ""); assert_eq!(initial_commit.committer().email, ""); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let mut config = StackedConfig::with_defaults(); 
config.add_layer( @@ -280,6 +283,7 @@ fn test_rewrite_update_missing_user(backend: TestRepoBackend) { .repo_mut() .rewrite_commit(&initial_commit) .write() + .block_on() .unwrap(); assert_eq!(rewritten_commit.author().name, "Configured User"); @@ -313,8 +317,9 @@ fn test_rewrite_resets_author_timestamp(backend: TestRepoBackend) { repo.store().empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let initial_timestamp = Timestamp::from_datetime(chrono::DateTime::parse_from_rfc3339(initial_timestamp).unwrap()); @@ -333,9 +338,10 @@ fn test_rewrite_resets_author_timestamp(backend: TestRepoBackend) { .rewrite_commit(&initial_commit) .set_description("No longer discardable") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + tx.commit("test").block_on().unwrap(); let new_timestamp_1 = Timestamp::from_datetime(chrono::DateTime::parse_from_rfc3339(new_timestamp_1).unwrap()); @@ -357,9 +363,10 @@ fn test_rewrite_resets_author_timestamp(backend: TestRepoBackend) { .rewrite_commit(&rewritten_commit_1) .set_description("New description") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + tx.commit("test").block_on().unwrap(); let new_timestamp_2 = Timestamp::from_datetime(chrono::DateTime::parse_from_rfc3339(new_timestamp_2).unwrap()); @@ -386,22 +393,23 @@ fn test_rewrite_to_identical_commit(backend: TestRepoBackend) { store.empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Create commit identical to the original let mut tx = repo.start_transaction(); let mut builder = tx.repo_mut().rewrite_commit(&commit1).detach(); builder.set_predecessors(vec![]); // 
Writing to the store should work - let commit2 = builder.write_hidden().unwrap(); + let commit2 = builder.write_hidden().block_on().unwrap(); assert_eq!(commit1, commit2); // Writing to the repo shouldn't work, which would create cycle in // predecessors/parent mappings - let result = builder.write(tx.repo_mut()); + let result = builder.write(tx.repo_mut()).block_on(); assert_matches!(result, Err(BackendError::Other(_))); - tx.repo_mut().rebase_descendants().unwrap(); - tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + tx.commit("test").block_on().unwrap(); // Create two rewritten commits of the same content and metadata let mut tx = repo.start_transaction(); @@ -409,15 +417,17 @@ fn test_rewrite_to_identical_commit(backend: TestRepoBackend) { .rewrite_commit(&commit1) .set_description("rewritten") .write() + .block_on() .unwrap(); let result = tx .repo_mut() .rewrite_commit(&commit1) .set_description("rewritten") - .write(); + .write() + .block_on(); assert_matches!(result, Err(BackendError::Other(_))); - tx.repo_mut().rebase_descendants().unwrap(); - tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + tx.commit("test").block_on().unwrap(); } #[test_case(TestRepoBackend::Simple ; "simple backend")] @@ -431,7 +441,7 @@ fn test_commit_builder_descendants(backend: TestRepoBackend) { let commit1 = write_random_commit(tx.repo_mut()); let commit2 = write_random_commit_with_parents(tx.repo_mut(), &[&commit1]); let commit3 = write_random_commit_with_parents(tx.repo_mut(), &[&commit2]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Test with for_new_commit() let mut tx = repo.start_transaction(); @@ -441,6 +451,7 @@ fn test_commit_builder_descendants(backend: TestRepoBackend) { store.empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); let rebase_map = rebase_descendants_with_options_return_map(tx.repo_mut(), &RebaseOptions::default()); @@ 
-448,7 +459,7 @@ fn test_commit_builder_descendants(backend: TestRepoBackend) { // Test with for_rewrite_from() let mut tx = repo.start_transaction(); - let commit4 = tx.repo_mut().rewrite_commit(&commit2).write().unwrap(); + let commit4 = tx.repo_mut().rewrite_commit(&commit2).write().block_on().unwrap(); let rebase_map = rebase_descendants_with_options_return_map(tx.repo_mut(), &RebaseOptions::default()); assert_rebased_onto(tx.repo_mut(), &rebase_map, &commit3, &[commit4.id()]); @@ -461,6 +472,7 @@ fn test_commit_builder_descendants(backend: TestRepoBackend) { .clear_rewrite_source() .generate_new_change_id() .write() + .block_on() .unwrap(); let rebase_map = rebase_descendants_with_options_return_map(tx.repo_mut(), &RebaseOptions::default()); diff --git a/lib/tests/test_commit_concurrent.rs b/lib/tests/test_commit_concurrent.rs index 1c13ae67fb4..e85900e3614 100644 --- a/lib/tests/test_commit_concurrent.rs +++ b/lib/tests/test_commit_concurrent.rs @@ -59,11 +59,11 @@ fn test_commit_parallel(backend: TestRepoBackend) { s.spawn(move || { let mut tx = repo.start_transaction(); write_random_commit(tx.repo_mut()); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); }); } }); - let repo = repo.reload_at_head().unwrap(); + let repo = repo.reload_at_head().block_on().unwrap(); // One commit per thread plus the commit from the initial working-copy on top of // the root commit assert_eq!(repo.view().heads().len(), num_threads + 1); @@ -90,7 +90,7 @@ fn test_commit_parallel_instances(backend: TestRepoBackend) { s.spawn(move || { let mut tx = repo.start_transaction(); write_random_commit(tx.repo_mut()); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); }); } }); diff --git a/lib/tests/test_default_revset_graph_iterator.rs b/lib/tests/test_default_revset_graph_iterator.rs index f81b102660f..2769b712c07 100644 --- a/lib/tests/test_default_revset_graph_iterator.rs +++ b/lib/tests/test_default_revset_graph_iterator.rs @@ -21,6 +21,7 @@ 
use jj_lib::graph::GraphEdge; use jj_lib::repo::ReadonlyRepo; use jj_lib::repo::Repo as _; use jj_lib::revset::ResolvedExpression; +use pollster::FutureExt as _; use test_case::test_case; use testutils::TestRepo; use testutils::write_random_commit; @@ -71,7 +72,7 @@ fn test_graph_iterator_linearized(skip_transitive_edges: bool, padding: u32) { let commit_b = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a]); let commit_c = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a]); let commit_d = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b, &commit_c]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let root_commit = repo.store().root_commit(); let revset = revset_for_commits(repo.as_ref(), &[&commit_a, &commit_d]); @@ -114,7 +115,7 @@ fn test_graph_iterator_virtual_octopus(skip_transitive_edges: bool, padding: u32 let commit_d = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a, &commit_b]); let commit_e = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b, &commit_c]); let commit_f = write_random_commit_with_parents(tx.repo_mut(), &[&commit_d, &commit_e]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let root_commit = repo.store().root_commit(); let revset = revset_for_commits(repo.as_ref(), &[&commit_a, &commit_b, &commit_c, &commit_f]); @@ -168,7 +169,7 @@ fn test_graph_iterator_simple_fork(skip_transitive_edges: bool, padding: u32) { let commit_c = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b]); let commit_d = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b]); let commit_e = write_random_commit_with_parents(tx.repo_mut(), &[&commit_d]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let root_commit = repo.store().root_commit(); let revset = revset_for_commits(repo.as_ref(), &[&commit_a, &commit_c, &commit_e]); @@ -212,7 +213,7 @@ fn 
test_graph_iterator_multiple_missing(skip_transitive_edges: bool, padding: u3 let commit_d = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a, &commit_b]); let commit_e = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b, &commit_c]); let commit_f = write_random_commit_with_parents(tx.repo_mut(), &[&commit_d, &commit_e]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let root_commit = repo.store().root_commit(); let revset = revset_for_commits(repo.as_ref(), &[&commit_b, &commit_f]); @@ -260,7 +261,7 @@ fn test_graph_iterator_edge_to_ancestor(skip_transitive_edges: bool, padding: u3 let commit_d = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b, &commit_c]); let commit_e = write_random_commit_with_parents(tx.repo_mut(), &[&commit_c]); let commit_f = write_random_commit_with_parents(tx.repo_mut(), &[&commit_d, &commit_e]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let revset = revset_for_commits(repo.as_ref(), &[&commit_c, &commit_d, &commit_f]); let commits: Vec<_> = revset @@ -318,7 +319,7 @@ fn test_graph_iterator_edge_escapes_from_(skip_transitive_edges: bool, padding: let commit_h = write_random_commit_with_parents(tx.repo_mut(), &[&commit_f]); let commit_i = write_random_commit_with_parents(tx.repo_mut(), &[&commit_e, &commit_h]); let commit_j = write_random_commit_with_parents(tx.repo_mut(), &[&commit_g, &commit_i]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let root_commit = repo.store().root_commit(); let revset = revset_for_commits( diff --git a/lib/tests/test_eol.rs b/lib/tests/test_eol.rs index 0221473af7b..76e03b1f8ac 100644 --- a/lib/tests/test_eol.rs +++ b/lib/tests/test_eol.rs @@ -146,6 +146,7 @@ fn test_eol_conversion_snapshot( None, &file_removed_commit, ) + .block_on() .unwrap(); assert!(!file_disk_path.exists()); @@ -173,6 +174,7 @@ fn test_eol_conversion_snapshot( 
None, &file_added_commit, ) + .block_on() .unwrap(); assert!(file_disk_path.exists()); let new_tree = test_workspace.snapshot().unwrap(); @@ -221,12 +223,14 @@ fn create_conflict_snapshot_and_read(extra_setting: &str) -> Vec { .repo_mut() .new_commit(vec![root_commit.id().clone()], tree.id()) .write() + .block_on() .unwrap(); - tx.commit("commit parent1").unwrap(); + tx.commit("commit parent1").block_on().unwrap(); test_workspace .workspace .check_out(test_workspace.repo.op_id().clone(), None, &root_commit) + .block_on() .unwrap(); testutils::write_working_copy_file( test_workspace.workspace.workspace_root(), @@ -239,11 +243,12 @@ fn create_conflict_snapshot_and_read(extra_setting: &str) -> Vec { .repo_mut() .new_commit(vec![root_commit.id().clone()], tree.id()) .write() + .block_on() .unwrap(); - tx.commit("commit parent2").unwrap(); + tx.commit("commit parent2").block_on().unwrap(); // Reload the repo to pick up the new commits. - test_workspace.repo = test_workspace.repo.reload_at_head().unwrap(); + test_workspace.repo = test_workspace.repo.reload_at_head().block_on().unwrap(); // Create the merge commit. let tree = merge_commit_trees(&*test_workspace.repo, &[parent1_commit, parent2_commit]) .block_on() @@ -254,6 +259,7 @@ fn create_conflict_snapshot_and_read(extra_setting: &str) -> Vec { test_workspace .workspace .check_out(test_workspace.repo.op_id().clone(), None, &merge_commit) + .block_on() .unwrap(); let mut file = File::options().append(true).open(&file_disk_path).unwrap(); file.write_all(b"c\r\n").unwrap(); @@ -297,6 +303,7 @@ fn create_conflict_snapshot_and_read(extra_setting: &str) -> Vec { None, &test_workspace.workspace.repo_loader().store().root_commit(), ) + .block_on() .unwrap(); // We have to query the Commit again. The Workspace is backed by a different // Store from the original Commit. 
@@ -309,6 +316,7 @@ fn create_conflict_snapshot_and_read(extra_setting: &str) -> Vec { test_workspace .workspace .check_out(test_workspace.repo.op_id().clone(), None, &merge_commit) + .block_on() .unwrap(); assert!(std::fs::exists(&file_disk_path).unwrap()); @@ -423,17 +431,19 @@ fn test_eol_conversion_update_conflicts( .repo_mut() .new_commit(vec![root_commit.id().clone()], tree.id()) .write() + .block_on() .unwrap(); let tree = testutils::create_tree(&test_workspace.repo, &[(file_repo_path, parent2_contents)]); let parent2_commit = tx .repo_mut() .new_commit(vec![root_commit.id().clone()], tree.id()) .write() + .block_on() .unwrap(); - tx.commit("commit parent 2").unwrap(); + tx.commit("commit parent 2").block_on().unwrap(); // Reload the repo to pick up the new commits. - test_workspace.repo = test_workspace.repo.reload_at_head().unwrap(); + test_workspace.repo = test_workspace.repo.reload_at_head().block_on().unwrap(); // Create the merge commit. let tree = merge_commit_trees(&*test_workspace.repo, &[parent1_commit, parent2_commit]) .block_on() @@ -444,6 +454,7 @@ fn test_eol_conversion_update_conflicts( test_workspace .workspace .check_out(test_workspace.repo.op_id().clone(), None, &merge_commit) + .block_on() .unwrap(); let contents = std::fs::read(&file_disk_path).unwrap(); for line in contents.lines_with_terminator() { @@ -552,6 +563,7 @@ fn test_eol_conversion_checkout( None, &test_workspace.workspace.repo_loader().store().root_commit(), ) + .block_on() .unwrap(); assert!(!std::fs::exists(&file_disk_path).unwrap()); @@ -579,6 +591,7 @@ fn test_eol_conversion_checkout( test_workspace .workspace .check_out(test_workspace.repo.op_id().clone(), None, &commit) + .block_on() .unwrap(); // When we take a snapshot now, the tree may not be clean, because the EOL our diff --git a/lib/tests/test_evolution_predecessors.rs b/lib/tests/test_evolution_predecessors.rs index 7dc97856e0b..a9caa28cf9b 100644 --- a/lib/tests/test_evolution_predecessors.rs +++ 
b/lib/tests/test_evolution_predecessors.rs @@ -48,7 +48,7 @@ fn test_walk_predecessors_basic() { let mut tx = repo0.start_transaction(); let commit1 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); let commit2 = tx @@ -56,9 +56,10 @@ fn test_walk_predecessors_basic() { .rewrite_commit(&commit1) .set_description("rewritten") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo2 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); // The root commit has no associated operation because it isn't "created" at // the root operation. @@ -92,7 +93,7 @@ fn test_walk_predecessors_basic_legacy_op() { let mut tx = repo0.start_transaction(); let commit1 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); let commit2 = tx @@ -100,9 +101,10 @@ fn test_walk_predecessors_basic_legacy_op() { .rewrite_commit(&commit1) .set_description("rewritten") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo2 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); // Save operation without the predecessors as old jj would do. 
We only need // to rewrite the head operation since walk_predecessors() will fall back to @@ -111,8 +113,8 @@ fn test_walk_predecessors_basic_legacy_op() { let mut data = repo2.operation().store_operation().clone(); data.commit_predecessors = None; let op_id = loader.op_store().write_operation(&data).block_on().unwrap(); - let op = loader.load_operation(&op_id).unwrap(); - loader.load_at(&op).unwrap() + let op = loader.load_operation(&op_id).block_on().unwrap(); + loader.load_at(&op).block_on().unwrap() }; let entries = collect_predecessors(&repo2, commit2.id()); @@ -132,7 +134,7 @@ fn test_walk_predecessors_concurrent_ops() { let mut tx = repo0.start_transaction(); let commit1 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx2 = repo1.start_transaction(); let commit2 = tx2 @@ -140,16 +142,18 @@ fn test_walk_predecessors_concurrent_ops() { .rewrite_commit(&commit1) .set_description("rewritten 2") .write() + .block_on() .unwrap(); - tx2.repo_mut().rebase_descendants().unwrap(); + tx2.repo_mut().rebase_descendants().block_on().unwrap(); let mut tx3 = repo1.start_transaction(); let commit3 = tx3 .repo_mut() .rewrite_commit(&commit1) .set_description("rewritten 3") .write() + .block_on() .unwrap(); - tx3.repo_mut().rebase_descendants().unwrap(); + tx3.repo_mut().rebase_descendants().block_on().unwrap(); let repo4 = commit_transactions(vec![tx2, tx3]); let [op2, op3] = repo4 .operation() @@ -164,15 +168,17 @@ fn test_walk_predecessors_concurrent_ops() { .rewrite_commit(&commit2) .set_description("rewritten 4") .write() + .block_on() .unwrap(); let commit5 = tx .repo_mut() .rewrite_commit(&commit3) .set_description("rewritten 5") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo5 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo5 = tx.commit("test").block_on().unwrap(); let entries = 
collect_predecessors(&repo5, commit4.id()); assert_eq!(entries.len(), 3); @@ -206,11 +212,11 @@ fn test_walk_predecessors_multiple_predecessors_across_ops() { let mut tx = repo0.start_transaction(); let commit1 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); let commit2 = write_random_commit(tx.repo_mut()); - let repo2 = tx.commit("test").unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); let mut tx = repo2.start_transaction(); let commit3 = tx @@ -219,9 +225,10 @@ fn test_walk_predecessors_multiple_predecessors_across_ops() { .set_predecessors(vec![commit2.id().clone(), commit1.id().clone()]) .set_description("rewritten") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo3 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo3 = tx.commit("test").block_on().unwrap(); // Predecessor commits are emitted in chronological (operation) order. 
let entries = collect_predecessors(&repo3, commit3.id()); @@ -248,7 +255,7 @@ fn test_walk_predecessors_multiple_predecessors_within_op() { let mut tx = repo0.start_transaction(); let commit1 = write_random_commit(tx.repo_mut()); let commit2 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); let commit3 = tx @@ -257,9 +264,10 @@ fn test_walk_predecessors_multiple_predecessors_within_op() { .set_predecessors(vec![commit1.id().clone(), commit2.id().clone()]) .set_description("rewritten") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo2 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); let entries = collect_predecessors(&repo2, commit3.id()); assert_eq!(entries.len(), 3); @@ -284,7 +292,7 @@ fn test_walk_predecessors_transitive() { let mut tx = repo0.start_transaction(); let commit1 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); let commit2 = tx @@ -292,15 +300,17 @@ fn test_walk_predecessors_transitive() { .rewrite_commit(&commit1) .set_description("rewritten 2") .write() + .block_on() .unwrap(); let commit3 = tx .repo_mut() .rewrite_commit(&commit2) .set_description("rewritten 3") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo2 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); let entries = collect_predecessors(&repo2, commit3.id()); assert_eq!(entries.len(), 3); @@ -335,21 +345,24 @@ fn test_walk_predecessors_transitive_graph_order() { .rewrite_commit(&commit1) .set_description("rewritten 2") .write() + .block_on() .unwrap(); let commit3 = tx .repo_mut() 
.rewrite_commit(&commit2) .set_description("rewritten 3") .write() + .block_on() .unwrap(); let commit4 = tx .repo_mut() .rewrite_commit(&commit1) .set_description("rewritten 4") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo1 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); let commit5 = tx @@ -358,9 +371,10 @@ fn test_walk_predecessors_transitive_graph_order() { .set_predecessors(vec![commit4.id().clone(), commit3.id().clone()]) .set_description("rewritten 5") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo2 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); let entries = collect_predecessors(&repo2, commit5.id()); assert_eq!(entries.len(), 5); @@ -397,7 +411,7 @@ fn test_walk_predecessors_unsimplified() { let mut tx = repo0.start_transaction(); let commit1 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); let commit2 = tx @@ -405,9 +419,10 @@ fn test_walk_predecessors_unsimplified() { .rewrite_commit(&commit1) .set_description("rewritten 2") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo2 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); let mut tx = repo2.start_transaction(); let commit3 = tx @@ -416,9 +431,10 @@ fn test_walk_predecessors_unsimplified() { .set_predecessors(vec![commit1.id().clone(), commit2.id().clone()]) .set_description("rewritten 3") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo3 = tx.commit("test").unwrap(); + 
tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo3 = tx.commit("test").block_on().unwrap(); let entries = collect_predecessors(&repo3, commit3.id()); assert_eq!(entries.len(), 3); @@ -444,7 +460,7 @@ fn test_walk_predecessors_direct_cycle_within_op() { let mut tx = repo0.start_transaction(); let commit1 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let repo1 = { let mut data = repo1.operation().store_operation().clone(); @@ -452,8 +468,8 @@ fn test_walk_predecessors_direct_cycle_within_op() { commit1.id().clone() => vec![commit1.id().clone()], }); let op_id = loader.op_store().write_operation(&data).block_on().unwrap(); - let op = loader.load_operation(&op_id).unwrap(); - loader.load_at(&op).unwrap() + let op = loader.load_operation(&op_id).block_on().unwrap(); + loader.load_at(&op).block_on().unwrap() }; assert_matches!( walk_predecessors(&repo1, slice::from_ref(commit1.id())).next(), @@ -471,7 +487,7 @@ fn test_walk_predecessors_indirect_cycle_within_op() { let commit1 = write_random_commit(tx.repo_mut()); let commit2 = write_random_commit(tx.repo_mut()); let commit3 = write_random_commit(tx.repo_mut()); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let repo1 = { let mut data = repo1.operation().store_operation().clone(); @@ -481,8 +497,8 @@ fn test_walk_predecessors_indirect_cycle_within_op() { commit3.id().clone() => vec![commit2.id().clone()], }); let op_id = loader.op_store().write_operation(&data).block_on().unwrap(); - let op = loader.load_operation(&op_id).unwrap(); - loader.load_at(&op).unwrap() + let op = loader.load_operation(&op_id).block_on().unwrap(); + loader.load_at(&op).block_on().unwrap() }; assert_matches!( walk_predecessors(&repo1, slice::from_ref(commit3.id())).next(), @@ -511,6 +527,7 @@ fn test_accumulate_predecessors() { ) .set_description(desc) .write() + .block_on() .unwrap() } @@ -519,6 
+536,7 @@ fn test_accumulate_predecessors() { .set_predecessors(predecessors.iter().map(|c| c.id().clone()).collect()) .set_description(desc) .write() + .block_on() .unwrap() } @@ -536,26 +554,26 @@ fn test_accumulate_predecessors() { let commit_a1 = new_commit(tx.repo_mut(), "a1"); let commit_a2 = new_commit(tx.repo_mut(), "a2"); let commit_a3 = new_commit(tx.repo_mut(), "a3"); - let repo_a = tx.commit("a").unwrap(); + let repo_a = tx.commit("a").block_on().unwrap(); let mut tx = repo_a.start_transaction(); let commit_b1 = rewrite_commit(tx.repo_mut(), &[&commit_a1], "b1"); let commit_b2 = rewrite_commit(tx.repo_mut(), &[&commit_a2, &commit_a3], "b2"); - tx.repo_mut().rebase_descendants().unwrap(); - let repo_b = tx.commit("b").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo_b = tx.commit("b").block_on().unwrap(); let mut tx = repo_b.start_transaction(); let commit_c1 = rewrite_commit(tx.repo_mut(), &[&commit_b1], "c1"); let commit_c2 = rewrite_commit(tx.repo_mut(), &[&commit_b2, &commit_a3], "c2"); let commit_c3 = rewrite_commit(tx.repo_mut(), &[&commit_c2], "c3"); - tx.repo_mut().rebase_descendants().unwrap(); - let repo_c = tx.commit("c").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo_c = tx.commit("c").block_on().unwrap(); let mut tx = repo_a.start_transaction(); let commit_d1 = rewrite_commit(tx.repo_mut(), &[&commit_a1], "d1"); let commit_d2 = rewrite_commit(tx.repo_mut(), &[&commit_a2], "d2"); - tx.repo_mut().rebase_descendants().unwrap(); - let repo_d = tx.commit("d").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo_d = tx.commit("d").block_on().unwrap(); // Empty old/new ops let predecessors = accumulate_predecessors(&[], slice::from_ref(repo_c.operation())).unwrap(); diff --git a/lib/tests/test_fix.rs b/lib/tests/test_fix.rs index e39882816a6..660c8867611 100644 --- a/lib/tests/test_fix.rs +++ b/lib/tests/test_fix.rs @@ -94,6 +94,7 @@ fn create_commit(tx: &mut 
Transaction, parents: Vec, tree_id: MergedTr tx.repo_mut() .new_commit(parents, tree_id) .write() + .block_on() .unwrap() .id() .clone() diff --git a/lib/tests/test_git.rs b/lib/tests/test_git.rs index 9889db6a3a2..bf3981a6767 100644 --- a/lib/tests/test_git.rs +++ b/lib/tests/test_git.rs @@ -76,6 +76,7 @@ use jj_lib::str_util::StringPattern; use jj_lib::workspace::Workspace; use maplit::btreemap; use maplit::hashset; +use pollster::FutureExt as _; use tempfile::TempDir; use test_case::test_case; use testutils::TestRepo; @@ -201,8 +202,8 @@ fn test_import_refs() { let mut tx = repo.start_transaction(); git::import_head(tx.repo_mut()).unwrap(); let stats = git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); assert!(stats.abandoned_commits.is_empty()); @@ -346,8 +347,8 @@ fn test_import_refs_reimport() { let mut tx = repo.start_transaction(); let stats = git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert!(stats.abandoned_commits.is_empty()); let expected_heads = hashset! 
{ @@ -367,15 +368,16 @@ fn test_import_refs_reimport() { let commit6 = create_random_commit(tx.repo_mut()) .set_parents(vec![jj_id(commit2)]) .write() + .block_on() .unwrap(); tx.repo_mut() .set_local_bookmark_target("feature2".as_ref(), RefTarget::normal(commit6.id().clone())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let stats = git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!( // The order is unstable just because we import heads from Git repo. @@ -458,7 +460,7 @@ fn test_import_refs_reimport_head_removed() { let commit = empty_git_commit(&git_repo, "refs/heads/main", &[]); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); let commit_id = jj_id(commit); // Test the setup assert!(tx.repo().view().heads().contains(&commit_id)); @@ -466,7 +468,7 @@ fn test_import_refs_reimport_head_removed() { // Remove the head and re-import tx.repo_mut().remove_head(&commit_id); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(!tx.repo().view().heads().contains(&commit_id)); } @@ -485,7 +487,7 @@ fn test_import_refs_reimport_git_head_does_not_count() { let mut tx = repo.start_transaction(); git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); // Delete the bookmark and re-import. 
The commit should still be there since // HEAD points to it @@ -496,7 +498,7 @@ fn test_import_refs_reimport_git_head_does_not_count() { .unwrap(); git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(!tx.repo().view().heads().contains(&jj_id(commit))); } @@ -517,7 +519,7 @@ fn test_import_refs_reimport_git_head_without_ref() { // Import HEAD. git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(tx.repo().view().heads().contains(commit1.id())); assert!(tx.repo().view().heads().contains(commit2.id())); @@ -530,7 +532,7 @@ fn test_import_refs_reimport_git_head_without_ref() { // but it should be safer than abandoning old checkout branch. git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(tx.repo().view().heads().contains(commit1.id())); assert!(tx.repo().view().heads().contains(commit2.id())); } @@ -560,7 +562,7 @@ fn test_import_refs_reimport_git_head_with_moved_ref() { // Import HEAD and main. git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(tx.repo().view().heads().contains(commit1.id())); assert!(tx.repo().view().heads().contains(commit2.id())); @@ -578,13 +580,13 @@ fn test_import_refs_reimport_git_head_with_moved_ref() { // Reimport HEAD and main, which abandons the old main branch. 
git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(!tx.repo().view().heads().contains(commit1.id())); assert!(tx.repo().view().heads().contains(commit2.id())); // Reimport HEAD and main, which abandons the old main bookmark. git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(!tx.repo().view().heads().contains(commit1.id())); assert!(tx.repo().view().heads().contains(commit2.id())); } @@ -619,8 +621,8 @@ fn test_import_refs_reimport_with_deleted_remote_ref() { let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let expected_heads = hashset! { jj_id(commit_main), @@ -676,8 +678,8 @@ fn test_import_refs_reimport_with_deleted_remote_ref() { let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); // The local bookmarks were indeed deleted @@ -747,8 +749,8 @@ fn test_import_refs_reimport_with_moved_remote_ref() { let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let expected_heads = hashset! 
{ jj_id(commit_main), @@ -814,8 +816,8 @@ fn test_import_refs_reimport_with_moved_remote_ref() { let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); assert_eq!(view.bookmarks().count(), 3); @@ -881,8 +883,8 @@ fn test_import_refs_reimport_with_moved_untracked_remote_ref() { let commit_remote_t0 = empty_git_commit(&git_repo, remote_ref_name, &[commit_base]); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); assert_eq!(*view.heads(), hashset! { jj_id(commit_remote_t0) }); @@ -901,8 +903,8 @@ fn test_import_refs_reimport_with_moved_untracked_remote_ref() { let commit_remote_t1 = empty_git_commit(&git_repo, remote_ref_name, &[commit_base]); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); // commit_remote_t0 should be abandoned, but commit_base shouldn't because @@ -938,8 +940,8 @@ fn test_import_refs_reimport_with_deleted_untracked_intermediate_remote_ref() { let commit_remote_b = empty_git_commit(&git_repo, remote_ref_name_b, &[commit_remote_a]); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + 
tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); assert_eq!(*view.heads(), hashset! { jj_id(commit_remote_b) }); @@ -964,8 +966,8 @@ fn test_import_refs_reimport_with_deleted_untracked_intermediate_remote_ref() { delete_git_ref(&git_repo, remote_ref_name_a); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); // No commits should be abandoned because feature-a is pinned by feature-b. @@ -1002,8 +1004,8 @@ fn test_import_refs_reimport_with_deleted_abandoned_untracked_remote_ref() { let commit_remote_b = empty_git_commit(&git_repo, remote_ref_name_b, &[commit_remote_a]); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); assert_eq!(*view.heads(), hashset! { jj_id(commit_remote_b) }); @@ -1034,8 +1036,8 @@ fn test_import_refs_reimport_with_deleted_abandoned_untracked_remote_ref() { .get_commit(&jj_id(commit_remote_b)) .unwrap(); tx.repo_mut().record_abandoned_commit(&jj_commit_remote_b); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); assert_eq!(*view.heads(), hashset! 
{ jj_id(commit_remote_a) }); assert_eq!(view.local_bookmarks().count(), 0); @@ -1045,8 +1047,8 @@ fn test_import_refs_reimport_with_deleted_abandoned_untracked_remote_ref() { delete_git_ref(&git_repo, remote_ref_name_a); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let view = repo.view(); // The feature-a commit should be abandoned. Since feature-b has already @@ -1087,12 +1089,12 @@ fn test_import_refs_reimport_absent_tracked_remote_bookmarks() { .set_remote_bookmark(remote_symbol("foo", "origin"), absent_tracked_ref.clone()); tx.repo_mut() .set_remote_bookmark(remote_symbol("foo", "upstream"), absent_tracked_ref.clone()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Import with no change. let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Absent tracked remote refs shouldn't be deleted. assert_eq!( @@ -1115,7 +1117,7 @@ fn test_import_refs_reimport_absent_tracked_remote_bookmarks() { .unwrap(); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Tracked refs should be merged and their state should be preserved. assert_eq!( @@ -1161,12 +1163,12 @@ fn test_import_refs_reimport_absent_tracked_remote_tags() { .set_remote_tag(remote_symbol("bar", "git"), absent_tracked_ref.clone()); tx.repo_mut() .set_remote_tag(remote_symbol("foo", "git"), absent_tracked_ref.clone()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Import with no change. 
let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Absent tracked remote refs shouldn't be deleted. assert_eq!( @@ -1189,7 +1191,7 @@ fn test_import_refs_reimport_absent_tracked_remote_tags() { .unwrap(); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Tracked refs should be merged and their state should be preserved. assert_eq!( @@ -1238,7 +1240,7 @@ fn test_import_refs_reimport_git_head_with_fixed_ref() { // Import HEAD and main. git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(tx.repo().view().heads().contains(commit1.id())); assert!(tx.repo().view().heads().contains(commit2.id())); @@ -1248,7 +1250,7 @@ fn test_import_refs_reimport_git_head_with_fixed_ref() { // Reimport HEAD, which shouldn't abandon the old HEAD branch. 
git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(tx.repo().view().heads().contains(commit1.id())); assert!(tx.repo().view().heads().contains(commit2.id())); } @@ -1265,7 +1267,7 @@ fn test_import_refs_reimport_all_from_root_removed() { let commit = empty_git_commit(&git_repo, "refs/heads/main", &[]); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); // Test the setup assert!(tx.repo().view().heads().contains(&jj_id(commit))); @@ -1276,7 +1278,7 @@ fn test_import_refs_reimport_all_from_root_removed() { .delete() .unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(!tx.repo().view().heads().contains(&jj_id(commit))); } @@ -1295,7 +1297,7 @@ fn test_import_refs_reimport_abandoning_disabled() { let commit2 = empty_git_commit(&git_repo, "refs/heads/delete-me", &[commit1]); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); // Test the setup assert!(tx.repo().view().heads().contains(&jj_id(commit2))); @@ -1306,7 +1308,7 @@ fn test_import_refs_reimport_abandoning_disabled() { .delete() .unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert!(tx.repo().view().heads().contains(&jj_id(commit2))); } @@ -1348,7 +1350,7 @@ fn test_import_refs_reimport_conflicted_remote_bookmark() { // The conflict can be resolved by importing the current Git state let mut tx = 
repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!( repo.view().get_git_ref("refs/remotes/origin/main".as_ref()), &RefTarget::normal(jj_id(commit2)), @@ -1414,8 +1416,8 @@ fn test_import_some_refs() { && symbol.name.as_str().starts_with("feature") }) .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // There are two heads, feature2 and feature4. let view = repo.view(); @@ -1507,8 +1509,8 @@ fn test_import_some_refs() { kind == GitRefKind::Bookmark && symbol.remote == "origin" && symbol.name == "feature2" }) .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // feature2 and feature4 will still be heads, and all four bookmarks should be // present. @@ -1524,8 +1526,8 @@ fn test_import_some_refs() { }) .unwrap(); // No descendant should be rewritten. - assert_eq!(tx.repo_mut().rebase_descendants().unwrap(), 0); - let repo = tx.commit("test").unwrap(); + assert_eq!(tx.repo_mut().rebase_descendants().block_on().unwrap(), 0); + let repo = tx.commit("test").block_on().unwrap(); // feature2 and feature4 should still be the heads, and all three bookmarks // feature2, feature3, and feature3 should exist. @@ -1541,8 +1543,8 @@ fn test_import_some_refs() { }) .unwrap(); // No descendant should be rewritten - assert_eq!(tx.repo_mut().rebase_descendants().unwrap(), 0); - let repo = tx.commit("test").unwrap(); + assert_eq!(tx.repo_mut().rebase_descendants().block_on().unwrap(), 0); + let repo = tx.commit("test").block_on().unwrap(); // feature2 and feature4 should still be the heads, and both bookmarks // should exist. 
@@ -1557,8 +1559,8 @@ fn test_import_some_refs() { }) .unwrap(); // No descendant should be rewritten - assert_eq!(tx.repo_mut().rebase_descendants().unwrap(), 0); - let repo = tx.commit("test").unwrap(); + assert_eq!(tx.repo_mut().rebase_descendants().block_on().unwrap(), 0); + let repo = tx.commit("test").block_on().unwrap(); // feature2 should now be the only head and only bookmark. let view = repo.view(); @@ -1613,6 +1615,7 @@ impl GitRepoData { ReadonlyRepo::default_index_store_initializer(), ReadonlyRepo::default_submodule_store_initializer(), ) + .block_on() .unwrap(); Self { _temp_dir: temp_dir, @@ -1630,8 +1633,8 @@ fn test_import_refs_empty_git_repo() { let heads_before = test_data.repo.view().heads().clone(); let mut tx = test_data.repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!(*repo.view().heads(), heads_before); assert_eq!(repo.view().bookmarks().count(), 0); assert_eq!(repo.view().local_tags().count(), 0); @@ -1735,8 +1738,8 @@ fn test_import_refs_detached_head() { let mut tx = test_data.repo.start_transaction(); git::import_head(tx.repo_mut()).unwrap(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let expected_heads = hashset! 
{ jj_id(commit1) }; assert_eq!(*repo.view().heads(), expected_heads); @@ -1757,7 +1760,7 @@ fn test_export_refs_no_detach() { let mut_repo = tx.repo_mut(); git::import_head(mut_repo).unwrap(); git::import_refs(mut_repo, &git_settings).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); // Do an initial export to make sure `main` is considered let stats = git::export_refs(mut_repo).unwrap(); @@ -1802,7 +1805,7 @@ fn test_export_refs_bookmark_changed() { let mut_repo = tx.repo_mut(); git::import_head(mut_repo).unwrap(); git::import_refs(mut_repo, &git_settings).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); let stats = git::export_refs(mut_repo).unwrap(); assert!(stats.failed_bookmarks.is_empty()); assert!(stats.failed_tags.is_empty()); @@ -1810,6 +1813,7 @@ fn test_export_refs_bookmark_changed() { let new_commit = create_random_commit(mut_repo) .set_parents(vec![jj_id(commit)]) .write() + .block_on() .unwrap(); mut_repo.set_local_bookmark_target("main".as_ref(), RefTarget::normal(new_commit.id().clone())); let stats = git::export_refs(mut_repo).unwrap(); @@ -1847,7 +1851,7 @@ fn test_export_refs_tag_changed() { let mut_repo = tx.repo_mut(); git::import_head(mut_repo).unwrap(); git::import_refs(mut_repo, &git_settings).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); let stats = git::export_refs(mut_repo).unwrap(); assert!(stats.failed_bookmarks.is_empty()); assert!(stats.failed_tags.is_empty()); @@ -1855,6 +1859,7 @@ fn test_export_refs_tag_changed() { let new_commit = create_random_commit(mut_repo) .set_parents(vec![jj_id(commit)]) .write() + .block_on() .unwrap(); mut_repo.set_local_tag_target("v1.0".as_ref(), RefTarget::normal(new_commit.id().clone())); let stats = git::export_refs(mut_repo).unwrap(); @@ -1888,7 +1893,7 @@ fn test_export_refs_current_bookmark_changed() { let mut_repo = 
tx.repo_mut(); git::import_head(mut_repo).unwrap(); git::import_refs(mut_repo, &git_settings).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); let stats = git::export_refs(mut_repo).unwrap(); assert!(stats.failed_bookmarks.is_empty()); assert!(stats.failed_tags.is_empty()); @@ -1896,6 +1901,7 @@ fn test_export_refs_current_bookmark_changed() { let new_commit = create_random_commit(mut_repo) .set_parents(vec![jj_id(commit1)]) .write() + .block_on() .unwrap(); mut_repo.set_local_bookmark_target("main".as_ref(), RefTarget::normal(new_commit.id().clone())); let stats = git::export_refs(mut_repo).unwrap(); @@ -1931,7 +1937,7 @@ fn test_export_refs_current_tag_changed() { let mut_repo = tx.repo_mut(); git::import_head(mut_repo).unwrap(); git::import_refs(mut_repo, &git_settings).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); let stats = git::export_refs(mut_repo).unwrap(); assert!(stats.failed_bookmarks.is_empty()); assert!(stats.failed_tags.is_empty()); @@ -1939,6 +1945,7 @@ fn test_export_refs_current_tag_changed() { let new_commit = create_random_commit(mut_repo) .set_parents(vec![jj_id(commit1)]) .write() + .block_on() .unwrap(); mut_repo.set_local_tag_target("v1.0".as_ref(), RefTarget::normal(new_commit.id().clone())); let stats = git::export_refs(mut_repo).unwrap(); @@ -1973,7 +1980,7 @@ fn test_export_refs_unborn_git_bookmark(move_placeholder_ref: bool) { let mut_repo = tx.repo_mut(); git::import_head(mut_repo).unwrap(); git::import_refs(mut_repo, &git_settings).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); let stats = git::export_refs(mut_repo).unwrap(); assert!(stats.failed_bookmarks.is_empty()); assert!(stats.failed_tags.is_empty()); @@ -2722,10 +2729,12 @@ fn test_reset_head_to_root() { let commit1 = mut_repo .new_commit(vec![root_commit_id.clone()], tree_id.clone()) .write() + .block_on() 
.unwrap(); let commit2 = mut_repo .new_commit(vec![commit1.id().clone()], tree_id.clone()) .write() + .block_on() .unwrap(); // Set Git HEAD to commit2's parent (i.e. commit1) @@ -2875,10 +2884,12 @@ fn test_reset_head_with_index() { let commit1 = mut_repo .new_commit(vec![root_commit_id.clone()], tree_id.clone()) .write() + .block_on() .unwrap(); let commit2 = mut_repo .new_commit(vec![commit1.id().clone()], tree_id.clone()) .write() + .block_on() .unwrap(); // Set Git HEAD to commit2's parent (i.e. commit1) @@ -2931,11 +2942,13 @@ fn test_reset_head_with_index_no_conflict() { let parent_commit = mut_repo .new_commit(vec![repo.store().root_commit_id().clone()], tree_id.clone()) .write() + .block_on() .unwrap(); let wc_commit = mut_repo .new_commit(vec![parent_commit.id().clone()], tree_id.clone()) .write() + .block_on() .unwrap(); // Reset head to working copy commit @@ -3017,14 +3030,17 @@ fn test_reset_head_with_index_merge_conflict() { base_tree_id.clone(), ) .write() + .block_on() .unwrap(); let left_commit = mut_repo .new_commit(vec![base_commit.id().clone()], left_tree_id.clone()) .write() + .block_on() .unwrap(); let right_commit = mut_repo .new_commit(vec![base_commit.id().clone()], right_tree_id.clone()) .write() + .block_on() .unwrap(); // Create working copy commit with resolution of conflict by taking the right @@ -3036,6 +3052,7 @@ fn test_reset_head_with_index_merge_conflict() { right_tree_id.clone(), ) .write() + .block_on() .unwrap(); // Reset head to working copy commit with merge conflict @@ -3089,6 +3106,7 @@ fn test_reset_head_with_index_file_directory_conflict() { left_tree_id.clone(), ) .write() + .block_on() .unwrap(); let right_commit = mut_repo .new_commit( @@ -3096,6 +3114,7 @@ fn test_reset_head_with_index_file_directory_conflict() { right_tree_id.clone(), ) .write() + .block_on() .unwrap(); let wc_commit = mut_repo @@ -3104,6 +3123,7 @@ fn test_reset_head_with_index_file_directory_conflict() { 
repo.store().empty_merged_tree_id().clone(), ) .write() + .block_on() .unwrap(); // Reset head to working copy commit with file-directory conflict @@ -3138,6 +3158,7 @@ fn test_init() { ReadonlyRepo::default_index_store_initializer(), ReadonlyRepo::default_submodule_store_initializer(), ) + .block_on() .unwrap(); // The refs were *not* imported -- it's the caller's responsibility to import // any refs they care about. @@ -3186,7 +3207,7 @@ fn test_fetch_initial_commit_head_is_not_set() { // No default bookmark because the origin repo's HEAD wasn't set assert_eq!(stats.default_branch, None); assert!(stats.import_stats.abandoned_commits.is_empty()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // The initial commit is visible after git_fetch(). let view = repo.view(); assert!(view.heads().contains(&jj_id(initial_git_commit))); @@ -3270,7 +3291,7 @@ fn test_fetch_success() { None, ) .unwrap(); - test_data.repo = tx.commit("test").unwrap(); + test_data.repo = tx.commit("test").block_on().unwrap(); testutils::git::set_symbolic_reference(&test_data.origin_repo, "HEAD", "refs/heads/main"); let new_git_commit = empty_git_commit( @@ -3300,7 +3321,7 @@ fn test_fetch_success() { // The default bookmark is "main" assert_eq!(stats.default_branch, Some("main".into())); assert!(stats.import_stats.abandoned_commits.is_empty()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // The new commit is visible after we fetch again let view = repo.view(); assert!(view.heads().contains(&jj_id(new_git_commit))); @@ -3781,7 +3802,7 @@ fn test_fetch_with_fetch_tags_override() { None, ) .unwrap(); - let _repo = tx.commit("test").unwrap(); + let _repo = tx.commit("test").block_on().unwrap(); // Reload after Git configuration change. 
let repo = &test_repo .env @@ -3821,7 +3842,7 @@ fn test_fetch_with_fetch_tags_override() { None, ) .unwrap(); - let _repo = tx.commit("test").unwrap(); + let _repo = tx.commit("test").block_on().unwrap(); // Reload after Git configuration change. let repo = &test_repo .env @@ -3902,6 +3923,7 @@ fn set_up_push_repos(settings: &UserSettings, temp_dir: &TempDir) -> PushTestSet ReadonlyRepo::default_index_store_initializer(), ReadonlyRepo::default_submodule_store_initializer(), ) + .block_on() .unwrap(); get_git_backend(&jj_repo) .import_head_commits(&[jj_id(initial_git_commit)]) @@ -3930,7 +3952,7 @@ fn set_up_push_repos(settings: &UserSettings, temp_dir: &TempDir) -> PushTestSet state: RemoteRefState::Tracked, }, ); - let jj_repo = tx.commit("test").unwrap(); + let jj_repo = tx.commit("test").block_on().unwrap(); PushTestSetup { source_repo_dir, jj_repo, @@ -4003,7 +4025,7 @@ fn test_push_bookmarks_success() { ); // Check that the repo view reflects the changes in the Git repo - setup.jj_repo = tx.commit("test").unwrap(); + setup.jj_repo = tx.commit("test").block_on().unwrap(); let mut tx = setup.jj_repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); assert!(!tx.repo().has_changes()); @@ -4070,7 +4092,7 @@ fn test_push_bookmarks_deletion() { ); // Check that the repo view reflects the changes in the Git repo - setup.jj_repo = tx.commit("test").unwrap(); + setup.jj_repo = tx.commit("test").block_on().unwrap(); let mut tx = setup.jj_repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); assert!(!tx.repo().has_changes()); @@ -4151,7 +4173,7 @@ fn test_push_bookmarks_mixed_deletion_and_addition() { ); // Check that the repo view reflects the changes in the Git repo - setup.jj_repo = tx.commit("test").unwrap(); + setup.jj_repo = tx.commit("test").block_on().unwrap(); let mut tx = setup.jj_repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); assert!(!tx.repo().has_changes()); @@ 
-4489,8 +4511,8 @@ fn test_bulk_update_extra_on_import_refs() { let import_refs = |repo: &Arc| { let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - tx.commit("test").unwrap() + tx.repo_mut().rebase_descendants().block_on().unwrap(); + tx.commit("test").block_on().unwrap() }; // Extra metadata table shouldn't be created per read_commit() call. The number @@ -4533,8 +4555,8 @@ fn test_rewrite_imported_commit() { let git_commit = empty_git_commit(&git_repo, "refs/heads/main", &[]); let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let imported_commit = repo.store().get_commit(&jj_id(git_commit)).unwrap(); // Try to create identical commit with different change id. @@ -4549,8 +4571,9 @@ fn test_rewrite_imported_commit() { .set_committer(imported_commit.committer().clone()) .set_description(imported_commit.description()) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Imported commit shouldn't be reused, and the timestamp of the authored // commit should be adjusted to create new commit. @@ -4594,8 +4617,9 @@ fn test_concurrent_write_commit() { let commit = create_rooted_commit(tx.repo_mut()) .set_description("racy commit") .write() + .block_on() .unwrap(); - tx.commit(format!("writer {i}")).unwrap(); + tx.commit(format!("writer {i}")).block_on().unwrap(); sender .send((commit.id().clone(), commit.change_id().clone())) .unwrap(); @@ -4616,7 +4640,7 @@ fn test_concurrent_write_commit() { assert_eq!(commit_change_ids.len(), num_thread); // All unique commits should be preserved. 
- let repo = repo.reload_at_head().unwrap(); + let repo = repo.reload_at_head().block_on().unwrap(); for (commit_id, change_ids) in &commit_change_ids { let commit = repo.store().get_commit(commit_id).unwrap(); assert_eq!(commit.id(), commit_id); @@ -4682,8 +4706,9 @@ fn test_concurrent_read_write_commit() { let commit = create_rooted_commit(tx.repo_mut()) .set_description(format!("commit {i}")) .write() + .block_on() .unwrap(); - tx.commit(format!("writer {i}")).unwrap(); + tx.commit(format!("writer {i}")).block_on().unwrap(); assert_eq!(commit.id(), commit_id); }); } @@ -4702,7 +4727,7 @@ fn test_concurrent_read_write_commit() { if pending_commit_ids.is_empty() { break; } - repo = repo.reload_at_head().unwrap(); + repo = repo.reload_at_head().block_on().unwrap(); let git_backend = get_git_backend(&repo); let mut tx = repo.start_transaction(); pending_commit_ids = pending_commit_ids @@ -4731,7 +4756,7 @@ fn test_concurrent_read_write_commit() { }) .collect_vec(); if tx.repo().has_changes() { - tx.commit(format!("reader {i}")).unwrap(); + tx.commit(format!("reader {i}")).block_on().unwrap(); } thread::yield_now(); } @@ -4749,7 +4774,7 @@ fn test_concurrent_read_write_commit() { }); // The index should be consistent with the store. 
- let repo = repo.reload_at_head().unwrap(); + let repo = repo.reload_at_head().block_on().unwrap(); for commit_id in &commit_ids { assert!(repo.index().has_id(commit_id)); let commit = repo.store().get_commit(commit_id).unwrap(); @@ -4822,7 +4847,7 @@ fn test_shallow_commits_lack_parents() { let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - let repo = tx.commit("import").unwrap(); + let repo = tx.commit("import").block_on().unwrap(); let store = repo.store(); let root = store.root_commit_id(); @@ -4853,7 +4878,7 @@ fn test_shallow_commits_lack_parents() { let mut tx = repo.start_transaction(); git::import_refs(tx.repo_mut(), &git_settings).unwrap(); - let repo = tx.commit("import").unwrap(); + let repo = tx.commit("import").block_on().unwrap(); let store = repo.store(); let root = store.root_commit_id(); @@ -4889,7 +4914,7 @@ fn test_remote_remove_refs() { None, ) .unwrap(); - let _repo = tx.commit("test").unwrap(); + let _repo = tx.commit("test").block_on().unwrap(); // Reload after Git configuration change. let repo = &test_repo .env @@ -4902,7 +4927,7 @@ fn test_remote_remove_refs() { let mut tx = repo.start_transaction(); git::remove_remote(tx.repo_mut(), "foo".as_ref()).unwrap(); - let repo = &tx.commit("remove").unwrap(); + let repo = &tx.commit("remove").block_on().unwrap(); let git_repo = get_git_repo(repo); assert!( @@ -4939,7 +4964,7 @@ fn test_remote_rename_refs() { None, ) .unwrap(); - let _repo = tx.commit("test").unwrap(); + let _repo = tx.commit("test").block_on().unwrap(); // Reload after Git configuration change. 
let repo = &test_repo .env @@ -4952,7 +4977,7 @@ fn test_remote_rename_refs() { let mut tx = repo.start_transaction(); git::rename_remote(tx.repo_mut(), "foo".as_ref(), "bar".as_ref()).unwrap(); - let repo = &tx.commit("rename").unwrap(); + let repo = &tx.commit("rename").block_on().unwrap(); let git_repo = get_git_repo(repo); assert!( @@ -5016,7 +5041,7 @@ fn test_remote_add_with_tags_specification() { None, ) .unwrap(); - let _repo = tx.commit("test").unwrap(); + let _repo = tx.commit("test").block_on().unwrap(); // Reload after Git configuration change. let repo = &test_repo diff --git a/lib/tests/test_git_backend.rs b/lib/tests/test_git_backend.rs index ae0f01f710a..e7a93e04ab0 100644 --- a/lib/tests/test_git_backend.rs +++ b/lib/tests/test_git_backend.rs @@ -34,6 +34,7 @@ use jj_lib::store::Store; use jj_lib::transaction::Transaction; use maplit::hashmap; use maplit::hashset; +use pollster::FutureExt as _; use testutils::TestRepo; use testutils::TestRepoBackend; use testutils::commit_with_tree; @@ -88,6 +89,7 @@ fn make_commit( tx.repo_mut() .new_commit(parents, tree.id()) .write() + .block_on() .unwrap() } @@ -128,8 +130,9 @@ fn test_gc() { .set_parents(vec![commit_f.id().clone()]) .set_predecessors(vec![commit_d.id().clone()]) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!( *repo.view().heads(), hashset! 
{ diff --git a/lib/tests/test_id_prefix.rs b/lib/tests/test_id_prefix.rs index 6720ce3a191..c6f27325733 100644 --- a/lib/tests/test_id_prefix.rs +++ b/lib/tests/test_id_prefix.rs @@ -30,6 +30,7 @@ use jj_lib::op_store::RefTarget; use jj_lib::repo::Repo as _; use jj_lib::revset::RevsetExpression; use jj_lib::settings::UserSettings; +use pollster::FutureExt as _; use testutils::TestRepo; use testutils::TestRepoBackend; @@ -65,13 +66,14 @@ fn test_id_prefix() { .set_author(signature.clone()) .set_committer(signature) .write() + .block_on() .unwrap() }; let mut commits = vec![create_commit(root_commit_id)]; for _ in 0..25 { commits.push(create_commit(commits.last().unwrap().id())); } - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Print the commit IDs and change IDs for reference let commit_prefixes = commits @@ -290,6 +292,7 @@ fn test_id_prefix_divergent() { .set_committer(signature) .set_change_id(change_id) .write() + .block_on() .unwrap() }; @@ -306,7 +309,7 @@ fn test_id_prefix_divergent() { second_commit.clone(), third_commit_divergent_with_second.clone(), ]; - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Print the commit IDs and change IDs for reference let change_prefixes = commits @@ -426,6 +429,7 @@ fn test_id_prefix_hidden() { .set_author(signature.clone()) .set_committer(signature) .write() + .block_on() .unwrap(); commits.push(commit); } @@ -470,8 +474,8 @@ fn test_id_prefix_hidden() { let hidden_commit = &commits[8]; tx.repo_mut().record_abandoned_commit(hidden_commit); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let prefix = |x: &str| HexPrefix::try_from_hex(x).unwrap(); @@ -562,6 +566,7 @@ fn test_id_prefix_shadowed_by_ref() { repo.store().empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); let 
commit_id_sym = commit.id().to_string(); diff --git a/lib/tests/test_index.rs b/lib/tests/test_index.rs index 20f9a2612e1..8331dfa3420 100644 --- a/lib/tests/test_index.rs +++ b/lib/tests/test_index.rs @@ -70,7 +70,7 @@ fn enable_changed_path_index(repo: &ReadonlyRepo) -> Arc { .build_changed_path_index_at_operation(repo.op_id(), repo.store(), 0) .block_on() .unwrap(); - repo.reload_at(repo.operation()).unwrap() + repo.reload_at(repo.operation()).block_on().unwrap() } fn collect_changed_paths(repo: &ReadonlyRepo, commit_id: &CommitId) -> Option> { @@ -127,7 +127,7 @@ fn test_index_commits_standard_cases() { let commit_f = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b, &commit_e]); let commit_g = write_random_commit_with_parents(tx.repo_mut(), &[&commit_f]); let commit_h = write_random_commit_with_parents(tx.repo_mut(), &[&commit_e]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let index = as_readonly_index(&repo); // There should be the root commit, plus 8 more @@ -194,7 +194,7 @@ fn test_index_commits_criss_cross() { left_commits.push(new_left); right_commits.push(new_right); } - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let index = as_readonly_index(&repo); // There should the root commit, plus 2 for each generation @@ -358,11 +358,11 @@ fn test_index_commits_previous_operations() { let commit_a = write_random_commit(tx.repo_mut()); let commit_b = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a]); let commit_c = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().remove_head(commit_c.id()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Delete index from disk let default_index_store: &DefaultIndexStore = 
repo.index_store().downcast_ref().unwrap(); @@ -412,7 +412,7 @@ fn test_index_commits_hidden_but_referenced() { state: jj_lib::op_store::RemoteRefState::New, }, ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // All commits should be indexed assert!(repo.index().has_id(commit_a.id())); @@ -449,7 +449,7 @@ fn test_index_commits_incremental() { let root_commit = repo.store().root_commit(); let mut tx = repo.start_transaction(); let commit_a = write_random_commit_with_parents(tx.repo_mut(), &[]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let index = as_readonly_index(&repo); // There should be the root commit, plus 1 more @@ -458,7 +458,7 @@ fn test_index_commits_incremental() { let mut tx = repo.start_transaction(); let commit_b = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a]); let commit_c = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b]); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let repo = test_env.load_repo_at_head(&settings, test_repo.repo_path()); let index = as_readonly_index(&repo); @@ -495,13 +495,13 @@ fn test_index_commits_incremental_empty_transaction() { let root_commit = repo.store().root_commit(); let mut tx = repo.start_transaction(); let commit_a = write_random_commit_with_parents(tx.repo_mut(), &[&root_commit]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let index = as_readonly_index(&repo); // There should be the root commit, plus 1 more assert_eq!(index.num_commits(), 1 + 1); - repo.start_transaction().commit("test").unwrap(); + repo.start_transaction().commit("test").block_on().unwrap(); let repo = test_env.load_repo_at_head(&settings, test_repo.repo_path()); let index = as_readonly_index(&repo); @@ -534,7 +534,7 @@ fn test_index_commits_incremental_already_indexed() { let root_commit = repo.store().root_commit(); let mut tx = 
repo.start_transaction(); let commit_a = write_random_commit_with_parents(tx.repo_mut(), &[&root_commit]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert!(repo.index().has_id(commit_a.id())); assert_eq!(as_readonly_index(&repo).num_commits(), 1 + 1); @@ -550,7 +550,7 @@ fn create_n_commits(repo: &Arc, num_commits: i32) -> Arc) -> &DefaultReadonlyIndex { @@ -640,7 +640,7 @@ fn test_reindex_no_segments_dir() { let mut tx = repo.start_transaction(); let commit_a = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert!(repo.index().has_id(commit_a.id())); // jj <= 0.14 doesn't have "segments" directory @@ -661,7 +661,7 @@ fn test_reindex_corrupt_segment_files() { let mut tx = repo.start_transaction(); let commit_a = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert!(repo.index().has_id(commit_a.id())); // Corrupt the index files @@ -698,7 +698,7 @@ fn test_reindex_from_merged_operation() { for _ in 0..2 { let mut tx = repo.start_transaction(); let commit = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().remove_head(commit.id()); txs.push(tx); @@ -708,7 +708,7 @@ fn test_reindex_from_merged_operation() { op_ids_to_delete.push(repo.op_id()); let mut tx = repo.start_transaction(); write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); op_ids_to_delete.push(repo.op_id()); let operation_to_reload = repo.operation(); @@ -726,7 +726,7 @@ fn test_reindex_from_merged_operation() { // When re-indexing, one of the merge parent operations will be selected as // the parent index segment. The commits in the other side should still be // reachable. 
- let repo = repo.reload_at(operation_to_reload).unwrap(); + let repo = repo.reload_at(operation_to_reload).block_on().unwrap(); let index = as_readonly_index(&repo); assert_eq!(index.num_commits(), 4); } @@ -740,12 +740,12 @@ fn test_reindex_missing_commit() { let mut tx = repo.start_transaction(); let missing_commit = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let bad_op_id = repo.op_id(); let mut tx = repo.start_transaction(); tx.repo_mut().remove_head(missing_commit.id()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Remove historical head commit to simulate bad GC. let test_backend: &TestBackend = repo.store().backend_impl().unwrap(); @@ -788,7 +788,7 @@ fn test_read_legacy_operation_link_file() { fs::remove_dir_all(&op_links_dir).unwrap(); // Reload repo and index - let repo = repo.reload_at(repo.operation()).unwrap(); + let repo = repo.reload_at(repo.operation()).block_on().unwrap(); let _ = repo.readonly_index(); // Existing index should still be readable, so new operation link file won't // be created @@ -797,7 +797,7 @@ fn test_read_legacy_operation_link_file() { // New operation link file and directory can be created let mut tx = repo.start_transaction(); write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert!(op_links_dir.join(repo.op_id().hex()).exists()); } @@ -832,8 +832,9 @@ fn test_changed_path_segments() { .repo_mut() .new_commit(vec![root_commit_id.clone()], tree1.id()) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let stats = as_readonly_index(&repo).stats(); assert_eq!(count_segment_files(), 1); assert_eq!(stats.changed_path_commits_range, Some(1..2)); @@ -853,8 +854,9 @@ fn test_changed_path_segments() { .repo_mut() 
.new_commit(vec![root_commit_id.clone()], tree2.id()) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let stats = as_readonly_index(&repo).stats(); assert_eq!(count_segment_files(), 2); assert_eq!(stats.changed_path_commits_range, Some(1..3)); @@ -886,16 +888,17 @@ fn test_build_changed_path_segments() { tx.repo_mut() .new_commit(vec![root_commit_id.clone()], tree.id()) .write() + .block_on() .unwrap(); } - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Index the last 4 commits default_index_store .build_changed_path_index_at_operation(repo.op_id(), repo.store(), 4) .block_on() .unwrap(); - let repo = repo.reload_at(repo.operation()).unwrap(); + let repo = repo.reload_at(repo.operation()).block_on().unwrap(); let stats = as_readonly_index(&repo).stats(); assert_eq!(stats.changed_path_commits_range, Some(6..10)); assert_eq!(stats.changed_path_levels.len(), 1); @@ -908,7 +911,7 @@ fn test_build_changed_path_segments() { .build_changed_path_index_at_operation(repo.op_id(), repo.store(), u32::MAX) .block_on() .unwrap(); - let repo = repo.reload_at(repo.operation()).unwrap(); + let repo = repo.reload_at(repo.operation()).block_on().unwrap(); let stats = as_readonly_index(&repo).stats(); assert_eq!(stats.changed_path_commits_range, Some(0..10)); assert_eq!(stats.changed_path_levels.len(), 2); @@ -935,15 +938,17 @@ fn test_build_changed_path_segments_partially_enabled() { tx.repo_mut() .new_commit(vec![root_commit_id.clone()], tree.id()) .write() + .block_on() .unwrap(); } - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let repo = enable_changed_path_index(&repo); let mut tx = repo.start_transaction(); let tree = create_tree(&repo, &[(repo_path("5"), "")]); tx.repo_mut() .new_commit(vec![root_commit_id.clone()], tree.id()) .write() + .block_on() .unwrap(); tx }; @@ -953,6 +958,7 @@ fn 
test_build_changed_path_segments_partially_enabled() { tx2.repo_mut() .new_commit(vec![root_commit_id.clone()], tree.id()) .write() + .block_on() .unwrap(); } let repo = commit_transactions(vec![tx1, tx2]); @@ -969,7 +975,7 @@ fn test_build_changed_path_segments_partially_enabled() { .build_changed_path_index_at_operation(repo.op_id(), repo.store(), 2) .block_on() .unwrap(); - let repo = repo.reload_at(repo.operation()).unwrap(); + let repo = repo.reload_at(repo.operation()).block_on().unwrap(); let stats = as_readonly_index(&repo).stats(); assert_eq!(stats.changed_path_commits_range, Some(5..8)); assert_eq!(stats.changed_path_levels.len(), 1); @@ -982,7 +988,7 @@ fn test_build_changed_path_segments_partially_enabled() { .build_changed_path_index_at_operation(repo.op_id(), repo.store(), 3) .block_on() .unwrap(); - let repo = repo.reload_at(repo.operation()).unwrap(); + let repo = repo.reload_at(repo.operation()).block_on().unwrap(); let stats = as_readonly_index(&repo).stats(); assert_eq!(stats.changed_path_commits_range, Some(4..10)); assert_eq!(stats.changed_path_levels.len(), 1); @@ -1006,19 +1012,22 @@ fn test_merge_changed_path_segments_both_enabled() { tx.repo_mut() .new_commit(vec![root_commit_id.clone()], tree1.id()) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Merge concurrent index segments without the common base segment let mut tx1 = repo.start_transaction(); tx1.repo_mut() .new_commit(vec![root_commit_id.clone()], tree2.id()) .write() + .block_on() .unwrap(); let mut tx2 = repo.start_transaction(); tx2.repo_mut() .new_commit(vec![root_commit_id.clone()], tree3.id()) .write() + .block_on() .unwrap(); let repo = commit_transactions(vec![tx1, tx2]); let stats = as_readonly_index(&repo).stats(); @@ -1046,13 +1055,15 @@ fn test_merge_changed_path_segments_enabled_and_disabled() { tx.repo_mut() .new_commit(vec![root_commit_id.clone()], tree1.id()) .write() + .block_on() 
.unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let repo = enable_changed_path_index(&repo); let mut tx = repo.start_transaction(); tx.repo_mut() .new_commit(vec![root_commit_id.clone()], tree2.id()) .write() + .block_on() .unwrap(); tx }; @@ -1060,6 +1071,7 @@ fn test_merge_changed_path_segments_enabled_and_disabled() { tx2.repo_mut() .new_commit(vec![root_commit_id.clone()], tree3.id()) .write() + .block_on() .unwrap(); let repo = commit_transactions(vec![tx1, tx2]); let stats = as_readonly_index(&repo).stats(); @@ -1073,7 +1085,7 @@ fn test_merge_changed_path_segments_enabled_and_disabled() { // Changed paths in new commit can no longer be indexed let mut tx = repo.start_transaction(); write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let stats = as_readonly_index(&repo).stats(); assert_eq!(stats.num_commits, 5); assert_eq!(stats.changed_path_commits_range, Some(2..3)); @@ -1100,16 +1112,19 @@ fn test_commit_is_empty(indexed: bool) { .repo_mut() .new_commit(vec![root_commit_id.clone()], root_tree_id.clone()) .write() + .block_on() .unwrap(); let commit2 = tx .repo_mut() .new_commit(vec![root_commit_id.clone()], tree2.id()) .write() + .block_on() .unwrap(); let commit3 = tx .repo_mut() .new_commit(vec![root_commit_id.clone()], tree3.id()) .write() + .block_on() .unwrap(); let commit4 = tx .repo_mut() @@ -1122,8 +1137,9 @@ fn test_commit_is_empty(indexed: bool) { tree4.id(), ) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Sanity check let stats = as_readonly_index(&repo).stats(); @@ -1172,6 +1188,7 @@ fn test_change_id_index() { .set_change_id(ChangeId::from_hex(change_id)) .set_description(format!("commit {commit_number}")) .write() + .block_on() .unwrap() }; let commit_1 = commit_with_change_id("abbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"); diff 
--git a/lib/tests/test_init.rs b/lib/tests/test_init.rs index c13218db194..95783ad6eb5 100644 --- a/lib/tests/test_init.rs +++ b/lib/tests/test_init.rs @@ -178,6 +178,7 @@ fn test_init_load_non_utf8_path() { use std::os::unix::ffi::OsStrExt as _; use jj_lib::workspace::default_working_copy_factories; + use pollster::FutureExt as _; use testutils::TestEnvironment; let settings = testutils::user_settings(); @@ -202,7 +203,7 @@ fn test_init_load_non_utf8_path() { .unwrap(); // Just test that we can write a commit to the store - let repo = workspace.repo_loader().load_at_head().unwrap(); + let repo = workspace.repo_loader().load_at_head().block_on().unwrap(); let mut tx = repo.start_transaction(); write_random_commit(tx.repo_mut()); } diff --git a/lib/tests/test_load_repo.rs b/lib/tests/test_load_repo.rs index af13379e80e..9dfc4a628b2 100644 --- a/lib/tests/test_load_repo.rs +++ b/lib/tests/test_load_repo.rs @@ -13,6 +13,7 @@ // limitations under the License. use jj_lib::repo::RepoLoader; +use pollster::FutureExt as _; use testutils::TestRepo; use testutils::write_random_commit; @@ -24,11 +25,11 @@ fn test_load_at_operation() { let mut tx = repo.start_transaction(); let commit = write_random_commit(tx.repo_mut()); - let repo = tx.commit("add commit").unwrap(); + let repo = tx.commit("add commit").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().remove_head(commit.id()); - tx.commit("remove commit").unwrap(); + tx.commit("remove commit").block_on().unwrap(); // If we load the repo at head, we should not see the commit since it was // removed @@ -38,7 +39,7 @@ fn test_load_at_operation() { &test_repo.env.default_store_factories(), ) .unwrap(); - let head_repo = loader.load_at_head().unwrap(); + let head_repo = loader.load_at_head().block_on().unwrap(); assert!(!head_repo.view().heads().contains(commit.id())); // If we load the repo at the previous operation, we should see the commit since @@ -49,6 +50,6 @@ fn test_load_at_operation() { 
&test_repo.env.default_store_factories(), ) .unwrap(); - let old_repo = loader.load_at(repo.operation()).unwrap(); + let old_repo = loader.load_at(repo.operation()).block_on().unwrap(); assert!(old_repo.view().heads().contains(commit.id())); } diff --git a/lib/tests/test_local_working_copy.rs b/lib/tests/test_local_working_copy.rs index 376104b9a61..80d42bc65d0 100644 --- a/lib/tests/test_local_working_copy.rs +++ b/lib/tests/test_local_working_copy.rs @@ -260,7 +260,7 @@ fn test_checkout_file_transitions(backend: TestRepoBackend) { Kind::GitSubmodule => { let mut tx = repo.start_transaction(); let id = write_random_commit(tx.repo_mut()).id().clone(); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); Merge::normal(TreeValue::GitSubmodule(id)) } }; @@ -291,15 +291,17 @@ fn test_checkout_file_transitions(backend: TestRepoBackend) { files.push((*left_kind, *right_kind, path.clone())); } } - let left_tree_id = left_tree_builder.write_tree(&store).unwrap(); - let right_tree_id = right_tree_builder.write_tree(&store).unwrap(); + let left_tree_id = left_tree_builder.write_tree(&store).block_on().unwrap(); + let right_tree_id = right_tree_builder.write_tree(&store).block_on().unwrap(); let left_commit = commit_with_tree(&store, left_tree_id); let right_commit = commit_with_tree(&store, right_tree_id.clone()); let ws = &mut test_workspace.workspace; ws.check_out(repo.op_id().clone(), None, &left_commit) + .block_on() .unwrap(); ws.check_out(repo.op_id().clone(), None, &right_commit) + .block_on() .unwrap(); // Check that the working copy is clean. @@ -394,7 +396,9 @@ fn test_checkout_no_op() { let commit2 = commit_with_tree(repo.store(), tree.id()); let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); // Test the setup: the file should exist on in the tree state. 
let wc: &LocalWorkingCopy = ws.working_copy().downcast_ref().unwrap(); @@ -402,7 +406,10 @@ fn test_checkout_no_op() { // Update to commit2 (same tree as commit1) let new_op_id = OperationId::from_bytes(b"whatever"); - let stats = ws.check_out(new_op_id.clone(), None, &commit2).unwrap(); + let stats = ws + .check_out(new_op_id.clone(), None, &commit2) + .block_on() + .unwrap(); assert_eq!(stats, CheckoutStats::default()); // The tree state is unchanged but the recorded operation id is updated. @@ -426,8 +433,11 @@ fn test_conflict_subdirectory() { let merged_commit = commit_with_tree(repo.store(), merged_tree.id()); let repo = &test_workspace.repo; let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); ws.check_out(repo.op_id().clone(), None, &merged_commit) + .block_on() .unwrap(); } @@ -474,11 +484,17 @@ fn test_acl() { ) .unwrap(); // Reload commits from the store associated with the workspace - let repo = ws.repo_loader().load_at(repo.operation()).unwrap(); + let repo = ws + .repo_loader() + .load_at(repo.operation()) + .block_on() + .unwrap(); let commit1 = repo.store().get_commit(commit1.id()).unwrap(); let commit2 = repo.store().get_commit(commit2.id()).unwrap(); - ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); assert!( !secret_modified_path .to_fs_path_unchecked(&workspace_root) @@ -504,7 +520,9 @@ fn test_acl() { .to_fs_path_unchecked(&workspace_root) .is_file() ); - ws.check_out(repo.op_id().clone(), None, &commit2).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit2) + .block_on() + .unwrap(); assert!( !secret_modified_path .to_fs_path_unchecked(&workspace_root) @@ -542,7 +560,9 @@ fn test_tree_builder_file_directory_transition() { let mut check_out_tree = |tree_id: &TreeId| { let tree = 
repo.store().get_tree(RepoPathBuf::root(), tree_id).unwrap(); let commit = commit_with_tree(repo.store(), MergedTreeId::resolved(tree.id().clone())); - ws.check_out(repo.op_id().clone(), None, &commit).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit) + .block_on() + .unwrap(); }; let parent_path = repo_path("foo/bar"); @@ -558,7 +578,7 @@ fn test_tree_builder_file_directory_transition() { copy_id: CopyId::placeholder(), }, ); - let tree_id = tree_builder.write_tree().unwrap(); + let tree_id = tree_builder.write_tree().block_on().unwrap(); check_out_tree(&tree_id); assert!(parent_path.to_fs_path_unchecked(&workspace_root).is_file()); assert!(!child_path.to_fs_path_unchecked(&workspace_root).exists()); @@ -574,7 +594,7 @@ fn test_tree_builder_file_directory_transition() { copy_id: CopyId::placeholder(), }, ); - let tree_id = tree_builder.write_tree().unwrap(); + let tree_id = tree_builder.write_tree().block_on().unwrap(); check_out_tree(&tree_id); assert!(parent_path.to_fs_path_unchecked(&workspace_root).is_dir()); assert!(child_path.to_fs_path_unchecked(&workspace_root).is_file()); @@ -590,7 +610,7 @@ fn test_tree_builder_file_directory_transition() { copy_id: CopyId::placeholder(), }, ); - let tree_id = tree_builder.write_tree().unwrap(); + let tree_id = tree_builder.write_tree().block_on().unwrap(); check_out_tree(&tree_id); assert!(parent_path.to_fs_path_unchecked(&workspace_root).is_file()); assert!(!child_path.to_fs_path_unchecked(&workspace_root).exists()); @@ -641,7 +661,10 @@ fn test_conflicting_changes_on_disk() { ) .unwrap(); - let stats = ws.check_out(repo.op_id().clone(), None, &commit).unwrap(); + let stats = ws + .check_out(repo.op_id().clone(), None, &commit) + .block_on() + .unwrap(); assert_eq!( stats, CheckoutStats { @@ -691,7 +714,9 @@ fn test_reset() { let ws = &mut test_workspace.workspace; let commit = commit_with_tree(repo.store(), tree_with_file.id()); - ws.check_out(repo.op_id().clone(), None, &commit).unwrap(); + 
ws.check_out(repo.op_id().clone(), None, &commit) + .block_on() + .unwrap(); // Test the setup: the file should exist on disk and in the tree state. assert!(ignored_path.to_fs_path_unchecked(&workspace_root).is_file()); @@ -750,7 +775,9 @@ fn test_checkout_discard() { let commit2 = commit_with_tree(repo.store(), tree2.id()); let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); let wc: &LocalWorkingCopy = ws.working_copy().downcast_ref().unwrap(); let state_path = wc.state_path().to_path_buf(); @@ -811,7 +838,9 @@ fn test_snapshot_file_directory_transition() { let commit2 = commit_with_tree(repo.store(), tree2.id()); let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); // file -> directory std::fs::remove_file(to_ws_path(file1p_path)).unwrap(); @@ -824,7 +853,9 @@ fn test_snapshot_file_directory_transition() { assert_eq!(new_tree.id(), tree2.id()); let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit2).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit2) + .block_on() + .unwrap(); // directory -> file std::fs::remove_file(to_ws_path(file1_path)).unwrap(); @@ -864,7 +895,10 @@ fn test_materialize_snapshot_conflicted_files() { .unwrap(); let commit = commit_with_tree(repo.store(), merged_tree.id()); - let stats = ws.check_out(repo.op_id().clone(), None, &commit).unwrap(); + let stats = ws + .check_out(repo.op_id().clone(), None, &commit) + .block_on() + .unwrap(); assert_eq!( stats, CheckoutStats { @@ -993,6 +1027,7 @@ fn test_materialize_snapshot_unchanged_conflicts() { test_workspace .workspace .check_out(repo.op_id().clone(), None, &commit) + .block_on() .unwrap(); // "line 5" should be deleted from the checked-out content. 
@@ -1184,7 +1219,9 @@ fn test_gitignores_in_ignored_dir() { let tree1 = create_tree(&test_workspace.repo, &[(gitignore_path, "ignored\n")]); let commit1 = commit_with_tree(test_workspace.repo.store(), tree1.id()); let ws = &mut test_workspace.workspace; - ws.check_out(op_id.clone(), None, &commit1).unwrap(); + ws.check_out(op_id.clone(), None, &commit1) + .block_on() + .unwrap(); testutils::write_working_copy_file(&workspace_root, nested_gitignore_path, "!file\n"); testutils::write_working_copy_file(&workspace_root, ignored_path, "contents"); @@ -1235,7 +1272,11 @@ fn test_gitignores_checkout_never_overwrites_ignored() { // "contents". The exiting contents ("garbage") shouldn't be replaced in the // working copy. let ws = &mut test_workspace.workspace; - assert!(ws.check_out(repo.op_id().clone(), None, &commit,).is_ok()); + assert!( + ws.check_out(repo.op_id().clone(), None, &commit,) + .block_on() + .is_ok() + ); // Check that the old contents are in the working copy let path = workspace_root.join("modified"); @@ -1284,7 +1325,9 @@ fn test_gitignores_ignored_directory_already_tracked() { // Check out the tree with the files in `ignored/` let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit) + .block_on() + .unwrap(); // Make some changes inside the ignored directory and check that they are // detected when we snapshot. 
The files that are still there should not be @@ -1393,7 +1436,7 @@ fn test_git_submodule(gitignore_content: &str) { Merge::normal(TreeValue::GitSubmodule(submodule_id1)), ); - let tree_id1 = tree_builder.write_tree(&store).unwrap(); + let tree_id1 = tree_builder.write_tree(&store).block_on().unwrap(); let commit1 = commit_with_tree(repo.store(), tree_id1.clone()); let mut tree_builder = MergedTreeBuilder::new(tree_id1.clone()); @@ -1402,11 +1445,13 @@ fn test_git_submodule(gitignore_content: &str) { submodule_path.to_owned(), Merge::normal(TreeValue::GitSubmodule(submodule_id2)), ); - let tree_id2 = tree_builder.write_tree(&store).unwrap(); + let tree_id2 = tree_builder.write_tree(&store).block_on().unwrap(); let commit2 = commit_with_tree(repo.store(), tree_id2.clone()); let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); std::fs::create_dir(submodule_path.to_fs_path_unchecked(&workspace_root)).unwrap(); @@ -1433,7 +1478,9 @@ fn test_git_submodule(gitignore_content: &str) { // Check out new commit updating the submodule, which shouldn't fail because // of existing submodule files let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit2).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit2) + .block_on() + .unwrap(); // Check that the files in the submodule are not deleted let file_in_submodule_path = added_submodule_path.to_fs_path_unchecked(&workspace_root); @@ -1453,6 +1500,7 @@ fn test_git_submodule(gitignore_content: &str) { let ws = &mut test_workspace.workspace; let stats = ws .check_out(repo.op_id().clone(), None, &store.root_commit()) + .block_on() .unwrap(); assert_eq!(stats.skipped_files, 1); } @@ -1470,7 +1518,9 @@ fn test_check_out_existing_file_cannot_be_removed() { let commit2 = commit_with_tree(repo.store(), tree2.id()); let ws = &mut test_workspace.workspace; - 
ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); // Make the parent directory readonly. let writable_dir_perm = workspace_root.symlink_metadata().unwrap().permissions(); @@ -1478,7 +1528,9 @@ fn test_check_out_existing_file_cannot_be_removed() { readonly_dir_perm.set_readonly(true); std::fs::set_permissions(&workspace_root, readonly_dir_perm).unwrap(); - let result = ws.check_out(repo.op_id().clone(), None, &commit2); + let result = ws + .check_out(repo.op_id().clone(), None, &commit2) + .block_on(); std::fs::set_permissions(&workspace_root, writable_dir_perm).unwrap(); // TODO: find a way to trigger the error on Windows @@ -1503,13 +1555,18 @@ fn test_check_out_existing_file_replaced_with_directory() { let commit2 = commit_with_tree(repo.store(), tree2.id()); let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); std::fs::remove_file(file_path.to_fs_path_unchecked(&workspace_root)).unwrap(); std::fs::create_dir(file_path.to_fs_path_unchecked(&workspace_root)).unwrap(); // Checkout doesn't fail, but the file should be skipped. - let stats = ws.check_out(repo.op_id().clone(), None, &commit2).unwrap(); + let stats = ws + .check_out(repo.op_id().clone(), None, &commit2) + .block_on() + .unwrap(); assert_eq!(stats.skipped_files, 1); assert!(file_path.to_fs_path_unchecked(&workspace_root).is_dir()); } @@ -1535,7 +1592,10 @@ fn test_check_out_existing_directory_symlink() { // Checkout doesn't fail, but the file should be skipped. let ws = &mut test_workspace.workspace; - let stats = ws.check_out(repo.op_id().clone(), None, &commit).unwrap(); + let stats = ws + .check_out(repo.op_id().clone(), None, &commit) + .block_on() + .unwrap(); assert_eq!(stats.skipped_files, 1); // Therefore, "../escaped" shouldn't be created. 
@@ -1564,7 +1624,10 @@ fn test_check_out_existing_directory_symlink_icase_fs() { // Checkout doesn't fail, but the file should be skipped on icase fs. let ws = &mut test_workspace.workspace; - let stats = ws.check_out(repo.op_id().clone(), None, &commit).unwrap(); + let stats = ws + .check_out(repo.op_id().clone(), None, &commit) + .block_on() + .unwrap(); if is_icase_fs { assert_eq!(stats.skipped_files, 1); } else { @@ -1607,7 +1670,10 @@ fn test_check_out_existing_file_symlink_icase_fs(victim_exists: bool) { // Checkout doesn't fail, but the file should be skipped on icase fs. let ws = &mut test_workspace.workspace; - let stats = ws.check_out(repo.op_id().clone(), None, &commit).unwrap(); + let stats = ws + .check_out(repo.op_id().clone(), None, &commit) + .block_on() + .unwrap(); if is_icase_fs { assert_eq!(stats.skipped_files, 1); } else { @@ -1641,7 +1707,9 @@ fn test_check_out_file_removal_over_existing_directory_symlink() { // Check out "parent/escaped". let ws = &mut test_workspace.workspace; - ws.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); // Pretend that "parent" was a symlink, which might be created by // e.g. checking out "PARENT" on case-insensitive fs. The file @@ -1654,7 +1722,10 @@ fn test_check_out_file_removal_over_existing_directory_symlink() { assert!(file_path.to_fs_path_unchecked(&workspace_root).exists()); // Check out empty tree, which tries to remove "parent/escaped". - let stats = ws.check_out(repo.op_id().clone(), None, &commit2).unwrap(); + let stats = ws + .check_out(repo.op_id().clone(), None, &commit2) + .block_on() + .unwrap(); assert_eq!(stats.skipped_files, 1); // "../escaped" shouldn't be removed. 
@@ -1674,7 +1745,7 @@ fn test_check_out_malformed_file_path(file_path_str: &str) { // Checkout should fail let ws = &mut test_workspace.workspace; - let result = ws.check_out(repo.op_id().clone(), None, &commit); + let result = ws.check_out(repo.op_id().clone(), None, &commit).block_on(); assert_matches!(result, Err(CheckoutError::InvalidRepoPath(_))); // Therefore, "pwned" file shouldn't be created. @@ -1695,7 +1766,7 @@ fn test_check_out_malformed_file_path_windows(file_path_str: &str) { // Checkout should fail on Windows let ws = &mut test_workspace.workspace; - let result = ws.check_out(repo.op_id().clone(), None, &commit); + let result = ws.check_out(repo.op_id().clone(), None, &commit).block_on(); if cfg!(windows) { assert_matches!(result, Err(CheckoutError::InvalidRepoPath(_))); } else { @@ -1732,7 +1803,9 @@ fn test_check_out_reserved_file_path(file_path_str: &str) { // Checkout should fail. let ws = &mut test_workspace.workspace; - let result = ws.check_out(repo.op_id().clone(), None, &commit1); + let result = ws + .check_out(repo.op_id().clone(), None, &commit1) + .block_on(); assert_matches!(result, Err(CheckoutError::ReservedPathComponent { .. })); // Therefore, "pwned" file shouldn't be created. @@ -1754,7 +1827,9 @@ fn test_check_out_reserved_file_path(file_path_str: &str) { } // Check out empty tree, which tries to remove the file. - let result = ws.check_out(repo.op_id().clone(), None, &commit2); + let result = ws + .check_out(repo.op_id().clone(), None, &commit2) + .block_on(); assert_matches!(result, Err(CheckoutError::ReservedPathComponent { .. })); // The existing file shouldn't be removed. @@ -1783,7 +1858,9 @@ fn test_check_out_reserved_file_path_icase_fs(file_path_str: &str) { // Checkout should fail on icase fs. 
let ws = &mut test_workspace.workspace; - let result = ws.check_out(repo.op_id().clone(), None, &commit1); + let result = ws + .check_out(repo.op_id().clone(), None, &commit1) + .block_on(); if is_icase_fs { assert_matches!(result, Err(CheckoutError::ReservedPathComponent { .. })); } else { @@ -1807,7 +1884,9 @@ fn test_check_out_reserved_file_path_icase_fs(file_path_str: &str) { std::fs::write(&disk_path, "").unwrap(); // Check out empty tree, which tries to remove the file. - let result = ws.check_out(repo.op_id().clone(), None, &commit2); + let result = ws + .check_out(repo.op_id().clone(), None, &commit2) + .block_on(); if is_icase_fs { assert_matches!(result, Err(CheckoutError::ReservedPathComponent { .. })); } else { @@ -1844,7 +1923,9 @@ fn test_check_out_reserved_file_path_hfs_plus(file_path_str: &str) { // Checkout should fail on HFS+-like fs. let ws = &mut test_workspace.workspace; - let result = ws.check_out(repo.op_id().clone(), None, &commit1); + let result = ws + .check_out(repo.op_id().clone(), None, &commit1) + .block_on(); if is_hfs_plus { assert_matches!(result, Err(CheckoutError::ReservedPathComponent { .. })); } else { @@ -1868,7 +1949,9 @@ fn test_check_out_reserved_file_path_hfs_plus(file_path_str: &str) { std::fs::write(&disk_path, "").unwrap(); // Check out empty tree, which tries to remove the file. - let result = ws.check_out(repo.op_id().clone(), None, &commit2); + let result = ws + .check_out(repo.op_id().clone(), None, &commit2) + .block_on(); if is_hfs_plus { assert_matches!(result, Err(CheckoutError::ReservedPathComponent { .. })); } else { @@ -1910,7 +1993,9 @@ fn test_check_out_reserved_file_path_vfat(vfat_path_str: &str, file_path_strs: & // Checkout should fail on VFAT-like fs. 
let ws = &mut test_workspace.workspace; - let result = ws.check_out(repo.op_id().clone(), None, &commit1); + let result = ws + .check_out(repo.op_id().clone(), None, &commit1) + .block_on(); if is_vfat { assert_matches!(result, Err(CheckoutError::ReservedPathComponent { .. })); } else { @@ -1936,7 +2021,9 @@ fn test_check_out_reserved_file_path_vfat(vfat_path_str: &str, file_path_strs: & } // Check out empty tree, which tries to remove the file. - let result = ws.check_out(repo.op_id().clone(), None, &commit2); + let result = ws + .check_out(repo.op_id().clone(), None, &commit2) + .block_on(); if is_vfat { assert_matches!(result, Err(CheckoutError::ReservedPathComponent { .. })); } else { diff --git a/lib/tests/test_local_working_copy_concurrent.rs b/lib/tests/test_local_working_copy_concurrent.rs index e1037077732..88e7d7b27bc 100644 --- a/lib/tests/test_local_working_copy_concurrent.rs +++ b/lib/tests/test_local_working_copy_concurrent.rs @@ -51,7 +51,9 @@ fn test_concurrent_checkout() { // Check out tree1 let ws1 = &mut test_workspace1.workspace; // The operation ID is not correct, but that doesn't matter for this test - ws1.check_out(repo.op_id().clone(), None, &commit1).unwrap(); + ws1.check_out(repo.op_id().clone(), None, &commit1) + .block_on() + .unwrap(); // Check out tree2 from another process (simulated by another workspace // instance) @@ -64,15 +66,21 @@ fn test_concurrent_checkout() { ) .unwrap(); // Reload commit from the store associated with the workspace - let repo = ws2.repo_loader().load_at(repo.operation()).unwrap(); + let repo = ws2 + .repo_loader() + .load_at(repo.operation()) + .block_on() + .unwrap(); let commit2 = repo.store().get_commit(commit2.id()).unwrap(); ws2.check_out(repo.op_id().clone(), Some(&tree_id1), &commit2) + .block_on() .unwrap(); } // Checking out another tree (via the first workspace instance) should now fail. 
assert_matches!( - ws1.check_out(repo.op_id().clone(), Some(&tree_id1), &commit3,), + ws1.check_out(repo.op_id().clone(), Some(&tree_id1), &commit3,) + .block_on(), Err(CheckoutError::ConcurrentCheckout) ); @@ -111,6 +119,7 @@ fn test_checkout_parallel() { test_workspace .workspace .check_out(repo.op_id().clone(), None, &commit) + .block_on() .unwrap(); thread::scope(|s| { @@ -130,10 +139,17 @@ fn test_checkout_parallel() { ) .unwrap(); // Reload commit from the store associated with the workspace - let repo = workspace.repo_loader().load_at(repo.operation()).unwrap(); + let repo = workspace + .repo_loader() + .load_at(repo.operation()) + .block_on() + .unwrap(); let commit = repo.store().get_commit(commit.id()).unwrap(); // The operation ID is not correct, but that doesn't matter for this test - let stats = workspace.check_out(op_id, None, &commit).unwrap(); + let stats = workspace + .check_out(op_id, None, &commit) + .block_on() + .unwrap(); assert_eq!(stats.updated_files, 0); assert_eq!(stats.added_files, 1); assert_eq!(stats.removed_files, 1); @@ -167,7 +183,9 @@ fn test_racy_checkout() { let mut num_matches = 0; for _ in 0..100 { let ws = &mut test_workspace.workspace; - ws.check_out(op_id.clone(), None, &commit).unwrap(); + ws.check_out(op_id.clone(), None, &commit) + .block_on() + .unwrap(); assert_eq!( std::fs::read(path.to_fs_path_unchecked(&workspace_root)).unwrap(), b"1".to_vec() diff --git a/lib/tests/test_local_working_copy_sparse.rs b/lib/tests/test_local_working_copy_sparse.rs index 70f77e3a775..9fee97590d2 100644 --- a/lib/tests/test_local_working_copy_sparse.rs +++ b/lib/tests/test_local_working_copy_sparse.rs @@ -63,6 +63,7 @@ fn test_sparse_checkout() { test_workspace .workspace .check_out(repo.op_id().clone(), None, &commit) + .block_on() .unwrap(); let ws = &mut test_workspace.workspace; @@ -223,6 +224,7 @@ fn test_sparse_commit() { test_workspace .workspace .check_out(repo.op_id().clone(), None, &commit) + .block_on() .unwrap(); // Set sparse 
patterns to only dir1/ diff --git a/lib/tests/test_merge_trees.rs b/lib/tests/test_merge_trees.rs index 985a238c0bc..35b384d7099 100644 --- a/lib/tests/test_merge_trees.rs +++ b/lib/tests/test_merge_trees.rs @@ -51,24 +51,28 @@ fn test_simplify_conflict_after_resolving_parent() { .repo_mut() .new_commit(vec![repo.store().root_commit_id().clone()], tree_a.id()) .write() + .block_on() .unwrap(); let tree_b = create_tree(repo, &[(path, "Abc\ndef\nghi\n")]); let commit_b = tx .repo_mut() .new_commit(vec![commit_a.id().clone()], tree_b.id()) .write() + .block_on() .unwrap(); let tree_c = create_tree(repo, &[(path, "Abc\ndef\nGhi\n")]); let commit_c = tx .repo_mut() .new_commit(vec![commit_b.id().clone()], tree_c.id()) .write() + .block_on() .unwrap(); let tree_d = create_tree(repo, &[(path, "abC\ndef\nghi\n")]); let commit_d = tx .repo_mut() .new_commit(vec![commit_a.id().clone()], tree_d.id()) .write() + .block_on() .unwrap(); let commit_b2 = rebase_commit(tx.repo_mut(), commit_b, vec![commit_d.id().clone()]) @@ -91,12 +95,13 @@ fn test_simplify_conflict_after_resolving_parent() { .rewrite_commit(&commit_b2) .set_tree_id(tree_b3.id()) .write() + .block_on() .unwrap(); let commit_c3 = rebase_commit(tx.repo_mut(), commit_c2, vec![commit_b3.id().clone()]) .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // The conflict should now be resolved. 
let tree_c2 = commit_c3.tree().unwrap(); @@ -150,14 +155,17 @@ fn test_rebase_linearize_lossy_merge(same_change: SameChange) { let commit_a = repo_mut .new_commit(vec![repo.store().root_commit_id().clone()], tree_1.id()) .write() + .block_on() .unwrap(); let commit_b = repo_mut .new_commit(vec![commit_a.id().clone()], tree_2.id()) .write() + .block_on() .unwrap(); let commit_c = repo_mut .new_commit(vec![commit_a.id().clone()], tree_2.id()) .write() + .block_on() .unwrap(); let commit_d = repo_mut .new_commit( @@ -165,6 +173,7 @@ fn test_rebase_linearize_lossy_merge(same_change: SameChange) { tree_2.id(), ) .write() + .block_on() .unwrap(); match same_change { @@ -213,14 +222,17 @@ fn test_rebase_on_lossy_merge(same_change: SameChange) { let commit_a = repo_mut .new_commit(vec![repo.store().root_commit_id().clone()], tree_1.id()) .write() + .block_on() .unwrap(); let commit_b = repo_mut .new_commit(vec![commit_a.id().clone()], tree_2.id()) .write() + .block_on() .unwrap(); let commit_c = repo_mut .new_commit(vec![commit_a.id().clone()], tree_2.id()) .write() + .block_on() .unwrap(); let commit_d = repo_mut .new_commit( @@ -228,6 +240,7 @@ fn test_rebase_on_lossy_merge(same_change: SameChange) { tree_2.id(), ) .write() + .block_on() .unwrap(); match same_change { @@ -238,6 +251,7 @@ fn test_rebase_on_lossy_merge(same_change: SameChange) { let commit_c2 = repo_mut .new_commit(vec![commit_a.id().clone()], tree_3.id()) .write() + .block_on() .unwrap(); let commit_d2 = rebase_commit( repo_mut, diff --git a/lib/tests/test_merged_tree.rs b/lib/tests/test_merged_tree.rs index 14e6f6245e5..e20ad2dcf4e 100644 --- a/lib/tests/test_merged_tree.rs +++ b/lib/tests/test_merged_tree.rs @@ -86,7 +86,7 @@ fn test_merged_tree_builder_resolves_conflict() { [tree2.id().clone(), tree3.id().clone()], )); let tree_builder = MergedTreeBuilder::new(base_tree_id); - let tree_id = tree_builder.write_tree(store).unwrap(); + let tree_id = tree_builder.write_tree(store).block_on().unwrap(); 
assert_eq!(tree_id, MergedTreeId::resolved(tree2.id().clone())); } diff --git a/lib/tests/test_mut_repo.rs b/lib/tests/test_mut_repo.rs index 6819936586e..505cd8711a8 100644 --- a/lib/tests/test_mut_repo.rs +++ b/lib/tests/test_mut_repo.rs @@ -52,12 +52,12 @@ fn test_edit() { let mut tx = repo.start_transaction(); let wc_commit = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let ws_name = WorkspaceName::DEFAULT.to_owned(); tx.repo_mut().edit(ws_name.clone(), &wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!(repo.view().get_wc_commit_id(&ws_name), Some(wc_commit.id())); } @@ -69,18 +69,19 @@ fn test_checkout() { let mut tx = repo.start_transaction(); let wc_commit_parent = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let ws_name = WorkspaceName::DEFAULT.to_owned(); let wc_commit = tx .repo_mut() .check_out(ws_name.clone(), &wc_commit_parent) + .block_on() .unwrap(); assert_eq!(wc_commit.tree_id(), wc_commit_parent.tree_id()); assert_eq!(wc_commit.parent_ids().len(), 1); assert_eq!(&wc_commit.parent_ids()[0], wc_commit_parent.id()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!(repo.view().get_wc_commit_id(&ws_name), Some(wc_commit.id())); } @@ -96,13 +97,13 @@ fn test_edit_previous_not_empty() { let old_wc_commit = write_random_commit(mut_repo); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); let new_wc_commit = write_random_commit(mut_repo); mut_repo.edit(ws_name, 
&new_wc_commit).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert!(mut_repo.view().heads().contains(old_wc_commit.id())); } @@ -121,16 +122,17 @@ fn test_edit_previous_empty() { repo.store().empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); let new_wc_commit = write_random_commit(mut_repo); mut_repo.edit(ws_name, &new_wc_commit).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert!(!mut_repo.view().heads().contains(old_wc_commit.id())); } @@ -159,16 +161,17 @@ fn test_edit_previous_empty_merge() { ) .set_tree_id(old_parent_tree.id()) .write() + .block_on() .unwrap(); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); let new_wc_commit = write_random_commit(mut_repo); mut_repo.edit(ws_name, &new_wc_commit).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert!(!mut_repo.view().heads().contains(old_wc_commit.id())); } @@ -188,16 +191,17 @@ fn test_edit_previous_empty_with_description() { ) .set_description("not empty") .write() + .block_on() .unwrap(); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); let new_wc_commit = write_random_commit(mut_repo); mut_repo.edit(ws_name, 
&new_wc_commit).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert!(mut_repo.view().heads().contains(old_wc_commit.id())); } @@ -216,17 +220,18 @@ fn test_edit_previous_empty_with_local_bookmark() { repo.store().empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); mut_repo.set_local_bookmark_target("b".as_ref(), RefTarget::normal(old_wc_commit.id().clone())); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); let new_wc_commit = write_random_commit(mut_repo); mut_repo.edit(ws_name, &new_wc_commit).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert!(mut_repo.view().heads().contains(old_wc_commit.id())); } @@ -245,6 +250,7 @@ fn test_edit_previous_empty_with_other_workspace() { repo.store().empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); @@ -252,13 +258,13 @@ fn test_edit_previous_empty_with_other_workspace() { mut_repo .edit(other_ws_name.clone(), &old_wc_commit) .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); let new_wc_commit = write_random_commit(mut_repo); mut_repo.edit(ws_name, &new_wc_commit).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert!(mut_repo.view().heads().contains(old_wc_commit.id())); } @@ -277,6 +283,7 @@ fn test_edit_previous_empty_non_head() { repo.store().empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); let old_child = mut_repo .new_commit( @@ -284,16 +291,17 @@ fn 
test_edit_previous_empty_non_head() { old_wc_commit.tree_id().clone(), ) .write() + .block_on() .unwrap(); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); let new_wc_commit = write_random_commit(mut_repo); mut_repo.edit(ws_name, &new_wc_commit).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert_eq!( *mut_repo.view().heads(), hashset! {old_child.id().clone(), new_wc_commit.id().clone()} @@ -309,12 +317,12 @@ fn test_edit_initial() { let mut tx = repo.start_transaction(); let wc_commit = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let ws_name = WorkspaceNameBuf::from("new-workspace"); tx.repo_mut().edit(ws_name.clone(), &wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!(repo.view().get_wc_commit_id(&ws_name), Some(wc_commit.id())); } @@ -334,7 +342,7 @@ fn test_edit_hidden_commit() { let mut tx = repo.start_transaction(); let ws_name = WorkspaceName::DEFAULT.to_owned(); tx.repo_mut().edit(ws_name.clone(), &wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!(repo.view().get_wc_commit_id(&ws_name), Some(wc_commit.id())); assert_eq!(*repo.view().heads(), hashset! 
{wc_commit.id().clone()}); } @@ -359,7 +367,7 @@ fn test_add_head_success() { mut_repo.add_head(&new_commit).unwrap(); assert!(mut_repo.view().heads().contains(new_commit.id())); assert!(mut_repo.index().has_id(new_commit.id())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert!(repo.view().heads().contains(new_commit.id())); assert!(repo.index().has_id(new_commit.id())); } @@ -375,7 +383,7 @@ fn test_add_head_ancestor() { let commit1 = write_random_commit(tx.repo_mut()); let commit2 = write_random_commit_with_parents(tx.repo_mut(), &[&commit1]); let commit3 = write_random_commit_with_parents(tx.repo_mut(), &[&commit2]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!(repo.view().heads(), &hashset! {commit3.id().clone()}); let mut tx = repo.start_transaction(); @@ -393,7 +401,7 @@ fn test_add_head_not_immediate_child() { let mut tx = repo.start_transaction(); let initial = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Create some commits outside of the repo by using a temporary transaction. // Then add one of them as a head. 
@@ -402,6 +410,7 @@ fn test_add_head_not_immediate_child() { .set_change_id(initial.change_id().clone()) .set_predecessors(vec![initial.id().clone()]) .write() + .block_on() .unwrap(); let child = write_random_commit_with_parents(tx.repo_mut(), &[&rewritten]); drop(tx); @@ -431,7 +440,7 @@ fn test_remove_head() { let commit1 = write_random_commit(tx.repo_mut()); let commit2 = write_random_commit_with_parents(tx.repo_mut(), &[&commit1]); let commit3 = write_random_commit_with_parents(tx.repo_mut(), &[&commit2]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); @@ -444,7 +453,7 @@ fn test_remove_head() { assert!(mut_repo.index().has_id(commit1.id())); assert!(mut_repo.index().has_id(commit2.id())); assert!(mut_repo.index().has_id(commit3.id())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let heads = repo.view().heads().clone(); assert!(!heads.contains(commit3.id())); assert!(!heads.contains(commit2.id())); @@ -480,7 +489,7 @@ fn test_has_changed() { remote_symbol("main", "origin"), normal_remote_ref(commit1.id()), ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Test the setup assert_eq!(repo.view().heads(), &hashset! 
{commit1.id().clone()}); @@ -547,7 +556,7 @@ fn test_rebase_descendants_simple() { let commit3 = write_random_commit_with_parents(tx.repo_mut(), &[&commit2]); let commit4 = write_random_commit_with_parents(tx.repo_mut(), &[&commit1]); let commit5 = write_random_commit_with_parents(tx.repo_mut(), &[&commit4]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); @@ -580,7 +589,7 @@ fn test_rebase_descendants_divergent_rewrite() { let commit1 = write_random_commit(tx.repo_mut()); let commit2 = write_random_commit_with_parents(tx.repo_mut(), &[&commit1]); let _commit3 = write_random_commit_with_parents(tx.repo_mut(), &[&commit2]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); @@ -632,12 +641,12 @@ fn test_remove_wc_commit_previous_not_discardable() { let old_wc_commit = write_random_commit(mut_repo); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); mut_repo.remove_wc_commit(&ws_name).unwrap(); - mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert!(mut_repo.view().heads().contains(old_wc_commit.id())); } @@ -656,15 +665,16 @@ fn test_remove_wc_commit_previous_discardable() { repo.store().empty_merged_tree_id(), ) .write() + .block_on() .unwrap(); let ws_name = WorkspaceName::DEFAULT.to_owned(); mut_repo.edit(ws_name.clone(), &old_wc_commit).unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); mut_repo.remove_wc_commit(&ws_name).unwrap(); - 
mut_repo.rebase_descendants().unwrap(); + mut_repo.rebase_descendants().block_on().unwrap(); assert!(!mut_repo.view().heads().contains(old_wc_commit.id())); } @@ -694,7 +704,7 @@ fn test_reparent_descendants() { mut_repo .set_local_bookmark_target(bookmark.as_ref(), RefTarget::normal(commit.id().clone())); } - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Rewrite "commit_a". let mut tx = repo.start_transaction(); @@ -703,12 +713,13 @@ fn test_reparent_descendants() { .rewrite_commit(&commit_a) .set_tree_id(create_random_tree(&repo)) .write() + .block_on() .unwrap(); - let reparented = mut_repo.reparent_descendants().unwrap(); + let reparented = mut_repo.reparent_descendants().block_on().unwrap(); // "child_a_b", "grandchild_a_b" and "child_a" (3 commits) must have been // reparented. assert_eq!(reparented, 3); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); for (bookmark, commit) in [ ("b", &commit_b), @@ -751,13 +762,13 @@ fn test_bookmark_hidden_commit() { let mut tx = repo.start_transaction(); let commit = write_random_commit(tx.repo_mut()); tx.repo_mut().remove_head(commit.id()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Test the setup assert_eq!(*repo.view().heads(), hashset! {root_commit.id().clone()}); let mut tx = repo.start_transaction(); tx.repo_mut() .set_local_bookmark_target("b".as_ref(), RefTarget::normal(commit.id().clone())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!(*repo.view().heads(), hashset! 
{commit.id().clone()}); } diff --git a/lib/tests/test_operations.rs b/lib/tests/test_operations.rs index c36bc4fd953..8189433ebc1 100644 --- a/lib/tests/test_operations.rs +++ b/lib/tests/test_operations.rs @@ -68,11 +68,11 @@ fn test_unpublished_operation() { let mut tx1 = repo.start_transaction(); write_random_commit(tx1.repo_mut()); - let unpublished_op = tx1.write("transaction 1").unwrap(); + let unpublished_op = tx1.write("transaction 1").block_on().unwrap(); let op_id1 = unpublished_op.operation().id().clone(); assert_ne!(op_id1, op_id0); assert_eq!(list_dir(&op_heads_dir), vec![op_id0.hex()]); - unpublished_op.publish().unwrap(); + unpublished_op.publish().block_on().unwrap(); assert_eq!(list_dir(&op_heads_dir), vec![op_id1.hex()]); } @@ -91,6 +91,7 @@ fn test_consecutive_operations() { write_random_commit(tx1.repo_mut()); let op_id1 = tx1 .commit("transaction 1") + .block_on() .unwrap() .operation() .id() @@ -98,11 +99,12 @@ fn test_consecutive_operations() { assert_ne!(op_id1, op_id0); assert_eq!(list_dir(&op_heads_dir), vec![op_id1.hex()]); - let repo = repo.reload_at_head().unwrap(); + let repo = repo.reload_at_head().block_on().unwrap(); let mut tx2 = repo.start_transaction(); write_random_commit(tx2.repo_mut()); let op_id2 = tx2 .commit("transaction 2") + .block_on() .unwrap() .operation() .id() @@ -113,7 +115,7 @@ fn test_consecutive_operations() { // Reloading the repo makes no difference (there are no conflicting operations // to resolve). 
- let _repo = repo.reload_at_head().unwrap(); + let _repo = repo.reload_at_head().block_on().unwrap(); assert_eq!(list_dir(&op_heads_dir), vec![op_id2.hex()]); } @@ -132,6 +134,7 @@ fn test_concurrent_operations() { write_random_commit(tx1.repo_mut()); let op_id1 = tx1 .commit("transaction 1") + .block_on() .unwrap() .operation() .id() @@ -145,6 +148,7 @@ fn test_concurrent_operations() { write_random_commit(tx2.repo_mut()); let op_id2 = tx2 .commit("transaction 2") + .block_on() .unwrap() .operation() .id() @@ -158,7 +162,7 @@ fn test_concurrent_operations() { assert_eq!(actual_heads_on_disk, expected_heads_on_disk); // Reloading the repo causes the operations to be merged - let repo = repo.reload_at_head().unwrap(); + let repo = repo.reload_at_head().block_on().unwrap(); let merged_op_id = repo.op_id().clone(); assert_ne!(merged_op_id, op_id0); assert_ne!(merged_op_id, op_id1); @@ -179,7 +183,7 @@ fn test_isolation() { let mut tx = repo.start_transaction(); let initial = write_random_commit_with_parents(tx.repo_mut(), &[]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let mut_repo1 = tx1.repo_mut(); @@ -194,14 +198,16 @@ fn test_isolation() { .rewrite_commit(&initial) .set_description("rewrite1") .write() + .block_on() .unwrap(); - mut_repo1.rebase_descendants().unwrap(); + mut_repo1.rebase_descendants().block_on().unwrap(); let rewrite2 = mut_repo2 .rewrite_commit(&initial) .set_description("rewrite2") .write() + .block_on() .unwrap(); - mut_repo2.rebase_descendants().unwrap(); + mut_repo2.rebase_descendants().block_on().unwrap(); // Neither transaction has committed yet, so each transaction sees its own // commit. @@ -210,15 +216,15 @@ fn test_isolation() { assert_heads(mut_repo2, vec![rewrite2.id()]); // The base repo and tx2 don't see the commits from tx1. 
- tx1.commit("transaction 1").unwrap(); + tx1.commit("transaction 1").block_on().unwrap(); assert_heads(repo.as_ref(), vec![initial.id()]); assert_heads(mut_repo2, vec![rewrite2.id()]); // The base repo still doesn't see the commits after both transactions commit. - tx2.commit("transaction 2").unwrap(); + tx2.commit("transaction 2").block_on().unwrap(); assert_heads(repo.as_ref(), vec![initial.id()]); // After reload, the base repo sees both rewrites. - let repo = repo.reload_at_head().unwrap(); + let repo = repo.reload_at_head().block_on().unwrap(); assert_heads(repo.as_ref(), vec![rewrite1.id(), rewrite2.id()]); } @@ -235,12 +241,13 @@ fn test_stored_commit_predecessors() { .rewrite_commit(&commit1) .set_description("rewritten") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Reload operation from disk. 
- let op = loader.load_operation(repo.op_id()).unwrap(); + let op = loader.load_operation(repo.op_id()).block_on().unwrap(); assert!(op.stores_commit_predecessors()); assert_matches!(op.predecessors_for_commit(commit1.id()), Some([])); assert_matches!(op.predecessors_for_commit(commit2.id()), Some([id]) if id == commit1.id()); @@ -250,7 +257,7 @@ fn test_stored_commit_predecessors() { data.commit_predecessors = None; let op_id = loader.op_store().write_operation(&data).block_on().unwrap(); assert_ne!(&op_id, op.id()); - let op = loader.load_operation(&op_id).unwrap(); + let op = loader.load_operation(&op_id).block_on().unwrap(); assert!(!op.stores_commit_predecessors()); } @@ -261,7 +268,7 @@ fn test_reparent_range_linear() { let loader = repo_0.loader(); let op_store = repo_0.op_store(); - let read_op = |id| loader.load_operation(id).unwrap(); + let read_op = |id| loader.load_operation(id).block_on().unwrap(); fn op_parents(op: &Operation) -> [Operation; N] { let parents: Vec<_> = op.parents().try_collect().unwrap(); @@ -279,10 +286,10 @@ fn test_reparent_range_linear() { write_random_commit(tx.repo_mut()); tx }; - let repo_a = random_tx(&repo_0).commit("op A").unwrap(); - let repo_b = random_tx(&repo_a).commit("op B").unwrap(); - let repo_c = random_tx(&repo_b).commit("op C").unwrap(); - let repo_d = random_tx(&repo_c).commit("op D").unwrap(); + let repo_a = random_tx(&repo_0).commit("op A").block_on().unwrap(); + let repo_b = random_tx(&repo_a).commit("op B").block_on().unwrap(); + let repo_c = random_tx(&repo_b).commit("op C").block_on().unwrap(); + let repo_d = random_tx(&repo_c).commit("op D").block_on().unwrap(); // Reparent B..D (=C|D) onto A: // D' @@ -295,6 +302,7 @@ fn test_reparent_range_linear() { slice::from_ref(repo_d.operation()), repo_a.operation(), ) + .block_on() .unwrap(); assert_eq!(stats.new_head_ids.len(), 1); assert_eq!(stats.rewritten_count, 2); @@ -314,6 +322,7 @@ fn test_reparent_range_linear() { slice::from_ref(repo_d.operation()), 
repo_a.operation(), ) + .block_on() .unwrap(); assert_eq!(stats.new_head_ids, vec![repo_a.op_id().clone()]); assert_eq!(stats.rewritten_count, 0); @@ -327,7 +336,7 @@ fn test_reparent_range_branchy() { let loader = repo_0.loader(); let op_store = repo_0.op_store(); - let read_op = |id| loader.load_operation(id).unwrap(); + let read_op = |id| loader.load_operation(id).block_on().unwrap(); fn op_parents(op: &Operation) -> [Operation; N] { let parents: Vec<_> = op.parents().try_collect().unwrap(); @@ -350,10 +359,10 @@ fn test_reparent_range_branchy() { write_random_commit(tx.repo_mut()); tx }; - let repo_a = random_tx(&repo_0).commit("op A").unwrap(); - let repo_b = random_tx(&repo_a).commit("op B").unwrap(); - let repo_c = random_tx(&repo_b).commit("op C").unwrap(); - let repo_d = random_tx(&repo_c).commit("op D").unwrap(); + let repo_a = random_tx(&repo_0).commit("op A").block_on().unwrap(); + let repo_b = random_tx(&repo_a).commit("op B").block_on().unwrap(); + let repo_c = random_tx(&repo_b).commit("op C").block_on().unwrap(); + let repo_d = random_tx(&repo_c).commit("op D").block_on().unwrap(); let tx_e = random_tx(&repo_d); let tx_f = random_tx(&repo_c); let repo_g = testutils::commit_transactions(vec![tx_e, tx_f]); @@ -374,6 +383,7 @@ fn test_reparent_range_branchy() { slice::from_ref(repo_g.operation()), repo_b.operation(), ) + .block_on() .unwrap(); assert_eq!(stats.new_head_ids.len(), 1); assert_eq!(stats.rewritten_count, 3); @@ -401,6 +411,7 @@ fn test_reparent_range_branchy() { slice::from_ref(repo_g.operation()), repo_a.operation(), ) + .block_on() .unwrap(); assert_eq!(stats.new_head_ids.len(), 1); assert_eq!(stats.rewritten_count, 5); @@ -427,6 +438,7 @@ fn test_reparent_range_branchy() { slice::from_ref(repo_g.operation()), repo_d.operation(), ) + .block_on() .unwrap(); assert_eq!(stats.new_head_ids.len(), 1); assert_eq!(stats.rewritten_count, 1); @@ -449,6 +461,7 @@ fn test_reparent_range_branchy() { slice::from_ref(&op_f), repo_d.operation(), ) + 
.block_on() .unwrap(); assert_eq!(stats.new_head_ids.len(), 1); assert_eq!(stats.rewritten_count, 1); @@ -468,8 +481,8 @@ fn test_reparent_discarding_predecessors(op_stores_commit_predecessors: bool) { let op_store = repo_0.op_store(); let repo_at = |id: &OperationId| { - let op = loader.load_operation(id).unwrap(); - loader.load_at(&op).unwrap() + let op = loader.load_operation(id).block_on().unwrap(); + loader.load_at(&op).block_on().unwrap() }; let head_commits = |repo: &dyn Repo| { repo.view() @@ -488,7 +501,7 @@ fn test_reparent_discarding_predecessors(op_stores_commit_predecessors: bool) { let mut tx = repo_0.start_transaction(); let commit_a0 = write_random_commit(tx.repo_mut()); let commit_b0 = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a0]); - let repo_1 = tx.commit("op1").unwrap(); + let repo_1 = tx.commit("op1").block_on().unwrap(); let mut tx = repo_1.start_transaction(); let commit_a1 = tx @@ -496,16 +509,17 @@ fn test_reparent_discarding_predecessors(op_stores_commit_predecessors: bool) { .rewrite_commit(&commit_a0) .set_description("a1") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); let [commit_b1] = head_commits(tx.repo()).try_into().unwrap(); tx.repo_mut().add_head(&commit_b0).unwrap(); // resurrect rewritten commits - let repo_2 = tx.commit("op2").unwrap(); + let repo_2 = tx.commit("op2").block_on().unwrap(); let mut tx = repo_2.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit_b0); - tx.repo_mut().rebase_descendants().unwrap(); - let repo_3 = tx.commit("op3").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo_3 = tx.commit("op3").block_on().unwrap(); let mut tx = repo_3.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit_a0); @@ -515,9 +529,10 @@ fn test_reparent_discarding_predecessors(op_stores_commit_predecessors: bool) { .rewrite_commit(&commit_a1) .set_description("a2") 
.write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo_4 = tx.commit("op4").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo_4 = tx.commit("op4").block_on().unwrap(); let repo_4 = if op_stores_commit_predecessors { repo_4 @@ -561,6 +576,7 @@ fn test_reparent_discarding_predecessors(op_stores_commit_predecessors: bool) { slice::from_ref(repo_4.operation()), repo_0.operation(), ) + .block_on() .unwrap(); assert_eq!(stats.new_head_ids.len(), 1); assert_eq!(stats.rewritten_count, 3); @@ -587,6 +603,7 @@ fn test_reparent_discarding_predecessors(op_stores_commit_predecessors: bool) { slice::from_ref(repo_4.operation()), repo_0.operation(), ) + .block_on() .unwrap(); assert_eq!(stats.new_head_ids.len(), 1); assert_eq!(stats.rewritten_count, 2); @@ -621,6 +638,7 @@ fn test_reparent_discarding_predecessors(op_stores_commit_predecessors: bool) { slice::from_ref(repo_4.operation()), repo_0.operation(), ) + .block_on() .unwrap(); assert_eq!(stats.new_head_ids.len(), 1); assert_eq!(stats.rewritten_count, 1); @@ -661,7 +679,7 @@ fn test_resolve_op_id() { // up with hashes with ambiguous prefixes. 
for i in (1..5).chain([9, 27]) { let tx = repo.start_transaction(); - let repo = tx.commit(format!("transaction {i}")).unwrap(); + let repo = tx.commit(format!("transaction {i}")).block_on().unwrap(); operations.push(repo.operation().clone()); } // "6" and "0" are ambiguous @@ -677,7 +695,7 @@ fn test_resolve_op_id() { "#); let repo_loader = repo.loader(); - let resolve = |op_str: &str| op_walk::resolve_op_for_load(repo_loader, op_str); + let resolve = |op_str: &str| op_walk::resolve_op_for_load(repo_loader, op_str).block_on(); // Full id assert_eq!(resolve(&operations[0].id().hex()).unwrap(), operations[0]); @@ -713,7 +731,7 @@ fn test_resolve_op_id() { )) ); // Virtual root id - let root_operation = loader.root_operation(); + let root_operation = loader.root_operation().block_on(); assert_eq!(resolve(&root_operation.id().hex()).unwrap(), root_operation); assert_eq!(resolve("00").unwrap(), root_operation); assert_eq!(resolve("0e").unwrap(), operations[4]); @@ -732,7 +750,7 @@ fn test_resolve_current_op() { let repo = test_repo.repo; assert_eq!( - op_walk::resolve_op_with_repo(&repo, "@").unwrap(), + op_walk::resolve_op_with_repo(&repo, "@").block_on().unwrap(), *repo.operation() ); } @@ -747,7 +765,7 @@ fn test_resolve_op_parents_children() { let mut repos = Vec::new(); for _ in 0..3 { let tx = repo.start_transaction(); - repos.push(tx.commit("test").unwrap()); + repos.push(tx.commit("test").block_on().unwrap()); repo = repos.last().unwrap(); } let operations = repos.iter().map(|repo| repo.operation()).collect_vec(); @@ -755,16 +773,16 @@ fn test_resolve_op_parents_children() { // Parent let op2_id_hex = operations[2].id().hex(); assert_eq!( - op_walk::resolve_op_with_repo(repo, &format!("{op2_id_hex}-")).unwrap(), + op_walk::resolve_op_with_repo(repo, &format!("{op2_id_hex}-")).block_on().unwrap(), *operations[1] ); assert_eq!( - op_walk::resolve_op_with_repo(repo, &format!("{op2_id_hex}--")).unwrap(), + op_walk::resolve_op_with_repo(repo, 
&format!("{op2_id_hex}--")).block_on().unwrap(), *operations[0] ); // "{op2_id_hex}----" is the root operation assert_matches!( - op_walk::resolve_op_with_repo(repo, &format!("{op2_id_hex}-----")), + op_walk::resolve_op_with_repo(repo, &format!("{op2_id_hex}-----")).block_on(), Err(OpsetEvaluationError::OpsetResolution( OpsetResolutionError::EmptyOperations(_) )) @@ -773,15 +791,15 @@ fn test_resolve_op_parents_children() { // Child let op0_id_hex = operations[0].id().hex(); assert_eq!( - op_walk::resolve_op_with_repo(repo, &format!("{op0_id_hex}+")).unwrap(), + op_walk::resolve_op_with_repo(repo, &format!("{op0_id_hex}+")).block_on().unwrap(), *operations[1] ); assert_eq!( - op_walk::resolve_op_with_repo(repo, &format!("{op0_id_hex}++")).unwrap(), + op_walk::resolve_op_with_repo(repo, &format!("{op0_id_hex}++")).block_on().unwrap(), *operations[2] ); assert_matches!( - op_walk::resolve_op_with_repo(repo, &format!("{op0_id_hex}+++")), + op_walk::resolve_op_with_repo(repo, &format!("{op0_id_hex}+++")).block_on(), Err(OpsetEvaluationError::OpsetResolution( OpsetResolutionError::EmptyOperations(_) )) @@ -789,17 +807,17 @@ fn test_resolve_op_parents_children() { // Child of parent assert_eq!( - op_walk::resolve_op_with_repo(repo, &format!("{op2_id_hex}--+")).unwrap(), + op_walk::resolve_op_with_repo(repo, &format!("{op2_id_hex}--+")).block_on().unwrap(), *operations[1] ); // Child at old repo: new operations shouldn't be visible assert_eq!( - op_walk::resolve_op_with_repo(&repos[1], &format!("{op0_id_hex}+")).unwrap(), + op_walk::resolve_op_with_repo(&repos[1], &format!("{op0_id_hex}+")).block_on().unwrap(), *operations[1] ); assert_matches!( - op_walk::resolve_op_with_repo(&repos[0], &format!("{op0_id_hex}+")), + op_walk::resolve_op_with_repo(&repos[0], &format!("{op0_id_hex}+")).block_on(), Err(OpsetEvaluationError::OpsetResolution( OpsetResolutionError::EmptyOperations(_) )) @@ -815,19 +833,19 @@ fn test_resolve_op_parents_children() { // op ids) should be reported, 
not the full expression provided by the user. let op5_id_hex = repo.operation().id().hex(); let parents_op_str = format!("{op5_id_hex}-"); - let error = op_walk::resolve_op_with_repo(&repo, &parents_op_str).unwrap_err(); + let error = op_walk::resolve_op_with_repo(&repo, &parents_op_str).block_on().unwrap_err(); assert_eq!( extract_multiple_operations_error(&error).unwrap(), (&parents_op_str, parent_op_ids) ); let grandparents_op_str = format!("{op5_id_hex}--"); - let error = op_walk::resolve_op_with_repo(&repo, &grandparents_op_str).unwrap_err(); + let error = op_walk::resolve_op_with_repo(&repo, &grandparents_op_str).block_on().unwrap_err(); assert_eq!( extract_multiple_operations_error(&error).unwrap(), (&parents_op_str, parent_op_ids) ); let children_of_parents_op_str = format!("{op5_id_hex}-+"); - let error = op_walk::resolve_op_with_repo(&repo, &children_of_parents_op_str).unwrap_err(); + let error = op_walk::resolve_op_with_repo(&repo, &children_of_parents_op_str).block_on().unwrap_err(); assert_eq!( extract_multiple_operations_error(&error).unwrap(), (&parents_op_str, parent_op_ids) @@ -835,7 +853,7 @@ fn test_resolve_op_parents_children() { let op2_id_hex = operations[2].id().hex(); let op_str = format!("{op2_id_hex}+"); - let error = op_walk::resolve_op_with_repo(&repo, &op_str).unwrap_err(); + let error = op_walk::resolve_op_with_repo(&repo, &op_str).block_on().unwrap_err(); assert_eq!( extract_multiple_operations_error(&error).unwrap(), (&op_str, parent_op_ids) @@ -876,19 +894,36 @@ fn test_walk_ancestors() { // A | // |/ // 0 (initial) - let repo_a = repo_0.start_transaction().commit("op A").unwrap(); + let repo_a = repo_0 + .start_transaction() + .commit("op A") + .block_on() + .unwrap(); let repo_b = repo_0 .start_transaction() .write("op B") + .block_on() .unwrap() .leave_unpublished(); - let repo_c = repo_a.start_transaction().commit("op C").unwrap(); - let repo_d = repo_c.start_transaction().commit("op D").unwrap(); + let repo_c = repo_a + 
.start_transaction() + .commit("op C") + .block_on() + .unwrap(); + let repo_d = repo_c + .start_transaction() + .commit("op D") + .block_on() + .unwrap(); let tx_e = repo_d.start_transaction(); let tx_f = repo_c.start_transaction(); let repo_g = testutils::commit_transactions(vec![tx_e, tx_f]); let [op_e, op_f] = op_parents(repo_g.operation()); - let repo_h = repo_g.start_transaction().commit("op H").unwrap(); + let repo_h = repo_g + .start_transaction() + .commit("op H") + .block_on() + .unwrap(); // At merge, parents are visited in forward order, which isn't important. assert_eq!( @@ -901,7 +936,7 @@ fn test_walk_ancestors() { op_f.clone(), repo_c.operation().clone(), repo_a.operation().clone(), - loader.root_operation(), + loader.root_operation().block_on(), ] ); @@ -913,7 +948,7 @@ fn test_walk_ancestors() { repo_c.operation().clone(), repo_a.operation().clone(), repo_b.operation().clone(), - loader.root_operation(), + loader.root_operation().block_on(), ] ); @@ -997,12 +1032,12 @@ fn test_gc() { write_random_commit(tx.repo_mut()); tx }; - let repo_a = random_tx(&repo_0).commit("op A").unwrap(); - let repo_b = random_tx(&repo_a).commit("op B").unwrap(); - let repo_c = random_tx(&repo_b).commit("op C").unwrap(); - let repo_d = random_tx(&repo_c).commit("op D").unwrap(); - let repo_e = empty_tx(&repo_b).commit("op E").unwrap(); - let repo_f = random_tx(&repo_e).commit("op F").unwrap(); + let repo_a = random_tx(&repo_0).commit("op A").block_on().unwrap(); + let repo_b = random_tx(&repo_a).commit("op B").block_on().unwrap(); + let repo_c = random_tx(&repo_b).commit("op C").block_on().unwrap(); + let repo_d = random_tx(&repo_c).commit("op D").block_on().unwrap(); + let repo_e = empty_tx(&repo_b).commit("op E").block_on().unwrap(); + let repo_f = random_tx(&repo_e).commit("op F").block_on().unwrap(); // Sanity check for the original state let mut expected_op_entries = list_dir(&op_dir); diff --git a/lib/tests/test_refs.rs b/lib/tests/test_refs.rs index 
bd988e40ed3..1069dab14b5 100644 --- a/lib/tests/test_refs.rs +++ b/lib/tests/test_refs.rs @@ -16,6 +16,7 @@ use jj_lib::merge::Merge; use jj_lib::op_store::RefTarget; use jj_lib::refs::merge_ref_targets; use jj_lib::repo::Repo as _; +use pollster::FutureExt as _; use testutils::TestWorkspace; use testutils::write_random_commit; use testutils::write_random_commit_with_parents; @@ -41,7 +42,7 @@ fn test_merge_ref_targets() { let commit5 = write_random_commit_with_parents(tx.repo_mut(), &[&commit1]); let commit6 = write_random_commit_with_parents(tx.repo_mut(), &[&commit5]); let commit7 = write_random_commit_with_parents(tx.repo_mut(), &[&commit5]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let target1 = RefTarget::normal(commit1.id().clone()); let target2 = RefTarget::normal(commit2.id().clone()); diff --git a/lib/tests/test_revset.rs b/lib/tests/test_revset.rs index 13966ce9f9a..4c5e10d80b7 100644 --- a/lib/tests/test_revset.rs +++ b/lib/tests/test_revset.rs @@ -129,7 +129,7 @@ fn build_changed_path_index(repo: &ReadonlyRepo) -> Arc { .build_changed_path_index_at_operation(repo.op_id(), repo.store(), u32::MAX) .block_on() .unwrap(); - repo.reload_at(repo.operation()).unwrap() + repo.reload_at(repo.operation()).block_on().unwrap() } #[test] @@ -174,10 +174,11 @@ fn test_resolve_symbol_commit_id() { .set_author(signature.clone()) .set_committer(signature.clone()) .write() + .block_on() .unwrap(); commits.push(commit); } - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Test the test setup insta::assert_snapshot!(commits.iter().map(|c| c.id().hex()).join("\n"), @r" @@ -315,6 +316,7 @@ fn test_resolve_symbol_change_id(readonly: bool) { .set_author(author.clone()) .set_committer(committer.clone()) .write() + .block_on() .unwrap(); commits.push(commit); } @@ -332,7 +334,7 @@ fn test_resolve_symbol_change_id(readonly: bool) { let _readonly_repo; let repo: &dyn Repo = if 
readonly { - _readonly_repo = tx.commit("test").unwrap(); + _readonly_repo = tx.commit("test").block_on().unwrap(); _readonly_repo.as_ref() } else { tx.repo_mut() @@ -420,6 +422,7 @@ fn test_resolve_symbol_divergent_change_id() { let commit2 = create_random_commit(tx.repo_mut()) .set_change_id(commit1.change_id().clone()) .write() + .block_on() .unwrap(); let change_id = commit1.change_id(); @@ -448,12 +451,12 @@ fn test_resolve_symbol_in_different_disambiguation_context() { for _ in 0..50 { write_random_commit(tx.repo_mut()); } - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); - let commit2 = tx.repo_mut().rewrite_commit(&commit1).write().unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo2 = tx.commit("test").unwrap(); + let commit2 = tx.repo_mut().rewrite_commit(&commit1).write().block_on().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); // Set up disambiguation index which only contains the commit2.id(). 
let id_prefix_context = IdPrefixContext::new(Arc::new(RevsetExtensions::default())) @@ -1099,12 +1102,12 @@ fn test_evaluate_expression_with_hidden_revisions() { let commit2 = write_random_commit(mut_repo); let commit3 = write_random_commit_with_parents(mut_repo, &[&commit1]); let commit4 = write_random_commit_with_parents(mut_repo, &[&commit3]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit3); tx.repo_mut().record_abandoned_commit(&commit4); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Sanity check assert_eq!( @@ -1166,8 +1169,8 @@ fn test_evaluate_expression_root_and_checkout() { let test_workspace = TestWorkspace::init(); let repo = &test_workspace.repo; - let root_operation = repo.loader().root_operation(); - let root_repo = repo.reload_at(&root_operation).unwrap(); + let root_operation = repo.loader().root_operation().block_on(); + let root_repo = repo.reload_at(&root_operation).block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); @@ -2807,7 +2810,7 @@ fn test_evaluate_expression_latest() { let builder = create_random_commit(mut_repo); let mut committer = builder.committer().clone(); committer.timestamp.timestamp = MillisSinceEpoch(sec * 1000); - builder.set_committer(committer).write().unwrap() + builder.set_committer(committer).write().block_on().unwrap() }; let commit1_t3 = write_commit_with_committer_timestamp(3); let commit2_t2 = write_commit_with_committer_timestamp(2); @@ -3223,16 +3226,19 @@ fn test_evaluate_expression_description() { let commit1 = create_random_commit(mut_repo) .set_description("commit 1\n") .write() + .block_on() .unwrap(); let commit2 = create_random_commit(mut_repo) .set_parents(vec![commit1.id().clone()]) 
.set_description("commit 2\n\nblah blah...\n") .write() + .block_on() .unwrap(); let commit3 = create_random_commit(mut_repo) .set_parents(vec![commit2.id().clone()]) .set_description("commit 3\n") .write() + .block_on() .unwrap(); // Can find multiple matches @@ -3309,6 +3315,7 @@ fn test_evaluate_expression_author() { timestamp, }) .write() + .block_on() .unwrap(); let commit2 = create_random_commit(mut_repo) .set_parents(vec![commit1.id().clone()]) @@ -3318,6 +3325,7 @@ fn test_evaluate_expression_author() { timestamp, }) .write() + .block_on() .unwrap(); let commit3 = create_random_commit(mut_repo) .set_parents(vec![commit2.id().clone()]) @@ -3327,6 +3335,7 @@ fn test_evaluate_expression_author() { timestamp, }) .write() + .block_on() .unwrap(); // Can find multiple matches @@ -3419,6 +3428,7 @@ fn test_evaluate_expression_author_date() { timestamp: timestamp2, }) .write() + .block_on() .unwrap(); let commit2 = create_random_commit(mut_repo) .set_parents(vec![commit1.id().clone()]) @@ -3433,6 +3443,7 @@ fn test_evaluate_expression_author_date() { timestamp: timestamp2, }) .write() + .block_on() .unwrap(); let commit3 = create_random_commit(mut_repo) .set_parents(vec![commit2.id().clone()]) @@ -3447,6 +3458,7 @@ fn test_evaluate_expression_author_date() { timestamp: timestamp2, }) .write() + .block_on() .unwrap(); // Can find multiple matches @@ -3485,6 +3497,7 @@ fn test_evaluate_expression_committer_date() { timestamp: timestamp1, }) .write() + .block_on() .unwrap(); let commit2 = create_random_commit(mut_repo) .set_parents(vec![commit1.id().clone()]) @@ -3499,6 +3512,7 @@ fn test_evaluate_expression_committer_date() { timestamp: timestamp2, }) .write() + .block_on() .unwrap(); let commit3 = create_random_commit(mut_repo) .set_parents(vec![commit2.id().clone()]) @@ -3513,6 +3527,7 @@ fn test_evaluate_expression_committer_date() { timestamp: timestamp3, }) .write() + .block_on() .unwrap(); // Can find multiple matches @@ -3547,6 +3562,7 @@ fn 
test_evaluate_expression_mine() { timestamp, }) .write() + .block_on() .unwrap(); let commit2 = create_random_commit(mut_repo) .set_parents(vec![commit1.id().clone()]) @@ -3556,6 +3572,7 @@ fn test_evaluate_expression_mine() { timestamp, }) .write() + .block_on() .unwrap(); // Can find a unique match assert_eq!( @@ -3571,6 +3588,7 @@ fn test_evaluate_expression_mine() { timestamp, }) .write() + .block_on() .unwrap(); // Can find multiple matches assert_eq!( @@ -3617,6 +3635,7 @@ fn test_evaluate_expression_signed() { }) .set_sign_behavior(SignBehavior::Own) .write() + .block_on() .unwrap(); let commit2 = create_random_commit(mut_repo) .set_parents(vec![commit1.id().clone()]) @@ -3627,6 +3646,7 @@ fn test_evaluate_expression_signed() { }) .set_sign_behavior(SignBehavior::Drop) .write() + .block_on() .unwrap(); assert!(commit1.is_signed()); @@ -3660,6 +3680,7 @@ fn test_evaluate_expression_committer() { timestamp, }) .write() + .block_on() .unwrap(); let commit2 = create_random_commit(mut_repo) .set_parents(vec![commit1.id().clone()]) @@ -3669,6 +3690,7 @@ fn test_evaluate_expression_committer() { timestamp, }) .write() + .block_on() .unwrap(); let commit3 = create_random_commit(mut_repo) .set_parents(vec![commit2.id().clone()]) @@ -3678,6 +3700,7 @@ fn test_evaluate_expression_committer() { timestamp, }) .write() + .block_on() .unwrap(); // Can find multiple matches @@ -3743,16 +3766,18 @@ fn test_evaluate_expression_at_operation() { let commit1_op1 = create_random_commit(tx.repo_mut()) .set_description("commit1@op1") .write() + .block_on() .unwrap(); let commit2_op1 = create_random_commit(tx.repo_mut()) .set_description("commit2@op1") .write() + .block_on() .unwrap(); tx.repo_mut().set_local_bookmark_target( "commit1_ref".as_ref(), RefTarget::normal(commit1_op1.id().clone()), ); - let repo1 = tx.commit("test").unwrap(); + let repo1 = tx.commit("test").block_on().unwrap(); let mut tx = repo1.start_transaction(); let commit1_op2 = tx @@ -3760,18 +3785,21 @@ fn 
test_evaluate_expression_at_operation() { .rewrite_commit(&commit1_op1) .set_description("commit1@op2") .write() + .block_on() .unwrap(); let commit3_op2 = create_random_commit(tx.repo_mut()) .set_description("commit3@op2") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo2 = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo2 = tx.commit("test").block_on().unwrap(); let mut tx = repo2.start_transaction(); let _commit4_op3 = create_random_commit(tx.repo_mut()) .set_description("commit4@op3") .write() + .block_on() .unwrap(); // Symbol resolution: @@ -4158,16 +4186,19 @@ fn test_evaluate_expression_filter_combinator() { let commit1 = create_random_commit(mut_repo) .set_description("commit 1") .write() + .block_on() .unwrap(); let commit2 = create_random_commit(mut_repo) .set_parents(vec![commit1.id().clone()]) .set_description("commit 2") .write() + .block_on() .unwrap(); let commit3 = create_random_commit(mut_repo) .set_parents(vec![commit2.id().clone()]) .set_description("commit 3") .write() + .block_on() .unwrap(); // Not intersected with a set node @@ -4262,18 +4293,22 @@ fn test_evaluate_expression_file(indexed: bool) { let commit1 = mut_repo .new_commit(vec![repo.store().root_commit_id().clone()], tree1.id()) .write() + .block_on() .unwrap(); let commit2 = mut_repo .new_commit(vec![commit1.id().clone()], tree2.id()) .write() + .block_on() .unwrap(); let commit3 = mut_repo .new_commit(vec![commit2.id().clone()], tree3.id()) .write() + .block_on() .unwrap(); let commit4 = mut_repo .new_commit(vec![commit3.id().clone()], tree3.id()) .write() + .block_on() .unwrap(); let resolve = |file_path: &RepoPath| -> Vec { @@ -4391,18 +4426,22 @@ fn test_evaluate_expression_diff_contains(indexed: bool) { let commit1 = mut_repo .new_commit(vec![repo.store().root_commit_id().clone()], tree1.id()) .write() + .block_on() .unwrap(); let commit2 = mut_repo 
.new_commit(vec![commit1.id().clone()], tree2.id()) .write() + .block_on() .unwrap(); let commit3 = mut_repo .new_commit(vec![commit2.id().clone()], tree3.id()) .write() + .block_on() .unwrap(); let commit4 = mut_repo .new_commit(vec![commit3.id().clone()], tree4.id()) .write() + .block_on() .unwrap(); let query = |revset_str: &str| { @@ -4491,6 +4530,7 @@ fn test_evaluate_expression_diff_contains_non_utf8() { let commit1 = mut_repo .new_commit(vec![repo.store().root_commit_id().clone()], tree1.id()) .write() + .block_on() .unwrap(); let query = |revset_str: &str| resolve_commit_ids(mut_repo, revset_str); @@ -4517,7 +4557,7 @@ fn test_evaluate_expression_diff_contains_conflict(indexed: bool) { let mut_repo = tx.repo_mut(); let mut create_commit = - |parent_ids, tree_id| mut_repo.new_commit(parent_ids, tree_id).write().unwrap(); + |parent_ids, tree_id| mut_repo.new_commit(parent_ids, tree_id).write().block_on().unwrap(); let file_path = repo_path("file"); let tree1 = create_tree(&repo, &[(file_path, "0\n1\n")]); @@ -4563,7 +4603,7 @@ fn test_evaluate_expression_file_merged_parents(indexed: bool) { let tree4 = create_tree(&repo, &[(file_path1, "1\n4\n"), (file_path2, "2\n1\n3\n")]); let mut create_commit = - |parent_ids, tree_id| mut_repo.new_commit(parent_ids, tree_id).write().unwrap(); + |parent_ids, tree_id| mut_repo.new_commit(parent_ids, tree_id).write().block_on().unwrap(); let commit1 = create_commit(vec![repo.store().root_commit_id().clone()], tree1.id()); let commit2 = create_commit(vec![commit1.id().clone()], tree2.id()); let commit3 = create_commit(vec![commit1.id().clone()], tree3.id()); @@ -4624,7 +4664,7 @@ fn test_evaluate_expression_conflict() { let mut_repo = tx.repo_mut(); let mut create_commit = - |parent_ids, tree_id| mut_repo.new_commit(parent_ids, tree_id).write().unwrap(); + |parent_ids, tree_id| mut_repo.new_commit(parent_ids, tree_id).write().block_on().unwrap(); // Create a few trees, including one with a conflict in `file1` let file_path1 = 
repo_path("file1"); @@ -4677,7 +4717,7 @@ fn test_reverse_graph() { let commit_d = write_random_commit_with_parents(mut_repo, &[&commit_c]); let commit_e = write_random_commit_with_parents(mut_repo, &[&commit_c]); let commit_f = write_random_commit_with_parents(mut_repo, &[&commit_d, &commit_e]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let revset = revset_for_commits( repo.as_ref(), @@ -4741,7 +4781,7 @@ fn test_revset_containing_fn() { let commit_b = write_random_commit(mut_repo); let commit_c = write_random_commit(mut_repo); let commit_d = write_random_commit(mut_repo); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let revset = revset_for_commits(repo.as_ref(), &[&commit_b, &commit_d]); diff --git a/lib/tests/test_revset_optimized.rs b/lib/tests/test_revset_optimized.rs index 29a2d6c794e..f9c2cdb1aaf 100644 --- a/lib/tests/test_revset_optimized.rs +++ b/lib/tests/test_revset_optimized.rs @@ -34,6 +34,7 @@ use jj_lib::revset::RevsetFilterPredicate; use jj_lib::rewrite::RebaseOptions; use jj_lib::rewrite::RebasedCommit; use jj_lib::settings::UserSettings; +use pollster::FutureExt as _; use proptest::prelude::*; use testutils::TestRepo; @@ -57,6 +58,7 @@ fn write_new_commit<'a>( repo.new_commit(parents, tree_id) .set_description(desc) .write() + .block_on() .unwrap() } @@ -66,6 +68,7 @@ fn rebase_descendants(repo: &mut MutableRepo) -> Vec { RebasedCommit::Rewritten(commit) => commits.push(commit), RebasedCommit::Abandoned { .. 
} => {} }) + .block_on() .unwrap(); commits } @@ -190,7 +193,7 @@ fn test_mostly_linear() { let commits = vec![ commit0, commit1, commit2, commit3, commit4, commit5, commit6, commit7, commit8, commit9, ]; - let repo = tx.commit("a").unwrap(); + let repo = tx.commit("a").block_on().unwrap(); // Commit ids for reference insta::assert_snapshot!( @@ -244,7 +247,7 @@ fn test_weird_merges() { let commits = vec![ commit0, commit1, commit2, commit3, commit4, commit5, commit6, commit7, commit8, ]; - let repo = tx.commit("a").unwrap(); + let repo = tx.commit("a").block_on().unwrap(); // Commit ids for reference insta::assert_snapshot!( @@ -301,18 +304,18 @@ fn test_feature_branches() { let commit3 = write_new_commit(tx.repo_mut(), "3", [&commit0]); let commit4 = write_new_commit(tx.repo_mut(), "4", [&commit3]); let commit5 = write_new_commit(tx.repo_mut(), "5", [&commit4]); - let repo = tx.commit("a").unwrap(); + let repo = tx.commit("a").block_on().unwrap(); // Merge branch 2 let mut tx = repo.start_transaction(); let commit6 = write_new_commit(tx.repo_mut(), "6", [&commit0, &commit2]); - let repo = tx.commit("a").unwrap(); + let repo = tx.commit("a").block_on().unwrap(); // Fetch merged branch 7 let mut tx = repo.start_transaction(); let commit7 = write_new_commit(tx.repo_mut(), "7", [&commit6]); let commit8 = write_new_commit(tx.repo_mut(), "8", [&commit6, &commit7]); - let repo = tx.commit("a").unwrap(); + let repo = tx.commit("a").block_on().unwrap(); // Merge branch 5 let mut tx = repo.start_transaction(); @@ -320,7 +323,7 @@ fn test_feature_branches() { let commits = vec![ commit0, commit1, commit2, commit3, commit4, commit5, commit6, commit7, commit8, commit9, ]; - let repo = tx.commit("a").unwrap(); + let repo = tx.commit("a").block_on().unwrap(); // Commit ids for reference insta::assert_snapshot!( @@ -372,7 +375,7 @@ fn test_rewritten() { let commit4 = write_new_commit(tx.repo_mut(), "4", [&commit1]); let commit5 = write_new_commit(tx.repo_mut(), "5", [&commit4, 
&commit2]); let mut commits = vec![commit0, commit1, commit2, commit3, commit4, commit5]; - let repo = tx.commit("a").unwrap(); + let repo = tx.commit("a").block_on().unwrap(); // Rewrite 2, rebase 3 and 5 let mut tx = repo.start_transaction(); @@ -381,16 +384,17 @@ fn test_rewritten() { .rewrite_commit(&commits[2]) .set_description("2b") .write() + .block_on() .unwrap(); commits.push(commit2b); commits.extend(rebase_descendants(tx.repo_mut())); - let repo = tx.commit("b").unwrap(); + let repo = tx.commit("b").block_on().unwrap(); // Abandon 4, rebase 5 let mut tx = repo.start_transaction(); tx.repo_mut().record_abandoned_commit(&commits[4]); commits.extend(rebase_descendants(tx.repo_mut())); - let repo = tx.commit("c").unwrap(); + let repo = tx.commit("c").block_on().unwrap(); // Commit ids for reference insta::assert_snapshot!( diff --git a/lib/tests/test_rewrite.rs b/lib/tests/test_rewrite.rs index ddd797a235e..4dfd3eddf72 100644 --- a/lib/tests/test_rewrite.rs +++ b/lib/tests/test_rewrite.rs @@ -86,6 +86,7 @@ fn test_merge_criss_cross() { .new_commit(parents, tree_id) .set_description(description) .write() + .block_on() .unwrap() }; let commit_a = make_commit( @@ -768,7 +769,7 @@ fn test_rebase_descendants_multiple_swap() { .set_rewritten_commit(commit_b.id().clone(), commit_d.id().clone()); tx.repo_mut() .set_rewritten_commit(commit_d.id().clone(), commit_b.id().clone()); - let _ = tx.repo_mut().rebase_descendants(); // Panics because of the cycle + let _ = tx.repo_mut().rebase_descendants().block_on(); // Panics because of the cycle } #[test] @@ -791,7 +792,7 @@ fn test_rebase_descendants_multiple_no_descendants() { .set_rewritten_commit(commit_b.id().clone(), commit_c.id().clone()); tx.repo_mut() .set_rewritten_commit(commit_c.id().clone(), commit_b.id().clone()); - let _ = tx.repo_mut().rebase_descendants(); // Panics because of the cycle + let _ = tx.repo_mut().rebase_descendants().block_on(); // Panics because of the cycle } #[test] @@ -887,6 +888,7 @@ fn 
test_rebase_descendants_repeated() { .rewrite_commit(&commit_b) .set_description("b2") .write() + .block_on() .unwrap(); let rebase_map = rebase_descendants_with_options_return_map(tx.repo_mut(), &RebaseOptions::default()); @@ -911,6 +913,7 @@ fn test_rebase_descendants_repeated() { .rewrite_commit(&commit_b2) .set_description("b3") .write() + .block_on() .unwrap(); let rebase_map = rebase_descendants_with_options_return_map(tx.repo_mut(), &RebaseOptions::default()); @@ -946,6 +949,7 @@ fn test_rebase_descendants_contents() { .repo_mut() .new_commit(vec![repo.store().root_commit_id().clone()], tree1.id()) .write() + .block_on() .unwrap(); let path2 = repo_path("file2"); let tree2 = create_tree(repo, &[(path2, "content")]); @@ -953,6 +957,7 @@ fn test_rebase_descendants_contents() { .repo_mut() .new_commit(vec![commit_a.id().clone()], tree2.id()) .write() + .block_on() .unwrap(); let path3 = repo_path("file3"); let tree3 = create_tree(repo, &[(path3, "content")]); @@ -960,6 +965,7 @@ fn test_rebase_descendants_contents() { .repo_mut() .new_commit(vec![commit_b.id().clone()], tree3.id()) .write() + .block_on() .unwrap(); let path4 = repo_path("file4"); let tree4 = create_tree(repo, &[(path4, "content")]); @@ -967,6 +973,7 @@ fn test_rebase_descendants_contents() { .repo_mut() .new_commit(vec![commit_a.id().clone()], tree4.id()) .write() + .block_on() .unwrap(); tx.repo_mut() @@ -1013,11 +1020,11 @@ fn test_rebase_descendants_basic_bookmark_update() { let commit_b = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a]); tx.repo_mut() .set_local_bookmark_target("main".as_ref(), RefTarget::normal(commit_b.id().clone())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); - let commit_b2 = tx.repo_mut().rewrite_commit(&commit_b).write().unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + let commit_b2 = tx.repo_mut().rewrite_commit(&commit_b).write().block_on().unwrap(); + 
tx.repo_mut().rebase_descendants().block_on().unwrap(); assert_eq!( tx.repo().get_local_bookmark("main".as_ref()), RefTarget::normal(commit_b2.id().clone()) @@ -1047,7 +1054,7 @@ fn test_rebase_descendants_bookmark_move_two_steps() { let commit_c = write_random_commit_with_parents(tx.repo_mut(), &[&commit_b]); tx.repo_mut() .set_local_bookmark_target("main".as_ref(), RefTarget::normal(commit_c.id().clone())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let commit_b2 = tx @@ -1055,14 +1062,16 @@ fn test_rebase_descendants_bookmark_move_two_steps() { .rewrite_commit(&commit_b) .set_description("different") .write() + .block_on() .unwrap(); let commit_c2 = tx .repo_mut() .rewrite_commit(&commit_c) .set_description("more different") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); let heads = tx.repo().view().heads(); assert_eq!(heads.len(), 1); let c3_id = heads.iter().next().unwrap().clone(); @@ -1101,11 +1110,11 @@ fn test_rebase_descendants_basic_bookmark_update_with_non_local_bookmark() { .set_remote_bookmark(remote_symbol("main", "origin"), commit_b_remote_ref.clone()); tx.repo_mut() .set_local_tag_target("v1".as_ref(), RefTarget::normal(commit_b.id().clone())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); - let commit_b2 = tx.repo_mut().rewrite_commit(&commit_b).write().unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + let commit_b2 = tx.repo_mut().rewrite_commit(&commit_b).write().block_on().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert_eq!( tx.repo().get_local_bookmark("main".as_ref()), RefTarget::normal(commit_b2.id().clone()) @@ -1154,7 +1163,7 @@ fn test_rebase_descendants_update_bookmark_after_abandon(delete_abandoned_bookma 
.set_remote_bookmark(remote_symbol("main", "origin"), commit_b_remote_ref.clone()); tx.repo_mut() .set_local_bookmark_target("other".as_ref(), RefTarget::normal(commit_c.id().clone())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit_b); @@ -1214,16 +1223,17 @@ fn test_rebase_descendants_update_bookmarks_after_divergent_rewrite() { .set_local_bookmark_target("main".as_ref(), RefTarget::normal(commit_b.id().clone())); tx.repo_mut() .set_local_bookmark_target("other".as_ref(), RefTarget::normal(commit_c.id().clone())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); - let commit_b2 = tx.repo_mut().rewrite_commit(&commit_b).write().unwrap(); + let commit_b2 = tx.repo_mut().rewrite_commit(&commit_b).write().block_on().unwrap(); // Different description so they're not the same commit let commit_b3 = tx .repo_mut() .rewrite_commit(&commit_b) .set_description("different") .write() + .block_on() .unwrap(); // Different description so they're not the same commit let commit_b4 = tx @@ -1231,6 +1241,7 @@ fn test_rebase_descendants_update_bookmarks_after_divergent_rewrite() { .rewrite_commit(&commit_b) .set_description("more different") .write() + .block_on() .unwrap(); tx.repo_mut().set_divergent_rewrite( commit_b.id().clone(), @@ -1240,18 +1251,19 @@ fn test_rebase_descendants_update_bookmarks_after_divergent_rewrite() { commit_b4.id().clone(), ], ); - let commit_b41 = tx.repo_mut().rewrite_commit(&commit_b4).write().unwrap(); + let commit_b41 = tx.repo_mut().rewrite_commit(&commit_b4).write().block_on().unwrap(); let commit_b42 = tx .repo_mut() .rewrite_commit(&commit_b4) .set_description("different") .write() + .block_on() .unwrap(); tx.repo_mut().set_divergent_rewrite( commit_b4.id().clone(), vec![commit_b41.id().clone(), commit_b42.id().clone()], ); - 
tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); let main_target = tx.repo().get_local_bookmark("main".as_ref()); assert!(main_target.has_conflict()); @@ -1306,24 +1318,26 @@ fn test_rebase_descendants_rewrite_updates_bookmark_conflict() { [commit_b.id().clone(), commit_c.id().clone()], ), ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); - let commit_a2 = tx.repo_mut().rewrite_commit(&commit_a).write().unwrap(); + let commit_a2 = tx.repo_mut().rewrite_commit(&commit_a).write().block_on().unwrap(); // Different description so they're not the same commit let commit_a3 = tx .repo_mut() .rewrite_commit(&commit_a) .set_description("different") .write() + .block_on() .unwrap(); - let commit_b2 = tx.repo_mut().rewrite_commit(&commit_b).write().unwrap(); + let commit_b2 = tx.repo_mut().rewrite_commit(&commit_b).write().block_on().unwrap(); // Different description so they're not the same commit let commit_b3 = tx .repo_mut() .rewrite_commit(&commit_b) .set_description("different") .write() + .block_on() .unwrap(); tx.repo_mut().set_divergent_rewrite( commit_a.id().clone(), @@ -1333,7 +1347,7 @@ fn test_rebase_descendants_rewrite_updates_bookmark_conflict() { commit_b.id().clone(), vec![commit_b2.id().clone(), commit_b3.id().clone()], ); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); let target = tx.repo().get_local_bookmark("main".as_ref()); assert!(target.has_conflict()); @@ -1385,7 +1399,7 @@ fn test_rebase_descendants_rewrite_resolves_bookmark_conflict() { [commit_b.id().clone(), commit_c.id().clone()], ), ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let commit_b2 = tx @@ -1393,8 +1407,9 @@ fn test_rebase_descendants_rewrite_resolves_bookmark_conflict() { .rewrite_commit(&commit_b) 
.set_parents(vec![commit_c.id().clone()]) .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); assert_eq!( tx.repo().get_local_bookmark("main".as_ref()), RefTarget::normal(commit_b2.id().clone()) @@ -1429,7 +1444,7 @@ fn test_rebase_descendants_bookmark_delete_modify_abandon(delete_abandoned_bookm "main".as_ref(), RefTarget::from_legacy_form([commit_a.id().clone()], [commit_b.id().clone()]), ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit_b); @@ -1472,7 +1487,7 @@ fn test_rebase_descendants_bookmark_move_forward_abandon(delete_abandoned_bookma Some(commit_c.id().clone()), ])), ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit_b); @@ -1523,7 +1538,7 @@ fn test_rebase_descendants_bookmark_move_sideways_abandon(delete_abandoned_bookm Some(commit_c.id().clone()), ])), ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit_b); @@ -1578,7 +1593,7 @@ fn test_rebase_descendants_update_checkout() { tx.repo_mut() .set_wc_commit(ws3_name.clone(), commit_a.id().clone()) .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let commit_c = tx @@ -1586,9 +1601,10 @@ fn test_rebase_descendants_update_checkout() { .rewrite_commit(&commit_b) .set_description("C") .write() + .block_on() .unwrap(); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Workspaces 1 and 2 had B checked 
out, so they get updated to C. Workspace 3 // had A checked out, so it doesn't get updated. @@ -1623,12 +1639,12 @@ fn test_rebase_descendants_update_checkout_abandoned() { tx.repo_mut() .set_wc_commit(ws3_name.clone(), commit_a.id().clone()) .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit_b); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // Workspaces 1 and 2 had B checked out, so they get updated to the same new // commit on top of C. Workspace 3 had A checked out, so it doesn't get updated. @@ -1666,12 +1682,12 @@ fn test_rebase_descendants_update_checkout_abandoned_merge() { tx.repo_mut() .set_wc_commit(ws_name.clone(), commit_d.id().clone()) .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); tx.repo_mut().record_abandoned_commit(&commit_d); - tx.repo_mut().rebase_descendants().unwrap(); - let repo = tx.commit("test").unwrap(); + tx.repo_mut().rebase_descendants().block_on().unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let new_checkout_id = repo.view().get_wc_commit_id(&ws_name).unwrap(); let checkout = repo.store().get_commit(new_checkout_id).unwrap(); @@ -1732,6 +1748,7 @@ fn test_empty_commit_option(empty_behavior: EmptyBehavior) { ) .set_tree_id(tree.id()) .write() + .block_on() .unwrap() }; let commit_b = create_commit(&[&commit_a], &tree_b); @@ -1850,25 +1867,30 @@ fn test_rebase_abandoning_empty() { .set_parents(vec![commit_c.id().clone()]) .set_tree_id(commit_c.tree_id().clone()) .write() + .block_on() .unwrap(); let commit_e = create_random_commit(tx.repo_mut()) .set_parents(vec![commit_c.id().clone()]) .set_tree_id(commit_c.tree_id().clone()) .write() + 
.block_on() .unwrap(); let commit_b2 = create_random_commit(tx.repo_mut()) .set_parents(vec![commit_a.id().clone()]) .set_tree_id(commit_b.tree_id().clone()) .write() + .block_on() .unwrap(); let commit_f = create_random_commit(tx.repo_mut()) .set_parents(vec![commit_e.id().clone()]) .write() + .block_on() .unwrap(); let commit_g = create_random_commit(tx.repo_mut()) .set_parents(vec![commit_e.id().clone()]) .set_tree_id(commit_e.tree_id().clone()) .write() + .block_on() .unwrap(); let workspace = WorkspaceNameBuf::from("ws"); @@ -1884,7 +1906,9 @@ fn test_rebase_abandoning_empty() { simplify_ancestor_merge: true, }; let rewriter = CommitRewriter::new(tx.repo_mut(), commit_b, vec![commit_b2.id().clone()]); - rebase_commit_with_options(rewriter, &rebase_options).unwrap(); + rebase_commit_with_options(rewriter, &rebase_options) + .block_on() + .unwrap(); let rebase_map = rebase_descendants_with_options_return_map(tx.repo_mut(), &rebase_options); assert_eq!(rebase_map.len(), 5); let new_commit_c = assert_rebased_onto(tx.repo(), &rebase_map, &commit_c, &[commit_b2.id()]); @@ -1992,6 +2016,7 @@ fn test_find_duplicate_divergent_commits() { store.change_id_length() ])) .write() + .block_on() .unwrap() }; @@ -2010,6 +2035,7 @@ fn test_find_duplicate_divergent_commits() { &[commit_a2.id().clone()], &MoveCommitsTarget::Roots(vec![commit_d.id().clone()]), ) + .block_on() .unwrap(); // Commits b2 and c2 are duplicates assert_eq!(duplicate_commits, &[commit_c2.clone(), commit_b2.clone()]); @@ -2020,6 +2046,7 @@ fn test_find_duplicate_divergent_commits() { &[commit_e.id().clone()], &MoveCommitsTarget::Roots(vec![commit_b1.id().clone()]), ) + .block_on() .unwrap(); // Commits b1 and c1 are duplicates. Commit a2 is not a duplicate, because // it already had a1 as an ancestor before the rebase. 
@@ -2035,6 +2062,7 @@ fn test_find_duplicate_divergent_commits() { commit_e.id().clone(), ]), ) + .block_on() .unwrap(); // Commit c2 is a duplicate assert_eq!(duplicate_commits, std::slice::from_ref(&commit_c2)); diff --git a/lib/tests/test_rewrite_duplicate.rs b/lib/tests/test_rewrite_duplicate.rs index f487e5fbfdd..42644f9eb01 100644 --- a/lib/tests/test_rewrite_duplicate.rs +++ b/lib/tests/test_rewrite_duplicate.rs @@ -49,28 +49,33 @@ fn test_duplicate_linear_contents() { empty_tree_id.clone(), ) .write() + .block_on() .unwrap(); let commit_b = tx .repo_mut() .new_commit(vec![commit_a.id().clone()], tree_1.id()) .write() + .block_on() .unwrap(); let commit_c = tx .repo_mut() .new_commit(vec![commit_b.id().clone()], tree_1_2.id()) .write() + .block_on() .unwrap(); let commit_d = tx .repo_mut() .new_commit(vec![commit_c.id().clone()], tree_2.id()) .write() + .block_on() .unwrap(); let commit_e = tx .repo_mut() .new_commit(vec![commit_d.id().clone()], tree_2.id()) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let duplicate_in_between = |tx: &mut Transaction, target_commits: &[&CommitId], diff --git a/lib/tests/test_rewrite_transform.rs b/lib/tests/test_rewrite_transform.rs index c0164dae2c2..fd8153016e8 100644 --- a/lib/tests/test_rewrite_transform.rs +++ b/lib/tests/test_rewrite_transform.rs @@ -19,6 +19,7 @@ use jj_lib::repo::Repo as _; use jj_lib::rewrite::RewriteRefsOptions; use maplit::hashmap; use maplit::hashset; +use pollster::FutureExt as _; use testutils::TestRepo; use testutils::write_random_commit; use testutils::write_random_commit_with_parents; @@ -56,11 +57,12 @@ fn test_transform_descendants_sync() { rewriter.abandon(); } else { let old_commit_id = rewriter.old_commit().id().clone(); - let new_commit = rewriter.rebase().await?.write()?; + let new_commit = rewriter.rebase().await?.write().await?; rebased.insert(old_commit_id, new_commit); } Ok(()) }) + .block_on() 
.unwrap(); assert_eq!(rebased.len(), 4); let new_commit_b = rebased.get(commit_b.id()).unwrap(); @@ -105,10 +107,11 @@ fn test_transform_descendants_sync_linearize_merge() { .transform_descendants(vec![commit_c.id().clone()], async |mut rewriter| { rewriter.replace_parent(commit_a.id(), [commit_b.id()]); let old_commit_id = rewriter.old_commit().id().clone(); - let new_commit = rewriter.rebase().await?.write()?; + let new_commit = rewriter.rebase().await?.write().await?; rebased.insert(old_commit_id, new_commit); Ok(()) }) + .block_on() .unwrap(); assert_eq!(rebased.len(), 1); let new_commit_c = rebased.get(commit_c.id()).unwrap(); @@ -167,11 +170,12 @@ fn test_transform_descendants_new_parents_map() { let new_commit_b: &Commit = rebased.get(commit_b.id()).unwrap(); rewriter.replace_parent(new_commit_c.id(), [new_commit_b.id()]); } - let new_commit = rewriter.rebase().await?.write()?; + let new_commit = rewriter.rebase().await?.write().await?; rebased.insert(old_commit_id, new_commit); Ok(()) }, ) + .block_on() .unwrap(); assert_eq!(rebased.len(), 5); let new_commit_b = rebased.get(commit_b.id()).unwrap(); diff --git a/lib/tests/test_signing.rs b/lib/tests/test_signing.rs index c01b352dbbb..e8bce03bade 100644 --- a/lib/tests/test_signing.rs +++ b/lib/tests/test_signing.rs @@ -10,6 +10,7 @@ use jj_lib::signing::SignBehavior; use jj_lib::signing::Signer; use jj_lib::signing::Verification; use jj_lib::test_signing_backend::TestSigningBackend; +use pollster::FutureExt as _; use test_case::test_case; use testutils::TestRepoBackend; use testutils::TestWorkspace; @@ -73,13 +74,15 @@ fn manual(backend: TestRepoBackend) { let commit1 = create_random_commit(tx.repo_mut()) .set_sign_behavior(SignBehavior::Own) .write() + .block_on() .unwrap(); let commit2 = create_random_commit(tx.repo_mut()) .set_sign_behavior(SignBehavior::Own) .set_author(someone_else()) .write() + .block_on() .unwrap(); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let commit1 = 
repo.store().get_commit(commit1.id()).unwrap(); assert_eq!(commit1.verification().unwrap(), good_verification()); @@ -102,12 +105,13 @@ fn keep_on_rewrite(backend: TestRepoBackend) { let commit = create_random_commit(tx.repo_mut()) .set_sign_behavior(SignBehavior::Own) .write() + .block_on() .unwrap(); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); - let rewritten = mut_repo.rewrite_commit(&commit).write().unwrap(); + let rewritten = mut_repo.rewrite_commit(&commit).write().block_on().unwrap(); let commit = repo.store().get_commit(rewritten.id()).unwrap(); assert_eq!(commit.verification().unwrap(), good_verification()); @@ -127,8 +131,9 @@ fn manual_drop_on_rewrite(backend: TestRepoBackend) { let commit = create_random_commit(tx.repo_mut()) .set_sign_behavior(SignBehavior::Own) .write() + .block_on() .unwrap(); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); @@ -136,6 +141,7 @@ fn manual_drop_on_rewrite(backend: TestRepoBackend) { .rewrite_commit(&commit) .set_sign_behavior(SignBehavior::Drop) .write() + .block_on() .unwrap(); let commit = repo.store().get_commit(rewritten.id()).unwrap(); @@ -156,8 +162,9 @@ fn forced(backend: TestRepoBackend) { let commit = create_random_commit(tx.repo_mut()) .set_author(someone_else()) .write() + .block_on() .unwrap(); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let commit = repo.store().get_commit(commit.id()).unwrap(); assert_eq!(commit.verification().unwrap(), good_verification()); @@ -175,7 +182,7 @@ fn configured(backend: TestRepoBackend) { let repo = repo.clone(); let mut tx = repo.start_transaction(); let commit = write_random_commit(tx.repo_mut()); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let commit = repo.store().get_commit(commit.id()).unwrap(); 
assert_eq!(commit.verification().unwrap(), good_verification()); @@ -195,15 +202,16 @@ fn drop_behavior(backend: TestRepoBackend) { let commit = create_random_commit(tx.repo_mut()) .set_sign_behavior(SignBehavior::Own) .write() + .block_on() .unwrap(); - tx.commit("test").unwrap(); + tx.commit("test").block_on().unwrap(); let original_commit = repo.store().get_commit(commit.id()).unwrap(); assert_eq!(original_commit.verification().unwrap(), good_verification()); let mut tx = repo.start_transaction(); let mut_repo = tx.repo_mut(); - let rewritten = mut_repo.rewrite_commit(&original_commit).write().unwrap(); + let rewritten = mut_repo.rewrite_commit(&original_commit).write().block_on().unwrap(); let rewritten_commit = repo.store().get_commit(rewritten.id()).unwrap(); assert_eq!(rewritten_commit.verification().unwrap(), None); diff --git a/lib/tests/test_view.rs b/lib/tests/test_view.rs index 9665ade7e32..37704ffd069 100644 --- a/lib/tests/test_view.rs +++ b/lib/tests/test_view.rs @@ -26,6 +26,7 @@ use jj_lib::ref_name::WorkspaceNameBuf; use jj_lib::repo::Repo as _; use maplit::btreemap; use maplit::hashset; +use pollster::FutureExt as _; use test_case::test_case; use testutils::TestRepo; use testutils::commit_transactions; @@ -64,7 +65,7 @@ fn test_heads_fork() { let initial = write_random_commit(tx.repo_mut()); let child1 = write_random_commit_with_parents(tx.repo_mut(), &[&initial]); let child2 = write_random_commit_with_parents(tx.repo_mut(), &[&initial]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); assert_eq!( *repo.view().heads(), @@ -85,7 +86,7 @@ fn test_heads_merge() { let child1 = write_random_commit_with_parents(tx.repo_mut(), &[&initial]); let child2 = write_random_commit_with_parents(tx.repo_mut(), &[&initial]); let merge = write_random_commit_with_parents(tx.repo_mut(), &[&child1, &child2]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); 
assert_eq!(*repo.view().heads(), hashset! {merge.id().clone()}); } @@ -101,7 +102,7 @@ fn test_merge_views_heads() { let head_unchanged = write_random_commit(mut_repo); let head_remove_tx1 = write_random_commit(mut_repo); let head_remove_tx2 = write_random_commit(mut_repo); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); tx1.repo_mut().remove_head(head_remove_tx1.id()); @@ -165,7 +166,7 @@ fn test_merge_views_checkout() { .repo_mut() .set_wc_commit(ws5_name.clone(), commit1.id().clone()) .unwrap(); - let repo = initial_tx.commit("test").unwrap(); + let repo = initial_tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); tx1.repo_mut() @@ -247,7 +248,7 @@ fn test_merge_views_bookmarks() { "feature".as_ref(), RefTarget::normal(feature_bookmark_local_tx0.id().clone()), ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let main_bookmark_local_tx1 = write_random_commit(tx1.repo_mut()); @@ -321,7 +322,7 @@ fn test_merge_views_tags() { mut_repo.set_local_tag_target("v1.0".as_ref(), RefTarget::normal(v1_tx0.id().clone())); let v2_tx0 = write_random_commit(mut_repo); mut_repo.set_local_tag_target("v2.0".as_ref(), RefTarget::normal(v2_tx0.id().clone())); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let v1_tx1 = write_random_commit(tx1.repo_mut()); @@ -384,7 +385,7 @@ fn test_merge_views_remote_tags() { state: RemoteRefState::Tracked, }, ); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); // v1.0@origin: tx0 (new) -> tx1 (new) // v2.0@upstream: tx0 (tracked) -> tx1 (tracked) @@ -471,7 +472,7 @@ fn test_merge_views_git_refs() { "refs/heads/feature".as_ref(), RefTarget::normal(feature_bookmark_tx0.id().clone()), ); - let repo = tx.commit("test").unwrap(); 
+ let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let main_bookmark_tx1 = write_random_commit(tx1.repo_mut()); @@ -521,7 +522,7 @@ fn test_merge_views_git_heads() { let tx0_head = write_random_commit(tx0.repo_mut()); tx0.repo_mut() .set_git_head_target(RefTarget::normal(tx0_head.id().clone())); - let repo = tx0.commit("test").unwrap(); + let repo = tx0.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let tx1_head = write_random_commit(tx1.repo_mut()); @@ -549,7 +550,7 @@ fn test_merge_views_divergent() { let mut tx = test_repo.repo.start_transaction(); let commit_a = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let commit_a2 = tx1 @@ -557,8 +558,9 @@ fn test_merge_views_divergent() { .rewrite_commit(&commit_a) .set_description("A2") .write() + .block_on() .unwrap(); - tx1.repo_mut().rebase_descendants().unwrap(); + tx1.repo_mut().rebase_descendants().block_on().unwrap(); let mut tx2 = repo.start_transaction(); let commit_a3 = tx2 @@ -566,8 +568,9 @@ fn test_merge_views_divergent() { .rewrite_commit(&commit_a) .set_description("A3") .write() + .block_on() .unwrap(); - tx2.repo_mut().rebase_descendants().unwrap(); + tx2.repo_mut().rebase_descendants().block_on().unwrap(); let repo = commit_transactions(vec![tx1, tx2]); @@ -587,7 +590,7 @@ fn test_merge_views_child_on_rewritten(child_first: bool) { let mut tx = test_repo.repo.start_transaction(); let commit_a = write_random_commit(tx.repo_mut()); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let commit_b = write_random_commit_with_parents(tx1.repo_mut(), &[&commit_a]); @@ -598,8 +601,9 @@ fn test_merge_views_child_on_rewritten(child_first: bool) { .rewrite_commit(&commit_a) .set_description("A2") .write() + .block_on() .unwrap(); - 
tx2.repo_mut().rebase_descendants().unwrap(); + tx2.repo_mut().rebase_descendants().block_on().unwrap(); let repo = if child_first { commit_transactions(vec![tx1, tx2]) @@ -632,8 +636,9 @@ fn test_merge_views_child_on_rewritten_divergent(on_rewritten: bool, child_first let commit_a3 = create_random_commit(tx.repo_mut()) .set_change_id(commit_a2.change_id().clone()) .write() + .block_on() .unwrap(); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let parent = if on_rewritten { &commit_a2 } else { &commit_a3 }; @@ -645,8 +650,9 @@ fn test_merge_views_child_on_rewritten_divergent(on_rewritten: bool, child_first .rewrite_commit(&commit_a2) .set_description("A4") .write() + .block_on() .unwrap(); - tx2.repo_mut().rebase_descendants().unwrap(); + tx2.repo_mut().rebase_descendants().block_on().unwrap(); let repo = if child_first { commit_transactions(vec![tx1, tx2]) @@ -682,14 +688,14 @@ fn test_merge_views_child_on_abandoned(child_first: bool) { let mut tx = test_repo.repo.start_transaction(); let commit_a = write_random_commit(tx.repo_mut()); let commit_b = write_random_commit_with_parents(tx.repo_mut(), &[&commit_a]); - let repo = tx.commit("test").unwrap(); + let repo = tx.commit("test").block_on().unwrap(); let mut tx1 = repo.start_transaction(); let commit_c = write_random_commit_with_parents(tx1.repo_mut(), &[&commit_b]); let mut tx2 = repo.start_transaction(); tx2.repo_mut().record_abandoned_commit(&commit_b); - tx2.repo_mut().rebase_descendants().unwrap(); + tx2.repo_mut().rebase_descendants().block_on().unwrap(); let repo = if child_first { commit_transactions(vec![tx1, tx2]) diff --git a/lib/testutils/src/lib.rs b/lib/testutils/src/lib.rs index d626f13158f..2fccfdabe02 100644 --- a/lib/testutils/src/lib.rs +++ b/lib/testutils/src/lib.rs @@ -197,6 +197,7 @@ impl TestEnvironment { RepoLoader::init_from_file_system(settings, repo_path, &self.default_store_factories()) .unwrap() 
.load_at_head() + .block_on() .unwrap() } } @@ -261,6 +262,7 @@ impl TestRepo { ReadonlyRepo::default_index_store_initializer(), ReadonlyRepo::default_submodule_store_initializer(), ) + .block_on() .unwrap(); Self { @@ -360,10 +362,10 @@ pub fn commit_transactions(txs: Vec) -> Arc { let repo_loader = txs[0].base_repo().loader().clone(); let mut op_ids = vec![]; for tx in txs { - op_ids.push(tx.commit("test").unwrap().op_id().clone()); + op_ids.push(tx.commit("test").block_on().unwrap().op_id().clone()); std::thread::sleep(std::time::Duration::from_millis(1)); } - let repo = repo_loader.load_at_head().unwrap(); + let repo = repo_loader.load_at_head().block_on().unwrap(); // Test the setup. The assumption here is that the parent order matches the // order in which they were merged (which currently matches the transaction // commit order), so we want to know make sure they appear in a certain @@ -438,7 +440,7 @@ impl TestTreeBuilder { } pub fn write_single_tree(self) -> Tree { - let id = self.tree_builder.write_tree().unwrap(); + let id = self.tree_builder.write_tree().block_on().unwrap(); self.store.get_tree(RepoPathBuf::root(), &id).unwrap() } @@ -625,6 +627,7 @@ pub fn write_random_commit_with_parents(mut_repo: &mut MutableRepo, parents: &[& create_random_commit(mut_repo) .set_parents(parents.iter().map(|commit| commit.id().clone()).collect()) .write() + .block_on() .unwrap() } @@ -657,6 +660,7 @@ pub fn rebase_descendants_with_options_return_map( }; rebased.insert(old_commit_id, new_commit_id); }) + .block_on() .unwrap(); rebased }