Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion test/src/commands/destroy.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use std::path::Path;
use anyhow::{Context, Result};
use tokio::process::Command;

use crate::helpers::{ci_log_group, run_cmd, write_lifecycle};
use crate::helpers::{ci_log_group, delete_backend_state, run_cmd, write_lifecycle};

const MAX_DESTROY_ATTEMPTS: u32 = 3;

Expand Down Expand Up @@ -38,6 +38,7 @@ pub async fn phase_destroy(dir: &Path, rm: bool) -> Result<()> {
}

if rm {
delete_backend_state(dir).await?;
tokio::fs::remove_dir_all(dir).await?;
println!(
"\nDestroy completed successfully. Removed {}",
Expand Down
4 changes: 3 additions & 1 deletion test/src/commands/init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use tokio::process::Command;
use crate::cli::InitProvider;
use crate::helpers::{
ci_log_group, example_dir, generate_test_run_id, project_root, run_cmd, runs_dir,
write_lifecycle,
upload_tfvars_to_backend, write_lifecycle,
};
use crate::types::{CloudProvider, CommonTfVars, TfVars};

Expand Down Expand Up @@ -71,6 +71,8 @@ pub async fn phase_init(provider_args: &InitProvider) -> Result<PathBuf> {
println!("{backend_tf}");
}

upload_tfvars_to_backend(&dest).await?;

println!("\nRunning terraform init...");
run_cmd(Command::new("terraform").arg("init").current_dir(&dest))
.await
Expand Down
4 changes: 3 additions & 1 deletion test/src/commands/sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use anyhow::Result;
use crate::commands::init::{
DevOverrides, copy_example_files, inject_dev_overrides, write_dev_variables_tf,
};
use crate::helpers::{ci_log_group, example_dir, project_root, read_tfvars};
use crate::helpers::{ci_log_group, example_dir, project_root, read_tfvars, upload_tfvars_to_backend};

/// Re-copies example .tf files into an existing test run directory,
/// overwriting the current versions. Useful for picking up local
Expand Down Expand Up @@ -46,6 +46,8 @@ pub async fn phase_sync(dir: &Path) -> Result<()> {
inject_dev_overrides(dir, &overrides).await?;
}

upload_tfvars_to_backend(dir).await?;

println!("\nSync completed successfully.");
Ok(())
})
Expand Down
138 changes: 132 additions & 6 deletions test/src/helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,19 @@ use crate::types::TfVars;
// Paths
// ---------------------------------------------------------------------------

/// Returns the project root directory (the git repo root).
///
/// Shells out to `git rev-parse --show-toplevel` so the result is correct
/// no matter where inside the repository the binary is invoked from.
///
/// # Errors
/// Fails if `git` cannot be executed, the current directory is not inside
/// a git repository, or the path git prints is not valid UTF-8.
pub fn project_root() -> Result<PathBuf> {
    let output = std::process::Command::new("git")
        .args(["rev-parse", "--show-toplevel"])
        .output()
        .context("Failed to run `git rev-parse --show-toplevel`")?;
    if !output.status.success() {
        bail!("Not inside a git repository");
    }
    // git terminates the printed path with a newline; trim it before
    // building the PathBuf.
    let root = std::str::from_utf8(&output.stdout)
        .context("git output was not valid UTF-8")?
        .trim();
    Ok(PathBuf::from(root))
}

pub fn example_dir(provider: crate::types::CloudProvider) -> Result<PathBuf> {
Expand Down Expand Up @@ -193,3 +199,123 @@ pub fn kubectl(kubeconfig: &Path) -> Command {
cmd.arg("--kubeconfig").arg(kubeconfig);
cmd
}

// ---------------------------------------------------------------------------
// S3 backend helpers
// ---------------------------------------------------------------------------

/// Parsed S3 backend configuration from a `backend.tf` file.
pub struct S3Backend {
    /// Name of the S3 bucket holding the remote state.
    pub bucket: String,
    /// AWS region of the bucket.
    pub region: String,
    /// AWS CLI profile to use; `None` when `backend.tf` declares no profile.
    pub profile: Option<String>,
    /// The key prefix (test run ID), extracted from the state key.
    pub key_prefix: String,
}

/// Reads `backend.tf` from the given directory and extracts the S3
/// configuration. Returns `None` if the file does not exist (local state).
///
/// # Errors
/// Fails if the file exists but cannot be read, or if any required
/// attribute (`bucket`, `region`, `key`) is missing.
pub fn read_s3_backend(dir: &Path) -> Result<Option<S3Backend>> {
    let path = dir.join("backend.tf");
    let content = match std::fs::read_to_string(&path) {
        Ok(c) => c,
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
        Err(e) => return Err(e).context("Failed to read backend.tf"),
    };

    // Extracts the quoted value of an exact `key = "value"` attribute.
    // Requiring an `=` right after the attribute name prevents a prefix
    // match (e.g. `bucket` matching a `bucket_prefix` line).
    fn extract<'a>(content: &'a str, key: &str) -> Option<&'a str> {
        content.lines().find_map(|l| {
            let rest = l.trim_start().strip_prefix(key)?;
            if rest.trim_start().starts_with('=') {
                l.split('"').nth(1)
            } else {
                None
            }
        })
    }

    let bucket = extract(&content, "bucket")
        .context("backend.tf missing bucket")?
        .to_string();
    let region = extract(&content, "region")
        .context("backend.tf missing region")?
        .to_string();
    let profile = extract(&content, "profile").map(str::to_string);
    let key = extract(&content, "key").context("backend.tf missing key")?;
    // The state key is `<run-id>/...`; the first path segment identifies
    // the test run.
    let key_prefix = key
        .split('/')
        .next()
        .context("backend.tf key has no prefix")?
        .to_string();

    Ok(Some(S3Backend {
        bucket,
        region,
        profile,
        key_prefix,
    }))
}

/// Uploads `terraform.tfvars.json` to the S3 backend alongside the state
/// file, so that other commands or CI jobs can discover the tfvars for a
/// given test run.
///
/// No-ops when the directory has no `backend.tf` (local state).
pub async fn upload_tfvars_to_backend(dir: &Path) -> Result<()> {
    // Nothing to do for local state.
    let Some(backend) = read_s3_backend(dir)? else {
        return Ok(());
    };

    let source = dir.join("terraform.tfvars.json").display().to_string();
    let destination = format!(
        "s3://{}/{}/terraform.tfvars.json",
        backend.bucket, backend.key_prefix
    );

    println!("Uploading terraform.tfvars.json to {destination}");
    let mut cmd = Command::new("aws");
    cmd.arg("s3")
        .arg("cp")
        .arg(&source)
        .arg(&destination)
        .arg("--region")
        .arg(&backend.region);
    if let Some(profile) = backend.profile.as_deref() {
        cmd.arg("--profile").arg(profile);
    }
    run_cmd(&mut cmd)
        .await
        .context("Failed to upload terraform.tfvars.json to S3")?;

    Ok(())
}

/// Deletes the remote state file and tfvars file from S3 for the given
/// test run directory. No-ops if no S3 backend is configured.
pub async fn delete_backend_state(dir: &Path) -> Result<()> {
let backend = match read_s3_backend(dir)? {
Some(b) => b,
None => return Ok(()),
};

let prefix = format!(
"s3://{}/{}/",
backend.bucket, backend.key_prefix
);

println!("Deleting remote state from {prefix}");
let mut cmd = Command::new("aws");
cmd.args([
"s3",
"rm",
&prefix,
"--recursive",
"--region",
&backend.region,
]);
Comment on lines +303 to +312
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why not use an aws sdk for this?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Because claude wrote it and I didn't think of it. I'll follow up with another PR to change that.

if let Some(profile) = &backend.profile {
cmd.args(["--profile", profile]);
}
run_cmd(&mut cmd)
.await
.context("Failed to delete remote state from S3")?;

Ok(())
}