Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 21 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 6 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@ repository = "https://github.com/srlabs/ziggy/"
[workspace]
members = [".", "examples/arbitrary", "examples/asan", "examples/url"]

[[bin]]
name = "cargo-ziggy"
required-features = ["cli"]

[features]
default = ["cli"]
cli = [
Expand All @@ -19,6 +23,7 @@ cli = [
"glob",
"libc",
"semver",
"signal-hook",
"strip-ansi-escapes",
"target-triple",
"time-humanize",
Expand All @@ -36,6 +41,7 @@ glob = { version = "0.3.3", optional = true }
honggfuzz = { version = "0.5.59", optional = true }
libc = { version = "0.2.182", optional = true }
semver = { version = "1.0.27", optional = true }
signal-hook = { version = "0.4.3", optional = true }
strip-ansi-escapes = { version = "0.2.1", optional = true }
target-triple = { version = "1.0.0", optional = true }
time-humanize = { version = "0.1.3", optional = true }
Expand Down
219 changes: 132 additions & 87 deletions src/bin/cargo-ziggy/fuzz.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ impl Fuzz {
}

// Manages the continuous running of fuzzers
pub fn fuzz(&mut self) -> Result<(), anyhow::Error> {
pub fn fuzz(&mut self, common: &Common) -> Result<(), anyhow::Error> {
if !self.fuzz_binary() {
let build = Build {
no_afl: !self.afl(),
Expand Down Expand Up @@ -175,11 +175,23 @@ impl Fuzz {
let target = self.target.clone();
let main_corpus = self.corpus();
let output_target = self.output_target();
let mut crashes = (String::new(), String::new());

common.shutdown_deferred(); // handle termination signals gracefully
loop {
let sleep_duration = Duration::from_secs(1);
thread::sleep(sleep_duration);

if common.is_terminated() {
eprintln!("Shutting down...");
let res = (
stop_fuzzers(&processes),
self.sync_corpora(last_synced_created_time).map(|_| ()),
self.sync_crashes(crash_path),
);
return res.0.and(res.1).and(res.2);
}

let coverage_status = match (
self.coverage_worker,
*coverage_now_running.lock().unwrap(),
Expand All @@ -193,7 +205,7 @@ impl Fuzz {
(false, _, _) => String::from("disabled"),
};

self.print_stats(&coverage_status);
let current_crashes = self.print_stats(&coverage_status);

if coverage_status.as_str() == "starting" {
*coverage_now_running.lock().unwrap() = true;
Expand Down Expand Up @@ -254,7 +266,7 @@ impl Fuzz {
} else if afl_log.contains("/proc/sys/kernel/core_pattern")
|| afl_log.contains("/sys/devices/system/cpu")
{
stop_fuzzers(&mut processes)?;
stop_fuzzers(&processes)?;
eprintln!("We highly recommend you configure your system for better performance:\n");
eprintln!(" cargo afl system-config\n");
eprintln!(
Expand All @@ -265,99 +277,131 @@ impl Fuzz {
}
}

// We check AFL++ and Honggfuzz's outputs for crash files and copy them over to
// our own crashes directory
let crash_dirs = glob(&format!("{}/afl/*/crashes", self.output_target()))
.map_err(|_| anyhow!("Failed to read crashes glob pattern"))?
.flatten()
.chain(vec![format!(
"{}/honggfuzz/{}",
self.output_target(),
self.target
)
.into()]);

for crash_dir in crash_dirs {
if let Ok(crashes) = fs::read_dir(crash_dir) {
for crash_input in crashes.flatten() {
let file_name = crash_input.file_name();
let to_path = crash_path.join(&file_name);
if to_path.exists()
|| ["", "README.txt", "HONGGFUZZ.REPORT.TXT", "input"]
.contains(&file_name.to_str().unwrap_or_default())
{
continue;
}
// Copy crash files from AFL++ and Honggfuzz's outputs
if current_crashes != crashes {
crashes = current_crashes;
self.sync_crashes(crash_path)?;
}

// Sync corpus dirs
if last_sync_time.elapsed() > Duration::from_mins(self.corpus_sync_interval) {
last_synced_created_time = self.sync_corpora(last_synced_created_time)?;
last_sync_time = Instant::now();
}

if !processes
.iter_mut()
.all(|p| p.try_wait().is_ok_and(|exited| exited.is_none()))
{
stop_fuzzers(&processes)?;
return Ok(());
}
}
}

/// Copy crash inputs produced by AFL++ or Honggfuzz into `target_dir`.
///
/// Scans every AFL++ `crashes` directory plus Honggfuzz's output directory,
/// skipping the fuzzers' bookkeeping files, and copies each crash file over
/// unless a file with the same name already exists in `target_dir`.
fn sync_crashes(&self, target_dir: &Path) -> Result<(), anyhow::Error> {
    // Fuzzer-generated housekeeping files that are not actual crash inputs.
    const SKIP_NAMES: [&str; 3] = ["README.txt", "HONGGFUZZ.REPORT.TXT", "input"];

    // All AFL++ instances write to <output>/afl/<instance>/crashes.
    let afl_pattern = format!("{}/afl/*/crashes", self.output_target());
    let afl_crash_dirs = glob(&afl_pattern)
        .map_err(|_| anyhow!("Failed to read crashes glob pattern"))?
        .flatten();
    // Honggfuzz drops crashes directly into <output>/honggfuzz/<target>.
    let hfuzz_crash_dir = PathBuf::from(format!(
        "{}/honggfuzz/{}",
        self.output_target(),
        self.target
    ));

    for dir in afl_crash_dirs.chain(std::iter::once(hfuzz_crash_dir)) {
        // A missing directory just means that fuzzer hasn't produced crashes.
        let Ok(entries) = fs::read_dir(dir) else {
            continue;
        };
        for entry in entries.flatten() {
            let name = entry.file_name();
            if SKIP_NAMES.iter().any(|skip| name == *skip) {
                continue;
            }
            let destination = target_dir.join(&name);
            // Never overwrite a crash we already collected.
            if !destination.exists() {
                fs::copy(entry.path(), destination)?;
            }
        }
    }
    Ok(())
}

// If both fuzzers are running, we copy over AFL++'s queue for consumption by Honggfuzz.
// We also copy-over each live corpus to the shared corpus directory, where each file
// name is the md5 hash of the file. This happens every 10 minutes.
if last_sync_time.elapsed().as_secs() > 10 * 60 {
let mut files = vec![];
if self.afl() {
files.append(
&mut glob(&format!(
"{}/afl/mainaflfuzzer/queue/*",
self.output_target(),
))?
.flatten()
.collect(),
);
}
if self.honggfuzz() {
files.append(
&mut glob(&format!("{}/honggfuzz/corpus/*", self.output_target(),))?
.flatten()
.collect(),
);
}
let mut newest_time = last_synced_created_time;
let valid_files = files.iter().filter(|file| {
if let Ok(metadata) = file.metadata() {
let created = metadata.created().unwrap();
if last_synced_created_time.is_none_or(|time| created > time) {
if newest_time.is_none_or(|time| created > time) {
newest_time = Some(created);
}
return true;
}
}
/// Sync shared corpora
///
/// Copy-over each live corpus to the shared corpus directory, where each file name is usually its hash.
/// If both fuzzers are running, copy over AFL++'s queue for consumption by Honggfuzz.
fn sync_corpora(
&self,
last_synced: Option<SystemTime>,
) -> Result<Option<SystemTime>, anyhow::Error> {
let now = SystemTime::now();
let afl_files = self
.afl()
.then_some(
glob(&format!(
"{}/afl/mainaflfuzzer/queue/*",
self.output_target(),
))?
.flatten(),
)
.into_iter()
.flatten();
let hfuzz_files = self
.honggfuzz()
.then_some(glob(&format!("{}/honggfuzz/corpus/*", self.output_target()))?.flatten())
.into_iter()
.flatten();

let mut latest = last_synced;
let potentially_new_files = afl_files.chain(hfuzz_files).filter(|file| {
file.metadata().is_ok_and(|metadata| {
let Ok(created) = metadata.created() else {
return false;
};
// be conservative and consider some too old files
if last_synced.is_none_or(|synced| synced - Duration::from_secs(1) < created) {
latest = latest.max(Some(created));
true
} else {
false
});
for file in valid_files {
if let Some(file_name) = file.file_name() {
if self.honggfuzz() {
let queue_path =
format!("{}/queue/{}", self.output_target(), file_name.display());
if !Path::new(&queue_path).exists() {
let _ = fs::copy(file, queue_path);
}
}
// Hash the file to get its file name
let bytes = fs::read(file).unwrap_or_default();
let hash = XxHash64::oneshot(0, &bytes);
let corpus_path = format!("{}/corpus/{hash:x}", self.output_target());
if !Path::new(&corpus_path).exists() {
let _ = fs::copy(file, corpus_path);
}
}
})
});

let queue_path = PathBuf::from(format!("{}/queue", self.output_target()));
let corpus_path = PathBuf::from(format!("{}/corpus", self.output_target()));
for file in potentially_new_files {
if self.honggfuzz() {
if let Some(file_name) = file.file_name() {
let target = queue_path.join(file_name);
if !target.exists() {
let _ = fs::copy(&file, target);
}
}
last_synced_created_time = newest_time;
last_sync_time = Instant::now();
}

if processes
.iter_mut()
.all(|p| p.try_wait().unwrap_or(None).is_some())
{
stop_fuzzers(&mut processes)?;
return Ok(());
// Hash the file to get its file name
if let Ok(bytes) = fs::read(&file) {
let mut hash = XxHash64::oneshot(0, &bytes);
// linear probing (bounded)
for _ in 0..1024 {
let target = corpus_path.join(format!("{hash:x}"));
if !target.exists() {
let _ = fs::copy(&file, target);
break;
} else if target
.metadata()
.is_ok_and(|m| m.len() == bytes.len() as u64)
&& fs::read(target).is_ok_and(|t| t == bytes)
{
break;
}
hash = hash.wrapping_add(1);
}
}
}
Ok(latest.min(Some(now)))
}

// Spawns new fuzzers
Expand Down Expand Up @@ -730,7 +774,7 @@ impl Fuzz {
Ok(())
}

pub fn print_stats(&self, cov_worker_status: &str) {
pub fn print_stats(&self, cov_worker_status: &str) -> (String, String) {
let fuzzer_name = format!(" {} ", self.target);

let reset = "\x1b[0m";
Expand Down Expand Up @@ -932,6 +976,7 @@ impl Fuzz {
screen += "└──────────────────────────────────────────────────────────────────────┘\n";
}
eprintln!("{screen}");
(afl_crashes, hf_crashes)
}
}

Expand Down Expand Up @@ -982,7 +1027,7 @@ pub fn kill_subprocesses_recursively(pid: &str) -> Result<(), Error> {
}

// Stop all fuzzer processes
pub fn stop_fuzzers(processes: &mut Vec<process::Child>) -> Result<(), Error> {
pub fn stop_fuzzers(processes: &[process::Child]) -> Result<(), Error> {
for process in processes {
kill_subprocesses_recursively(&process.id().to_string())?;
}
Expand Down
Loading