Skip to content

Commit b50c921

Browse files
committed
refactor: remove redundant SSRF regex patterns from hardcoded_filters
validate_web_fetch_url() already provides strictly superior SSRF protection via DNS resolution and IP checking. The regex patterns in hardcoded_filters.rs were a subset of that coverage, duplicated inadvertently during the PR #32 revert.
1 parent 42e352c commit b50c921

2 files changed

Lines changed: 3 additions & 58 deletions

File tree

crates/core/src/agent/hardcoded_filters.rs

Lines changed: 0 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -22,27 +22,6 @@ pub const BASH_DENY_PATTERNS: &[&str] = &[
2222
r"curl\s.*\|\s*python",
2323
];
2424

25-
/// Web fetch deny substrings — case-insensitive substring match.
26-
pub const WEB_FETCH_DENY_SUBSTRINGS: &[&str] = &[
27-
"file://",
28-
"localhost",
29-
"0.0.0.0",
30-
"169.254.169.254",
31-
"[::1]",
32-
];
33-
34-
/// Web fetch deny patterns — regex patterns for private/internal IP ranges.
35-
pub const WEB_FETCH_DENY_PATTERNS: &[&str] = &[
36-
// 10.x.x.x
37-
r"https?://10\.\d{1,3}\.\d{1,3}\.\d{1,3}",
38-
// 172.16-31.x.x
39-
r"https?://172\.(1[6-9]|2\d|3[01])\.\d{1,3}\.\d{1,3}",
40-
// 192.168.x.x
41-
r"https?://192\.168\.\d{1,3}\.\d{1,3}",
42-
// 127.x.x.x
43-
r"https?://127\.\d{1,3}\.\d{1,3}\.\d{1,3}",
44-
];
45-
4625
#[cfg(test)]
4726
mod tests {
4827
use super::*;
@@ -55,23 +34,11 @@ mod tests {
5534
}
5635
}
5736

58-
#[test]
59-
fn all_web_fetch_deny_patterns_compile() {
60-
for p in WEB_FETCH_DENY_PATTERNS {
61-
assert!(Regex::new(p).is_ok(), "Failed to compile: {}", p);
62-
}
63-
}
64-
6537
#[test]
6638
fn bash_deny_substrings_not_empty() {
6739
assert!(!BASH_DENY_SUBSTRINGS.is_empty());
6840
}
6941

70-
#[test]
71-
fn web_fetch_deny_substrings_not_empty() {
72-
assert!(!WEB_FETCH_DENY_SUBSTRINGS.is_empty());
73-
}
74-
7542
#[test]
7643
fn sudo_pattern_matches() {
7744
let re = Regex::new(BASH_DENY_PATTERNS[0]).unwrap();
@@ -87,23 +54,4 @@ mod tests {
8754
assert!(!re.is_match("curl https://example.com -o file.txt"));
8855
}
8956

90-
#[test]
91-
fn private_ip_patterns_match() {
92-
let re10 = Regex::new(WEB_FETCH_DENY_PATTERNS[0]).unwrap();
93-
assert!(re10.is_match("http://10.0.0.1/api"));
94-
assert!(!re10.is_match("http://100.0.0.1/api"));
95-
96-
let re172 = Regex::new(WEB_FETCH_DENY_PATTERNS[1]).unwrap();
97-
assert!(re172.is_match("http://172.16.0.1/api"));
98-
assert!(re172.is_match("http://172.31.255.255"));
99-
assert!(!re172.is_match("http://172.32.0.1/api"));
100-
101-
let re192 = Regex::new(WEB_FETCH_DENY_PATTERNS[2]).unwrap();
102-
assert!(re192.is_match("http://192.168.1.1"));
103-
assert!(!re192.is_match("http://192.169.1.1"));
104-
105-
let re127 = Regex::new(WEB_FETCH_DENY_PATTERNS[3]).unwrap();
106-
assert!(re127.is_match("http://127.0.0.1/api"));
107-
assert!(!re127.is_match("http://128.0.0.1/api"));
108-
}
10957
}

crates/core/src/agent/tools/mod.rs

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -52,17 +52,14 @@ pub fn create_safe_tools(
5252
Box::new(MemorySearchTool::new(workspace.clone()))
5353
};
5454

55-
// Compile web_fetch filter with SSRF deny patterns
55+
// Compile web_fetch filter from user config (SSRF protection is handled
56+
// by validate_web_fetch_url() which does DNS-resolution-based IP checking)
5657
let web_fetch_filter = config
5758
.tools
5859
.filters
5960
.get("web_fetch")
6061
.map(CompiledToolFilter::compile)
61-
.unwrap_or_else(|| Ok(CompiledToolFilter::permissive()))?
62-
.merge_hardcoded(
63-
hardcoded_filters::WEB_FETCH_DENY_SUBSTRINGS,
64-
hardcoded_filters::WEB_FETCH_DENY_PATTERNS,
65-
)?;
62+
.unwrap_or_else(|| Ok(CompiledToolFilter::permissive()))?;
6663

6764
let mut tools: Vec<Box<dyn Tool>> = vec![
6865
memory_search_tool,

0 commit comments

Comments
 (0)