Skip to content

Commit d5415a8

Browse files
Copilot and 0xrinegade committed
Implement --debug flag to hide debug info by default for AI queries
Co-authored-by: 0xrinegade <[email protected]>
1 parent b065854 commit d5415a8

File tree

5 files changed

+142
-32
lines changed

5 files changed

+142
-32
lines changed

src/clparse.rs

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,13 @@ pub fn parse_command_line() -> clap::ArgMatches {
8080
.global(true)
8181
.help("Disable colorized output (also respects NO_COLOR environment variable)"),
8282
)
83+
.arg(
84+
Arg::new("debug")
85+
.long("debug")
86+
.action(ArgAction::SetTrue)
87+
.global(true)
88+
.help("Show debug information"),
89+
)
8390
.arg(
8491
Arg::new("json_rpc_url")
8592
.short('u')

src/main.rs

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,7 @@ fn is_known_command(sub_command: &str) -> bool {
5454
async fn handle_ai_query(
5555
sub_command: &str,
5656
sub_matches: &clap::ArgMatches,
57+
app_matches: &clap::ArgMatches,
5758
) -> Result<(), Box<dyn std::error::Error>> {
5859
// For external subcommands, clap provides the additional arguments differently
5960
// We need to collect them from the raw args since clap doesn't know about them
@@ -76,13 +77,20 @@ async fn handle_ai_query(
7677

7778
let query = query_parts.join(" ");
7879

80+
// Get debug flag from global args
81+
let debug_mode = app_matches.get_flag("debug");
82+
7983
// Make AI request
80-
println!("🔍 Interpreting as AI query: \"{}\"", query);
84+
if debug_mode {
85+
println!("🔍 Interpreting as AI query: \"{}\"", query);
86+
}
8187

82-
let ai_service = crate::services::ai_service::AiService::new();
83-
match ai_service.query(&query).await {
88+
let ai_service = crate::services::ai_service::AiService::new_with_debug(debug_mode);
89+
match ai_service.query_with_debug(&query, debug_mode).await {
8490
Ok(response) => {
85-
println!("🤖 AI Response:");
91+
if debug_mode {
92+
println!("🤖 AI Response:");
93+
}
8694
println!("{}", response);
8795
}
8896
Err(e) => {
@@ -316,7 +324,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
316324

317325
// Handle AI queries early to avoid config loading
318326
if !is_known_command(sub_command) {
319-
return handle_ai_query(sub_command, sub_matches).await;
327+
return handle_ai_query(sub_command, sub_matches, &app_matches).await;
320328
}
321329

322330
// Load configuration using the new Config module

src/services/ai_service.rs

Lines changed: 54 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -58,10 +58,18 @@ pub struct AiService {
5858

5959
impl AiService {
6060
pub fn new() -> Self {
61-
Self::with_api_url(None)
61+
Self::new_with_debug(true)
62+
}
63+
64+
pub fn new_with_debug(debug_mode: bool) -> Self {
65+
Self::with_api_url_and_debug(None, debug_mode)
6266
}
6367

6468
pub fn with_api_url(custom_api_url: Option<String>) -> Self {
69+
Self::with_api_url_and_debug(custom_api_url, true)
70+
}
71+
72+
pub fn with_api_url_and_debug(custom_api_url: Option<String>, debug_mode: bool) -> Self {
6573
let (api_url, use_openai) = match custom_api_url {
6674
Some(url) => {
6775
// Check if it's an OpenAI URL and we have an API key
@@ -100,8 +108,10 @@ impl AiService {
100108
let mut template_manager = PromptTemplateManager::new();
101109

102110
// Initialize template manager
103-
if let Err(e) = template_manager.load_from_directory("./templates/ai_prompts") {
104-
println!("⚠️ Failed to load AI prompt templates: {}", e);
111+
if let Err(e) = template_manager.load_from_directory_with_debug("./templates/ai_prompts", debug_mode) {
112+
if debug_mode {
113+
println!("⚠️ Failed to load AI prompt templates: {}", e);
114+
}
105115
}
106116

107117
Self {
@@ -115,6 +125,10 @@ impl AiService {
115125
}
116126

117127
pub async fn query(&self, question: &str) -> Result<String> {
128+
self.query_with_debug(question, true).await
129+
}
130+
131+
pub async fn query_with_debug(&self, question: &str, debug_mode: bool) -> Result<String> {
118132
let endpoint = if self.use_openai {
119133
EndpointId::openai()
120134
} else {
@@ -127,29 +141,37 @@ impl AiService {
127141
}
128142

129143
if self.use_openai {
130-
println!("🤖 Asking OpenAI ({}): {}", self.api_url, question);
144+
if debug_mode {
145+
println!("🤖 Asking OpenAI ({}): {}", self.api_url, question);
146+
}
131147
} else {
132-
println!("🤖 Asking OSVM AI ({}): {}", self.api_url, question);
148+
if debug_mode {
149+
println!("🤖 Asking OSVM AI ({}): {}", self.api_url, question);
150+
}
133151
}
134152

135153
let result = if self.use_openai {
136-
self.query_openai(question).await
154+
self.query_openai(question, debug_mode).await
137155
} else {
138-
self.query_osvm_ai(question).await
156+
self.query_osvm_ai(question, debug_mode).await
139157
};
140158

141159
// Record success/failure with circuit breaker
142160
match &result {
143161
Ok(_) => {
144162
self.circuit_breaker.on_success_endpoint(&endpoint);
145-
println!(
146-
"🔍 AI Response received ({} chars)",
147-
result.as_ref().unwrap().len()
148-
);
163+
if debug_mode {
164+
println!(
165+
"🔍 AI Response received ({} chars)",
166+
result.as_ref().unwrap().len()
167+
);
168+
}
149169
}
150170
Err(e) => {
151171
self.circuit_breaker.on_failure_endpoint(&endpoint);
152-
println!("❌ AI Response error: {}", e);
172+
if debug_mode {
173+
println!("❌ AI Response error: {}", e);
174+
}
153175
}
154176
}
155177

@@ -228,15 +250,17 @@ impl AiService {
228250
}
229251
}
230252

231-
async fn query_osvm_ai(&self, question: &str) -> Result<String> {
253+
async fn query_osvm_ai(&self, question: &str, debug_mode: bool) -> Result<String> {
232254
let request_body = AiRequest {
233255
question: question.to_string(),
234256
};
235257

236-
println!(
237-
"📤 OSVM AI Request: {}",
238-
serde_json::to_string_pretty(&request_body)?
239-
);
258+
if debug_mode {
259+
println!(
260+
"📤 OSVM AI Request: {}",
261+
serde_json::to_string_pretty(&request_body)?
262+
);
263+
}
240264

241265
let response = self
242266
.client
@@ -249,7 +273,9 @@ impl AiService {
249273
let status = response.status();
250274
let response_text = response.text().await?;
251275

252-
println!("📥 OSVM AI Response ({}): {}", status, response_text);
276+
if debug_mode {
277+
println!("📥 OSVM AI Response ({}): {}", status, response_text);
278+
}
253279

254280
if !status.is_success() {
255281
// Try to parse error response as JSON first
@@ -286,7 +312,7 @@ impl AiService {
286312
}
287313
}
288314

289-
async fn query_openai(&self, question: &str) -> Result<String> {
315+
async fn query_openai(&self, question: &str, debug_mode: bool) -> Result<String> {
290316
let api_key = self.api_key.as_ref().unwrap();
291317

292318
let request_body = OpenAiRequest {
@@ -299,10 +325,12 @@ impl AiService {
299325
temperature: 0.7,
300326
};
301327

302-
println!(
303-
"📤 OpenAI Request: {}",
304-
serde_json::to_string_pretty(&request_body)?
305-
);
328+
if debug_mode {
329+
println!(
330+
"📤 OpenAI Request: {}",
331+
serde_json::to_string_pretty(&request_body)?
332+
);
333+
}
306334

307335
let response = self
308336
.client
@@ -316,7 +344,9 @@ impl AiService {
316344
let status = response.status();
317345
let response_text = response.text().await?;
318346

319-
println!("📥 OpenAI Response ({}): {}", status, response_text);
347+
if debug_mode {
348+
println!("📥 OpenAI Response ({}): {}", status, response_text);
349+
}
320350

321351
if !status.is_success() {
322352
anyhow::bail!(

src/utils/prompt_templates.rs

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -135,11 +135,17 @@ impl PromptTemplateManager {
135135

136136
/// Load templates from a directory
137137
pub fn load_from_directory(&mut self, dir_path: &str) -> Result<usize> {
138+
self.load_from_directory_with_debug(dir_path, true)
139+
}
140+
141+
pub fn load_from_directory_with_debug(&mut self, dir_path: &str, debug_mode: bool) -> Result<usize> {
138142
self.template_dirs.push(dir_path.to_string());
139143

140144
let dir = Path::new(dir_path);
141145
if !dir.exists() {
142-
println!("📁 Creating template directory: {}", dir_path);
146+
if debug_mode {
147+
println!("📁 Creating template directory: {}", dir_path);
148+
}
143149
fs::create_dir_all(dir)?;
144150
self.create_default_templates(dir)?;
145151
}
@@ -157,10 +163,14 @@ impl PromptTemplateManager {
157163
match self.load_template_file(&path) {
158164
Ok(count) => {
159165
loaded_count += count;
160-
println!("✅ Loaded {} templates from {}", count, path.display());
166+
if debug_mode {
167+
println!("✅ Loaded {} templates from {}", count, path.display());
168+
}
161169
}
162170
Err(e) => {
163-
println!("⚠️ Failed to load template file {}: {}", path.display(), e);
171+
if debug_mode {
172+
println!("⚠️ Failed to load template file {}: {}", path.display(), e);
173+
}
164174
}
165175
}
166176
}

tests/debug_flag_tests.rs

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
/// Test debug flag functionality for AI queries
2+
use std::process::Command;
3+
4+
#[test]
5+
fn test_ai_query_without_debug_flag() {
6+
let output = Command::new(env!("CARGO_BIN_EXE_osvm"))
7+
.args(["hello"])
8+
.output()
9+
.expect("Failed to execute command");
10+
11+
let stdout = String::from_utf8_lossy(&output.stdout);
12+
13+
// Should not contain debug information
14+
assert!(!stdout.contains("🔍 Interpreting as AI query"));
15+
assert!(!stdout.contains("✅ Loaded"));
16+
assert!(!stdout.contains("📤 OSVM AI Request"));
17+
assert!(!stdout.contains("📥 OSVM AI Response"));
18+
assert!(!stdout.contains("🔍 AI Response received"));
19+
assert!(!stdout.contains("🤖 AI Response:"));
20+
21+
// Should contain the actual response (without debug prefix)
22+
assert!(!stdout.is_empty());
23+
}
24+
25+
#[test]
26+
fn test_ai_query_with_debug_flag() {
27+
let output = Command::new(env!("CARGO_BIN_EXE_osvm"))
28+
.args(["--debug", "hello"])
29+
.output()
30+
.expect("Failed to execute command");
31+
32+
let stdout = String::from_utf8_lossy(&output.stdout);
33+
34+
// Should contain debug information
35+
assert!(stdout.contains("🔍 Interpreting as AI query"));
36+
assert!(stdout.contains("✅ Loaded"));
37+
assert!(stdout.contains("📤 OSVM AI Request"));
38+
assert!(stdout.contains("📥 OSVM AI Response"));
39+
assert!(stdout.contains("🔍 AI Response received"));
40+
assert!(stdout.contains("🤖 AI Response:"));
41+
}
42+
43+
#[test]
44+
fn test_debug_flag_appears_in_help() {
45+
let output = Command::new(env!("CARGO_BIN_EXE_osvm"))
46+
.args(["--help"])
47+
.output()
48+
.expect("Failed to execute command");
49+
50+
let stdout = String::from_utf8_lossy(&output.stdout);
51+
52+
// Should contain debug flag in help
53+
assert!(stdout.contains("--debug"));
54+
assert!(stdout.contains("Show debug information"));
55+
}

0 commit comments

Comments (0)