Skip to content
This repository was archived by the owner on Oct 21, 2025. It is now read-only.

Commit 11d121d

Browse files
committed
Fixes
1 parent 269cffb commit 11d121d

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

src/utils/llm_backend.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -27,9 +27,9 @@ def _process_raw_response_for_logging(raw_content: str, max_chars: int = 1000) -
2727
# Truncate if needed
2828
if len(compact_content) > max_chars:
2929
truncated_content = compact_content[:max_chars] + "..."
30-
return f"{truncated_content} [omitted {whitespace_count} whitespace chars, truncated at {max_chars}]"
30+
return f"{truncated_content} (omitted {whitespace_count} whitespace chars, truncated at {max_chars})"
3131
else:
32-
return f"{compact_content} [omitted {whitespace_count} whitespace chars]"
32+
return f"{compact_content} (omitted {whitespace_count} whitespace chars)"
3333

3434

3535
class LLMBackend(ABC):
@@ -247,7 +247,7 @@ def generate(
247247
# Show processed raw response content if we captured it
248248
if raw_response_content:
249249
processed_content = _process_raw_response_for_logging(raw_response_content)
250-
display.error(f"Raw response content: {processed_content}")
250+
display.error(f"Raw response content: '{processed_content}'")
251251
else:
252252
display.error("Could not capture raw response content")
253253

@@ -387,7 +387,7 @@ def chat(
387387
# Show processed raw response content if we captured it
388388
if raw_response_content:
389389
processed_content = _process_raw_response_for_logging(raw_response_content)
390-
display.error(f"Raw response content: {processed_content}")
390+
display.error(f"Raw response content: '{processed_content}'")
391391
else:
392392
display.error("Could not capture raw response content")
393393

0 commit comments

Comments (0)