Skip to content

Commit de7c278

Browse files
fix(demo): add Gemini backend support and graceful LLM error handling
- Add Google Gemini as LLM backend (auto-detect GOOGLE_API_KEY)
- Normalize tool calls across OpenAI/Azure/Gemini backends
- Gracefully catch API errors (quota, auth) with simulated fallback
- Governance middleware always runs real, even when LLM fails

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
1 parent 91cca4d commit de7c278

File tree

1 file changed

+34
-4
lines changed

1 file changed

+34
-4
lines changed

demo/maf_governance_demo.py

Lines changed: 34 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -209,11 +209,41 @@ def _llm_call(client: Any, model: str, messages: list[dict], **kwargs: Any) -> A
     """Make a real LLM call, dispatching to the correct backend.

     Returns a normalized response object with .text and .tool_calls attributes.
+    On API error, returns a fallback response with the error description.
     """
-    if _ACTIVE_BACKEND == BACKEND_GEMINI:
-        return _gemini_call(client, model, messages, **kwargs)
-    # OpenAI / Azure OpenAI
-    return _openai_call(client, model, messages, **kwargs)
+    try:
+        if _ACTIVE_BACKEND == BACKEND_GEMINI:
+            return _gemini_call(client, model, messages, **kwargs)
+        return _openai_call(client, model, messages, **kwargs)
+    except Exception as exc:
+        # Extract the user prompt for the fallback
+        user_msg = next((m["content"] for m in messages if m["role"] == "user"), "")
+        err_type = type(exc).__name__
+        print(
+            _tree(
+                "⚠️ ",
+                C.YELLOW,
+                "LLM Error",
+                f"{C.YELLOW}{err_type}{C.RESET}: {C.DIM}{str(exc)[:80]}{C.RESET}",
+            )
+        )
+        print(
+            _tree(
+                "🔄",
+                C.CYAN,
+                "Fallback",
+                f"{C.DIM}Using simulated response (governance middleware is still REAL){C.RESET}",
+            )
+        )
+        # Return a synthetic response so governance pipeline still runs end-to-end
+        return _NormalizedResponse(
+            choices=[
+                _NormalizedChoice(
+                    text=f"[Simulated: response to '{user_msg[:60]}']",
+                    tool_calls=None,
+                )
+            ]
+        )


 @dataclass

0 commit comments

Comments
 (0)