-
-
Notifications
You must be signed in to change notification settings - Fork 4.7k
feat(external-issues): Use LLM generated title/description for ticket creation #114760
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 5 commits
0e10f21
c5c1f55
8b58219
9cdf1cc
e9b1ae8
9410c88
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,145 @@ | ||
| from __future__ import annotations | ||
|
|
||
| import logging | ||
| from typing import Any, TypedDict | ||
|
|
||
| from sentry import features | ||
| from sentry.models.group import Group | ||
| from sentry.seer.signed_seer_api import ( | ||
| LlmGenerateRequest, | ||
| SeerViewerContext, | ||
| make_llm_generate_request, | ||
| ) | ||
| from sentry.services.eventstore.models import GroupEvent | ||
| from sentry.users.models.user import User | ||
| from sentry.users.services.user import RpcUser | ||
| from sentry.utils import json | ||
| from sentry.utils.safe import safe_execute | ||
|
|
||
logger = logging.getLogger(__name__)

# System prompt steering the LLM toward a strict JSON object with "title"
# and "description" keys, phrased for external trackers (Jira/GitHub/Linear)
# rather than Sentry's own UI.
SYSTEM_PROMPT = """You are a helpful assistant that generates concise titles and descriptions for issue tickets in external project management tools like Jira, GitHub Issues, and Linear.

Given information about a Sentry error (title and stack trace / error details), generate:
1. A short, actionable title (3-8 words) suitable for a ticket. Describe the problem clearly.
2. A brief description (1-3 sentences) summarizing the error, its likely cause, and potential impact.

Do not include Sentry-specific formatting, links, or markdown. Keep the description in plain text.
Return a JSON object with "title" and "description" keys. Return only the JSON, nothing else."""

# Hard cap (in characters) on the error context embedded in the prompt,
# to keep requests within the model's input budget.
MAX_CONTEXT_LENGTH = 2000
|
|
||
|
|
||
| def _build_event_context(group: Group, event: Any | None = None) -> str: | ||
| if event is None: | ||
| event = group.get_latest_event() | ||
| title = group.title or "" | ||
| culprit = group.culprit or "" | ||
|
|
||
| body_parts: list[str] = [] | ||
| if event: | ||
| for interface in event.interfaces.values(): | ||
| output = safe_execute(interface.to_string, event) | ||
| if output: | ||
| body_parts.append(output) | ||
|
|
||
| event_body = "\n\n".join(body_parts) | ||
|
|
||
| context = f"Error Title: {title}" | ||
| if culprit: | ||
| context += f"\nCulprit: {culprit}" | ||
| if event_body: | ||
| context += f"\nDetails:\n{event_body}" | ||
|
|
||
| if len(context) > MAX_CONTEXT_LENGTH: | ||
| context = context[:MAX_CONTEXT_LENGTH] | ||
|
|
||
| return context | ||
|
|
||
|
|
||
class GeneratedExternalIssueDetails(TypedDict):
    # Suggested ticket title; None when generation was skipped or failed.
    title: str | None
    # Suggested ticket description; None when generation was skipped or failed.
    description: str | None
|
|
||
|
|
||
def _make_generate_external_issue_details_request(
    group: Group, event: Any | None = None, viewer_context: SeerViewerContext | None = None
) -> GeneratedExternalIssueDetails | None:
    """Ask Seer's LLM endpoint for a ticket title/description for ``group``.

    Returns None on any failure (HTTP error, malformed JSON, non-object
    payload, missing/blank fields) so callers can fall back to the default
    ticket contents.
    """
    logging_ctx: dict[str, Any] = {"group_id": group.id, "viewer_context": viewer_context}
    context = _build_event_context(group, event=event)

    body = LlmGenerateRequest(
        provider="gemini",
        model="flash",
        referrer="sentry.external-issue.details-generate",
        prompt=f"Generate a title and description for this Sentry error:\n\n{context}",
        system_prompt=SYSTEM_PROMPT,
        # Low temperature keeps output focused but not fully canned; 750
        # tokens is headroom for a short title plus a 1-3 sentence summary.
        temperature=0.3,
        max_tokens=750,
        response_schema={
            "type": "object",
            "properties": {
                "title": {"type": "string"},
                "description": {"type": "string"},
            },
            "required": ["title", "description"],
        },
    )
    response = make_llm_generate_request(body, timeout=10, viewer_context=viewer_context)
    logging_ctx["status_code"] = response.status
    if response.status >= 400:
        logger.warning("external_issues.seer_request_failed", extra=logging_ctx)
        return None

    try:
        data = response.json()
    except (json.JSONDecodeError, ValueError):
        logger.warning(
            "external_issues.seer_response_json_failed", extra=logging_ctx, exc_info=True
        )
        return None
    # A non-dict response body has no "content"; json.loads(None) below then
    # raises TypeError and is reported as a parse failure.
    content = data.get("content") if isinstance(data, dict) else None
    try:
        content = json.loads(content)
    except (json.JSONDecodeError, TypeError, ValueError):
        logger.warning(
            "external_issues.seer_response_parse_failed", extra=logging_ctx, exc_info=True
        )
        return None

    # The model can return valid JSON that is not an object (a bare string or
    # list); guard before .get() to avoid an uncaught AttributeError.
    if not isinstance(content, dict):
        logging_ctx["content"] = content
        logger.warning("external_issues.invalid_shape", extra=logging_ctx)
        return None

    # Strip BEFORE the truthiness check so whitespace-only values take the
    # fallback path instead of producing empty-string ticket fields.
    title = content.get("title")
    description = content.get("description")
    title = title.strip() if isinstance(title, str) else None
    description = description.strip() if isinstance(description, str) else None

    if title and description:
        return {"title": title, "description": description}

    logging_ctx["title"] = title
    logging_ctx["description"] = description
    logger.warning("external_issues.invalid_shape", extra=logging_ctx)
    return None
|
|
||
|
|
||
def maybe_generate_external_issue_details(
    *, group: Group, user: User | RpcUser, event: GroupEvent | None = None
) -> GeneratedExternalIssueDetails:
    """Return LLM-suggested ticket details for ``group``, or empty values.

    Generation runs only when the org has gen-AI features, has not hidden AI
    features, and has the external-issues generation flag enabled. Failures
    are logged and swallowed so ticket creation is never blocked.
    """
    org = group.organization
    fallback = GeneratedExternalIssueDetails(title=None, description=None)

    # Short-circuits in the same order as the original gate checks.
    ai_allowed = (
        features.has("organizations:gen-ai-features", org, actor=user)
        and not org.get_option("sentry:hide_ai_features", False)
        and features.has("organizations:external-issues-ai-generate", org, actor=user)
    )
    if not ai_allowed:
        return fallback

    try:
        generated = _make_generate_external_issue_details_request(
            group,
            event=event,
            viewer_context=SeerViewerContext(organization_id=org.id, user_id=user.id),
        )
    except Exception:
        # Best effort only: any error falls back to default ticket contents.
        logger.error("external_issues.generate_issue_details_failed", exc_info=True)
        return fallback

    return generated or fallback


Uh oh!
There was an error while loading. Please reload this page.