Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/sentry/features/temporary.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,8 @@ def register_temporary_features(manager: FeatureManager) -> None:
manager.add("organizations:gen-ai-explore-metrics-search", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable GenAI consent
manager.add("organizations:gen-ai-consent", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable LLM-generated title and description for external issue details
manager.add("organizations:external-issues-ai-generate", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
Comment thread
cursor[bot] marked this conversation as resolved.
# Enable increased issue_owners rate limit for auto-assignment
manager.add("organizations:increased-issue-owners-rate-limit", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Starfish: extract metrics from the spans
Expand Down
16 changes: 14 additions & 2 deletions src/sentry/integrations/mixins/issues.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from sentry.integrations.tasks.sync_status_inbound import (
sync_status_inbound as sync_status_inbound_task,
)
from sentry.integrations.utils.external_issues import maybe_generate_external_issue_details
from sentry.issues.grouptype import GroupCategory
from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.models.group import Group
Expand Down Expand Up @@ -161,18 +162,29 @@ def get_create_issue_config(

event = group.get_latest_event()

default_title = self.get_group_title(group, event, **kwargs)
default_description = self.get_group_description(group, event, **kwargs)

llm_details = maybe_generate_external_issue_details(group=group, user=user, event=event)
title = llm_details["title"] if llm_details["title"] else default_title
description = (
f"**{default_title}**\n\n{llm_details['description']}\n\n---\n\n{default_description}"
if llm_details["description"]
else default_description
)

return [
{
"name": "title",
"label": "Title",
"default": self.get_group_title(group, event, **kwargs),
"default": title,
"type": "string",
"required": True,
},
{
"name": "description",
"label": "Description",
"default": self.get_group_description(group, event, **kwargs),
"default": description,
"type": "textarea",
"autosize": True,
"maxRows": 10,
Expand Down
145 changes: 145 additions & 0 deletions src/sentry/integrations/utils/external_issues.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
from __future__ import annotations

import logging
from typing import Any, TypedDict

from sentry import features
from sentry.models.group import Group
from sentry.seer.signed_seer_api import (
LlmGenerateRequest,
SeerViewerContext,
make_llm_generate_request,
)
from sentry.services.eventstore.models import GroupEvent
from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
from sentry.utils import json
from sentry.utils.safe import safe_execute

logger = logging.getLogger(__name__)

# System prompt steering the model toward concise, plain-text ticket fields
# returned strictly as a JSON object with "title" and "description" keys.
SYSTEM_PROMPT = """You are a helpful assistant that generates concise titles and descriptions for issue tickets in external project management tools like Jira, GitHub Issues, and Linear.

Given information about a Sentry error (title and stack trace / error details), generate:
1. A short, actionable title (3-8 words) suitable for a ticket. Describe the problem clearly.
2. A brief description (1-3 sentences) summarizing the error, its likely cause, and potential impact.

Do not include Sentry-specific formatting, links, or markdown. Keep the description in plain text.
Return a JSON object with "title" and "description" keys. Return only the JSON, nothing else."""

MAX_CONTEXT_LENGTH = 2000


def _build_event_context(group: Group, event: Any | None = None) -> str:
if event is None:
event = group.get_latest_event()
title = group.title or ""
culprit = group.culprit or ""

body_parts: list[str] = []
if event:
for interface in event.interfaces.values():
output = safe_execute(interface.to_string, event)
if output:
body_parts.append(output)

event_body = "\n\n".join(body_parts)

context = f"Error Title: {title}"
if culprit:
context += f"\nCulprit: {culprit}"
if event_body:
context += f"\nDetails:\n{event_body}"

if len(context) > MAX_CONTEXT_LENGTH:
context = context[:MAX_CONTEXT_LENGTH]

return context


class GeneratedExternalIssueDetails(TypedDict):
    """LLM-generated title/description for an external issue ticket.

    Both fields are None when generation was skipped (feature gating) or
    failed, signalling the caller to fall back to the defaults.
    """

    title: str | None
    description: str | None


def _make_generate_external_issue_details_request(
    group: Group, event: Any | None = None, viewer_context: SeerViewerContext | None = None
) -> GeneratedExternalIssueDetails | None:
    """Ask Seer's LLM endpoint for a ticket title/description for ``group``.

    Returns the parsed details on success, or None on any failure (HTTP error,
    malformed JSON, unexpected payload shape, or blank fields). Callers fall
    back to the default title/description when None is returned.
    """
    logging_ctx: dict[str, Any] = {"group_id": group.id, "viewer_context": viewer_context}
    context = _build_event_context(group, event=event)

    body = LlmGenerateRequest(
        provider="gemini",
        model="flash",
        referrer="sentry.external-issue.details-generate",
        prompt=f"Generate a title and description for this Sentry error:\n\n{context}",
        system_prompt=SYSTEM_PROMPT,
        # Low temperature keeps output focused but not fully deterministic;
        # 750 tokens leaves headroom for a short title plus a 1-3 sentence
        # description (values tuned empirically — see PR discussion).
        temperature=0.3,
        max_tokens=750,
        response_schema={
            "type": "object",
            "properties": {
                "title": {"type": "string"},
                "description": {"type": "string"},
            },
            "required": ["title", "description"],
        },
    )
    response = make_llm_generate_request(body, timeout=10, viewer_context=viewer_context)
    logging_ctx["status_code"] = response.status
    if response.status >= 400:
        logger.warning("external_issues.seer_request_failed", extra=logging_ctx)
        return None

    try:
        data = response.json()
    except (json.JSONDecodeError, ValueError):
        logger.warning(
            "external_issues.seer_response_json_failed", extra=logging_ctx, exc_info=True
        )
        return None

    # Guard against a non-object top-level payload before calling .get();
    # json.loads(None) below raises TypeError and is handled as a parse failure.
    content = data.get("content") if isinstance(data, dict) else None
    try:
        content = json.loads(content)
    except (json.JSONDecodeError, TypeError, ValueError):
        logger.warning(
            "external_issues.seer_response_parse_failed", extra=logging_ctx, exc_info=True
        )
        return None

    # json.loads can succeed on non-object payloads (lists, strings, numbers);
    # calling .get() on those would raise AttributeError and escape to the
    # generic handler in maybe_generate_external_issue_details, losing this
    # logging context.
    if not isinstance(content, dict):
        logging_ctx["content_type"] = type(content).__name__
        logger.warning("external_issues.invalid_shape", extra=logging_ctx)
        return None

    # Strip *before* the truthiness check so whitespace-only values fall back
    # consistently (previously a whitespace-only title with a real description
    # produced {"title": "", ...}); also tolerate non-string field values.
    raw_title = content.get("title")
    raw_description = content.get("description")
    title = raw_title.strip() if isinstance(raw_title, str) else ""
    description = raw_description.strip() if isinstance(raw_description, str) else ""

    if title and description:
        return {"title": title, "description": description}

    logging_ctx["title"] = raw_title
    logging_ctx["description"] = raw_description
    logger.warning("external_issues.invalid_shape", extra=logging_ctx)
    return None


def maybe_generate_external_issue_details(
    *, group: Group, user: User | RpcUser, event: GroupEvent | None = None
) -> GeneratedExternalIssueDetails:
    """Best-effort LLM generation of external-issue title/description.

    Returns an all-None result when AI features are gated off for the
    organization or when generation fails; this function never raises.
    """
    empty = GeneratedExternalIssueDetails(title=None, description=None)
    organization = group.organization

    # Feature gating: org must have gen-ai features, not have opted out of AI,
    # and have the external-issue generation flag enabled.
    gated_off = (
        not features.has("organizations:gen-ai-features", organization, actor=user)
        or organization.get_option("sentry:hide_ai_features", False)
        or not features.has("organizations:external-issues-ai-generate", organization, actor=user)
    )
    if gated_off:
        return empty

    try:
        result = _make_generate_external_issue_details_request(
            group,
            event=event,
            viewer_context=SeerViewerContext(organization_id=organization.id, user_id=user.id),
        )
    except Exception:
        # This path feeds a UI form default; any failure degrades to defaults.
        logger.error("external_issues.generate_issue_details_failed", exc_info=True)
        return empty

    return result or empty
65 changes: 65 additions & 0 deletions tests/sentry/integrations/test_issues.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from sentry.integrations.models.external_issue import ExternalIssue
from sentry.integrations.models.organization_integration import OrganizationIntegration
from sentry.integrations.services.integration import integration_service
from sentry.integrations.utils.external_issues import GeneratedExternalIssueDetails
from sentry.models.activity import Activity
from sentry.models.group import Group, GroupStatus
from sentry.models.grouplink import GroupLink
Expand Down Expand Up @@ -591,6 +592,12 @@ def test_status_sync_inbound_unresolve_webhook_and_sends_to_sentry_app(

class IssueDefaultTest(TestCase):
def setUp(self) -> None:
event = self.store_event(
data={"message": "test error", "level": "error"},
project_id=self.project.id,
)
self.group = event.group

self.group.status = GroupStatus.RESOLVED
self.group.substatus = None
self.group.save()
Expand Down Expand Up @@ -733,3 +740,61 @@ def test_annotations(self) -> None:
assert isinstance(installation, ExampleIntegration)

assert installation.get_annotations_for_group_list([self.group]) == {self.group.id: []}

@patch("sentry.integrations.mixins.issues.maybe_generate_external_issue_details")
def test_ai_text_replaces_defaults(self, mock_generate: MagicMock) -> None:
    """LLM text replaces the default title and is woven into the description."""
    mock_generate.return_value = GeneratedExternalIssueDetails(
        title="LLM Title",
        description="LLM Description",
    )

    config = self.installation.get_create_issue_config(self.group, self.user)

    def field(name):
        return next(f for f in config if f["name"] == name)

    assert field("title")["default"] == "LLM Title"
    description_default = field("description")["default"]
    assert description_default.startswith("**")
    assert "LLM Description" in description_default

@patch("sentry.integrations.mixins.issues.maybe_generate_external_issue_details")
def test_falls_back_when_ai_returns_empty(self, mock_generate: MagicMock) -> None:
    """An all-None generation result leaves the default title in place."""
    mock_generate.return_value = GeneratedExternalIssueDetails(title=None, description=None)

    fields = self.installation.get_create_issue_config(self.group, self.user)
    title_default = next(f["default"] for f in fields if f["name"] == "title")

    assert title_default == self.group.get_latest_event().title

@patch("sentry.integrations.utils.external_issues.make_llm_generate_request")
def test_feature_flag_disabled_skips_ai(self, mock_request: MagicMock) -> None:
    """With no feature flags enabled, the Seer request is never made."""
    fields = self.installation.get_create_issue_config(self.group, self.user)
    title_default = next(f["default"] for f in fields if f["name"] == "title")

    assert title_default == self.group.get_latest_event().title
    mock_request.assert_not_called()

@patch("sentry.integrations.utils.external_issues.make_llm_generate_request")
def test_hide_ai_features_skips_ai(self, mock_request: MagicMock) -> None:
    """The org-level AI opt-out wins even when both feature flags are on."""
    self.group.organization.update_option("sentry:hide_ai_features", True)

    enabled_flags = [
        "organizations:gen-ai-features",
        "organizations:external-issues-ai-generate",
    ]
    with self.feature(enabled_flags):
        fields = self.installation.get_create_issue_config(self.group, self.user)

    title_default = next(f["default"] for f in fields if f["name"] == "title")
    assert title_default == self.group.get_latest_event().title
    mock_request.assert_not_called()

@patch("sentry.integrations.utils.external_issues.make_llm_generate_request")
def test_ai_exception_falls_back(self, mock_request: MagicMock) -> None:
    """A Seer request failure degrades gracefully to the default title."""
    mock_request.side_effect = Exception("Connection error")

    enabled_flags = [
        "organizations:gen-ai-features",
        "organizations:external-issues-ai-generate",
    ]
    with self.feature(enabled_flags):
        fields = self.installation.get_create_issue_config(self.group, self.user)

    title_default = next(f["default"] for f in fields if f["name"] == "title")
    assert title_default == self.group.get_latest_event().title
Loading
Loading