Skip to content
This repository was archived by the owner on Sep 3, 2025. It is now read-only.

Commit 6299684

Browse files
Merge branch 'master' into enhancement/show-response-cost-update-time
2 parents 334d5b6 + 24a36fe commit 6299684

File tree

29 files changed

+642
-417
lines changed

29 files changed

+642
-417
lines changed

.nvmrc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
16.13.0
1+
20.18.0

docker/Dockerfile

Lines changed: 19 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
FROM python:3.11.4-slim-bullseye as sdist
1+
FROM python:3.13.0-slim-bullseye as sdist
22

33
LABEL maintainer="[email protected]"
44
LABEL org.opencontainers.image.title="Dispatch PyPI Wheel"
@@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
1717
wget \
1818
&& rm -rf /var/lib/apt/lists/*
1919

20-
RUN wget --quiet -O - https://deb.nodesource.com/setup_16.x | bash - \
20+
RUN wget --quiet -O - https://deb.nodesource.com/setup_20.x | bash - \
2121
&& apt-get install -y nodejs --no-install-recommends
2222

2323
ARG SOURCE_COMMIT
@@ -56,7 +56,7 @@ RUN YARN_CACHE_FOLDER="$(mktemp -d)" \
5656
&& mv /usr/src/dispatch/dist /dist
5757

5858
# This is the image to be run
59-
FROM python:3.11.4-slim-bullseye
59+
FROM python:3.13.0-slim-bullseye
6060

6161
LABEL maintainer="[email protected]"
6262
LABEL org.opencontainers.image.title="Dispatch"
@@ -87,7 +87,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
8787
RUN echo "deb http://apt.postgresql.org/pub/repos/apt bullseye-pgdg main" > /etc/apt/sources.list.d/pgdg.list \
8888
&& wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
8989

90-
RUN wget --quiet -O - https://deb.nodesource.com/setup_12.x | bash -
90+
RUN wget --quiet -O - https://deb.nodesource.com/setup_20.x | bash -
9191

9292
COPY --from=sdist /dist/*.whl /tmp/dist/
9393
RUN buildDeps="" \
@@ -104,7 +104,21 @@ RUN buildDeps="" \
104104
pkg-config postgresql-client-14 nodejs \
105105
&& apt-get clean \
106106
&& rm -rf /var/lib/apt/lists/* \
107-
&& npm install mjml --no-cache-dir
107+
# mjml has to be installed differently here because
108+
# after node 14, docker will install npm files at the
109+
# root directory and fail, so we have to create a new
110+
# directory and use it for the install then copy the
111+
# files to the root directory to maintain backwards
112+
# compatibility for email generation
113+
&& mkdir -p /mjml_install \
114+
# if our workdir is /, then pushd/popd doesn't work
115+
# for the npm install. It still tries to install in /,
116+
# which npm can't do
117+
&& cd /mjml_install \
118+
&& npm install --no-cache-dir mjml \
119+
&& mv node_modules / \
120+
&& cd / \
121+
&& rm -rf /mjml_install
108122

109123
EXPOSE 8000
110124
VOLUME /var/lib/dispatch/files

requirements-base.txt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ frozenlist==1.4.1
126126
# aiosignal
127127
google-api-core==2.15.0
128128
# via google-api-python-client
129-
google-api-python-client==2.149.0
129+
google-api-python-client==2.151.0
130130
# via -r requirements-base.in
131131
google-auth==2.26.1
132132
# via
@@ -330,7 +330,7 @@ python-dateutil==2.9.0.post0
330330
# pandas
331331
python-jose==3.3.0
332332
# via -r requirements-base.in
333-
python-multipart==0.0.12
333+
python-multipart==0.0.17
334334
# via -r requirements-base.in
335335
python-slugify==8.0.4
336336
# via -r requirements-base.in
@@ -398,7 +398,7 @@ six==1.16.0
398398
# validators
399399
slack-bolt==1.21.2
400400
# via -r requirements-base.in
401-
slack-sdk==3.33.1
401+
slack-sdk==3.33.3
402402
# via
403403
# -r requirements-base.in
404404
# slack-bolt

requirements-dev.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ executing==2.0.1
3232
# stack-data
3333
factory-boy==3.3.1
3434
# via -r requirements-dev.in
35-
faker==30.8.1
35+
faker==30.8.2
3636
# via
3737
# -r requirements-dev.in
3838
# factory-boy
@@ -86,7 +86,7 @@ python-dateutil==2.9.0.post0
8686
# via faker
8787
pyyaml==6.0.1
8888
# via pre-commit
89-
ruff==0.7.1
89+
ruff==0.7.2
9090
# via -r requirements-dev.in
9191
six==1.16.0
9292
# via

src/dispatch/ai/exceptions.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
from dispatch.exceptions import DispatchException


class GenAIException(DispatchException):
    """Raised when GenAI context or summary generation cannot proceed."""

src/dispatch/ai/service.py

Lines changed: 211 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,211 @@
1+
import json
2+
import logging
3+
4+
from sqlalchemy.orm import Session
5+
6+
from dispatch.case.enums import CaseResolutionReason
7+
from dispatch.case.models import Case
8+
from dispatch.plugin import service as plugin_service
9+
from dispatch.signal import service as signal_service
10+
11+
from .exceptions import GenAIException
12+
13+
log = logging.getLogger(__name__)
14+
15+
def generate_case_signal_historical_context(case: Case, db_session: Session) -> str:
    """
    Generate historical context for a case stemming from a signal, including related cases and relevant data.

    Args:
        case (Case): The case object for which historical context is being generated.
        db_session (Session): The database session used for querying related data.

    Returns:
        str: A newline-joined string of <case>...</case> entries describing related cases.

    Raises:
        GenAIException: If the signal instance or signal cannot be found, or if the
            GenAI feature is not enabled for the signal.
    """
    # we fetch the first instance id and signal
    (first_instance_id, first_instance_signal) = signal_service.get_instances_in_case(
        db_session=db_session, case_id=case.id
    ).first()

    signal_instance = signal_service.get_signal_instance(
        db_session=db_session, signal_instance_id=first_instance_id
    )

    # Check if the signal instance is valid
    if not signal_instance:
        message = "Unable to generate historical context. Signal instance not found."
        log.warning(message)
        raise GenAIException(message)

    # Check if the signal is valid
    if not signal_instance.signal:
        message = "Unable to generate historical context. Signal not found."
        log.warning(message)
        raise GenAIException(message)

    # Check if GenAI is enabled for the signal
    if not signal_instance.signal.genai_enabled:
        message = (
            "Unable to generate historical context. GenAI feature not enabled for this detection."
        )
        log.warning(message)
        raise GenAIException(message)

    # we fetch related cases resolved for each known resolution reason,
    # excluding the case we are generating context for
    related_cases = []
    for resolution_reason in CaseResolutionReason:
        related_cases.extend(
            signal_service.get_cases_for_signal_by_resolution_reason(
                db_session=db_session,
                signal_id=first_instance_signal.id,
                resolution_reason=resolution_reason,
            )
            .from_self()  # NOTE: function deprecated in SQLAlchemy 1.4 and removed in 2.0
            .filter(Case.id != case.id)
        )

    # the conversation plugin does not vary per related case, so look it up once
    # instead of once per loop iteration
    conversation_plugin = plugin_service.get_active_instance(
        db_session=db_session, project_id=case.project.id, plugin_type="conversation"
    )

    # we prepare historical context
    historical_context = []
    for related_case in related_cases:
        historical_context.append("<case>")
        historical_context.append(f"<case_name>{related_case.name}</case_name>")
        # NOTE(fix): closing tag previously lacked its ">" ("</case_resolution"),
        # producing malformed markup in the generated context
        historical_context.append(f"<case_resolution>{related_case.resolution}</case_resolution>")
        historical_context.append(
            f"<case_resolution_reason>{related_case.resolution_reason}</case_resolution_reason>"
        )
        historical_context.append(
            f"<case_alert_data>{related_case.signal_instances[0].raw}</case_alert_data>"
        )
        if conversation_plugin:
            if related_case.conversation and related_case.conversation.channel_id:
                # we fetch conversation replies for the related case
                conversation_replies = conversation_plugin.instance.get_conversation_replies(
                    conversation_id=related_case.conversation.channel_id,
                    thread_ts=related_case.conversation.thread_id,
                )
                for reply in conversation_replies:
                    historical_context.append(
                        f"<case_conversation_reply>{reply}</case_conversation_reply>"
                    )
        else:
            log.warning(
                "Conversation replies not included in historical context. No conversation plugin enabled."
            )
        historical_context.append("</case>")

    return "\n".join(historical_context)
102+
103+
104+
def generate_case_signal_summary(case: Case, db_session: Session) -> dict[str, str]:
105+
"""
106+
Generate an analysis summary of a case stemming from a signal.
107+
108+
Args:
109+
case (Case): The case object for which the analysis summary is being generated.
110+
db_session (Session): The database session used for querying related data.
111+
112+
Returns:
113+
dict: A dictionary containing the analysis summary, or an error message if the summary generation fails.
114+
"""
115+
# we generate the historical context
116+
try:
117+
historical_context = generate_case_signal_historical_context(
118+
case=case, db_session=db_session
119+
)
120+
except GenAIException as e:
121+
log.warning(f"Error generating GenAI historical context for {case.name}: {str(e)}")
122+
raise e
123+
124+
# we fetch the artificial intelligence plugin
125+
genai_plugin = plugin_service.get_active_instance(
126+
db_session=db_session, project_id=case.project.id, plugin_type="artificial-intelligence"
127+
)
128+
129+
# we check if the artificial intelligence plugin is enabled
130+
if not genai_plugin:
131+
message = (
132+
"Unable to generate GenAI signal analysis. No artificial-intelligence plugin enabled."
133+
)
134+
log.warning(message)
135+
raise GenAIException(message)
136+
137+
# we fetch the first instance id and signal
138+
(first_instance_id, first_instance_signal) = signal_service.get_instances_in_case(
139+
db_session=db_session, case_id=case.id
140+
).first()
141+
142+
signal_instance = signal_service.get_signal_instance(
143+
db_session=db_session, signal_instance_id=first_instance_id
144+
)
145+
146+
# Check if the signal instance is valid
147+
if not signal_instance:
148+
message = "Unable to generate GenAI signal analysis. Signal instance not found."
149+
log.warning(message)
150+
raise GenAIException(message)
151+
152+
# Check if the signal is valid
153+
if not signal_instance.signal:
154+
message = "Unable to generate GenAI signal analysis. Signal not found."
155+
log.warning(message)
156+
raise GenAIException(message)
157+
158+
# Check if GenAI is enabled for the signal
159+
if not signal_instance.signal.genai_enabled:
160+
message = f"Unable to generate GenAI signal analysis. GenAI feature not enabled for {signal_instance.signal.name}."
161+
log.warning(message)
162+
raise GenAIException(message)
163+
164+
# we check if the signal has a prompt defined
165+
if not signal_instance.signal.genai_prompt:
166+
message = f"Unable to generate GenAI signal analysis. No GenAI prompt defined for {signal_instance.signal.name}."
167+
log.warning(message)
168+
raise GenAIException(message)
169+
170+
# we generate the analysis
171+
response = genai_plugin.instance.chat_completion(
172+
prompt=f"""
173+
174+
<prompt>
175+
{signal_instance.signal.genai_prompt}
176+
</prompt>
177+
178+
<current_event>
179+
{str(signal_instance.raw)}
180+
</current_event>
181+
182+
<runbook>
183+
{signal_instance.signal.runbook}
184+
</runbook>
185+
186+
<historical_context>
187+
{historical_context}
188+
</historical_context>
189+
190+
"""
191+
)
192+
193+
try:
194+
summary = json.loads(
195+
response["choices"][0]["message"]["content"]
196+
.replace("```json", "")
197+
.replace("```", "")
198+
.strip()
199+
)
200+
201+
# we check if the summary is empty
202+
if not summary:
203+
message = "Unable to generate GenAI signal analysis. We received an empty response from the artificial-intelligence plugin."
204+
log.warning(message)
205+
raise GenAIException(message)
206+
207+
return summary
208+
except json.JSONDecodeError as e:
209+
message = "Unable to generate GenAI signal analysis. Error decoding response from the artificial-intelligence plugin."
210+
log.warning(message)
211+
raise GenAIException(message) from e

src/dispatch/case/flows.py

Lines changed: 30 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,12 +33,13 @@
3333
from dispatch.storage.enums import StorageAction
3434
from dispatch.ticket import flows as ticket_flows
3535

36+
from .enums import CaseResolutionReason, CaseStatus
3637
from .messaging import (
3738
send_case_created_notifications,
3839
send_case_rating_feedback_message,
3940
send_case_update_notifications,
4041
)
41-
from .models import Case, CaseStatus
42+
from .models import Case
4243
from .service import get
4344

4445
log = logging.getLogger(__name__)
@@ -191,6 +192,30 @@ def update_conversation(case: Case, db_session: Session) -> None:
191192
)
192193

193194

def case_auto_close_flow(case: Case, db_session: Session) -> None:
    """Runs the case auto close flow.

    Marks the case as closed with a canned resolution and the
    ``user_acknowledge`` resolution reason, transitions it through the triage
    and closed status flows, and updates the case conversation thread when one
    exists. Intended for case types configured with auto close enabled.
    """
    # we mark the case as closed
    case.resolution = "Auto closed via case type auto close configuration."
    case.resolution_reason = CaseResolutionReason.user_acknowledge
    case.status = CaseStatus.closed
    db_session.add(case)
    db_session.commit()

    # we transition the case from the new to the closed state
    case_triage_status_flow(
        case=case,
        db_session=db_session,
    )
    case_closed_status_flow(
        case=case,
        db_session=db_session,
    )

    if case.conversation and case.has_thread:
        # we update the case conversation
        update_conversation(case=case, db_session=db_session)
194219
def case_new_create_flow(
195220
*,
196221
case_id: int,
@@ -253,6 +278,10 @@ def case_new_create_flow(
253278
log.warning("Case assignee not paged. No plugin of type oncall enabled.")
254279
return case
255280

281+
if case and case.case_type.auto_close:
282+
# we transition the case to the closed state if its case type has auto close enabled
283+
case_auto_close_flow(case=case, db_session=db_session)
284+
256285
return case
257286

258287

0 commit comments

Comments
 (0)