
Commit 03abda8

removed prefect support
1 parent 528a43c

File tree (5 files changed, +34 -103 lines changed)

deploy/docker/osbot-llms/Dockerfile
osbot_llms/fast_api/routes/Routes__Chat.py
poetry.lock
pyproject.toml
tests/integration/test__prefect_support.py

deploy/docker/osbot-llms/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 FROM python:3.12-slim
 
 RUN pip install mangum uvicorn httpx openai numpy
-RUN pip install osbot-aws osbot-fast-api osbot-prefect
+RUN pip install osbot-aws osbot-fast-api
 
 COPY --from=public.ecr.aws/awsguru/aws-lambda-adapter:0.8.4 /lambda-adapter /opt/extensions/lambda-adapter
 
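With osbot-prefect gone from the image's pip installs, a quick sanity check for the trimmed image is an import smoke test run inside the built container. A minimal sketch; the file name and messages are hypothetical, and the package lists come from the two RUN lines above:

# smoke_test_image.py (hypothetical): verify the trimmed image still has its
# required packages and that osbot_prefect is genuinely gone.
import importlib

required = ['mangum', 'uvicorn', 'httpx', 'openai', 'numpy',   # first RUN line
            'osbot_aws', 'osbot_fast_api']                     # second RUN line
for name in required:
    importlib.import_module(name)           # raises ModuleNotFoundError if absent

try:
    importlib.import_module('osbot_prefect')
    raise AssertionError('osbot_prefect should no longer be installed')
except ModuleNotFoundError:
    print('ok: required packages present, osbot_prefect removed')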

osbot_llms/fast_api/routes/Routes__Chat.py

Lines changed: 13 additions & 19 deletions
@@ -2,21 +2,18 @@
 import traceback
 
 from fastapi import Request
-from fastapi.params import Header, Body
-from osbot_prefect.flows.Flow_Events__To__Prefect_Server import Flow_Events__To__Prefect_Server
-from osbot_utils.helpers.Random_Guid import Random_Guid
-from osbot_utils.helpers.flows.Flow import Flow
-from osbot_utils.helpers.flows.decorators.flow import flow
-from osbot_utils.helpers.flows.models.Flow__Config import Flow__Config
-from osbot_utils.utils.Dev import pprint
+from fastapi.params import Header
+from osbot_utils.helpers.flows.Flow import Flow
+from osbot_utils.helpers.flows.decorators.flow import flow
+from osbot_utils.helpers.flows.models.Flow_Run__Config import Flow_Run__Config
 from starlette.responses import StreamingResponse
 from osbot_fast_api.api.Fast_API_Routes import Fast_API_Routes
 from osbot_utils.context_managers.capture_duration import capture_duration
 from osbot_llms.OSBot_LLMs__Shared_Objects import osbot_llms__shared_objects
 from osbot_llms.fast_api.routes.Routes__OpenAI import Routes__OpenAI
 from osbot_llms.llms.chats.LLM__Chat_Completion__Resolve_Engine import LLM__Chat_Completion__Resolve_Engine
 from osbot_llms.llms.storage.Chats_Storage__S3_Minio import Chats_Storage__S3_Minio
-from osbot_llms.models.LLMs__Chat_Completion import LLMs__Chat_Completion, SWAGGER_EXAMPLE__LLMs__Chat_Completion
+from osbot_llms.models.LLMs__Chat_Completion import LLMs__Chat_Completion,  SWAGGER_EXAMPLE__LLMs__Chat_Completion
 
 ROUTES_PATHS__CONFIG = ['/config/status', '/config/version']
 HEADER_NAME__CHAT_ID = 'osbot-llms-chat-id'
@@ -36,38 +33,35 @@ def execute_llm_request(self, llm_chat_completion):
         return 'no engine'
 
     async def handle_other_llms(self, llm_chat_completion: LLMs__Chat_Completion, request: Request, request_id: str):
-        @flow(flow_config=Flow__Config(log_to_console=True))
+        @flow(flow_config=Flow_Run__Config(log_to_console=True))
         def handle_other_llms__streamer() -> Flow:
             print("in handle_other_llms__streamer")
             print(llm_chat_completion.json())
             return StreamingResponse(self.handle_other_llms__streamer(llm_chat_completion, request, request_id), media_type='text/event-stream; charset=utf-8')
 
         stream = llm_chat_completion.stream
         if stream:
-            with Flow_Events__To__Prefect_Server():
-                with handle_other_llms__streamer() as _:
-                    _.execute_flow()
-                    return _.flow_return_value
+            with handle_other_llms__streamer() as _:
+                _.execute_flow()
+                return _.flow_return_value
         else:
             return await self.handle_other_llms__no_stream(llm_chat_completion, request, request_id)
 
     async def handle_other_llms__no_stream(self, llm_chat_completion: LLMs__Chat_Completion, request: Request, request_id: str):
-        @flow(flow_config=Flow__Config(log_to_console=True))
+        @flow(flow_config=Flow_Run__Config(log_to_console=True))
         def flow_handle_other_llms__no_stream() -> Flow:
             print("in handle_other_llms__streamer")
             print(llm_chat_completion.json())
             complete_answer = self.execute_llm_request(llm_chat_completion)
             try:
-                #request_headers = {key: value for key, value in request.headers.items()}
                 llm_chat_completion.llm_answer = complete_answer
             except:
                 pass
             return complete_answer
 
-        with Flow_Events__To__Prefect_Server():
-            with flow_handle_other_llms__no_stream() as _:
-                _.execute_flow()
-                return _.flow_return_value
+        with flow_handle_other_llms__no_stream() as _:
+            _.execute_flow()
+            return _.flow_return_value
 
 
     async def handle_other_llms__streamer(self, llm_chat_completion: LLMs__Chat_Completion, request: Request, request_id: str):
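After this change the routes drive flows entirely locally: Flow_Run__Config replaces the old Flow__Config, and there is no Flow_Events__To__Prefect_Server wrapper reporting runs to a Prefect server. Below is a minimal sketch of the pattern, built only from the imports and calls visible in this diff; demo_flow and its return value are hypothetical stand-ins for the route handlers:

from osbot_utils.helpers.flows.Flow import Flow
from osbot_utils.helpers.flows.decorators.flow import flow
from osbot_utils.helpers.flows.models.Flow_Run__Config import Flow_Run__Config

@flow(flow_config=Flow_Run__Config(log_to_console=True))
def demo_flow() -> Flow:              # hypothetical flow, mirrors handle_other_llms__streamer
    print("in demo_flow")
    return 'demo-result'

with demo_flow() as _:                # calling the decorated function returns a Flow object
    _.execute_flow()                  # runs the flow body locally; nothing is sent to Prefect
    result = _.flow_return_value      # 'demo-result', the inner function's return value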

poetry.lock

Lines changed: 20 additions & 49 deletions
Some generated files are not rendered by default.

pyproject.toml

Lines changed: 0 additions & 3 deletions
@@ -1,5 +1,3 @@
-# poetry install didn't work because of pendulum (which is a dependency of prefect)
-# pip install prefect did work
 [tool.poetry]
 name = "osbot_llms"
 version = "v0.2.12"
@@ -14,7 +12,6 @@ repository = "https://github.com/owasp-sbot/OSBot-LLMs"
 python = "^3.11"
 osbot-aws = "*"
 osbot-fast-api = "*"
-osbot-prefect = "*"
 mangum = "*"
 uvicorn = "*"
 httpx = "*"

tests/integration/test__prefect_support.py

Lines changed: 0 additions & 31 deletions
This file was deleted.
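The deleted file covered the Prefect integration this commit removes, so there is nothing left for it to test. If a regression guard is wanted, one option is a small test asserting the dependency stays out; a sketch only, where the file name and test are hypothetical and not part of this commit:

# tests/integration/test__prefect_removed.py (hypothetical replacement guard)
import pytest

def test_osbot_prefect_is_not_installed():
    with pytest.raises(ModuleNotFoundError):   # osbot-prefect was dropped from pyproject.toml and the Dockerfile
        import osbot_prefect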
