Commit f45b305

Add service for deploying the application (#22)
Make it possible to deploy the application as an Anyscale service by adding a service definition, `service.yaml`.
1 parent e99f095 commit f45b305

File tree: 2 files changed (+11 −2)

app/serve.py

+5 −2

@@ -3,7 +3,6 @@
 
 import os
 
-import query
 import ray
 import requests
 from fastapi import FastAPI
@@ -12,6 +11,7 @@
 from slack_bolt import App
 from slack_bolt.adapter.socket_mode import SocketModeHandler
 
+from app import query
 from app.config import (
     DB_CONNECTION_STRING,
     OPENAI_API_KEY,
@@ -69,7 +69,10 @@ class Answer(BaseModel):
 @serve.ingress(app)
 class RayAssistantDeployment:
     def __init__(self):
-        self.agent = query.QueryAgent()
+        self.agent = query.QueryAgent(
+            llm="meta-llama/Llama-2-70b-chat-hf",
+            max_context_length=4096,
+        )
         self.app = SlackApp.remote()
         # Run the Slack app in the background
         self.runner = self.app.run.remote()
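
For context with the `service.yaml` below: its `import_path: app.serve:deployment` points at a module-level object in `app/serve.py`. The diff only shows the constructor change, so the following is a minimal sketch, assuming the usual Ray Serve 2.x pattern of exposing a bound deployment named `deployment`; the stub agent and the `/query` route are illustrative stand-ins, not the repo's actual code.

```python
# Minimal sketch of the structure implied by the diff and by service.yaml's
# `import_path: app.serve:deployment` (Ray Serve 2.x style). The agent below is
# a stand-in for query.QueryAgent so the sketch stays self-contained; the real
# constructor now takes llm= and max_context_length= as shown in the diff.
from fastapi import FastAPI
from ray import serve

app = FastAPI()


@serve.deployment
@serve.ingress(app)
class RayAssistantDeployment:
    def __init__(self):
        # Stand-in for: query.QueryAgent(llm="meta-llama/Llama-2-70b-chat-hf",
        #                                max_context_length=4096)
        self.agent = lambda question: f"(stub answer for) {question}"

    @app.post("/query")
    async def query(self, question: str) -> dict:
        return {"answer": self.agent(question)}


# A module-level bound deployment is what an import path like
# "app.serve:deployment" conventionally resolves to.
deployment = RayAssistantDeployment.bind()
```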

app/service.yaml

+6

@@ -0,0 +1,6 @@
+name: "ray-assistant"
+cluster_env: ray-assistant
+ray_serve_config:
+  import_path: app.serve:deployment
+  runtime_env:
+    working_dir: "https://github.com/ray-project/llm-applications/archive/refs/tags/v0.0.1.zip"
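
Field by field: `name` is the Anyscale service name, `cluster_env` selects the cluster environment to run on, `ray_serve_config.import_path` tells Ray Serve which object to deploy, and `runtime_env.working_dir` fetches the pinned `v0.0.1` source archive onto the cluster. Such a definition is typically rolled out with the Anyscale CLI (e.g. `anyscale service rollout -f app/service.yaml`; exact flags depend on CLI version). Before rolling out, the same entry point can be smoke-tested locally; the snippet below is a sketch and assumes the repo is importable and that the secrets `serve.py` needs (Slack tokens, `OPENAI_API_KEY`, `DB_CONNECTION_STRING`) are set in the environment.

```python
# Local smoke test for the object service.yaml deploys. Resolves the
# "module:attribute" import path by hand, mirroring what Ray Serve does with
# ray_serve_config.import_path, then runs it on a local Ray cluster.
from importlib import import_module

from ray import serve

IMPORT_PATH = "app.serve:deployment"  # same value as in service.yaml

module_name, attr = IMPORT_PATH.split(":")
deployment = getattr(import_module(module_name), attr)

serve.run(deployment)  # deploys the bound app locally
```

Pinning `working_dir` to the tagged release archive keeps the deployed service tied to a fixed snapshot of the code rather than the moving `main` branch.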
