-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconst.py
More file actions
65 lines (55 loc) · 1.63 KB
/
const.py
File metadata and controls
65 lines (55 loc) · 1.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
from enum import Enum
import os
from dotenv import load_dotenv
# Load variables from a local .env file into the process environment
# before any os.getenv() reads (no-op if the file is absent).
load_dotenv()
# OpenAI credential; falls back to "" when OPENAI_API_KEY is unset,
# so downstream clients fail on the API call rather than at import time.
API_KEY = os.getenv("OPENAI_API_KEY", "")
class EmbeddingConfig(str, Enum):
    """Provider prefixes used to tag embedding-model identifiers.

    Subclasses ``str`` so each member compares and concatenates directly
    with plain strings (e.g. ``name.startswith(EmbeddingConfig.OPENAI_PREFIX)``).
    """

    OPENAI_PREFIX = "openai/"            # OpenAI-hosted embedding models
    HUGGINGFACE_PREFIX = "huggingface/"  # Hugging Face hub models
    OLLAMA_PREFIX = "ollama/"            # locally served Ollama models
class PromptConfig:
    """Static prompt configuration: disclaimer text, system personality,
    the selectable model list, and per-model context-window sizes."""

    # Appended verbatim to every answer.
    # NOTE(review): wording has grammar issues ("consult professional
    # doctor", "advices") — left byte-identical here since it is
    # user-facing runtime text; confirm desired copy with the team.
    DISCLAIMER = (
        "\n\n*Please consult professional doctor for accurate medical advices.*"
    )

    # System prompt establishing the assistant's role and its
    # retrieval-augmented behavior (use provided articles only if relevant).
    PERSONALITY = """You are HealthLight, an assistant developed to help bridge medical research to the public.
The system will give you some relevant research articles that you can use.
Do not use research papers if they are irrelevant to the question."""

    # Models selectable by the user; commented-out entries are providers
    # that are currently disabled/unsupported.
    MODELS = [
        "gpt-4o-mini",
        "gpt-3.5-turbo-0125",
        "phi3:latest",
        "llama3:8b",
        "llama3:instruct",
        "gemma:2b",
        "gemma:2b-instruct",
        "gemma:7b-instruct",
        "gemma:latest",
        "gpt-4-0125-preview",
        # 'claude-3-opus-20240229',
        # 'claude-3-sonnet-20240229',
        # 'claude-3-haiku-20240307',
        # 'gemini-1.0-pro',
        # 'gemini-1.5-pro (Not Supported)',
        # 'mixtral-8x7b-32768',
        # 'llama2-70b-4096'
    ]

    # Hard cap on tokens generated per response.
    MAX_OUTPUT = 1200

    # Maximum context window (tokens) per model. Every entry in MODELS
    # must appear here so context-length lookups never raise KeyError.
    MODEL_CONTEXT_LENGTH = {
        # FIX: "gpt-4o-mini" was listed in MODELS but missing here,
        # making lookups for the first/default model fail. Its context
        # window is 128k tokens per OpenAI's model documentation.
        "gpt-4o-mini": 128000,
        "phi3:latest": 128000,
        "llama3:8b": 8192,
        "llama3:instruct": 8192,
        "gemma:2b": 8192,
        "gemma:2b-instruct": 8192,
        "gemma:7b-instruct": 8192,
        "gemma:latest": 8192,
        "gpt-3.5-turbo-0125": 16385,
        "gpt-4-0125-preview": 128000,
        "claude-3-opus-20240229": 200000,
        "claude-3-sonnet-20240229": 200000,
        "claude-3-haiku-20240307": 200000,
        "gemini-1.0-pro": 30720,
        "gemini-1.5-pro (Not Supported)": 1000000,
        "gemma-7b-it": 8192,
        "mixtral-8x7b-32768": 32768,
        "llama2-70b-4096": 4096,
    }