# Postgres Database Configuration
DB_HOST=
DB_PORT=
DB_NAME=
DB_USER=
DB_PASSWORD=
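# Hypothetical example for local development (illustrative values only, not defaults of this project):
# DB_HOST=localhost
# DB_PORT=5432
# DB_NAME=my_database
# DB_USER=postgres
# DB_PASSWORD=changeme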
# If you do not require a certificate, you can leave these blank
# If you need an SSL connection to the DB, upload the certificate to Amazon S3 and provide the link to the certificate below
DB_CERTIFICATE_LINK=
DB_CERTIFICATE_REGION=
DB_CERTIFICATE_BUCKET_NAME=
DB_CERTIFICATE_FILE=
DB_CERTIFICATE_ACCESS_KEY_ID=
DB_CERTIFICATE_SECRET_ACCESS_KEY=
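# Hypothetical example (illustrative S3 values only; the object should be the CA certificate your Postgres host uses):
# DB_CERTIFICATE_LINK=https://my-cert-bucket.s3.us-east-1.amazonaws.com/db-ca-cert.pem
# DB_CERTIFICATE_REGION=us-east-1
# DB_CERTIFICATE_BUCKET_NAME=my-cert-bucket
# DB_CERTIFICATE_FILE=db-ca-cert.pem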
# GitHub token used to raise the rate limit when reading repositories
GITHUB_TOKEN=
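# Hypothetical example; a personal access token with read access to the repositories you analyze should suffice:
# GITHUB_TOKEN=ghp_exampletoken1234567890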
# LLM_PROVIDER=deepseek | google (use google for Google AI Studio)
LLM_PROVIDER=
LLM_APIKEY=
LLM_MODELNAME=
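# Hypothetical example (the model name is illustrative; use whichever model your provider exposes):
# LLM_PROVIDER=deepseek
# LLM_APIKEY=sk-your-api-key
# LLM_MODELNAME=deepseek-chat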
# DeepSeek's 64k context window prevents us from sending the entire codebase at once, so we cap the amount of code per request
# This variable limits the amount of code to be processed, measured in characters
# We count characters rather than words because code contains many special characters and spaces, which makes word counts unreliable. A tiktoken-based counter would be more accurate, but it takes much longer to run
# If you are unsure, leave it blank; a default value is defined in src/service/config.ts
TOKEN_PROCESSING_CHARACTER_LIMIT=30000 # Suited to a 64k context window; processes roughly 700-900 lines of code
# Maximum number of retries for submitting the code. If the input is still over the limit after the maximum retries, processing stops, to prevent a huge input-token bill
TOKEN_PROCESSING_MAX_RETRIES=3
# Number of characters to trim on each retry: retry n processes roughly TOKEN_PROCESSING_CHARACTER_LIMIT - TOKEN_PROCESSING_REDUCE_CHAR_PER_RETRY * n characters
TOKEN_PROCESSING_REDUCE_CHAR_PER_RETRY=3000 # Suited to a 64k context window
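# Worked example with the values above (a sketch, assuming the retry counter starts at 1):
#   retry 1: 30000 - 3000 * 1 = 27000 characters processed
#   retry 2: 30000 - 3000 * 2 = 24000 characters processed
#   retry 3: 30000 - 3000 * 3 = 21000 characters processed, then processing stops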
NEXT_PUBLIC_API_ENDPOINT=