diff --git a/frontend/rust-lib/flowy-ai/dev.env b/frontend/rust-lib/flowy-ai/dev.env
index 5cff5dd8589b3..60fb4386cdeba 100644
--- a/frontend/rust-lib/flowy-ai/dev.env
+++ b/frontend/rust-lib/flowy-ai/dev.env
@@ -1,5 +1,11 @@
+# Set the path to the main binary/executable for the AI application (e.g., chat program)
+CHAT_BIN_PATH=/usr/local/bin/your-ai-chat-program
-CHAT_BIN_PATH=
-LOCAL_AI_MODEL_DIR=
-LOCAL_AI_CHAT_MODEL_NAME=
-LOCAL_AI_EMBEDDING_MODEL_NAME=
+# Set the directory where your local AI models (e.g., Llama, Mistral) are stored
+LOCAL_AI_MODEL_DIR=/home/user/models/ai/
+
+# Set the specific file name or identifier for the model used for generating chat responses
+LOCAL_AI_CHAT_MODEL_NAME=Llama-2-7b-chat.gguf
+
+# Set the specific file name or identifier for the model used for converting text to vector embeddings
+LOCAL_AI_EMBEDDING_MODEL_NAME=bge-small-en-v1.5.bin
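
For context, a minimal Rust sketch of how these four dev.env values could be consumed once they are exported as process environment variables. The struct and function names here are hypothetical illustrations, not part of the flowy-ai crate, and the actual loading path used by AppFlowy is not shown in this diff.

use std::env;

/// Hypothetical container for the four settings defined in dev.env.
#[derive(Debug)]
struct LocalAiConfig {
    chat_bin_path: String,
    model_dir: String,
    chat_model_name: String,
    embedding_model_name: String,
}

impl LocalAiConfig {
    /// Read each variable from the process environment, falling back to an
    /// empty string when unset (mirroring the previously empty defaults).
    fn from_env() -> Self {
        let get = |key: &str| env::var(key).unwrap_or_default();
        Self {
            chat_bin_path: get("CHAT_BIN_PATH"),
            model_dir: get("LOCAL_AI_MODEL_DIR"),
            chat_model_name: get("LOCAL_AI_CHAT_MODEL_NAME"),
            embedding_model_name: get("LOCAL_AI_EMBEDDING_MODEL_NAME"),
        }
    }
}

fn main() {
    // Example usage: print whatever the environment currently provides.
    let config = LocalAiConfig::from_env();
    println!("{config:?}");
}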