2 parents 1a12b67 + 82838bd  commit 8592c63
config/config.example-model-ppio.toml
@@ -0,0 +1,17 @@
+# Global LLM configuration
+[llm] #PPIO:
+api_type = 'ppio'
+model = "deepseek/deepseek-v3-0324"             # The LLM model to use
+base_url = "https://api.ppinfra.com/v3/openai"  # API endpoint URL
+api_key = "your ppio api key"                   # Your API key
+max_tokens = 16000                              # Maximum number of tokens in the response
+temperature = 0.0                               # Controls randomness
+
+
+[llm.vision] #PPIO VISION:
+
+model = "qwen/qwen2.5-vl-72b-instruct"          # The vision model to use
+base_url = "https://api.ppinfra.com/v3/openai"  # API endpoint URL for vision model
+api_key = "your ppio api key"                   # Your API key for vision model
+max_tokens = 96000                              # Maximum number of tokens in the response
+temperature = 0.0                               # Controls randomness for vision model
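
For reference, below is a minimal sketch of reading a config like the one added above with Python's standard tomllib module. The section and key names ([llm], [llm.vision], model, base_url, api_key) match the TOML in this commit, but the load_ppio_config function and the config path are illustrative assumptions, not the project's actual configuration loader.

import tomllib  # Python 3.11+; on older versions, the third-party "tomli" package provides the same API


def load_ppio_config(path: str = "config/config.toml") -> dict:
    """Load the TOML config and return the [llm] and [llm.vision] settings."""
    with open(path, "rb") as f:          # tomllib requires a binary file handle
        config = tomllib.load(f)

    llm = config["llm"]                  # global LLM settings (PPIO)
    vision = llm.get("vision", {})       # [llm.vision] is a sub-table of [llm]
    return {"llm": llm, "vision": vision}


if __name__ == "__main__":
    settings = load_ppio_config()
    # e.g. "deepseek/deepseek-v3-0324" served via https://api.ppinfra.com/v3/openai
    print(settings["llm"]["model"], settings["llm"]["base_url"])

Since base_url points at PPIO's OpenAI-compatible endpoint (the path ends in /v3/openai), the loaded model, base_url, and api_key values can typically be passed straight to an OpenAI-style client.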