Skip to content

Commit 38083b2

Browse files
committed
Added safe_prompt to the API
1 parent c7153d7 commit 38083b2

File tree

1 file changed

+4
-0
lines changed

1 file changed

+4
-0
lines changed

chat.go

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -28,13 +28,15 @@ type ChatRequestParams struct {
2828
TopP float64 `json:"top_p"` // An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or Temperature but not both.
2929
RandomSeed int `json:"random_seed"`
3030
MaxTokens int `json:"max_tokens"`
31+
SafePrompt bool `json:"safe_prompt"` // Adds a Mistral defined safety message to the system prompt to enforce guardrailing
3132
}
3233

3334
var DefaultChatRequestParams = ChatRequestParams{
3435
Temperature: 1,
3536
TopP: 1,
3637
RandomSeed: 42069,
3738
MaxTokens: 4000,
39+
SafePrompt: false,
3840
}
3941

4042
// ChatMessage represents a single message in a chat.
@@ -97,6 +99,7 @@ func (c *MistralClient) Chat(model string, messages []ChatMessage, params *ChatR
9799
"max_tokens": params.MaxTokens,
98100
"top_p": params.TopP,
99101
"random_seed": params.RandomSeed,
102+
"safe_prompt": params.SafePrompt,
100103
}
101104

102105
response, err := c.request(http.MethodPost, requestData, "v1/chat/completions", false, nil)
@@ -133,6 +136,7 @@ func (c *MistralClient) ChatStream(model string, messages []ChatMessage, params
133136
"max_tokens": params.MaxTokens,
134137
"top_p": params.TopP,
135138
"random_seed": params.RandomSeed,
139+
"safe_prompt": params.SafePrompt,
136140
"stream": true,
137141
}
138142

0 commit comments

Comments (0)