Skip to content

Commit d88010f

Browse files
update Readme
1 parent 2ad34c3 commit d88010f

File tree

1 file changed

+66
-1
lines changed

1 file changed

+66
-1
lines changed

README.md

+66-1
Original file line numberDiff line numberDiff line change
@@ -322,9 +322,12 @@ public struct ChatCompletionParameters: Encodable {
322322
/// Modify the likelihood of specified tokens appearing in the completion.
323323
/// Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. Defaults to null.
324324
public var logitBias: [Int: Double]?
325+
/// Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the content of message. This option is currently not available on the gpt-4-vision-preview model. Defaults to false.
326+
public var logprobs: Bool?
327+
/// An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. logprobs must be set to true if this parameter is used.
328+
public var topLogprobs: Int?
325329
/// The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion.
326330
/// The total length of input tokens and generated tokens is limited by the model's context length. Example [Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) for counting tokens.
327-
/// Defaults to inf
328331
public var maxTokens: Int?
329332
/// How many chat completion choices to generate for each input message. Defaults to 1.
330333
public var n: Int?
@@ -781,6 +784,8 @@ public struct ChatCompletionObject: Decodable {
781784
public let index: Int
782785
/// A chat completion message generated by the model.
783786
public let message: ChatMessage
787+
/// Log probability information for the choice.
788+
public let logprobs: LogProb?
784789

785790
public struct ChatMessage: Decodable {
786791

@@ -801,6 +806,36 @@ public struct ChatCompletionObject: Decodable {
801806
let type: String
802807
}
803808
}
809+
810+
public struct LogProb: Decodable {
811+
/// A list of message content tokens with log probability information.
812+
let content: [TokenDetail]
813+
}
814+
815+
public struct TokenDetail: Decodable {
816+
/// The token.
817+
let token: String
818+
/// The log probability of this token.
819+
let logprob: Double
820+
/// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
821+
let bytes: [Int]?
822+
/// List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.
823+
let topLogprobs: [TopLogProb]
824+
825+
enum CodingKeys: String, CodingKey {
826+
case token, logprob, bytes
827+
case topLogprobs = "top_logprobs"
828+
}
829+
830+
struct TopLogProb: Decodable {
831+
/// The token.
832+
let token: String
833+
/// The log probability of this token.
834+
let logprob: Double
835+
/// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
836+
let bytes: [Int]?
837+
}
838+
}
804839
}
805840

806841
public struct ChatUsage: Decodable {
@@ -866,6 +901,36 @@ public struct ChatCompletionChunkObject: Decodable {
866901
public let role: String?
867902
}
868903

904+
public struct LogProb: Decodable {
905+
/// A list of message content tokens with log probability information.
906+
let content: [TokenDetail]
907+
}
908+
909+
public struct TokenDetail: Decodable {
910+
/// The token.
911+
let token: String
912+
/// The log probability of this token.
913+
let logprob: Double
914+
/// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
915+
let bytes: [Int]?
916+
/// List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.
917+
let topLogprobs: [TopLogProb]
918+
919+
enum CodingKeys: String, CodingKey {
920+
case token, logprob, bytes
921+
case topLogprobs = "top_logprobs"
922+
}
923+
924+
struct TopLogProb: Decodable {
925+
/// The token.
926+
let token: String
927+
/// The log probability of this token.
928+
let logprob: Double
929+
/// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
930+
let bytes: [Int]?
931+
}
932+
}
933+
869934
/// Provided by the Vision API.
870935
public struct FinishDetails: Decodable {
871936
let type: String

0 commit comments

Comments (0)