Skip to content

Commit 980f47d

Browse files
committed
Replace map object with serde_json Value
1 parent 3f7aca8 commit 980f47d

File tree

2 files changed

+6
-11
lines changed

2 files changed

+6
-11
lines changed

openai_client/build.rs

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -426,12 +426,7 @@ fn parse_object_type(name: &str, schema: &Yaml, output_file: &mut File) {
426426
if let Some(type_label) = schema_map.get(&Yaml::String("x-oaiTypeLabel".to_string())) {
427427
let type_label_str = type_label.as_str().unwrap();
428428
match type_label_str {
429-
"map" => writeln!(
430-
output_file,
431-
"pub struct {}(pub HashMap<String,String>);\n",
432-
name
433-
)
434-
.unwrap(),
429+
"map" => writeln!(output_file, "pub struct {}(pub serde_json::Value);\n", name).unwrap(),
435430
_ => unimplemented!("{} with type label {:?}", name, type_label),
436431
}
437432
} else {
@@ -1379,7 +1374,7 @@ fn parse_endpoint_path(path_schema: &Yaml, client_output_file: &mut File) {
13791374

13801375
if let Some(response_content_hash) = ok_response["content"].as_hash() {
13811376
if response_content_hash.len() == 1 {
1382-
// This is a special case
1377+
// This is a special case
13831378
if result_type == "String" {
13841379
writeln!(
13851380
client_output_file,

openai_client/src/types.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1099,7 +1099,7 @@ pub struct BatchRequestInput {
10991099

11001100
/** The JSON body of the response */
11011101
#[derive(Debug, PartialEq, Serialize, Deserialize)]
1102-
pub struct BatchRequestOutputResponseBody(pub HashMap<String,String>);
1102+
pub struct BatchRequestOutputResponseBody(pub serde_json::Value);
11031103

11041104
#[derive(Debug, PartialEq, Serialize, Deserialize)]
11051105
pub struct BatchRequestOutputResponse {
@@ -2714,7 +2714,7 @@ The exact effect will vary per model, but values between -1 and 1 should
27142714
decrease or increase likelihood of selection; values like -100 or 100
27152715
should result in a ban or exclusive selection of the relevant token. */
27162716
#[derive(Debug, PartialEq, Serialize, Deserialize)]
2717-
pub struct CreateChatCompletionRequestObjectLogitBias(pub HashMap<String,String>);
2717+
pub struct CreateChatCompletionRequestObjectLogitBias(pub serde_json::Value);
27182718

27192719
/** Configuration for a [Predicted Output](/docs/guides/predicted-outputs),
27202720
which can greatly improve response times when large parts of the model
@@ -3082,7 +3082,7 @@ Accepts a JSON object that maps tokens (specified by their token ID in the GPT t
30823082

30833083
As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token from being generated. */
30843084
#[derive(Debug, PartialEq, Serialize, Deserialize)]
3085-
pub struct CreateCompletionRequestLogitBias(pub HashMap<String,String>);
3085+
pub struct CreateCompletionRequestLogitBias(pub serde_json::Value);
30863086

30873087
#[derive(Debug, PartialEq, Serialize, Deserialize)]
30883088
pub struct CreateCompletionRequest {
@@ -14651,7 +14651,7 @@ pub enum RunStepDeltaStepDetailsToolCallsFileSearchObjectType {
1465114651

1465214652
/** For now, this is always going to be an empty object. */
1465314653
#[derive(Debug, PartialEq, Serialize, Deserialize)]
14654-
pub struct RunStepDeltaStepDetailsToolCallsFileSearchObjectFileSearch(pub HashMap<String,String>);
14654+
pub struct RunStepDeltaStepDetailsToolCallsFileSearchObjectFileSearch(pub serde_json::Value);
1465514655

1465614656
#[derive(Debug, PartialEq, Serialize, Deserialize)]
1465714657
pub struct RunStepDeltaStepDetailsToolCallsFileSearchObject {

0 commit comments

Comments
 (0)