Skip to content
This repository was archived by the owner on Jan 2, 2025. It is now read-only.

Commit e0d3fec

Browse files
authored
Null values, rate limit fix and gpt-4o (#1294)
* fix: refusal and content Null calls
* fix: 429 rate limit
* refactor: close previous response
* feat: default to gpt-4o instead of gpt-4-turbo
1 parent f4a4a12 commit e0d3fec

File tree

3 files changed

+36
-6
lines changed

3 files changed

+36
-6
lines changed

apps/desktop/dist/.keep

Whitespace-only changes.

server/bleep/src/llm/call.rs

Lines changed: 35 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -80,10 +80,16 @@ struct OpenAiRequest {
8080
pub async fn llm_call(
8181
req: api::LLMRequest,
8282
) -> anyhow::Result<impl Stream<Item = Result<Delta, api::Error>>> {
83+
// print req.messages.messages
84+
for message in &req.messages.messages {
85+
println!("OpenAiMessage: {:?}", message);
86+
}
87+
8388
let model = match req.model.as_deref() {
8489
Some(model) => model.to_owned(),
85-
None => "gpt-4-turbo".into(),
90+
None => "gpt-4o".into(),
8691
};
92+
//filter out Null values from messages
8793

8894
let builder = {
8995
let request = OpenAiRequest {
@@ -104,7 +110,7 @@ pub async fn llm_call(
104110

105111
reqwest::Client::new()
106112
.post("https://api.openai.com/v1/chat/completions")
107-
.bearer_auth(req.openai_key)
113+
.bearer_auth(req.openai_key.clone())
108114
.json(&request)
109115
};
110116

@@ -116,6 +122,17 @@ pub async fn llm_call(
116122
Some(Ok(reqwest_eventsource::Event::Open)) => {}
117123
Some(Err(reqwest_eventsource::Error::InvalidStatusCode(status, _))) => {
118124
error!("{}", &status);
125+
if status == 429 {
126+
response.close();
127+
println!("Rate limit exceeded, try again after 5s");
128+
tokio::time::sleep(Duration::from_secs(5)).await;
129+
let openai_key = req.openai_key.clone();
130+
return Box::pin(llm_call(api::LLMRequest {
131+
openai_key,
132+
..req.clone()
133+
})).await;
134+
}
135+
119136
return Err(api::Error::BadOpenAiRequest.into());
120137
}
121138
Some(Err(e)) => {
@@ -163,14 +180,27 @@ pub async fn llm_call(
163180
Some(ChatChoice { ref mut delta, .. }) => {
164181
// The first message contains a redundant `role` field. We remove it.
165182
delta.remove("role");
166-
if delta.is_empty() {
167-
return Ok(None);
168-
}
169183

170184
if delta.len() == 2 {
171185
delta.remove("content");
172186
}
173187

188+
if delta.contains_key("content") {
189+
if delta.get_key_value("content").unwrap().1.is_null() {
190+
delta.remove("content");
191+
}
192+
}
193+
194+
if delta.contains_key("refusal") {
195+
if delta.get_key_value("refusal").unwrap().1.is_null() {
196+
delta.remove("refusal");
197+
}
198+
}
199+
200+
if delta.is_empty() {
201+
return Ok(None);
202+
}
203+
174204
let delta = serde_json::from_value(delta.clone().into()).map_err(|e| {
175205
error!(?delta, "{}", e);
176206
api::Error::BadOpenAiRequest

server/bleep/src/llm/client.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ pub mod api {
7373
pub functions: Vec<Function>,
7474
}
7575

76-
#[derive(Debug, serde::Serialize, serde::Deserialize)]
76+
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
7777
pub struct LLMRequest {
7878
pub openai_key: String,
7979
pub messages: Messages,

0 commit comments

Comments (0)