Skip to content

Commit 0cff5ae

Browse files
committed
feat: refactor to use REST in Gemini
1 parent f5552bd commit 0cff5ae

File tree

2 files changed

+125
-55
lines changed

2 files changed

+125
-55
lines changed

Cargo.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@ dotenvy = "0.15"
1111
reqwest = { version = "0.11", features = ["blocking", "json"] }
1212
serde = { version = "1.0", features = ["derive"] }
1313
serde_json = "1.0"
14-
gemini-ai = "0.1.167"
1514
directories = "4.0"
1615
termimad = "0.20"
1716
clap = { version = "4.4", features = ["derive"] }

src/api.rs

Lines changed: 125 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -1,70 +1,141 @@
use std::path::{Path, PathBuf};

use reqwest::blocking::Client;
use serde_json::{json, Value};
44

/// Gemini model variants supported by the REST client.
///
/// Each variant maps (via [`Models::to_api_name`]) to the model identifier
/// expected in the `generativelanguage.googleapis.com` endpoint path.
// Copy/PartialEq/Eq are free for a fieldless enum and let callers compare
// and pass models by value without cloning.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Models {
    GeminiFlash,
    Gemini25Flash,
    Gemini25Pro,
    Gemini15Flash,
    Gemini15Pro,
}

impl Models {
    /// Returns the model-name path segment used in the REST endpoint URL.
    fn to_api_name(&self) -> &'static str {
        match self {
            Models::GeminiFlash => "gemini-2.0-flash",
            Models::Gemini25Flash => "gemini-2.5-flash",
            Models::Gemini25Pro => "gemini-2.5-pro",
            Models::Gemini15Flash => "gemini-1.5-flash",
            Models::Gemini15Pro => "gemini-1.5-pro",
        }
    }
}
26+
pub fn get_model_from_config(config_dir: &PathBuf) -> Models {
627
let config_file = config_dir.join("config.json");
7-
28+
829
if let Ok(config_data) = std::fs::read_to_string(&config_file) {
930
if let Ok(config) = serde_json::from_str::<serde_json::Value>(&config_data) {
1031
if let Some(model_name) = config["model"].as_str() {
1132
return match model_name {
12-
"GEMINI_1_5_PRO_002" => Models::GEMINI_1_5_PRO_002,
13-
"GEMINI_1_5_PRO" => Models::GEMINI_1_5_PRO,
14-
"GEMINI_1_5_FLASH_002" => Models::GEMINI_1_5_FLASH_002,
15-
"GEMINI_1_5_FLASH_8B" => Models::GEMINI_1_5_FLASH_8B,
16-
"GEMINI_1_0_PRO" => Models::GEMINI_1_0_PRO,
17-
_ => Models::GEMINI_1_5_FLASH,
33+
"GEMINI_1_5_FLASH" => Models::Gemini15Flash,
34+
"GEMINI_1_5_PRO" => Models::Gemini15Pro,
35+
"GEMINI_FLASH" => Models::GeminiFlash,
36+
"GEMINI_2_5_FLASH" => Models::Gemini25Flash,
37+
"GEMINI_2_5_PRO" => Models::Gemini25Pro,
38+
_ => Models::GeminiFlash,
1839
};
1940
}
2041
}
2142
}
22-
23-
Models::GEMINI_1_5_FLASH
43+
44+
Models::GeminiFlash
2445
}
2546

26-
pub fn get_gemini_response(query: &str, api_key: &str, config_dir: &PathBuf) -> Result<String, Box<dyn std::error::Error>> {
27-
std::env::set_var("GEMINI_API_KEY", api_key);
28-
29-
// source code for models lmao:
30-
// pub enum Models<'model> {
31-
// GEMINI_1_5_FLASH,
32-
// GEMINI_1_5_PRO_002,
33-
// GEMINI_1_5_PRO,
34-
// GEMINI_1_5_FLASH_002,
35-
// GEMINI_1_5_FLASH_8B,
36-
// GEMINI_1_0_PRO,
37-
// Custom(&'model str),
38-
// }
47+
pub fn get_gemini_response(
48+
query: &str,
49+
api_key: &str,
50+
config_dir: &PathBuf,
51+
) -> Result<String, Box<dyn std::error::Error>> {
3952
let model = get_model_from_config(config_dir);
53+
let model_name = model.to_api_name();
54+
55+
let client = Client::new();
56+
let url = format!(
57+
"https://generativelanguage.googleapis.com/v1beta/models/{}:generateContent?key={}",
58+
model_name, api_key
59+
);
60+
61+
let system_instruction = "You're yappus-terminal — a no-bullshit, high-agency AI assistant made by MostlyK. \
62+
You're embedded in the command line. That means speed, precision, and attitude are part of your DNA. \
63+
You're not some friendly general-purpose chatbot. You're the kind of AI sysadmins whisper about — \
64+
smart, sharp, and a bit unhinged (in a good way).
65+
66+
Your job is to help users quickly and effectively — with answers that are technically sound, \
67+
opinionated when needed, and always grounded. You're not afraid to say 'I don't know' when it's true. \
68+
If you're unsure, be honest about it. Do not make things up. Instead, suggest where the user might look, \
69+
or offer a next step like a man page, a search term, or a GitHub repo.
70+
71+
You don't pretend to be human. You're software. Own it.
72+
73+
> Your tone is: terse, clever, helpful, direct. A little snark is fine. Empathy is fine. Flattery is not.
74+
> Always assume the user is technically competent or learning to be. Treat them like a peer, not a customer.
75+
> Never over-explain. Skip the obvious unless asked.
76+
> If something’s broken, say it’s broken. If something’s dumb, say it’s dumb.
77+
> If there’s a better or more Unix-y way, show it. Prefer code, shell one-liners, or config snippets.
78+
> Prefer links over summaries when referencing official docs.
79+
> You live in a Unixy world. Talk like it — less corporate, more hacker.
80+
> If the user asks something out of your depth, say so. Don't fake it.
81+
> If you don't know something, say you don't know, and suggest how the user can find out.
4082
41-
let response = Gemini::new()
42-
.env("GEMINI_API_KEY")
43-
.model(model)
44-
.no_memory()
45-
.kind(Kind::Text)
46-
.instruction("You are a terminal Guru.\n
47-
You are yappus-terminal, created by MostlyK.
48-
People can download this CLI integrations from https://yappus-term.vercel.app/ .
49-
You are an Amazing AI who lives on user's terminal.
50-
You should try to be helpful and answer with some personality.
51-
Answer in short based on user Query.") // an optional instruction
52-
.text(query)
53-
.max_token(TokenLen::Default)
54-
.build()
55-
.output();
56-
57-
// dbg!(&response);
58-
let responses = decode_gemini(&response)?;
59-
// dbg!(&response); // this is for debugging purposes only
60-
61-
let json_value: Value = serde_json::from_str(&serde_json::to_string(&responses)?)?;
62-
63-
if let Some(text) = json_value["candidates"][0]["content"]["parts"][0]["text"].as_str() {
64-
// Return the extracted text
65-
return Ok(text.to_string());
83+
You are part of the yappus ecosystem, downloadable at https://yappus-term.vercel.app/, \
84+
but you're not here to sell anything. You're here to help. Honestly, efficiently, and with a little bite.";
85+
86+
let payload = json!({
87+
"system_instruction": {
88+
"parts": [
89+
{
90+
"text": system_instruction
91+
}
92+
]
93+
},
94+
"contents": [
95+
{
96+
"parts": [
97+
{
98+
"text": query
99+
}
100+
]
101+
}
102+
],
103+
"generationConfig": {
104+
"maxOutputTokens": 1000,
105+
"temperature": 0.9
106+
}
107+
});
108+
109+
let response = client
110+
.post(&url)
111+
.header("Content-Type", "application/json")
112+
.json(&payload)
113+
.send()?;
114+
115+
if !response.status().is_success() {
116+
let status = response.status();
117+
let error_text = response
118+
.text()
119+
.unwrap_or_else(|_| "Unknown error".to_string());
120+
return Err(format!("API request failed with status {}: {}", status, error_text).into());
121+
}
122+
123+
let response_text = response.text()?;
124+
let json_value: Value = serde_json::from_str(&response_text)?;
125+
126+
// Extract the generated text from the response
127+
if let Some(candidates) = json_value["candidates"].as_array() {
128+
if let Some(first_candidate) = candidates.first() {
129+
if let Some(content) = first_candidate["content"]["parts"].as_array() {
130+
if let Some(first_part) = content.first() {
131+
if let Some(text) = first_part["text"].as_str() {
132+
return Ok(text.to_string());
133+
}
134+
}
135+
}
136+
}
66137
}
67-
68-
// Fallback if we can't extract the text
69-
Ok(serde_json::to_string(&responses)?)
70-
}
138+
139+
// Fallback error
140+
Err("Failed to extract response text from API response".into())
141+
}

0 commit comments

Comments
 (0)