
Commit 80c8f42

amit-webkul authored and github-actions[bot] committed
chore: applied pint changes
1 parent 258dff8 commit 80c8f42

File tree

  • packages/Webkul/Admin/src/Helpers

1 file changed: +16 −16 lines changed

packages/Webkul/Admin/src/Helpers/Lead.php (+16 −16)
@@ -70,11 +70,11 @@ private static function sendGeminiRequest($prompt, $model)
         $url = "https://generativelanguage.googleapis.com/v1beta/models/{$model}:generateContent?key={$apiKey}";

         $data = [
-            "contents" => [
+            'contents' => [
                 [
-                    "parts" => [
+                    'parts' => [
                         [
-                            "text" => 'You are an AI assistant. You have to extract the data from the PDF file.
+                            'text' => 'You are an AI assistant. You have to extract the data from the PDF file.
                             Example Output:
                             {
                                 "status": 1,
@@ -94,24 +94,24 @@ private static function sendGeminiRequest($prompt, $model)
                                 "lead_value": 0,
                                 "source": "AI Extracted"
                             }
-                            Note: Only return the output, Do not return or add any comments.'
-                        ]
+                            Note: Only return the output, Do not return or add any comments.',
+                        ],
                     ],
-                    "role" => "system"
+                    'role' => 'system',
                 ],
                 [
-                    "parts" => [
-                        ["text" => "PDF:\n$prompt"]
+                    'parts' => [
+                        ['text' => "PDF:\n$prompt"],
                     ],
-                    "role" => "user"
-                ]
+                    'role' => 'user',
+                ],
+            ],
+            'generationConfig' => [
+                'temperature' => 0.2,
+                'topK' => 30,
+                'topP' => 0.8,
+                'maxOutputTokens' => 512,
             ],
-            "generationConfig" => [
-                "temperature" => 0.2,
-                "topK" => 30,
-                "topP" => 0.8,
-                "maxOutputTokens" => 512
-            ]
         ];

         return self::makeCurlRequest($url, $model, $data);
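The hunk ends at a call to self::makeCurlRequest($url, $model, $data), which is defined elsewhere in the Lead helper and is untouched by this commit. As a rough, hypothetical sketch only (not the actual Webkul implementation), a helper along these lines could POST the $data array as a JSON body to the generateContent URL built above:

<?php

// Hypothetical sketch: the real makeCurlRequest is not shown in this diff;
// this only illustrates one plausible shape for such a helper.
function sendGeminiPayload(string $url, array $data): array
{
    $ch = curl_init($url);

    curl_setopt_array($ch, [
        CURLOPT_RETURNTRANSFER => true,                // return the response body as a string
        CURLOPT_POST           => true,
        CURLOPT_HTTPHEADER     => ['Content-Type: application/json'],
        CURLOPT_POSTFIELDS     => json_encode($data),  // the nested array serializes to the Gemini JSON payload
    ]);

    $response = curl_exec($ch);

    if ($response === false) {
        $error = curl_error($ch);
        curl_close($ch);

        throw new RuntimeException("Gemini request failed: {$error}");
    }

    curl_close($ch);

    return json_decode($response, true) ?? [];
}

As for the payload itself: the first contents entry carries the extraction instructions under role "system", the second carries the PDF text under role "user", and the low temperature (0.2) with a 512-token cap keeps the returned JSON short and close to deterministic.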
