7 changes: 7 additions & 0 deletions README.md
@@ -131,6 +131,13 @@ To start using the plugin, enable it in your settings menu and insert an API key
- gpt-4-turbo
- gpt-4o
- gpt-4o-mini
- Azure OpenAI
- gpt-3.5-turbo
- gpt-4
- gpt-4-32k
- gpt-4-turbo
- gpt-4o
- gpt-4o-mini
- Any Openrouter provided models.

## Other Notes
31 changes: 28 additions & 3 deletions src/components/FetchModelEditor.ts
@@ -1,5 +1,5 @@
import { requestUrl } from 'obsidian';
import { BMOSettings } from 'src/main';
import {requestUrl} from 'obsidian';
import {BMOSettings} from 'src/main';

// Request response from Ollama REST API URL (editor)
export async function fetchOllamaResponseEditor(settings: BMOSettings, prompt: string, model?: string, temperature?: string, maxTokens?: string, signal?: AbortSignal) {
@@ -220,6 +220,31 @@ export async function fetchOpenAIBaseAPIResponseEditor(settings: BMOSettings, pr
return message;
}

// Request response from Azure OpenAI REST API (editor)
export async function fetchAzureOpenAIBaseAPIResponseEditor(settings: BMOSettings, prompt: string, temperature?: string, maxTokens?: string, signal?: AbortSignal) {
    const {azureOpenAIBaseUrl, APIKey} = settings.APIConnections.azureOpenAI;
    const response = await fetch(`${azureOpenAIBaseUrl}/openai/deployments/${settings.general.model}/chat/completions?api-version=2024-02-15-preview`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'api-key': APIKey,
        },
        body: JSON.stringify({
            max_tokens: parseInt(maxTokens || settings.general.max_tokens),
            temperature: parseFloat(temperature || settings.general.temperature),
            top_p: 0.95,
            messages: [
                {role: 'system', content: [{type: 'text', text: settings.editor.systen_role}]},
                {role: 'user', content: [{type: 'text', text: prompt}]}
            ],
        }),
        signal: signal,
    });

    // fetch does not reject on HTTP error statuses, so surface them before reading the body.
    if (!response.ok) {
        throw new Error(`Azure OpenAI request failed: ${response.status} ${response.statusText}`);
    }

    const data = await response.json();
    return data.choices[0].message.content || '';
}

// Request response from openai-based rest api url (editor)
export async function fetchOpenRouterEditor(settings: BMOSettings, prompt: string, model?: string, temperature?: string, maxTokens?: string, signal?: AbortSignal) {
try {
@@ -249,4 +274,4 @@ export async function fetchOpenRouterEditor(settings: BMOSettings, prompt: strin
console.error('Error making API request:', error);
throw error;
}
}
}
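For context, here is a minimal sketch of how the new editor helper might be called from plugin code. Only fetchAzureOpenAIBaseAPIResponseEditor and the settings fields it reads come from this diff; the wrapper function name and its wiring are illustrative assumptions.

import {fetchAzureOpenAIBaseAPIResponseEditor} from 'src/components/FetchModelEditor';
import {BMOSettings} from 'src/main';

// Hypothetical caller: generate a completion for the current selection.
// Note that settings.general.model must hold an Azure *deployment name*,
// since the request URL is /openai/deployments/<model>/chat/completions.
async function generateFromSelection(settings: BMOSettings, selection: string): Promise<string> {
    const controller = new AbortController();
    return fetchAzureOpenAIBaseAPIResponseEditor(
        settings,
        selection,
        settings.general.temperature,
        settings.general.max_tokens,
        controller.signal
    );
}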
17 changes: 16 additions & 1 deletion src/components/FetchModelList.ts
@@ -136,6 +136,21 @@ export async function fetchOpenAIBaseModels(plugin: BMOGPT)

}

export async function fetchAzureOpenAIBaseModels(plugin: BMOGPT): Promise<string[]> {
    const {azureOpenAIBaseUrl, APIKey} = plugin.settings.APIConnections.azureOpenAI;
    const response = await requestUrl({
        url: `${azureOpenAIBaseUrl}/openai/deployments?api-version=2022-12-01`,
        method: 'GET',
        headers: {
            'Content-Type': 'application/json',
            'api-key': APIKey
        }
    });

    // Each deployment id doubles as the model name passed to the chat completions endpoint.
    return response.json.data.map((deployment: {id: string}) => deployment.id);
}


export async function fetchOpenRouterModels(plugin: BMOGPT) {
try {
const response = await requestUrl({
@@ -164,4 +179,4 @@ export async function fetchOpenRouterModels(plugin: BMOGPT) {
throw error;
}

}
}
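A sketch of how the deployment-list helper might feed a model dropdown in the settings tab. The loadAzureDeployments wrapper and the import of the BMOGPT plugin class are assumptions for illustration; only fetchAzureOpenAIBaseModels is defined in this PR.

import {fetchAzureOpenAIBaseModels} from 'src/components/FetchModelList';
import BMOGPT from 'src/main';

// Hypothetical settings-tab helper: list deployments, fall back to an empty list on failure.
async function loadAzureDeployments(plugin: BMOGPT): Promise<string[]> {
    try {
        return await fetchAzureOpenAIBaseModels(plugin);
    } catch (error) {
        console.error('Error fetching Azure OpenAI deployments:', error);
        return [];
    }
}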