2 changes: 2 additions & 0 deletions README.md
@@ -62,6 +62,8 @@ Let's assume we want to download utility bills:

**It is recommended to use gpt-4o as the model for graph generation, as it supports function calling. Integuru will automatically switch to o1-preview for code generation if it is available in the user's OpenAI account.** ⚠️ **Note: o1-preview does not support function calls.**

+**Ollama support is now available! You can use a local Ollama model by passing `--model ollama` on the command line.**

## Usage

After setting up the project, you can use Integuru to analyze and reverse-engineer API requests for external platforms. Simply provide the appropriate .har file and a prompt describing the action that you want to trigger.
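For example, assuming the module entry point defined in integuru/__main__.py below, a typical invocation might look like `python -m integuru --model ollama --prompt "download utility bills"` (a hypothetical command line for illustration; the project's other options, such as the .har input, are unchanged).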
2 changes: 1 addition & 1 deletion integuru/__main__.py
@@ -11,7 +11,7 @@

@click.command()
@click.option(
"--model", default="gpt-4o", help="The LLM model to use (default is gpt-4o)"
"--model", default="gpt-4o", help="The LLM model to use (default is gpt-4o, supports ollama)"
)
@click.option("--prompt", required=True, help="The prompt for the model")
@click.option(
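To make the routing rule concrete, here is a minimal, hypothetical sketch (not code from this PR; `select_backend` is an illustrative name) of the backend selection that the `--model` value triggers in `LLMSingleton.get_instance`:

```python
# Hypothetical illustration: which client class a given --model value selects,
# mirroring the branch added in integuru/util/LLM.py below.
def select_backend(model: str) -> str:
    return "ChatOllama" if model == "ollama" else "ChatOpenAI"

assert select_backend("ollama") == "ChatOllama"   # local Ollama backend
assert select_backend("gpt-4o") == "ChatOpenAI"   # default OpenAI backend
```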
20 changes: 17 additions & 3 deletions integuru/util/LLM.py
@@ -1,17 +1,25 @@
from langchain_openai import ChatOpenAI
+from langchain_ollama import ChatOllama

class LLMSingleton:
_instance = None
_default_model = "gpt-4o"
_alternate_model = "o1-preview"
+    _ollama_model = "ollama"

@classmethod
def get_instance(cls, model: str = None):
if model is None:
model = cls._default_model

-        if cls._instance is None:
-            cls._instance = ChatOpenAI(model=model, temperature=1)
+        # Rebuild the client when none exists yet or a different model is requested.
+        # ChatOpenAI exposes the model name as `model_name`; ChatOllama exposes `model`.
+        current = getattr(cls._instance, "model_name", None) or getattr(cls._instance, "model", None)
+        if cls._instance is None or current != model:
+            if model == cls._ollama_model:
+                cls._instance = ChatOllama(model=model)  # NOTE: "ollama" is the CLI selector, not an installed model tag
+            else:
+                cls._instance = ChatOpenAI(model=model, temperature=1)
return cls._instance

@classmethod
@@ -34,5 +42,10 @@ def switch_to_alternate_model(cls):

return cls._instance

-llm = LLMSingleton()
+    @classmethod
+    def get_ollama_instance(cls):
+        """Return a ChatOllama instance ("ollama" is the CLI selector, not a model tag)."""
+        cls._instance = ChatOllama(model=cls._ollama_model)
+        return cls._instance

+llm = LLMSingleton()
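A usage sketch of the updated singleton, assuming an OpenAI API key is configured and a local Ollama server is available; the client is cached and only rebuilt when a different model is requested:

```python
from integuru.util.LLM import LLMSingleton

openai_llm = LLMSingleton.get_instance()           # defaults to gpt-4o (ChatOpenAI)
cached = LLMSingleton.get_instance("gpt-4o")       # same model -> cached object returned
local_llm = LLMSingleton.get_instance("ollama")    # different model -> rebuilt around ChatOllama

print(openai_llm is cached)     # True
print(openai_llm is local_llm)  # False
```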