Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 28 additions & 5 deletions src/arduino/app_bricks/cloud_llm/cloud_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,9 @@ def __init__(
'API_KEY' environment variable.
model (Union[str, CloudModel]): The model identifier. Accepts a `CloudModel`
enum member (e.g., `CloudModel.OPENAI_GPT`) or its corresponding raw string
value (e.g., `'openai:gpt-4o-mini'`). Defaults to `CloudModel.ANTHROPIC_CLAUDE`.
value (e.g., `'openai:gpt-5-mini'`). Defaults to `CloudModel.ANTHROPIC_CLAUDE`.
To identify the model provider, you need to use prefixes like 'openai:', 'anthropic:', or 'google:'.
If no prefix is provided, the model will be defaulted to an OpenAI compatible model.
system_prompt (str): A system-level instruction that defines the AI's persona
and constraints (e.g., "You are a helpful assistant"). Defaults to empty.
temperature (Optional[float]): The sampling temperature between 0.0 and 1.0.
Expand All @@ -72,7 +73,11 @@ def __init__(
Raises:
ValueError: If `api_key` is not provided (empty string).
"""
if api_key == "":
if api_key == "" and (
model.startswith(f"{CloudModelProvider.OPENAI}:")
or model.startswith(f"{CloudModelProvider.ANTHROPIC}:")
or model.startswith(f"{CloudModelProvider.GOOGLE}:")
):
raise ValueError("API key is required to initialize CloudLLM brick.")

self._api_key = api_key
Expand All @@ -83,6 +88,8 @@ def __init__(
self._max_tool_loops = max_tool_loops
self._timeout = timeout
self._callbacks = callbacks
self._model_loaded = False
self._model_name = model

# Registered tools
self._tools_map = {}
Expand Down Expand Up @@ -141,6 +148,11 @@ def _get_message_with_history(self, user_input: str, images: List[str | bytes] =
List[BaseMessage]: The list of messages in the conversation history,
including system prompt if set.
"""

if self._model_loaded is False:
logger.info(f"Initializing model {self._model_name}...")
self._model_loaded = True

messages = self._history.get_messages()
message = None
if images is not None and len(images) > 0:
Expand Down Expand Up @@ -410,20 +422,31 @@ def model_factory(model_name: CloudModel, **kwargs) -> BaseChatModel:
"""Factory function to instantiate the specific LangChain chat model.

This function maps the supported `CloudModel` enum values to their respective
LangChain implementations.
LangChain implementations. In case of prefix-based model identifiers (e.g., 'openai:gpt-5-mini'),
it extracts the provider and model name accordingly.

Args:
model_name (CloudModel): The enum or string identifier for the model.
Model name can include provider prefixes like 'openai:', 'anthropic:', or 'google:'
to specify the provider.
to specify the provider. If no prefix is provided, the model will be defaulted to an OpenAI compatible model.
**kwargs: Additional arguments passed to the model constructor (e.g., api_key, temperature).

Returns:
BaseChatModel: An instance of a LangChain chat model wrapper.

Raises:
ValueError: If `model_name` does not match one of the supported `CloudModel` options.
ValueError: If `model_name` does not match one of the supported options.
"""

if (
"base_url" in kwargs
and not model_name.startswith(f"{CloudModelProvider.OPENAI}:")
and not model_name.startswith(f"{CloudModelProvider.ANTHROPIC}:")
and not model_name.startswith(f"{CloudModelProvider.GOOGLE}:")
):
logger.debug(f"Model name '{model_name}' does not specify a provider prefix, but 'base_url' is provided. Defaulting to OpenAI provider.")
model_name = f"{CloudModelProvider.OPENAI}:{model_name}"

if model_name == CloudModel.ANTHROPIC_CLAUDE or model_name.startswith(f"{CloudModelProvider.ANTHROPIC}:"):
from langchain_anthropic import ChatAnthropic

Expand Down
26 changes: 26 additions & 0 deletions src/arduino/app_bricks/cloud_llm/examples/7_external_provider.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# SPDX-FileCopyrightText: Copyright (C) ARDUINO SRL (http://www.arduino.cc)
#
# SPDX-License-Identifier: MPL-2.0

# EXAMPLE_NAME = "Chat with an Ollama model"

import time

from arduino.app_bricks.cloud_llm import CloudLLM
from arduino.app_utils import App

# Point the brick at a local OpenAI-compatible endpoint (Ollama) instead of a
# hosted provider; no API key is needed for a local server.
llm = CloudLLM(
    model="qwen3.5:0.8b",  # Replace with the actual model name you want to use. Model must be available in your Ollama instance.
    base_url="http://localhost:11434/v1",  # Default Ollama address
    system_prompt="You are a helpful assistant that provides concise answers to questions about historical figures.",
)


def ask_prompt():
    """Send one prompt to the model, stream the reply to stdout, then pause."""
    print("\n----- Sending prompt to the model -----")
    stream = llm.chat_stream(message="Who was Giuseppe Verdi?")
    for piece in stream:
        # Print each streamed fragment as it arrives, without a trailing newline.
        print(piece, end="", flush=True)
    print("\n----- Response complete -----")
    # Wait between prompts so the example does not hammer the local server.
    time.sleep(60)


App.run(ask_prompt)
2 changes: 1 addition & 1 deletion src/arduino/app_bricks/cloud_llm/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@

from arduino.app_utils import Logger

logger = Logger("CloudLLM")
# Module-level logger used throughout the cloud_llm brick.
# NOTE(review): the tag was renamed from "CloudLLM" to "LLM" in this change —
# a less specific label in shared logs; confirm the rename is intentional.
logger = Logger("LLM")
Loading