Skip to content

Commit b9e16c5

Browse files
authored
Merge pull request #265 from acon96/release/v0.3.8
Release v0.3.8
2 parents feb791c + fe39e48 commit b9e16c5

20 files changed: +678 additions, -444 deletions

.github/workflows/create-release.yml

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,33 +20,33 @@ jobs:
2020
matrix:
2121
include:
2222
# ARM variants
23-
- home_assistant_version: "2024.12.3"
23+
- home_assistant_version: "2025.4.1"
2424
arch: "aarch64"
25-
- home_assistant_version: "2024.12.3"
25+
- home_assistant_version: "2025.4.1"
2626
arch: "armhf"
2727

2828
# Base x86
29-
- home_assistant_version: "2024.12.3"
29+
- home_assistant_version: "2025.4.1"
3030
suffix: "-noavx"
3131
arch: "amd64"
3232
extra_defines: "-DGGML_AVX=OFF -DGGML_AVX2=OFF -DGGML_FMA=OFF -DGGML_F16C=OFF"
33-
- home_assistant_version: "2024.12.3"
33+
- home_assistant_version: "2025.4.1"
3434
arch: "i386"
3535
suffix: "-noavx"
3636
extra_defines: "-DGGML_AVX=OFF -DGGML_AVX2=OFF -DGGML_FMA=OFF -DGGML_F16C=OFF"
3737

3838
# AVX2 and AVX512
39-
- home_assistant_version: "2024.12.3"
39+
- home_assistant_version: "2025.4.1"
4040
arch: "amd64"
4141
extra_defines: "-DGGML_AVX=ON -DGGML_AVX2=ON -DGGML_FMA=ON -DGGML_F16C=ON"
42-
- home_assistant_version: "2024.12.3"
42+
- home_assistant_version: "2025.4.1"
4343
arch: "amd64"
4444
suffix: "-avx512"
4545
extra_defines: "-DGGML_AVX512=ON -DGGML_FMA=ON -DGGML_F16C=ON"
46-
- home_assistant_version: "2024.12.3"
46+
- home_assistant_version: "2025.4.1"
4747
arch: "i386"
4848
extra_defines: "-DGGML_AVX=ON -DGGML_AVX2=ON -DGGML_FMA=ON -DGGML_F16C=ON"
49-
- home_assistant_version: "2024.12.3"
49+
- home_assistant_version: "2025.4.1"
5050
arch: "i386"
5151
suffix: "-avx512"
5252
extra_defines: "-DGGML_AVX512=ON -DGGML_FMA=ON -DGGML_F16C=ON"

README.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ This project provides the required "glue" components to control your Home Assist
55
Please see the [Setup Guide](./docs/Setup.md) for more information on installation.
66

77
## Local LLM Conversation Integration
8-
**The latest version of this integration requires Home Assistant 2024.12.3 or newer**
8+
**The latest version of this integration requires Home Assistant 2025.4.1 or newer**
99

1010
In order to integrate with Home Assistant, we provide a custom component that exposes the locally running LLM as a "conversation agent".
1111

@@ -150,6 +150,7 @@ In order to facilitate running the project entirely on the system where Home Ass
150150
## Version History
151151
| Version | Description |
152152
|---------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
153+
| v0.3.8 | Update llama.cpp, remove think blocks from "thinking" models, fix wheel detection for some Intel CPUs, Fixes for compatibility with latest Home Assistant version (2025.4), other small bug fixes |
153154
| v0.3.7 | Update llama.cpp version to support newer models, Update minimum Home Assistant version to 2024.12.3, Add German In-Context Learning examples, Fix multi-turn use, Fix an issue with webcolors |
154155
| v0.3.6 | Small llama.cpp backend fixes |
155156
| v0.3.5 | Fix for llama.cpp backend installation, Fix for Home LLM v1-3 API parameters, add Polish ICL examples |

custom_components/llama_conversation/__init__.py

Lines changed: 44 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -8,19 +8,31 @@
88
from homeassistant.config_entries import ConfigEntry
99
from homeassistant.const import ATTR_ENTITY_ID, Platform
1010
from homeassistant.core import HomeAssistant
11-
from homeassistant.exceptions import HomeAssistantError
1211
from homeassistant.helpers import config_validation as cv, llm
1312
from homeassistant.util.json import JsonObjectType
1413

1514
import voluptuous as vol
1615

17-
1816
from .const import (
1917
ALLOWED_SERVICE_CALL_ARGUMENTS,
2018
DOMAIN,
2119
HOME_LLM_API_ID,
2220
SERVICE_TOOL_NAME,
21+
SERVICE_TOOL_ALLOWED_SERVICES,
22+
SERVICE_TOOL_ALLOWED_DOMAINS,
23+
CONF_BACKEND_TYPE,
24+
DEFAULT_BACKEND_TYPE,
25+
BACKEND_TYPE_LLAMA_HF,
26+
BACKEND_TYPE_LLAMA_EXISTING,
27+
BACKEND_TYPE_TEXT_GEN_WEBUI,
28+
BACKEND_TYPE_GENERIC_OPENAI,
29+
BACKEND_TYPE_LLAMA_CPP_PYTHON_SERVER,
30+
BACKEND_TYPE_OLLAMA,
2331
)
32+
from .conversation import LlamaCppAgent, GenericOpenAIAPIAgent, TextGenerationWebuiAgent, \
33+
LlamaCppPythonAPIAgent, OllamaAPIAgent, LocalLLMAgent
34+
35+
type LocalLLMConfigEntry = ConfigEntry[LocalLLMAgent]
2436

2537
_LOGGER = logging.getLogger(__name__)
2638

@@ -29,26 +41,51 @@
2941
PLATFORMS = (Platform.CONVERSATION,)
3042

3143

32-
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
44+
async def async_setup_entry(hass: HomeAssistant, entry: LocalLLMConfigEntry) -> bool:
3345

3446
# make sure the API is registered
3547
if not any([x.id == HOME_LLM_API_ID for x in llm.async_get_apis(hass)]):
3648
llm.async_register_api(hass, HomeLLMAPI(hass))
3749

3850
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = entry
3951

52+
def create_agent(backend_type):
53+
agent_cls = None
54+
55+
if backend_type in [ BACKEND_TYPE_LLAMA_HF, BACKEND_TYPE_LLAMA_EXISTING ]:
56+
agent_cls = LlamaCppAgent
57+
elif backend_type == BACKEND_TYPE_GENERIC_OPENAI:
58+
agent_cls = GenericOpenAIAPIAgent
59+
elif backend_type == BACKEND_TYPE_TEXT_GEN_WEBUI:
60+
agent_cls = TextGenerationWebuiAgent
61+
elif backend_type == BACKEND_TYPE_LLAMA_CPP_PYTHON_SERVER:
62+
agent_cls = LlamaCppPythonAPIAgent
63+
elif backend_type == BACKEND_TYPE_OLLAMA:
64+
agent_cls = OllamaAPIAgent
65+
66+
return agent_cls(hass, entry)
67+
68+
# create the agent in an executor job because the constructor calls `open()`
69+
backend_type = entry.data.get(CONF_BACKEND_TYPE, DEFAULT_BACKEND_TYPE)
70+
entry.runtime_data = await hass.async_add_executor_job(create_agent, backend_type)
71+
72+
# call load model
73+
await entry.runtime_data._async_load_model(entry)
74+
75+
# forward setup to platform to register the entity
4076
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
77+
4178
return True
4279

4380

44-
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
81+
async def async_unload_entry(hass: HomeAssistant, entry: LocalLLMConfigEntry) -> bool:
4582
"""Unload Ollama."""
4683
if not await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
4784
return False
4885
hass.data[DOMAIN].pop(entry.entry_id)
4986
return True
5087

51-
async def async_migrate_entry(hass, config_entry: ConfigEntry):
88+
async def async_migrate_entry(hass: HomeAssistant, config_entry: LocalLLMConfigEntry):
5289
"""Migrate old entry."""
5390
_LOGGER.debug("Migrating from version %s", config_entry.version)
5491

@@ -82,13 +119,8 @@ class HassServiceTool(llm.Tool):
82119
vol.Optional('item'): str,
83120
})
84121

85-
ALLOWED_SERVICES: Final[list[str]] = [
86-
"turn_on", "turn_off", "toggle", "press", "increase_speed", "decrease_speed", "open_cover", "close_cover", "stop_cover",
87-
"lock", "unlock", "start", "stop", "return_to_base", "pause", "cancel", "add_item"
88-
]
89-
ALLOWED_DOMAINS: Final[list[str]] = [
90-
"light", "switch", "button", "fan", "cover", "lock", "media_player", "climate", "vacuum", "todo", "timer", "script",
91-
]
122+
ALLOWED_SERVICES: Final[list[str]] = SERVICE_TOOL_ALLOWED_SERVICES
123+
ALLOWED_DOMAINS: Final[list[str]] = SERVICE_TOOL_ALLOWED_DOMAINS
92124

93125
async def async_call(
94126
self, hass: HomeAssistant, tool_input: llm.ToolInput, llm_context: llm.LLMContext

custom_components/llama_conversation/config_flow.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -687,6 +687,7 @@ async def async_step_model_parameters(
687687
for key in OPTIONS_OVERRIDES.keys():
688688
if key in model_name:
689689
selected_default_options.update(OPTIONS_OVERRIDES[key])
690+
break
690691

691692
persona = PERSONA_PROMPTS.get(self.selected_language, PERSONA_PROMPTS.get("en"))
692693
current_date = CURRENT_DATE_PROMPT.get(self.selected_language, CURRENT_DATE_PROMPT.get("en"))
@@ -765,15 +766,15 @@ def async_get_options_flow(
765766
config_entry: config_entries.ConfigEntry,
766767
) -> config_entries.OptionsFlow:
767768
"""Create the options flow."""
768-
return OptionsFlow(config_entry)
769+
return OptionsFlow()
769770

770771

771772
class OptionsFlow(config_entries.OptionsFlow):
772773
"""Local LLM config flow options handler."""
773774

774-
def __init__(self, config_entry: config_entries.ConfigEntry) -> None:
775-
"""Initialize options flow."""
776-
self.config_entry = config_entry
775+
@property
776+
def config_entry(self):
777+
return self.hass.config_entries.async_get_entry(self.handler)
777778

778779
async def async_step_init(
779780
self, user_input: dict[str, Any] | None = None

custom_components/llama_conversation/const.py

Lines changed: 22 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,8 @@
44
DOMAIN = "llama_conversation"
55
HOME_LLM_API_ID = "home-llm-service-api"
66
SERVICE_TOOL_NAME = "HassCallService"
7+
SERVICE_TOOL_ALLOWED_SERVICES = ["turn_on", "turn_off", "toggle", "press", "increase_speed", "decrease_speed", "open_cover", "close_cover", "stop_cover", "lock", "unlock", "start", "stop", "return_to_base", "pause", "cancel", "add_item", "set_temperature", "set_humidity", "set_fan_mode", "set_hvac_mode", "set_preset_mode"]
8+
SERVICE_TOOL_ALLOWED_DOMAINS = ["light", "switch", "button", "fan", "cover", "lock", "media_player", "climate", "vacuum", "todo", "timer", "script"]
79
CONF_PROMPT = "prompt"
810
PERSONA_PROMPTS = {
911
"en": "You are 'Al', a helpful AI Assistant that controls the devices in a house. Complete the following task as instructed with the information provided only.",
@@ -13,7 +15,7 @@
1315
"pl": "Jeste\u015b 'Al', pomocnym asystentem AI, kt\u00f3ry kontroluje urz\u0105dzenia w domu. Wykonaj poni\u017csze zadanie zgodnie z instrukcj\u0105 lub odpowiedz na poni\u017csze pytanie, korzystaj\u0105c wy\u0142\u0105cznie z podanych informacji."
1416
}
1517
CURRENT_DATE_PROMPT = {
16-
"en": """The current time and date is {{ (as_timestamp(now()) | timestamp_custom("%I:%M %p on %A %B %d, %Y", "")) }}""",
18+
"en": """The current time and date is {{ (as_timestamp(now()) | timestamp_custom("%I:%M %p on %A %B %d, %Y", True, "")) }}""",
1719
"de": """{% set day_name = ["Montag", "Dienstag", "Mittwoch", "Donnerstag", "Freitag", "Samstag", "Sonntag"] %}{% set month_name = ["Januar", "Februar", "März", "April", "Mai", "Juni", "Juli", "August", "September", "Oktober", "November", "Dezember"] %}Die aktuelle Uhrzeit und das aktuelle Datum sind {{ (as_timestamp(now()) | timestamp_custom("%H:%M", local=True)) }} {{ day_name[now().weekday()] }}, {{ now().day }} {{ month_name[now().month -1]}} {{ now().year }}.""",
1820
"fr": """{% set day_name = ["lundi", "mardi", "mercredi", "jeudi", "vendredi", "samedi", "dimanche"] %}{% set month_name = ["janvier", "février", "mars", "avril", "mai", "juin", "juillet", "août", "septembre", "octobre", "novembre", "décembre"] %} L'heure et la date actuelles sont {{ (as_timestamp(now()) | timestamp_custom("%H:%M", local=True)) }} {{ day_name[now().weekday()] }}, {{ now().day }} {{ month_name[now().month -1]}} {{ now().year }}.""",
1921
"es": """{% set day_name = ["lunes", "martes", "miércoles", "jueves", "viernes", "sábado", "domingo"] %}{% set month_name = ["enero", "febrero", "marzo", "abril", "mayo", "junio", "julio", "agosto", "septiembre", "octubre", "noviembre", "diciembre"] %}La hora y fecha actuales son {{ (as_timestamp(now()) | timestamp_custom("%H:%M", local=True)) }} {{ day_name[now().weekday()] }}, {{ now().day }} de {{ month_name[now().month -1]}} de {{ now().year }}.""",
@@ -146,59 +148,69 @@
146148
"user": { "prefix": "<|im_start|>user\n", "suffix": "<|im_end|>" },
147149
"assistant": { "prefix": "<|im_start|>assistant\n", "suffix": "<|im_end|>" },
148150
"tool": { "prefix": "<|im_start|>tool", "suffix": "<|im_end|>" },
151+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
149152
"generation_prompt": "<|im_start|>assistant"
150153
},
151154
PROMPT_TEMPLATE_COMMAND_R: {
152155
"system": { "prefix": "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>", "suffix": "<|END_OF_TURN_TOKEN|>" },
153156
"user": { "prefix": "<|START_OF_TURN_TOKEN|><|USER_TOKEN|>", "suffix": "<|END_OF_TURN_TOKEN|>" },
154157
"assistant": { "prefix": "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>", "suffix": "<|END_OF_TURN_TOKEN|>" },
158+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
155159
"generation_prompt": "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>"
156160
},
157161
PROMPT_TEMPLATE_ALPACA: {
158162
"system": { "prefix": "", "suffix": "\n" },
159163
"user": { "prefix": "### Instruction:\n", "suffix": "\n" },
160164
"assistant": { "prefix": "### Response:\n", "suffix": "\n" },
165+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
161166
"generation_prompt": "### Response:"
162167
},
163168
PROMPT_TEMPLATE_VICUNA: {
164169
"system": { "prefix": "", "suffix": "\n" },
165170
"user": { "prefix": "USER: ", "suffix": "" },
166171
"assistant": { "prefix": "ASSISTANT: ", "suffix": "</s>" },
172+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
167173
"generation_prompt": "ASSISTANT:"
168174
},
169175
PROMPT_TEMPLATE_NONE: {
170176
"system": { "prefix": "", "suffix": "" },
171177
"user": { "prefix": "", "suffix": "" },
172178
"assistant": { "prefix": "", "suffix": "" },
179+
"chain_of_thought": { "prefix": "", "suffix": ""},
173180
"generation_prompt": ""
174181
},
175182
PROMPT_TEMPLATE_MISTRAL: {
176183
"user": { "prefix": "<s>[INST] ", "suffix": " [/INST] " },
177184
"assistant": { "prefix": "", "suffix": "</s>" },
185+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
178186
"generation_prompt": ""
179187
},
180188
PROMPT_TEMPLATE_ZEPHYR: {
181189
"system": { "prefix": "<|system|>\n", "suffix": "<|endoftext|>" },
182190
"user": { "prefix": "<|user|>\n", "suffix": "<|endoftext|>" },
183191
"assistant": { "prefix": "<|assistant|>\n", "suffix": "<|endoftext|>" },
192+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
184193
"generation_prompt": "<|assistant|>\n"
185194
},
186195
PROMPT_TEMPLATE_ZEPHYR2: {
187196
"system": { "prefix": "<|system|>\n", "suffix": "</s>" },
188197
"user": { "prefix": "<|user|>\n", "suffix": "</s>" },
189198
"assistant": { "prefix": "<|assistant|>\n", "suffix": "</s>" },
199+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
190200
"generation_prompt": "<|assistant|>\n"
191201
},
192202
PROMPT_TEMPLATE_ZEPHYR3: {
193203
"system": { "prefix": "<|system|>\n", "suffix": "<|end|>" },
194204
"user": { "prefix": "<|user|>\n", "suffix": "<|end|>" },
195205
"assistant": { "prefix": "<|assistant|>\n", "suffix": "<|end|>" },
206+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
196207
"generation_prompt": "<|assistant|>\n"
197208
},
198209
PROMPT_TEMPLATE_LLAMA3: {
199210
"system": { "prefix": "<|start_header_id|>system<|end_header_id|>\n\n", "suffix": "<|eot_id|>"},
200211
"user": { "prefix": "<|start_header_id|>user<|end_header_id|>\n\n", "suffix": "<|eot_id|>"},
201212
"assistant": { "prefix": "<|start_header_id|>assistant<|end_header_id|>\n\n", "suffix": "<|eot_id|>"},
213+
"chain_of_thought": { "prefix": "<think>", "suffix": "</think>"},
202214
"generation_prompt": "<|start_header_id|>assistant<|end_header_id|>\n\n"
203215
}
204216
}
@@ -297,6 +309,14 @@
297309
)
298310

299311
OPTIONS_OVERRIDES = {
312+
"home-llama-3.2": {
313+
CONF_PROMPT: DEFAULT_PROMPT_BASE_LEGACY,
314+
CONF_PROMPT_TEMPLATE: PROMPT_TEMPLATE_LLAMA3,
315+
CONF_USE_IN_CONTEXT_LEARNING_EXAMPLES: False,
316+
CONF_SERVICE_CALL_REGEX: FINE_TUNED_SERVICE_CALL_REGEX,
317+
CONF_TOOL_FORMAT: TOOL_FORMAT_MINIMAL,
318+
CONF_CONTEXT_LENGTH: 131072,
319+
},
300320
"home-3b-v3": {
301321
CONF_PROMPT: DEFAULT_PROMPT_BASE_LEGACY,
302322
CONF_PROMPT_TEMPLATE: PROMPT_TEMPLATE_ZEPHYR,
@@ -383,5 +403,5 @@
383403
},
384404
}
385405

386-
INTEGRATION_VERSION = "0.3.7"
406+
INTEGRATION_VERSION = "0.3.8"
387407
EMBEDDED_LLAMA_CPP_PYTHON_VERSION = "0.3.5"

0 commit comments

Comments (0)