diff --git a/ai_oca_native_llm/README.rst b/ai_oca_native_llm/README.rst new file mode 100644 index 0000000..14627c5 --- /dev/null +++ b/ai_oca_native_llm/README.rst @@ -0,0 +1,77 @@ +.. image:: https://odoo-community.org/readme-banner-image + :target: https://odoo-community.org/get-involved?utm_source=readme + :alt: Odoo Community Association + +================================== +Native AI LLM Integration (Ollama) +================================== + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:c6d57c5b0f7a15f580843211db4c2ea0935e0b89601a4ba5af44a7dba8c489eb + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png + :target: https://odoo-community.org/page/development-status + :alt: Beta +.. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fai-lightgray.png?logo=github + :target: https://github.com/OCA/ai/tree/19.0/ai_oca_native_llm + :alt: OCA/ai +.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png + :target: https://translation.odoo-community.org/projects/ai-19-0/ai-19-0-ai_oca_native_llm + :alt: Translate me on Weblate +.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png + :target: https://runboat.odoo-community.org/builds?repo=OCA/ai&target_branch=19.0 + :alt: Try me on Runboat + +|badge1| |badge2| |badge3| |badge4| |badge5| + +Provides a basic Python client wrapper to communicate with a +local/remote Ollama instance. + +**Table of contents** + +.. contents:: + :local: + +Known issues / Roadmap +====================== + +- Transform this module into a base module that provides an abstraction to + chat with any LLM provider + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Maintainers +----------- + +This module is maintained by the OCA. + +.. image:: https://odoo-community.org/logo.png + :alt: Odoo Community Association + :target: https://odoo-community.org + +OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use. + +This module is part of the `OCA/ai `_ project on GitHub. + +You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute. diff --git a/ai_oca_native_llm/__init__.py b/ai_oca_native_llm/__init__.py new file mode 100644 index 0000000..0650744 --- /dev/null +++ b/ai_oca_native_llm/__init__.py @@ -0,0 +1 @@ +from . 
import models diff --git a/ai_oca_native_llm/__manifest__.py b/ai_oca_native_llm/__manifest__.py new file mode 100644 index 0000000..6c35bd2 --- /dev/null +++ b/ai_oca_native_llm/__manifest__.py @@ -0,0 +1,19 @@ +# Copyright 2025 Pierre Verkest +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +{ + "name": "Native AI LLM Integration (Ollama)", + "version": "19.0.1.0.0", + "category": "AI", + "summary": "Core LLM wrapper for Ollama", + "author": "Odoo Community Association (OCA)", + "website": "https://github.com/OCA/ai", + "license": "LGPL-3", + "depends": ["base"], + "external_dependencies": { + "python": ["ollama"], + }, + "data": [ + "views/res_config_settings_views.xml", + ], + "installable": True, +} diff --git a/ai_oca_native_llm/models/__init__.py b/ai_oca_native_llm/models/__init__.py new file mode 100644 index 0000000..4fc5a44 --- /dev/null +++ b/ai_oca_native_llm/models/__init__.py @@ -0,0 +1,2 @@ +from . import res_config_settings +from . import ai_llm_client diff --git a/ai_oca_native_llm/models/ai_llm_client.py b/ai_oca_native_llm/models/ai_llm_client.py new file mode 100644 index 0000000..044a829 --- /dev/null +++ b/ai_oca_native_llm/models/ai_llm_client.py @@ -0,0 +1,49 @@ +# Copyright 2025 Pierre Verkest +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +import logging + +from ollama import Client + +from odoo import api, models + +_logger = logging.getLogger(__name__) + + +class AiLlmClient(models.AbstractModel): + """ + Abstract model to provide a simple Python client for Ollama. + It resolves configuration dynamically and performs the HTTP calls. + """ + + _name = "ai.llm.client" + _description = "AI LLM Client Wrapper" + + @api.model + def _get_client(self): + url = ( + self.env["ir.config_parameter"] + .sudo() + .get_param("ai_llm.ollama_url", "http://localhost:11434") + ) + return Client(host=url) + + @api.model + def chat(self, messages, model=None, options=None): + """ + Sends a chat request to Ollama. + :param messages: list of dicts [{'role': 'user', 'content': 'hello'}, ...] + :param options: dict of optional parameters (e.g. 
temperature) + :param model: optional model name; defaults to the ai_llm.ollama_model config parameter + :return: str content of the assistant's reply message + """ + client = self._get_client() + if not model: + model = ( + self.env["ir.config_parameter"] + .sudo() + .get_param("ai_llm.ollama_model", "llama3") + ) + + response = client.chat( + model=model, messages=messages, options=options, stream=False + ) + return response.message.content diff --git a/ai_oca_native_llm/models/res_config_settings.py b/ai_oca_native_llm/models/res_config_settings.py new file mode 100644 index 0000000..aa7d916 --- /dev/null +++ b/ai_oca_native_llm/models/res_config_settings.py @@ -0,0 +1,20 @@ +# Copyright 2025 Pierre Verkest +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +from odoo import fields, models + + +class ResConfigSettings(models.TransientModel): + _inherit = "res.config.settings" + + ai_llm_ollama_url = fields.Char( + string="Ollama URL", + config_parameter="ai_llm.ollama_url", + default="http://localhost:11434", + help="The URL of the Ollama server.", + ) + ai_llm_ollama_model = fields.Char( + string="Ollama Model", + config_parameter="ai_llm.ollama_model", + default="llama3", + help="The model to use for the AI features (e.g., llama3, mistral).", + ) diff --git a/ai_oca_native_llm/pyproject.toml b/ai_oca_native_llm/pyproject.toml new file mode 100644 index 0000000..4231d0c --- /dev/null +++ b/ai_oca_native_llm/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git a/ai_oca_native_llm/readme/DESCRIPTION.md b/ai_oca_native_llm/readme/DESCRIPTION.md new file mode 100644 index 0000000..e977fb7 --- /dev/null +++ b/ai_oca_native_llm/readme/DESCRIPTION.md @@ -0,0 +1,2 @@ +Provides a basic Python client wrapper to communicate with +a local/remote Ollama instance. diff --git a/ai_oca_native_llm/readme/ROADMAP.md b/ai_oca_native_llm/readme/ROADMAP.md new file mode 100644 index 0000000..c3eec63 --- /dev/null +++ b/ai_oca_native_llm/readme/ROADMAP.md @@ -0,0 +1,2 @@ +* Transform this module into a base module that provides an + abstraction to chat with any LLM provider diff --git a/ai_oca_native_llm/static/description/index.html b/ai_oca_native_llm/static/description/index.html new file mode 100644 index 0000000..5ae2fcd --- /dev/null +++ b/ai_oca_native_llm/static/description/index.html @@ -0,0 +1,424 @@ + + + + + +README.rst + + + +
+ + + +Odoo Community Association + +
+

Native AI LLM Integration (Ollama)

+ +

Beta License: LGPL-3 OCA/ai Translate me on Weblate Try me on Runboat

+

Provides a basic Python client wrapper to communicate with a +local/remote Ollama instance.

+

Table of contents

+ +
+

Known issues / Roadmap

+
    +
  • Transform this module into a base module that provides an abstraction to chat +with any LLM provider
  • +
+
+
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Maintainers

+

This module is maintained by the OCA.

+ +Odoo Community Association + +

OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use.

+

This module is part of the OCA/ai project on GitHub.

+

You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.

+
+
+
+
+ + + diff --git a/ai_oca_native_llm/tests/__init__.py b/ai_oca_native_llm/tests/__init__.py new file mode 100644 index 0000000..197d444 --- /dev/null +++ b/ai_oca_native_llm/tests/__init__.py @@ -0,0 +1 @@ +from . import test_ai_llm_client diff --git a/ai_oca_native_llm/tests/common.py b/ai_oca_native_llm/tests/common.py new file mode 100644 index 0000000..5b8be0a --- /dev/null +++ b/ai_oca_native_llm/tests/common.py @@ -0,0 +1,8 @@ +class OllamaMockMessage: + def __init__(self, content): + self.content = content + + +class OllamaMockResponse: + def __init__(self, message_content): + self.message = OllamaMockMessage(message_content) diff --git a/ai_oca_native_llm/tests/fake_models.py b/ai_oca_native_llm/tests/fake_models.py new file mode 100644 index 0000000..0e7d4c4 --- /dev/null +++ b/ai_oca_native_llm/tests/fake_models.py @@ -0,0 +1,7 @@ +from odoo import models + + +class FakeModel(models.AbstractModel): + _name = "fake.model" + _inherit = "ai.llm.client" + _description = "Fake Model for AI LLM Client testing" diff --git a/ai_oca_native_llm/tests/test_ai_llm_client.py b/ai_oca_native_llm/tests/test_ai_llm_client.py new file mode 100644 index 0000000..4b27cbe --- /dev/null +++ b/ai_oca_native_llm/tests/test_ai_llm_client.py @@ -0,0 +1,67 @@ +from unittest.mock import patch + +from odoo.orm.model_classes import add_to_registry +from odoo.tests.common import TransactionCase + +from odoo.addons.ai_oca_native_llm.tests.common import OllamaMockResponse + + +class TestAiLlmClient(TransactionCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + from .fake_models import FakeModel + + add_to_registry(cls.registry, FakeModel) + cls.addClassCleanup(cls.registry.__delitem__, "fake.model") + cls.registry._setup_models__(cls.env.cr, ["fake.model"]) + cls.registry.init_models(cls.env.cr, ["fake.model"], {"models_to_check": True}) + cls.env["ir.config_parameter"].sudo().set_param( + "ai_llm.ollama_url", "http://test-url:11434" + ) + cls.env["ir.config_parameter"].sudo().set_param( + "ai_llm.ollama_model", "test-model" + ) + + def setUp(self): + super().setUp() + + @patch("odoo.addons.ai_oca_native_llm.models.ai_llm_client.Client") + def test_chat_success(self, mock_client_class): + # Mock the ollama.Client and its chat method + mock_instance = mock_client_class.return_value + mock_instance.chat.return_value = OllamaMockResponse("Hello from mock") + + messages = [{"role": "user", "content": "Hi"}] + response = self.env["fake.model"].chat(messages, options={"temperature": 0.7}) + + # Assertions + mock_client_class.assert_called_once_with(host="http://test-url:11434") + mock_instance.chat.assert_called_once_with( + model="test-model", + messages=messages, + options={"temperature": 0.7}, + stream=False, + ) + self.assertEqual(response, "Hello from mock") + + @patch("odoo.addons.ai_oca_native_llm.models.ai_llm_client.Client") + def test_chat_success_with_specific_model(self, mock_client_class): + # Mock the ollama.Client and its chat method + mock_instance = mock_client_class.return_value + mock_instance.chat.return_value = OllamaMockResponse("Hello from mock") + + messages = [{"role": "user", "content": "Hi"}] + response = self.env["fake.model"].chat( + messages, model="other-model", options={"temperature": 0.7} + ) + + # Assertions + mock_client_class.assert_called_once_with(host="http://test-url:11434") + mock_instance.chat.assert_called_once_with( + model="other-model", + messages=messages, + options={"temperature": 0.7}, + stream=False, + ) + self.assertEqual(response, "Hello from mock") 
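Reviewer note: the `fake.model` used in these tests doubles as the intended usage pattern. Below is a minimal sketch (not part of this patch) of how a dependent module might consume the `ai.llm.client` wrapper; the model and method names are illustrative assumptions — only the `_inherit` target, the `chat()` signature, and the config parameter keys come from this diff.

```python
# Illustrative sketch only (hypothetical module), not part of this patch.
from odoo import models


class AiSummaryDemo(models.AbstractModel):
    _name = "ai.summary.demo"  # hypothetical model name
    _inherit = "ai.llm.client"
    _description = "Demo consumer of ai.llm.client"

    def summarize(self, text):
        # chat() resolves the Ollama host and default model from the
        # ir.config_parameter keys ai_llm.ollama_url / ai_llm.ollama_model,
        # then returns the assistant message content as a string.
        messages = [
            {"role": "system", "content": "Summarize the user text in one sentence."},
            {"role": "user", "content": text},
        ]
        return self.chat(messages, options={"temperature": 0.2})
```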
diff --git a/ai_oca_native_llm/views/res_config_settings_views.xml b/ai_oca_native_llm/views/res_config_settings_views.xml new file mode 100644 index 0000000..4c823a6 --- /dev/null +++ b/ai_oca_native_llm/views/res_config_settings_views.xml @@ -0,0 +1,66 @@ + + + + + res.config.settings.view.form.inherit.ai.llm + res.config.settings + + + + + + + + + + + + + + + + + + + + Settings + res.config.settings + form + {'module' : 'ai_oca_native_llm', 'bin_size': False} + + + + + + diff --git a/ai_oca_native_thread/README.rst b/ai_oca_native_thread/README.rst new file mode 100644 index 0000000..8578191 --- /dev/null +++ b/ai_oca_native_thread/README.rst @@ -0,0 +1,73 @@ +.. image:: https://odoo-community.org/readme-banner-image + :target: https://odoo-community.org/get-involved?utm_source=readme + :alt: Odoo Community Association + +============================= +Native AI Thread (Chatter UI) +============================= + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:3b5e674b24d188b9d3a277c1d19ed16ca99c120bd3e6a941eee92c24385db93d + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png + :target: https://odoo-community.org/page/development-status + :alt: Beta +.. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fai-lightgray.png?logo=github + :target: https://github.com/OCA/ai/tree/19.0/ai_oca_native_thread + :alt: OCA/ai +.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png + :target: https://translation.odoo-community.org/projects/ai-19-0/ai-19-0-ai_oca_native_thread + :alt: Translate me on Weblate +.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png + :target: https://runboat.odoo-community.org/builds?repo=OCA/ai&target_branch=19.0 + :alt: Try me on Runboat + +|badge1| |badge2| |badge3| |badge4| |badge5| + +AI Thread +========= + +Add an LLM discussion thread with a chatbot in the chatter + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Maintainers +----------- + +This module is maintained by the OCA. + +.. image:: https://odoo-community.org/logo.png + :alt: Odoo Community Association + :target: https://odoo-community.org + +OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use. + +This module is part of the `OCA/ai `_ project on GitHub. + +You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute. diff --git a/ai_oca_native_thread/__init__.py b/ai_oca_native_thread/__init__.py new file mode 100644 index 0000000..0650744 --- /dev/null +++ b/ai_oca_native_thread/__init__.py @@ -0,0 +1 @@ +from . 
import models diff --git a/ai_oca_native_thread/__manifest__.py b/ai_oca_native_thread/__manifest__.py new file mode 100644 index 0000000..d9733cb --- /dev/null +++ b/ai_oca_native_thread/__manifest__.py @@ -0,0 +1,26 @@ +# Copyright 2025 Pierre Verkest +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +{ + "name": "Native AI Thread (Chatter UI)", + "version": "19.0.1.0.0", + "category": "AI", + "summary": "AI Thread history and Chatter UI integration", + "author": "Odoo Community Association (OCA)", + "website": "https://github.com/OCA/ai", + "license": "LGPL-3", + "depends": ["mail", "ai_oca_native_llm"], + "data": [ + "security/ir.model.access.csv", + "security/security.xml", + "views/ai_thread_views.xml", + ], + "assets": { + "web.assets_backend": [ + "ai_oca_native_thread/static/src/components/**/*", + ], + "web.assets_tests": [ + "ai_oca_native_thread/static/tests/tours/**/*", + ], + }, + "installable": True, +} diff --git a/ai_oca_native_thread/models/__init__.py b/ai_oca_native_thread/models/__init__.py new file mode 100644 index 0000000..e4a3c98 --- /dev/null +++ b/ai_oca_native_thread/models/__init__.py @@ -0,0 +1,2 @@ +from . import ai_thread +from . import ai_message diff --git a/ai_oca_native_thread/models/ai_message.py b/ai_oca_native_thread/models/ai_message.py new file mode 100644 index 0000000..2703367 --- /dev/null +++ b/ai_oca_native_thread/models/ai_message.py @@ -0,0 +1,24 @@ +# Copyright 2025 Pierre Verkest +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +from odoo import fields, models + + +class AiMessage(models.Model): + _name = "ai.message" + _description = "AI Thread Message" + _order = "create_date asc" + + thread_id = fields.Many2one( + "ai.thread", required=True, ondelete="cascade", index=True + ) + role = fields.Selection( + [ + ("system", "System"), + ("user", "User"), + ("assistant", "Assistant"), + ("tool", "Tool"), + ], + required=True, + default="user", + ) + content = fields.Text(required=True) diff --git a/ai_oca_native_thread/models/ai_thread.py b/ai_oca_native_thread/models/ai_thread.py new file mode 100644 index 0000000..d7711b1 --- /dev/null +++ b/ai_oca_native_thread/models/ai_thread.py @@ -0,0 +1,206 @@ +# Copyright 2025 Pierre Verkest +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +import json +import logging + +from odoo import api, fields, models +from odoo.tools import html2plaintext, json_default + +_logger = logging.getLogger(__name__) + + +class AiThread(models.Model): + _name = "ai.thread" + _inherit = "ai.llm.client" + _description = "AI Conversation Thread" + _order = "create_date desc" + + name = fields.Char(string="Reference", required=True, default="New Thread") + res_model = fields.Char(string="Related Document Model", required=True, index=True) + res_id = fields.Integer(string="Related Document ID", required=True, index=True) + user_id = fields.Many2one( + "res.users", + string="User", + default=lambda self: self.env.user, + required=True, + index=True, + ) + message_ids = fields.One2many("ai.message", "thread_id", string="Messages") + + @api.model + def _get_field_type_to_ignore(self): + return ("binary", "image") + + @api.model + def _get_field_names_to_ignore(self): + """Fields to ignore when extracting record context. + We especially ignore mail.thread fields to put them in a separate block. 
+ """ + return ( + "message_ids", + "message_follower_ids", + "activity_ids", + "message_partner_ids", + ) + + def _get_record_context(self, record): + """Extract user-accessible fields into a JSON serializable dict.""" + self.ensure_one() + context_data = {} + if not record.exists(): + return context_data + + try: + # check access rights on the record + record.check_access("read") + # Using read()[0] returns primitive data ready for json + # except for Many2one, One2many, Many2many, dates etc. + raw_data = record.read()[0] + except Exception: + # If the user doesn't have read access to the record, we ignore. + return context_data + + for field_name, value in raw_data.items(): + if not value: + continue + + field = record._fields[field_name] + # Skip massive fields or non-contextual fields + if field.type in self._get_field_type_to_ignore(): + continue + if field_name in self._get_field_names_to_ignore(): + continue + + if field.type == "many2one": + # read() returns a tuple (id, display_name) for many2one + context_data[field_name] = ( + value[1] if isinstance(value, tuple) else value + ) + elif field.type in ("one2many", "many2many"): + # read() returns a list of IDs for x2many + if isinstance(value, list): + related_records = self.env[field.comodel_name].browse(value) + context_data[field_name] = related_records.mapped("display_name") + else: + context_data[field_name] = value + + return context_data + + def _add_message(self, content, role="user"): + return self.env["ai.message"].create( + { + "thread_id": self.id, + "role": role, + "content": content, + } + ) + + def _get_chatter_history_content(self, record): + """Extract and format the chatter history to send to the LLM.""" + if "message_ids" not in record._fields or not record.message_ids: + return "" + + messages = record.message_ids.filtered(lambda m: m.body or m.subject) + if not messages: + return "" + + history = [] + # Sort oldest to newest for chronological reading by LLM + for msg in messages.sorted("id"): + author = msg.author_id.name or msg.email_from or "System" + date = msg.date.strftime("%Y-%m-%d %H:%M:%S") if msg.date else "" + body = html2plaintext(msg.body) if msg.body else "" + subject = msg.subject or "" + + msg_block = f"[{date}] {author}:" + if subject: + msg_block += f"\nSubject: {subject}" + if body: + msg_block += f"\n{body.strip()}" + + history.append(msg_block) + + return "\n---\n".join(history) + + def _get_system_prompt(self) -> list[dict[str, str]]: + # Inject basic system prompt with record context + record = self.env[self.res_model].browse(self.res_id) + system_content = ( + "You are a helpful Odoo Assistant. " + f"The user: {self.env.user.name}, is currently looking at " + f"a record of type {self.res_model}." + f"The user speak {self.env.user.partner_id.lang} which should be" + "the prefered language for the responses specify." + ) + if record.exists(): + system_content += ( + f"\nThe contextual record name is '{record.display_name}'." 
+ ) + context_data = self._get_record_context(record) + if context_data: + context_str = json.dumps(context_data, default=json_default, indent=2) + system_content += ( + "\nHere is the data associated with this record in " + f"JSON format:\n{context_str}\n" + ) + + if "message_ids" in record._fields: + chatter_content = self._get_chatter_history_content(record) + if chatter_content: + system_content += ( + "\nHere is the chatter history of the record:\n" + f"{chatter_content}\n" + ) + return [{"role": "system", "content": system_content}] + + def _get_thread_messages(self) -> list[dict[str, str]]: + return self.message_ids.sorted("create_date").mapped( + lambda m: {"role": m.role, "content": m.content} + ) + + def _generate_and_set_title(self, first_message_content): + prompt = [ + { + "role": "system", + "content": ( + "You are a helpful assistant. Provide a very " + "concise title (3-5 words max) summarizing the " + "user's request. Do not include quotes or extra " + "text. Reply in the user's language " + f"({self.env.user.partner_id.lang})." + ), + }, + {"role": "user", "content": first_message_content}, + ] + title = self.chat(prompt) + if title: + today_str = fields.Date.context_today(self).strftime("%Y-%m-%d") + # Strip spaces and quotes + title = title.strip(" \"'\n") + self.name = f"[{today_str}] {title}" + + def action_send_message(self, content): + self.ensure_one() + is_first_message = not bool(self.message_ids) + + # 1. Create User Message + self._add_message(content) + + # 2. Generate a thread title from the first user message + if is_first_message: + self._generate_and_set_title(content) + + ollama_messages = self._get_system_prompt() + self._get_thread_messages() + _logger.debug("Content sent to Ollama...\n%s", ollama_messages) + # 3. Call LLM + ai_content = self.chat(ollama_messages) + + # 4. 
Save Assistant Message + if ai_content: + self._add_message(ai_content, role="assistant") + return { + "status": "success", + "content": ai_content, + "thread_name": self.name, + } + + return {"status": "error", "content": "No response from LLM"} diff --git a/ai_oca_native_thread/pyproject.toml b/ai_oca_native_thread/pyproject.toml new file mode 100644 index 0000000..4231d0c --- /dev/null +++ b/ai_oca_native_thread/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git a/ai_oca_native_thread/readme/DESCRIPTION.md b/ai_oca_native_thread/readme/DESCRIPTION.md new file mode 100644 index 0000000..ec7758c --- /dev/null +++ b/ai_oca_native_thread/readme/DESCRIPTION.md @@ -0,0 +1,3 @@ +# AI Thread + +Add an LLM discussion thread with a chatbot in the chatter diff --git a/ai_oca_native_thread/security/ir.model.access.csv b/ai_oca_native_thread/security/ir.model.access.csv new file mode 100644 index 0000000..5abe6bf --- /dev/null +++ b/ai_oca_native_thread/security/ir.model.access.csv @@ -0,0 +1,3 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_ai_thread_user,ai.thread.user,model_ai_thread,base.group_user,1,1,1,1 +access_ai_message_user,ai.message.user,model_ai_message,base.group_user,1,1,1,1 diff --git a/ai_oca_native_thread/security/security.xml b/ai_oca_native_thread/security/security.xml new file mode 100644 index 0000000..2753e7b --- /dev/null +++ b/ai_oca_native_thread/security/security.xml @@ -0,0 +1,20 @@ + + + + + AI Thread: User only + + [('user_id', '=', user.id)] + + + + + AI Message: User only + + [('thread_id.user_id', '=', user.id)] + + + diff --git a/ai_oca_native_thread/static/description/index.html b/ai_oca_native_thread/static/description/index.html new file mode 100644 index 0000000..eb8e584 --- /dev/null +++ b/ai_oca_native_thread/static/description/index.html @@ -0,0 +1,409 @@ + + + + + +README.rst + + + +
+ + + +Odoo Community Association + +
+

Native AI Thread (Chatter UI)

+ +

Beta License: LGPL-3 OCA/ai Translate me on Weblate Try me on Runboat

+
+

AI Thread

+

Add an LLM discussion thread with a chatbot in the chatter

+

Table of contents

+
+
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Maintainers

+

This module is maintained by the OCA.

+ +Odoo Community Association + +

OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use.

+

This module is part of the OCA/ai project on GitHub.

+

You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.

+
+
+
+
+ + diff --git a/ai_oca_native_thread/static/src/components/ai_thread.esm.js b/ai_oca_native_thread/static/src/components/ai_thread.esm.js new file mode 100644 index 0000000..f85a9c6 --- /dev/null +++ b/ai_oca_native_thread/static/src/components/ai_thread.esm.js @@ -0,0 +1,149 @@ +import {Component, onWillStart, onWillUpdateProps, useState} from "@odoo/owl"; +import {useService} from "@web/core/utils/hooks"; + +export class AiThread extends Component { + setup() { + this.orm = useService("orm"); + this.state = useState({ + messages: [], + prompt: "", + isLoading: false, + threads: [], + threadId: null, + }); + + onWillStart(async () => { + await this.initConversations(this.props.threadModel, this.props.recordId); + }); + + onWillUpdateProps(async (nextProps) => { + if ( + nextProps.recordId !== this.props.recordId || + nextProps.threadModel !== this.props.threadModel + ) { + this.state.prompt = ""; + this.state.messages = []; + this.state.threadId = null; + await this.initConversations(nextProps.threadModel, nextProps.recordId); + } + }); + } + + async fetchThreads(threadModel, recordId, threadId = null) { + this.state.threads = await this.orm.searchRead( + "ai.thread", + [ + ["res_model", "=", threadModel], + ["res_id", "=", recordId], + ], + ["id", "name"], + {order: "create_date desc"} + ); + this.state.threadId = threadId; + } + + async initConversations(threadModel, recordId) { + this.state.threadId = null; + this.state.messages = []; + await this.fetchThreads(threadModel, recordId, null); + } + + async onThreadChange(ev) { + const selectedId = ev.target.value; + if (selectedId === "new" || !selectedId) { + this.state.threadId = null; + this.state.messages = []; + } else { + this.state.threadId = parseInt(selectedId, 10); + await this.loadMessages(); + } + } + + async loadMessages() { + if (!this.state.threadId) return; + const messages = await this.orm.searchRead( + "ai.message", + [["thread_id", "=", this.state.threadId]], + ["role", "content"] + ); + this.state.messages = messages; + } + + async deleteCurrentThread() { + if (!this.state.threadId) return; + await this.orm.unlink("ai.thread", [this.state.threadId]); + await this.initConversations(this.props.threadModel, this.props.recordId); + } + + onKeydown(e) { + if (e.key === "Enter" && !e.shiftKey) { + e.preventDefault(); + this.sendMessage(); + } + } + + async sendMessage() { + if (!this.state.prompt.trim()) return; + + const content = this.state.prompt; + this.state.prompt = ""; + this.state.isLoading = true; + + // Optimistically add user message + this.state.messages.push({role: "user", content: content}); + + try { + let isNew = false; + let threadId = this.state.threadId; + if (!threadId) { + isNew = true; + // Create thread if not exists + const threadIds = await this.orm.create("ai.thread", [ + { + res_model: this.props.threadModel, + res_id: this.props.recordId, + }, + ]); + threadId = threadIds[0]; + } + + const response = await this.orm.call("ai.thread", "action_send_message", [ + threadId, + content, + ]); + + if (response.status === "success") { + this.state.messages.push({ + role: "assistant", + content: response.content, + }); + if (isNew) { + await this.fetchThreads( + this.props.threadModel, + this.props.recordId, + threadId + ); + } + } else { + this.state.messages.push({ + role: "system", + content: `Error: ${response.content}`, + }); + } + } catch (error) { + console.error("Failed to send AI message", error); + this.state.messages.push({ + role: "system", + content: `Network or Server Error.`, + }); + } 
finally { + this.state.isLoading = false; + } + } +} + +AiThread.template = "ai_oca_native_thread.AiThread"; +AiThread.props = { + threadModel: String, + recordId: Number, +}; diff --git a/ai_oca_native_thread/static/src/components/ai_thread.xml b/ai_oca_native_thread/static/src/components/ai_thread.xml new file mode 100644 index 0000000..0f90745 --- /dev/null +++ b/ai_oca_native_thread/static/src/components/ai_thread.xml @@ -0,0 +1,98 @@ + + + + +
+ +
+ + +
+ + +
+ +
  No AI history for this new thread. Say hi!
+
+ +
+
+ : +
+ +
+
+
+
+ +
+
+ AI is thinking... +
+
+
+
+ + +
+
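Reviewer note: to make the review concrete, here is a sketch of the flow the OWL component above drives over RPC, replayed as server-side calls (e.g. from an Odoo shell). It assumes a reachable Ollama instance and an existing partner record; the model `ai.thread`, `action_send_message()`, and its return keys are taken from this diff, everything else is illustrative.

```python
# Illustrative sketch only — exercises ai.thread the way the chatter
# widget does. Assumes `env` from an Odoo shell and a running Ollama.
partner = env["res.partner"].search([], limit=1)
thread = env["ai.thread"].create(
    {
        "res_model": "res.partner",
        "res_id": partner.id,
    }
)
# action_send_message() stores the user message, titles the thread on the
# first message, builds a system prompt from the record's fields and
# chatter history, calls the LLM, then stores the assistant reply.
result = thread.action_send_message("Summarize what we know about this partner.")
if result["status"] == "success":
    # On success the dict also carries the auto-generated thread title.
    print(result["thread_name"], result["content"])
```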