from subprocess import PIPE, Popen
from typing import Optional

import pylspclient
from jupyter_ai import BaseProvider, TextField
from jupyter_ai import __version__ as jupyter_ai_version
from jupyter_ai_magics.models.completion import (
    InlineCompletionList,
    InlineCompletionReply,
    InlineCompletionRequest,
)
from jupyterlab import __version__ as jupyterlab_version

# Initialization payload sent to the Copilot language server. It identifies
# the hosting editor and plugin, as required by the Copilot LSP protocol.
INIT_PARAMS = {
    "capabilities": {"workspace": {"workspaceFolders": False}},
    "initializationOptions": {
        "editorInfo": {"name": "JupyterLab", "version": jupyterlab_version},
        "editorPluginInfo": {"name": "jupyter-ai", "version": jupyter_ai_version},
    },
}


def calc_position_lineno_and_char(prefix, suffix):
    """
    Return the zero-based (line, character) cursor position implied by
    ``prefix`` (the text before the cursor). GitHub Copilot LSP requires
    this position for completion requests.

    https://www.npmjs.com/package/@github/copilot-language-server#panel-completions

    ``suffix`` does not influence the cursor position; the parameter is kept
    for backward compatibility with existing callers.
    """
    # The cursor sits on the line that follows the last newline in the prefix.
    lineno = prefix.count("\n")
    # Character offset within that line. ``rfind`` returns -1 when the prefix
    # contains no newline, which makes the arithmetic below yield
    # ``len(prefix)`` — the previous implementation was off by one in that
    # single-line case (and returned -1, an invalid LSP position, for an
    # empty prefix), because it unconditionally subtracted one for a
    # separating newline that only exists when lineno > 0.
    last_newline = prefix.rfind("\n")
    char_pos = len(prefix) - last_newline - 1

    return lineno, char_pos


class GitHubCopilotLLM:
    """
    Thin wrapper around the GitHub Copilot language server, spoken to over
    LSP on stdio via ``pylspclient``.
    """

    # Handle to the running language-server subprocess, if any.
    process: Optional[Popen] = None

    def __init__(self, lsp_bin_path: str):
        """Start (if needed) the LSP server found at ``lsp_bin_path``."""
        self.lsp_bin_path = lsp_bin_path
        self.ensure_lsp_server_initialized()

    def _initialize(self):
        # Standard LSP handshake: an "initialize" request followed by an
        # "initialized" notification.
        self.lsp_endpoint.call_method(method_name="initialize", **INIT_PARAMS)
        self.lsp_endpoint.send_notification(method_name="initialized")

    def _signin(self):
        """Initiate the Copilot device sign-in flow; returns the LSP response."""
        self.ensure_lsp_server_initialized()
        res = self.lsp_endpoint.call_method(
            method_name="signIn",
        )
        return res

    def _signout(self):
        """Sign out of Copilot; returns the LSP response."""
        self.ensure_lsp_server_initialized()
        res = self.lsp_endpoint.call_method(
            method_name="signOut",
        )
        return res

    def _completion(self, code, pos_line, pos_char):
        """
        Request panel completions for ``code`` with the cursor at
        (``pos_line``, ``pos_char``); returns the raw LSP result dict.
        """
        self.ensure_lsp_server_initialized()
        # NOTE(review): a fixed dummy URI with version 0 is re-sent via
        # "didOpen" on every request; the server appears to tolerate this,
        # but "didChange" with an incremented version would be the
        # conventional LSP flow — confirm against the server's behavior.
        self.lsp_endpoint.send_notification(
            method_name="textDocument/didOpen",
            **{
                "textDocument": {
                    "uri": "file:///dummy",
                    "version": 0,
                    "languageId": "python",
                    "text": code,
                }
            },
        )

        res = self.lsp_endpoint.call_method(
            method_name="textDocument/copilotPanelCompletion",
            **{
                "textDocument": {
                    "uri": "file:///dummy",
                    "version": 0,
                },
                "position": {
                    "line": pos_line,
                    "character": pos_char,
                },
            },
        )
        return res

    def _start_lsp_server(self):
        # Spawn the server in stdio mode and wire both pylspclient endpoints
        # to its pipes.
        if not self.is_lsp_server_running:
            self.process = Popen(
                [self.lsp_bin_path, "--stdio"], stdin=PIPE, stdout=PIPE, stderr=PIPE
            )
            self.json_rpc_endpoint = pylspclient.JsonRpcEndpoint(
                self.process.stdin, self.process.stdout
            )
            self.lsp_endpoint = pylspclient.LspEndpoint(
                self.json_rpc_endpoint, timeout=15
            )
            self.lsp_endpoint.start()

    def _stop_lsp_server(self):
        # Stop the client thread first, then terminate the subprocess.
        self.lsp_endpoint.stop()
        self.process.kill()

    def ensure_lsp_server_initialized(self):
        """Start and handshake the LSP server if it is not already running."""
        if not self.is_lsp_server_running:
            self._start_lsp_server()
            self._initialize()

    @property
    def is_lsp_server_running(self):
        # poll() is None while the subprocess is still alive.
        return self.process is not None and self.process.poll() is None

    @property
    def _llm_type(self) -> str:
        return "github-copilot"


class GitHubCopilotProvider(BaseProvider):
    """Jupyter AI inline-completion provider backed by the Copilot LSP server."""

    id = "github-copilot"
    name = "GitHub Copilot"
    models = ["default"]
    model_id_key = "model"
    pypi_package_deps = ["pylspclient"]
    help = (
        "Make sure you've installed copilot-language-server [https://www.npmjs.com/package/@github/copilot-language-server](https://www.npmjs.com/package/@github/copilot-language-server) . "
        "Set this absolute path to `lsp_bin_path`."
    )
    fields = [
        TextField(
            key="lsp_bin_path", label="Copilot LSP binary absolute path", format="text"
        ),
    ]

    def __init__(
        self,
        **kwargs,
    ):
        super().__init__(**kwargs)
        # ``lsp_bin_path`` is populated by BaseProvider from the TextField above.
        self._llm = GitHubCopilotLLM(lsp_bin_path=self.lsp_bin_path)

    @classmethod
    def chat_models(cls):
        """
        Empty list because Copilot LSP does not support the chat feature
        https://github.com/github/copilot-language-server-release/issues/1
        """
        return []

    @classmethod
    def completion_models(cls):
        """
        Only `default` (GPT-4o as of May 2025) because Copilot LSP does not support different model selection
        https://docs.github.com/en/copilot/using-github-copilot/ai-models/changing-the-ai-model-for-copilot-code-completion
        """
        return ["default"]

    async def generate_inline_completions(self, request: InlineCompletionRequest):
        """Translate an inline-completion request into a Copilot panel request."""
        self._llm.ensure_lsp_server_initialized()

        full_text = request.prefix + request.suffix
        lineno, char = calc_position_lineno_and_char(request.prefix, request.suffix)
        suggestions = self._llm._completion(full_text, lineno, char)
        # Drop the first ``char`` characters of each suggestion — presumably
        # the panel-completion insertText repeats the current line's already
        # typed content; TODO confirm against the server's response shape.
        completions = [
            {
                "insertText": item["insertText"][char:],
            }
            for item in suggestions["items"]
        ]
        return InlineCompletionReply(
            list=InlineCompletionList(items=completions),
            reply_to=request.number,
        )
import unittest
from subprocess import PIPE
from unittest.mock import MagicMock, patch

from jupyter_ai_magics.models.completion import InlineCompletionRequest
from jupyter_ai_magics.partner_providers.copilot import (
    GitHubCopilotLLM,
    GitHubCopilotProvider,
    calc_position_lineno_and_char,
)


class TestGitHubCopilotLLM(unittest.TestCase):
    """Tests for the LSP wrapper; the language server is fully mocked."""

    def test_initialize_lsp_server(self):
        # Patch the subprocess and both pylspclient endpoints so that no real
        # language server is spawned.
        with patch(
            "jupyter_ai_magics.partner_providers.copilot.Popen"
        ) as mock_popen, patch(
            "jupyter_ai_magics.partner_providers.copilot.pylspclient.JsonRpcEndpoint"
        ) as mock_json_rpc_endpoint, patch(
            "jupyter_ai_magics.partner_providers.copilot.pylspclient.LspEndpoint"
        ) as mock_lsp_endpoint:
            fake_process = MagicMock()
            mock_popen.return_value = fake_process
            fake_endpoint = MagicMock()
            mock_lsp_endpoint.return_value = fake_endpoint

            GitHubCopilotLLM(lsp_bin_path="dummy_path")

            # Constructing the wrapper must spawn the server in stdio mode…
            mock_popen.assert_called_once_with(
                ["dummy_path", "--stdio"], stdin=PIPE, stdout=PIPE, stderr=PIPE
            )
            # …wire the JSON-RPC endpoint to its pipes…
            mock_json_rpc_endpoint.assert_called_once_with(
                fake_process.stdin, fake_process.stdout
            )
            # …and start the LSP endpoint thread with the expected timeout.
            mock_lsp_endpoint.assert_called_once_with(
                mock_json_rpc_endpoint.return_value, timeout=15
            )
            fake_endpoint.start.assert_called_once()

    def test_calc_position_lineno_and_char(self):
        # The cursor after "line1\nline2\n" sits at the start of line 2
        # (zero-based), character 0.
        lineno, char_pos = calc_position_lineno_and_char(
            "line1\nline2\n", "line3\nline4"
        )
        self.assertEqual(lineno, 2)
        self.assertEqual(char_pos, 0)


class TestGitHubCopilotProvider(unittest.TestCase):
    def test_generate_inline_completions(self):
        # NOTE(review): despite its name, this test drives the (mocked)
        # ``_completion`` call directly rather than awaiting
        # ``generate_inline_completions`` itself.
        with patch(
            "jupyter_ai_magics.partner_providers.copilot.GitHubCopilotLLM"
        ) as mock_llm_class:
            fake_llm = MagicMock()
            mock_llm_class.return_value = fake_llm
            fake_llm._completion.return_value = {
                "items": [{"insertText": "completion1"}, {"insertText": "completion2"}]
            }

            provider = GitHubCopilotProvider(
                lsp_bin_path="dummy_path", model_id="github-copilot"
            )
            result = provider._llm._completion("print()", 0, 6)

            self.assertEqual(len(result["items"]), 2)
            self.assertEqual(result["items"][0]["insertText"], "completion1")
            self.assertEqual(result["items"][1]["insertText"], "completion2")


if __name__ == "__main__":
    unittest.main()

# NOTE(review): this changeset also registers the provider as an entry point in
# packages/jupyter-ai-magics/pyproject.toml:
#   [project.entry-points."jupyter_ai.model_providers"]
#   github-copilot = "jupyter_ai_magics.partner_providers.copilot:GitHubCopilotProvider"