23 changes: 18 additions & 5 deletions py/chat.py
@@ -30,7 +30,7 @@ def initialize_chat_window():
vim.command("redraw")

file_content = vim.eval('trim(join(getline(1, "$"), "\n"))')
role_lines = re.findall(r'(^>>> user|^>>> system|^<<< assistant).*', file_content, flags=re.MULTILINE)
role_lines = re.findall(r'(^>>> user|^>>> system|^<<< thinking|^<<< assistant).*', file_content, flags=re.MULTILINE)
if not role_lines[-1].startswith(">>> user"):
# last role is not user, most likely completion was cancelled before
vim.command("normal! o")
@@ -58,14 +58,27 @@ def initialize_chat_window():
try:
last_content = messages[-1]["content"][-1]
if last_content['type'] != 'text' or last_content['text']:
vim.command("normal! Go\n<<< assistant\n\n")
vim.command("redraw")

print('Answering...')
vim.command("redraw")

text_chunks = make_chat_text_chunks(messages, options)
render_text_chunks(text_chunks)
def _chunks_to_sections(chunks):
first_thinking_chunk = True
first_content_chunk = True
for chunk in chunks:
if chunk['thinking'] is not None:
if first_thinking_chunk:
first_thinking_chunk = False
vim.command("normal! Go\n<<< thinking\n\n")
yield chunk['thinking']
if chunk['content'] is not None:
if first_content_chunk:
first_content_chunk = False
vim.command("normal! Go\n<<< assistant\n\n")
yield chunk['content']

chunks = make_chat_text_chunks(messages, options)
render_text_chunks(_chunks_to_sections(chunks))

vim.command("normal! a\n\n>>> user\n\n")
vim.command("redraw")
5 changes: 4 additions & 1 deletion py/complete.py
@@ -34,7 +34,10 @@ def chat_engine(prompt):
chat_content = f"{initial_prompt}\n\n>>> user\n\n{prompt}".strip()
messages = parse_chat_messages(chat_content)
print_debug("[engine-chat] text:\n" + chat_content)
return make_chat_text_chunks(messages, config_options)
return filter(
lambda c: c,
map(lambda c: c.get("content"), make_chat_text_chunks(messages, config_options)),
)

engines = {"chat": chat_engine, "complete": complete_engine}

37 changes: 19 additions & 18 deletions py/utils.py
@@ -158,6 +158,8 @@ def parse_chat_messages(chat_content):
case '>>> system':
messages.append({'role': 'system', 'content': [{ 'type': 'text', 'text': '' }]})
current_type = 'system'
case '<<< thinking':
current_type = 'thinking'
case '<<< assistant':
messages.append({'role': 'assistant', 'content': [{ 'type': 'text', 'text': '' }]})
current_type = 'assistant'
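The '<<< thinking' case only switches the parser state and appends no message; assuming the remainder of parse_chat_messages (not shown in this hunk) accumulates text only for the recognized roles, previously rendered reasoning is dropped from the next request. For a buffer like the example below, only the user and assistant messages would be sent back to the API:

    >>> user

    What does this function do?

    <<< thinking

    The user is asking about the parser...

    <<< assistant

    It turns the chat buffer into a list of messages.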
@@ -315,34 +317,33 @@ def make_chat_text_chunks(messages, config_options):
openai_options = make_openai_options(config_options)
http_options = make_http_options(config_options)

def _flatten_content(messages):
"""Some providers like api.deepseek.com & api.groq.com expect a flat 'content' field."""
for message in messages:
match message['role']:
case 'system' | 'assistant':
message['content'] = '\n'.join(map(lambda c: c['text'], message['content']))
return messages

request = {
'messages': messages,
'messages': _flatten_content(messages),
**openai_options
}
print_debug("[engine-chat] request: {}", request)
url = config_options['endpoint_url']
response = openai_request(url, request, http_options)
_choice_key = 'delta' if openai_options['stream'] else 'message'

def _choices(resp):
choices = resp.get('choices', [{}])

# NOTE choices may exist in the response, but be an empty list.
if not choices:
return [{}]

return choices
def _get_delta(resp):
choices = resp.get('choices') or [{}]
return choices[0].get(_choice_key, {})

def map_chunk_no_stream(resp):
def _map_chunk(resp):
print_debug("[engine-chat] response: {}", resp)
return _choices(resp)[0].get('message', {}).get('content', '')

def map_chunk_stream(resp):
print_debug("[engine-chat] response: {}", resp)
return _choices(resp)[0].get('delta', {}).get('content', '')

map_chunk = map_chunk_stream if openai_options['stream'] else map_chunk_no_stream
delta = _get_delta(resp)
return {'thinking': delta.get('reasoning_content'), 'content': delta.get('content')}

return map(map_chunk, response)
return map(_map_chunk, response)

def read_role_files():
plugin_root = vim.eval("s:plugin_root")
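The refactored _map_chunk handles streaming and non-streaming responses through a single key and forwards reasoning separately from regular content. A rough sketch of what it yields for a streamed delta carrying reasoning_content (a field emitted by some OpenAI-compatible reasoning endpoints, e.g. DeepSeek's reasoner; the response dict below is illustrative):

    resp = {'choices': [{'delta': {'reasoning_content': 'Let me check the docs...', 'content': None}}]}

    _choice_key = 'delta'  # stream mode; 'message' when streaming is off
    choices = resp.get('choices') or [{}]
    delta = choices[0].get(_choice_key, {})
    print({'thinking': delta.get('reasoning_content'), 'content': delta.get('content')})
    # {'thinking': 'Let me check the docs...', 'content': None}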
1 change: 1 addition & 0 deletions syntax/aichat.vim
@@ -1,6 +1,7 @@
syntax match aichatRole ">>> system"
syntax match aichatRole ">>> user"
syntax match aichatRole ">>> include"
syntax match aichatRole "<<< thinking"
syntax match aichatRole "<<< assistant"

highlight default link aichatRole Comment