-
Notifications
You must be signed in to change notification settings - Fork 113
Show reasoner model's thinking in a <<< thinking section
#151
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -156,10 +156,12 @@ def parse_chat_messages(chat_content): | |
| for line in lines: | ||
| match line: | ||
| case '>>> system': | ||
| messages.append({'role': 'system', 'content': [{ 'type': 'text', 'text': '' }]}) | ||
| messages.append({'role': 'system', 'content': ''}) | ||
|
||
| current_type = 'system' | ||
| case '<<< thinking': | ||
| current_type = 'thinking' | ||
| case '<<< assistant': | ||
| messages.append({'role': 'assistant', 'content': [{ 'type': 'text', 'text': '' }]}) | ||
| messages.append({'role': 'assistant', 'content': ''}) | ||
| current_type = 'assistant' | ||
| case '>>> user': | ||
| if messages and messages[-1]['role'] == 'user': | ||
|
|
@@ -175,7 +177,9 @@ def parse_chat_messages(chat_content): | |
| if not messages: | ||
| continue | ||
| match current_type: | ||
| case 'assistant' | 'system' | 'user': | ||
| case 'system' | 'assistant': | ||
| messages[-1]['content'] += '\n' + line | ||
| case 'user': | ||
| messages[-1]['content'][-1]['text'] += '\n' + line | ||
| case 'include': | ||
| paths = parse_include_paths(line) | ||
|
|
@@ -185,6 +189,10 @@ def parse_chat_messages(chat_content): | |
|
|
||
| for message in messages: | ||
| # strip newlines from the text content as it causes empty responses | ||
|
|
||
| if isinstance(message['content'], str): | ||
| message['content'] = message['content'].strip() | ||
| continue | ||
| for content in message['content']: | ||
| if content['type'] == 'text': | ||
| content['text'] = content['text'].strip() | ||
|
|
@@ -334,11 +342,19 @@ def _choices(resp): | |
|
|
||
| def map_chunk_no_stream(resp): | ||
| print_debug("[engine-chat] response: {}", resp) | ||
| return _choices(resp)[0].get('message', {}).get('content', '') | ||
| message = _choices(resp)[0].get('message', {}) | ||
| reasoning_content = message.get('reasoning_content', '') | ||
| content = message.get('content', '') | ||
| return {"content": content, "thinking": reasoning_content} | ||
|
|
||
| def map_chunk_stream(resp): | ||
| print_debug("[engine-chat] response: {}", resp) | ||
| return _choices(resp)[0].get('delta', {}).get('content', '') | ||
| delta = _choices(resp)[0].get('delta', {}) | ||
| if reasoning_content := delta.get('reasoning_content'): | ||
| return {"thinking": reasoning_content} | ||
| if content := delta.get('content'): | ||
| return {"content": content} | ||
|
||
| return {"content": ""} | ||
|
|
||
| map_chunk = map_chunk_stream if openai_options['stream'] else map_chunk_no_stream | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,6 +1,7 @@ | ||
| syntax match aichatRole ">>> system" | ||
| syntax match aichatRole ">>> user" | ||
| syntax match aichatRole ">>> include" | ||
| syntax match aichatRole "<<< thinking" | ||
| syntax match aichatRole "<<< assistant" | ||
|
|
||
| highlight default link aichatRole Comment |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
here it would be nice to re-use the `render_text_chunks` helper function, as it covers more functionality like AIRedo. For example, filter the thinking chunks first, then if not empty render them with `render_text_chunks`, and then handle the content.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
For me it's confusing how
`render_text_chunks` handles insertion to support in-place edits vs. chat. If you want this in the `render_text_chunks` function, could you please first check if you agree with this change #148?