Skip to content
Open
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions DESCRIPTION
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ Suggests:
withr
VignetteBuilder:
knitr
Remotes: posit-dev/shinychat/pkg-r#167
Config/Needs/website: tidyverse/tidytemplate, rmarkdown
Config/testthat/edition: 3
Config/testthat/parallel: true
Expand Down
1 change: 1 addition & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# ellmer (development version)

* Streaming now yields `ContentThinking` objects for thinking/reasoning content from Anthropic, OpenAI, and Google Gemini, allowing downstream packages like shinychat to distinguish thinking from regular text during streaming (@simonpcouch, #909).
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This now needs an update, but otherwise looks good I think.

* `chat_github()` now uses `chat_openai_compatible()` for improved compatibility, and `models_github()` now supports custom `base_url` configuration (@D-M4rk, #877).
* `chat_ollama()` now contains a slot for `top_k` within the `params` argument (@frankiethull).

Expand Down
39 changes: 25 additions & 14 deletions R/chat.R
Original file line number Diff line number Diff line change
Expand Up @@ -606,14 +606,11 @@ Chat <- R6::R6Class(
if (stream) {
result <- NULL
for (chunk in response) {
text <- stream_text(private$provider, chunk)
if (!is.null(text)) {
content <- stream_content(private$provider, chunk)
if (!is.null(content)) {
text <- content_text(content)
emit(text)
if (yield_as_content) {
yield(ContentText(text))
} else {
yield(text)
}
yield(if (yield_as_content) content else text)
any_text <- TRUE
}

Expand Down Expand Up @@ -688,14 +685,11 @@ Chat <- R6::R6Class(
if (stream) {
result <- NULL
for (chunk in await_each(response)) {
text <- stream_text(private$provider, chunk)
if (!is.null(text)) {
content <- stream_content(private$provider, chunk)
if (!is.null(content)) {
text <- content_text(content)
emit(text)
if (yield_as_content) {
yield(ContentText(text))
} else {
yield(text)
}
yield(if (yield_as_content) content else text)
any_text <- TRUE
}

Expand Down Expand Up @@ -774,6 +768,23 @@ Chat <- R6::R6Class(
)
)

# Convert a streaming event into a Content object, or NULL if the event
# carries no streamable content.
#
# `stream_text()` methods may return a plain string or a Content object
# (e.g. ContentThinking for reasoning output). This normalizes bare
# strings to ContentText so callers can treat every streamed chunk as a
# Content object.
stream_content <- function(provider, event) {
  result <- stream_text(provider, event)
  if (is.null(result)) {
    return(NULL)
  }
  if (S7_inherits(result, Content)) result else ContentText(result)
}

# Extract the displayable text from a streamed Content object.
#
# Thinking content yields its @thinking slot, plain text its @text slot;
# any other content class falls back to its format() method.
content_text <- function(content) {
  cls <- class(content)[[1]]
  if (cls == "ellmer::ContentThinking") {
    content@thinking
  } else if (cls == "ellmer::ContentText") {
    content@text
  } else {
    format(content)
  }
}

#' @export
print.Chat <- function(x, ...) {
provider <- x$get_provider()
Expand Down
5 changes: 4 additions & 1 deletion R/provider-claude.R
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,10 @@ method(stream_parse, ProviderAnthropic) <- function(provider, event) {
}
method(stream_text, ProviderAnthropic) <- function(provider, event) {
  # Extract streamable text from an Anthropic SSE event. Only
  # content_block_delta events carry text; anything else yields NULL.
  if (event$type == "content_block_delta") {
    # Wrap thinking deltas so downstream consumers (e.g. shinychat) can
    # distinguish reasoning output from regular text.
    if (identical(event$delta$type, "thinking_delta")) {
      return(ContentThinking(event$delta$thinking))
    }
    event$delta$text
  }
}
method(stream_merge_chunks, ProviderAnthropic) <- function(
Expand Down
25 changes: 23 additions & 2 deletions R/provider-google.R
Original file line number Diff line number Diff line change
Expand Up @@ -273,7 +273,17 @@ method(stream_parse, ProviderGoogleGemini) <- function(provider, event) {
}
}
method(stream_text, ProviderGoogleGemini) <- function(provider, event) {
  # Extract streamable text from a Gemini streaming chunk. A chunk may
  # arrive with no content parts (e.g. a finish-reason-only candidate);
  # length(NULL) == 0, so this also covers a missing parts field.
  parts <- event$candidates[[1]]$content$parts
  if (length(parts) == 0) {
    return(NULL)
  }

  part <- parts[[1]]
  if (is.null(part$text)) {
    return(NULL)
  }
  # Gemini marks reasoning output with `thought = TRUE`:
  # https://ai.google.dev/gemini-api/docs/thinking
  if (isTRUE(part$thought)) {
    ContentThinking(part$text)
  } else {
    part$text
  }
}
method(stream_merge_chunks, ProviderGoogleGemini) <- function(
provider,
Expand Down Expand Up @@ -320,7 +330,9 @@ method(value_turn, ProviderGoogleGemini) <- function(
message <- result$candidates[[1]]$content

contents <- lapply(message$parts, function(content) {
if (has_name(content, "text")) {
if (isTRUE(content$thought) && has_name(content, "text")) {
ContentThinking(content$text)
} else if (has_name(content, "text")) {
if (has_type) {
ContentJson(string = content$text)
} else {
Expand Down Expand Up @@ -404,6 +416,15 @@ method(as_json, list(ProviderGoogleGemini, ContentText)) <- function(
}
}

# Serialize thinking content as a Gemini "thought part" so reasoning
# text round-trips through the API unchanged.
method(as_json, list(ProviderGoogleGemini, ContentThinking)) <- function(
  provider,
  x,
  ...
) {
  # Gemini distinguishes reasoning from regular text via `thought = TRUE`:
  # https://ai.google.dev/gemini-api/docs/thinking
  list(thought = TRUE, text = x@thinking)
}

method(as_json, list(ProviderGoogleGemini, ContentPDF)) <- function(
provider,
x,
Expand Down
4 changes: 2 additions & 2 deletions R/provider-openai.R
Original file line number Diff line number Diff line change
Expand Up @@ -247,10 +247,10 @@ method(stream_text, ProviderOpenAI) <- function(provider, event) {
event$delta
} else if (event$type == "response.reasoning_summary_text.delta") {
# https://platform.openai.com/docs/api-reference/responses-streaming/response/reasoning_summary_text/delta
event$delta
ContentThinking(event$delta)
} else if (event$type == "response.reasoning_summary_text.done") {
# https://platform.openai.com/docs/api-reference/responses-streaming/response/reasoning_summary_text/done
"\n\n"
NULL
}
}
method(stream_merge_chunks, ProviderOpenAI) <- function(
Expand Down