-
Notifications
You must be signed in to change notification settings - Fork 22k
fix(langchain): prefer metadata ls_provider in SummarizationMiddleware token check
#36604
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
base: master
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -378,7 +378,12 @@ def _should_summarize_based_on_reported_tokens( | |
| and last_ai_message.usage_metadata is not None | ||
| and (reported_tokens := last_ai_message.usage_metadata.get("total_tokens", -1)) | ||
| and reported_tokens >= threshold | ||
| and (message_provider := last_ai_message.response_metadata.get("model_provider")) | ||
| and ( | ||
| message_provider := last_ai_message.response_metadata.get( | ||
| "ls_provider", | ||
| last_ai_message.response_metadata.get("model_provider"), | ||
|
Comment on lines
+381
to
+384
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is basically: get "ls_provider" and assign it to message_provider; if "ls_provider" is not present, get "model_provider" and assign that to message_provider instead? |
||
| ) | ||
| ) | ||
| and message_provider == self.model._get_ls_params().get("ls_provider") # noqa: SLF001 | ||
| ): | ||
| return True | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -1219,6 +1219,56 @@ def test_usage_metadata_trigger() -> None: | |
| assert not middleware._should_summarize(messages, 0) | ||
|
|
||
|
|
||
| def test_usage_metadata_trigger_ls_provider() -> None: | ||
| """ls_provider in response_metadata takes precedence over model_provider.""" | ||
|
|
||
| class BedrockMockModel(MockChatModel): | ||
| def _get_ls_params(self, **kwargs: Any) -> dict[str, Any]: | ||
| return {"ls_provider": "amazon_bedrock"} | ||
|
|
||
| model = BedrockMockModel() | ||
| middleware = SummarizationMiddleware( | ||
| model=model, | ||
| trigger=("tokens", 10_000), | ||
| keep=("messages", 4), | ||
| ) | ||
| # ls_provider matches even though model_provider doesn't | ||
| messages: list[AnyMessage] = [ | ||
| HumanMessage(content="msg1"), | ||
| AIMessage( | ||
| content="msg2", | ||
| response_metadata={ | ||
| "model_provider": "bedrock_converse", | ||
| "ls_provider": "amazon_bedrock", | ||
|
Comment on lines
+1241
to
+1242
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. It's not in a
|
||
| }, | ||
| usage_metadata={ | ||
| "input_tokens": 7500, | ||
| "output_tokens": 2501, | ||
| "total_tokens": 10_001, | ||
| }, | ||
| ), | ||
| ] | ||
| assert middleware._should_summarize(messages, 0) | ||
|
|
||
| # ls_provider doesn't match — should not trigger | ||
| messages_mismatch: list[AnyMessage] = [ | ||
| HumanMessage(content="msg1"), | ||
| AIMessage( | ||
| content="msg2", | ||
| response_metadata={ | ||
| "model_provider": "amazon_bedrock", | ||
| "ls_provider": "not-amazon_bedrock", | ||
| }, | ||
| usage_metadata={ | ||
| "input_tokens": 7500, | ||
| "output_tokens": 2501, | ||
| "total_tokens": 10_001, | ||
| }, | ||
| ), | ||
| ] | ||
| assert not middleware._should_summarize(messages_mismatch, 0) | ||
|
|
||
|
|
||
| class ConfigCapturingModel(BaseChatModel): | ||
| """Mock model that captures the config passed to invoke/ainvoke.""" | ||
|
|
||
|
|
||

There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
Just curious, for my own learning: what is ls_provider? I understand model/LLM provider.