Skip to content

Commit 70accb7

Browse files
fix(langsmith.py): respect langsmith batch size param (#10411)
* fix(langsmith.py): respect langsmith batch size param

* build(model_prices_and_context_window.json): add missing tgai models

Fixes #8921
Fixes #8278
1 parent cd27185 commit 70accb7

File tree

3 files changed

+101
-3
lines changed

3 files changed

+101
-3
lines changed

litellm/integrations/langsmith.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,8 @@ def __init__(
         langsmith_base_url: Optional[str] = None,
         **kwargs,
     ):
+        self.flush_lock = asyncio.Lock()
+        super().__init__(**kwargs, flush_lock=self.flush_lock)
         self.default_credentials = self.get_credentials_from_env(
             langsmith_api_key=langsmith_api_key,
             langsmith_project=langsmith_project,
@@ -61,13 +63,11 @@ def __init__(
         _batch_size = (
             os.getenv("LANGSMITH_BATCH_SIZE", None) or litellm.langsmith_batch_size
         )
+
         if _batch_size:
             self.batch_size = int(_batch_size)
         self.log_queue: List[LangsmithQueueObject] = []
         asyncio.create_task(self.periodic_flush())
-        self.flush_lock = asyncio.Lock()
-
-        super().__init__(**kwargs, flush_lock=self.flush_lock)

     def get_credentials_from_env(
         self,

litellm/model_prices_and_context_window_backup.json

+49
Original file line numberDiff line numberDiff line change
@@ -10235,6 +10235,55 @@
         "mode": "chat",
         "supports_tool_choice": true
     },
+    "together_ai/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/meta-llama/Llama-4-Scout-17B-16E-Instruct": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/meta-llama/Llama-3.2-3B-Instruct-Turbo": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/Qwen/Qwen2.5-7B-Instruct-Turbo": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/Qwen/Qwen2.5-72B-Instruct-Turbo": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/deepseek-ai/DeepSeek-V3": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/mistralai/Mistral-Small-24B-Instruct-2501": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
     "ollama/codegemma": {
         "max_tokens": 8192,
         "max_input_tokens": 8192,

model_prices_and_context_window.json

+49
Original file line numberDiff line numberDiff line change
@@ -10235,6 +10235,55 @@
         "mode": "chat",
         "supports_tool_choice": true
     },
+    "together_ai/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/meta-llama/Llama-4-Scout-17B-16E-Instruct": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/meta-llama/Llama-3.2-3B-Instruct-Turbo": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/Qwen/Qwen2.5-7B-Instruct-Turbo": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/Qwen/Qwen2.5-72B-Instruct-Turbo": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/deepseek-ai/DeepSeek-V3": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
+    "together_ai/mistralai/Mistral-Small-24B-Instruct-2501": {
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
     "ollama/codegemma": {
         "max_tokens": 8192,
         "max_input_tokens": 8192,

0 commit comments

Comments (0)