
feat: add tool mode unsupported list for groq Models #7497

Merged
merged 8 commits on Apr 15, 2025
63 changes: 50 additions & 13 deletions src/backend/base/langflow/base/models/groq_constants.py
@@ -1,23 +1,60 @@
The updated groq_constants.py splits the old single GROQ_MODELS list into grouped constants:

# Production Models - Stable and reliable for production use
GROQ_PRODUCTION_MODELS = [
    "gemma2-9b-it",  # Google
    "llama-3.3-70b-versatile",  # Meta
    "llama-3.1-8b-instant",  # Meta
    "llama-guard-3-8b",  # Meta
    "llama3-70b-8192",  # Meta
    "llama3-8b-8192",  # Meta
]

# Preview Models - For evaluation purposes only
GROQ_PREVIEW_MODELS = [
    "meta-llama/llama-4-scout-17b-16e-instruct",  # Meta
    "meta-llama/llama-4-maverick-17b-128e-instruct",  # Meta
    "qwen-qwq-32b",  # Alibaba Cloud
    "qwen-2.5-coder-32b",  # Alibaba Cloud
    "qwen-2.5-32b",  # Alibaba Cloud
    "deepseek-r1-distill-qwen-32b",  # DeepSeek
    "deepseek-r1-distill-llama-70b",  # DeepSeek
    "llama-3.3-70b-specdec",  # Meta
    "llama-3.2-1b-preview",  # Meta
    "llama-3.2-3b-preview",  # Meta
    "llama-3.2-11b-vision-preview",  # Meta
    "llama-3.2-90b-vision-preview",  # Meta
    "allam-2-7b",  # Saudi Data and AI Authority (SDAIA)
]

# Deprecated Models - Previously available but now removed
DEPRECATED_GROQ_MODELS = [
    "gemma-7b-it",  # Google
    "llama3-groq-70b-8192-tool-use-preview",  # Groq
    "llama3-groq-8b-8192-tool-use-preview",  # Groq
    "llama-3.1-70b-versatile",  # Meta
    "mixtral-8x7b-32768",  # Mistral
]

UNSUPPORTED_GROQ_MODELS = [
    "mistral-saba-24b",  # Mistral
    "playai-tts",  # Playht, Inc
    "playai-tts-arabic",  # Playht, Inc
    "whisper-large-v3",  # OpenAI
    "whisper-large-v3-turbo",  # OpenAI
    "distil-whisper-large-v3-en",  # HuggingFace
]

TOOL_CALLING_UNSUPPORTED_GROQ_MODELS = [
    "allam-2-7b",  # Saudi Data and AI Authority (SDAIA)
    "llama-3.1-8b-instant",  # Meta Slow Response
    "llama-guard-3-8b",  # Meta
    "llama-3.2-11b-vision-preview",  # Meta
    "llama3-8b-8192",  # Meta
    "llama3-70b-8192",  # Meta
    "deepseek-r1-distill-llama-70b",  # DeepSeek
]

# Combined list of all current models for backward compatibility
GROQ_MODELS = GROQ_PRODUCTION_MODELS + GROQ_PREVIEW_MODELS

# For reverse compatibility
MODEL_NAMES = GROQ_MODELS
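
A minimal sketch of how these grouped constants can be consumed downstream. The helper name selectable_groq_models is hypothetical and not part of this PR; the actual filtering logic lives in groq.py below.

from langflow.base.models.groq_constants import (
    GROQ_MODELS,
    TOOL_CALLING_UNSUPPORTED_GROQ_MODELS,
    UNSUPPORTED_GROQ_MODELS,
)


def selectable_groq_models(tool_mode: bool = False) -> list[str]:
    """Hypothetical helper: build the model dropdown from the static lists."""
    # Drop models that the component cannot use at all (audio/TTS, etc.).
    models = [m for m in GROQ_MODELS if m not in UNSUPPORTED_GROQ_MODELS]
    if tool_mode:
        # In tool mode, also drop models known not to support tool calling.
        models = [m for m in models if m not in TOOL_CALLING_UNSUPPORTED_GROQ_MODELS]
    return models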
12 changes: 9 additions & 3 deletions src/backend/base/langflow/components/models/groq.py
@@ -2,7 +2,11 @@
from loguru import logger
from pydantic.v1 import SecretStr

from langflow.base.models.groq_constants import (
    GROQ_MODELS,
    TOOL_CALLING_UNSUPPORTED_GROQ_MODELS,
    UNSUPPORTED_GROQ_MODELS,
)
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.field_typing.range_spec import RangeSpec
@@ -78,7 +82,9 @@ def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:
            response = requests.get(url, headers=headers, timeout=10)
            response.raise_for_status()
            model_list = response.json()
            model_ids = [
                model["id"] for model in model_list.get("data", []) if model["id"] not in UNSUPPORTED_GROQ_MODELS
            ]
        except (ImportError, ValueError, requests.exceptions.RequestException) as e:
            logger.exception(f"Error getting model names: {e}")
            model_ids = GROQ_MODELS
@@ -94,7 +100,7 @@ def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:
                    api_key=self.api_key,
                    base_url=self.base_url,
                )
                if not self.supports_tool_calling(model_with_tool) or model in TOOL_CALLING_UNSUPPORTED_GROQ_MODELS:

Copilot AI Apr 8, 2025

The variable 'model' is undefined in this condition and should likely be 'model_with_tool'. Update the variable to ensure the logic functions as intended.

Suggested change
if not self.supports_tool_calling(model_with_tool) or model in TOOL_CALLING_UNSUPPORTED_GROQ_MODELS:
if not self.supports_tool_calling(model_with_tool) or model_with_tool in TOOL_CALLING_UNSUPPORTED_GROQ_MODELS:


                    model_ids.remove(model)
            return model_ids
        return model_ids
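
For context, the hunk above shows only part of get_models. If the surrounding code iterates with for model in model_ids (not visible in the diff), then model is defined, contrary to the Copilot note, but calling model_ids.remove(model) inside that loop can still skip entries because the list shrinks while it is being iterated. The sketch below builds a new filtered list instead; it is not the merged implementation, and it assumes the constructor shown in the hunk is langchain_groq's ChatGroq and that supports_tool_calling is available as a callable.

# Sketch only: filter out models that cannot do tool calling without mutating
# the list that is being iterated.
from langchain_groq import ChatGroq  # assumed chat model class used by the component

from langflow.base.models.groq_constants import TOOL_CALLING_UNSUPPORTED_GROQ_MODELS


def filter_tool_calling_models(model_ids, api_key, base_url, supports_tool_calling):
    supported = []
    for model in model_ids:
        # Instantiate the chat model so the runtime tool-calling probe can run.
        model_with_tool = ChatGroq(model=model, api_key=api_key, base_url=base_url)
        # Keep the model only if the probe succeeds and it is not on the static deny list.
        if supports_tool_calling(model_with_tool) and model not in TOOL_CALLING_UNSUPPORTED_GROQ_MODELS:
            supported.append(model)
    return supported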