fix: add Groq Model Constants (#4689)
* update Groq models: refreshed the Groq model list * [autofix.ci] apply automated fixes * Update model_input_constants.py: remove "groq" prefix * Update groq.py: the Groq model list now also serves as a fallback when the API call fails, including at startup * Update groq.py: formatting issue resolved --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
parent
50aa61826c
commit
699ec18205
3 changed files with 25 additions and 4 deletions
|
|
@ -1 +1,20 @@
|
|||
MODEL_NAMES = ["llama3-8b-8192", "llama3-70b-8192", "mixtral-8x7b-32768", "gemma-7b-it", "gemma2-9b-it"]
|
||||
# Models known to be served by the Groq API, used both as the default option
# list and as a fallback when the live model-listing API call fails.
# Grouped by the organization that published each model; order is preserved
# because it is the order shown to the user in the model dropdown.
GROQ_MODELS = [
    # HuggingFace
    "distil-whisper-large-v3-en",
    # Google
    "gemma2-9b-it",
    "gemma-7b-it",
    # Groq
    "llama3-groq-70b-8192-tool-use-preview",
    "llama3-groq-8b-8192-tool-use-preview",
    # Meta
    "llama-3.1-70b-versatile",
    "llama-3.1-8b-instant",
    "llama-3.2-1b-preview",
    "llama-3.2-3b-preview",
    "llama-3.2-11b-vision-preview",
    "llama-3.2-90b-vision-preview",
    "llama-guard-3-8b",
    "llama3-70b-8192",
    "llama3-8b-8192",
    # Mistral
    "mixtral-8x7b-32768",
    # OpenAI
    "whisper-large-v3",
    "whisper-large-v3-turbo",
]

# Kept as an alias for reverse compatibility with older imports.
MODEL_NAMES = GROQ_MODELS
|
||||
|
|
|
|||
|
|
@ -64,7 +64,7 @@ MODEL_PROVIDERS_DICT = {
|
|||
"prefix": "",
|
||||
"component_class": OpenAIModelComponent(),
|
||||
},
|
||||
"Groq": {"fields": GROQ_FIELDS, "inputs": GROQ_INPUTS, "prefix": "groq_", "component_class": GroqModel()},
|
||||
"Groq": {"fields": GROQ_FIELDS, "inputs": GROQ_INPUTS, "prefix": "", "component_class": GroqModel()},
|
||||
"Anthropic": {
|
||||
"fields": ANTHROPIC_FIELDS,
|
||||
"inputs": ANTHROPIC_INPUTS,
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ from langchain_groq import ChatGroq
|
|||
from pydantic.v1 import SecretStr
|
||||
from typing_extensions import override
|
||||
|
||||
from langflow.base.models.groq_constants import GROQ_MODELS
|
||||
from langflow.base.models.model import LCModelComponent
|
||||
from langflow.field_typing import LanguageModel
|
||||
from langflow.inputs.inputs import HandleInput
|
||||
|
|
@ -48,7 +49,8 @@ class GroqModel(LCModelComponent):
|
|||
name="model_name",
|
||||
display_name="Model",
|
||||
info="The name of the model to use.",
|
||||
options=[],
|
||||
options=GROQ_MODELS,
|
||||
value="llama-3.1-8b-instant",
|
||||
refresh_button=True,
|
||||
),
|
||||
HandleInput(
|
||||
|
|
@ -74,7 +76,7 @@ class GroqModel(LCModelComponent):
|
|||
return [model["id"] for model in model_list.get("data", [])]
|
||||
except requests.RequestException as e:
|
||||
self.status = f"Error fetching models: {e}"
|
||||
return []
|
||||
return GROQ_MODELS
|
||||
|
||||
@override
|
||||
def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue