refactor: Organize model lists to support metadata (#8268)

* add model metadata

* add new anthropic models

* templates

* [autofix.ci] apply automated fixes

* style fix

* whitespace

* move models to a list

* [autofix.ci] apply automated fixes

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
Mike Fortman 2025-05-30 13:13:06 -05:00 committed by GitHub
commit 7ab495171a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 659 additions and 220 deletions

View file

@ -1,33 +1,47 @@
from .model_metadata import create_model_metadata
ANTHROPIC_MODELS_DETAILED = [
# Tool calling supported models
create_model_metadata(provider="Anthropic", name="claude-opus-4-20250514", icon="Anthropic", tool_calling=True),
create_model_metadata(provider="Anthropic", name="claude-sonnet-4-20250514", icon="Anthropic", tool_calling=True),
create_model_metadata(provider="Anthropic", name="claude-3-7-sonnet-latest", icon="Anthropic", tool_calling=True),
create_model_metadata(provider="Anthropic", name="claude-3-5-sonnet-latest", icon="Anthropic", tool_calling=True),
create_model_metadata(provider="Anthropic", name="claude-3-5-haiku-latest", icon="Anthropic", tool_calling=True),
create_model_metadata(provider="Anthropic", name="claude-3-opus-latest", icon="Anthropic", tool_calling=True),
create_model_metadata(provider="Anthropic", name="claude-3-sonnet-20240229", icon="Anthropic", tool_calling=True),
# Tool calling unsupported models
create_model_metadata(provider="Anthropic", name="claude-2.1", icon="Anthropic", tool_calling=False),
create_model_metadata(provider="Anthropic", name="claude-2.0", icon="Anthropic", tool_calling=False),
# Deprecated models
create_model_metadata(
provider="Anthropic", name="claude-3-5-sonnet-20240620", icon="Anthropic", tool_calling=True, deprecated=True
),
create_model_metadata(
provider="Anthropic", name="claude-3-5-sonnet-20241022", icon="Anthropic", tool_calling=True, deprecated=True
),
create_model_metadata(
provider="Anthropic", name="claude-3-5-haiku-20241022", icon="Anthropic", tool_calling=True, deprecated=True
),
create_model_metadata(
provider="Anthropic", name="claude-3-haiku-20240307", icon="Anthropic", tool_calling=True, deprecated=True
),
]
ANTHROPIC_MODELS = [
# all the models below support tool calling also
"claude-3-7-sonnet-latest",
"claude-3-5-sonnet-latest",
"claude-3-5-haiku-latest",
"claude-3-opus-latest",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
metadata["name"]
for metadata in ANTHROPIC_MODELS_DETAILED
if not metadata.get("deprecated", False) and metadata.get("tool_calling", False)
]
TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS = [
# all the models below support tool calling also
"claude-3-7-sonnet-latest",
"claude-3-5-sonnet-latest",
"claude-3-5-haiku-latest",
"claude-3-opus-latest",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
]
TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS = [
"claude-2.1",
"claude-2.0",
metadata["name"] for metadata in ANTHROPIC_MODELS_DETAILED if metadata.get("tool_calling", False)
]
DEPRECATED_MODELS = [
"claude-3-5-sonnet-20240620",
"claude-3-5-sonnet-20241022",
"claude-3-5-haiku-20241022",
"claude-3-haiku-20240307",
TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS = [
metadata["name"] for metadata in ANTHROPIC_MODELS_DETAILED if not metadata.get("tool_calling", False)
]
DEPRECATED_MODELS = [metadata["name"] for metadata in ANTHROPIC_MODELS_DETAILED if metadata.get("deprecated", False)]
DEFAULT_ANTHROPIC_API_URL = "https://api.anthropic.com"

View file

@ -1,49 +1,109 @@
AWS_MODEL_IDs = [
from .model_metadata import create_model_metadata
# Unified model metadata - single source of truth
AWS_MODELS_DETAILED = [
# Amazon Titan Models
"amazon.titan-text-express-v1",
"amazon.titan-text-lite-v1",
"amazon.titan-text-premier-v1:0",
create_model_metadata(
provider="Amazon Bedrock", name="amazon.titan-text-express-v1", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="amazon.titan-text-lite-v1", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="amazon.titan-text-premier-v1:0", icon="Amazon", tool_calling=True
),
# Anthropic Models
"anthropic.claude-v2",
"anthropic.claude-v2:1",
"anthropic.claude-3-sonnet-20240229-v1:0",
"anthropic.claude-3-5-sonnet-20240620-v1:0",
"anthropic.claude-3-5-sonnet-20241022-v2:0",
"anthropic.claude-3-haiku-20240307-v1:0",
"anthropic.claude-3-5-haiku-20241022-v1:0",
"anthropic.claude-3-opus-20240229-v1:0",
"anthropic.claude-instant-v1",
create_model_metadata(provider="Amazon Bedrock", name="anthropic.claude-v2", icon="Amazon", tool_calling=True),
create_model_metadata(provider="Amazon Bedrock", name="anthropic.claude-v2:1", icon="Amazon", tool_calling=True),
create_model_metadata(
provider="Amazon Bedrock", name="anthropic.claude-3-sonnet-20240229-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="anthropic.claude-3-5-sonnet-20240620-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="anthropic.claude-3-5-sonnet-20241022-v2:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="anthropic.claude-3-haiku-20240307-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="anthropic.claude-3-5-haiku-20241022-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="anthropic.claude-3-opus-20240229-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="anthropic.claude-instant-v1", icon="Amazon", tool_calling=True
),
# AI21 Labs Models
"ai21.jamba-instruct-v1:0",
"ai21.j2-mid-v1",
"ai21.j2-ultra-v1",
"ai21.jamba-1-5-large-v1:0",
"ai21.jamba-1-5-mini-v1:0",
create_model_metadata(provider="Amazon Bedrock", name="ai21.jamba-instruct-v1:0", icon="Amazon", tool_calling=True),
create_model_metadata(provider="Amazon Bedrock", name="ai21.j2-mid-v1", icon="Amazon", tool_calling=True),
create_model_metadata(provider="Amazon Bedrock", name="ai21.j2-ultra-v1", icon="Amazon", tool_calling=True),
create_model_metadata(
provider="Amazon Bedrock", name="ai21.jamba-1-5-large-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(provider="Amazon Bedrock", name="ai21.jamba-1-5-mini-v1:0", icon="Amazon", tool_calling=True),
# Cohere Models
"cohere.command-text-v14",
"cohere.command-light-text-v14",
"cohere.command-r-v1:0",
"cohere.command-r-plus-v1:0",
create_model_metadata(provider="Amazon Bedrock", name="cohere.command-text-v14", icon="Amazon", tool_calling=True),
create_model_metadata(
provider="Amazon Bedrock", name="cohere.command-light-text-v14", icon="Amazon", tool_calling=True
),
create_model_metadata(provider="Amazon Bedrock", name="cohere.command-r-v1:0", icon="Amazon", tool_calling=True),
create_model_metadata(
provider="Amazon Bedrock", name="cohere.command-r-plus-v1:0", icon="Amazon", tool_calling=True
),
# Meta Models
"meta.llama2-13b-chat-v1",
"meta.llama2-70b-chat-v1",
"meta.llama3-8b-instruct-v1:0",
"meta.llama3-70b-instruct-v1:0",
"meta.llama3-1-8b-instruct-v1:0",
"meta.llama3-1-70b-instruct-v1:0",
"meta.llama3-1-405b-instruct-v1:0",
"meta.llama3-2-1b-instruct-v1:0",
"meta.llama3-2-3b-instruct-v1:0",
"meta.llama3-2-11b-instruct-v1:0",
"meta.llama3-2-90b-instruct-v1:0",
create_model_metadata(provider="Amazon Bedrock", name="meta.llama2-13b-chat-v1", icon="Amazon", tool_calling=True),
create_model_metadata(provider="Amazon Bedrock", name="meta.llama2-70b-chat-v1", icon="Amazon", tool_calling=True),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-8b-instruct-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-70b-instruct-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-1-8b-instruct-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-1-70b-instruct-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-1-405b-instruct-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-2-1b-instruct-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-2-3b-instruct-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-2-11b-instruct-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="meta.llama3-2-90b-instruct-v1:0", icon="Amazon", tool_calling=True
),
# Mistral AI Models
"mistral.mistral-7b-instruct-v0:2",
"mistral.mixtral-8x7b-instruct-v0:1",
"mistral.mistral-large-2402-v1:0",
"mistral.mistral-large-2407-v1:0",
"mistral.mistral-small-2402-v1:0",
create_model_metadata(
provider="Amazon Bedrock", name="mistral.mistral-7b-instruct-v0:2", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="mistral.mixtral-8x7b-instruct-v0:1", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="mistral.mistral-large-2402-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="mistral.mistral-large-2407-v1:0", icon="Amazon", tool_calling=True
),
create_model_metadata(
provider="Amazon Bedrock", name="mistral.mistral-small-2402-v1:0", icon="Amazon", tool_calling=True
),
]
# Generate backwards-compatible list from the metadata
AWS_MODEL_IDs = [metadata["name"] for metadata in AWS_MODELS_DETAILED]
AWS_EMBEDDING_MODEL_IDS = [
# Amazon Titan Embedding Models
"amazon.titan-embed-text-v1",

View file

@ -1,15 +1,56 @@
GOOGLE_GENERATIVE_AI_MODELS = [
from .model_metadata import create_model_metadata
# Unified model metadata - single source of truth
GOOGLE_GENERATIVE_AI_MODELS_DETAILED = [
# GEMINI 1.5
"gemini-1.5-pro",
"gemini-1.5-flash",
"gemini-1.5-flash-8b",
create_model_metadata(
provider="Google Generative AI", name="gemini-1.5-pro", icon="GoogleGenerativeAI", tool_calling=True
),
create_model_metadata(
provider="Google Generative AI", name="gemini-1.5-flash", icon="GoogleGenerativeAI", tool_calling=True
),
create_model_metadata(
provider="Google Generative AI", name="gemini-1.5-flash-8b", icon="GoogleGenerativeAI", tool_calling=True
),
# PREVIEW
"gemini-2.0-flash",
"gemini-exp-1206",
"gemini-2.0-flash-thinking-exp-01-21",
"learnlm-1.5-pro-experimental",
create_model_metadata(
provider="Google Generative AI",
name="gemini-2.0-flash",
icon="GoogleGenerativeAI",
tool_calling=True,
preview=True,
),
create_model_metadata(
provider="Google Generative AI",
name="gemini-exp-1206",
icon="GoogleGenerativeAI",
tool_calling=True,
preview=True,
),
create_model_metadata(
provider="Google Generative AI",
name="gemini-2.0-flash-thinking-exp-01-21",
icon="GoogleGenerativeAI",
tool_calling=True,
preview=True,
),
create_model_metadata(
provider="Google Generative AI",
name="learnlm-1.5-pro-experimental",
icon="GoogleGenerativeAI",
tool_calling=True,
preview=True,
),
# GEMMA
"gemma-2-2b",
"gemma-2-9b",
"gemma-2-27b",
create_model_metadata(
provider="Google Generative AI", name="gemma-2-2b", icon="GoogleGenerativeAI", tool_calling=True
),
create_model_metadata(
provider="Google Generative AI", name="gemma-2-9b", icon="GoogleGenerativeAI", tool_calling=True
),
create_model_metadata(
provider="Google Generative AI", name="gemma-2-27b", icon="GoogleGenerativeAI", tool_calling=True
),
]
GOOGLE_GENERATIVE_AI_MODELS = [metadata["name"] for metadata in GOOGLE_GENERATIVE_AI_MODELS_DETAILED]

View file

@ -1,57 +1,132 @@
# Production Models - Stable and reliable for production use
from .model_metadata import create_model_metadata
# Unified model metadata - single source of truth
GROQ_MODELS_DETAILED = [
# Production Models - Stable and reliable for production use
create_model_metadata( # Google
provider="Groq", name="gemma2-9b-it", icon="Groq", tool_calling=True
),
create_model_metadata( # Meta
provider="Groq", name="llama-3.3-70b-versatile", icon="Groq", tool_calling=True
),
create_model_metadata( # Meta
provider="Groq", name="llama-3.1-8b-instant", icon="Groq"
),
create_model_metadata( # Meta
provider="Groq", name="llama-guard-3-8b", icon="Groq"
),
create_model_metadata( # Meta
provider="Groq", name="llama3-70b-8192", icon="Groq"
),
create_model_metadata( # Meta
provider="Groq", name="llama3-8b-8192", icon="Groq"
),
# Preview Models - For evaluation purposes only
create_model_metadata( # Meta
provider="Groq", name="meta-llama/llama-4-scout-17b-16e-instruct", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # Meta
provider="Groq",
name="meta-llama/llama-4-maverick-17b-128e-instruct",
icon="Groq",
tool_calling=True,
preview=True,
),
create_model_metadata( # Alibaba Cloud
provider="Groq", name="qwen-qwq-32b", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # Alibaba Cloud
provider="Groq", name="qwen-2.5-coder-32b", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # Alibaba Cloud
provider="Groq", name="qwen-2.5-32b", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # DeepSeek
provider="Groq", name="deepseek-r1-distill-qwen-32b", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # DeepSeek
provider="Groq", name="deepseek-r1-distill-llama-70b", icon="Groq", preview=True
),
create_model_metadata( # Meta
provider="Groq", name="llama-3.3-70b-specdec", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # Meta
provider="Groq", name="llama-3.2-1b-preview", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # Meta
provider="Groq", name="llama-3.2-3b-preview", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # Meta
provider="Groq", name="llama-3.2-11b-vision-preview", icon="Groq", preview=True
),
create_model_metadata( # Meta
provider="Groq", name="llama-3.2-90b-vision-preview", icon="Groq", tool_calling=True, preview=True
),
create_model_metadata( # Saudi Data and AI Authority (SDAIA)
provider="Groq", name="allam-2-7b", icon="Groq", preview=True
),
# Deprecated Models - Previously available but now removed
create_model_metadata( # Google
provider="Groq", name="gemma-7b-it", icon="Groq", tool_calling=True, deprecated=True
),
create_model_metadata( # Groq
provider="Groq", name="llama3-groq-70b-8192-tool-use-preview", icon="Groq", tool_calling=True, deprecated=True
),
create_model_metadata( # Groq
provider="Groq", name="llama3-groq-8b-8192-tool-use-preview", icon="Groq", tool_calling=True, deprecated=True
),
create_model_metadata( # Meta
provider="Groq", name="llama-3.1-70b-versatile", icon="Groq", tool_calling=True, deprecated=True
),
create_model_metadata( # Mistral
provider="Groq", name="mixtral-8x7b-32768", icon="Groq", tool_calling=True, deprecated=True
),
# Unsupported Models
create_model_metadata( # Mistral
provider="Groq", name="mistral-saba-24b", icon="Groq", not_supported=True
),
create_model_metadata( # Playht, Inc
provider="Groq", name="playai-tts", icon="Groq", not_supported=True
),
create_model_metadata( # Playht, Inc
provider="Groq", name="playai-tts-arabic", icon="Groq", not_supported=True
),
create_model_metadata( # OpenAI
provider="Groq", name="whisper-large-v3", icon="Groq", not_supported=True
),
create_model_metadata( # OpenAI
provider="Groq", name="whisper-large-v3-turbo", icon="Groq", not_supported=True
),
create_model_metadata( # HuggingFace
provider="Groq", name="distil-whisper-large-v3-en", icon="Groq", not_supported=True
),
]
# Generate backwards-compatible lists from the metadata
GROQ_PRODUCTION_MODELS = [
"gemma2-9b-it", # Google
"llama-3.3-70b-versatile", # Meta
"llama-3.1-8b-instant", # Meta
"llama-guard-3-8b", # Meta
"llama3-70b-8192", # Meta
"llama3-8b-8192", # Meta
metadata["name"]
for metadata in GROQ_MODELS_DETAILED
if not metadata.get("preview", False)
and not metadata.get("deprecated", False)
and not metadata.get("not_supported", False)
]
# Preview Models - For evaluation purposes only
GROQ_PREVIEW_MODELS = [
"meta-llama/llama-4-scout-17b-16e-instruct", # Meta
"meta-llama/llama-4-maverick-17b-128e-instruct", # Meta
"qwen-qwq-32b", # Alibaba Cloud
"qwen-2.5-coder-32b", # Alibaba Cloud
"qwen-2.5-32b", # Alibaba Cloud
"deepseek-r1-distill-qwen-32b", # DeepSeek
"deepseek-r1-distill-llama-70b", # DeepSeek
"llama-3.3-70b-specdec", # Meta
"llama-3.2-1b-preview", # Meta
"llama-3.2-3b-preview", # Meta
"llama-3.2-11b-vision-preview", # Meta
"llama-3.2-90b-vision-preview", # Meta
"allam-2-7b", # Saudi Data and AI Authority (SDAIA)
]
GROQ_PREVIEW_MODELS = [metadata["name"] for metadata in GROQ_MODELS_DETAILED if metadata.get("preview", False)]
# Deprecated Models - Previously available but now removed
DEPRECATED_GROQ_MODELS = [
"gemma-7b-it", # Google
"llama3-groq-70b-8192-tool-use-preview", # Groq
"llama3-groq-8b-8192-tool-use-preview", # Groq
"llama-3.1-70b-versatile", # Meta
"mixtral-8x7b-32768", # Mistral
]
DEPRECATED_GROQ_MODELS = [metadata["name"] for metadata in GROQ_MODELS_DETAILED if metadata.get("deprecated", False)]
UNSUPPORTED_GROQ_MODELS = [
"mistral-saba-24b", # Mistral
"playai-tts", # Playht, Inc
"playai-tts-arabic", # Playht, Inc
"whisper-large-v3", # OpenAI
"whisper-large-v3-turbo", # OpenAI
"distil-whisper-large-v3-en", # HuggingFace
metadata["name"] for metadata in GROQ_MODELS_DETAILED if metadata.get("not_supported", False)
]
TOOL_CALLING_UNSUPPORTED_GROQ_MODELS = [
"allam-2-7b", # Saudi Data and AI Authority (SDAIA)
"llama-3.1-8b-instant", # Meta Slow Response
"llama-guard-3-8b", # Meta
"llama-3.2-11b-vision-preview", # Meta
"llama3-8b-8192", # Meta
"llama3-70b-8192", # Meta
"deepseek-r1-distill-llama-70b", # DeepSeek
metadata["name"]
for metadata in GROQ_MODELS_DETAILED
if not metadata.get("tool_calling", False)
and not metadata.get("not_supported", False)
and not metadata.get("deprecated", False)
]
# Combined list of all current models for backward compatibility
GROQ_MODELS = GROQ_PRODUCTION_MODELS + GROQ_PREVIEW_MODELS

View file

@ -0,0 +1,41 @@
from typing import TypedDict
class ModelMetadata(TypedDict, total=False):
    """Simple model metadata structure.

    All keys are optional (``total=False``); consumers read the boolean
    flags with ``metadata.get(flag, False)`` so missing keys behave as
    ``False``.
    """

    provider: str  # Provider name (e.g., "anthropic", "groq", "openai")
    name: str  # Model name/ID
    icon: str  # Icon name for UI
    tool_calling: bool  # Whether model supports tool calling (defaults to False)
    reasoning: bool  # Reasoning models (defaults to False)
    search: bool  # Search models (defaults to False)
    preview: bool  # Whether model is in preview/beta (defaults to False)
    not_supported: bool  # Whether model is not supported (defaults to False)
    deprecated: bool  # Whether model is deprecated (defaults to False)
def create_model_metadata(
    provider: str,
    name: str,
    icon: str,
    *,
    tool_calling: bool = False,
    reasoning: bool = False,
    search: bool = False,
    preview: bool = False,
    not_supported: bool = False,
    deprecated: bool = False,
) -> ModelMetadata:
    """Build a ModelMetadata entry with every optional flag filled in explicitly.

    The capability/status flags are keyword-only and default to False, so
    call sites only spell out the flags that apply to a given model.
    """
    # Collect the boolean flags once, then splat them alongside the
    # identifying fields so every key is always present in the result.
    flags = {
        "tool_calling": tool_calling,
        "reasoning": reasoning,
        "search": search,
        "preview": preview,
        "not_supported": not_supported,
        "deprecated": deprecated,
    }
    return ModelMetadata(provider=provider, name=name, icon=icon, **flags)

View file

@ -1,34 +1,75 @@
from .model_metadata import create_model_metadata
# Unified model metadata - single source of truth
OPENAI_MODELS_DETAILED = [
# Regular OpenAI Models
create_model_metadata(provider="OpenAI", name="gpt-4o-mini", icon="OpenAI", tool_calling=True),
create_model_metadata(provider="OpenAI", name="gpt-4o", icon="OpenAI", tool_calling=True),
create_model_metadata(provider="OpenAI", name="gpt-4.1", icon="OpenAI", tool_calling=True),
create_model_metadata(provider="OpenAI", name="gpt-4.1-mini", icon="OpenAI", tool_calling=True),
create_model_metadata(provider="OpenAI", name="gpt-4.1-nano", icon="OpenAI", tool_calling=True),
create_model_metadata(provider="OpenAI", name="gpt-4.5-preview", icon="OpenAI", tool_calling=True, preview=True),
create_model_metadata(provider="OpenAI", name="gpt-4-turbo", icon="OpenAI", tool_calling=True),
create_model_metadata(
provider="OpenAI", name="gpt-4-turbo-preview", icon="OpenAI", tool_calling=True, preview=True
),
create_model_metadata(provider="OpenAI", name="gpt-4", icon="OpenAI", tool_calling=True),
create_model_metadata(provider="OpenAI", name="gpt-3.5-turbo", icon="OpenAI", tool_calling=True),
# Reasoning Models
create_model_metadata(provider="OpenAI", name="o1", icon="OpenAI", reasoning=True),
# Search Models
create_model_metadata(
provider="OpenAI",
name="gpt-4o-mini-search-preview",
icon="OpenAI",
tool_calling=True,
search=True,
preview=True,
),
create_model_metadata(
provider="OpenAI", name="gpt-4o-search-preview", icon="OpenAI", tool_calling=True, search=True, preview=True
),
# Not Supported Models
create_model_metadata(
provider="OpenAI", name="computer-use-preview", icon="OpenAI", not_supported=True, preview=True
),
create_model_metadata(
provider="OpenAI", name="gpt-4o-audio-preview", icon="OpenAI", not_supported=True, preview=True
),
create_model_metadata(
provider="OpenAI", name="gpt-4o-realtime-preview", icon="OpenAI", not_supported=True, preview=True
),
create_model_metadata(
provider="OpenAI", name="gpt-4o-mini-audio-preview", icon="OpenAI", not_supported=True, preview=True
),
create_model_metadata(
provider="OpenAI", name="gpt-4o-mini-realtime-preview", icon="OpenAI", not_supported=True, preview=True
),
create_model_metadata(provider="OpenAI", name="o3-mini", icon="OpenAI", reasoning=True, not_supported=True),
create_model_metadata(provider="OpenAI", name="o1-mini", icon="OpenAI", reasoning=True, not_supported=True),
]
OPENAI_MODEL_NAMES = [
"gpt-4o-mini",
"gpt-4o",
"gpt-4.1",
"gpt-4.1-mini",
"gpt-4.1-nano",
"gpt-4.5-preview",
"gpt-4-turbo",
"gpt-4-turbo-preview",
"gpt-4",
"gpt-3.5-turbo",
metadata["name"]
for metadata in OPENAI_MODELS_DETAILED
if not metadata.get("reasoning", False)
and not metadata.get("search", False)
and not metadata.get("not_supported", False)
]
OPENAI_REASONING_MODEL_NAMES = [
"o1", # High-intelligence reasoning model
metadata["name"]
for metadata in OPENAI_MODELS_DETAILED
if metadata.get("reasoning", False) and not metadata.get("not_supported", False)
]
OPENAI_SEARCH_MODEL_NAMES = [
"gpt-4o-mini-search-preview",
"gpt-4o-search-preview",
metadata["name"]
for metadata in OPENAI_MODELS_DETAILED
if metadata.get("search", False) and not metadata.get("not_supported", False)
]
NOT_SUPPORTED_MODELS = [
"computer-use-preview",
"gpt-4o-audio-preview",
"gpt-4o-realtime-preview",
"gpt-4o-mini-audio-preview",
"gpt-4o-mini-realtime-preview",
"o3-mini",
"o1-mini",
]
NOT_SUPPORTED_MODELS = [metadata["name"] for metadata in OPENAI_MODELS_DETAILED if metadata.get("not_supported", False)]
OPENAI_EMBEDDING_MODEL_NAMES = [
"text-embedding-3-small",

View file

@ -1556,12 +1556,13 @@
"info": "",
"name": "model_name",
"options": [
"claude-opus-4-20250514",
"claude-sonnet-4-20250514",
"claude-3-7-sonnet-latest",
"claude-3-5-sonnet-latest",
"claude-3-5-haiku-latest",
"claude-3-opus-latest",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307"
"claude-3-sonnet-20240229"
],
"options_metadata": [],
"placeholder": "",

View file

@ -9,12 +9,16 @@
"dataType": "TextInput",
"id": "TextInput-WR8cL",
"name": "text",
"output_types": ["Message"]
"output_types": [
"Message"
]
},
"targetHandle": {
"fieldName": "system_message",
"id": "AnthropicModel-tZx4n",
"inputTypes": ["Message"],
"inputTypes": [
"Message"
],
"type": "str"
}
},
@ -33,12 +37,16 @@
"dataType": "AnthropicModel",
"id": "AnthropicModel-fz6rd",
"name": "model_output",
"output_types": ["LanguageModel"]
"output_types": [
"LanguageModel"
]
},
"targetHandle": {
"fieldName": "llm",
"id": "StructuredOutput-tWLRa",
"inputTypes": ["LanguageModel"],
"inputTypes": [
"LanguageModel"
],
"type": "other"
}
},
@ -57,12 +65,16 @@
"dataType": "ParserComponent",
"id": "ParserComponent-wrPyP",
"name": "parsed_text",
"output_types": ["Message"]
"output_types": [
"Message"
]
},
"targetHandle": {
"fieldName": "input_value",
"id": "StructuredOutput-tWLRa",
"inputTypes": ["Message"],
"inputTypes": [
"Message"
],
"type": "str"
}
},
@ -81,12 +93,17 @@
"dataType": "File",
"id": "File-v8ouW",
"name": "dataframe",
"output_types": ["DataFrame"]
"output_types": [
"DataFrame"
]
},
"targetHandle": {
"fieldName": "input_data",
"id": "ParserComponent-wrPyP",
"inputTypes": ["DataFrame", "Data"],
"inputTypes": [
"DataFrame",
"Data"
],
"type": "other"
}
},
@ -105,12 +122,18 @@
"dataType": "AnthropicModel",
"id": "AnthropicModel-tZx4n",
"name": "text_output",
"output_types": ["Message"]
"output_types": [
"Message"
]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-iLekZ",
"inputTypes": ["Data", "DataFrame", "Message"],
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"type": "str"
}
},
@ -129,12 +152,17 @@
"dataType": "StructuredOutput",
"id": "StructuredOutput-tWLRa",
"name": "structured_output_dataframe",
"output_types": ["DataFrame"]
"output_types": [
"DataFrame"
]
},
"targetHandle": {
"fieldName": "input_data",
"id": "parser-ifKFs",
"inputTypes": ["DataFrame", "Data"],
"inputTypes": [
"DataFrame",
"Data"
],
"type": "other"
}
},
@ -153,12 +181,16 @@
"dataType": "parser",
"id": "parser-ifKFs",
"name": "parsed_text",
"output_types": ["Message"]
"output_types": [
"Message"
]
},
"targetHandle": {
"fieldName": "input_value",
"id": "AnthropicModel-tZx4n",
"inputTypes": ["Message"],
"inputTypes": [
"Message"
],
"type": "str"
}
},
@ -175,7 +207,9 @@
"data": {
"id": "TextInput-WR8cL",
"node": {
"base_classes": ["Message"],
"base_classes": [
"Message"
],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -184,7 +218,9 @@
"display_name": "Text Input",
"documentation": "",
"edited": false,
"field_order": ["input_value"],
"field_order": [
"input_value"
],
"frozen": false,
"icon": "type",
"key": "TextInput",
@ -202,7 +238,9 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": ["Message"],
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
@ -234,7 +272,9 @@
"display_name": "Text",
"dynamic": false,
"info": "Text to be passed as input.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -273,7 +313,10 @@
"data": {
"id": "AnthropicModel-fz6rd",
"node": {
"base_classes": ["LanguageModel", "Message"],
"base_classes": [
"LanguageModel",
"Message"
],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -319,7 +362,9 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": ["Message"],
"types": [
"Message"
],
"value": "__UNDEFINED__"
},
{
@ -328,10 +373,14 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": ["api_key"],
"required_inputs": [
"api_key"
],
"selected": "LanguageModel",
"tool_mode": true,
"types": ["LanguageModel"],
"types": [
"LanguageModel"
],
"value": "__UNDEFINED__"
}
],
@ -363,7 +412,9 @@
"display_name": "Anthropic API URL",
"dynamic": false,
"info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -403,7 +454,9 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -446,12 +499,13 @@
"info": "",
"name": "model_name",
"options": [
"claude-opus-4-20250514",
"claude-sonnet-4-20250514",
"claude-3-7-sonnet-latest",
"claude-3-5-sonnet-latest",
"claude-3-5-haiku-latest",
"claude-3-opus-latest",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307"
"claude-3-sonnet-20240229"
],
"options_metadata": [],
"placeholder": "",
@ -470,7 +524,9 @@
"display_name": "Prefill",
"dynamic": false,
"info": "Prefill text to guide the model's response.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -509,7 +565,9 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -595,7 +653,10 @@
"data": {
"id": "AnthropicModel-tZx4n",
"node": {
"base_classes": ["LanguageModel", "Message"],
"base_classes": [
"LanguageModel",
"Message"
],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -641,7 +702,9 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": ["Message"],
"types": [
"Message"
],
"value": "__UNDEFINED__"
},
{
@ -650,10 +713,14 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": ["api_key"],
"required_inputs": [
"api_key"
],
"selected": "LanguageModel",
"tool_mode": true,
"types": ["LanguageModel"],
"types": [
"LanguageModel"
],
"value": "__UNDEFINED__"
}
],
@ -685,7 +752,9 @@
"display_name": "Anthropic API URL",
"dynamic": false,
"info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -725,7 +794,9 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -768,12 +839,13 @@
"info": "",
"name": "model_name",
"options": [
"claude-opus-4-20250514",
"claude-sonnet-4-20250514",
"claude-3-7-sonnet-latest",
"claude-3-5-sonnet-latest",
"claude-3-5-haiku-latest",
"claude-3-opus-latest",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307"
"claude-3-sonnet-20240229"
],
"options_metadata": [],
"placeholder": "",
@ -792,7 +864,9 @@
"display_name": "Prefill",
"dynamic": false,
"info": "Prefill text to guide the model's response.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -831,7 +905,9 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -917,7 +993,9 @@
"data": {
"id": "ChatOutput-iLekZ",
"node": {
"base_classes": ["Message"],
"base_classes": [
"Message"
],
"beta": false,
"category": "outputs",
"conditional_paths": [],
@ -954,7 +1032,9 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": ["Message"],
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
@ -968,7 +1048,9 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -989,7 +1071,9 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1046,7 +1130,9 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1067,7 +1153,11 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": ["Data", "DataFrame", "Message"],
"input_types": [
"Data",
"DataFrame",
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1091,7 +1181,10 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1108,7 +1201,9 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1129,7 +1224,9 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1168,7 +1265,9 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1349,7 +1448,10 @@
"data": {
"id": "StructuredOutput-tWLRa",
"node": {
"base_classes": ["Data", "DataFrame"],
"base_classes": [
"Data",
"DataFrame"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1381,7 +1483,9 @@
"name": "structured_output",
"selected": "Data",
"tool_mode": true,
"types": ["Data"],
"types": [
"Data"
],
"value": "__UNDEFINED__"
},
{
@ -1392,7 +1496,9 @@
"name": "structured_output_dataframe",
"selected": "DataFrame",
"tool_mode": true,
"types": ["DataFrame"],
"types": [
"DataFrame"
],
"value": "__UNDEFINED__"
}
],
@ -1423,7 +1529,9 @@
"display_name": "Input Message",
"dynamic": false,
"info": "The input message to the language model.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1444,7 +1552,9 @@
"display_name": "Language Model",
"dynamic": false,
"info": "The language model to use to generate the structured output.",
"input_types": ["LanguageModel"],
"input_types": [
"LanguageModel"
],
"list": false,
"list_add_label": "Add More",
"name": "llm",
@ -1639,7 +1749,9 @@
"display_name": "Schema Name",
"dynamic": false,
"info": "Provide a name for the output data schema.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1661,7 +1773,9 @@
"display_name": "Format Instructions",
"dynamic": false,
"info": "The instructions to the language model for formatting the output.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1700,7 +1814,9 @@
"data": {
"id": "File-v8ouW",
"node": {
"base_classes": ["Data"],
"base_classes": [
"Data"
],
"beta": false,
"category": "data",
"conditional_paths": [],
@ -1737,7 +1853,9 @@
"required_inputs": [],
"selected": "Data",
"tool_mode": true,
"types": ["Data"],
"types": [
"Data"
],
"value": "__UNDEFINED__"
},
{
@ -1749,7 +1867,9 @@
"required_inputs": [],
"selected": "DataFrame",
"tool_mode": true,
"types": ["DataFrame"],
"types": [
"DataFrame"
],
"value": "__UNDEFINED__"
},
{
@ -1761,7 +1881,9 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": ["Message"],
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
@ -1829,7 +1951,10 @@
"display_name": "Server File Path",
"dynamic": false,
"info": "Data object with a 'file_path' property pointing to server file or a Message object with a path to the file. Supercedes 'Path' but supports same file types.",
"input_types": ["Data", "Message"],
"input_types": [
"Data",
"Message"
],
"list": true,
"list_add_label": "Add More",
"name": "file_path",
@ -1999,7 +2124,9 @@
"data": {
"id": "ParserComponent-wrPyP",
"node": {
"base_classes": ["Message"],
"base_classes": [
"Message"
],
"beta": false,
"category": "processing",
"conditional_paths": [],
@ -2008,7 +2135,12 @@
"display_name": "Parser",
"documentation": "",
"edited": false,
"field_order": ["stringify", "template", "input_data", "sep"],
"field_order": [
"stringify",
"template",
"input_data",
"sep"
],
"frozen": false,
"icon": "braces",
"key": "ParserComponent",
@ -2026,7 +2158,9 @@
"name": "parsed_text",
"selected": "Message",
"tool_mode": true,
"types": ["Message"],
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
@ -2058,7 +2192,10 @@
"display_name": "Data or DataFrame",
"dynamic": false,
"info": "Accepts either a DataFrame or a Data object.",
"input_types": ["DataFrame", "Data"],
"input_types": [
"DataFrame",
"Data"
],
"list": false,
"list_add_label": "Add More",
"name": "input_data",
@ -2077,7 +2214,10 @@
"dynamic": false,
"info": "Convert into raw string instead of using a template.",
"name": "mode",
"options": ["Parser", "Stringify"],
"options": [
"Parser",
"Stringify"
],
"placeholder": "",
"real_time_refresh": true,
"required": false,
@ -2095,7 +2235,9 @@
"display_name": "Template",
"dynamic": true,
"info": "Use variables within curly brackets to extract column values for DataFrames or key values for Data.For example: `Name: {Name}, Age: {Age}, Country: {Country}`",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2117,7 +2259,9 @@
"display_name": "Separator",
"dynamic": false,
"info": "String used to separate rows/items.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2155,7 +2299,9 @@
"data": {
"id": "parser-ifKFs",
"node": {
"base_classes": ["Message"],
"base_classes": [
"Message"
],
"beta": false,
"category": "processing",
"conditional_paths": [],
@ -2164,7 +2310,12 @@
"display_name": "Parser",
"documentation": "",
"edited": false,
"field_order": ["mode", "pattern", "input_data", "sep"],
"field_order": [
"mode",
"pattern",
"input_data",
"sep"
],
"frozen": false,
"icon": "braces",
"key": "parser",
@ -2182,7 +2333,9 @@
"name": "parsed_text",
"selected": "Message",
"tool_mode": true,
"types": ["Message"],
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
@ -2214,7 +2367,10 @@
"display_name": "Data or DataFrame",
"dynamic": false,
"info": "Accepts either a DataFrame or a Data object.",
"input_types": ["DataFrame", "Data"],
"input_types": [
"DataFrame",
"Data"
],
"list": false,
"list_add_label": "Add More",
"name": "input_data",
@ -2233,7 +2389,10 @@
"dynamic": false,
"info": "Convert into raw string instead of using a template.",
"name": "mode",
"options": ["Parser", "Stringify"],
"options": [
"Parser",
"Stringify"
],
"placeholder": "",
"real_time_refresh": true,
"required": false,
@ -2251,7 +2410,9 @@
"display_name": "Template",
"dynamic": true,
"info": "Use variables within curly brackets to extract column values for DataFrames or key values for Data.For example: `Name: {Name}, Age: {Age}, Country: {Country}`",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2273,7 +2434,9 @@
"display_name": "Separator",
"dynamic": false,
"info": "String used to separate rows/items.",
"input_types": ["Message"],
"input_types": [
"Message"
],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2320,5 +2483,8 @@
"is_component": false,
"last_tested_version": "1.2.0",
"name": "Portfolio Website Code Generator",
"tags": ["chatbots", "coding"]
}
"tags": [
"chatbots",
"coding"
]
}

View file

@ -541,12 +541,13 @@
"info": "",
"name": "model_name",
"options": [
"claude-opus-4-20250514",
"claude-sonnet-4-20250514",
"claude-3-7-sonnet-latest",
"claude-3-5-sonnet-latest",
"claude-3-5-haiku-latest",
"claude-3-opus-latest",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307"
"claude-3-sonnet-20240229"
],
"options_metadata": [],
"placeholder": "",

View file

@ -708,7 +708,6 @@
},
"node_modules/@clack/prompts/node_modules/is-unicode-supported": {
"version": "1.3.0",
"extraneous": true,
"inBundle": true,
"license": "MIT",
"engines": {