Update field order in model components

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-03-29 22:39:30 -03:00
commit a200f58251
9 changed files with 128 additions and 2 deletions

View file

@@ -12,6 +12,17 @@ class AnthropicLLM(LCModelComponent):
description: str = "Generate text using Anthropic Chat&Completion LLMs."
icon = "Anthropic"
field_order = [
"model",
"anthropic_api_key",
"max_tokens",
"temperature",
"anthropic_api_url",
"input_value",
"system_message",
"stream",
]
def build_config(self):
return {
"model": {

View file

@@ -14,6 +14,19 @@ class AzureChatOpenAIComponent(LCModelComponent):
beta = False
icon = "Azure"
field_order = [
"model",
"azure_endpoint",
"azure_deployment",
"api_version",
"api_key",
"temperature",
"max_tokens",
"input_value",
"system_message",
"stream",
]
AZURE_OPENAI_MODELS = [
"gpt-35-turbo",
"gpt-35-turbo-16k",

View file

@@ -13,6 +13,19 @@ class QianfanChatEndpointComponent(LCModelComponent):
documentation: str = "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint."
icon = "BaiduQianfan"
field_order = [
"model",
"qianfan_ak",
"qianfan_sk",
"top_p",
"temperature",
"penalty_score",
"endpoint",
"input_value",
"system_message",
"stream",
]
def build_config(self):
return {
"model": {

View file

@@ -1,6 +1,5 @@
from typing import Optional
from langchain_community.chat_models.cohere import ChatCohere
from pydantic.v1 import SecretStr
@@ -15,6 +14,15 @@ class CohereComponent(LCModelComponent):
icon = "Cohere"
field_order = [
"cohere_api_key",
"max_tokens",
"temperature",
"input_value",
"system_message",
"stream",
]
def build_config(self):
return {
"cohere_api_key": {

View file

@@ -11,7 +11,19 @@ class GoogleGenerativeAIComponent(LCModelComponent):
display_name: str = "Google Generative AI"
description: str = "Generate text using Google Generative AI."
icon = "GoogleGenerativeAI"
icon = "Google"
field_order = [
"google_api_key",
"model",
"max_output_tokens",
"temperature",
"top_k",
"top_p",
"n",
"input_value",
"system_message",
"stream",
]
def build_config(self):
return {

View file

@@ -12,6 +12,16 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
description: str = "Generate text using Hugging Face Inference APIs."
icon = "HuggingFace"
field_order = [
"endpoint_url",
"task",
"huggingfacehub_api_token",
"model_kwargs",
"input_value",
"system_message",
"stream",
]
def build_config(self):
return {
"endpoint_url": {"display_name": "Endpoint URL", "password": True},

View file

@@ -17,6 +17,37 @@ class ChatOllamaComponent(LCModelComponent):
description = "Generate text using Ollama Local LLMs."
icon = "Ollama"
field_order = [
"base_url",
"model",
"temperature",
"cache",
"callback_manager",
"callbacks",
"format",
"metadata",
"mirostat",
"mirostat_eta",
"mirostat_tau",
"num_ctx",
"num_gpu",
"num_thread",
"repeat_last_n",
"repeat_penalty",
"tfs_z",
"timeout",
"top_k",
"top_p",
"verbose",
"tags",
"stop",
"system",
"template",
"input_value",
"system_message",
"stream",
]
def build_config(self) -> dict:
return {
"base_url": {

View file

@@ -11,6 +11,18 @@ class OpenAIModelComponent(LCModelComponent):
description = "Generates text using OpenAI LLMs."
icon = "OpenAI"
field_order = [
"max_tokens",
"model_kwargs",
"model_name",
"openai_api_base",
"openai_api_key",
"temperature",
"input_value",
"system_message",
"stream",
]
def build_config(self):
return {
"input_value": {"display_name": "Input"},

View file

@@ -11,6 +11,22 @@ class ChatVertexAIComponent(LCModelComponent):
description = "Generate text using Vertex AI LLMs."
icon = "VertexAI"
field_order = [
"credentials",
"project",
"examples",
"location",
"max_output_tokens",
"model_name",
"temperature",
"top_k",
"top_p",
"verbose",
"input_value",
"system_message",
"stream",
]
def build_config(self):
return {
"credentials": {