diff --git a/src/backend/langflow/components/llms/LiteLLM.py b/src/backend/langflow/components/llms/ChatLiteLLM.py
similarity index 97%
rename from src/backend/langflow/components/llms/LiteLLM.py
rename to src/backend/langflow/components/llms/ChatLiteLLM.py
index 6826bf1b2..25f75210b 100644
--- a/src/backend/langflow/components/llms/LiteLLM.py
+++ b/src/backend/langflow/components/llms/ChatLiteLLM.py
@@ -7,7 +7,7 @@ import os
 
 
 class LiteLLMComponent(CustomComponent):
-    display_name = "LiteLLM"
+    display_name = "ChatLiteLLM"
     description = "`LiteLLM` collection of large language models."
     documentation = "https://python.langchain.com/docs/integrations/chat/litellm"
@@ -37,7 +37,7 @@ class LiteLLMComponent(CustomComponent):
             "temperature": {
                 "display_name": "Temperature",
                 "field_type": "float",
-                "advanced": True,
+                "advanced": False,
                 "required": False,
                 "default": 0.7,
             },
@@ -72,7 +72,7 @@ class LiteLLMComponent(CustomComponent):
             "max_tokens": {
                 "display_name": "Max tokens",
                 "field_type": "int",
-                "advanced": True,
+                "advanced": False,
                 "required": False,
                 "default": 256,
                 "info": "The maximum number of tokens to generate for each chat completion.",