diff --git a/src/backend/langflow/template/field/base.py b/src/backend/langflow/template/field/base.py
index a9c18ff63..fdfdca562 100644
--- a/src/backend/langflow/template/field/base.py
+++ b/src/backend/langflow/template/field/base.py
@@ -21,6 +21,7 @@ class TemplateFieldCreator(BaseModel, ABC):
     name: str = ""
     display_name: Optional[str] = None
     advanced: bool = False
+    info: Optional[str] = ""
 
     def to_dict(self):
         result = self.dict()
diff --git a/src/backend/langflow/template/frontend_node/constants.py b/src/backend/langflow/template/frontend_node/constants.py
index 20b8a0c61..ac2e400b3 100644
--- a/src/backend/langflow/template/frontend_node/constants.py
+++ b/src/backend/langflow/template/frontend_node/constants.py
@@ -32,3 +32,13 @@ You are a good listener and you can talk about anything.
 HUMAN_PROMPT = "{input}"
 
 QA_CHAIN_TYPES = ["stuff", "map_reduce", "map_rerank", "refine"]
+
+
+# This variable is used to tell the user
+# that it can be changed to use other APIs
+# like Prem and LocalAI
+OPENAI_API_BASE_INFO = """
+The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.
+
+You can change this to use other APIs like Prem and LocalAI.
+"""
diff --git a/src/backend/langflow/template/frontend_node/llms.py b/src/backend/langflow/template/frontend_node/llms.py
index 272e42c7f..65bffc303 100644
--- a/src/backend/langflow/template/frontend_node/llms.py
+++ b/src/backend/langflow/template/frontend_node/llms.py
@@ -2,6 +2,7 @@ from typing import Optional
 
 from langflow.template.field.base import TemplateField
 from langflow.template.frontend_node.base import FrontendNode
+from langflow.template.frontend_node.constants import OPENAI_API_BASE_INFO
 
 
 class LLMFrontendNode(FrontendNode):
@@ -15,6 +16,9 @@ class LLMFrontendNode(FrontendNode):
         if "key" not in field.name.lower() and "token" not in field.name.lower():
             field.password = False
 
+        if field.name == "openai_api_base":
+            field.info = OPENAI_API_BASE_INFO
+
     @staticmethod
     def format_azure_field(field: TemplateField):
         if field.name == "model_name":