From e2e6f804618c650d17034b76e9392d31caf11307 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 12:55:34 -0300 Subject: [PATCH] Update input variable names in build functions --- .../components/chains/ConversationChain.py | 2 +- .../components/chains/LLMCheckerChain.py | 2 +- .../components/chains/LLMMathChain.py | 2 +- .../langflow/components/chains/RetrievalQA.py | 2 +- .../chains/RetrievalQAWithSourcesChain.py | 2 +- .../components/chains/SQLGenerator.py | 2 +- .../langflow/components/io/ChatInput.py | 24 +++++++++---------- .../langflow/components/io/ChatOutput.py | 24 +++++++++---------- .../components/models/AmazonBedrockModel.py | 2 +- .../components/models/AnthropicModel.py | 10 +++++--- .../components/models/AzureOpenAIModel.py | 6 +++-- .../models/BaiduQianfanChatModel.py | 2 +- .../components/models/CTransformersModel.py | 6 +++-- .../langflow/components/models/CohereModel.py | 2 +- .../models/GoogleGenerativeAIModel.py | 2 +- .../components/models/HuggingFaceModel.py | 3 +-- .../components/models/LlamaCppModel.py | 2 +- .../langflow/components/models/OllamaModel.py | 2 +- .../langflow/components/models/OpenAIModel.py | 3 ++- .../components/models/VertexAiModel.py | 2 +- .../components/utilities/RunnableExecutor.py | 2 +- .../components/vectorstores/ChromaSearch.py | 6 +++-- .../custom_component/custom_component.py | 4 ++-- 23 files changed, 62 insertions(+), 52 deletions(-) diff --git a/src/backend/langflow/components/chains/ConversationChain.py b/src/backend/langflow/components/chains/ConversationChain.py index 3183954a3..7d9d28dcc 100644 --- a/src/backend/langflow/components/chains/ConversationChain.py +++ b/src/backend/langflow/components/chains/ConversationChain.py @@ -23,7 +23,7 @@ class ConversationChainComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, llm: BaseLanguageModel, memory: Optional[BaseMemory] = None, ) -> Text: diff --git 
a/src/backend/langflow/components/chains/LLMCheckerChain.py b/src/backend/langflow/components/chains/LLMCheckerChain.py index bfee0b5a9..15a540311 100644 --- a/src/backend/langflow/components/chains/LLMCheckerChain.py +++ b/src/backend/langflow/components/chains/LLMCheckerChain.py @@ -18,7 +18,7 @@ class LLMCheckerChainComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, llm: BaseLanguageModel, ) -> Text: diff --git a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/langflow/components/chains/LLMMathChain.py index 919de34e6..7fb253b83 100644 --- a/src/backend/langflow/components/chains/LLMMathChain.py +++ b/src/backend/langflow/components/chains/LLMMathChain.py @@ -24,7 +24,7 @@ class LLMMathChainComponent(CustomComponent): def build( self, - inputs: Text, + input_value: Text, llm: BaseLanguageModel, llm_chain: LLMChain, input_key: str = "question", diff --git a/src/backend/langflow/components/chains/RetrievalQA.py b/src/backend/langflow/components/chains/RetrievalQA.py index 2fe31353e..4968afe87 100644 --- a/src/backend/langflow/components/chains/RetrievalQA.py +++ b/src/backend/langflow/components/chains/RetrievalQA.py @@ -27,7 +27,7 @@ class RetrievalQAComponent(CustomComponent): self, combine_documents_chain: BaseCombineDocumentsChain, retriever: BaseRetriever, - inputs: str = "", + input_value: str = "", memory: Optional[BaseMemory] = None, input_key: str = "query", output_key: str = "result", diff --git a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py index faf3ab7dd..8be64c631 100644 --- a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py +++ b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py @@ -26,7 +26,7 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, retriever: BaseRetriever, llm: BaseLanguageModel, 
chain_type: str, diff --git a/src/backend/langflow/components/chains/SQLGenerator.py b/src/backend/langflow/components/chains/SQLGenerator.py index ea22a6de0..39b8fe394 100644 --- a/src/backend/langflow/components/chains/SQLGenerator.py +++ b/src/backend/langflow/components/chains/SQLGenerator.py @@ -28,7 +28,7 @@ class SQLGeneratorComponent(CustomComponent): def build( self, - inputs: Text, + input_value: Text, db: SQLDatabase, llm: BaseLanguageModel, top_k: int = 5, diff --git a/src/backend/langflow/components/io/ChatInput.py b/src/backend/langflow/components/io/ChatInput.py index 6d96a6b96..0666f92d1 100644 --- a/src/backend/langflow/components/io/ChatInput.py +++ b/src/backend/langflow/components/io/ChatInput.py @@ -11,7 +11,7 @@ class ChatInput(CustomComponent): def build_config(self): return { - "message": { + "input_value": { "input_types": ["Text"], "display_name": "Message", "multiline": True, @@ -35,26 +35,26 @@ class ChatInput(CustomComponent): self, sender: Optional[str] = "User", sender_name: Optional[str] = "User", - message: Optional[str] = None, + input_value: Optional[str] = None, session_id: Optional[str] = None, return_record: Optional[bool] = False, ) -> Union[Text, Record]: if return_record: - if isinstance(message, Record): + if isinstance(input_value, Record): # Update the data of the record - message.data["sender"] = sender - message.data["sender_name"] = sender_name - message.data["session_id"] = session_id + input_value.data["sender"] = sender + input_value.data["sender_name"] = sender_name + input_value.data["session_id"] = session_id else: - message = Record( - text=message, + input_value = Record( + text=input_value, data={ "sender": sender, "sender_name": sender_name, "session_id": session_id, }, ) - if not message: - message = "" - self.status = message - return message + if not input_value: + input_value = "" + self.status = input_value + return input_value diff --git a/src/backend/langflow/components/io/ChatOutput.py 
b/src/backend/langflow/components/io/ChatOutput.py index 05639cdb2..72667374f 100644 --- a/src/backend/langflow/components/io/ChatOutput.py +++ b/src/backend/langflow/components/io/ChatOutput.py @@ -17,7 +17,7 @@ class ChatOutput(CustomComponent): def build_config(self): return { - "message": {"input_types": ["Text"], "display_name": "Message"}, + "input_value": {"input_types": ["Text"], "display_name": "Message"}, "sender": { "options": ["Machine", "User"], "display_name": "Sender Type", @@ -39,25 +39,25 @@ class ChatOutput(CustomComponent): sender: Optional[str] = "Machine", sender_name: Optional[str] = "AI", session_id: Optional[str] = None, - message: Optional[str] = None, + input_value: Optional[str] = None, return_record: Optional[bool] = False, ) -> Union[Text, Record]: if return_record: - if isinstance(message, Record): + if isinstance(input_value, Record): # Update the data of the record - message.data["sender"] = sender - message.data["sender_name"] = sender_name - message.data["session_id"] = session_id + input_value.data["sender"] = sender + input_value.data["sender_name"] = sender_name + input_value.data["session_id"] = session_id else: - message = Record( - text=message, + input_value = Record( + text=input_value, data={ "sender": sender, "sender_name": sender_name, "session_id": session_id, }, ) - if not message: - message = "" - self.status = message - return message + if not input_value: + input_value = "" + self.status = input_value + return input_value diff --git a/src/backend/langflow/components/models/AmazonBedrockModel.py b/src/backend/langflow/components/models/AmazonBedrockModel.py index a2e008e2e..68e404773 100644 --- a/src/backend/langflow/components/models/AmazonBedrockModel.py +++ b/src/backend/langflow/components/models/AmazonBedrockModel.py @@ -39,7 +39,7 @@ class AmazonBedrockComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, model_id: str = "anthropic.claude-instant-v1", credentials_profile_name: 
Optional[str] = None, region_name: Optional[str] = None, diff --git a/src/backend/langflow/components/models/AnthropicModel.py b/src/backend/langflow/components/models/AnthropicModel.py index 793bec46a..be6e46d9a 100644 --- a/src/backend/langflow/components/models/AnthropicModel.py +++ b/src/backend/langflow/components/models/AnthropicModel.py @@ -9,7 +9,9 @@ from langflow.field_typing import Text class AnthropicLLM(CustomComponent): display_name: str = "AnthropicModel" - description: str = "Generate text using Anthropic Chat&Completion large language models." + description: str = ( + "Generate text using Anthropic Chat&Completion large language models." + ) def build_config(self): return { @@ -53,7 +55,7 @@ class AnthropicLLM(CustomComponent): def build( self, model: str, - inputs: str, + input_value: str, anthropic_api_key: Optional[str] = None, max_tokens: Optional[int] = None, temperature: Optional[float] = None, @@ -66,7 +68,9 @@ class AnthropicLLM(CustomComponent): try: output = ChatAnthropic( model_name=model, - anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None), + anthropic_api_key=( + SecretStr(anthropic_api_key) if anthropic_api_key else None + ), max_tokens_to_sample=max_tokens, # type: ignore temperature=temperature, anthropic_api_url=api_endpoint, diff --git a/src/backend/langflow/components/models/AzureOpenAIModel.py b/src/backend/langflow/components/models/AzureOpenAIModel.py index 1e646e43a..be1f724bf 100644 --- a/src/backend/langflow/components/models/AzureOpenAIModel.py +++ b/src/backend/langflow/components/models/AzureOpenAIModel.py @@ -9,7 +9,9 @@ from langflow import CustomComponent class AzureChatOpenAIComponent(CustomComponent): display_name: str = "AzureOpenAI Model" description: str = "Generate text using LLM model from Azure OpenAI." 
- documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai" + documentation: str = ( + "https://python.langchain.com/docs/integrations/llms/azure_openai" + ) beta = False AZURE_OPENAI_MODELS = [ @@ -78,7 +80,7 @@ class AzureChatOpenAIComponent(CustomComponent): self, model: str, azure_endpoint: str, - inputs: str, + input_value: str, azure_deployment: str, api_key: str, api_version: str, diff --git a/src/backend/langflow/components/models/BaiduQianfanChatModel.py b/src/backend/langflow/components/models/BaiduQianfanChatModel.py index 88051d0e9..9eadb7013 100644 --- a/src/backend/langflow/components/models/BaiduQianfanChatModel.py +++ b/src/backend/langflow/components/models/BaiduQianfanChatModel.py @@ -73,7 +73,7 @@ class QianfanChatEndpointComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, model: str = "ERNIE-Bot-turbo", qianfan_ak: Optional[str] = None, qianfan_sk: Optional[str] = None, diff --git a/src/backend/langflow/components/models/CTransformersModel.py b/src/backend/langflow/components/models/CTransformersModel.py index 932b1b351..60cc2eb12 100644 --- a/src/backend/langflow/components/models/CTransformersModel.py +++ b/src/backend/langflow/components/models/CTransformersModel.py @@ -35,11 +35,13 @@ class CTransformersComponent(CustomComponent): self, model: str, model_file: str, - inputs: str, + input_value: str, model_type: str, config: Optional[Dict] = None, ) -> Text: - output = CTransformers(model=model, model_file=model_file, model_type=model_type, config=config) + output = CTransformers( + model=model, model_file=model_file, model_type=model_type, config=config + ) - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result diff --git a/src/backend/langflow/components/models/CohereModel.py b/src/backend/langflow/components/models/CohereModel.py index 3912cb855..28b198ec1 100644 ---
a/src/backend/langflow/components/models/CohereModel.py +++ b/src/backend/langflow/components/models/CohereModel.py @@ -34,7 +34,7 @@ class CohereComponent(CustomComponent): def build( self, cohere_api_key: str, - inputs: str, + input_value: str, max_tokens: int = 256, temperature: float = 0.75, ) -> Text: diff --git a/src/backend/langflow/components/models/GoogleGenerativeAIModel.py b/src/backend/langflow/components/models/GoogleGenerativeAIModel.py index ce967bd57..2ff01c4c7 100644 --- a/src/backend/langflow/components/models/GoogleGenerativeAIModel.py +++ b/src/backend/langflow/components/models/GoogleGenerativeAIModel.py @@ -57,7 +57,7 @@ class GoogleGenerativeAIComponent(CustomComponent): self, google_api_key: str, model: str, - inputs: str, + input_value: str, max_output_tokens: Optional[int] = None, temperature: float = 0.1, top_k: Optional[int] = None, diff --git a/src/backend/langflow/components/models/HuggingFaceModel.py b/src/backend/langflow/components/models/HuggingFaceModel.py index 4357ede61..394938344 100644 --- a/src/backend/langflow/components/models/HuggingFaceModel.py +++ b/src/backend/langflow/components/models/HuggingFaceModel.py @@ -4,7 +4,6 @@ from langchain_community.chat_models.huggingface import ChatHuggingFace from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint from langflow import CustomComponent - from langflow.field_typing import Text @@ -30,7 +29,7 @@ class HuggingFaceEndpointsComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, endpoint_url: str, task: str = "text2text-generation", huggingfacehub_api_token: Optional[str] = None, diff --git a/src/backend/langflow/components/models/LlamaCppModel.py b/src/backend/langflow/components/models/LlamaCppModel.py index af0de5159..53a6f8ace 100644 --- a/src/backend/langflow/components/models/LlamaCppModel.py +++ b/src/backend/langflow/components/models/LlamaCppModel.py @@ -62,7 +62,7 @@ class LlamaCppComponent(CustomComponent): def build( 
self, model_path: str, - inputs: str, + input_value: str, grammar: Optional[str] = None, cache: Optional[bool] = None, client: Optional[Any] = None, diff --git a/src/backend/langflow/components/models/OllamaModel.py b/src/backend/langflow/components/models/OllamaModel.py index 129f96482..3dc8dacab 100644 --- a/src/backend/langflow/components/models/OllamaModel.py +++ b/src/backend/langflow/components/models/OllamaModel.py @@ -171,7 +171,7 @@ class ChatOllamaComponent(CustomComponent): self, base_url: Optional[str], model: str, - inputs: str, + input_value: str, mirostat: Optional[str], mirostat_eta: Optional[float] = None, mirostat_tau: Optional[float] = None, diff --git a/src/backend/langflow/components/models/OpenAIModel.py b/src/backend/langflow/components/models/OpenAIModel.py index 1cc352b20..07ba7013c 100644 --- a/src/backend/langflow/components/models/OpenAIModel.py +++ b/src/backend/langflow/components/models/OpenAIModel.py @@ -1,6 +1,7 @@ from typing import Optional from langchain_openai import ChatOpenAI + from langflow import CustomComponent from langflow.field_typing import NestedDict, Text @@ -60,7 +61,7 @@ class OpenAIModelComponent(CustomComponent): def build( self, - inputs: Text, + input_value: Text, max_tokens: Optional[int] = 256, model_kwargs: NestedDict = {}, model_name: str = "gpt-4-1106-preview", diff --git a/src/backend/langflow/components/models/VertexAiModel.py b/src/backend/langflow/components/models/VertexAiModel.py index eee804e02..81338f723 100644 --- a/src/backend/langflow/components/models/VertexAiModel.py +++ b/src/backend/langflow/components/models/VertexAiModel.py @@ -62,7 +62,7 @@ class ChatVertexAIComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, credentials: Optional[str], project: str, examples: Optional[List[BaseMessage]] = [], diff --git a/src/backend/langflow/components/utilities/RunnableExecutor.py b/src/backend/langflow/components/utilities/RunnableExecutor.py index f83f352b4..5533e6d1d 
100644 --- a/src/backend/langflow/components/utilities/RunnableExecutor.py +++ b/src/backend/langflow/components/utilities/RunnableExecutor.py @@ -32,7 +32,7 @@ class RunnableExecComponent(CustomComponent): def build( self, input_key: str, - inputs: str, + input_value: str, runnable: Runnable, output_key: str = "output", ) -> Text: diff --git a/src/backend/langflow/components/vectorstores/ChromaSearch.py b/src/backend/langflow/components/vectorstores/ChromaSearch.py index c6eb1ebac..5dd33abf2 100644 --- a/src/backend/langflow/components/vectorstores/ChromaSearch.py +++ b/src/backend/langflow/components/vectorstores/ChromaSearch.py @@ -2,6 +2,7 @@ from typing import List, Optional import chromadb # type: ignore from langchain_community.vectorstores.chroma import Chroma + from langflow import CustomComponent from langflow.field_typing import Embeddings, Text from langflow.schema import Record, docs_to_records @@ -57,7 +58,7 @@ class ChromaSearchComponent(CustomComponent): def build( self, - inputs: Text, + input_value: Text, search_type: str, collection_name: str, embedding: Embeddings, @@ -92,7 +93,8 @@ class ChromaSearchComponent(CustomComponent): if chroma_server_host is not None: chroma_settings = chromadb.config.Settings( - chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None, + chroma_server_cors_allow_origins=chroma_server_cors_allow_origins + or None, chroma_server_host=chroma_server_host, chroma_server_port=chroma_server_port or None, chroma_server_grpc_port=chroma_server_grpc_port or None, diff --git a/src/backend/langflow/interface/custom/custom_component/custom_component.py b/src/backend/langflow/interface/custom/custom_component/custom_component.py index 549c0dad3..a8c81f041 100644 --- a/src/backend/langflow/interface/custom/custom_component/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component/custom_component.py @@ -47,10 +47,10 @@ class CustomComponent(Component): """The icon of the component. 
It should be an emoji. Defaults to None.""" is_input: Optional[bool] = None """The input state of the component. Defaults to None. - If True, the component must have a field named 'message'.""" + If True, the component must have a field named 'input_value'.""" is_output: Optional[bool] = None """The output state of the component. Defaults to None. - If True, the component must have a field named 'message'.""" + If True, the component must have a field named 'input_value'.""" code: Optional[str] = None """The code of the component. Defaults to None.""" field_config: dict = {}