From e0a67b0410df98f51db13aea146748fa9db2ddec Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 16:52:04 -0300 Subject: [PATCH] Update model invocation in OpenAIModel, VertexAiModel, CohereModel, LlamaCppModel, AnthropicModel, GoogleGenerativeAIModel, AzureOpenAIModel, CTransformersModel, OllamaModel, AmazonBedrockModel, BaiduQianfanChatModel, and HuggingFaceModel --- src/backend/langflow/components/chains/ConversationChain.py | 2 +- src/backend/langflow/components/models/AmazonBedrockModel.py | 2 +- src/backend/langflow/components/models/AnthropicModel.py | 2 +- src/backend/langflow/components/models/AzureOpenAIModel.py | 2 +- src/backend/langflow/components/models/BaiduQianfanChatModel.py | 2 +- src/backend/langflow/components/models/CTransformersModel.py | 2 +- src/backend/langflow/components/models/CohereModel.py | 2 +- .../langflow/components/models/GoogleGenerativeAIModel.py | 2 +- src/backend/langflow/components/models/HuggingFaceModel.py | 2 +- src/backend/langflow/components/models/LlamaCppModel.py | 2 +- src/backend/langflow/components/models/OllamaModel.py | 2 +- src/backend/langflow/components/models/OpenAIModel.py | 2 +- src/backend/langflow/components/models/VertexAiModel.py | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/backend/langflow/components/chains/ConversationChain.py b/src/backend/langflow/components/chains/ConversationChain.py index 7d9d28dcc..726056138 100644 --- a/src/backend/langflow/components/chains/ConversationChain.py +++ b/src/backend/langflow/components/chains/ConversationChain.py @@ -31,7 +31,7 @@ class ConversationChainComponent(CustomComponent): chain = ConversationChain(llm=llm) else: chain = ConversationChain(llm=llm, memory=memory) - result = chain.invoke(inputs) + result = chain.invoke(input_value) # result is an AIMessage which is a subclass of BaseMessage # We need to check if it is a string or a BaseMessage if hasattr(result, "content") and 
isinstance(result.content, str): diff --git a/src/backend/langflow/components/models/AmazonBedrockModel.py b/src/backend/langflow/components/models/AmazonBedrockModel.py index 68e404773..478bf8e9a 100644 --- a/src/backend/langflow/components/models/AmazonBedrockModel.py +++ b/src/backend/langflow/components/models/AmazonBedrockModel.py @@ -60,7 +60,7 @@ class AmazonBedrockComponent(CustomComponent): ) # type: ignore except Exception as e: raise ValueError("Could not connect to AmazonBedrock API.") from e - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/AnthropicModel.py b/src/backend/langflow/components/models/AnthropicModel.py index be6e46d9a..cb8e55194 100644 --- a/src/backend/langflow/components/models/AnthropicModel.py +++ b/src/backend/langflow/components/models/AnthropicModel.py @@ -77,7 +77,7 @@ class AnthropicLLM(CustomComponent): ) except Exception as e: raise ValueError("Could not connect to Anthropic API.") from e - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/AzureOpenAIModel.py b/src/backend/langflow/components/models/AzureOpenAIModel.py index be1f724bf..df8413870 100644 --- a/src/backend/langflow/components/models/AzureOpenAIModel.py +++ b/src/backend/langflow/components/models/AzureOpenAIModel.py @@ -99,7 +99,7 @@ class AzureChatOpenAIComponent(CustomComponent): ) except Exception as e: raise ValueError("Could not connect to AzureOpenAI API.") from e - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git 
a/src/backend/langflow/components/models/BaiduQianfanChatModel.py b/src/backend/langflow/components/models/BaiduQianfanChatModel.py index 9eadb7013..0075316a2 100644 --- a/src/backend/langflow/components/models/BaiduQianfanChatModel.py +++ b/src/backend/langflow/components/models/BaiduQianfanChatModel.py @@ -94,7 +94,7 @@ class QianfanChatEndpointComponent(CustomComponent): ) except Exception as e: raise ValueError("Could not connect to Baidu Qianfan API.") from e - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/CTransformersModel.py b/src/backend/langflow/components/models/CTransformersModel.py index 60cc2eb12..fb292d9cf 100644 --- a/src/backend/langflow/components/models/CTransformersModel.py +++ b/src/backend/langflow/components/models/CTransformersModel.py @@ -42,7 +42,7 @@ class CTransformersComponent(CustomComponent): output = CTransformers( model=model, model_file=model_file, model_type=model_type, config=config ) - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/CohereModel.py b/src/backend/langflow/components/models/CohereModel.py index 28b198ec1..c2a004c38 100644 --- a/src/backend/langflow/components/models/CohereModel.py +++ b/src/backend/langflow/components/models/CohereModel.py @@ -43,7 +43,7 @@ class CohereComponent(CustomComponent): max_tokens=max_tokens, temperature=temperature, ) - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/GoogleGenerativeAIModel.py 
b/src/backend/langflow/components/models/GoogleGenerativeAIModel.py index 2ff01c4c7..3b0c758e8 100644 --- a/src/backend/langflow/components/models/GoogleGenerativeAIModel.py +++ b/src/backend/langflow/components/models/GoogleGenerativeAIModel.py @@ -73,7 +73,7 @@ class GoogleGenerativeAIComponent(CustomComponent): n=n or 1, google_api_key=SecretStr(google_api_key), ) - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/HuggingFaceModel.py b/src/backend/langflow/components/models/HuggingFaceModel.py index 394938344..99fcc6ab0 100644 --- a/src/backend/langflow/components/models/HuggingFaceModel.py +++ b/src/backend/langflow/components/models/HuggingFaceModel.py @@ -45,7 +45,7 @@ class HuggingFaceEndpointsComponent(CustomComponent): except Exception as e: raise ValueError("Could not connect to HuggingFace Endpoints API.") from e output = ChatHuggingFace(llm=llm) - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/LlamaCppModel.py b/src/backend/langflow/components/models/LlamaCppModel.py index 53a6f8ace..5941f213b 100644 --- a/src/backend/langflow/components/models/LlamaCppModel.py +++ b/src/backend/langflow/components/models/LlamaCppModel.py @@ -135,7 +135,7 @@ class LlamaCppComponent(CustomComponent): verbose=verbose, vocab_only=vocab_only, ) - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/OllamaModel.py b/src/backend/langflow/components/models/OllamaModel.py index 3dc8dacab..7ae896532 100644 ---
a/src/backend/langflow/components/models/OllamaModel.py +++ b/src/backend/langflow/components/models/OllamaModel.py @@ -250,7 +250,7 @@ class ChatOllamaComponent(CustomComponent): output = ChatOllama(**llm_params) # type: ignore except Exception as e: raise ValueError("Could not initialize Ollama LLM.") from e - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/OpenAIModel.py b/src/backend/langflow/components/models/OpenAIModel.py index 07ba7013c..ee6809e0b 100644 --- a/src/backend/langflow/components/models/OpenAIModel.py +++ b/src/backend/langflow/components/models/OpenAIModel.py @@ -80,7 +80,7 @@ class OpenAIModelComponent(CustomComponent): temperature=temperature, ) - message = model.invoke(inputs) + message = model.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result diff --git a/src/backend/langflow/components/models/VertexAiModel.py b/src/backend/langflow/components/models/VertexAiModel.py index 81338f723..c05dc5e94 100644 --- a/src/backend/langflow/components/models/VertexAiModel.py +++ b/src/backend/langflow/components/models/VertexAiModel.py @@ -92,7 +92,7 @@ class ChatVertexAIComponent(CustomComponent): top_p=top_p, verbose=verbose, ) - message = output.invoke(inputs) + message = output.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result return result