diff --git a/src/backend/langflow/components/models/AmazonBedrock.py b/src/backend/langflow/components/models/AmazonBedrock.py
index 4f051fcde..d679dd501 100644
--- a/src/backend/langflow/components/models/AmazonBedrock.py
+++ b/src/backend/langflow/components/models/AmazonBedrock.py
@@ -60,6 +60,7 @@ class AmazonBedrockComponent(CustomComponent):
             )  # type: ignore
         except Exception as e:
             raise ValueError("Could not connect to AmazonBedrock API.") from e
-        message = output.invoke(input=inputs)
-        self.status = message
-        return message
\ No newline at end of file
+        message = output.invoke(inputs)
+        result = message.content if hasattr(message, "content") else message
+        self.status = result
+        return result
\ No newline at end of file
diff --git a/src/backend/langflow/components/models/AzureOpenAI.py b/src/backend/langflow/components/models/AzureOpenAI.py
new file mode 100644
index 000000000..285c7f316
--- /dev/null
+++ b/src/backend/langflow/components/models/AzureOpenAI.py
@@ -0,0 +1,104 @@
+from typing import Optional
+from langflow import CustomComponent
+from langflow.field_typing import Text
+from langchain_openai import AzureChatOpenAI
+
+
+class AzureChatOpenAIComponent(CustomComponent):
+    display_name: str = "AzureOpenAI model"
+    description: str = "LLM model from Azure OpenAI."
+    documentation: str = (
+        "https://python.langchain.com/docs/integrations/llms/azure_openai"
+    )
+    beta = False
+
+    AZURE_OPENAI_MODELS = [
+        "gpt-35-turbo",
+        "gpt-35-turbo-16k",
+        "gpt-35-turbo-instruct",
+        "gpt-4",
+        "gpt-4-32k",
+        "gpt-4-vision",
+    ]
+
+    AZURE_OPENAI_API_VERSIONS = [
+        "2023-03-15-preview",
+        "2023-05-15",
+        "2023-06-01-preview",
+        "2023-07-01-preview",
+        "2023-08-01-preview",
+        "2023-09-01-preview",
+        "2023-12-01-preview",
+    ]
+
+    def build_config(self):
+        return {
+            "model": {
+                "display_name": "Model Name",
+                "value": self.AZURE_OPENAI_MODELS[0],
+                "options": self.AZURE_OPENAI_MODELS,
+                "required": True,
+            },
+            "azure_endpoint": {
+                "display_name": "Azure Endpoint",
+                "required": True,
+                "info": "Your Azure endpoint, including the resource. "
+                "Example: `https://example-resource.azure.openai.com/`",
+            },
+            "azure_deployment": {
+                "display_name": "Deployment Name",
+                "required": True,
+            },
+            "api_version": {
+                "display_name": "API Version",
+                "options": self.AZURE_OPENAI_API_VERSIONS,
+                "value": self.AZURE_OPENAI_API_VERSIONS[-1],
+                "required": True,
+                "advanced": True,
+            },
+            "api_key": {"display_name": "API Key", "required": True, "password": True},
+            "temperature": {
+                "display_name": "Temperature",
+                "value": 0.7,
+                "field_type": "float",
+                "required": False,
+            },
+            "max_tokens": {
+                "display_name": "Max Tokens",
+                "value": 1000,
+                "required": False,
+                "field_type": "int",
+                "advanced": True,
+                "info": "Maximum number of tokens to generate.",
+            },
+            "code": {"show": False},
+            "inputs": {"display_name": "Input"},
+        }
+
+    def build(
+        self,
+        model: str,
+        azure_endpoint: str,
+        inputs: str,
+        azure_deployment: str,
+        api_key: str,
+        api_version: str,
+        temperature: float = 0.7,
+        max_tokens: Optional[int] = 1000,
+    ) -> Text:
+        try:
+            output = AzureChatOpenAI(
+                model=model,
+                azure_endpoint=azure_endpoint,
+                azure_deployment=azure_deployment,
+                api_version=api_version,
+                api_key=api_key,
+                temperature=temperature,
+                max_tokens=max_tokens,
+            )
+        except Exception as e:
+            raise ValueError("Could not connect to AzureOpenAI API.") from e
+        message = output.invoke(inputs)
+        result = message.content if hasattr(message, "content") else message
+        self.status = result
+        return result
diff --git a/src/backend/langflow/components/models/OpenAI.py b/src/backend/langflow/components/models/OpenAI.py
index 45fa12e23..1cc352b20 100644
--- a/src/backend/langflow/components/models/OpenAI.py
+++ b/src/backend/langflow/components/models/OpenAI.py
@@ -1,6 +1,6 @@
 from typing import Optional
 
-from langchain_community.chat_models.openai import ChatOpenAI
+from langchain_openai import ChatOpenAI
 from langflow import CustomComponent
 from langflow.field_typing import NestedDict, Text
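All three model components now follow the same pattern: build the client, call invoke(), and return the generated text rather than the raw message object. With langchain-core, chat models such as AzureChatOpenAI return an AIMessage whose content attribute holds the text, while completion-style LLMs such as Bedrock return a plain string; the hasattr guard covers both cases. A minimal sketch of that pattern outside Langflow (the endpoint, deployment, and key below are placeholders, not real values):

# Sketch of the invoke-and-extract pattern these components standardize on.
# Assumes the langchain-openai package is installed.
from langchain_openai import AzureChatOpenAI

llm = AzureChatOpenAI(
    azure_endpoint="https://example-resource.azure.openai.com/",  # placeholder
    azure_deployment="gpt-4",                                     # placeholder
    api_version="2023-12-01-preview",
    api_key="...",                                                # placeholder
    temperature=0.7,
    max_tokens=1000,
)

message = llm.invoke("Hello!")  # chat models return an AIMessage, not a str
# Chat models carry the text in .content; completion-style LLMs return a plain
# string, so the hasattr guard handles either kind of model uniformly.
result = message.content if hasattr(message, "content") else message
print(result)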