Update AmazonBedrock and add AzureOpenAI model

This commit is contained in:
anovazzi1 2024-02-16 14:57:46 -03:00
commit 3579150bf0
3 changed files with 109 additions and 4 deletions

View file

@@ -60,6 +60,7 @@ class AmazonBedrockComponent(CustomComponent):
) # type: ignore
except Exception as e:
raise ValueError("Could not connect to AmazonBedrock API.") from e
message = output.invoke(input=inputs)
self.status = message
return message
message = output.invoke(inputs)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -0,0 +1,104 @@
from typing import Optional
from langflow import CustomComponent
from langchain.llms.base import BaseLanguageModel
from langchain_openai import AzureChatOpenAI
class AzureChatOpenAIComponent(CustomComponent):
    """Langflow component that sends an input string through an Azure OpenAI
    chat model and returns the model's text response.
    """

    display_name: str = "AzureOpenAI model"
    description: str = "LLM model from Azure OpenAI."
    documentation: str = (
        "https://python.langchain.com/docs/integrations/llms/azure_openai"
    )
    beta = False

    # Deployment model names as Azure spells them (note "gpt-35-turbo",
    # not "gpt-3.5-turbo").
    AZURE_OPENAI_MODELS = [
        "gpt-35-turbo",
        "gpt-35-turbo-16k",
        "gpt-35-turbo-instruct",
        "gpt-4",
        "gpt-4-32k",
        "gpt-4-vision",
    ]

    # Supported Azure OpenAI REST API versions; the newest (last entry) is
    # used as the default in build_config.
    AZURE_OPENAI_API_VERSIONS = [
        "2023-03-15-preview",
        "2023-05-15",
        "2023-06-01-preview",
        "2023-07-01-preview",
        "2023-08-01-preview",
        "2023-09-01-preview",
        "2023-12-01-preview",
    ]

    def build_config(self):
        """Describe the configuration fields rendered in the Langflow UI.

        Returns:
            dict: Field name -> UI metadata (display name, default value,
            options, required/advanced flags, etc.).
        """
        return {
            "model": {
                "display_name": "Model Name",
                "value": self.AZURE_OPENAI_MODELS[0],
                "options": self.AZURE_OPENAI_MODELS,
                "required": True,
            },
            "azure_endpoint": {
                "display_name": "Azure Endpoint",
                "required": True,
                # Fixed typo: "resource.." -> "resource."
                "info": "Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`",
            },
            "azure_deployment": {
                "display_name": "Deployment Name",
                "required": True,
            },
            "api_version": {
                "display_name": "API Version",
                "options": self.AZURE_OPENAI_API_VERSIONS,
                "value": self.AZURE_OPENAI_API_VERSIONS[-1],
                "required": True,
                "advanced": True,
            },
            "api_key": {"display_name": "API Key", "required": True, "password": True},
            "temperature": {
                "display_name": "Temperature",
                "value": 0.7,
                "field_type": "float",
                "required": False,
            },
            "max_tokens": {
                "display_name": "Max Tokens",
                "value": 1000,
                "required": False,
                "field_type": "int",
                "advanced": True,
                "info": "Maximum number of tokens to generate.",
            },
            "code": {"show": False},
            # Fixed: the original declared this key twice; the duplicate was
            # silently discarded by dict literal semantics.
            "inputs": {"display_name": "Input"},
        }

    def build(
        self,
        model: str,
        azure_endpoint: str,
        inputs: str,
        azure_deployment: str,
        api_key: str,
        api_version: str,
        temperature: float = 0.7,
        max_tokens: Optional[int] = 1000,
    ) -> BaseLanguageModel:
        """Instantiate the Azure chat model and invoke it on *inputs*.

        Args:
            model: Azure deployment model name (see AZURE_OPENAI_MODELS).
            azure_endpoint: Resource endpoint URL.
            inputs: Prompt text sent to the model.
            azure_deployment: Name of the Azure deployment.
            api_key: Azure OpenAI API key.
            api_version: REST API version string.
            temperature: Sampling temperature (default 0.7).
            max_tokens: Generation cap, or None for provider default.

        Returns:
            The response text (``message.content`` when present, otherwise
            the raw invoke result).
            NOTE(review): the annotation says BaseLanguageModel, but the body
            returns the response text; kept as-is because Langflow derives the
            component's output type from this annotation — confirm upstream.

        Raises:
            ValueError: If the AzureChatOpenAI client cannot be constructed.
        """
        try:
            output = AzureChatOpenAI(
                model=model,
                azure_endpoint=azure_endpoint,
                azure_deployment=azure_deployment,
                api_version=api_version,
                api_key=api_key,
                temperature=temperature,
                max_tokens=max_tokens,
            )
        except Exception as e:
            raise ValueError("Could not connect to AzureOpenAI API.") from e
        message = output.invoke(inputs)
        # Chat models return an AIMessage; fall back to the raw value otherwise.
        result = message.content if hasattr(message, "content") else message
        self.status = result
        return result

View file

@@ -1,6 +1,6 @@
from typing import Optional
from langchain_community.chat_models.openai import ChatOpenAI
from langchain_openai import ChatOpenAI
from langflow import CustomComponent
from langflow.field_typing import NestedDict, Text