# NOTE(review): SOURCE is a unified diff; this is the post-patch `build` method of
# AzureChatOpenAIComponent — the only definition fully visible in the hunks —
# restyled with identical behavior. Sibling definitions (class attributes,
# build_config) are cut mid-hunk in the patch and are deliberately not reproduced.
def build(
    self,
    model: str,
    azure_endpoint: str,
    azure_deployment: str,
    api_key: str,
    api_version: str,
    temperature: float = 0.7,
    max_tokens: Optional[int] = 1000,
) -> BaseLanguageModel:
    """Build an ``AzureChatOpenAI`` chat model from the component's fields.

    Args:
        model: Model name (the UI offers ``AZURE_OPENAI_MODELS``).
        azure_endpoint: Base URL of the Azure OpenAI resource, e.g.
            ``https://example-resource.azure.openai.com/``.
        azure_deployment: Name of the Azure deployment to target.
        api_key: Azure OpenAI API key.
        api_version: REST API version (the UI offers ``AZURE_OPENAI_API_VERSIONS``).
        temperature: Sampling temperature (default 0.7).
        max_tokens: Optional cap on generated tokens (default 1000).

    Returns:
        A configured ``AzureChatOpenAI`` instance.

    Raises:
        ValueError: If instantiating the client fails for any reason; the
            underlying exception is chained as ``__cause__``.
    """
    try:
        # NOTE(review): instantiation presumably only validates configuration
        # rather than opening a connection, so the "Could not connect" wording
        # below may be inaccurate — kept byte-identical as it is a user-facing
        # message following this component family's convention; confirm before
        # changing.
        return AzureChatOpenAI(
            model=model,
            azure_endpoint=azure_endpoint,
            azure_deployment=azure_deployment,
            api_version=api_version,
            api_key=api_key,
            temperature=temperature,
            max_tokens=max_tokens,
        )
    except Exception as exc:
        raise ValueError("Could not connect to AzureOpenAI API.") from exc