diff --git a/src/backend/base/langflow/components/models/AzureOpenAIModel.py b/src/backend/base/langflow/components/models/AzureOpenAIModel.py
index fb24c24de..7b5a73b9e 100644
--- a/src/backend/base/langflow/components/models/AzureOpenAIModel.py
+++ b/src/backend/base/langflow/components/models/AzureOpenAIModel.py
@@ -52,36 +52,28 @@ class AzureChatOpenAIComponent(LCModelComponent):
                 "display_name": "Model Name",
                 "value": self.AZURE_OPENAI_MODELS[0],
                 "options": self.AZURE_OPENAI_MODELS,
-                "required": True,
             },
             "azure_endpoint": {
                 "display_name": "Azure Endpoint",
-                "required": True,
                 "info": "Your Azure endpoint, including the resource.. Example: `https://example-resource.azure.openai.com/`",
             },
             "azure_deployment": {
                 "display_name": "Deployment Name",
-                "required": True,
             },
             "api_version": {
                 "display_name": "API Version",
                 "options": self.AZURE_OPENAI_API_VERSIONS,
                 "value": self.AZURE_OPENAI_API_VERSIONS[-1],
-                "required": True,
                 "advanced": True,
             },
-            "api_key": {"display_name": "API Key", "required": True, "password": True},
+            "api_key": {"display_name": "API Key", "password": True},
             "temperature": {
                 "display_name": "Temperature",
-                "value": 0.1,
-                "field_type": "float",
-                "required": True,
+                "value": 0.7,
             },
             "max_tokens": {
                 "display_name": "Max Tokens",
                 "value": 1000,
-                "required": False,
-                "field_type": "int",
                 "advanced": True,
                 "info": "Maximum number of tokens to generate.",
             },
@@ -106,9 +98,9 @@ class AzureChatOpenAIComponent(LCModelComponent):
         input_value: Text,
         azure_deployment: str,
         api_version: str,
-        api_key: Optional[str] = None,
+        api_key: str,
+        temperature: float,
         system_message: Optional[str] = None,
-        temperature: float = 0.7,
         max_tokens: Optional[int] = 1000,
         stream: bool = False,
     ) -> BaseLanguageModel: