diff --git a/src/backend/langflow/components/llms/AmazonBedrock.py b/src/backend/langflow/components/llms/AmazonBedrock.py index 04785db63..306f84e30 100644 --- a/src/backend/langflow/components/llms/AmazonBedrock.py +++ b/src/backend/langflow/components/llms/AmazonBedrock.py @@ -1,7 +1,9 @@ from typing import Optional -from langflow import CustomComponent -from langchain.llms.bedrock import Bedrock + from langchain.llms.base import BaseLLM +from langchain.llms.bedrock import Bedrock + +from langflow import CustomComponent class AmazonBedrockComponent(CustomComponent): @@ -27,18 +29,32 @@ class AmazonBedrockComponent(CustomComponent): }, "credentials_profile_name": {"display_name": "Credentials Profile Name"}, "streaming": {"display_name": "Streaming", "field_type": "bool"}, - "code": {"show": False}, + "endpoint_url": {"display_name": "Endpoint URL"}, + "region_name": {"display_name": "Region Name"}, + "model_kwargs": {"display_name": "Model Kwargs"}, + "cache": {"display_name": "Cache"}, + "code": {"advanced": True}, } def build( self, model_id: str = "anthropic.claude-instant-v1", credentials_profile_name: Optional[str] = None, + region_name: Optional[str] = None, + model_kwargs: Optional[dict] = None, + endpoint_url: Optional[str] = None, + streaming: bool = False, + cache: Optional[bool] = None, ) -> BaseLLM: try: output = Bedrock( credentials_profile_name=credentials_profile_name, model_id=model_id, + region_name=region_name, + model_kwargs=model_kwargs, + endpoint_url=endpoint_url, + streaming=streaming, + cache=cache, ) # type: ignore except Exception as e: raise ValueError("Could not connect to AmazonBedrock API.") from e