diff --git a/src/backend/langflow/components/models/AmazonBedrockModel.py b/src/backend/langflow/components/models/AmazonBedrockModel.py index 60cc27fd5..f263066df 100644 --- a/src/backend/langflow/components/models/AmazonBedrockModel.py +++ b/src/backend/langflow/components/models/AmazonBedrockModel.py @@ -36,6 +36,7 @@ class AmazonBedrockComponent(LCModelComponent): "cache": {"display_name": "Cache"}, "code": {"advanced": True}, "input_value": {"display_name": "Input"}, + "system_message": {"display_name": "System Message", "info": "System message to pass to the model."}, "stream": { "display_name": "Stream", "info": "Stream the response from the model.", @@ -45,6 +46,7 @@ class AmazonBedrockComponent(LCModelComponent): def build( self, input_value: Text, + system_message: Optional[str] = None, model_id: str = "anthropic.claude-instant-v1", credentials_profile_name: Optional[str] = None, region_name: Optional[str] = None, @@ -67,4 +69,4 @@ class AmazonBedrockComponent(LCModelComponent): except Exception as e: raise ValueError("Could not connect to AmazonBedrock API.") from e - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/AnthropicModel.py b/src/backend/langflow/components/models/AnthropicModel.py index f27c7add0..f5a3dbb40 100644 --- a/src/backend/langflow/components/models/AnthropicModel.py +++ b/src/backend/langflow/components/models/AnthropicModel.py @@ -53,12 +53,17 @@ class AnthropicLLM(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( self, model: str, input_value: Text, + system_message: Optional[str] = None, anthropic_api_key: Optional[str] = None, max_tokens: Optional[int] = None, temperature: Optional[float] = None, @@ -80,4 
+85,4 @@ class AnthropicLLM(LCModelComponent): except Exception as e: raise ValueError("Could not connect to Anthropic API.") from e - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/AzureOpenAIModel.py b/src/backend/langflow/components/models/AzureOpenAIModel.py index 6dc122e03..543df96b5 100644 --- a/src/backend/langflow/components/models/AzureOpenAIModel.py +++ b/src/backend/langflow/components/models/AzureOpenAIModel.py @@ -79,6 +79,10 @@ class AzureChatOpenAIComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -89,6 +97,7 @@ class AzureChatOpenAIComponent(LCModelComponent): azure_deployment: str, api_key: str, api_version: str, + system_message: Optional[str] = None, temperature: float = 0.7, max_tokens: Optional[int] = 1000, stream: bool = False, @@ -107,4 +116,4 @@ class AzureChatOpenAIComponent(LCModelComponent): except Exception as e: raise ValueError("Could not connect to AzureOpenAI API.") from e - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/BaiduQianfanChatModel.py b/src/backend/langflow/components/models/BaiduQianfanChatModel.py index 1619b1394..2df7c2b12 100644 --- a/src/backend/langflow/components/models/BaiduQianfanChatModel.py +++ b/src/backend/langflow/components/models/BaiduQianfanChatModel.py @@ -74,6 +74,10 @@ class QianfanChatEndpointComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, +
"system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -87,6 +91,7 @@ class QianfanChatEndpointComponent(LCModelComponent): penalty_score: Optional[float] = None, endpoint: Optional[str] = None, stream: bool = False, + system_message: Optional[str] = None, ) -> Text: try: output = QianfanChatEndpoint( # type: ignore @@ -101,4 +106,4 @@ class QianfanChatEndpointComponent(LCModelComponent): except Exception as e: raise ValueError("Could not connect to Baidu Qianfan API.") from e - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/CTransformersModel.py b/src/backend/langflow/components/models/CTransformersModel.py index 818548289..352d3cedb 100644 --- a/src/backend/langflow/components/models/CTransformersModel.py +++ b/src/backend/langflow/components/models/CTransformersModel.py @@ -33,6 +33,10 @@ class CTransformersComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -52,4 +56,4 @@ class CTransformersComponent(LCModelComponent): config=config, # noqa ) - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_result(runnable=output, stream=stream, input_value=input_value) diff --git a/src/backend/langflow/components/models/CohereModel.py b/src/backend/langflow/components/models/CohereModel.py index 9d99ab591..af76e1adc 100644 --- a/src/backend/langflow/components/models/CohereModel.py +++ b/src/backend/langflow/components/models/CohereModel.py @@ -36,6 +36,10 @@ class CohereComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System 
Message", + "info": "System message to pass to the model.", + }, } def build( @@ -44,10 +48,11 @@ class CohereComponent(LCModelComponent): input_value: Text, temperature: float = 0.75, stream: bool = False, + system_message: Optional[str] = None, ) -> Text: api_key = SecretStr(cohere_api_key) output = ChatCohere( # type: ignore cohere_api_key=api_key, temperature=temperature, ) - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/GoogleGenerativeAIModel.py b/src/backend/langflow/components/models/GoogleGenerativeAIModel.py index 289fef7c7..663238219 100644 --- a/src/backend/langflow/components/models/GoogleGenerativeAIModel.py +++ b/src/backend/langflow/components/models/GoogleGenerativeAIModel.py @@ -56,6 +56,10 @@ class GoogleGenerativeAIComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -69,6 +73,7 @@ class GoogleGenerativeAIComponent(LCModelComponent): top_p: Optional[float] = None, n: Optional[int] = 1, stream: bool = False, + system_message: Optional[str] = None, ) -> Text: output = ChatGoogleGenerativeAI( model=model, @@ -79,4 +84,4 @@ class GoogleGenerativeAIComponent(LCModelComponent): n=n or 1, google_api_key=SecretStr(google_api_key), ) - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/HuggingFaceModel.py b/src/backend/langflow/components/models/HuggingFaceModel.py index 413ebf462..7090fb445 100644 --- a/src/backend/langflow/components/models/HuggingFaceModel.py +++ b/src/backend/langflow/components/models/HuggingFaceModel.py @@ -30,6 +30,10 @@ class 
HuggingFaceEndpointsComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -41,6 +45,7 @@ class HuggingFaceEndpointsComponent(LCModelComponent): huggingfacehub_api_token: Optional[str] = None, model_kwargs: Optional[dict] = None, stream: bool = False, + system_message: Optional[str] = None, ) -> Text: try: llm = HuggingFaceEndpoint( # type: ignore @@ -53,4 +58,4 @@ class HuggingFaceEndpointsComponent(LCModelComponent): except Exception as e: raise ValueError("Could not connect to HuggingFace Endpoints API.") from e output = ChatHuggingFace(llm=llm) - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/LlamaCppModel.py b/src/backend/langflow/components/models/LlamaCppModel.py index d25e66405..621089979 100644 --- a/src/backend/langflow/components/models/LlamaCppModel.py +++ b/src/backend/langflow/components/models/LlamaCppModel.py @@ -61,6 +61,10 @@ class LlamaCppComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -141,4 +145,4 @@ class LlamaCppComponent(LCModelComponent): vocab_only=vocab_only, ) - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_result(runnable=output, stream=stream, input_value=input_value) diff --git a/src/backend/langflow/components/models/OllamaModel.py b/src/backend/langflow/components/models/OllamaModel.py index 559c4a6a2..14f9c59d3 100644 --- a/src/backend/langflow/components/models/OllamaModel.py +++ b/src/backend/langflow/components/models/OllamaModel.py @@ -171,6 +171,10 @@ class 
ChatOllamaComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -204,6 +208,7 @@ class ChatOllamaComponent(LCModelComponent): top_k: Optional[int] = None, top_p: Optional[int] = None, stream: bool = False, + system_message: Optional[str] = None, ) -> Text: if not base_url: base_url = "http://localhost:11434" @@ -258,4 +263,4 @@ class ChatOllamaComponent(LCModelComponent): except Exception as e: raise ValueError("Could not initialize Ollama LLM.") from e - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/OpenAIModel.py b/src/backend/langflow/components/models/OpenAIModel.py index 37dcd5c5b..e68e7736e 100644 --- a/src/backend/langflow/components/models/OpenAIModel.py +++ b/src/backend/langflow/components/models/OpenAIModel.py @@ -62,6 +62,10 @@ class OpenAIModelComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -74,6 +78,7 @@ class OpenAIModelComponent(LCModelComponent): openai_api_key: Optional[str] = None, temperature: float = 0.7, stream: bool = False, + system_message: Optional[str] = None, ) -> Text: if not openai_api_base: openai_api_base = "https://api.openai.com/v1" @@ -86,4 +91,4 @@ class OpenAIModelComponent(LCModelComponent): temperature=temperature, ) - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/langflow/components/models/VertexAiModel.py b/src/backend/langflow/components/models/VertexAiModel.py index 
9ff8be561..e5281da16 100644 --- a/src/backend/langflow/components/models/VertexAiModel.py +++ b/src/backend/langflow/components/models/VertexAiModel.py @@ -63,6 +63,10 @@ class ChatVertexAIComponent(LCModelComponent): "display_name": "Stream", "info": "Stream the response from the model.", }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + }, } def build( @@ -79,6 +83,7 @@ class ChatVertexAIComponent(LCModelComponent): top_p: float = 0.95, verbose: bool = False, stream: bool = False, + system_message: Optional[str] = None, ) -> Text: try: from langchain_google_vertexai import ChatVertexAI # type: ignore @@ -99,4 +104,4 @@ class ChatVertexAIComponent(LCModelComponent): verbose=verbose, ) - return self.get_result(output=output, stream=stream, input_value=input_value) + return self.get_chat_result(output, stream, input_value, system_message)