Add system_message parameter to model components

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-03-25 15:06:17 -03:00
commit 6493d51bac
12 changed files with 71 additions and 12 deletions

View file

@ -36,6 +36,7 @@ class AmazonBedrockComponent(LCModelComponent):
"cache": {"display_name": "Cache"},
"code": {"advanced": True},
"input_value": {"display_name": "Input"},
"system_message": {"display_name": "System Message", "info": "System message to pass to the model."},
"stream": {
"display_name": "Stream",
"info": "Stream the response from the model.",
@ -45,6 +46,7 @@ class AmazonBedrockComponent(LCModelComponent):
def build(
self,
input_value: Text,
system_message: Optional[str] = None,
model_id: str = "anthropic.claude-instant-v1",
credentials_profile_name: Optional[str] = None,
region_name: Optional[str] = None,
@ -67,4 +69,4 @@ class AmazonBedrockComponent(LCModelComponent):
except Exception as e:
raise ValueError("Could not connect to AmazonBedrock API.") from e
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -53,12 +53,17 @@ class AnthropicLLM(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
self,
model: str,
input_value: Text,
system_message: Optional[str] = None,
anthropic_api_key: Optional[str] = None,
max_tokens: Optional[int] = None,
temperature: Optional[float] = None,
@ -80,4 +85,4 @@ class AnthropicLLM(LCModelComponent):
except Exception as e:
raise ValueError("Could not connect to Anthropic API.") from e
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -79,6 +79,10 @@ class AzureChatOpenAIComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
def build(
@ -89,6 +97,7 @@ class AzureChatOpenAIComponent(LCModelComponent):
azure_deployment: str,
api_key: str,
api_version: str,
system_message: Optional[str] = None,
temperature: float = 0.7,
max_tokens: Optional[int] = 1000,
stream: bool = False,
@ -107,4 +116,4 @@ class AzureChatOpenAIComponent(LCModelComponent):
except Exception as e:
raise ValueError("Could not connect to AzureOpenAI API.") from e
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -74,6 +74,10 @@ class QianfanChatEndpointComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -87,6 +91,7 @@ class QianfanChatEndpointComponent(LCModelComponent):
penalty_score: Optional[float] = None,
endpoint: Optional[str] = None,
stream: bool = False,
system_message: Optional[str] = None,
) -> Text:
try:
output = QianfanChatEndpoint( # type: ignore
@ -101,4 +106,4 @@ class QianfanChatEndpointComponent(LCModelComponent):
except Exception as e:
raise ValueError("Could not connect to Baidu Qianfan API.") from e
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -33,6 +33,10 @@ class CTransformersComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -52,4 +56,4 @@ class CTransformersComponent(LCModelComponent):
config=config, # noqa
)
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_result(runnable=output, stream=stream, input_value=input_value)

View file

@ -36,6 +36,10 @@ class CohereComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -44,10 +48,11 @@ class CohereComponent(LCModelComponent):
input_value: Text,
temperature: float = 0.75,
stream: bool = False,
system_message: Optional[str] = None,
) -> Text:
api_key = SecretStr(cohere_api_key)
output = ChatCohere( # type: ignore
cohere_api_key=api_key,
temperature=temperature,
)
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -56,6 +56,10 @@ class GoogleGenerativeAIComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -69,6 +73,7 @@ class GoogleGenerativeAIComponent(LCModelComponent):
top_p: Optional[float] = None,
n: Optional[int] = 1,
stream: bool = False,
system_message: Optional[str] = None,
) -> Text:
output = ChatGoogleGenerativeAI(
model=model,
@ -79,4 +84,4 @@ class GoogleGenerativeAIComponent(LCModelComponent):
n=n or 1,
google_api_key=SecretStr(google_api_key),
)
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -30,6 +30,10 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -41,6 +45,7 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
huggingfacehub_api_token: Optional[str] = None,
model_kwargs: Optional[dict] = None,
stream: bool = False,
system_message: Optional[str] = None,
) -> Text:
try:
llm = HuggingFaceEndpoint( # type: ignore
@ -53,4 +58,4 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
except Exception as e:
raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
output = ChatHuggingFace(llm=llm)
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -61,6 +61,10 @@ class LlamaCppComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -141,4 +145,4 @@ class LlamaCppComponent(LCModelComponent):
vocab_only=vocab_only,
)
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_result(runnable=output, stream=stream, input_value=input_value)

View file

@ -171,6 +171,10 @@ class ChatOllamaComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -204,6 +208,7 @@ class ChatOllamaComponent(LCModelComponent):
top_k: Optional[int] = None,
top_p: Optional[int] = None,
stream: bool = False,
system_message: Optional[str] = None,
) -> Text:
if not base_url:
base_url = "http://localhost:11434"
@ -258,4 +263,4 @@ class ChatOllamaComponent(LCModelComponent):
except Exception as e:
raise ValueError("Could not initialize Ollama LLM.") from e
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -62,6 +62,10 @@ class OpenAIModelComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -74,6 +78,7 @@ class OpenAIModelComponent(LCModelComponent):
openai_api_key: Optional[str] = None,
temperature: float = 0.7,
stream: bool = False,
system_message: Optional[str] = None,
) -> Text:
if not openai_api_base:
openai_api_base = "https://api.openai.com/v1"
@ -86,4 +91,4 @@ class OpenAIModelComponent(LCModelComponent):
temperature=temperature,
)
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)

View file

@ -63,6 +63,10 @@ class ChatVertexAIComponent(LCModelComponent):
"display_name": "Stream",
"info": "Stream the response from the model.",
},
"system_message": {
"display_name": "System Message",
"info": "System message to pass to the model.",
},
}
def build(
@ -79,6 +83,7 @@ class ChatVertexAIComponent(LCModelComponent):
top_p: float = 0.95,
verbose: bool = False,
stream: bool = False,
system_message: Optional[str] = None,
) -> Text:
try:
from langchain_google_vertexai import ChatVertexAI # type: ignore
@ -99,4 +104,4 @@ class ChatVertexAIComponent(LCModelComponent):
verbose=verbose,
)
return self.get_result(output=output, stream=stream, input_value=input_value)
return self.get_chat_result(output, stream, input_value, system_message)