Update input variable names in build functions

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-02-27 12:55:34 -03:00
commit e2e6f80461
23 changed files with 62 additions and 52 deletions

View file

@@ -23,7 +23,7 @@ class ConversationChainComponent(CustomComponent):
def build(
self,
inputs: str,
input_value: str,
llm: BaseLanguageModel,
memory: Optional[BaseMemory] = None,
) -> Text:

View file

@@ -18,7 +18,7 @@ class LLMCheckerChainComponent(CustomComponent):
def build(
self,
inputs: str,
input_value: str,
llm: BaseLanguageModel,
) -> Text:

View file

@@ -24,7 +24,7 @@ class LLMMathChainComponent(CustomComponent):
def build(
self,
inputs: Text,
input_value: Text,
llm: BaseLanguageModel,
llm_chain: LLMChain,
input_key: str = "question",

View file

@@ -27,7 +27,7 @@ class RetrievalQAComponent(CustomComponent):
self,
combine_documents_chain: BaseCombineDocumentsChain,
retriever: BaseRetriever,
inputs: str = "",
input_value: str = "",
memory: Optional[BaseMemory] = None,
input_key: str = "query",
output_key: str = "result",

View file

@@ -26,7 +26,7 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent):
def build(
self,
inputs: str,
input_value: str,
retriever: BaseRetriever,
llm: BaseLanguageModel,
chain_type: str,

View file

@@ -28,7 +28,7 @@ class SQLGeneratorComponent(CustomComponent):
def build(
self,
inputs: Text,
input_value: Text,
db: SQLDatabase,
llm: BaseLanguageModel,
top_k: int = 5,

View file

@@ -11,7 +11,7 @@ class ChatInput(CustomComponent):
def build_config(self):
return {
"message": {
"input_value": {
"input_types": ["Text"],
"display_name": "Message",
"multiline": True,
@@ -35,26 +35,26 @@ class ChatInput(CustomComponent):
self,
sender: Optional[str] = "User",
sender_name: Optional[str] = "User",
message: Optional[str] = None,
input_value: Optional[str] = None,
session_id: Optional[str] = None,
return_record: Optional[bool] = False,
) -> Union[Text, Record]:
if return_record:
if isinstance(message, Record):
if isinstance(input_value, Record):
# Update the data of the record
message.data["sender"] = sender
message.data["sender_name"] = sender_name
message.data["session_id"] = session_id
input_value.data["sender"] = sender
input_value.data["sender_name"] = sender_name
input_value.data["session_id"] = session_id
else:
message = Record(
text=message,
input_value = Record(
text=input_value,
data={
"sender": sender,
"sender_name": sender_name,
"session_id": session_id,
},
)
if not message:
message = ""
self.status = message
return message
if not input_value:
input_value = ""
self.status = input_value
return input_value

View file

@@ -17,7 +17,7 @@ class ChatOutput(CustomComponent):
def build_config(self):
return {
"message": {"input_types": ["Text"], "display_name": "Message"},
"input_value": {"input_types": ["Text"], "display_name": "Message"},
"sender": {
"options": ["Machine", "User"],
"display_name": "Sender Type",
@@ -39,25 +39,25 @@ class ChatOutput(CustomComponent):
sender: Optional[str] = "Machine",
sender_name: Optional[str] = "AI",
session_id: Optional[str] = None,
message: Optional[str] = None,
input_value: Optional[str] = None,
return_record: Optional[bool] = False,
) -> Union[Text, Record]:
if return_record:
if isinstance(message, Record):
if isinstance(input_value, Record):
# Update the data of the record
message.data["sender"] = sender
message.data["sender_name"] = sender_name
message.data["session_id"] = session_id
input_value.data["sender"] = sender
input_value.data["sender_name"] = sender_name
input_value.data["session_id"] = session_id
else:
message = Record(
text=message,
input_value = Record(
text=input_value,
data={
"sender": sender,
"sender_name": sender_name,
"session_id": session_id,
},
)
if not message:
message = ""
self.status = message
return message
if not input_value:
input_value = ""
self.status = input_value
return input_value

View file

@@ -39,7 +39,7 @@ class AmazonBedrockComponent(CustomComponent):
def build(
self,
inputs: str,
input_value: str,
model_id: str = "anthropic.claude-instant-v1",
credentials_profile_name: Optional[str] = None,
region_name: Optional[str] = None,

View file

@@ -9,7 +9,9 @@ from langflow.field_typing import Text
class AnthropicLLM(CustomComponent):
display_name: str = "AnthropicModel"
description: str = "Generate text using Anthropic Chat&Completion large language models."
description: str = (
"Generate text using Anthropic Chat&Completion large language models."
)
def build_config(self):
return {
@@ -53,7 +55,7 @@ class AnthropicLLM(CustomComponent):
def build(
self,
model: str,
inputs: str,
input_value: str,
anthropic_api_key: Optional[str] = None,
max_tokens: Optional[int] = None,
temperature: Optional[float] = None,
@@ -66,7 +68,9 @@ class AnthropicLLM(CustomComponent):
try:
output = ChatAnthropic(
model_name=model,
anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None),
anthropic_api_key=(
SecretStr(anthropic_api_key) if anthropic_api_key else None
),
max_tokens_to_sample=max_tokens, # type: ignore
temperature=temperature,
anthropic_api_url=api_endpoint,

View file

@@ -9,7 +9,9 @@ from langflow import CustomComponent
class AzureChatOpenAIComponent(CustomComponent):
display_name: str = "AzureOpenAI Model"
description: str = "Generate text using LLM model from Azure OpenAI."
documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai"
documentation: str = (
"https://python.langchain.com/docs/integrations/llms/azure_openai"
)
beta = False
AZURE_OPENAI_MODELS = [
@@ -78,7 +80,7 @@ class AzureChatOpenAIComponent(CustomComponent):
self,
model: str,
azure_endpoint: str,
inputs: str,
input_value: str,
azure_deployment: str,
api_key: str,
api_version: str,

View file

@@ -73,7 +73,7 @@ class QianfanChatEndpointComponent(CustomComponent):
def build(
self,
inputs: str,
input_value: str,
model: str = "ERNIE-Bot-turbo",
qianfan_ak: Optional[str] = None,
qianfan_sk: Optional[str] = None,

View file

@@ -35,11 +35,13 @@ class CTransformersComponent(CustomComponent):
self,
model: str,
model_file: str,
inputs: str,
input_value: str,
model_type: str,
config: Optional[Dict] = None,
) -> Text:
output = CTransformers(model=model, model_file=model_file, model_type=model_type, config=config)
output = CTransformers(
model=model, model_file=model_file, model_type=model_type, config=config
)
message = output.invoke(inputs)
result = message.content if hasattr(message, "content") else message
self.status = result

View file

@@ -34,7 +34,7 @@ class CohereComponent(CustomComponent):
def build(
self,
cohere_api_key: str,
inputs: str,
input_value: str,
max_tokens: int = 256,
temperature: float = 0.75,
) -> Text:

View file

@@ -57,7 +57,7 @@ class GoogleGenerativeAIComponent(CustomComponent):
self,
google_api_key: str,
model: str,
inputs: str,
input_value: str,
max_output_tokens: Optional[int] = None,
temperature: float = 0.1,
top_k: Optional[int] = None,

View file

@@ -4,7 +4,6 @@ from langchain_community.chat_models.huggingface import ChatHuggingFace
from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
from langflow import CustomComponent
from langflow.field_typing import Text
@@ -30,7 +29,7 @@ class HuggingFaceEndpointsComponent(CustomComponent):
def build(
self,
inputs: str,
input_value: str,
endpoint_url: str,
task: str = "text2text-generation",
huggingfacehub_api_token: Optional[str] = None,

View file

@@ -62,7 +62,7 @@ class LlamaCppComponent(CustomComponent):
def build(
self,
model_path: str,
inputs: str,
input_value: str,
grammar: Optional[str] = None,
cache: Optional[bool] = None,
client: Optional[Any] = None,

View file

@@ -171,7 +171,7 @@ class ChatOllamaComponent(CustomComponent):
self,
base_url: Optional[str],
model: str,
inputs: str,
input_value: str,
mirostat: Optional[str],
mirostat_eta: Optional[float] = None,
mirostat_tau: Optional[float] = None,

View file

@@ -1,6 +1,7 @@
from typing import Optional
from langchain_openai import ChatOpenAI
from langflow import CustomComponent
from langflow.field_typing import NestedDict, Text
@@ -60,7 +61,7 @@ class OpenAIModelComponent(CustomComponent):
def build(
self,
inputs: Text,
input_value: Text,
max_tokens: Optional[int] = 256,
model_kwargs: NestedDict = {},
model_name: str = "gpt-4-1106-preview",

View file

@@ -62,7 +62,7 @@ class ChatVertexAIComponent(CustomComponent):
def build(
self,
inputs: str,
input_value: str,
credentials: Optional[str],
project: str,
examples: Optional[List[BaseMessage]] = [],

View file

@@ -32,7 +32,7 @@ class RunnableExecComponent(CustomComponent):
def build(
self,
input_key: str,
inputs: str,
input_value: str,
runnable: Runnable,
output_key: str = "output",
) -> Text:

View file

@@ -2,6 +2,7 @@ from typing import List, Optional
import chromadb # type: ignore
from langchain_community.vectorstores.chroma import Chroma
from langflow import CustomComponent
from langflow.field_typing import Embeddings, Text
from langflow.schema import Record, docs_to_records
@@ -57,7 +58,7 @@ class ChromaSearchComponent(CustomComponent):
def build(
self,
inputs: Text,
input_value: Text,
search_type: str,
collection_name: str,
embedding: Embeddings,
@@ -92,7 +93,8 @@ class ChromaSearchComponent(CustomComponent):
if chroma_server_host is not None:
chroma_settings = chromadb.config.Settings(
chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None,
chroma_server_cors_allow_origins=chroma_server_cors_allow_origins
or None,
chroma_server_host=chroma_server_host,
chroma_server_port=chroma_server_port or None,
chroma_server_grpc_port=chroma_server_grpc_port or None,

View file

@@ -47,10 +47,10 @@ class CustomComponent(Component):
"""The icon of the component. It should be an emoji. Defaults to None."""
is_input: Optional[bool] = None
"""The input state of the component. Defaults to None.
If True, the component must have a field named 'message'."""
If True, the component must have a field named 'input_value'."""
is_output: Optional[bool] = None
"""The output state of the component. Defaults to None.
If True, the component must have a field named 'message'."""
If True, the component must have a field named 'input_value'."""
code: Optional[str] = None
"""The code of the component. Defaults to None."""
field_config: dict = {}