Update input_value type to Text

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-02-28 21:38:41 -03:00
commit 92ceaa7e19
38 changed files with 93 additions and 70 deletions

View file

@@ -23,7 +23,7 @@ class ConversationChainComponent(CustomComponent):
def build(
self,
input_value: str,
input_value: Text,
llm: BaseLanguageModel,
memory: Optional[BaseMemory] = None,
) -> Text:
@@ -34,7 +34,7 @@ class ConversationChainComponent(CustomComponent):
result = chain.invoke({chain.input_key: input_value})
# result is an AIMessage which is a subclass of BaseMessage
# We need to check if it is a string or a BaseMessage
result_str: str = ""
result_str: Text = ""
if hasattr(result, "content") and isinstance(result.content, str):
result_str = result.content
@@ -43,6 +43,6 @@ class ConversationChainComponent(CustomComponent):
result_str = result
else:
# is dict
result_str = result.get("response")
result_str = Text(result.get("response"))
self.status = result_str
return result_str

View file

@@ -18,13 +18,13 @@ class LLMCheckerChainComponent(CustomComponent):
def build(
self,
input_value: str,
input_value: Text,
llm: BaseLanguageModel,
) -> Text:
chain = LLMCheckerChain.from_llm(llm=llm)
response = chain.invoke({chain.input_key: input_value})
result = response.get(chain.output_key, "")
result_str = str(result)
result_str = Text(result)
self.status = result_str
return result_str

View file

@@ -40,6 +40,6 @@ class LLMMathChainComponent(CustomComponent):
)
response = chain.invoke({input_key: input_value})
result = response.get(output_key)
result_str = str(result)
result_str = Text(result)
self.status = result_str
return result_str

View file

@@ -57,6 +57,6 @@ class RetrievalQAComponent(CustomComponent):
references_str = self.create_references_from_records(records)
result_str = result.get("result", "")
final_result = "\n".join([str(result_str), references_str])
final_result = "\n".join([Text(result_str), references_str])
self.status = final_result
return final_result # OK

View file

@@ -26,7 +26,7 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent):
def build(
self,
input_value: str,
input_value: Text,
retriever: BaseRetriever,
llm: BaseLanguageModel,
chain_type: str,
@@ -52,7 +52,7 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent):
references_str = ""
if return_source_documents:
references_str = self.create_references_from_records(records)
result_str = str(result.get("answer", ""))
result_str = Text(result.get("answer", ""))
final_result = "\n".join([result_str, references_str])
self.status = final_result
return final_result

View file

@@ -1,6 +1,6 @@
from concurrent import futures
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Text
from langflow import CustomComponent
from langflow.schema import Record
@@ -71,7 +71,9 @@ class GatherRecordsComponent(CustomComponent):
glob = "**/*" if recursive else "*"
paths = walk_level(path_obj, depth) if depth else path_obj.glob(glob)
file_paths = [
str(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p)
Text(p)
for p in paths
if p.is_file() and match_types(p) and is_not_hidden(p)
]
return file_paths
@@ -90,7 +92,7 @@ class GatherRecordsComponent(CustomComponent):
return None
# Create a Record
text = "\n\n".join([str(el) for el in elements])
text = "\n\n".join([Text(el) for el in elements])
metadata = elements.metadata if hasattr(elements, "metadata") else {}
metadata["file_path"] = file_path
record = Record(text=text, data=metadata)
@@ -136,7 +138,7 @@ class GatherRecordsComponent(CustomComponent):
recursive: bool = True,
silent_errors: bool = False,
use_multithreading: bool = True,
) -> List[Record]:
) -> List[Optional[Record]]:
if types is None:
types = []
resolved_path = self.resolve_path(path)

View file

@@ -98,7 +98,7 @@ class ChatComponent(CustomComponent):
if not input_value:
input_value = ""
if return_record and input_value_record:
result = input_value_record
result: Union[Text, Record] = input_value_record
else:
result = input_value
self.status = result

View file

@@ -139,7 +139,7 @@ class ChatLiteLLMComponent(CustomComponent):
"OpenRouter": "openrouter_api_key",
}
# Set the API key based on the provider
api_keys = {v: None for v in provider_map.values()}
api_keys: dict[str, Optional[str]] = {v: None for v in provider_map.values()}
if variable_name := provider_map.get(provider):
api_keys[variable_name] = api_key

View file

@@ -44,7 +44,7 @@ class AmazonBedrockComponent(LCModelComponent):
def build(
self,
input_value: str,
input_value: Text,
model_id: str = "anthropic.claude-instant-v1",
credentials_profile_name: Optional[str] = None,
region_name: Optional[str] = None,

View file

@@ -60,7 +60,7 @@ class AnthropicLLM(LCModelComponent):
def build(
self,
model: str,
input_value: str,
input_value: Text,
anthropic_api_key: Optional[str] = None,
max_tokens: Optional[int] = None,
temperature: Optional[float] = None,

View file

@@ -2,9 +2,10 @@ from typing import Optional
from langchain.llms.base import BaseLanguageModel
from langchain_openai import AzureChatOpenAI
from pydantic.v1 import SecretStr
from langflow.components.models.base.model import LCModelComponent
from pydantic.v1 import SecretStr
from langflow.field_typing import Text
class AzureChatOpenAIComponent(LCModelComponent):
@@ -86,7 +87,7 @@ class AzureChatOpenAIComponent(LCModelComponent):
self,
model: str,
azure_endpoint: str,
input_value: str,
input_value: Text,
azure_deployment: str,
api_key: str,
api_version: str,

View file

@@ -78,7 +78,7 @@ class QianfanChatEndpointComponent(LCModelComponent):
def build(
self,
input_value: str,
input_value: Text,
model: str = "ERNIE-Bot-turbo",
qianfan_ak: Optional[str] = None,
qianfan_sk: Optional[str] = None,

View file

@@ -39,7 +39,7 @@ class CTransformersComponent(LCModelComponent):
self,
model: str,
model_file: str,
input_value: str,
input_value: Text,
model_type: str,
stream: bool = False,
config: Optional[Dict] = None,

View file

@@ -40,7 +40,7 @@ class CohereComponent(LCModelComponent):
def build(
self,
cohere_api_key: str,
input_value: str,
input_value: Text,
temperature: float = 0.75,
stream: bool = False,
) -> Text:

View file

@@ -62,7 +62,7 @@ class GoogleGenerativeAIComponent(LCModelComponent):
self,
google_api_key: str,
model: str,
input_value: str,
input_value: Text,
max_output_tokens: Optional[int] = None,
temperature: float = 0.1,
top_k: Optional[int] = None,

View file

@@ -34,7 +34,7 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
def build(
self,
input_value: str,
input_value: Text,
endpoint_url: str,
model: Optional[str] = None,
task: str = "text2text-generation",

View file

@@ -66,7 +66,7 @@ class LlamaCppComponent(LCModelComponent):
def build(
self,
model_path: str,
input_value: str,
input_value: Text,
grammar: Optional[str] = None,
cache: Optional[bool] = None,
client: Optional[Any] = None,

View file

@@ -177,7 +177,7 @@ class ChatOllamaComponent(LCModelComponent):
self,
base_url: Optional[str],
model: str,
input_value: str,
input_value: Text,
mirostat: Optional[str],
mirostat_eta: Optional[float] = None,
mirostat_tau: Optional[float] = None,

View file

@@ -9,8 +9,7 @@ from langflow.field_typing import Text
class ChatVertexAIComponent(LCModelComponent):
display_name = "ChatVertexAIModel"
description = "Generate text using Vertex AI Chat large language models API."
icon="VertexAI"
icon = "VertexAI"
def build_config(self):
return {
@@ -68,7 +67,7 @@ class ChatVertexAIComponent(LCModelComponent):
def build(
self,
input_value: str,
input_value: Text,
credentials: Optional[str],
project: str,
examples: Optional[List[BaseMessage]] = [],

View file

@@ -20,7 +20,7 @@ class PromptComponent(CustomComponent):
template: Prompt,
**kwargs,
) -> Text:
prompt_template = PromptTemplate.from_template(str(template))
prompt_template = PromptTemplate.from_template(Text(template))
attributes_to_check = ["text", "page_content"]
for key, value in kwargs.items():

View file

@@ -1,7 +1,8 @@
from typing import Optional
from typing import Optional, Text
import requests
from langchain_core.documents import Document
from langflow import CustomComponent
from langflow.services.database.models.base import orjson_dumps
@@ -31,7 +32,9 @@ class GetRequest(CustomComponent):
},
}
def get_document(self, session: requests.Session, url: str, headers: Optional[dict], timeout: int) -> Document:
def get_document(
self, session: requests.Session, url: str, headers: Optional[dict], timeout: int
) -> Document:
try:
response = session.get(url, headers=headers, timeout=int(timeout))
try:
@@ -55,7 +58,7 @@
)
except Exception as exc:
return Document(
page_content=str(exc),
page_content=Text(exc),
metadata={"source": url, "headers": headers, "status_code": 500},
)

View file

@@ -9,7 +9,7 @@ class UUIDGeneratorComponent(CustomComponent):
description = "Generates a unique ID."
def generate(self, *args, **kwargs):
return str(uuid.uuid4().hex)
return Text(uuid.uuid4().hex)
def build_config(self):
return {"unique_id": {"display_name": "Value", "value": self.generate}}

View file

@@ -1,7 +1,8 @@
from typing import Optional
from typing import Optional, Text
import requests
from langchain_core.documents import Document
from langflow import CustomComponent
from langflow.services.database.models.base import orjson_dumps
@@ -47,7 +48,7 @@ class PostRequest(CustomComponent):
)
except Exception as exc:
return Document(
page_content=str(exc),
page_content=Text(exc),
metadata={
"source": url,
"headers": headers,
@@ -66,12 +67,16 @@
if not isinstance(document, list) and isinstance(document, Document):
documents: list[Document] = [document]
elif isinstance(document, list) and all(isinstance(doc, Document) for doc in document):
elif isinstance(document, list) and all(
isinstance(doc, Document) for doc in document
):
documents = document
else:
raise ValueError("document must be a Document or a list of Documents")
with requests.Session() as session:
documents = [self.post_document(session, doc, url, headers) for doc in documents]
documents = [
self.post_document(session, doc, url, headers) for doc in documents
]
self.repr_value = documents
return documents

View file

@@ -32,7 +32,7 @@ class RunnableExecComponent(CustomComponent):
def build(
self,
input_key: str,
input_value: str,
input_value: Text,
runnable: Runnable,
output_key: str = "output",
) -> Text:

View file

@@ -40,7 +40,7 @@ class SQLExecutorComponent(CustomComponent):
result = tool.run(query, include_columns=include_columns)
self.status = result
except Exception as e:
result = str(e)
result = Text(e)
self.status = result
if not passthrough:
raise e

View file

@@ -1,4 +1,5 @@
# Implement ShouldRunNext component
from typing import Text
from langchain_core.prompts import PromptTemplate
from langflow import CustomComponent
@@ -23,7 +24,7 @@ class ShouldRunNext(CustomComponent):
def build(self, template: Prompt, llm: BaseLanguageModel, **kwargs) -> dict:
# This is a simple component that always returns True
prompt_template = PromptTemplate.from_template(str(template))
prompt_template = PromptTemplate.from_template(Text(template))
attributes_to_check = ["text", "page_content"]
for key, value in kwargs.items():

View file

@@ -1,7 +1,8 @@
from typing import List, Optional
from typing import List, Optional, Text
import requests
from langchain_core.documents import Document
from langflow import CustomComponent
from langflow.services.database.models.base import orjson_dumps
@@ -40,7 +41,9 @@ class UpdateRequest(CustomComponent):
) -> Document:
try:
if method == "PATCH":
response = session.patch(url, headers=headers, data=document.page_content)
response = session.patch(
url, headers=headers, data=document.page_content
)
elif method == "PUT":
response = session.put(url, headers=headers, data=document.page_content)
else:
@@ -61,7 +64,7 @@
)
except Exception as exc:
return Document(
page_content=str(exc),
page_content=Text(exc),
metadata={"source": url, "headers": headers, "status_code": 500},
)
@@ -77,12 +80,17 @@
if not isinstance(document, list) and isinstance(document, Document):
documents: list[Document] = [document]
elif isinstance(document, list) and all(isinstance(doc, Document) for doc in document):
elif isinstance(document, list) and all(
isinstance(doc, Document) for doc in document
):
documents = document
else:
raise ValueError("document must be a Document or a list of Documents")
with requests.Session() as session:
documents = [self.update_document(session, doc, url, headers, method) for doc in documents]
documents = [
self.update_document(session, doc, url, headers, method)
for doc in documents
]
self.repr_value = documents
return documents

View file

@@ -1,4 +1,4 @@
from typing import List, Union
from typing import List, Text, Union
from langchain.schema import BaseRetriever
from langchain_community.vectorstores import VectorStore
@@ -35,5 +35,5 @@ class FAISSComponent(CustomComponent):
if not folder_path:
raise ValueError("Folder path is required to save the FAISS index.")
path = self.resolve_path(folder_path)
vector_store.save_local(str(path), index_name)
vector_store.save_local(Text(path), index_name)
return vector_store

View file

@@ -3,7 +3,7 @@ from typing import List
from langchain_community.vectorstores.faiss import FAISS
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.field_typing import Embeddings
from langflow.field_typing import Embeddings, Text
from langflow.schema import Record
@@ -26,7 +26,7 @@ class FAISSSearchComponent(LCVectorStoreComponent):
def build(
self,
input_value: str,
input_value: Text,
embedding: Embeddings,
folder_path: str,
index_name: str = "langflow_index",
@@ -35,7 +35,7 @@ class FAISSSearchComponent(LCVectorStoreComponent):
raise ValueError("Folder path is required to save the FAISS index.")
path = self.resolve_path(folder_path)
vector_store = FAISS.load_local(
folder_path=str(path), embeddings=embedding, index_name=index_name
folder_path=Text(path), embeddings=embedding, index_name=index_name
)
if not vector_store:
raise ValueError("Failed to load the FAISS index.")

View file

@@ -2,7 +2,7 @@ from typing import List, Optional
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.MongoDBAtlasVector import MongoDBAtlasComponent
from langflow.field_typing import Embeddings, NestedDict
from langflow.field_typing import Embeddings, NestedDict, Text
from langflow.schema import Record
@@ -27,7 +27,7 @@ class MongoDBAtlasSearchComponent(MongoDBAtlasComponent, LCVectorStoreComponent)
def build( # type: ignore[override]
self,
input_value: str,
input_value: Text,
search_type: str,
embedding: Embeddings,
collection_name: str = "",

View file

@@ -2,7 +2,7 @@ from typing import List, Optional
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Pinecone import PineconeComponent
from langflow.field_typing import Embeddings
from langflow.field_typing import Embeddings, Text
from langflow.schema import Record
@@ -42,7 +42,7 @@ class PineconeSearchComponent(PineconeComponent, LCVectorStoreComponent):
def build( # type: ignore[override]
self,
input_value: str,
input_value: Text,
embedding: Embeddings,
pinecone_env: str,
text_key: str = "text",

View file

@@ -2,15 +2,14 @@ from typing import List, Optional
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Qdrant import QdrantComponent
from langflow.field_typing import Embeddings, NestedDict
from langflow.field_typing import Embeddings, NestedDict, Text
from langflow.schema import Record
class QdrantSearchComponent(QdrantComponent, LCVectorStoreComponent):
display_name = "Qdrant Search"
description = "Construct Qdrant wrapper from a list of texts."
icon="Qdrant"
icon = "Qdrant"
def build_config(self):
return {
@@ -46,7 +45,7 @@ class QdrantSearchComponent(QdrantComponent, LCVectorStoreComponent):
def build( # type: ignore[override]
self,
input_value: str,
input_value: Text,
embedding: Embeddings,
collection_name: str,
search_type: str = "similarity",

View file

@@ -4,6 +4,7 @@ from langchain.embeddings.base import Embeddings
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Redis import RedisComponent
from langflow.field_typing import Text
from langflow.schema import Record
@@ -44,7 +45,7 @@ class RedisSearchComponent(RedisComponent, LCVectorStoreComponent):
def build( # type: ignore[override]
self,
input_value: str,
input_value: Text,
search_type: str,
embedding: Embeddings,
redis_server_url: str,

View file

@@ -4,14 +4,14 @@ from langchain_community.vectorstores.supabase import SupabaseVectorStore
from supabase.client import Client, create_client
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.field_typing import Embeddings
from langflow.field_typing import Embeddings, Text
from langflow.schema import Record
class SupabaseSearchComponent(LCVectorStoreComponent):
display_name = "Supabase Search"
description = "Search a Supabase Vector Store for similar documents."
icon="Supabase"
icon = "Supabase"
def build_config(self):
return {
@@ -30,7 +30,7 @@ class SupabaseSearchComponent(LCVectorStoreComponent):
def build(
self,
input_value: str,
input_value: Text,
search_type: str,
embedding: Embeddings,
query_name: str = "",

View file

@@ -4,6 +4,7 @@ from langchain_community.vectorstores.vectara import Vectara
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Vectara import VectaraComponent
from langflow.field_typing import Text
from langflow.schema import Record
@@ -14,7 +15,7 @@ class VectaraSearchComponent(VectaraComponent, LCVectorStoreComponent):
"https://python.langchain.com/docs/integrations/vectorstores/vectara"
)
beta = True
icon="Vectara"
icon = "Vectara"
field_config = {
"search_type": {
@@ -44,7 +45,7 @@ class VectaraSearchComponent(VectaraComponent, LCVectorStoreComponent):
def build( # type: ignore[override]
self,
input_value: str,
input_value: Text,
search_type: str,
vectara_customer_id: str,
vectara_corpus_id: str,

View file

@@ -4,6 +4,7 @@ from langchain.embeddings.base import Embeddings
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Weaviate import WeaviateVectorStoreComponent
from langflow.field_typing import Text
from langflow.schema import Record
@@ -14,7 +15,7 @@ class WeaviateSearchVectorStore(WeaviateVectorStoreComponent, LCVectorStoreCompo
"https://python.langchain.com/docs/integrations/vectorstores/weaviate"
)
beta = True
icon="Weaviate"
icon = "Weaviate"
field_config = {
"search_type": {
@@ -57,7 +58,7 @@ class WeaviateSearchVectorStore(WeaviateVectorStoreComponent, LCVectorStoreCompo
def build( # type: ignore[override]
self,
input_value: str,
input_value: Text,
search_type: str,
url: str,
search_by_text: bool = False,

View file

@@ -5,7 +5,8 @@ from langchain_core.retrievers import BaseRetriever
from langchain_core.vectorstores import VectorStore
from langflow import CustomComponent
from langflow.schema import Record, docs_to_records
from langflow.field_typing import Text, docs_to_records
from langflow.schema import Record
class LCVectorStoreComponent(CustomComponent):
@@ -16,7 +17,7 @@ class LCVectorStoreComponent(CustomComponent):
def search_with_vector_store(
self,
input_value: str,
input_value: Text,
search_type: str,
vector_store: Union[VectorStore, BaseRetriever],
) -> List[Record]:

View file

@@ -4,6 +4,7 @@ from langchain.embeddings.base import Embeddings
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.pgvector import PGVectorComponent
from langflow.field_typing import Text
from langflow.schema import Record
@@ -42,7 +43,7 @@ class PGVectorSearchComponent(PGVectorComponent, LCVectorStoreComponent):
def build( # type: ignore[override]
self,
input_value: str,
input_value: Text,
embedding: Embeddings,
search_type: str,
pg_server_url: str,