Merge remote-tracking branch 'origin/dev' into zustand/io/migration

Gabriel Luiz Freitas Almeida 2024-02-15 18:39:59 -03:00
commit 5b0bf9e116
26 changed files with 1267 additions and 1111 deletions

View file

@@ -16,6 +16,7 @@ jobs:
python-version:
- "3.9"
- "3.10"
- "3.11"
steps:
- uses: actions/checkout@v4
- name: Install poetry

View file

@@ -16,6 +16,7 @@ jobs:
matrix:
python-version:
- "3.10"
- "3.11"
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
steps:

poetry.lock (generated, 2076 lines changed)

File diff suppressed because it is too large

View file

@@ -25,18 +25,20 @@ documentation = "https://docs.langflow.org"
langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
python = ">=3.9,<3.11"
python = ">=3.9,<3.12"
duckdb = "^0.9.2"
fastapi = "^0.109.0"
uvicorn = "^0.27.0"
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
google-api-python-client = "^2.118.0"
typer = "^0.9.0"
gunicorn = "^21.2.0"
langchain = "~0.1.0"
duckdb = "^0.9.2"
openai = "^1.11.0"
pandas = "2.0.3"
openai = "^1.12.0"
pandas = "2.2.0"
chromadb = "^0.4.0"
huggingface-hub = { version = "^0.20.0", extras = ["inference"] }
rich = "^13.7.0"
@@ -50,15 +52,14 @@ fake-useragent = "^1.4.0"
docstring-parser = "^0.15"
psycopg2-binary = "^2.9.6"
pyarrow = "^14.0.0"
tiktoken = "~0.5.0"
tiktoken = "~0.6.0"
wikipedia = "^1.4.0"
qdrant-client = "^1.7.0"
weaviate-client = "*"
jina = "*"
sentence-transformers = { version = "^2.3.1", optional = true }
ctransformers = { version = "^0.2.10", optional = true }
cohere = "^4.45.0"
python-multipart = "^0.0.6"
cohere = "^4.47.0"
python-multipart = "^0.0.7"
sqlmodel = "^0.0.14"
faiss-cpu = "^1.7.4"
anthropic = "^0.15.0"
@@ -67,17 +68,17 @@ multiprocess = "^0.70.14"
cachetools = "^5.3.1"
types-cachetools = "^5.3.0.5"
platformdirs = "^4.2.0"
pinecone-client = "^2.2.2"
pinecone-client = "^3.0.3"
pymongo = "^4.6.0"
supabase = "^2.3.0"
certifi = "^2023.11.17"
google-cloud-aiplatform = "^1.36.0"
google-cloud-aiplatform = "^1.42.0"
psycopg = "^3.1.9"
psycopg-binary = "^3.1.9"
fastavro = "^1.8.0"
langchain-experimental = "*"
celery = { extras = ["redis"], version = "^5.3.6", optional = true }
redis = { version = "^4.6.0", optional = true }
redis = { version = "^5.0.1", optional = true }
flower = { version = "^2.0.0", optional = true }
alembic = "^1.13.0"
passlib = "^1.7.4"
@@ -90,46 +91,46 @@ zep-python = "*"
pywin32 = { version = "^306", markers = "sys_platform == 'win32'" }
loguru = "^0.7.1"
langfuse = "^2.9.0"
pillow = "^10.0.0"
metal-sdk = "^2.4.0"
pillow = "^10.2.0"
metal-sdk = "^2.5.0"
markupsafe = "^2.1.3"
extract-msg = "^0.45.0"
extract-msg = "^0.47.0"
# jq is not available for windows
jq = { version = "^1.6.0", markers = "sys_platform != 'win32'" }
boto3 = "^1.34.0"
numexpr = "^2.8.6"
qianfan = "0.2.0"
qianfan = "0.3.0"
pgvector = "^0.2.3"
pyautogen = "^0.2.0"
langchain-google-genai = "^0.0.6"
elasticsearch = "^8.11.1"
elasticsearch = "^8.12.0"
pytube = "^15.0.0"
python-socketio = "^5.11.0"
llama-index = "^0.9.44"
langchain-openai = "^0.0.5"
llama-index = "0.9.48"
langchain-openai = "^0.0.6"
[tool.poetry.group.dev.dependencies]
pytest-asyncio = "^0.23.1"
types-redis = "^4.6.0.5"
ipykernel = "^6.27.0"
ipykernel = "^6.29.0"
mypy = "^1.8.0"
ruff = "^0.1.5"
ruff = "^0.2.1"
httpx = "*"
pytest = "^7.4.2"
pytest = "^8.0.0"
types-requests = "^2.31.0"
requests = "^2.31.0"
pytest-cov = "^4.1.0"
pandas-stubs = "^2.0.0.230412"
types-pillow = "^9.5.0.2"
pandas-stubs = "^2.1.4.231227"
types-pillow = "^10.2.0.20240213"
types-pyyaml = "^6.0.12.8"
types-python-jose = "^3.3.4.8"
types-passlib = "^1.7.7.13"
locust = "^2.19.1"
locust = "^2.23.1"
pytest-mock = "^3.12.0"
pytest-xdist = "^3.5.0"
types-pywin32 = "^306.0.0.4"
types-google-cloud-ndb = "^2.2.0.0"
pytest-sugar = "^0.9.7"
pytest-sugar = "^1.0.0"
pytest-instafail = "^0.5.0"

View file

@@ -2,8 +2,9 @@ import asyncio
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from uuid import UUID
from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
from langchain.schema import AgentAction, AgentFinish
from langchain_core.callbacks.base import (AsyncCallbackHandler,
BaseCallbackHandler)
from langflow.api.v1.schemas import ChatResponse, PromptResponse
from langflow.services.deps import get_chat_service
from langflow.utils.util import remove_ansi_escape_codes

View file

@@ -1,10 +1,8 @@
from langflow import CustomComponent
from typing import Callable, Union
from langchain.chains import LLMCheckerChain
from typing import Union, Callable
from langflow.field_typing import (
BaseLanguageModel,
Chain,
)
from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel, Chain
class LLMCheckerChainComponent(CustomComponent):
@@ -21,4 +19,4 @@ class LLMCheckerChainComponent(CustomComponent):
self,
llm: BaseLanguageModel,
) -> Union[Chain, Callable]:
return LLMCheckerChain(llm=llm)
return LLMCheckerChain.from_llm(llm=llm)
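Context for the one-line fix: constructing LLMCheckerChain directly with just an LLM is deprecated in recent langchain releases, which expect the checker's internal sub-chains to be derived via the from_llm classmethod. A minimal sketch of the new call style (FakeListLLM is an illustrative stand-in, not part of this diff):

    from langchain.chains import LLMCheckerChain
    from langchain_community.llms.fake import FakeListLLM

    llm = FakeListLLM(responses=["checked answer"])  # stand-in for a real model

    # LLMCheckerChain(llm=llm) is the deprecated direct construction;
    # from_llm builds the assertion/verification sub-chains from the LLM.
    chain = LLMCheckerChain.from_llm(llm=llm)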

View file

@@ -1,6 +1,8 @@
from langflow import CustomComponent
from typing import Any, Dict, List
from langchain.docstore.document import Document
from typing import Optional, Dict, Any
from langchain.document_loaders.directory import DirectoryLoader
from langflow import CustomComponent
class DirectoryLoaderComponent(CustomComponent):
@@ -23,20 +25,18 @@ class DirectoryLoaderComponent(CustomComponent):
self,
glob: str,
path: str,
load_hidden: Optional[bool] = False,
max_concurrency: Optional[int] = 10,
metadata: Optional[dict] = {},
recursive: Optional[bool] = True,
silent_errors: Optional[bool] = False,
use_multithreading: Optional[bool] = True,
) -> Document:
return Document(
max_concurrency: int = 2,
load_hidden: bool = False,
recursive: bool = True,
silent_errors: bool = False,
use_multithreading: bool = True,
) -> List[Document]:
return DirectoryLoader(
glob=glob,
path=path,
load_hidden=load_hidden,
max_concurrency=max_concurrency,
metadata=metadata,
recursive=recursive,
silent_errors=silent_errors,
use_multithreading=use_multithreading,
)
).load()
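The rewrite also fixes the component's contract: instead of stuffing loader arguments into a Document, it builds a real DirectoryLoader and returns the loaded documents, hence the new List[Document] return type. A minimal sketch of the new behavior (path and glob are illustrative):

    from langchain.document_loaders.directory import DirectoryLoader

    loader = DirectoryLoader(
        path="./docs",           # directory to scan
        glob="**/*.md",          # which files to pick up
        recursive=True,
        use_multithreading=True,
        max_concurrency=2,       # matches the component's new default
    )
    docs = loader.load()         # List[Document]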

View file

@@ -1,14 +1,14 @@
from langflow import CustomComponent
from typing import Optional, Dict
from typing import Dict, Optional
from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings
from langflow import CustomComponent
from pydantic.v1.types import SecretStr
class HuggingFaceInferenceAPIEmbeddingsComponent(CustomComponent):
display_name = "HuggingFaceInferenceAPIEmbeddings"
description = "HuggingFace sentence_transformers embedding models, API version."
documentation = (
"https://github.com/huggingface/text-embeddings-inference"
)
documentation = "https://github.com/huggingface/text-embeddings-inference"
def build_config(self):
return {
@@ -31,12 +31,12 @@ class HuggingFaceInferenceAPIEmbeddingsComponent(CustomComponent):
model_kwargs: Optional[Dict] = {},
multi_process: bool = False,
) -> HuggingFaceInferenceAPIEmbeddings:
if api_key:
secret_api_key = SecretStr(api_key)
else:
raise ValueError("API Key is required")
return HuggingFaceInferenceAPIEmbeddings(
api_key=api_key,
api_key=secret_api_key,
api_url=api_url,
model_name=model_name,
cache_folder=cache_folder,
encode_kwargs=encode_kwargs,
model_kwargs=model_kwargs,
multi_process=multi_process,
)
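The build now fails fast when the key is missing and wraps it in SecretStr, matching the embeddings class's declared secret field and keeping the key masked in logs and reprs. A minimal sketch (token and model name are placeholders):

    from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings
    from pydantic.v1.types import SecretStr

    embeddings = HuggingFaceInferenceAPIEmbeddings(
        api_key=SecretStr("hf_xxx"),  # placeholder token, wrapped as a secret
        model_name="sentence-transformers/all-MiniLM-L6-v2",  # illustrative model
    )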

View file

@@ -1,9 +1,9 @@
from typing import Any, Callable, Dict, List, Optional, Union
from langchain_openai.embeddings.base import OpenAIEmbeddings
from langflow import CustomComponent
from langflow.field_typing import NestedDict
from pydantic.v1.types import SecretStr
class OpenAIEmbeddingsComponent(CustomComponent):
@@ -67,7 +67,7 @@ class OpenAIEmbeddingsComponent(CustomComponent):
},
"skip_empty": {"display_name": "Skip Empty", "advanced": True},
"tiktoken_model_name": {"display_name": "TikToken Model Name"},
"tikToken_enable": {"display_name": "TikToken Enable"},
"tikToken_enable": {"display_name": "TikToken Enable", "advanced": True},
}
def build(
@@ -92,14 +92,17 @@ class OpenAIEmbeddingsComponent(CustomComponent):
request_timeout: Optional[float] = None,
show_progress_bar: bool = False,
skip_empty: bool = False,
tikToken_enable: bool = True,
tiktoken_enable: bool = True,
tiktoken_model_name: Optional[str] = None,
) -> Union[OpenAIEmbeddings, Callable]:
# This is to avoid errors with Vector Stores (e.g Chroma)
if disallowed_special == ["all"]:
disallowed_special = "all"
disallowed_special = "all" # type: ignore
api_key = SecretStr(openai_api_key) if openai_api_key else None
return OpenAIEmbeddings(
tiktoken_enabled=tikToken_enable,
tiktoken_enabled=tiktoken_enable,
default_headers=default_headers,
default_query=default_query,
allowed_special=set(allowed_special),
@@ -112,7 +115,7 @@ class OpenAIEmbeddingsComponent(CustomComponent):
model=model,
model_kwargs=model_kwargs,
base_url=openai_api_base,
api_key=openai_api_key,
api_key=api_key,
openai_api_type=openai_api_type,
api_version=openai_api_version,
organization=openai_organization,
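Two things change here: the tikToken_enable parameter is renamed to the conventional tiktoken_enable spelling, and the raw key string is converted with SecretStr(...) if openai_api_key else None before being passed as api_key. A minimal sketch of the resulting call (key and model are placeholders):

    from langchain_openai.embeddings.base import OpenAIEmbeddings
    from pydantic.v1.types import SecretStr

    emb = OpenAIEmbeddings(
        api_key=SecretStr("sk-placeholder"),  # None is also accepted when unset
        model="text-embedding-ada-002",       # illustrative model name
        tiktoken_enabled=True,
    )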

View file

@@ -1,4 +1,4 @@
from pydantic import SecretStr
from pydantic.v1.types import SecretStr
from langflow import CustomComponent
from typing import Optional, Union, Callable
from langflow.field_typing import BaseLanguageModel
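A likely rationale for this recurring import swap (an inference; the diff does not state it): langchain's model classes are still built on pydantic v1, so secret values should use the v1 compatibility type rather than the pydantic v2 one:

    from pydantic.v1.types import SecretStr  # v1 type, matches langchain's pydantic-v1 fields
    # from pydantic import SecretStr         # v2 type, the import being removed here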

View file

@@ -1,9 +1,9 @@
from typing import Optional
from langchain_google_genai import ChatGoogleGenerativeAI # type: ignore
from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel, RangeSpec, TemplateField
from pydantic.v1.types import SecretStr
class GoogleGenerativeAIComponent(CustomComponent):
@@ -63,10 +63,10 @@ class GoogleGenerativeAIComponent(CustomComponent):
) -> BaseLanguageModel:
return ChatGoogleGenerativeAI(
model=model,
max_output_tokens=max_output_tokens or None,
max_output_tokens=max_output_tokens or None, # type: ignore
temperature=temperature,
top_k=top_k or None,
top_p=top_p or None,
top_p=top_p or None, # type: ignore
n=n or 1,
google_api_key=google_api_key,
google_api_key=SecretStr(google_api_key),
)

View file

@@ -1,8 +1,7 @@
from langchain_community.agent_toolkits.openapi.toolkit import BaseToolkit, OpenAPIToolkit
from langchain_community.utilities.requests import TextRequestsWrapper
from langflow import CustomComponent
from langflow.field_typing import AgentExecutor
from typing import Callable
from langchain_community.utilities.requests import TextRequestsWrapper
from langchain_community.agent_toolkits.openapi.toolkit import OpenAPIToolkit
class OpenAPIToolkitComponent(CustomComponent):
@@ -19,5 +18,5 @@ class OpenAPIToolkitComponent(CustomComponent):
self,
json_agent: AgentExecutor,
requests_wrapper: TextRequestsWrapper,
) -> Callable:
) -> BaseToolkit:
return OpenAPIToolkit(json_agent=json_agent, requests_wrapper=requests_wrapper)

View file

@@ -1,6 +1,7 @@
from langflow import CustomComponent
from typing import Union, Callable
from typing import Callable, Union
from langchain_community.utilities.google_search import GoogleSearchAPIWrapper
from langflow import CustomComponent
class GoogleSearchAPIWrapperComponent(CustomComponent):
@@ -18,4 +19,4 @@ class GoogleSearchAPIWrapperComponent(CustomComponent):
google_api_key: str,
google_cse_id: str,
) -> Union[GoogleSearchAPIWrapper, Callable]:
return GoogleSearchAPIWrapper(google_api_key=google_api_key, google_cse_id=google_cse_id)
return GoogleSearchAPIWrapper(google_api_key=google_api_key, google_cse_id=google_cse_id) # type: ignore

View file

@@ -1,9 +1,9 @@
from langflow import CustomComponent
from typing import Dict, Optional
from typing import Dict
# Assuming the existence of GoogleSerperAPIWrapper class in the serper module
# If this class does not exist, you would need to create it or import the appropriate class from another module
from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper
from langflow import CustomComponent
class GoogleSerperAPIWrapperComponent(CustomComponent):
@@ -42,6 +42,5 @@ class GoogleSerperAPIWrapperComponent(CustomComponent):
def build(
self,
serper_api_key: str,
result_key_for_type: Optional[Dict[str, str]] = None,
) -> GoogleSerperAPIWrapper:
return GoogleSerperAPIWrapper(result_key_for_type=result_key_for_type, serper_api_key=serper_api_key)
return GoogleSerperAPIWrapper(serper_api_key=serper_api_key)

View file

@@ -5,7 +5,6 @@ import pinecone # type: ignore
from langchain.schema import BaseRetriever
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.pinecone import Pinecone
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings
@@ -31,11 +30,11 @@ class PineconeComponent(CustomComponent):
embedding: Embeddings,
pinecone_env: str,
documents: List[Document],
text_key: str = "text",
pool_threads: int = 4,
index_name: Optional[str] = None,
pinecone_api_key: Optional[str] = None,
text_key: Optional[str] = "text",
namespace: Optional[str] = "default",
pool_threads: Optional[int] = None,
) -> Union[VectorStore, Pinecone, BaseRetriever]:
if pinecone_api_key is None or pinecone_env is None:
raise ValueError("Pinecone API Key and Environment are required.")
@@ -43,6 +42,8 @@ class PineconeComponent(CustomComponent):
raise ValueError("Pinecone API Key is required.")
pinecone.init(api_key=pinecone_api_key, environment=pinecone_env) # type: ignore
if not index_name:
raise ValueError("Index Name is required.")
if documents:
return Pinecone.from_documents(
documents=documents,
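With the new guard, index_name is validated up front rather than failing deep inside the Pinecone client. A rough sketch of the ingestion path that follows (FakeEmbeddings is a stand-in, and a reachable index plus a prior pinecone.init are assumed):

    from langchain_community.embeddings import FakeEmbeddings
    from langchain_community.vectorstores.pinecone import Pinecone
    from langchain_core.documents import Document

    docs = [Document(page_content="hello pinecone")]
    vs = Pinecone.from_documents(
        documents=docs,
        embedding=FakeEmbeddings(size=768),  # stand-in embedding model
        index_name="langflow",               # now checked up front by the component
    )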

View file

@@ -1,4 +1,4 @@
from typing import List, Optional, Union
from typing import Optional, Union
from langchain.schema import BaseRetriever
from langchain_community.vectorstores import VectorStore
@@ -36,14 +36,14 @@ class QdrantComponent(CustomComponent):
def build(
self,
embedding: Embeddings,
collection_name: str,
documents: Optional[Document] = None,
api_key: Optional[str] = None,
collection_name: Optional[str] = None,
content_payload_key: str = "page_content",
distance_func: str = "Cosine",
grpc_port: Optional[int] = 6334,
host: Optional[str] = None,
grpc_port: int = 6334,
https: bool = False,
host: Optional[str] = None,
location: Optional[str] = None,
metadata_payload_key: str = "metadata",
path: Optional[str] = None,
@@ -51,14 +51,15 @@ class QdrantComponent(CustomComponent):
prefer_grpc: bool = False,
prefix: Optional[str] = None,
search_kwargs: Optional[NestedDict] = None,
timeout: Optional[float] = None,
timeout: Optional[int] = None,
url: Optional[str] = None,
) -> Union[VectorStore, Qdrant, BaseRetriever]:
if documents is None:
from qdrant_client import QdrantClient
client = QdrantClient(
location=location,
url=host,
url=host,
port=port,
grpc_port=grpc_port,
https=https,
@@ -71,17 +72,16 @@ class QdrantComponent(CustomComponent):
collection_name=collection_name,
host=host,
path=path,
)
vs = Qdrant(client=client,
collection_name=collection_name,
embeddings=embedding,
search_kwargs=search_kwargs,
distance_func=distance_func,
)
)
vs = Qdrant(
client=client,
collection_name=collection_name,
embeddings=embedding,
)
return vs
else:
vs = Qdrant.from_documents(
documents=documents,
documents=documents, # type: ignore
embedding=embedding,
api_key=api_key,
collection_name=collection_name,
@@ -99,5 +99,5 @@ class QdrantComponent(CustomComponent):
search_kwargs=search_kwargs,
timeout=timeout,
url=url,
)
)
return vs
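For the no-documents path, the component now builds a bare QdrantClient and wraps it, dropping the search_kwargs and distance_func keyword arguments that Qdrant's constructor does not accept. A minimal local sketch (in-memory client, stand-in embeddings; the collection is assumed to exist before searching):

    from qdrant_client import QdrantClient
    from langchain_community.embeddings import FakeEmbeddings
    from langchain_community.vectorstores import Qdrant

    client = QdrantClient(location=":memory:")  # local instance, no server needed
    vs = Qdrant(
        client=client,
        collection_name="docs",
        embeddings=FakeEmbeddings(size=768),  # stand-in embedding model
    )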

View file

@@ -5,7 +5,6 @@ from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.redis import Redis
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever
from langflow import CustomComponent
@@ -31,6 +30,7 @@ class RedisComponent(CustomComponent):
"code": {"show": False, "display_name": "Code"},
"documents": {"display_name": "Documents", "is_list": True},
"embedding": {"display_name": "Embedding"},
"schema": {"display_name": "Schema", "file_types": [".yaml"]},
"redis_server_url": {
"display_name": "Redis Server Connection String",
"advanced": False,
@@ -43,6 +43,7 @@ class RedisComponent(CustomComponent):
embedding: Embeddings,
redis_server_url: str,
redis_index_name: str,
schema: Optional[str] = None,
documents: Optional[Document] = None,
) -> Union[VectorStore, BaseRetriever]:
"""
@@ -58,10 +59,12 @@
- VectorStore: The Vector Store object.
"""
if documents is None:
if schema is None:
raise ValueError("If no documents are provided, a schema must be provided.")
redis_vs = Redis.from_existing_index(
embedding=embedding,
index_name=redis_index_name,
schema=None,
schema=schema,
key_prefix=None,
redis_url=redis_server_url,
)
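The new schema parameter is threaded through to Redis.from_existing_index, which needs the index's field schema when there are no documents to infer it from. A minimal sketch (URL, index name, and schema path are placeholders; the index is assumed to exist):

    from langchain_community.embeddings import FakeEmbeddings
    from langchain_community.vectorstores.redis import Redis

    vs = Redis.from_existing_index(
        embedding=FakeEmbeddings(size=768),  # stand-in embedding model
        index_name="langflow",
        schema="redis_schema.yaml",          # required when connecting without documents
        key_prefix=None,
        redis_url="redis://localhost:6379",
    )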

View file

@@ -6,7 +6,6 @@ from typing import List, Optional, Union
from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores.vectara import Vectara
from langchain_core.vectorstores import VectorStore
from langflow import CustomComponent
from langflow.field_typing import BaseRetriever, Document
@@ -46,7 +45,7 @@ class VectaraComponent(CustomComponent):
if documents is not None:
return Vectara.from_documents(
documents=documents,
documents=documents, # type: ignore
embedding=FakeEmbeddings(size=768),
vectara_customer_id=vectara_customer_id,
vectara_corpus_id=vectara_corpus_id,

View file

@@ -5,7 +5,6 @@ from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.pgvector import PGVector
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever
from langflow import CustomComponent
@@ -63,13 +62,13 @@ class PGVectorComponent(CustomComponent):
collection_name=collection_name,
connection_string=pg_server_url,
)
vector_store = PGVector.from_documents(
embedding=embedding,
documents=documents,
collection_name=collection_name,
connection_string=pg_server_url,
)
else:
vector_store = PGVector.from_documents(
embedding=embedding,
documents=documents, # type: ignore
collection_name=collection_name,
connection_string=pg_server_url,
)
except Exception as e:
raise RuntimeError(f"Failed to build PGVector: {e}")
return vector_store
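Reading of the restructure (the diff itself gives no rationale): from_documents previously ran unconditionally, so a build without documents would crash; it is now confined to the else branch, leaving the no-documents case to the plain constructor. A sketch of attaching to an existing collection without ingesting (connection string is a placeholder):

    from langchain_community.embeddings import FakeEmbeddings
    from langchain_community.vectorstores.pgvector import PGVector

    vs = PGVector(
        embedding_function=FakeEmbeddings(size=768),  # stand-in embedding model
        collection_name="langflow",
        connection_string="postgresql+psycopg2://user:pass@localhost:5432/db",
    )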

View file

@@ -37,7 +37,7 @@ class Component:
setattr(self, key, value)
# Validate the emoji at the icon field
if self.icon:
if hasattr(self, "icon") and self.icon:
self.icon = self.validate_icon(self.icon)
def __setattr__(self, key, value):
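The added hasattr guard above protects components that never set icon at all: with only if self.icon:, a missing attribute presumably raised AttributeError before validation could run. A tiny illustration (class reduced to the relevant behavior, not the real Component):

    class Component:
        def __init__(self, **kwargs):
            for key, value in kwargs.items():
                setattr(self, key, value)  # icon exists only if the caller passed it
            if hasattr(self, "icon") and self.icon:
                print("validating icon:", self.icon)

    Component(display_name="no icon")  # with `if self.icon:` alone this raised AttributeError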

View file

@@ -7,8 +7,6 @@ from loguru import logger
from langflow.api.v1.callback import AsyncStreamingLLMCallbackHandler, StreamingLLMCallbackHandler
from langflow.processing.process import fix_memory_inputs, format_actions
from langflow.services.deps import get_plugins_service
from langflow.processing.process import fix_memory_inputs, format_actions
from langflow.services.deps import get_plugins_service
if TYPE_CHECKING:
from langfuse.callback import CallbackHandler # type: ignore

View file

@@ -30,6 +30,7 @@ export default function App() {
);
const loading = useAlertStore((state) => state.loading);
const [fetchError, setFetchError] = useState(false);
const isLoading = useFlowsManagerStore((state) => state.isLoading);
const removeAlert = (id: string) => {
removeFromTempNotificationList(id);
@@ -86,7 +87,7 @@
description={FETCH_ERROR_DESCRIPION}
message={FETCH_ERROR_MESSAGE}
></FetchErrorComponent>
) : loading ? (
) : isLoading ? (
<div className="loading-page-panel">
<LoadingComponent remSize={50} />
</div>

View file

@@ -94,12 +94,6 @@ export default function ComponentsComponent({
setPageSize(10);
}
useEffect(() => {
setTimeout(() => {
setLoadingScreen(false);
}, 600);
}, []);
return (
<CardsWrapComponent
onFileDrop={onFileDrop}
@@ -107,7 +101,7 @@
>
<div className="flex h-full w-full flex-col justify-between">
<div className="flex w-full flex-col gap-4">
{!loadingScreen && data.length === 0 ? (
{!isLoading && data.length === 0 ? (
<div className="mt-6 flex w-full items-center justify-center text-center">
<div className="flex-max-width h-full flex-col">
<div className="flex w-full flex-col gap-4">
@@ -136,7 +130,7 @@
</div>
) : (
<div className="grid w-full gap-4 md:grid-cols-2 lg:grid-cols-2">
{loadingScreen === false && data?.length > 0 ? (
{isLoading === false && data?.length > 0 ? (
data?.map((item, idx) => (
<CollectionCardComponent
onDelete={() => {
@@ -185,7 +179,7 @@
</div>
)}
</div>
{!loadingScreen && data.length > 0 && (
{!isLoading && data.length > 0 && (
<div className="relative py-6">
<PaginatorComponent
storeComponent={true}

View file

@@ -62,10 +62,10 @@ const useFlowsManagerStore = create<FlowsManagerStoreType>((set, get) => ({
if (dbData) {
const { data, flows } = processFlows(dbData, false);
get().setFlows(flows);
set({ isLoading: false });
useTypesStore.setState((state) => ({
data: { ...state.data, ["saved_components"]: data },
}));
set({ isLoading: false });
resolve();
}
})

View file

@@ -4,6 +4,7 @@ import { APIDataType } from "../types/api";
import { TypesStoreType } from "../types/zustand/types";
import { templatesGenerator, typesGenerator } from "../utils/reactflowUtils";
import useAlertStore from "./alertStore";
import useFlowsManagerStore from "./flowsManagerStore";
export const useTypesStore = create<TypesStoreType>((set, get) => ({
types: {},
@@ -11,6 +12,8 @@ export const useTypesStore = create<TypesStoreType>((set, get) => ({
data: {},
getTypes: () => {
return new Promise<void>(async (resolve, reject) => {
const setLoading = useFlowsManagerStore.getState().setIsLoading;
setLoading(true);
getAll()
.then((response) => {
const data = response.data;
@@ -20,6 +23,7 @@ export const useTypesStore = create<TypesStoreType>((set, get) => ({
data: { ...old.data, ...data },
templates: templatesGenerator(data),
}));
setLoading(false)
resolve();
})
.catch((error) => {

View file

@@ -611,35 +611,36 @@ def test_async_task_processing(distributed_client, flow, created_api_key):
assert "Gabriel" in task_status_json["result"]["text"], task_status_json["result"]
# ! Deactivating this until updating the test
# Test function without loop
@pytest.mark.async_test
def test_async_task_processing_vector_store(client, added_vector_store, created_api_key):
headers = {"x-api-key": created_api_key.api_key}
post_data = {"inputs": {"input": "How do I upload examples?"}}
# @pytest.mark.async_test
# def test_async_task_processing_vector_store(client, added_vector_store, created_api_key):
# headers = {"x-api-key": created_api_key.api_key}
# post_data = {"inputs": {"input": "How do I upload examples?"}}
# Run the /api/v1/process/{flow_id} endpoint with sync=False
response = client.post(
f"api/v1/process/{added_vector_store.get('id')}",
headers=headers,
json={**post_data, "sync": False},
)
assert response.status_code == 200, response.json()
assert "result" in response.json()
assert "FAILURE" not in response.json()["result"]
# # Run the /api/v1/process/{flow_id} endpoint with sync=False
# response = client.post(
# f"api/v1/process/{added_vector_store.get('id')}",
# headers=headers,
# json={**post_data, "sync": False},
# )
# assert response.status_code == 200, response.json()
# assert "result" in response.json()
# assert "FAILURE" not in response.json()["result"]
# Extract the task ID from the response
task = response.json().get("task")
task_id = task.get("id")
task_href = task.get("href")
assert task_id is not None
assert task_href is not None
assert task_href == f"api/v1/task/{task_id}"
# # Extract the task ID from the response
# task = response.json().get("task")
# task_id = task.get("id")
# task_href = task.get("href")
# assert task_id is not None
# assert task_href is not None
# assert task_href == f"api/v1/task/{task_id}"
# Polling the task status using the helper function
task_status_json = poll_task_status(client, headers, task_href)
assert task_status_json is not None, "Task did not complete in time"
# # Polling the task status using the helper function
# task_status_json = poll_task_status(client, headers, task_href)
# assert task_status_json is not None, "Task did not complete in time"
# Validate that the task completed successfully and the result is as expected
assert "result" in task_status_json, task_status_json
assert "output" in task_status_json["result"], task_status_json["result"]
assert "Langflow" in task_status_json["result"]["output"], task_status_json["result"]
# # Validate that the task completed successfully and the result is as expected
# assert "result" in task_status_json, task_status_json
# assert "output" in task_status_json["result"], task_status_json["result"]
# assert "Langflow" in task_status_json["result"]["output"], task_status_json["result"]