Merge remote-tracking branch 'origin/dev' into streaming
Commit: fc007e4349
25 changed files with 384 additions and 937 deletions
.github/workflows/test.yml (vendored; 2 lines changed)

@@ -30,4 +30,4 @@ jobs:
       run: poetry install
     - name: Run unit tests
       run: |
-        make test
+        make tests

Makefile (7 lines changed)

@@ -1,4 +1,4 @@
-.PHONY: all format lint build
+.PHONY: all format lint build build_frontend install_frontend run_frontend run_backend dev help tests coverage

 all: help

@@ -8,7 +8,7 @@ coverage:
 	--cov-report xml \
 	--cov-report term-missing:skip-covered

-test:
+tests:
 	poetry run pytest tests

 format:

@@ -71,3 +71,6 @@ help:
 	@echo 'build - build the frontend static files and package the project'
 	@echo 'publish - build the frontend static files and package the project and publish it to PyPI'
+	@echo 'dev - run the project in development mode with docker compose'
+	@echo 'tests - run the tests'
+	@echo 'coverage - run the tests and generate a coverage report'
 	@echo '----'

poetry.lock (generated; 39 lines changed)

@@ -1708,14 +1708,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]

 [[package]]
 name = "langchain"
-version = "0.0.158"
+version = "0.0.160"
 description = "Building applications with LLMs through composability"
 category = "main"
 optional = false
 python-versions = ">=3.8.1,<4.0"
 files = [
-    {file = "langchain-0.0.158-py3-none-any.whl", hash = "sha256:2121874e3c72db8987467e77c1890d53e146e5f90ab249e82194187568285e43"},
-    {file = "langchain-0.0.158.tar.gz", hash = "sha256:29b578a6c3ccb97b63ffa93451c590e03aa55c19a834c0a2d2f8573139ca5e90"},
+    {file = "langchain-0.0.160-py3-none-any.whl", hash = "sha256:e09310cc07c38a5e6777bd3d30c51227e2da775d4267d3fb72697a4de3931da3"},
+    {file = "langchain-0.0.160.tar.gz", hash = "sha256:427c142e2fdb9f9ef9f2352a2c82db28db4d11c61f02b20dd33e77d850fe81cc"},
 ]

 [package.dependencies]

@@ -1728,12 +1728,12 @@ openapi-schema-pydantic = ">=1.2,<2.0"
 pydantic = ">=1,<2"
 PyYAML = ">=5.4.1"
 requests = ">=2,<3"
-SQLAlchemy = ">=1.3,<3"
+SQLAlchemy = ">=1.4,<3"
 tenacity = ">=8.1.0,<9.0.0"
 tqdm = ">=4.48.0"

 [package.extras]
-all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.3.0,<4.0.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "lancedb (>=0.1,<0.2)", "lark (>=1.1.5,<2.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
+all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.3.0,<4.0.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "lark (>=1.1.5,<2.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
 azure = ["azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "openai (>=0,<1)"]
 cohere = ["cohere (>=3,<4)"]
 embeddings = ["sentence-transformers (>=2,<3)"]

@@ -1743,13 +1743,13 @@ qdrant = ["qdrant-client (>=1.1.2,<2.0.0)"]

 [[package]]
 name = "lit"
-version = "16.0.2"
+version = "16.0.3"
 description = "A Software Testing Tool"
 category = "main"
 optional = false
 python-versions = "*"
 files = [
-    {file = "lit-16.0.2.tar.gz", hash = "sha256:d743ef55cb58764bba85768c502e2d68d87aeb4303d508a18abaa8a35077ab25"},
+    {file = "lit-16.0.3.tar.gz", hash = "sha256:25524fe51fa3261212cfd86a8891429ed0460e247384c5a2001612d08e362e00"},
 ]

 [[package]]

@@ -4555,18 +4555,18 @@ files = [

 [[package]]
 name = "types-requests"
-version = "2.29.0.0"
+version = "2.30.0.0"
 description = "Typing stubs for requests"
 category = "dev"
 optional = false
 python-versions = "*"
 files = [
-    {file = "types-requests-2.29.0.0.tar.gz", hash = "sha256:c86f4a955d943d2457120dbe719df24ef0924e11177164d10a0373cf311d7b4d"},
-    {file = "types_requests-2.29.0.0-py3-none-any.whl", hash = "sha256:4cf6e323e856c779fbe8815bb977a5bf5d6c5034713e4c17ff2a9a20610f5b27"},
+    {file = "types-requests-2.30.0.0.tar.gz", hash = "sha256:dec781054324a70ba64430ae9e62e7e9c8e4618c185a5cb3f87a6738251b5a31"},
+    {file = "types_requests-2.30.0.0-py3-none-any.whl", hash = "sha256:c6cf08e120ca9f0dc4fa4e32c3f953c3fba222bcc1db6b97695bce8da1ba9864"},
 ]

 [package.dependencies]
-types-urllib3 = "<1.27"
+types-urllib3 = "*"

 [[package]]
 name = "types-urllib3"

@@ -4891,6 +4891,21 @@ files = [
 [package.extras]
 test = ["pytest (>=6.0.0)"]

+[[package]]
+name = "wikipedia"
+version = "1.4.0"
+description = "Wikipedia API for Python"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "wikipedia-1.4.0.tar.gz", hash = "sha256:db0fad1829fdd441b1852306e9856398204dc0786d2996dd2e0c8bb8e26133b2"},
+]
+
+[package.dependencies]
+beautifulsoup4 = "*"
+requests = ">=2.0.0,<3.0.0"
+
 [[package]]
 name = "xlsxwriter"
 version = "3.1.0"

@@ -5069,4 +5084,4 @@ cffi = ["cffi (>=1.11)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "5e7c877648cffc95b5312d87c82afa7bb6a9ce331492769c93efcb8f99252d9a"
+content-hash = "5aca7cac07e5b678d4b01d994227da5ef06f4e8da6c2a854bab7e7e6cd17ccb5"

pyproject.toml

@@ -49,6 +49,7 @@ psycopg2-binary = "^2.9.6"
 pyarrow = "^11.0.0"
 websockets = "^11.0.2"
 tiktoken = "^0.3.3"
+wikipedia = "^1.4.0"

 [tool.poetry.group.dev.dependencies]
 black = "^23.1.0"

@@ -5,6 +5,12 @@ from fastapi import APIRouter, HTTPException

 from langflow.interface.run import process_graph_cached
 from langflow.interface.types import build_langchain_types_dict
+from langflow.api.schemas import (
+    ExportedFlow,
+    GraphData,
+    PredictRequest,
+    PredictResponse,
+)

 # build router
 router = APIRouter()

@@ -16,10 +22,14 @@ def get_all():
     return build_langchain_types_dict()


-@router.post("/predict")
-def get_load(data: Dict[str, Any]):
+@router.post("/predict", response_model=PredictResponse)
+async def get_load(predict_request: PredictRequest):
     try:
-        return process_graph_cached(data)
+        exported_flow: ExportedFlow = predict_request.exported_flow
+        graph_data: GraphData = exported_flow.data
+        data = graph_data.dict()
+        response = process_graph_cached(data, predict_request.message)
+        return PredictResponse(result=response.get("result", ""))
     except Exception as e:
         # Log stack trace
         logger.exception(e)

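The reworked endpoint accepts the whole exported flow plus the user message and returns a typed response. A minimal client sketch against this contract; the port and route prefix are assumptions not visible in this diff, and the empty flow is a placeholder:

    import requests

    # Placeholder flow: a real export carries the graph in `data`.
    payload = {
        "message": "Hello, what can you do?",
        "exported_flow": {
            "id": "example-id",
            "name": "Example Flow",
            "description": "exported from the LangFlow UI",
            "data": {"nodes": [], "edges": []},
        },
    }

    # Host/port and prefix are assumptions; adjust to your deployment.
    resp = requests.post("http://localhost:7860/api/v1/predict", json=payload)
    resp.raise_for_status()
    print(resp.json()["result"])  # PredictResponse carries a single `result` field
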
src/backend/langflow/api/schemas.py

@@ -1,8 +1,37 @@
-from typing import Any, Union
+from typing import Any, Union, Dict, List

 from pydantic import BaseModel, validator


+class GraphData(BaseModel):
+    """Data inside the exported flow."""
+
+    nodes: List[Dict[str, Any]]
+    edges: List[Dict[str, Any]]
+
+
+class ExportedFlow(BaseModel):
+    """Exported flow from LangFlow."""
+
+    description: str
+    name: str
+    id: str
+    data: GraphData
+
+
+class PredictRequest(BaseModel):
+    """Predict request schema."""
+
+    message: str
+    exported_flow: ExportedFlow
+
+
+class PredictResponse(BaseModel):
+    """Predict response schema."""
+
+    result: str
+
+
 class ChatMessage(BaseModel):
     """Chat message schema."""

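The new schemas can be exercised standalone. A quick sketch in pydantic v1 style, which matches the `pydantic = ">=1,<2"` pin in poetry.lock above:

    from langflow.api.schemas import PredictRequest, PredictResponse

    # parse_obj validates the nested flow structure recursively (pydantic v1).
    req = PredictRequest.parse_obj({
        "message": "hi",
        "exported_flow": {
            "id": "abc",
            "name": "demo",
            "description": "",
            "data": {"nodes": [], "edges": []},
        },
    })
    assert req.exported_flow.data.nodes == []

    # The endpoint serializes a single string field back to the client.
    print(PredictResponse(result="ok").json())  # {"result": "ok"}
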
@@ -49,5 +49,5 @@ def post_validate_node(node_id: str, data: dict):
             return str(node.params)
         raise Exception(f"Node {node_id} not found")
     except Exception as e:
-        logger.exception(e)
+        logger.error(e)
         raise HTTPException(status_code=500, detail=str(e)) from e

src/backend/langflow/cache/base.py (vendored; 1 line changed)

@@ -48,6 +48,7 @@ def memoize_dict(maxsize=128):
             cache.clear()

         wrapper.clear_cache = clear_cache  # type: ignore
+        wrapper.cache = cache  # type: ignore
         return wrapper

     return decorator

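Exposing `wrapper.cache` makes the memoization inspectable, which the reworked cache tests further down rely on. A hedged re-sketch of the decorator's shape (the real key derivation in `cache/base.py` may differ):

    import json
    from collections import OrderedDict
    from functools import wraps

    def memoize_dict(maxsize=128):
        """Minimal re-sketch: LRU-style cache keyed on a dict argument."""
        cache = OrderedDict()

        def decorator(func):
            @wraps(func)
            def wrapper(d, *args, **kwargs):
                key = json.dumps(d, sort_keys=True)  # stable key for a dict input
                if key not in cache:
                    if len(cache) >= maxsize:
                        cache.popitem(last=False)  # evict the oldest entry
                    cache[key] = func(d, *args, **kwargs)
                return cache[key]

            def clear_cache():
                cache.clear()

            wrapper.clear_cache = clear_cache  # expose controls, as in the diff
            wrapper.cache = cache              # the newly exposed attribute
            return wrapper

        return decorator

    @memoize_dict(maxsize=10)
    def build(graph: dict) -> str:
        return f"built {len(graph.get('nodes', []))} nodes"

    build({"nodes": []})
    assert len(build.cache) == 1  # tests can now assert on cache size
    build.clear_cache()
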
@@ -51,6 +51,7 @@ tools:
 - BingSearchRun
 - GoogleSearchRun
+- GoogleSearchResults
 - GoogleSerperRun
 - JsonListKeysTool
 - JsonGetValueTool
 - PythonREPLTool

@@ -27,7 +27,7 @@ from langchain.agents.agent_toolkits.vectorstore.prompt import (
-from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
+from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS as SQL_FORMAT_INSTRUCTIONS
+from langchain.base_language import BaseLanguageModel
-from langchain.llms.base import BaseLLM

 from langchain.memory.chat_memory import BaseChatMemory
 from langchain.sql_database import SQLDatabase
 from langchain.tools.python.tool import PythonAstREPLTool

@@ -63,7 +63,7 @@ class JsonAgent(AgentExecutor):
             llm=llm,
             prompt=prompt,
         )
-        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)
+        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)  # type: ignore
         return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)

     def run(self, *args, **kwargs):

@@ -110,7 +110,7 @@ class CSVAgent(AgentExecutor):
             prompt=partial_prompt,
         )
         tool_names = {tool.name for tool in tools}
-        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
+        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)  # type: ignore

         return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)

@@ -134,7 +134,7 @@ class VectorStoreAgent(AgentExecutor):

     @classmethod
     def from_toolkit_and_llm(
-        cls, llm: BaseLLM, vectorstoreinfo: VectorStoreInfo, **kwargs: Any
+        cls, llm: BaseLanguageModel, vectorstoreinfo: VectorStoreInfo, **kwargs: Any
     ):
         """Construct a vectorstore agent from an LLM and tools."""

@@ -147,7 +147,7 @@ class VectorStoreAgent(AgentExecutor):
             prompt=prompt,
         )
         tool_names = {tool.name for tool in tools}
-        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
+        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)  # type: ignore
         return AgentExecutor.from_agent_and_tools(
             agent=agent, tools=tools, verbose=True
         )

@@ -171,7 +171,9 @@ class SQLAgent(AgentExecutor):
         super().__init__(*args, **kwargs)

     @classmethod
-    def from_toolkit_and_llm(cls, llm: BaseLLM, database_uri: str, **kwargs: Any):
+    def from_toolkit_and_llm(
+        cls, llm: BaseLanguageModel, database_uri: str, **kwargs: Any
+    ):
         """Construct a sql agent from an LLM and tools."""
         db = SQLDatabase.from_uri(database_uri)
         toolkit = SQLDatabaseToolkit(db=db, llm=llm)

@@ -213,7 +215,7 @@ class SQLAgent(AgentExecutor):
             prompt=prompt,
         )
         tool_names = {tool.name for tool in tools}  # type: ignore
-        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
+        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)  # type: ignore
         return AgentExecutor.from_agent_and_tools(
             agent=agent,
             tools=tools,  # type: ignore

@@ -256,7 +258,7 @@ class VectorStoreRouterAgent(AgentExecutor):
             prompt=prompt,
         )
         tool_names = {tool.name for tool in tools}
-        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
+        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)  # type: ignore
         return AgentExecutor.from_agent_and_tools(
             agent=agent, tools=tools, verbose=True
         )

@@ -275,7 +277,7 @@ class InitializeAgent(AgentExecutor):
     @classmethod
     def initialize(
         cls,
-        llm: BaseLLM,
+        llm: BaseLanguageModel,
         tools: List[Tool],
         agent: str,
         memory: Optional[BaseChatMemory] = None,

@@ -33,7 +33,7 @@ class MalfoyAgent(AgentExecutor):
             llm=llm,
             prompt=prompt,
         )
-        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)
+        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)  # type: ignore
         return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)

     def run(self, *args, **kwargs):

src/backend/langflow/interface/importing/utils.py

@@ -7,11 +7,9 @@ from langchain import PromptTemplate
 from langchain.agents import Agent
 from langchain.chains.base import Chain
-from langchain.chat_models.base import BaseChatModel
-from langchain.llms.base import BaseLLM
+from langchain.base_language import BaseLanguageModel
 from langchain.tools import BaseTool

-from langflow.interface.tools.base import tool_creator
-

 def import_module(module_path: str) -> Any:
     """Import module from module path"""

@@ -100,15 +98,19 @@ def import_agent(agent: str) -> Agent:
     return import_class(f"langchain.agents.{agent}")


-def import_llm(llm: str) -> BaseLLM:
+def import_llm(llm: str) -> BaseLanguageModel:
     """Import llm from llm name"""
     return import_class(f"langchain.llms.{llm}")


 def import_tool(tool: str) -> BaseTool:
     """Import tool from tool name"""
+    from langflow.interface.tools.base import tool_creator

-    return tool_creator.type_to_loader_dict[tool]["fcn"]
+    if tool in tool_creator.type_to_loader_dict:
+        return tool_creator.type_to_loader_dict[tool]["fcn"]
+
+    return import_class(f"langchain.tools.{tool}")


 def import_chain(chain: str) -> Type[Chain]:

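`import_tool` now consults the `tool_creator` registry first and falls back to importing straight from `langchain.tools`; the local import also sidesteps a circular dependency between this module and `tools.base`. For reference, a sketch of the kind of importlib helper `import_class` is assumed to be:

    import importlib
    from typing import Any

    def import_module(module_path: str) -> Any:
        """Import a module from a dotted path."""
        return importlib.import_module(module_path)

    def import_class(class_path: str) -> Any:
        """Split 'pkg.module.Class' and return the class object."""
        module_path, class_name = class_path.rsplit(".", 1)
        return getattr(import_module(module_path), class_name)

    # Stdlib demo of the same lookup the fallback performs:
    JSONDecoder = import_class("json.decoder.JSONDecoder")
    # With langchain installed: import_class("langchain.tools.WikipediaQueryRun")
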
src/backend/langflow/interface/loading.py

@@ -15,7 +15,7 @@ from langchain.agents.loading import load_agent_from_config
 from langchain.agents.tools import Tool
 from langchain.callbacks.base import BaseCallbackManager
 from langchain.chains.loading import load_chain_from_config
-from langchain.llms.base import BaseLLM
+from langchain.base_language import BaseLanguageModel
 from langchain.llms.loading import load_llm_from_config
 from pydantic import ValidationError

@@ -74,12 +74,10 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
         return loaded_toolkit
     elif base_type == "embeddings":
-        # ? Why remove model from params?
-
         try:
             params.pop("model")
         except KeyError:
             pass

         # remove all params that are not in class_object.__fields__
         try:
             return class_object(**params)

@@ -188,7 +186,7 @@ def load_langchain_type_from_config(config: Dict[str, Any]):

 def load_agent_executor_from_config(
     config: dict,
-    llm: Optional[BaseLLM] = None,
+    llm: Optional[BaseLanguageModel] = None,
     tools: Optional[list[Tool]] = None,
     callback_manager: Optional[BaseCallbackManager] = None,
     **kwargs: Any,

src/backend/langflow/interface/run.py

@@ -101,13 +101,12 @@ def process_graph(data_graph: Dict[str, Any]):
     return {"result": str(result), "thought": thought.strip()}


-def process_graph_cached(data_graph: Dict[str, Any]):
+def process_graph_cached(data_graph: Dict[str, Any], message: str):
     """
     Process graph by extracting input variables and replacing ZeroShotPrompt
     with PromptTemplate,then run the graph and return the result and thought.
     """
     # Load langchain object
-    message = data_graph.pop("message", "")
     is_first_message = len(data_graph.get("chatHistory", [])) == 0
     langchain_object = load_or_build_langchain_object(data_graph, is_first_message)
     logger.debug("Loaded langchain object")

@@ -120,7 +119,7 @@ def process_graph_cached(data_graph: Dict[str, Any]):

     # Generate result and thought
     logger.debug("Generating result and thought")
-    result, thought = get_result_and_steps(langchain_object, message)
+    result, thought = get_result_and_thought(langchain_object, message)
     logger.debug("Generated result and thought")
     return {"result": str(result), "thought": thought.strip()}

@@ -247,7 +246,7 @@ async def get_result_and_steps(langchain_object, message: str, **kwargs):
     return result, thought


-def async_get_result_and_steps(langchain_object, message: str):
+def get_result_and_thought(langchain_object, message: str):
     """Get result and thought from extracted json"""
     try:
         if hasattr(langchain_object, "verbose"):

@@ -302,34 +301,6 @@ def async_get_result_and_steps(langchain_object, message: str):
     return result, thought


-def get_result_and_thought(extracted_json: Dict[str, Any], message: str):
-    """Get result and thought from extracted json"""
-    try:
-        langchain_object = loading.load_langchain_type_from_config(
-            config=extracted_json
-        )
-        with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
-            output = langchain_object(message)
-        intermediate_steps = (
-            output.get("intermediate_steps", []) if isinstance(output, dict) else []
-        )
-        result = (
-            output.get(langchain_object.output_keys[0])
-            if isinstance(output, dict)
-            else output
-        )
-
-        if intermediate_steps:
-            thought = format_intermediate_steps(intermediate_steps)
-        else:
-            thought = output_buffer.getvalue()
-
-    except Exception as e:
-        result = f"Error: {str(e)}"
-        thought = ""
-    return result, thought
-
-
 def format_intermediate_steps(intermediate_steps):
     formatted_chain = "> Entering new AgentExecutor chain...\n"
     for step in intermediate_steps:

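Callers now pass the message as an explicit argument rather than popping it out of the graph dict. A hedged usage sketch; the empty graph is a placeholder, and a real call needs a valid exported flow (plus credentials for any LLM nodes it contains):

    from langflow.interface.run import process_graph_cached

    # Placeholder: real callers pass the exported flow's `data` dict here.
    data = {"nodes": [], "edges": [], "chatHistory": []}

    # The message travels alongside the graph instead of inside it.
    response = process_graph_cached(data, "What year is it?")
    print(response["result"], response["thought"])
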
src/backend/langflow/interface/tools/base.py

@@ -29,7 +29,9 @@ TOOL_INPUTS = {
         placeholder="",
         value="",
     ),
-    "llm": TemplateField(field_type="BaseLLM", required=True, is_list=False, show=True),
+    "llm": TemplateField(
+        field_type="BaseLanguageModel", required=True, is_list=False, show=True
+    ),
     "func": TemplateField(
         field_type="function",
         required=True,

@@ -65,6 +67,7 @@ class ToolCreator(LangChainTypeCreator):
     def type_to_loader_dict(self) -> Dict:
         if self.tools_dict is None:
             all_tools = {}

             for tool, tool_fcn in ALL_TOOLS_NAMES.items():
                 tool_params = get_tool_params(tool_fcn)
                 tool_name = tool_params.get("name", tool)

@@ -1,3 +1,4 @@
+from langchain import tools
 from langchain.agents import Tool
 from langchain.agents.load_tools import (
     _BASE_TOOLS,

@@ -5,50 +6,16 @@ from langchain.agents.load_tools import (
     _EXTRA_OPTIONAL_TOOLS,
     _LLM_TOOLS,
 )
-from langchain.tools.bing_search.tool import BingSearchRun
-from langchain.tools.google_search.tool import GoogleSearchResults, GoogleSearchRun
-from langchain.tools.json.tool import JsonGetValueTool, JsonListKeysTool, JsonSpec
-from langchain.tools.python.tool import PythonAstREPLTool, PythonREPLTool
-from langchain.tools.requests.tool import (
-    RequestsDeleteTool,
-    RequestsGetTool,
-    RequestsPatchTool,
-    RequestsPostTool,
-    RequestsPutTool,
-)
-from langchain.tools.sql_database.tool import (
-    InfoSQLDatabaseTool,
-    ListSQLDatabaseTool,
-    QueryCheckerTool,
-    QuerySQLDataBaseTool,
-)
-from langchain.tools.wikipedia.tool import WikipediaQueryRun
-from langchain.tools.wolfram_alpha.tool import WolframAlphaQueryRun
+from langchain.tools.json.tool import JsonSpec

+from langflow.interface.importing.utils import import_class
 from langflow.interface.tools.custom import PythonFunction

 FILE_TOOLS = {"JsonSpec": JsonSpec}
 CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
-OTHER_TOOLS = {
-    "QuerySQLDataBaseTool": QuerySQLDataBaseTool,
-    "InfoSQLDatabaseTool": InfoSQLDatabaseTool,
-    "ListSQLDatabaseTool": ListSQLDatabaseTool,
-    "QueryCheckerTool": QueryCheckerTool,
-    "BingSearchRun": BingSearchRun,
-    "GoogleSearchRun": GoogleSearchRun,
-    "GoogleSearchResults": GoogleSearchResults,
-    "JsonListKeysTool": JsonListKeysTool,
-    "JsonGetValueTool": JsonGetValueTool,
-    "PythonREPLTool": PythonREPLTool,
-    "PythonAstREPLTool": PythonAstREPLTool,
-    "RequestsGetTool": RequestsGetTool,
-    "RequestsPostTool": RequestsPostTool,
-    "RequestsPatchTool": RequestsPatchTool,
-    "RequestsPutTool": RequestsPutTool,
-    "RequestsDeleteTool": RequestsDeleteTool,
-    "WikipediaQueryRun": WikipediaQueryRun,
-    "WolframAlphaQueryRun": WolframAlphaQueryRun,
-}
+OTHER_TOOLS = {tool: import_class(f"langchain.tools.{tool}") for tool in tools.__all__}

 ALL_TOOLS_NAMES = {
     **_BASE_TOOLS,
     **_LLM_TOOLS,  # type: ignore

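`OTHER_TOOLS` is now derived from whatever `langchain.tools.__all__` exports instead of a hand-maintained dict, so new tools in langchain show up automatically. The same pattern in isolation, demonstrated against a stdlib package so it runs anywhere:

    import importlib

    def build_registry(package_path: str) -> dict:
        """Map every public name a package exports to the object itself."""
        pkg = importlib.import_module(package_path)
        return {name: getattr(pkg, name) for name in getattr(pkg, "__all__", [])}

    # With langchain installed this mirrors the new OTHER_TOOLS definition:
    #   OTHER_TOOLS = build_registry("langchain.tools")
    registry = build_registry("collections.abc")  # stdlib example
    print(sorted(registry)[:5])
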
@@ -15,7 +15,10 @@ WORKDIR /home/node/app
 COPY --chown=node:node . ./

 COPY ./set_proxy.sh .
-RUN chmod +x set_proxy.sh && ./set_proxy.sh
+RUN chmod +x set_proxy.sh && \
+    cat set_proxy.sh | tr -d '\r' > set_proxy_unix.sh && \
+    chmod +x set_proxy_unix.sh && \
+    ./set_proxy_unix.sh

 USER node

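The added `tr -d '\r'` step guards against CRLF line endings from Windows checkouts, which would otherwise make the kernel look for an interpreter whose name ends in a carriage return. The equivalent normalization in Python, purely as an illustration:

    from pathlib import Path

    def to_unix_line_endings(src: str, dst: str) -> None:
        # Same effect as: tr -d '\r' < src > dst
        data = Path(src).read_bytes().replace(b"\r", b"")
        Path(dst).write_bytes(data)

    # to_unix_line_endings("set_proxy.sh", "set_proxy_unix.sh")
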
@@ -7,4 +7,4 @@ packagejson=$(cat package.json)

 packagejson=$(echo "$packagejson" | jq ".proxy = \"$backend_url\"")

-echo "$packagejson" > package.json
\ No newline at end of file
+echo "$packagejson" > package.json

@@ -1,10 +1,10 @@ (whitespace-only re-indent of the import block)
 import {
   createContext,
   useEffect,
   useState,
   useRef,
   ReactNode,
   useContext,
 } from "react";
 import { FlowType } from "../types/flow";
 import { LangFlowState, TabsContextType } from "../types/tabs";

@@ -15,221 +15,221 @@ import { APITemplateType, TemplateVariableType } from "../types/api";
 const { v4: uuidv4 } = require("uuid");

(re-indent throughout; the new version is shown once below, with the only substantive change marked)

 const TabsContextInitialValue: TabsContextType = {
   save: () => {},
   tabIndex: 0,
   setTabIndex: (index: number) => {},
   flows: [],
   removeFlow: (id: string) => {},
   addFlow: (flowData?: any) => {},
   updateFlow: (newFlow: FlowType) => {},
   incrementNodeId: () => 0,
   downloadFlow: (flow: FlowType) => {},
   uploadFlow: () => {},
   hardReset: () => {},
 };

 export const TabsContext = createContext<TabsContextType>(
   TabsContextInitialValue
 );

 export function TabsProvider({ children }: { children: ReactNode }) {
   const { setNoticeData } = useContext(alertContext);
   const [tabIndex, setTabIndex] = useState(0);
   const [flows, setFlows] = useState<Array<FlowType>>([]);
   const [id, setId] = useState("");
   const { templates } = useContext(typesContext);

   const newNodeId = useRef(0);
   function incrementNodeId() {
     newNodeId.current = newNodeId.current + 1;
     return newNodeId.current;
   }
   function save() {
     if (flows.length !== 0)
       window.localStorage.setItem(
         "tabsData",
         JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current })
       );
   }
   useEffect(() => {
     //save tabs locally
     save();
   }, [flows, id, tabIndex, newNodeId]);

   useEffect(() => {
     //get tabs locally saved
     let cookie = window.localStorage.getItem("tabsData");
     if (cookie && Object.keys(templates).length > 0) {
       let cookieObject: LangFlowState = JSON.parse(cookie);
       cookieObject.flows.forEach((flow) => {
         flow.data.nodes.forEach((node) => {
           if (Object.keys(templates[node.data.type]["template"]).length > 0) {
             node.data.node.template = updateTemplate(
               templates[node.data.type][
                 "template"
               ] as unknown as APITemplateType,
               node.data.node.template as APITemplateType
             );
           }
         });
       });
       setTabIndex(cookieObject.tabIndex);
       setFlows(cookieObject.flows);
       setId(cookieObject.id);
       newNodeId.current = cookieObject.nodeId;
     }
   }, [templates]);
   function hardReset() {
     newNodeId.current = 0;
     setTabIndex(0);
     setFlows([]);
     setId("");
   }

   /**
    * Downloads the current flow as a JSON file
    */
   function downloadFlow(flow: FlowType) {
     // create a data URI with the current flow data
     const jsonString = `data:text/json;chatset=utf-8,${encodeURIComponent(
       JSON.stringify(flow)
     )}`;

     // create a link element and set its properties
     const link = document.createElement("a");
     link.href = jsonString;
-    link.download = `${normalCaseToSnakeCase(flows[tabIndex].name)}.json`;
+    link.download = `${flows[tabIndex].name}.json`;

     // simulate a click on the link element to trigger the download
     link.click();
     setNoticeData({
       title: "Warning: Critical data,JSON file may including API keys.",
     });
   }

   /**
    * Creates a file input and listens to a change event to upload a JSON flow file.
    * If the file type is application/json, the file is read and parsed into a JSON object.
    * The resulting JSON object is passed to the addFlow function.
    */
   function uploadFlow() {
     // create a file input
     const input = document.createElement("input");
     input.type = "file";
     // add a change event listener to the file input
     input.onchange = (e: Event) => {
       // check if the file type is application/json
       if ((e.target as HTMLInputElement).files[0].type === "application/json") {
         // get the file from the file input
         const file = (e.target as HTMLInputElement).files[0];
         // read the file as text
         file.text().then((text) => {
           // parse the text into a JSON object
           let flow: FlowType = JSON.parse(text);

           addFlow(flow);
         });
       }
     };
     // trigger the file input click event to open the file dialog
     input.click();
   }
   /**
    * Removes a flow from an array of flows based on its id.
    * Updates the state of flows and tabIndex using setFlows and setTabIndex hooks.
    * @param {string} id - The id of the flow to remove.
    */
   function removeFlow(id: string) {
     setFlows((prevState) => {
       const newFlows = [...prevState];
       const index = newFlows.findIndex((flow) => flow.id === id);
       if (index >= 0) {
         if (index === tabIndex) {
           setTabIndex(flows.length - 2);
           newFlows.splice(index, 1);
         } else {
           let flowId = flows[tabIndex].id;
           newFlows.splice(index, 1);
           setTabIndex(newFlows.findIndex((flow) => flow.id === flowId));
         }
       }
       return newFlows;
     });
   }
   /**
    * Add a new flow to the list of flows.
    * @param flow Optional flow to add.
    */
   function addFlow(flow?: FlowType) {
     // Get data from the flow or set it to null if there's no flow provided.
     const data = flow?.data ? flow.data : null;
     const description = flow?.description ? flow.description : "";

     if (data) {
       data.nodes.forEach((node) => {
         if (Object.keys(templates[node.data.type]["template"]).length > 0) {
           node.data.node.template = updateTemplate(
             templates[node.data.type]["template"] as unknown as APITemplateType,
             node.data.node.template as APITemplateType
           );
         }
       });
     }
     // Create a new flow with a default name if no flow is provided.
     let newFlow: FlowType = {
       description,
       name: flow?.name ?? "New Flow",
       id: id.toString(),
       data,
     };

     // Increment the ID counter.
     setId(uuidv4());

     // Add the new flow to the list of flows.
     setFlows((prevState) => {
       const newFlows = [...prevState, newFlow];
       return newFlows;
     });

     // Set the tab index to the new flow.
     setTabIndex(flows.length);
   }
   /**
    * Updates an existing flow with new data
    * @param newFlow - The new flow object containing the updated data
    */
   function updateFlow(newFlow: FlowType) {
     setFlows((prevState) => {
       const newFlows = [...prevState];
       const index = newFlows.findIndex((flow) => flow.id === newFlow.id);
       if (index !== -1) {
         newFlows[index].description = newFlow.description ?? "";
         newFlows[index].data = newFlow.data;
         newFlows[index].name = newFlow.name;
       }
       return newFlows;
     });
   }

   return (
     <TabsContext.Provider
       value={{
         save,
         hardReset,
         tabIndex,
         setTabIndex,
         flows,
         incrementNodeId,
         removeFlow,
         addFlow,
         updateFlow,
         downloadFlow,
         uploadFlow,
       }}
     >
       {children}
     </TabsContext.Provider>
   );
 }

File diff suppressed because one or more lines are too long
@@ -48,7 +48,6 @@ def test_zero_shot_agent(client: TestClient):
             "type": "Tool",
             "list": True,
             "advanced": False,
             "value": [],
         }

@@ -1,10 +1,11 @@
 import json
-import tempfile
-from pathlib import Path

 import pytest
-from langflow.cache.base import PREFIX, save_cache
-from langflow.interface.run import load_langchain_object
+from langflow.interface.run import (
+    build_graph,
+    build_langchain_object_with_caching,
+    load_or_build_langchain_object,
+)


 def get_graph(_type="basic"):

@@ -40,26 +41,44 @@ def langchain_objects_are_equal(obj1, obj2):
     return str(obj1) == str(obj2)


-def test_cache_creation(basic_data_graph):
-    # Compute hash for the input data_graph
-    # Call process_graph function to build and cache the langchain_object
-    is_first_message = True
-    computed_hash, langchain_object = load_langchain_object(
-        basic_data_graph, is_first_message=is_first_message
-    )
-    save_cache(computed_hash, langchain_object, is_first_message)
-    # Check if the cache file exists
-    cache_file = Path(tempfile.gettempdir()) / f"{PREFIX}/{computed_hash}.dill"
-
-    assert cache_file.exists()
+# Test load_or_build_langchain_object
+def test_load_or_build_langchain_object_first_message_true(basic_data_graph):
+    build_langchain_object_with_caching.clear_cache()
+    graph = load_or_build_langchain_object(basic_data_graph, is_first_message=True)
+    assert graph is not None


-def test_cache_reuse(basic_data_graph):
-    # Call process_graph function to build and cache the langchain_object
-    result1 = load_langchain_object(basic_data_graph)
-
-    # Call process_graph function again to use the cached langchain_object
-    result2 = load_langchain_object(basic_data_graph)
-
-    # Compare the results to ensure the same langchain_object was used
-    assert langchain_objects_are_equal(result1, result2)
+def test_load_or_build_langchain_object_first_message_false(basic_data_graph):
+    graph = load_or_build_langchain_object(basic_data_graph, is_first_message=False)
+    assert graph is not None
+
+
+# Test build_langchain_object_with_caching
+def test_build_langchain_object_with_caching(basic_data_graph):
+    build_langchain_object_with_caching.clear_cache()
+    graph = build_langchain_object_with_caching(basic_data_graph)
+    assert graph is not None
+
+
+# Test build_graph
+def test_build_graph(basic_data_graph):
+    graph = build_graph(basic_data_graph)
+    assert graph is not None
+    assert len(graph.nodes) == len(basic_data_graph["nodes"])
+    assert len(graph.edges) == len(basic_data_graph["edges"])
+
+
+# Test cache size limit
+def test_cache_size_limit(basic_data_graph):
+    build_langchain_object_with_caching.clear_cache()
+    for i in range(11):
+        modified_data_graph = basic_data_graph.copy()
+        nodes = modified_data_graph["nodes"]
+        node_id = nodes[0]["id"]
+        # Now we replace all instances ode node_id with a new id in the json
+        json_string = json.dumps(modified_data_graph)
+        modified_json_string = json_string.replace(node_id, f"{node_id}_{i}")
+        modified_data_graph_new_id = json.loads(modified_json_string)
+        build_langchain_object_with_caching(modified_data_graph_new_id)
+
+    assert len(build_langchain_object_with_caching.cache) == 10

@@ -191,7 +191,7 @@ def test_llm_checker_chain(client: TestClient):
             "multiline": False,
             "password": False,
             "name": "llm",
-            "type": "BaseLLM",
+            "type": "BaseLanguageModel",
             "list": False,
             "advanced": False,
         }

@@ -1,7 +1,7 @@
 from typing import Type, Union

 import pytest
-from langchain.agents import AgentExecutor
+from langchain.chains.base import Chain
 from langchain.llms.fake import FakeListLLM
 from langflow.graph import Edge, Graph, Node
 from langflow.graph.nodes import (

@@ -15,7 +15,7 @@ from langflow.graph.nodes import (
     WrapperNode,
 )
 from langflow.interface.run import get_result_and_steps
-from langflow.utils.payload import build_json, get_root_node
+from langflow.utils.payload import get_root_node

 # Test cases for the graph module

@@ -102,32 +102,13 @@ def test_get_node_neighbors_basic(basic_graph):
     # We need to check if there is a Chain in the one of the neighbors'
     # data attribute in the type key
     assert any(
         "Chain" in neighbor.data["type"] for neighbor, val in neighbors.items() if val
     )
-    # assert Serper Search is in the neighbors
-    assert any(
-        "Serper" in neighbor.data["type"] for neighbor, val in neighbors.items() if val
-    )
-    # Now on to the Chain's neighbors
-    chain = next(
-        neighbor
-        for neighbor, val in neighbors.items()
-        if "Chain" in neighbor.data["type"] and val
-    )
-    chain_neighbors = basic_graph.get_node_neighbors(chain)
-    assert chain_neighbors is not None
-    assert isinstance(chain_neighbors, dict)
-    # Check if there is a LLM in the chain's neighbors
-    assert any(
-        "OpenAI" in neighbor.data["type"]
-        for neighbor, val in chain_neighbors.items()
-        if val
-    )
-    # Chain should have a Prompt as a neighbor
-    assert any(
-        "Prompt" in neighbor.data["type"]
-        for neighbor, val in chain_neighbors.items()
-        if val
-    )
+    assert any(
+        "ConversationBufferMemory" in neighbor.data["type"]
+        for neighbor, val in neighbors.items()
+        if val
+    )
+    assert any(
+        "OpenAI" in neighbor.data["type"] for neighbor, val in neighbors.items() if val
+    )

@@ -209,7 +190,7 @@ def test_get_root_node(basic_graph, complex_graph):
     root = get_root_node(basic_graph)
     assert root is not None
     assert isinstance(root, Node)
-    assert root.data["type"] == "ZeroShotAgent"
+    assert root.data["type"] == "TimeTravelGuideChain"
     # For complex example, the root node is a ZeroShotAgent too
     assert isinstance(complex_graph, Graph)
     root = get_root_node(complex_graph)

@@ -218,26 +199,6 @@ def test_get_root_node(basic_graph, complex_graph):
     assert root.data["type"] == "ZeroShotAgent"


-def test_build_json(basic_graph):
-    """Test building JSON from graph"""
-    assert isinstance(basic_graph, Graph)
-    root = get_root_node(basic_graph)
-    json_data = build_json(root, basic_graph)
-    assert isinstance(json_data, dict)
-    assert json_data["_type"] == "zero-shot-react-description"
-    assert isinstance(json_data["llm_chain"], dict)
-    assert json_data["llm_chain"]["_type"] == "llm_chain"
-    assert json_data["llm_chain"]["memory"] is None
-    assert json_data["llm_chain"]["verbose"] is False
-    assert isinstance(json_data["llm_chain"]["prompt"], dict)
-    assert isinstance(json_data["llm_chain"]["llm"], dict)
-    assert json_data["llm_chain"]["output_key"] == "text"
-    assert isinstance(json_data["allowed_tools"], list)
-    assert all(isinstance(tool, dict) for tool in json_data["allowed_tools"])
-    assert isinstance(json_data["return_values"], list)
-    assert all(isinstance(val, str) for val in json_data["return_values"])
-
-
 def test_validate_edges(basic_graph):
     """Test validating edges"""

@@ -269,45 +230,11 @@ def test_build_params(basic_graph):
     assert all(edge.matched_type in edge.source_types for edge in basic_graph.edges)
     # Get the root node
     root = get_root_node(basic_graph)
-    # Root node is a ZeroShotAgent
-    # which requires an llm_chain, allowed_tools and return_values
+    # Root node is a TimeTravelGuideChain
+    # which requires an llm and memory
     assert isinstance(root.params, dict)
-    assert "llm_chain" in root.params
-    assert "allowed_tools" in root.params
-    assert "return_values" in root.params
-    # The llm_chain should be a Node
-    assert isinstance(root.params["llm_chain"], Node)
-    # The allowed_tools should be a list of Nodes
-    assert isinstance(root.params["allowed_tools"], list)
-    assert all(isinstance(tool, Node) for tool in root.params["allowed_tools"])
-    # The return_values is of type str so it should be a list of strings
-    assert isinstance(root.params["return_values"], list)
-    assert all(isinstance(val, str) for val in root.params["return_values"])
-    # The llm_chain should have a prompt and llm
-    llm_chain_node = root.params["llm_chain"]
-    assert isinstance(llm_chain_node.params, dict)
-    assert "prompt" in llm_chain_node.params
-    assert "llm" in llm_chain_node.params
-    # The prompt should be a Node
-    assert isinstance(llm_chain_node.params["prompt"], Node)
-    # The llm should be a Node
-    assert isinstance(llm_chain_node.params["llm"], Node)
-    # The prompt should have format_insctructions, suffix, prefix
-    prompt_node = llm_chain_node.params["prompt"]
-    assert isinstance(prompt_node.params, dict)
-    assert "format_instructions" in prompt_node.params
-    assert "suffix" in prompt_node.params
-    assert "prefix" in prompt_node.params
-    # All of them should be of type str
-    assert isinstance(prompt_node.params["format_instructions"], str)
-    assert isinstance(prompt_node.params["suffix"], str)
-    assert isinstance(prompt_node.params["prefix"], str)
-    # The llm should have a model
-    llm_node = llm_chain_node.params["llm"]
-    assert isinstance(llm_node.params, dict)
-    assert "model_name" in llm_node.params
-    # The model should be a str
-    assert isinstance(llm_node.params["model_name"], str)
+    assert "llm" in root.params
+    assert "memory" in root.params


 def test_build(basic_graph, complex_graph, openapi_graph):

@@ -324,18 +251,18 @@ def assert_agent_was_built(graph):
     # Build the Agent
     result = graph.build()
     # The agent should be a AgentExecutor
-    assert isinstance(result, AgentExecutor)
+    assert isinstance(result, Chain)


-def test_agent_node_build(basic_graph):
-    agent_node = get_node_by_type(basic_graph, AgentNode)
+def test_agent_node_build(complex_graph):
+    agent_node = get_node_by_type(complex_graph, AgentNode)
     assert agent_node is not None
     built_object = agent_node.build()
     assert built_object is not None


-def test_tool_node_build(basic_graph):
-    tool_node = get_node_by_type(basic_graph, ToolNode)
+def test_tool_node_build(complex_graph):
+    tool_node = get_node_by_type(complex_graph, ToolNode)
     assert tool_node is not None
     built_object = tool_node.build()
     assert built_object is not None

@@ -1,7 +1,7 @@
 import json

 import pytest
-from langchain.agents import AgentExecutor
+from langchain.chains.base import Chain
 from langflow import load_flow_from_json
 from langflow.graph import Graph
 from langflow.utils.payload import get_root_node

@@ -11,7 +11,7 @@ def test_load_flow_from_json():
    """Test loading a flow from a json file"""
     loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH)
     assert loaded is not None
-    assert isinstance(loaded, AgentExecutor)
+    assert isinstance(loaded, Chain)


 def test_get_root_node():