From 55c595eaead68f6c0cf86a1a990eea901b9a2948 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 20:25:59 -0300 Subject: [PATCH 01/98] Add markdown package and update unstructured package --- poetry.lock | 21 ++++++++++++++++++++- pyproject.toml | 2 +- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index a7ed6a921..b3dc0efeb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3681,6 +3681,24 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown" +version = "3.5.2" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -8212,6 +8230,7 @@ emoji = "*" filetype = "*" langdetect = "*" lxml = "*" +markdown = {version = "*", optional = true, markers = "extra == \"md\""} nltk = "*" numpy = "*" python-iso639 = "*" @@ -9026,4 +9045,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "1462954b3befc2989ae226f2214111be786eb05bade578c9c80b4ed80d5b59ff" +content-hash = "b35a356770d3425f524b0c46a449696db1fa7c13fae77324188cb6ffa4a4c5a7" diff --git a/pyproject.toml b/pyproject.toml index 339bdff5e..09d70518a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -105,7 +105,7 @@ pytube = "^15.0.0" python-socketio = "^5.11.0" llama-index = "0.9.48" langchain-openai = "^0.0.6" -unstructured = "^0.12.4" +unstructured = {extras = ["md"], version = "^0.12.4"} [tool.poetry.group.dev.dependencies] pytest-asyncio = "^0.23.1" From 67aca6dd369d8161dcf3e84a7313955764deed44 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 20:29:12 -0300 Subject: [PATCH 02/98] Refactor SQLGeneratorComponent to handle prompt template --- .../components/chains/SQLGenerator.py | 32 +++++++++++++++---- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/components/chains/SQLGenerator.py b/src/backend/langflow/components/chains/SQLGenerator.py index 5efb0f738..ea22a6de0 100644 --- a/src/backend/langflow/components/chains/SQLGenerator.py +++ b/src/backend/langflow/components/chains/SQLGenerator.py @@ -32,21 +32,39 @@ class SQLGeneratorComponent(CustomComponent): db: SQLDatabase, llm: BaseLanguageModel, top_k: int = 5, - prompt: Optional[PromptTemplate] = None, + prompt: Optional[Text] = None, ) -> Text: + if prompt: + prompt_template = PromptTemplate.from_template(template=prompt) + else: + prompt_template = None + if top_k > 0: kwargs = { "k": top_k, } - if not prompt: + if not prompt_template: sql_query_chain = create_sql_query_chain(llm=llm, db=db, **kwargs) else: - template = prompt.template if hasattr(prompt, "template") else prompt + template = ( + prompt_template.template + if hasattr(prompt, "template") + else prompt_template + ) # Check if {question} is in the prompt - if 
"{question}" not in template or "question" not in template.input_variables: - raise ValueError("Prompt must contain `{question}` to be used with Natural Language to SQL.") - sql_query_chain = create_sql_query_chain(llm=llm, db=db, prompt=prompt, **kwargs) - query_writer = sql_query_chain | {"query": lambda x: x.replace("SQLQuery:", "").strip()} + if ( + "{question}" not in template + or "question" not in template.input_variables + ): + raise ValueError( + "Prompt must contain `{question}` to be used with Natural Language to SQL." + ) + sql_query_chain = create_sql_query_chain( + llm=llm, db=db, prompt=prompt_template, **kwargs + ) + query_writer = sql_query_chain | { + "query": lambda x: x.replace("SQLQuery:", "").strip() + } response = query_writer.invoke({"question": inputs}) query = response.get("query") self.status = query From e2c53f1166d54590414c2bd7ef762bc7e2db2238 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 20:37:10 -0300 Subject: [PATCH 03/98] Refactor ConversationChain and LLMCheckerChain components --- .../components/chains/ConversationChain.py | 6 ++--- .../components/chains/LLMCheckerChain.py | 19 ++++++++++----- .../components/chains/LLMMathChain.py | 23 +++++++++++++++---- 3 files changed, 34 insertions(+), 14 deletions(-) diff --git a/src/backend/langflow/components/chains/ConversationChain.py b/src/backend/langflow/components/chains/ConversationChain.py index 43f71e67b..3183954a3 100644 --- a/src/backend/langflow/components/chains/ConversationChain.py +++ b/src/backend/langflow/components/chains/ConversationChain.py @@ -1,9 +1,9 @@ -from typing import Callable, Optional, Union +from typing import Optional from langchain.chains import ConversationChain from langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, BaseMemory, Chain, Text +from langflow.field_typing import BaseLanguageModel, BaseMemory, Text class ConversationChainComponent(CustomComponent): @@ -26,7 +26,7 @@ class ConversationChainComponent(CustomComponent): inputs: str, llm: BaseLanguageModel, memory: Optional[BaseMemory] = None, - ) -> Union[Chain, Callable, Text]: + ) -> Text: if memory is None: chain = ConversationChain(llm=llm) else: diff --git a/src/backend/langflow/components/chains/LLMCheckerChain.py b/src/backend/langflow/components/chains/LLMCheckerChain.py index 527cafbb7..bfee0b5a9 100644 --- a/src/backend/langflow/components/chains/LLMCheckerChain.py +++ b/src/backend/langflow/components/chains/LLMCheckerChain.py @@ -1,14 +1,15 @@ -from typing import Callable, Union - from langchain.chains import LLMCheckerChain + from langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, Chain +from langflow.field_typing import BaseLanguageModel, Text class LLMCheckerChainComponent(CustomComponent): display_name = "LLMCheckerChain" description = "" - documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_checker" + documentation = ( + "https://python.langchain.com/docs/modules/chains/additional/llm_checker" + ) def build_config(self): return { @@ -17,6 +18,12 @@ class LLMCheckerChainComponent(CustomComponent): def build( self, + inputs: str, llm: BaseLanguageModel, - ) -> Union[Chain, Callable]: - return LLMCheckerChain.from_llm(llm=llm) + ) -> Text: + + chain = LLMCheckerChain.from_llm(llm=llm) + response = chain.invoke({chain.input_key: inputs}) + result = response.get(chain.output_key) + self.status = result + return result diff --git 
a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/langflow/components/chains/LLMMathChain.py
index 28f430e6d..919de34e6 100644
--- a/src/backend/langflow/components/chains/LLMMathChain.py
+++ b/src/backend/langflow/components/chains/LLMMathChain.py
@@ -1,15 +1,17 @@
-from typing import Callable, Optional, Union
+from typing import Optional
 
 from langchain.chains import LLMChain, LLMMathChain
 
 from langflow import CustomComponent
-from langflow.field_typing import BaseLanguageModel, BaseMemory, Chain
+from langflow.field_typing import BaseLanguageModel, BaseMemory, Text
 
 
 class LLMMathChainComponent(CustomComponent):
     display_name = "LLMMathChain"
     description = "Chain that interprets a prompt and executes python code to do math."
-    documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_math"
+    documentation = (
+        "https://python.langchain.com/docs/modules/chains/additional/llm_math"
+    )
 
     def build_config(self):
         return {
@@ -22,10 +24,21 @@ class LLMMathChainComponent(CustomComponent):
 
     def build(
         self,
+        inputs: Text,
         llm: BaseLanguageModel,
         llm_chain: LLMChain,
         input_key: str = "question",
         output_key: str = "answer",
         memory: Optional[BaseMemory] = None,
-    ) -> Union[LLMMathChain, Callable, Chain]:
-        return LLMMathChain(llm=llm, llm_chain=llm_chain, input_key=input_key, output_key=output_key, memory=memory)
+    ) -> Text:
+        chain = LLMMathChain(
+            llm=llm,
+            llm_chain=llm_chain,
+            input_key=input_key,
+            output_key=output_key,
+            memory=memory,
+        )
+        response = chain.invoke({input_key: inputs})
+        result = response.get(output_key)
+        self.status = result
+        return result

From 6544a618b656e1f1a5b83fc9a9b87a1da2729560 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Mon, 26 Feb 2024 20:49:40 -0300
Subject: [PATCH 04/98] Update config.yaml with chain documentation links

---
 src/backend/langflow/config.yaml | 25 -------------------------
 1 file changed, 25 deletions(-)

diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml
index 85ac1785d..df3b83434 100644
--- a/src/backend/langflow/config.yaml
+++ b/src/backend/langflow/config.yaml
@@ -11,31 +11,6 @@ agents:
     documentation: ""
   SQLAgent:
     documentation: ""
-chains:
-  # LLMChain:
-  # documentation: "https://python.langchain.com/docs/modules/chains/foundational/llm_chain"
-  LLMMathChain:
-    documentation: "https://python.langchain.com/docs/modules/chains/additional/llm_math"
-  LLMCheckerChain:
-    documentation: "https://python.langchain.com/docs/modules/chains/additional/llm_checker"
-  # ConversationChain:
-  # documentation: ""
-  SeriesCharacterChain:
-    documentation: ""
-  MidJourneyPromptChain:
-    documentation: ""
-  TimeTravelGuideChain:
-    documentation: ""
-  SQLDatabaseChain:
-    documentation: ""
-  RetrievalQA:
-    documentation: "https://python.langchain.com/docs/modules/chains/popular/vector_db_qa"
-  RetrievalQAWithSourcesChain:
-    documentation: ""
-  ConversationalRetrievalChain:
-    documentation: "https://python.langchain.com/docs/modules/chains/popular/chat_vector_db"
-  CombineDocsChain:
-    documentation: ""
 documentloaders:
   AirbyteJSONLoader:
     documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json"

From 0e3b970570d04e2029f6c2b4cbd46391a12d434a Mon Sep 17 00:00:00 2001
From: igorrCarvalho
Date: Mon, 26 Feb 2024 21:08:44 -0300
Subject: [PATCH 05/98] Fix: Vectara minimize

---
 src/frontend/src/CustomNodes/GenericNode/index.tsx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git
a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx
index 18ce539c6..c42a6474d 100644
--- a/src/frontend/src/CustomNodes/GenericNode/index.tsx
+++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx
@@ -135,7 +135,7 @@ export default function GenericNode({
   const iconName = iconElement || (data.node?.flow ? "group_components" : name);
   const iconClassName = `generic-node-icon ${
-    !showNode ? "absolute inset-x-6 h-12 w-12" : ""
+    !showNode ? " absolute inset-x-6 h-12 w-12 " : ""
   }`;
   if (iconElement && isEmoji) {
     return nodeIconFragment(iconElement);
   }
@@ -295,7 +295,7 @@
{iconNodeRender()} From 7be62430dba891cc582bdd0b0f5ded4418e9d72e Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Mon, 26 Feb 2024 21:13:37 -0300 Subject: [PATCH 06/98] Feat: Add delay to handle tooltip --- .../GenericNode/components/parameterComponent/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 917103260..0aae88857 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -265,7 +265,7 @@ export default function ParameterComponent({
From b837b350f024deb286f9ac96df8f4cc71ffdc988 Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Mon, 26 Feb 2024 21:21:31 -0300 Subject: [PATCH 07/98] Refactor: Change toolbar shortcut icon color --- .../pages/FlowPage/components/nodeToolbarComponent/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx index e205a4616..f5b521dd5 100644 --- a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx @@ -431,7 +431,7 @@ export default function NodeToolbarComponent({
From cca7cb7a4641863e06a34b45b0badab19e01c710 Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Mon, 26 Feb 2024 21:28:04 -0300 Subject: [PATCH 08/98] Refactor: add apply class to dark-mode error border --- src/frontend/src/CustomNodes/GenericNode/index.tsx | 2 +- src/frontend/src/style/applies.css | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index 8d59ac002..1753d50d7 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -212,7 +212,7 @@ export default function GenericNode({ } if (buildStatus === BuildStatus.BUILT && isInvalid) { return isDark - ? "border-none ring ring-[#751C1C]" + ? "built-invalid-status-dark" : "built-invalid-status"; } else if (buildStatus === BuildStatus.BUILDING) { return "building-status"; diff --git a/src/frontend/src/style/applies.css b/src/frontend/src/style/applies.css index 794032875..628d74cbd 100644 --- a/src/frontend/src/style/applies.css +++ b/src/frontend/src/style/applies.css @@ -315,6 +315,9 @@ .built-invalid-status { @apply border-none ring ring-[#FF9090]; } + .built-invalid-status-dark { + @apply border-none ring ring-[#751C1C] + } .building-status { @apply border-none ring; } From ae700d77375f780fdbecd9d191b91d80129d873b Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Mon, 26 Feb 2024 21:30:52 -0300 Subject: [PATCH 09/98] Fix: remove second tooltip from node name --- src/frontend/src/CustomNodes/GenericNode/index.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index 1753d50d7..32339e453 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -331,7 +331,7 @@ export default function GenericNode({ ) : (
- +
{ if (nameEditable) { @@ -346,7 +346,7 @@ export default function GenericNode({ > {data.node?.display_name}
-
+ {nameEditable && (
{ From 311dcc812d96ecdc3c3d5cf2cec01beffd3a0e80 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 21:48:23 -0300 Subject: [PATCH 10/98] Refactor vertex types and add filtering for vertex edges --- src/backend/langflow/graph/graph/base.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 1ea34fd51..400ae99b9 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -9,8 +9,13 @@ from langflow.graph.graph.constants import lazy_load_vertex_dict from langflow.graph.graph.utils import process_flow from langflow.graph.schema import InterfaceComponentTypes from langflow.graph.vertex.base import Vertex -from langflow.graph.vertex.types import (ChatVertex, FileToolVertex, LLMVertex, - RoutingVertex, ToolkitVertex) +from langflow.graph.vertex.types import ( + ChatVertex, + FileToolVertex, + LLMVertex, + RoutingVertex, + ToolkitVertex, +) from langflow.interface.tools.constants import FILE_TOOLS from langflow.utils import payload @@ -317,12 +322,20 @@ class Graph: except KeyError: raise ValueError(f"Vertex {vertex_id} not found") - def get_vertex_edges(self, vertex_id: str) -> List[ContractEdge]: + def get_vertex_edges( + self, + vertex_id: str, + is_target: Optional[bool] = None, + is_source: Optional[bool] = None, + ) -> List[ContractEdge]: """Returns a list of edges for a given vertex.""" + # The idea here is to return the edges that have the vertex_id as source or target + # or both return [ edge for edge in self.edges - if edge.source_id == vertex_id or edge.target_id == vertex_id + if (edge.source_id == vertex_id and is_source is not False) + or (edge.target_id == vertex_id and is_target is not False) ] def get_vertices_with_target(self, vertex_id: str) -> List[Vertex]: From 38ed38d64ce395b8232c731bc7cc1682410e3d72 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 21:48:52 -0300 Subject: [PATCH 11/98] Refactor vertex class and add custom component functionality --- src/backend/langflow/graph/vertex/base.py | 16 ++- .../custom_component/custom_component.py | 55 +++++++-- .../langflow/interface/initialize/loading.py | 113 ++++++++++++++---- 3 files changed, 150 insertions(+), 34 deletions(-) diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index d31edfbce..8addb1ac7 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -2,8 +2,7 @@ import ast import inspect import types from enum import Enum -from typing import (TYPE_CHECKING, Any, Callable, Coroutine, Dict, List, - Optional) +from typing import TYPE_CHECKING, Any, Callable, Coroutine, Dict, List, Optional from loguru import logger @@ -41,6 +40,7 @@ class Vertex: ) -> None: # is_external means that the Vertex send or receives data from # an external source (e.g the chat) + self._custom_component = None self.has_external_input = False self.has_external_output = False self.graph = graph @@ -202,6 +202,7 @@ class Vertex: self.output = self.data["node"]["base_classes"] self.display_name = self.data["node"]["display_name"] self.pinned = self.data["node"].get("pinned", False) + self.selected_output_type = self.data["node"].get("selected_output_type") template_dicts = { key: value for key, value in self.data["node"]["template"].items() @@ -500,11 +501,17 @@ class Vertex: if self.base_type is None: raise 
ValueError(f"Base type for node {self.display_name} not found") try: + outgoing_edges = self.graph.get_vertex_edges( + self.id, is_source=True, is_target=False + ) + result = await loading.instantiate_class( node_type=self.vertex_type, base_type=self.base_type, params=self.params, user_id=user_id, + outgoing_edges=outgoing_edges, + selected_output_type=self.selected_output_type, ) self._update_built_object_and_artifacts(result) except Exception as exc: @@ -518,7 +525,10 @@ class Vertex: Updates the built object and its artifacts. """ if isinstance(result, tuple): - self._built_object, self.artifacts = result + if len(result) == 2: + self._built_object, self.artifacts = result + elif len(result) == 3: + self._custom_component, self._built_object, self.artifacts = result else: self._built_object = result diff --git a/src/backend/langflow/interface/custom/custom_component/custom_component.py b/src/backend/langflow/interface/custom/custom_component/custom_component.py index 3534baf2a..7d8794878 100644 --- a/src/backend/langflow/interface/custom/custom_component/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component/custom_component.py @@ -1,6 +1,15 @@ import operator from pathlib import Path -from typing import Any, Callable, ClassVar, List, Optional, Sequence, Union +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + List, + Optional, + Sequence, + Union, +) from uuid import UUID import yaml @@ -24,6 +33,9 @@ from langflow.services.deps import ( from langflow.services.storage.service import StorageService from langflow.utils import validate +if TYPE_CHECKING: + from langflow.graph.edge.base import ContractEdge + class CustomComponent(Component): display_name: Optional[str] = None @@ -40,6 +52,12 @@ class CustomComponent(Component): """The field order of the component. Defaults to an empty list.""" pinned: Optional[bool] = False """The default pinned state of the component. Defaults to False.""" + build_parameters: Optional[dict] = None + """The build parameters of the component. Defaults to None.""" + selected_output_type: Optional[str] = None + """The selected output type of the component. Defaults to None.""" + outgoing_edges: Optional[List["ContractEdge"]] = None + """The edge target parameter of the component. Defaults to None.""" code_class_base_inheritance: ClassVar[str] = "CustomComponent" function_entrypoint_name: ClassVar[str] = "build" function: Optional[Callable] = None @@ -88,7 +106,9 @@ class CustomComponent(Component): def tree(self): return self.get_code_tree(self.code or "") - def to_records(self, data: Any, text_key: str = "text", data_key: str = "data") -> List[dict]: + def to_records( + self, data: Any, text_key: str = "text", data_key: str = "data" + ) -> List[dict]: """ Convert data into a list of records. @@ -115,7 +135,9 @@ class CustomComponent(Component): return records - def create_references_from_records(self, records: List[dict], include_data: bool = False) -> str: + def create_references_from_records( + self, records: List[dict], include_data: bool = False + ) -> str: """ Create references from a list of records. @@ -150,7 +172,8 @@ class CustomComponent(Component): detail={ "error": "Type hint Error", "traceback": ( - "Prompt type is not supported in the build method." " Try using PromptTemplate instead." + "Prompt type is not supported in the build method." + " Try using PromptTemplate instead." 
), }, ) @@ -164,14 +187,20 @@ class CustomComponent(Component): if not self.code: return {} - component_classes = [cls for cls in self.tree["classes"] if self.code_class_base_inheritance in cls["bases"]] + component_classes = [ + cls + for cls in self.tree["classes"] + if self.code_class_base_inheritance in cls["bases"] + ] if not component_classes: return {} # Assume the first Component class is the one we're interested in component_class = component_classes[0] build_methods = [ - method for method in component_class["methods"] if method["name"] == self.function_entrypoint_name + method + for method in component_class["methods"] + if method["name"] == self.function_entrypoint_name ] return build_methods[0] if build_methods else {} @@ -228,7 +257,9 @@ class CustomComponent(Component): # Retrieve and decrypt the credential by name for the current user db_service = get_db_service() with session_getter(db_service) as session: - return credential_service.get_credential(user_id=self._user_id or "", name=name, session=session) + return credential_service.get_credential( + user_id=self._user_id or "", name=name, session=session + ) return get_credential @@ -238,7 +269,9 @@ class CustomComponent(Component): credential_service = get_credential_service() db_service = get_db_service() with session_getter(db_service) as session: - return credential_service.list_credentials(user_id=self._user_id, session=session) + return credential_service.list_credentials( + user_id=self._user_id, session=session + ) def index(self, value: int = 0): """Returns a function that returns the value at the given index in the iterable.""" @@ -289,7 +322,11 @@ class CustomComponent(Component): if flow_id: flow = session.query(Flow).get(flow_id) elif flow_name: - flow = (session.query(Flow).filter(Flow.name == flow_name).filter(Flow.user_id == self.user_id)).first() + flow = ( + session.query(Flow) + .filter(Flow.name == flow_name) + .filter(Flow.user_id == self.user_id) + ).first() else: raise ValueError("Either flow_name or flow_id must be provided") diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index 83bd67321..6694da26a 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -1,6 +1,6 @@ import inspect import json -from typing import TYPE_CHECKING, Any, Callable, Dict, Sequence, Type +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Sequence, Type import orjson from langchain.agents import agent as agent_module @@ -34,16 +34,27 @@ from langflow.utils import validate if TYPE_CHECKING: from langflow import CustomComponent + from langflow.graph.edge.base import ContractEdge def build_vertex_in_params(params: Dict) -> Dict: from langflow.graph.vertex.base import Vertex # If any of the values in params is a Vertex, we will build it - return {key: value.build() if isinstance(value, Vertex) else value for key, value in params.items()} + return { + key: value.build() if isinstance(value, Vertex) else value + for key, value in params.items() + } -async def instantiate_class(node_type: str, base_type: str, params: Dict, user_id=None) -> Any: +async def instantiate_class( + node_type: str, + base_type: str, + params: Dict, + user_id=None, + outgoing_edges: Optional[List["ContractEdge"]] = None, + selected_output_type: Optional[str] = None, +) -> Any: """Instantiate class from module type and key, and params""" params = convert_params_to_sets(params) params = 
convert_kwargs(params) @@ -55,7 +66,15 @@ async def instantiate_class(node_type: str, base_type: str, params: Dict, user_i return custom_node(**params) logger.debug(f"Instantiating {node_type} of type {base_type}") class_object = import_by_type(_type=base_type, name=node_type) - return await instantiate_based_on_type(class_object, base_type, node_type, params, user_id=user_id) + return await instantiate_based_on_type( + class_object, + base_type, + node_type, + params, + user_id=user_id, + outgoing_edges=outgoing_edges, + selected_output_type=selected_output_type, + ) def convert_params_to_sets(params): @@ -82,7 +101,15 @@ def convert_kwargs(params): return params -async def instantiate_based_on_type(class_object, base_type, node_type, params, user_id): +async def instantiate_based_on_type( + class_object, + base_type, + node_type, + params, + user_id, + outgoing_edges, + selected_output_type, +): if base_type == "agents": return instantiate_agent(node_type, class_object, params) elif base_type == "prompts": @@ -116,17 +143,33 @@ async def instantiate_based_on_type(class_object, base_type, node_type, params, elif base_type == "memory": return instantiate_memory(node_type, class_object, params) elif base_type == "custom_components": - return await instantiate_custom_component(node_type, class_object, params, user_id) + return await instantiate_custom_component( + node_type, + class_object, + params, + user_id, + outgoing_edges, + selected_output_type, + ) elif base_type == "wrappers": return instantiate_wrapper(node_type, class_object, params) else: return class_object(**params) -async def instantiate_custom_component(node_type, class_object, params, user_id): +async def instantiate_custom_component( + node_type, class_object, params, user_id, outgoing_edges, selected_output_type +): params_copy = params.copy() - class_object: Type["CustomComponent"] = eval_custom_component_code(params_copy.pop("code")) - custom_component: "CustomComponent" = class_object(user_id=user_id) + class_object: Type["CustomComponent"] = eval_custom_component_code( + params_copy.pop("code") + ) + custom_component: "CustomComponent" = class_object( + user_id=user_id, + parameters=params_copy, + outgoing_edges=outgoing_edges, + selected_output_type=selected_output_type, + ) if "retriever" in params_copy and hasattr(params_copy["retriever"], "as_retriever"): params_copy["retriever"] = params_copy["retriever"].as_retriever() @@ -141,7 +184,7 @@ async def instantiate_custom_component(node_type, class_object, params, user_id) # Call the build method directly if it's sync build_result = custom_component.build(**params_copy) - return build_result, {"repr": custom_component.custom_repr()} + return custom_component, build_result, {"repr": custom_component.custom_repr()} def instantiate_wrapper(node_type, class_object, params): @@ -194,7 +237,9 @@ def instantiate_memory(node_type, class_object, params): # I want to catch a specific attribute error that happens # when the object does not have a cursor attribute except Exception as exc: - if "object has no attribute 'cursor'" in str(exc) or 'object has no field "conn"' in str(exc): + if "object has no attribute 'cursor'" in str( + exc + ) or 'object has no field "conn"' in str(exc): raise AttributeError( ( "Failed to build connection to database." 
@@ -237,7 +282,9 @@ def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params: if class_method := getattr(class_object, method, None): agent = class_method(**params) tools = params.get("tools", []) - return AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, handle_parsing_errors=True) + return AgentExecutor.from_agent_and_tools( + agent=agent, tools=tools, handle_parsing_errors=True + ) return load_agent_executor(class_object, params) @@ -293,7 +340,11 @@ def instantiate_embedding(node_type, class_object, params: Dict): try: return class_object(**params) except ValidationError: - params = {key: value for key, value in params.items() if key in class_object.model_fields} + params = { + key: value + for key, value in params.items() + if key in class_object.model_fields + } return class_object(**params) @@ -305,7 +356,9 @@ def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict): if "texts" in params: params["documents"] = params.pop("texts") if "documents" in params: - params["documents"] = [doc for doc in params["documents"] if isinstance(doc, Document)] + params["documents"] = [ + doc for doc in params["documents"] if isinstance(doc, Document) + ] if initializer := vecstore_initializer.get(class_object.__name__): vecstore = initializer(class_object, params) else: @@ -320,7 +373,9 @@ def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict): return vecstore -def instantiate_documentloader(node_type: str, class_object: Type[BaseLoader], params: Dict): +def instantiate_documentloader( + node_type: str, class_object: Type[BaseLoader], params: Dict +): if "file_filter" in params: # file_filter will be a string but we need a function # that will be used to filter the files using file_filter @@ -329,13 +384,17 @@ def instantiate_documentloader(node_type: str, class_object: Type[BaseLoader], p # in x and if it is, we will return True file_filter = params.pop("file_filter") extensions = file_filter.split(",") - params["file_filter"] = lambda x: any(extension.strip() in x for extension in extensions) + params["file_filter"] = lambda x: any( + extension.strip() in x for extension in extensions + ) metadata = params.pop("metadata", None) if metadata and isinstance(metadata, str): try: metadata = orjson.loads(metadata) except json.JSONDecodeError as exc: - raise ValueError("The metadata you provided is not a valid JSON string.") from exc + raise ValueError( + "The metadata you provided is not a valid JSON string." + ) from exc if node_type == "WebBaseLoader": if web_path := params.pop("web_path", None): @@ -368,12 +427,16 @@ def instantiate_textsplitter( "Try changing the chunk_size of the Text Splitter." 
) from exc - if ("separator_type" in params and params["separator_type"] == "Text") or "separator_type" not in params: + if ( + "separator_type" in params and params["separator_type"] == "Text" + ) or "separator_type" not in params: params.pop("separator_type", None) # separators might come in as an escaped string like \\n # so we need to convert it to a string if "separators" in params: - params["separators"] = params["separators"].encode().decode("unicode-escape") + params["separators"] = ( + params["separators"].encode().decode("unicode-escape") + ) text_splitter = class_object(**params) else: from langchain.text_splitter import Language @@ -400,7 +463,8 @@ def replace_zero_shot_prompt_with_prompt_template(nodes): tools = [ tool for tool in nodes - if tool["type"] != "chatOutputNode" and "Tool" in tool["data"]["node"]["base_classes"] + if tool["type"] != "chatOutputNode" + and "Tool" in tool["data"]["node"]["base_classes"] ] node["data"] = build_prompt_template(prompt=node["data"], tools=tools) break @@ -414,7 +478,9 @@ def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs) # agent has hidden args for memory. might need to be support # memory = params["memory"] # if allowed_tools is not a list or set, make it a list - if not isinstance(allowed_tools, (list, set)) and isinstance(allowed_tools, BaseTool): + if not isinstance(allowed_tools, (list, set)) and isinstance( + allowed_tools, BaseTool + ): allowed_tools = [allowed_tools] tool_names = [tool.name for tool in allowed_tools] # Agent class requires an output_parser but Agent classes @@ -442,7 +508,10 @@ def build_prompt_template(prompt, tools): format_instructions = prompt["node"]["template"]["format_instructions"]["value"] tool_strings = "\n".join( - [f"{tool['data']['node']['name']}: {tool['data']['node']['description']}" for tool in tools] + [ + f"{tool['data']['node']['name']}: {tool['data']['node']['description']}" + for tool in tools + ] ) tool_names = ", ".join([tool["data"]["node"]["name"] for tool in tools]) format_instructions = format_instructions.format(tool_names=tool_names) From 635f8d785876c84563690acd8a2671426f8389b0 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 21:50:46 -0300 Subject: [PATCH 12/98] Update dependencies in pyproject.toml --- poetry.lock | 392 +++++++++++++++++++++++++++++++++++++++++++++---- pyproject.toml | 4 +- 2 files changed, 368 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index b3dc0efeb..9e3d5480d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -418,17 +418,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.49" +version = "1.34.50" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.49-py3-none-any.whl", hash = "sha256:ce8d1de03024f52a1810e8d71ad4dba3a5b9bb48b35567191500e3432a9130b4"}, - {file = "boto3-1.34.49.tar.gz", hash = "sha256:96b9dc85ce8d52619b56ca7b1ac1423eaf0af5ce132904bcc8aa81396eec2abf"}, + {file = "boto3-1.34.50-py3-none-any.whl", hash = "sha256:8d709365231234bc4f0ca98fdf33a25eeebf78072853c6aa3d259f0f5cf09877"}, + {file = "boto3-1.34.50.tar.gz", hash = "sha256:290952be7899560039cb0042e8a2354f61a7dead0d0ca8bea6ba901930df0468"}, ] [package.dependencies] -botocore = ">=1.34.49,<1.35.0" +botocore = ">=1.34.50,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -437,13 +437,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.49" +version = "1.34.50" description = 
"Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.49-py3-none-any.whl", hash = "sha256:4ed9d7603a04b5bb5bd5de63b513bc2c8a7e8b1cd0088229c5ceb461161f43b6"}, - {file = "botocore-1.34.49.tar.gz", hash = "sha256:d89410bc60673eaff1699f3f1fdcb0e3a5e1f7a6a048c0d88c3ce5c3549433ec"}, + {file = "botocore-1.34.50-py3-none-any.whl", hash = "sha256:fda510559dbe796eefdb59561cc81be1b99afba3dee53fd23db9a3d587adc0ab"}, + {file = "botocore-1.34.50.tar.gz", hash = "sha256:33ab82cb96c4bb684f0dbafb071808e4817d83debc88b223e7d988256370c6d7"}, ] [package.dependencies] @@ -549,6 +549,20 @@ files = [ {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, ] +[[package]] +name = "bs4" +version = "0.0.2" +description = "Dummy package for Beautiful Soup (beautifulsoup4)" +optional = false +python-versions = "*" +files = [ + {file = "bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc"}, + {file = "bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925"}, +] + +[package.dependencies] +beautifulsoup4 = "*" + [[package]] name = "build" version = "1.0.3" @@ -575,13 +589,13 @@ virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -2893,13 +2907,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.29.2" +version = "6.29.3" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.29.2-py3-none-any.whl", hash = "sha256:50384f5c577a260a1d53f1f59a828c7266d321c9b7d00d345693783f66616055"}, - {file = "ipykernel-6.29.2.tar.gz", hash = "sha256:3bade28004e3ff624ed57974948116670604ac5f676d12339693f3142176d3f0"}, + {file = "ipykernel-6.29.3-py3-none-any.whl", hash = "sha256:5aa086a4175b0229d4eca211e181fb473ea78ffd9869af36ba7694c947302a21"}, + {file = "ipykernel-6.29.3.tar.gz", hash = "sha256:e14c250d1f9ea3989490225cc1a542781b095a18a19447fcf2b5eaf7d0ac5bd2"}, ] [package.dependencies] @@ -2922,7 +2936,7 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.4)", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" @@ -3433,13 +3447,13 @@ llama-index = ["llama-index (>=0.10.6,<0.11.0)"] [[package]] name = "langsmith" -version = "0.1.8" +version = "0.1.9" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation 
Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.1.8-py3-none-any.whl", hash = "sha256:f4320fd80ec9d311a648e7d4c44e0814e6e5454772c5026f40db0307bc07e287"}, - {file = "langsmith-0.1.8.tar.gz", hash = "sha256:ab5f1cdfb7d418109ea506d41928fb8708547db2f6c7f7da7cfe997f3c55767b"}, + {file = "langsmith-0.1.9-py3-none-any.whl", hash = "sha256:f821b3cb07a87eac5cb2181ff0b61051811e4eef09ae4b46e700981f7ae5dfb9"}, + {file = "langsmith-0.1.9.tar.gz", hash = "sha256:9bd3e80607722c3d2db84cf3440005491a859b80b5e499bc988032d5c2da91f0"}, ] [package.dependencies] @@ -3488,13 +3502,142 @@ test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"] [[package]] name = "llama-index" +version = "0.10.13.post1" +description = "Interface between LLMs and your data" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index-0.10.13.post1-py3-none-any.whl", hash = "sha256:3a1281eb5b5505d3c4b5d8da036561e267c5b9311bd3ddbeeab3e1eeb92df86a"}, + {file = "llama_index-0.10.13.post1.tar.gz", hash = "sha256:55a8bb34b4f538fb33f6db914d89ad2dbc7ae5e0ec24d8bc4238ed05ff502ee2"}, +] + +[package.dependencies] +llama-index-agent-openai = ">=0.1.4,<0.2.0" +llama-index-cli = ">=0.1.2,<0.2.0" +llama-index-core = ">=0.10.13,<0.11.0" +llama-index-embeddings-openai = ">=0.1.5,<0.2.0" +llama-index-indices-managed-llama-cloud = ">=0.1.2,<0.2.0" +llama-index-legacy = ">=0.9.48,<0.10.0" +llama-index-llms-openai = ">=0.1.5,<0.2.0" +llama-index-multi-modal-llms-openai = ">=0.1.3,<0.2.0" +llama-index-program-openai = ">=0.1.3,<0.2.0" +llama-index-question-gen-openai = ">=0.1.2,<0.2.0" +llama-index-readers-file = ">=0.1.4,<0.2.0" +llama-index-readers-llama-parse = ">=0.1.2,<0.2.0" + +[[package]] +name = "llama-index-agent-openai" +version = "0.1.5" +description = "llama-index agent openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_agent_openai-0.1.5-py3-none-any.whl", hash = "sha256:1ab06fe853d9d391ba724dcb0009b249ae88ca4de4b5842226373b0c55ee435a"}, + {file = "llama_index_agent_openai-0.1.5.tar.gz", hash = "sha256:42099326d526af140493c5f744ef70bef0aed8a941b6c9aea4b3eff9c63f0ba6"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" +llama-index-llms-openai = ">=0.1.5,<0.2.0" + +[[package]] +name = "llama-index-cli" +version = "0.1.5" +description = "llama-index cli" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_cli-0.1.5-py3-none-any.whl", hash = "sha256:a0fcfc3239d8b05158558423ca5c1a426d2a455eab44128b2b786cab566f74ad"}, + {file = "llama_index_cli-0.1.5.tar.gz", hash = "sha256:e2493ff7ecfd1983fd15c28c6c0c7bfdba66662c1d8960f6aea229db3d7fafda"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.11.post1,<0.11.0" +llama-index-embeddings-openai = ">=0.1.1,<0.2.0" +llama-index-llms-openai = ">=0.1.1,<0.2.0" +llama-index-vector-stores-chroma = ">=0.1.1,<0.2.0" + +[[package]] +name = "llama-index-core" +version = "0.10.13" +description = "Interface between LLMs and your data" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_core-0.10.13-py3-none-any.whl", hash = "sha256:40c76fc02be7cd948a333ca541f2ff38cf02774e1c960674e2b68c61943bac90"}, + {file = "llama_index_core-0.10.13.tar.gz", hash = "sha256:826fded00767923fba8aca94f46c32b259e8879f517016ab7a3801b1b37187a1"}, +] + +[package.dependencies] +aiohttp = ">=3.8.6,<4.0.0" +dataclasses-json = "*" +deprecated = ">=1.2.9.3" +dirtyjson = ">=1.0.8,<2.0.0" +fsspec = ">=2023.5.0" +httpx = 
"*" +llamaindex-py-client = ">=0.1.13,<0.2.0" +nest-asyncio = ">=1.5.8,<2.0.0" +networkx = ">=3.0" +nltk = ">=3.8.1,<4.0.0" +numpy = "*" +openai = ">=1.1.0" +pandas = "*" +pillow = ">=9.0.0" +PyYAML = ">=6.0.1" +requests = ">=2.31.0" +SQLAlchemy = {version = ">=1.4.49", extras = ["asyncio"]} +tenacity = ">=8.2.0,<9.0.0" +tiktoken = ">=0.3.3" +tqdm = ">=4.66.1,<5.0.0" +typing-extensions = ">=4.5.0" +typing-inspect = ">=0.8.0" + +[package.extras] +gradientai = ["gradientai (>=1.4.0)"] +html = ["beautifulsoup4 (>=4.12.2,<5.0.0)"] +langchain = ["langchain (>=0.0.303)"] +local-models = ["optimum[onnxruntime] (>=1.13.2,<2.0.0)", "sentencepiece (>=0.1.99,<0.2.0)", "transformers[torch] (>=4.33.1,<5.0.0)"] +postgres = ["asyncpg (>=0.28.0,<0.29.0)", "pgvector (>=0.1.0,<0.2.0)", "psycopg2-binary (>=2.9.9,<3.0.0)"] +query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "lm-format-enforcer (>=0.4.3,<0.5.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "scikit-learn", "spacy (>=3.7.1,<4.0.0)"] + +[[package]] +name = "llama-index-embeddings-openai" +version = "0.1.6" +description = "llama-index embeddings openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_embeddings_openai-0.1.6-py3-none-any.whl", hash = "sha256:f8b2dded0718e9f57c08ce352d186941e6acf7de414c64219210b66f7a6d6d2d"}, + {file = "llama_index_embeddings_openai-0.1.6.tar.gz", hash = "sha256:f12f0ef6f92211efe1a022a97bb68fc8731c93bd20df3b0567dba69c610033db"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" + +[[package]] +name = "llama-index-indices-managed-llama-cloud" +version = "0.1.3" +description = "llama-index indices llama-cloud integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_indices_managed_llama_cloud-0.1.3-py3-none-any.whl", hash = "sha256:9fe2823855f00bf8b091be008ce953b9a9c5d4b2d976b54ab0d37877c83457f5"}, + {file = "llama_index_indices_managed_llama_cloud-0.1.3.tar.gz", hash = "sha256:5db725cb7db675019dc65e38153890802e2ae89838c127c19d3184efc46ea28b"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.0,<0.11.0" +llamaindex-py-client = ">=0.1.13,<0.2.0" + +[[package]] +name = "llama-index-legacy" version = "0.9.48" description = "Interface between LLMs and your data" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "llama_index-0.9.48-py3-none-any.whl", hash = "sha256:56aa406d39e7ca53a5d990b55d69901fbb9eddc9af6a40950367dc5d734f6283"}, - {file = "llama_index-0.9.48.tar.gz", hash = "sha256:c50d02ac8c7e4ff9fb41f0860391fe0020ad8a3d7c30048db52d17d8be654bf3"}, + {file = "llama_index_legacy-0.9.48-py3-none-any.whl", hash = "sha256:714ada95beac179b4acefa4d2deff74bb7b2f22b0f699ac247d4cb67738d16d4"}, + {file = "llama_index_legacy-0.9.48.tar.gz", hash = "sha256:82ddc4691edbf49533d65582c249ba22c03fe96fbd3e92f7758dccef28e43834"}, ] [package.dependencies] @@ -3525,6 +3668,146 @@ local-models = ["optimum[onnxruntime] (>=1.13.2,<2.0.0)", "sentencepiece (>=0.1. 
postgres = ["asyncpg (>=0.28.0,<0.29.0)", "pgvector (>=0.1.0,<0.2.0)", "psycopg2-binary (>=2.9.9,<3.0.0)"] query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "lm-format-enforcer (>=0.4.3,<0.5.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "scikit-learn", "spacy (>=3.7.1,<4.0.0)"] +[[package]] +name = "llama-index-llms-openai" +version = "0.1.6" +description = "llama-index llms openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_llms_openai-0.1.6-py3-none-any.whl", hash = "sha256:4260ad31c3444e97ec8a8d061cb6dbf1074262b82341a2b69d2b27e8a23efe62"}, + {file = "llama_index_llms_openai-0.1.6.tar.gz", hash = "sha256:15530dfa3893b15c5576ebc71e01b77acbf47abd689219436fdf7b6ca567a9fd"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" + +[[package]] +name = "llama-index-multi-modal-llms-openai" +version = "0.1.4" +description = "llama-index multi-modal-llms openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_multi_modal_llms_openai-0.1.4-py3-none-any.whl", hash = "sha256:03b887d110551d5d5b99b9fd110824e6311f2e31f4d5e67dafd2ee66da32818d"}, + {file = "llama_index_multi_modal_llms_openai-0.1.4.tar.gz", hash = "sha256:6a5d6584c33a9d1b06cf5c874c63af2603fc93b660bde481a8c547e876c6e2c3"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" +llama-index-llms-openai = ">=0.1.1,<0.2.0" + +[[package]] +name = "llama-index-program-openai" +version = "0.1.4" +description = "llama-index program openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_program_openai-0.1.4-py3-none-any.whl", hash = "sha256:cfa8f00f3743d2fc70043e80f7c3925d23b1413a0cc7a72863ad60497a18307d"}, + {file = "llama_index_program_openai-0.1.4.tar.gz", hash = "sha256:573e99a2dd16ad3caf382c8ab28d1ac10eb2571bc9481d84a6d89806ad6aa5d4"}, +] + +[package.dependencies] +llama-index-agent-openai = ">=0.1.1,<0.2.0" +llama-index-core = ">=0.10.1,<0.11.0" +llama-index-llms-openai = ">=0.1.1,<0.2.0" + +[[package]] +name = "llama-index-question-gen-openai" +version = "0.1.3" +description = "llama-index question_gen openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_question_gen_openai-0.1.3-py3-none-any.whl", hash = "sha256:1f83b49e8b2e665030d1ec8c54687d6985d9fa8426147b64e46628a9e489b302"}, + {file = "llama_index_question_gen_openai-0.1.3.tar.gz", hash = "sha256:4486198117a45457d2e036ae60b93af58052893cc7d78fa9b6f47dd47b81e2e1"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" +llama-index-llms-openai = ">=0.1.1,<0.2.0" +llama-index-program-openai = ">=0.1.1,<0.2.0" + +[[package]] +name = "llama-index-readers-file" +version = "0.1.6" +description = "llama-index readers file integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_readers_file-0.1.6-py3-none-any.whl", hash = "sha256:f583bd90353a0c0985213af02c97aa2f2f22e702d4311fe719de91382c9ad8dd"}, + {file = "llama_index_readers_file-0.1.6.tar.gz", hash = "sha256:d9fc0ca84926d04bd757c57fe87841cd9dbc2606aab5f2ce927deec14aaa1a74"}, +] + +[package.dependencies] +beautifulsoup4 = ">=4.12.3,<5.0.0" +bs4 = ">=0.0.2,<0.0.3" +llama-index-core = ">=0.10.1,<0.11.0" +pymupdf = ">=1.23.21,<2.0.0" +pypdf = ">=4.0.1,<5.0.0" + +[[package]] +name = "llama-index-readers-llama-parse" +version = "0.1.3" +description = "llama-index readers llama-parse integration" +optional = false +python-versions = 
">=3.8.1,<4.0" +files = [ + {file = "llama_index_readers_llama_parse-0.1.3-py3-none-any.whl", hash = "sha256:f52a06a2765a2ffe6c138cf1703ab1de6249ff069ba62d80b9147e849bbcbc27"}, + {file = "llama_index_readers_llama_parse-0.1.3.tar.gz", hash = "sha256:e0ee0c393e10fc80eac644788338bbd2032050c8b8a474f3d0b5ebd08e9867fe"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.7,<0.11.0" +llama-parse = ">=0.3.3,<0.4.0" + +[[package]] +name = "llama-index-vector-stores-chroma" +version = "0.1.4" +description = "llama-index vector_stores chroma integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_vector_stores_chroma-0.1.4-py3-none-any.whl", hash = "sha256:f475a450431ee4d9b2915ba9da2112dfdfacaee1ea220b8603720be1c116786c"}, + {file = "llama_index_vector_stores_chroma-0.1.4.tar.gz", hash = "sha256:7364f2a3f8a51b83d350da39da7e7046704cfa9c848ebe8fd1c6cb39ad4878f9"}, +] + +[package.dependencies] +chromadb = ">=0.4.22,<0.5.0" +llama-index-core = ">=0.10.1,<0.11.0" +onnxruntime = ">=1.17.0,<2.0.0" +tokenizers = ">=0.15.1,<0.16.0" + +[[package]] +name = "llama-parse" +version = "0.3.4" +description = "Parse files into RAG-Optimized formats." +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_parse-0.3.4-py3-none-any.whl", hash = "sha256:b667c78d4c32fc5d0561e6e3ca6c53648a6701b436f21d0d252cd46774927660"}, + {file = "llama_parse-0.3.4.tar.gz", hash = "sha256:5a30569c390ab9089dad66cf2a8c967f8c21d77641deec0a922672df4e16cfa3"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.7" + +[[package]] +name = "llamaindex-py-client" +version = "0.1.13" +description = "" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "llamaindex_py_client-0.1.13-py3-none-any.whl", hash = "sha256:02400c90655da80ae373e0455c829465208607d72462f1898fd383fdfe8dabce"}, + {file = "llamaindex_py_client-0.1.13.tar.gz", hash = "sha256:3bd9b435ee0a78171eba412dea5674d813eb5bf36e577d3c7c7e90edc54900d9"}, +] + +[package.dependencies] +httpx = ">=0.20.0" +pydantic = ">=1.10" + [[package]] name = "locust" version = "2.23.1" @@ -3794,22 +4077,21 @@ files = [ [[package]] name = "marshmallow" -version = "3.20.2" +version = "3.21.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, - {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, + {file = "marshmallow-3.21.0-py3-none-any.whl", hash = "sha256:e7997f83571c7fd476042c2c188e4ee8a78900ca5e74bd9c8097afa56624e9bd"}, + {file = "marshmallow-3.21.0.tar.gz", hash = "sha256:20f53be28c6e374a711a16165fb22a8dc6003e3f7cda1285e3ca777b9193885b"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["pre-commit (>=2.4,<4.0)"] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -6005,6 +6287,64 @@ snappy = ["python-snappy"] test = ["pytest (>=7)"] zstd = ["zstandard"] +[[package]] +name = "pymupdf" +version = "1.23.25" +description = "A high performance Python library for data extraction, analysis, conversion & manipulation of PDF (and other) documents." +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyMuPDF-1.23.25-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:6be2b20fbff40602f673fc8e60fde3e5911397f8ca9ed6aa2d15be94b12cc2c4"}, + {file = "PyMuPDF-1.23.25-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:0f6923a44fbeaeefaabb2fa10955dcef3624e8826db661201951f3b3409fed32"}, + {file = "PyMuPDF-1.23.25-cp310-none-manylinux2014_aarch64.whl", hash = "sha256:8eeb2e97347586ec293fddaf61e8dfc58d6b2763406e8f7a6e45b560bf9b15a3"}, + {file = "PyMuPDF-1.23.25-cp310-none-manylinux2014_x86_64.whl", hash = "sha256:dca46799c152051697c5e88d66c17ba6d0244668d0c4dd8a2ba2d8d3cb745988"}, + {file = "PyMuPDF-1.23.25-cp310-none-win32.whl", hash = "sha256:88bfed1bd13ec84869489fc7b97381016cb8b99956073f4c3e8ac8c840bbb15a"}, + {file = "PyMuPDF-1.23.25-cp310-none-win_amd64.whl", hash = "sha256:98a78582c8a0c61b372e2bcd63dc61efc873e40b7d1f0b896a195e1a9ef9ffa7"}, + {file = "PyMuPDF-1.23.25-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:d7792810634036a745ea3eb3c4ccf2b6adab55ca9644e3352747d2b5aa5327f9"}, + {file = "PyMuPDF-1.23.25-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:03bd1985b0234c3d2b8e26bb3e9ab1d2641dbada1e199b838a6bf884f35224c8"}, + {file = "PyMuPDF-1.23.25-cp311-none-manylinux2014_aarch64.whl", hash = "sha256:638fcb1f7551eb5ab582e412e204e8ded94acbbc37bc7f1e891a5dfc428881ee"}, + {file = "PyMuPDF-1.23.25-cp311-none-manylinux2014_x86_64.whl", hash = "sha256:067c88b4e6609cb7e74d98d0b0a35c11eb8e29f4fc51dc7ed1dd448b81d347c7"}, + {file = "PyMuPDF-1.23.25-cp311-none-win32.whl", hash = "sha256:a694f160d1701285cf3152951430740878d168511cd9ea0a3adcfaf3cac00322"}, + {file = "PyMuPDF-1.23.25-cp311-none-win_amd64.whl", hash = "sha256:514bcb679926b33413637b0bd73b223c90fb0d19352caf3395d0f23b1d47e8af"}, + {file = "PyMuPDF-1.23.25-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:bba342321e1b5574631894d7d34ec046605d953a23553b7d2f9c0e4d3c27254b"}, + {file = "PyMuPDF-1.23.25-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:b2cb058c8229f9697deebe0574f7d95e4b9a5e295ceafd554346bbd464141e89"}, + {file = "PyMuPDF-1.23.25-cp312-none-manylinux2014_aarch64.whl", hash 
= "sha256:2479473b533936593428ce78499a1e9901570110ac602f03f1f3174efa0fa6a8"}, + {file = "PyMuPDF-1.23.25-cp312-none-manylinux2014_x86_64.whl", hash = "sha256:a247a4be1e43a6127ee305eae9f65767ee7519a2aa0cb1a2aa6acfd4e7fe7a9b"}, + {file = "PyMuPDF-1.23.25-cp312-none-win32.whl", hash = "sha256:b062be400bbaff6e8b17c0a8da9481e01ec935f97967e0870e9aacd7ba60a52a"}, + {file = "PyMuPDF-1.23.25-cp312-none-win_amd64.whl", hash = "sha256:b12e608761e1586a65f6e96a34417a91f814dbab29f2929b41d825ab32fab6ef"}, + {file = "PyMuPDF-1.23.25-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:ac97691c0e0e23607626d394bd660a46ea33f64921dc9288cf24daee207f9fe3"}, + {file = "PyMuPDF-1.23.25-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:c0a16cda5dc9b59d494ae23bdd9c4a3db53d04f2b6390265f5c0fe6269777975"}, + {file = "PyMuPDF-1.23.25-cp38-none-manylinux2014_aarch64.whl", hash = "sha256:23d735db51722a889bb50636d161d2747f08fa0b82cc2e4a7eb8e228b25d1c4e"}, + {file = "PyMuPDF-1.23.25-cp38-none-manylinux2014_x86_64.whl", hash = "sha256:cbc1407dcf01b2e3e547b2d7643b97cc44c0950d2bb4b12c74322664c5cb37d7"}, + {file = "PyMuPDF-1.23.25-cp38-none-win32.whl", hash = "sha256:c29518701d6360beb01c25cf69a77b6426db90a9e7cd11179b3bd783c7fb4cb1"}, + {file = "PyMuPDF-1.23.25-cp38-none-win_amd64.whl", hash = "sha256:c1bb6fa9e00c846e6829dec2bee8326754adaef5c80626b99233c01923f0342c"}, + {file = "PyMuPDF-1.23.25-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:514b272bfcd897f9ae29384da04167dcdea3b13ce0f2b9099b645314355d037d"}, + {file = "PyMuPDF-1.23.25-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:ef345a5b050d0869ef404845075edd5f4bd7fd99e235f4d32ce85f423779a120"}, + {file = "PyMuPDF-1.23.25-cp39-none-manylinux2014_aarch64.whl", hash = "sha256:b3ade5b349c38ddffb24f8c266fbcd7161f488c43960ff0f03f977d40d4df967"}, + {file = "PyMuPDF-1.23.25-cp39-none-manylinux2014_x86_64.whl", hash = "sha256:111d795a3e840aec2ad66beebd90a5327994ec85ed56fd68312f5463062dbbfa"}, + {file = "PyMuPDF-1.23.25-cp39-none-win32.whl", hash = "sha256:2237ce9897771f4af686cc0c81517ffb020fc1a011b95ccf5ccf05383492bd6d"}, + {file = "PyMuPDF-1.23.25-cp39-none-win_amd64.whl", hash = "sha256:251c9c321a2112716068d5ae11deedd1911d0387cbdd0ef19adb216a3adf882c"}, + {file = "PyMuPDF-1.23.25.tar.gz", hash = "sha256:eb414e92f08107f43576a1fedea28aa837220b15ad58c8e32015435fe96cc03e"}, +] + +[package.dependencies] +PyMuPDFb = "1.23.22" + +[[package]] +name = "pymupdfb" +version = "1.23.22" +description = "MuPDF shared libraries for PyMuPDF." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "PyMuPDFb-1.23.22-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:9085a1e2fbf16f2820f9f7ad3d25e85f81d9b9eb0409110c1670d4cf5a27a678"}, + {file = "PyMuPDFb-1.23.22-py3-none-macosx_11_0_arm64.whl", hash = "sha256:01016dd33220cef4ecaf929d09fd27a584dc3ec3e5c9f4112dfe63613ea35135"}, + {file = "PyMuPDFb-1.23.22-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cf50e814db91f2a2325219302fbac229a23682c372cf8232aabd51ea3f18210e"}, + {file = "PyMuPDFb-1.23.22-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ffa713ad18e816e584c8a5f569995c32d22f8ac76ab6e4a61f2d2983c4b73d9"}, + {file = "PyMuPDFb-1.23.22-py3-none-win32.whl", hash = "sha256:d00e372452845aea624659c302d25e935052269fd3aafe26948301576d6f2ee8"}, + {file = "PyMuPDFb-1.23.22-py3-none-win_amd64.whl", hash = "sha256:7c9c157281fdee9f296e666a323307dbf74cb38f017921bb131fa7bfcd39c2bd"}, +] + [[package]] name = "pyparsing" version = "2.4.7" @@ -9045,4 +9385,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "b35a356770d3425f524b0c46a449696db1fa7c13fae77324188cb6ffa4a4c5a7" +content-hash = "e34d70b4ca2e9bdab5478d4b0b31dc39379c4506d1cc6962e378090570ce757c" diff --git a/pyproject.toml b/pyproject.toml index 09d70518a..76f8c02ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,9 +103,9 @@ langchain-google-genai = "^0.0.6" elasticsearch = "^8.12.0" pytube = "^15.0.0" python-socketio = "^5.11.0" -llama-index = "0.9.48" +llama-index = "^0.10.13" langchain-openai = "^0.0.6" -unstructured = {extras = ["md"], version = "^0.12.4"} +unstructured = { extras = ["md"], version = "^0.12.4" } [tool.poetry.group.dev.dependencies] pytest-asyncio = "^0.23.1" From 1f3d162e86505956c7a7a113ceb6b06deae703f0 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 21:50:51 -0300 Subject: [PATCH 13/98] Remove unused ChatDefinition class --- src/backend/langflow/utils/chat.py | 34 ------------------------------ 1 file changed, 34 deletions(-) delete mode 100644 src/backend/langflow/utils/chat.py diff --git a/src/backend/langflow/utils/chat.py b/src/backend/langflow/utils/chat.py deleted file mode 100644 index e1621bdab..000000000 --- a/src/backend/langflow/utils/chat.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import Any, Callable, Optional, Union - -from langchain_core.prompts import PromptTemplate as LCPromptTemplate -from langflow.utils.prompt import GenericPromptTemplate -from llama_index.prompts import PromptTemplate as LIPromptTemplate - -PromptTemplate = Union[LCPromptTemplate, LIPromptTemplate] - - -class ChatDefinition: - def __init__( - self, - func: Callable, - inputs: list[str], - output_key: Optional[str] = None, - prompt_template: Optional[PromptTemplate] = None, - ): - self.func = func - self.input_keys = inputs - self.output_key = output_key - self.prompt_template = prompt_template - - @classmethod - def from_prompt_template(cls, prompt_template: PromptTemplate, func: Callable, output_key: Optional[str] = None): - prompt = GenericPromptTemplate(prompt_template) - return cls( - func=func, - inputs=prompt.input_keys, - output_key=output_key, - prompt_template=prompt_template, - ) - - def __call__(self, inputs: dict, callbacks: Optional[Any] = None) -> dict: - return self.func(inputs, callbacks) From cb999b26bb63b304120425f601be2ada07f08f38 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 
Feb 2024 21:56:52 -0300 Subject: [PATCH 14/98] Add Text import to ChatInput.py --- src/backend/langflow/components/io/ChatInput.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/io/ChatInput.py b/src/backend/langflow/components/io/ChatInput.py index 4d8fc509c..6d96a6b96 100644 --- a/src/backend/langflow/components/io/ChatInput.py +++ b/src/backend/langflow/components/io/ChatInput.py @@ -1,6 +1,7 @@ -from typing import Optional +from typing import Optional, Union from langflow import CustomComponent +from langflow.field_typing import Text from langflow.schema import Record @@ -37,7 +38,7 @@ class ChatInput(CustomComponent): message: Optional[str] = None, session_id: Optional[str] = None, return_record: Optional[bool] = False, - ) -> Record: + ) -> Union[Text, Record]: if return_record: if isinstance(message, Record): # Update the data of the record From 5c73c01ea87210b18c14cd7048358919e4695f5b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 22:01:44 -0300 Subject: [PATCH 15/98] Update version to 0.7.0a0 in pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 76f8c02ed..a52871cf6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.6.7" +version = "0.7.0a0" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ From 19d0da7f74acb6753d66e23930d369aa0a86e017 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 22:05:31 -0300 Subject: [PATCH 16/98] Update TextInput field_config to support multiline input --- src/backend/langflow/components/io/TextInput.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/io/TextInput.py b/src/backend/langflow/components/io/TextInput.py index b0d2eb0a7..3fba54fda 100644 --- a/src/backend/langflow/components/io/TextInput.py +++ b/src/backend/langflow/components/io/TextInput.py @@ -9,7 +9,7 @@ class TextInput(CustomComponent): description = "Used to pass text input to the next component." 
field_config = { - "value": {"display_name": "Value"}, + "value": {"display_name": "Value", "multiline": True}, } def build(self, value: Optional[str] = "") -> Text: From c786e9970dd107b9ab27b3c8b52ea982a1f9eb94 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 23:01:20 -0300 Subject: [PATCH 17/98] Format exception message --- src/backend/langflow/api/utils.py | 54 +++++++++++++++++++++++++---- src/backend/langflow/api/v1/chat.py | 29 ++++++++++++---- 2 files changed, 69 insertions(+), 14 deletions(-) diff --git a/src/backend/langflow/api/utils.py b/src/backend/langflow/api/utils.py index bd71e6dcb..703fbd9e3 100644 --- a/src/backend/langflow/api/utils.py +++ b/src/backend/langflow/api/utils.py @@ -22,7 +22,9 @@ API_WORDS = ["api", "key", "token"] def has_api_terms(word: str): - return "api" in word and ("key" in word or ("token" in word and "tokens" not in word)) + return "api" in word and ( + "key" in word or ("token" in word and "tokens" not in word) + ) def remove_api_keys(flow: dict): @@ -32,7 +34,11 @@ def remove_api_keys(flow: dict): node_data = node.get("data").get("node") template = node_data.get("template") for value in template.values(): - if isinstance(value, dict) and has_api_terms(value["name"]) and value.get("password"): + if ( + isinstance(value, dict) + and has_api_terms(value["name"]) + and value.get("password") + ): value["value"] = None return flow @@ -53,7 +59,9 @@ def build_input_keys_response(langchain_object, artifacts): input_keys_response["input_keys"][key] = value # If the object has memory, that memory will have a memory_variables attribute # memory variables should be removed from the input keys - if hasattr(langchain_object, "memory") and hasattr(langchain_object.memory, "memory_variables"): + if hasattr(langchain_object, "memory") and hasattr( + langchain_object.memory, "memory_variables" + ): # Remove memory variables from input keys input_keys_response["input_keys"] = { key: value @@ -63,7 +71,9 @@ def build_input_keys_response(langchain_object, artifacts): # Add memory variables to memory_keys input_keys_response["memory_keys"] = langchain_object.memory.memory_variables - if hasattr(langchain_object, "prompt") and hasattr(langchain_object.prompt, "template"): + if hasattr(langchain_object, "prompt") and hasattr( + langchain_object.prompt, "template" + ): input_keys_response["template"] = langchain_object.prompt.template return input_keys_response @@ -98,7 +108,11 @@ def raw_frontend_data_is_valid(raw_frontend_data): def is_valid_data(frontend_node, raw_frontend_data): """Check if the data is valid for processing.""" - return frontend_node and "template" in frontend_node and raw_frontend_data_is_valid(raw_frontend_data) + return ( + frontend_node + and "template" in frontend_node + and raw_frontend_data_is_valid(raw_frontend_data) + ) def update_template_values(frontend_template, raw_template): @@ -138,7 +152,9 @@ def get_file_path_value(file_path): # If the path is not in the cache dir, return empty string # This is to prevent access to files outside the cache dir # If the path is not a file, return empty string - if not path.exists() or not str(path).startswith(user_cache_dir("langflow", "langflow")): + if not path.exists() or not str(path).startswith( + user_cache_dir("langflow", "langflow") + ): return "" return file_path @@ -169,7 +185,9 @@ async def check_langflow_version(component: StoreComponentCreate): langflow_version = get_lf_version_from_pypi() if langflow_version is None: - raise HTTPException(status_code=500, 
detail="Unable to verify the latest version of Langflow") + raise HTTPException( + status_code=500, detail="Unable to verify the latest version of Langflow" + ) elif langflow_version != component.last_tested_version: warnings.warn( f"Your version of Langflow ({component.last_tested_version}) is outdated. " @@ -230,3 +248,25 @@ def build_and_cache_graph( graph = graph.update(other_graph) chat_service.set_cache(flow_id, graph) return graph + + +def format_syntax_error_message(exc: SyntaxError) -> str: + """Format a SyntaxError message for returning to the frontend.""" + return f"Syntax error in code. Error on line {exc.lineno}: {exc.text.strip()}" + + +def get_causing_exception(exc: Exception) -> Exception: + """Get the causing exception from an exception.""" + if hasattr(exc, "__cause__") and exc.__cause__: + return get_causing_exception(exc.__cause__) + return exc + + +def format_exception_message(exc: Exception) -> str: + """Format an exception message for returning to the frontend.""" + # We need to check if the __cause__ is a SyntaxError + # If it is, we need to return the message of the SyntaxError + causing_exception = get_causing_exception(exc) + if isinstance(causing_exception, SyntaxError): + return format_syntax_error_message(causing_exception) + return str(exc) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index d04588cba..1163ee749 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -14,7 +14,11 @@ from fastapi.responses import StreamingResponse from loguru import logger from sqlmodel import Session -from langflow.api.utils import build_and_cache_graph, format_elapsed_time +from langflow.api.utils import ( + build_and_cache_graph, + format_elapsed_time, + format_exception_message, +) from langflow.api.v1.schemas import ( ResultData, StreamData, @@ -45,9 +49,13 @@ async def chat( user = await get_current_user_for_websocket(websocket, db) await websocket.accept() if not user: - await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized") + await websocket.close( + code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized" + ) elif not user.is_active: - await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized") + await websocket.close( + code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized" + ) if client_id in chat_service.cache_service: await chat_service.handle_websocket(client_id, websocket) @@ -63,7 +71,9 @@ async def chat( logger.error(f"Error in chat websocket: {exc}") messsage = exc.detail if isinstance(exc, HTTPException) else str(exc) if "Could not validate credentials" in str(exc): - await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized") + await websocket.close( + code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized" + ) else: await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=messsage) @@ -133,8 +143,12 @@ async def build_vertex( cache = chat_service.get_cache(flow_id) if not cache: # If there's no cache - logger.warning(f"No cache found for {flow_id}. Building graph starting at {vertex_id}") - graph = build_and_cache_graph(flow_id=flow_id, session=next(get_session()), chat_service=chat_service) + logger.warning( + f"No cache found for {flow_id}. 
Building graph starting at {vertex_id}" + ) + graph = build_and_cache_graph( + flow_id=flow_id, session=next(get_session()), chat_service=chat_service + ) else: graph = cache.get("result") result_dict = {} @@ -165,7 +179,8 @@ async def build_vertex( raise ValueError(f"No result found for vertex {vertex_id}") except Exception as exc: - params = str(exc) + # + params = format_exception_message(exc) valid = False result_dict = ResultData(results={}) artifacts = {} From 1df99008a10a8d1701956cd49660dac955a5afd1 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 26 Feb 2024 23:03:16 -0300 Subject: [PATCH 18/98] Fix vertex building error and improve vertex data comparison --- src/backend/langflow/graph/graph/base.py | 5 ++++- src/backend/langflow/graph/vertex/base.py | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 400ae99b9..2f94f150a 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -162,7 +162,10 @@ class Graph: self.edges = new_edges def vertex_data_is_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool: - return vertex.__repr__() == other_vertex.__repr__() + data_is_equivalent = vertex.__repr__() == other_vertex.__repr__() + if not data_is_equivalent: + return False + return self.vertex_edges_are_identical(vertex, other_vertex) def vertex_edges_are_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool: same_length = len(vertex.edges) == len(other_vertex.edges) diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index 8addb1ac7..874d4bd21 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -516,6 +516,7 @@ class Vertex: self._update_built_object_and_artifacts(result) except Exception as exc: logger.exception(exc) + raise ValueError( f"Error building node {self.display_name}: {str(exc)}" ) from exc From fef68c30a8a5e1ff4fb6a27d4c410f3b59ff2132 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:35:27 -0300 Subject: [PATCH 19/98] Refactor chat message handling in newChatView --- .../src/components/newChatView/chatMessage/index.tsx | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/frontend/src/components/newChatView/chatMessage/index.tsx b/src/frontend/src/components/newChatView/chatMessage/index.tsx index 31f4f0345..266444d8b 100644 --- a/src/frontend/src/components/newChatView/chatMessage/index.tsx +++ b/src/frontend/src/components/newChatView/chatMessage/index.tsx @@ -24,7 +24,10 @@ export default function ChatMessage({ const template = chat.template; const [promptOpen, setPromptOpen] = useState(false); const [streamUrl, setStreamUrl] = useState(chat.stream_url); - const [chatMessage, setChatMessage] = useState(chat.message.toString()); + // We need to check if message is not undefined because + // we need to run .toString() on it + const chatMessageString = chat.message ? chat.message.toString() : ""; + const [chatMessage, setChatMessage] = useState(chatMessageString); const [isStreaming, setIsStreaming] = useState(false); // The idea now is that chat.stream_url MAY be a URL if we should stream the output of the chat @@ -146,7 +149,7 @@ export default function ChatMessage({
{useMemo( () => - chat.message.toString() === "" && lockChat ? ( + chatMessage === "" && lockChat ? ( {parts}

; }) - : chat.message.toString()} + : chatMessage} ) : ( - {chat.message.toString()} + {chatMessage} )}
)} From 7dbffe6331ec924b6aebbe22cba35d8b37de7daf Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:36:06 -0300 Subject: [PATCH 20/98] Add is_input and is_output attributes to CustomComponent and FrontendNode classes --- .../langflow/interface/custom/attributes.py | 2 ++ .../custom_component/custom_component.py | 10 +++++- .../langflow/template/frontend_node/base.py | 33 +++++++++++++++---- src/frontend/src/types/api/index.ts | 2 ++ 4 files changed, 40 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/interface/custom/attributes.py b/src/backend/langflow/interface/custom/attributes.py index 8ec2a0daa..d3119cd3d 100644 --- a/src/backend/langflow/interface/custom/attributes.py +++ b/src/backend/langflow/interface/custom/attributes.py @@ -37,4 +37,6 @@ ATTR_FUNC_MAPPING = { "documentation": getattr_return_str, "icon": validate_icon, "pinned": getattr_return_bool, + "is_input": getattr_return_bool, + "is_output": getattr_return_bool, } diff --git a/src/backend/langflow/interface/custom/custom_component/custom_component.py b/src/backend/langflow/interface/custom/custom_component/custom_component.py index 7d8794878..549c0dad3 100644 --- a/src/backend/langflow/interface/custom/custom_component/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component/custom_component.py @@ -35,6 +35,7 @@ from langflow.utils import validate if TYPE_CHECKING: from langflow.graph.edge.base import ContractEdge + from langflow.graph.vertex.base import Vertex class CustomComponent(Component): @@ -44,6 +45,12 @@ class CustomComponent(Component): """The description of the component. Defaults to None.""" icon: Optional[str] = None """The icon of the component. It should be an emoji. Defaults to None.""" + is_input: Optional[bool] = None + """The input state of the component. Defaults to None. + If True, the component must have a field named 'message'.""" + is_output: Optional[bool] = None + """The output state of the component. Defaults to None. + If True, the component must have a field named 'message'.""" code: Optional[str] = None """The code of the component. Defaults to None.""" field_config: dict = {} @@ -56,7 +63,7 @@ class CustomComponent(Component): """The build parameters of the component. Defaults to None.""" selected_output_type: Optional[str] = None """The selected output type of the component. Defaults to None.""" - outgoing_edges: Optional[List["ContractEdge"]] = None + vertex: Optional["Vertex"] = None """The edge target parameter of the component. Defaults to None.""" code_class_base_inheritance: ClassVar[str] = "CustomComponent" function_entrypoint_name: ClassVar[str] = "build" @@ -65,6 +72,7 @@ class CustomComponent(Component): user_id: Optional[Union[UUID, str]] = None status: Optional[Any] = None """The status of the component. This is displayed on the frontend. Defaults to None.""" + _tree: Optional[dict] = None def __init__(self, **data): diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py index 8b400b849..9f62c7054 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/langflow/template/frontend_node/base.py @@ -47,6 +47,12 @@ class FrontendNode(BaseModel): """Description of the frontend node.""" icon: Optional[str] = None """Icon of the frontend node.""" + is_input: Optional[bool] = None + """Whether the frontend node is used as an input when processing the Graph. 
+ If True, there should be a field named 'message'.""" + is_output: Optional[bool] = None + """Whether the frontend node is used as an output when processing the Graph. + If True, there should be a field named 'message'.""" is_composition: Optional[bool] = None """Whether the frontend node is used for composition.""" base_classes: List[str] @@ -165,7 +171,9 @@ class FrontendNode(BaseModel): return _type @staticmethod - def handle_special_field(field, key: str, _type: str, SPECIAL_FIELD_HANDLERS) -> str: + def handle_special_field( + field, key: str, _type: str, SPECIAL_FIELD_HANDLERS + ) -> str: """Handles special field by using the respective handler if present.""" handler = SPECIAL_FIELD_HANDLERS.get(key) return handler(field) if handler else _type @@ -176,7 +184,11 @@ class FrontendNode(BaseModel): if "dict" in _type.lower() and field.name == "dict_": field.field_type = "file" field.file_types = [".json", ".yaml", ".yml"] - elif _type.startswith("Dict") or _type.startswith("Mapping") or _type.startswith("dict"): + elif ( + _type.startswith("Dict") + or _type.startswith("Mapping") + or _type.startswith("dict") + ): field.field_type = "dict" return _type @@ -187,7 +199,9 @@ class FrontendNode(BaseModel): field.value = value["default"] @staticmethod - def handle_specific_field_values(field: TemplateField, key: str, name: Optional[str] = None) -> None: + def handle_specific_field_values( + field: TemplateField, key: str, name: Optional[str] = None + ) -> None: """Handles specific field values for certain fields.""" if key == "headers": field.value = """{"Authorization": "Bearer "}""" @@ -195,7 +209,9 @@ class FrontendNode(BaseModel): FrontendNode._handle_api_key_specific_field_values(field, key, name) @staticmethod - def _handle_model_specific_field_values(field: TemplateField, key: str, name: Optional[str] = None) -> None: + def _handle_model_specific_field_values( + field: TemplateField, key: str, name: Optional[str] = None + ) -> None: """Handles specific field values related to models.""" model_dict = { "OpenAI": constants.OPENAI_MODELS, @@ -208,7 +224,9 @@ class FrontendNode(BaseModel): field.is_list = True @staticmethod - def _handle_api_key_specific_field_values(field: TemplateField, key: str, name: Optional[str] = None) -> None: + def _handle_api_key_specific_field_values( + field: TemplateField, key: str, name: Optional[str] = None + ) -> None: """Handles specific field values related to API keys.""" if "api_key" in key and "OpenAI" in str(name): field.display_name = "OpenAI API Key" @@ -248,7 +266,10 @@ class FrontendNode(BaseModel): @staticmethod def should_be_password(key: str, show: bool) -> bool: """Determines whether the field should be a password field.""" - return any(text in key.lower() for text in {"password", "token", "api", "key"}) and show + return ( + any(text in key.lower() for text in {"password", "token", "api", "key"}) + and show + ) @staticmethod def should_be_multiline(key: str) -> bool: diff --git a/src/frontend/src/types/api/index.ts b/src/frontend/src/types/api/index.ts index 59b8a5ca5..3e9f4a0f5 100644 --- a/src/frontend/src/types/api/index.ts +++ b/src/frontend/src/types/api/index.ts @@ -18,6 +18,8 @@ export type APIClassType = { template: APITemplateType; display_name: string; icon?: string; + is_input?: boolean; + is_output?: boolean; input_types?: Array; output_types?: Array; custom_fields?: CustomFieldsType; From e4ffe4b9d45f0407caaa615c6fbfd55ce7ebcfad Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:36:28 -0300 
Subject: [PATCH 21/98] Use vertex instead of outgoing edges --- .../langflow/interface/initialize/loading.py | 37 ++++++------------- 1 file changed, 12 insertions(+), 25 deletions(-) diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index 6694da26a..b87923c7f 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -1,6 +1,6 @@ import inspect import json -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Sequence, Type +from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Sequence, Type import orjson from langchain.agents import agent as agent_module @@ -35,25 +35,15 @@ from langflow.utils import validate if TYPE_CHECKING: from langflow import CustomComponent from langflow.graph.edge.base import ContractEdge - - -def build_vertex_in_params(params: Dict) -> Dict: from langflow.graph.vertex.base import Vertex - # If any of the values in params is a Vertex, we will build it - return { - key: value.build() if isinstance(value, Vertex) else value - for key, value in params.items() - } - async def instantiate_class( node_type: str, base_type: str, params: Dict, user_id=None, - outgoing_edges: Optional[List["ContractEdge"]] = None, - selected_output_type: Optional[str] = None, + vertex: Optional["Vertex"] = None, ) -> Any: """Instantiate class from module type and key, and params""" params = convert_params_to_sets(params) @@ -67,13 +57,12 @@ async def instantiate_class( logger.debug(f"Instantiating {node_type} of type {base_type}") class_object = import_by_type(_type=base_type, name=node_type) return await instantiate_based_on_type( - class_object, - base_type, - node_type, - params, + class_object=class_object, + base_type=base_type, + node_type=node_type, + params=params, user_id=user_id, - outgoing_edges=outgoing_edges, - selected_output_type=selected_output_type, + vertex=vertex, ) @@ -107,8 +96,7 @@ async def instantiate_based_on_type( node_type, params, user_id, - outgoing_edges, - selected_output_type, + vertex, ): if base_type == "agents": return instantiate_agent(node_type, class_object, params) @@ -148,8 +136,7 @@ async def instantiate_based_on_type( class_object, params, user_id, - outgoing_edges, - selected_output_type, + vertex, ) elif base_type == "wrappers": return instantiate_wrapper(node_type, class_object, params) @@ -158,7 +145,7 @@ async def instantiate_based_on_type( async def instantiate_custom_component( - node_type, class_object, params, user_id, outgoing_edges, selected_output_type + node_type, class_object, params, user_id, vertex ): params_copy = params.copy() class_object: Type["CustomComponent"] = eval_custom_component_code( @@ -167,8 +154,8 @@ async def instantiate_custom_component( custom_component: "CustomComponent" = class_object( user_id=user_id, parameters=params_copy, - outgoing_edges=outgoing_edges, - selected_output_type=selected_output_type, + vertex=vertex, + selected_output_type=vertex.selected_output_type, ) if "retriever" in params_copy and hasattr(params_copy["retriever"], "as_retriever"): From b7e52f62be4d6852a4ab72b38de12e7acfe63c6a Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:36:58 -0300 Subject: [PATCH 22/98] Refactor API schemas and update dependencies --- src/backend/langflow/api/v1/schemas.py | 18 +++++++----------- .../langflow/services/monitor/utils.py | 19 +++++++++++++------ src/backend/langflow/services/socket/utils.py | 13 
++++++++----- 3 files changed, 28 insertions(+), 22 deletions(-) diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index 23d8ddf9b..adb26202a 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -4,12 +4,12 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Union from uuid import UUID -from langflow.api.utils import serialize_field +from pydantic import BaseModel, Field, field_validator + from langflow.services.database.models.api_key.model import ApiKeyRead from langflow.services.database.models.base import orjson_dumps from langflow.services.database.models.flow import FlowCreate, FlowRead from langflow.services.database.models.user import UserRead -from pydantic import BaseModel, Field, field_serializer, field_validator class BuildStatus(Enum): @@ -161,7 +161,9 @@ class StreamData(BaseModel): data: dict def __str__(self) -> str: - return f"event: {self.event}\ndata: {orjson_dumps(self.data, indent_2=False)}\n\n" + return ( + f"event: {self.event}\ndata: {orjson_dumps(self.data, indent_2=False)}\n\n" + ) class CustomComponentCode(BaseModel): @@ -220,18 +222,12 @@ class VerticesOrderResponse(BaseModel): ids: List[List[str]] -class ResultData(BaseModel): +class ResultDataResponse(BaseModel): results: Optional[Any] = Field(default_factory=dict) artifacts: Optional[Any] = Field(default_factory=dict) timedelta: Optional[float] = None duration: Optional[str] = None - @field_serializer("results") - def serialize_results(self, value): - if isinstance(value, dict): - return {key: serialize_field(val) for key, val in value.items()} - return serialize_field(value) - class VertexBuildResponse(BaseModel): id: Optional[str] = None @@ -239,7 +235,7 @@ class VertexBuildResponse(BaseModel): valid: bool params: Optional[str] """JSON string of the params.""" - data: ResultData + data: ResultDataResponse """Mapping of vertex ids to result dict containing the param name and result value.""" timestamp: Optional[datetime] = Field(default_factory=datetime.utcnow) """Timestamp of the build.""" diff --git a/src/backend/langflow/services/monitor/utils.py b/src/backend/langflow/services/monitor/utils.py index 87f58aa9f..d308e7653 100644 --- a/src/backend/langflow/services/monitor/utils.py +++ b/src/backend/langflow/services/monitor/utils.py @@ -1,12 +1,13 @@ from typing import TYPE_CHECKING, Any, Dict, Optional, Type, Union import duckdb -from langflow.services.deps import get_monitor_service from loguru import logger from pydantic import BaseModel +from langflow.services.deps import get_monitor_service + if TYPE_CHECKING: - from langflow.api.v1.schemas import ResultData + from langflow.api.v1.schemas import ResultDataResponse INDEX_KEY = "index" @@ -45,7 +46,9 @@ def model_to_sql_column_definitions(model: Type[BaseModel]) -> dict: return columns -def drop_and_create_table_if_schema_mismatch(db_path: str, table_name: str, model: Type[BaseModel]): +def drop_and_create_table_if_schema_mismatch( + db_path: str, table_name: str, model: Type[BaseModel] +): with duckdb.connect(db_path) as conn: # Get the current schema from the database try: @@ -66,8 +69,12 @@ def drop_and_create_table_if_schema_mismatch(db_path: str, table_name: str, mode conn.execute(f"CREATE SEQUENCE seq_{table_name} START 1;") except duckdb.CatalogException: pass - desired_schema[INDEX_KEY] = f"INTEGER PRIMARY KEY DEFAULT NEXTVAL('seq_{table_name}')" - columns_sql = ", ".join(f"{name} {data_type}" for name, data_type in 
desired_schema.items()) + desired_schema[INDEX_KEY] = ( + f"INTEGER PRIMARY KEY DEFAULT NEXTVAL('seq_{table_name}')" + ) + columns_sql = ", ".join( + f"{name} {data_type}" for name, data_type in desired_schema.items() + ) create_table_sql = f"CREATE TABLE {table_name} ({columns_sql})" conn.execute(create_table_sql) @@ -138,7 +145,7 @@ async def log_vertex_build( vertex_id: str, valid: bool, params: Any, - data: "ResultData", + data: "ResultDataResponse", artifacts: Optional[dict] = None, ): try: diff --git a/src/backend/langflow/services/socket/utils.py b/src/backend/langflow/services/socket/utils.py index 64ffdc15c..48208403a 100644 --- a/src/backend/langflow/services/socket/utils.py +++ b/src/backend/langflow/services/socket/utils.py @@ -2,14 +2,15 @@ import time from typing import Callable import socketio +from sqlmodel import select + from langflow.api.utils import format_elapsed_time -from langflow.api.v1.schemas import ResultData, VertexBuildResponse +from langflow.api.v1.schemas import ResultDataResponse, VertexBuildResponse from langflow.graph.graph.base import Graph from langflow.graph.vertex.base import StatelessVertex from langflow.services.database.models.flow.model import Flow from langflow.services.deps import get_session from langflow.services.monitor.utils import log_vertex_build -from sqlmodel import select def set_socketio_server(socketio_server): @@ -73,7 +74,7 @@ async def build_vertex( artifacts = vertex.artifacts timedelta = time.perf_counter() - start_time duration = format_elapsed_time(timedelta) - result_dict = ResultData( + result_dict = ResultDataResponse( results=result_dict, artifacts=artifacts, duration=duration, @@ -82,7 +83,7 @@ async def build_vertex( except Exception as exc: params = str(exc) valid = False - result_dict = ResultData(results={}) + result_dict = ResultDataResponse(results={}) artifacts = {} set_cache(flow_id, graph) await log_vertex_build( @@ -95,7 +96,9 @@ async def build_vertex( ) # Emit the vertex build response - response = VertexBuildResponse(valid=valid, params=params, id=vertex.id, data=result_dict) + response = VertexBuildResponse( + valid=valid, params=params, id=vertex.id, data=result_dict + ) await sio.emit("vertex_build", data=response.model_dump(), to=sid) except Exception as exc: From 9beadd70f19ef8bb794c2469d949603857e9a5f1 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:37:27 -0300 Subject: [PATCH 23/98] Move ResultData to graph --- src/backend/langflow/api/utils.py | 16 -------------- src/backend/langflow/graph/schema.py | 31 +++++++++++++++++++++++++++- 2 files changed, 30 insertions(+), 17 deletions(-) diff --git a/src/backend/langflow/api/utils.py b/src/backend/langflow/api/utils.py index 703fbd9e3..4db323537 100644 --- a/src/backend/langflow/api/utils.py +++ b/src/backend/langflow/api/utils.py @@ -3,9 +3,7 @@ from pathlib import Path from typing import TYPE_CHECKING, List, Optional from fastapi import HTTPException -from langchain_core.documents import Document from platformdirs import user_cache_dir -from pydantic import BaseModel from sqlmodel import Session from langflow.graph.graph.base import Graph @@ -217,20 +215,6 @@ def format_elapsed_time(elapsed_time: float) -> str: return f"{minutes} {minutes_unit}, {seconds} {seconds_unit}" -def serialize_field(value): - """Unified serialization function for handling both BaseModel and Document types, - including handling lists of these types.""" - if isinstance(value, (list, tuple)): - return [serialize_field(v) for v in value] - elif 
isinstance(value, Document): - return value.to_json() - elif isinstance(value, BaseModel): - return value.model_dump() - elif isinstance(value, str): - return {"result": value} - return value - - def build_and_cache_graph( flow_id: str, session: Session, diff --git a/src/backend/langflow/graph/schema.py b/src/backend/langflow/graph/schema.py index a3a2822e1..d41e0544a 100644 --- a/src/backend/langflow/graph/schema.py +++ b/src/backend/langflow/graph/schema.py @@ -1,8 +1,37 @@ from enum import Enum +from typing import Any, Optional +from langflow.graph.utils import serialize_field + +from pydantic import BaseModel, Field, field_serializer -class InterfaceComponentTypes(Enum): +class ResultData(BaseModel): + results: Optional[Any] = Field(default_factory=dict) + artifacts: Optional[Any] = Field(default_factory=dict) + timedelta: Optional[float] = None + duration: Optional[str] = None + + @field_serializer("results") + def serialize_results(self, value): + if isinstance(value, dict): + return {key: serialize_field(val) for key, val in value.items()} + return serialize_field(value) + + +class InterfaceComponentTypes(str, Enum): # ChatInput and ChatOutput are the only ones that are # power components ChatInput = "ChatInput" ChatOutput = "ChatOutput" + TextInput = "TextInput" + TextOutput = "TextOutput" + + +INPUT_COMPONENTS = [ + InterfaceComponentTypes.ChatInput, + InterfaceComponentTypes.TextInput, +] +OUTPUT_COMPONENTS = [ + InterfaceComponentTypes.ChatOutput, + InterfaceComponentTypes.TextOutput, +] From 04de488edee64d4eb2784ab9622049aca2215563 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:37:44 -0300 Subject: [PATCH 24/98] Refactor vertex class and update build process --- src/backend/langflow/graph/vertex/base.py | 59 ++++++++++++++++++---- src/backend/langflow/graph/vertex/types.py | 15 +----- 2 files changed, 51 insertions(+), 23 deletions(-) diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index 874d4bd21..e87389c7b 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -6,7 +6,12 @@ from typing import TYPE_CHECKING, Any, Callable, Coroutine, Dict, List, Optional from loguru import logger -from langflow.graph.schema import InterfaceComponentTypes +from langflow.graph.schema import ( + INPUT_COMPONENTS, + OUTPUT_COMPONENTS, + InterfaceComponentTypes, + ResultData, +) from langflow.graph.utils import UnbuiltObject, UnbuiltResult from langflow.graph.vertex.utils import generate_result from langflow.interface.initialize import loading @@ -16,7 +21,6 @@ from langflow.utils.constants import DIRECT_TYPES from langflow.utils.util import sync_to_async if TYPE_CHECKING: - from langflow.api.v1.schemas import ResultData from langflow.graph.edge.base import ContractEdge from langflow.graph.graph.base import Graph @@ -40,11 +44,19 @@ class Vertex: ) -> None: # is_external means that the Vertex send or receives data from # an external source (e.g the chat) + + self.id: str = data["id"] + self.is_input = any( + input_component_name in self.id for input_component_name in INPUT_COMPONENTS + ) + self.is_output = any( + output_component_name in self.id + for output_component_name in OUTPUT_COMPONENTS + ) self._custom_component = None self.has_external_input = False self.has_external_output = False self.graph = graph - self.id: str = data["id"] self._data = data self.base_type: Optional[str] = base_type self._parse_data() @@ -61,7 +73,7 @@ class Vertex: 
self.parent_is_top_level = False self.layer = None self.should_run = True - self.result: Optional["ResultData"] = None + self.result: Optional[ResultData] = None try: self.is_interface_component = InterfaceComponentTypes(self.vertex_type) except ValueError: @@ -116,7 +128,7 @@ class Vertex: ) return edge_results - def set_result(self, result: "ResultData") -> None: + def set_result(self, result: ResultData) -> None: self.result = result def get_built_result(self): @@ -203,6 +215,8 @@ class Vertex: self.display_name = self.data["node"]["display_name"] self.pinned = self.data["node"].get("pinned", False) self.selected_output_type = self.data["node"].get("selected_output_type") + self.is_input = self.data["node"].get("is_input") or self.is_input + self.is_output = self.data["node"].get("is_output") or self.is_output template_dicts = { key: value for key, value in self.data["node"]["template"].items() @@ -359,6 +373,21 @@ class Vertex: self.params = params self._raw_params = params.copy() + def update_raw_params(self, new_params: Dict[str, str]): + """ + Update the raw parameters of the vertex with the given new parameters. + + Args: + new_params (Dict[str, Any]): The new parameters to update. + + Raises: + ValueError: If any key in new_params is not found in self._raw_params. + """ + for key in new_params: + if key not in self._raw_params: + raise ValueError(f"Key {key} not found in raw params") + self._raw_params.update(new_params) + async def _build(self, user_id=None): """ Initiate the build process. @@ -370,6 +399,18 @@ class Vertex: self._built = True + def _finalize_build(self): + result_dict = self.get_built_result() + # We need to set the artifacts to pass information + # to the frontend + self.set_artifacts() + artifacts = self.artifacts + result_dict = ResultData( + results=result_dict, + artifacts=artifacts, + ) + self.set_result(result_dict) + async def _run( self, user_id: str, @@ -501,17 +542,13 @@ class Vertex: if self.base_type is None: raise ValueError(f"Base type for node {self.display_name} not found") try: - outgoing_edges = self.graph.get_vertex_edges( - self.id, is_source=True, is_target=False - ) result = await loading.instantiate_class( node_type=self.vertex_type, base_type=self.base_type, params=self.params, user_id=user_id, - outgoing_edges=outgoing_edges, - selected_output_type=self.selected_output_type, + vertex=self, ) self._update_built_object_and_artifacts(result) except Exception as exc: @@ -584,6 +621,8 @@ class Vertex: step(user_id=user_id, **kwargs) self.steps_ran.append(step) + self._finalize_build() + return await self.get_requester_result(requester) async def get_requester_result(self, requester: Optional["Vertex"]): diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index b7746b6a1..45bcb9ccd 100644 --- a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -1,10 +1,10 @@ import ast import json -from typing import AsyncIterator, Callable, Dict, Iterator, List, Optional, Union +from typing import (AsyncIterator, Callable, Dict, Iterator, List, Optional, + Union) import yaml from langchain_core.messages import AIMessage -from loguru import logger from langflow.graph.utils import UnbuiltObject, flatten_list from langflow.graph.vertex.base import StatefulVertex, StatelessVertex @@ -344,17 +344,6 @@ class ChatVertex(StatelessVertex): def build_stream_url(self): return f"/api/v1/build/{self.graph.flow_id}/{self.id}/stream" - async def _build(self, user_id=None): - """ - 
Initiate the build process. - """ - logger.debug(f"Building {self.vertex_type}") - await self._build_each_node_in_params_dict(user_id) - await self._get_and_instantiate_class(user_id) - self._validate_built_object() - - self._built = True - def _built_object_repr(self): if self.task_id and self.is_task: if task := self.get_task(): From 073e4b7ccf3494a7e02d3a5f00a3691c817855bb Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:37:53 -0300 Subject: [PATCH 25/98] Add input and output definitions to Graph class --- src/backend/langflow/graph/graph/base.py | 46 +++++++++++++++++++++++- 1 file changed, 45 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 2f94f150a..68e16bed8 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -1,5 +1,5 @@ from collections import defaultdict, deque -from typing import Dict, Generator, List, Optional, Type, Union +from typing import TYPE_CHECKING, Dict, Generator, List, Optional, Type, Union from langchain.chains.base import Chain from loguru import logger @@ -19,6 +19,9 @@ from langflow.graph.vertex.types import ( from langflow.interface.tools.constants import FILE_TOOLS from langflow.utils import payload +if TYPE_CHECKING: + from langflow.graph.schema import ResultData + class Graph: """A class representing a graph of vertices and edges.""" @@ -37,6 +40,8 @@ class Graph: self._runs = 0 self._updates = 0 self.flow_id = flow_id + self._inputs = [] + self._outputs = [] self.top_level_vertices = [] for vertex in self._vertices: @@ -49,6 +54,45 @@ class Graph: self.inactive_vertices = set() self._build_graph() self.build_graph_maps() + self.define_inputs_and_outputs() + + def define_inputs_and_outputs(self): + """ + Defines the input and output vertices of the graph. 
+ """ + for vertex in self.vertices: + if vertex.is_input: + self._inputs.append(vertex.id) + if vertex.is_output: + self._outputs.append(vertex.id) + + def run(self, inputs: Dict[str, str]) -> List["ResultData"]: + """Runs the graph with the given inputs.""" + + # inputs is {"message": "Hello, world!"} + # we need to go through self.inputs and update the self._raw_params + # of the vertices that are inputs + + for vertex_id in self.inputs: + vertex = self.get_vertex(vertex_id) + if vertex is None: + raise ValueError(f"Vertex {vertex_id} not found") + vertex.update_raw_params(inputs) + try: + self.build() + self.increment_run_count() + except Exception as exc: + logger.exception(exc) + raise ValueError(f"Error running graph: {exc}") from exc + + # Now we get the outputs from the self.outputs + outputs = [] + for vertex_id in self.outputs: + vertex = self.get_vertex(vertex_id) + if vertex is None: + raise ValueError(f"Vertex {vertex_id} not found") + outputs.append(vertex.result) + return outputs @property def metadata(self): From acfb9b10618a37686c0c778823913aba7a929d7b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:38:03 -0300 Subject: [PATCH 26/98] Add serialize_field function to handle serialization of BaseModel and Document types --- src/backend/langflow/graph/utils.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/backend/langflow/graph/utils.py b/src/backend/langflow/graph/utils.py index 4bfd71fc5..83e2177b1 100644 --- a/src/backend/langflow/graph/utils.py +++ b/src/backend/langflow/graph/utils.py @@ -1,5 +1,8 @@ from typing import Any, Union +from langchain_core.documents import Document +from pydantic import BaseModel + from langflow.interface.utils import extract_input_variables_from_prompt @@ -33,3 +36,17 @@ def flatten_list(list_of_lists: list[Union[list, Any]]) -> list: else: new_list.append(item) return new_list + + +def serialize_field(value): + """Unified serialization function for handling both BaseModel and Document types, + including handling lists of these types.""" + if isinstance(value, (list, tuple)): + return [serialize_field(v) for v in value] + elif isinstance(value, Document): + return value.to_json() + elif isinstance(value, BaseModel): + return value.model_dump() + elif isinstance(value, str): + return {"result": value} + return value From f6e7fa63744cca3d34b0a6507991c8df6cf28b70 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 11:38:09 -0300 Subject: [PATCH 27/98] Refactor build_vertex function in chat.py --- src/backend/langflow/api/v1/chat.py | 33 +++++++++++------------------ 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index 1163ee749..ca067ce6f 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -20,7 +20,7 @@ from langflow.api.utils import ( format_exception_message, ) from langflow.api.v1.schemas import ( - ResultData, + ResultDataResponse, StreamData, VertexBuildResponse, VerticesOrderResponse, @@ -151,26 +151,15 @@ async def build_vertex( ) else: graph = cache.get("result") - result_dict = {} + result_data_response = {} duration = "" vertex = graph.get_vertex(vertex_id) try: if not vertex.pinned or not vertex._built: await vertex.build(user_id=current_user.id) - params = vertex._built_object_repr() - valid = True - result_dict = vertex.get_built_result() - # We need to set the artifacts to pass information - # to the 
frontend - vertex.set_artifacts() - artifacts = vertex.artifacts - result_dict = ResultData( - results=result_dict, - artifacts=artifacts, - ) - vertex.set_result(result_dict) - elif vertex.result is not None: + + if vertex.result is not None: params = vertex._built_object_repr() valid = True result_dict = vertex.result @@ -178,11 +167,13 @@ async def build_vertex( else: raise ValueError(f"No result found for vertex {vertex_id}") + result_data_response = ResultDataResponse(**result_dict.model_dump()) + except Exception as exc: - # + logger.error(f"Error building vertex: {exc}") params = format_exception_message(exc) valid = False - result_dict = ResultData(results={}) + result_data_response = ResultDataResponse(results={}) artifacts = {} # If there's an error building the vertex # we need to clear the cache @@ -195,14 +186,14 @@ async def build_vertex( vertex_id=vertex_id, valid=valid, params=params, - data=result_dict, + data=result_data_response, artifacts=artifacts, ) timedelta = time.perf_counter() - start_time duration = format_elapsed_time(timedelta) - result_dict.duration = duration - result_dict.timedelta = timedelta + result_data_response.duration = duration + result_data_response.timedelta = timedelta vertex.add_build_time(timedelta) inactive_vertices = None if graph.inactive_vertices: @@ -215,7 +206,7 @@ async def build_vertex( valid=valid, params=params, id=vertex.id, - data=result_dict, + data=result_data_response, ) except Exception as exc: logger.error(f"Error building vertex: {exc}") From e2e6f804618c650d17034b76e9392d31caf11307 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 12:55:34 -0300 Subject: [PATCH 28/98] Update input variable names in build functions --- .../components/chains/ConversationChain.py | 2 +- .../components/chains/LLMCheckerChain.py | 2 +- .../components/chains/LLMMathChain.py | 2 +- .../langflow/components/chains/RetrievalQA.py | 2 +- .../chains/RetrievalQAWithSourcesChain.py | 2 +- .../components/chains/SQLGenerator.py | 2 +- .../langflow/components/io/ChatInput.py | 24 +++++++++---------- .../langflow/components/io/ChatOutput.py | 24 +++++++++---------- .../components/models/AmazonBedrockModel.py | 2 +- .../components/models/AnthropicModel.py | 10 +++++--- .../components/models/AzureOpenAIModel.py | 6 +++-- .../models/BaiduQianfanChatModel.py | 2 +- .../components/models/CTransformersModel.py | 6 +++-- .../langflow/components/models/CohereModel.py | 2 +- .../models/GoogleGenerativeAIModel.py | 2 +- .../components/models/HuggingFaceModel.py | 3 +-- .../components/models/LlamaCppModel.py | 2 +- .../langflow/components/models/OllamaModel.py | 2 +- .../langflow/components/models/OpenAIModel.py | 3 ++- .../components/models/VertexAiModel.py | 2 +- .../components/utilities/RunnableExecutor.py | 2 +- .../components/vectorstores/ChromaSearch.py | 6 +++-- .../custom_component/custom_component.py | 4 ++-- 23 files changed, 62 insertions(+), 52 deletions(-) diff --git a/src/backend/langflow/components/chains/ConversationChain.py b/src/backend/langflow/components/chains/ConversationChain.py index 3183954a3..7d9d28dcc 100644 --- a/src/backend/langflow/components/chains/ConversationChain.py +++ b/src/backend/langflow/components/chains/ConversationChain.py @@ -23,7 +23,7 @@ class ConversationChainComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, llm: BaseLanguageModel, memory: Optional[BaseMemory] = None, ) -> Text: diff --git a/src/backend/langflow/components/chains/LLMCheckerChain.py 
b/src/backend/langflow/components/chains/LLMCheckerChain.py index bfee0b5a9..15a540311 100644 --- a/src/backend/langflow/components/chains/LLMCheckerChain.py +++ b/src/backend/langflow/components/chains/LLMCheckerChain.py @@ -18,7 +18,7 @@ class LLMCheckerChainComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, llm: BaseLanguageModel, ) -> Text: diff --git a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/langflow/components/chains/LLMMathChain.py index 919de34e6..7fb253b83 100644 --- a/src/backend/langflow/components/chains/LLMMathChain.py +++ b/src/backend/langflow/components/chains/LLMMathChain.py @@ -24,7 +24,7 @@ class LLMMathChainComponent(CustomComponent): def build( self, - inputs: Text, + input_value: Text, llm: BaseLanguageModel, llm_chain: LLMChain, input_key: str = "question", diff --git a/src/backend/langflow/components/chains/RetrievalQA.py b/src/backend/langflow/components/chains/RetrievalQA.py index 2fe31353e..4968afe87 100644 --- a/src/backend/langflow/components/chains/RetrievalQA.py +++ b/src/backend/langflow/components/chains/RetrievalQA.py @@ -27,7 +27,7 @@ class RetrievalQAComponent(CustomComponent): self, combine_documents_chain: BaseCombineDocumentsChain, retriever: BaseRetriever, - inputs: str = "", + input_value: str = "", memory: Optional[BaseMemory] = None, input_key: str = "query", output_key: str = "result", diff --git a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py index faf3ab7dd..8be64c631 100644 --- a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py +++ b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py @@ -26,7 +26,7 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, retriever: BaseRetriever, llm: BaseLanguageModel, chain_type: str, diff --git a/src/backend/langflow/components/chains/SQLGenerator.py b/src/backend/langflow/components/chains/SQLGenerator.py index ea22a6de0..39b8fe394 100644 --- a/src/backend/langflow/components/chains/SQLGenerator.py +++ b/src/backend/langflow/components/chains/SQLGenerator.py @@ -28,7 +28,7 @@ class SQLGeneratorComponent(CustomComponent): def build( self, - inputs: Text, + input_value: Text, db: SQLDatabase, llm: BaseLanguageModel, top_k: int = 5, diff --git a/src/backend/langflow/components/io/ChatInput.py b/src/backend/langflow/components/io/ChatInput.py index 6d96a6b96..0666f92d1 100644 --- a/src/backend/langflow/components/io/ChatInput.py +++ b/src/backend/langflow/components/io/ChatInput.py @@ -11,7 +11,7 @@ class ChatInput(CustomComponent): def build_config(self): return { - "message": { + "input_value": { "input_types": ["Text"], "display_name": "Message", "multiline": True, @@ -35,26 +35,26 @@ class ChatInput(CustomComponent): self, sender: Optional[str] = "User", sender_name: Optional[str] = "User", - message: Optional[str] = None, + input_value: Optional[str] = None, session_id: Optional[str] = None, return_record: Optional[bool] = False, ) -> Union[Text, Record]: if return_record: - if isinstance(message, Record): + if isinstance(input_value, Record): # Update the data of the record - message.data["sender"] = sender - message.data["sender_name"] = sender_name - message.data["session_id"] = session_id + input_value.data["sender"] = sender + input_value.data["sender_name"] = sender_name + input_value.data["session_id"] = session_id else: - message = Record( - 
text=message, + input_value = Record( + text=input_value, data={ "sender": sender, "sender_name": sender_name, "session_id": session_id, }, ) - if not message: - message = "" - self.status = message - return message + if not input_value: + input_value = "" + self.status = input_value + return input_value diff --git a/src/backend/langflow/components/io/ChatOutput.py b/src/backend/langflow/components/io/ChatOutput.py index 05639cdb2..72667374f 100644 --- a/src/backend/langflow/components/io/ChatOutput.py +++ b/src/backend/langflow/components/io/ChatOutput.py @@ -17,7 +17,7 @@ class ChatOutput(CustomComponent): def build_config(self): return { - "message": {"input_types": ["Text"], "display_name": "Message"}, + "input_value": {"input_types": ["Text"], "display_name": "Message"}, "sender": { "options": ["Machine", "User"], "display_name": "Sender Type", @@ -39,25 +39,25 @@ class ChatOutput(CustomComponent): sender: Optional[str] = "Machine", sender_name: Optional[str] = "AI", session_id: Optional[str] = None, - message: Optional[str] = None, + input_value: Optional[str] = None, return_record: Optional[bool] = False, ) -> Union[Text, Record]: if return_record: - if isinstance(message, Record): + if isinstance(input_value, Record): # Update the data of the record - message.data["sender"] = sender - message.data["sender_name"] = sender_name - message.data["session_id"] = session_id + input_value.data["sender"] = sender + input_value.data["sender_name"] = sender_name + input_value.data["session_id"] = session_id else: - message = Record( - text=message, + input_value = Record( + text=input_value, data={ "sender": sender, "sender_name": sender_name, "session_id": session_id, }, ) - if not message: - message = "" - self.status = message - return message + if not input_value: + input_value = "" + self.status = input_value + return input_value diff --git a/src/backend/langflow/components/models/AmazonBedrockModel.py b/src/backend/langflow/components/models/AmazonBedrockModel.py index a2e008e2e..68e404773 100644 --- a/src/backend/langflow/components/models/AmazonBedrockModel.py +++ b/src/backend/langflow/components/models/AmazonBedrockModel.py @@ -39,7 +39,7 @@ class AmazonBedrockComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, model_id: str = "anthropic.claude-instant-v1", credentials_profile_name: Optional[str] = None, region_name: Optional[str] = None, diff --git a/src/backend/langflow/components/models/AnthropicModel.py b/src/backend/langflow/components/models/AnthropicModel.py index 793bec46a..be6e46d9a 100644 --- a/src/backend/langflow/components/models/AnthropicModel.py +++ b/src/backend/langflow/components/models/AnthropicModel.py @@ -9,7 +9,9 @@ from langflow.field_typing import Text class AnthropicLLM(CustomComponent): display_name: str = "AnthropicModel" - description: str = "Generate text using Anthropic Chat&Completion large language models." + description: str = ( + "Generate text using Anthropic Chat&Completion large language models." 
+ ) def build_config(self): return { @@ -53,7 +55,7 @@ class AnthropicLLM(CustomComponent): def build( self, model: str, - inputs: str, + input_value: str, anthropic_api_key: Optional[str] = None, max_tokens: Optional[int] = None, temperature: Optional[float] = None, @@ -66,7 +68,9 @@ class AnthropicLLM(CustomComponent): try: output = ChatAnthropic( model_name=model, - anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None), + anthropic_api_key=( + SecretStr(anthropic_api_key) if anthropic_api_key else None + ), max_tokens_to_sample=max_tokens, # type: ignore temperature=temperature, anthropic_api_url=api_endpoint, diff --git a/src/backend/langflow/components/models/AzureOpenAIModel.py b/src/backend/langflow/components/models/AzureOpenAIModel.py index 1e646e43a..be1f724bf 100644 --- a/src/backend/langflow/components/models/AzureOpenAIModel.py +++ b/src/backend/langflow/components/models/AzureOpenAIModel.py @@ -9,7 +9,9 @@ from langflow import CustomComponent class AzureChatOpenAIComponent(CustomComponent): display_name: str = "AzureOpenAI Model" description: str = "Generate text using LLM model from Azure OpenAI." - documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai" + documentation: str = ( + "https://python.langchain.com/docs/integrations/llms/azure_openai" + ) beta = False AZURE_OPENAI_MODELS = [ @@ -78,7 +80,7 @@ class AzureChatOpenAIComponent(CustomComponent): self, model: str, azure_endpoint: str, - inputs: str, + input_value: str, azure_deployment: str, api_key: str, api_version: str, diff --git a/src/backend/langflow/components/models/BaiduQianfanChatModel.py b/src/backend/langflow/components/models/BaiduQianfanChatModel.py index 88051d0e9..9eadb7013 100644 --- a/src/backend/langflow/components/models/BaiduQianfanChatModel.py +++ b/src/backend/langflow/components/models/BaiduQianfanChatModel.py @@ -73,7 +73,7 @@ class QianfanChatEndpointComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, model: str = "ERNIE-Bot-turbo", qianfan_ak: Optional[str] = None, qianfan_sk: Optional[str] = None, diff --git a/src/backend/langflow/components/models/CTransformersModel.py b/src/backend/langflow/components/models/CTransformersModel.py index 932b1b351..60cc2eb12 100644 --- a/src/backend/langflow/components/models/CTransformersModel.py +++ b/src/backend/langflow/components/models/CTransformersModel.py @@ -35,11 +35,13 @@ class CTransformersComponent(CustomComponent): self, model: str, model_file: str, - inputs: str, + input_value: str, model_type: str, config: Optional[Dict] = None, ) -> Text: - output = CTransformers(model=model, model_file=model_file, model_type=model_type, config=config) + output = CTransformers( + model=model, model_file=model_file, model_type=model_type, config=config + ) message = output.invoke(inputs) result = message.content if hasattr(message, "content") else message self.status = result diff --git a/src/backend/langflow/components/models/CohereModel.py b/src/backend/langflow/components/models/CohereModel.py index 3912cb855..28b198ec1 100644 --- a/src/backend/langflow/components/models/CohereModel.py +++ b/src/backend/langflow/components/models/CohereModel.py @@ -34,7 +34,7 @@ class CohereComponent(CustomComponent): def build( self, cohere_api_key: str, - inputs: str, + input_value: str, max_tokens: int = 256, temperature: float = 0.75, ) -> Text: diff --git a/src/backend/langflow/components/models/GoogleGenerativeAIModel.py 
b/src/backend/langflow/components/models/GoogleGenerativeAIModel.py index ce967bd57..2ff01c4c7 100644 --- a/src/backend/langflow/components/models/GoogleGenerativeAIModel.py +++ b/src/backend/langflow/components/models/GoogleGenerativeAIModel.py @@ -57,7 +57,7 @@ class GoogleGenerativeAIComponent(CustomComponent): self, google_api_key: str, model: str, - inputs: str, + input_value: str, max_output_tokens: Optional[int] = None, temperature: float = 0.1, top_k: Optional[int] = None, diff --git a/src/backend/langflow/components/models/HuggingFaceModel.py b/src/backend/langflow/components/models/HuggingFaceModel.py index 4357ede61..394938344 100644 --- a/src/backend/langflow/components/models/HuggingFaceModel.py +++ b/src/backend/langflow/components/models/HuggingFaceModel.py @@ -4,7 +4,6 @@ from langchain_community.chat_models.huggingface import ChatHuggingFace from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint from langflow import CustomComponent - from langflow.field_typing import Text @@ -30,7 +29,7 @@ class HuggingFaceEndpointsComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, endpoint_url: str, task: str = "text2text-generation", huggingfacehub_api_token: Optional[str] = None, diff --git a/src/backend/langflow/components/models/LlamaCppModel.py b/src/backend/langflow/components/models/LlamaCppModel.py index af0de5159..53a6f8ace 100644 --- a/src/backend/langflow/components/models/LlamaCppModel.py +++ b/src/backend/langflow/components/models/LlamaCppModel.py @@ -62,7 +62,7 @@ class LlamaCppComponent(CustomComponent): def build( self, model_path: str, - inputs: str, + input_value: str, grammar: Optional[str] = None, cache: Optional[bool] = None, client: Optional[Any] = None, diff --git a/src/backend/langflow/components/models/OllamaModel.py b/src/backend/langflow/components/models/OllamaModel.py index 129f96482..3dc8dacab 100644 --- a/src/backend/langflow/components/models/OllamaModel.py +++ b/src/backend/langflow/components/models/OllamaModel.py @@ -171,7 +171,7 @@ class ChatOllamaComponent(CustomComponent): self, base_url: Optional[str], model: str, - inputs: str, + input_value: str, mirostat: Optional[str], mirostat_eta: Optional[float] = None, mirostat_tau: Optional[float] = None, diff --git a/src/backend/langflow/components/models/OpenAIModel.py b/src/backend/langflow/components/models/OpenAIModel.py index 1cc352b20..07ba7013c 100644 --- a/src/backend/langflow/components/models/OpenAIModel.py +++ b/src/backend/langflow/components/models/OpenAIModel.py @@ -1,6 +1,7 @@ from typing import Optional from langchain_openai import ChatOpenAI + from langflow import CustomComponent from langflow.field_typing import NestedDict, Text @@ -60,7 +61,7 @@ class OpenAIModelComponent(CustomComponent): def build( self, - inputs: Text, + input_value: Text, max_tokens: Optional[int] = 256, model_kwargs: NestedDict = {}, model_name: str = "gpt-4-1106-preview", diff --git a/src/backend/langflow/components/models/VertexAiModel.py b/src/backend/langflow/components/models/VertexAiModel.py index eee804e02..81338f723 100644 --- a/src/backend/langflow/components/models/VertexAiModel.py +++ b/src/backend/langflow/components/models/VertexAiModel.py @@ -62,7 +62,7 @@ class ChatVertexAIComponent(CustomComponent): def build( self, - inputs: str, + input_value: str, credentials: Optional[str], project: str, examples: Optional[List[BaseMessage]] = [], diff --git a/src/backend/langflow/components/utilities/RunnableExecutor.py 
b/src/backend/langflow/components/utilities/RunnableExecutor.py index f83f352b4..5533e6d1d 100644 --- a/src/backend/langflow/components/utilities/RunnableExecutor.py +++ b/src/backend/langflow/components/utilities/RunnableExecutor.py @@ -32,7 +32,7 @@ class RunnableExecComponent(CustomComponent): def build( self, input_key: str, - inputs: str, + input_value: str, runnable: Runnable, output_key: str = "output", ) -> Text: diff --git a/src/backend/langflow/components/vectorstores/ChromaSearch.py b/src/backend/langflow/components/vectorstores/ChromaSearch.py index c6eb1ebac..5dd33abf2 100644 --- a/src/backend/langflow/components/vectorstores/ChromaSearch.py +++ b/src/backend/langflow/components/vectorstores/ChromaSearch.py @@ -2,6 +2,7 @@ from typing import List, Optional import chromadb # type: ignore from langchain_community.vectorstores.chroma import Chroma + from langflow import CustomComponent from langflow.field_typing import Embeddings, Text from langflow.schema import Record, docs_to_records @@ -57,7 +58,7 @@ class ChromaSearchComponent(CustomComponent): def build( self, - inputs: Text, + input_value: Text, search_type: str, collection_name: str, embedding: Embeddings, @@ -92,7 +93,8 @@ class ChromaSearchComponent(CustomComponent): if chroma_server_host is not None: chroma_settings = chromadb.config.Settings( - chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None, + chroma_server_cors_allow_origins=chroma_server_cors_allow_origins + or None, chroma_server_host=chroma_server_host, chroma_server_port=chroma_server_port or None, chroma_server_grpc_port=chroma_server_grpc_port or None, diff --git a/src/backend/langflow/interface/custom/custom_component/custom_component.py b/src/backend/langflow/interface/custom/custom_component/custom_component.py index 549c0dad3..a8c81f041 100644 --- a/src/backend/langflow/interface/custom/custom_component/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component/custom_component.py @@ -47,10 +47,10 @@ class CustomComponent(Component): """The icon of the component. It should be an emoji. Defaults to None.""" is_input: Optional[bool] = None """The input state of the component. Defaults to None. - If True, the component must have a field named 'message'.""" + If True, the component must have a field named 'input_value'.""" is_output: Optional[bool] = None """The output state of the component. Defaults to None. - If True, the component must have a field named 'message'.""" + If True, the component must have a field named 'input_value'.""" code: Optional[str] = None """The code of the component. Defaults to None.""" field_config: dict = {} From 53af441ec92dc796577fd8e8d0954045f9ecbf91 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 12:55:42 -0300 Subject: [PATCH 29/98] Update field name in FrontendNode class --- src/backend/langflow/template/frontend_node/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py index 9f62c7054..cc8356104 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/langflow/template/frontend_node/base.py @@ -49,10 +49,10 @@ class FrontendNode(BaseModel): """Icon of the frontend node.""" is_input: Optional[bool] = None """Whether the frontend node is used as an input when processing the Graph. 
- If True, there should be a field named 'message'.""" + If True, there should be a field named 'input_value'.""" is_output: Optional[bool] = None """Whether the frontend node is used as an output when processing the Graph. - If True, there should be a field named 'message'.""" + If True, there should be a field named 'input_value'.""" is_composition: Optional[bool] = None """Whether the frontend node is used for composition.""" base_classes: List[str] From e02b477fc96a9424fb38b7c3c82945d592a6aded Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 13:44:30 -0300 Subject: [PATCH 30/98] Add new process method and vertices lists --- src/backend/langflow/graph/graph/base.py | 86 +++++++++++++++++++----- 1 file changed, 68 insertions(+), 18 deletions(-) diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 68e16bed8..341a4729c 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -1,3 +1,4 @@ +import asyncio from collections import defaultdict, deque from typing import TYPE_CHECKING, Dict, Generator, List, Optional, Type, Union @@ -40,8 +41,10 @@ class Graph: self._runs = 0 self._updates = 0 self.flow_id = flow_id - self._inputs = [] - self._outputs = [] + self._is_input_vertices = [] + self._is_output_vertices = [] + self._has_session_id_vertices = [] + self._sorted_vertices_layers = [] self.top_level_vertices = [] for vertex in self._vertices: @@ -54,38 +57,37 @@ class Graph: self.inactive_vertices = set() self._build_graph() self.build_graph_maps() - self.define_inputs_and_outputs() + self.define_vertices_lists() - def define_inputs_and_outputs(self): + @property + def sorted_vertices_layers(self): + if not self._sorted_vertices_layers: + self.sort_vertices() + return self._sorted_vertices_layers + + def define_vertices_lists(self): """ - Defines the input and output vertices of the graph. + Defines the lists of vertices that are inputs, outputs, and have session_id. 
""" + attributes = ["is_input", "is_output", "has_session_id"] for vertex in self.vertices: - if vertex.is_input: - self._inputs.append(vertex.id) - if vertex.is_output: - self._outputs.append(vertex.id) + for attribute in attributes: + if getattr(vertex, attribute): + getattr(self, f"_{attribute}_vertices").append(vertex.id) - def run(self, inputs: Dict[str, str]) -> List["ResultData"]: + async def _run(self, inputs: Dict[str, str]) -> List["ResultData"]: """Runs the graph with the given inputs.""" - - # inputs is {"message": "Hello, world!"} - # we need to go through self.inputs and update the self._raw_params - # of the vertices that are inputs - for vertex_id in self.inputs: vertex = self.get_vertex(vertex_id) if vertex is None: raise ValueError(f"Vertex {vertex_id} not found") vertex.update_raw_params(inputs) try: - self.build() + await self.process() self.increment_run_count() except Exception as exc: logger.exception(exc) raise ValueError(f"Error running graph: {exc}") from exc - - # Now we get the outputs from the self.outputs outputs = [] for vertex_id in self.outputs: vertex = self.get_vertex(vertex_id) @@ -94,6 +96,23 @@ class Graph: outputs.append(vertex.result) return outputs + async def run(self, inputs: Dict[str, Union[str, list[str]]]) -> List["ResultData"]: + """Runs the graph with the given inputs.""" + + # inputs is {"message": "Hello, world!"} + # we need to go through self.inputs and update the self._raw_params + # of the vertices that are inputs + # if the value is a list, we need to run multiple times + outputs = [] + inputs_values = inputs.get("input_value") + if not isinstance(inputs_values, list): + inputs_values = [inputs_values] + for input_value in inputs_values: + run_outputs = await self._run({"input_value": input_value}) + logger.debug(f"Run outputs: {run_outputs}") + outputs.extend(run_outputs) + return outputs + @property def metadata(self): return { @@ -404,6 +423,36 @@ class Graph: raise ValueError("No root vertex found") return await root_vertex.build() + async def process(self) -> "Graph": + """Processes the graph with vertices in each layer run in parallel.""" + vertices_layers = self.sorted_vertices_layers + + for layer_index, layer in enumerate(vertices_layers): + tasks = [] + for vertex_id in layer: + vertex = self.get_vertex(vertex_id) + task = asyncio.create_task( + vertex.build(), name=f"layer-{layer_index}-vertex-{vertex_id}" + ) + tasks.append(task) + logger.debug(f"Running layer {layer_index} with {len(tasks)} tasks") + await self._execute_tasks(tasks) + logger.debug("Graph processing complete") + return self + + async def _execute_tasks(self, tasks): + """Executes tasks in parallel, handling exceptions for each task.""" + results = [] + for task in asyncio.as_completed(tasks): + try: + result = await task + results.append(result) + except Exception as e: + # Log the exception along with the task name for easier debugging + task_name = task.get_name() + logger.error(f"Task {task_name} failed with exception: {e}") + return results + def topological_sort(self) -> List[Vertex]: """ Performs a topological sort of the vertices in the graph. 
@@ -671,6 +720,7 @@ class Graph: vertices_layers = self.sort_by_avg_build_time(vertices_layers) vertices_layers = self.sort_chat_inputs_first(vertices_layers) self.increment_run_count() + self._sorted_vertices_layers = vertices_layers return vertices_layers def sort_interface_components_first( From 5a39af29a342b671caf4e872b6c6d47a683de8c0 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 13:44:38 -0300 Subject: [PATCH 31/98] Update process.py with import statements and formatting improvements --- src/backend/langflow/processing/process.py | 61 +++++++++++++++++----- 1 file changed, 49 insertions(+), 12 deletions(-) diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py index ad4f8fb78..69e47b242 100644 --- a/src/backend/langflow/processing/process.py +++ b/src/backend/langflow/processing/process.py @@ -7,6 +7,9 @@ from langchain.schema import AgentAction, Document from langchain_community.vectorstores import VectorStore from langchain_core.messages import AIMessage from langchain_core.runnables.base import Runnable +from loguru import logger +from pydantic import BaseModel + from langflow.graph.graph.base import Graph from langflow.graph.vertex.base import Vertex from langflow.interface.custom.custom_component import CustomComponent @@ -17,8 +20,6 @@ from langflow.interface.run import ( ) from langflow.services.deps import get_session_service from langflow.services.session.service import SessionService -from loguru import logger -from pydantic import BaseModel def fix_memory_inputs(langchain_object): @@ -146,7 +147,9 @@ async def process_runnable(runnable: Runnable, inputs: Union[dict, List[dict]]): elif isinstance(inputs, dict) and hasattr(runnable, "ainvoke"): result = await runnable.ainvoke(inputs) else: - raise ValueError(f"Runnable {runnable} does not support inputs of type {type(inputs)}") + raise ValueError( + f"Runnable {runnable} does not support inputs of type {type(inputs)}" + ) # Check if the result is a list of AIMessages if isinstance(result, list) and all(isinstance(r, AIMessage) for r in result): result = [r.content for r in result] @@ -155,7 +158,9 @@ async def process_runnable(runnable: Runnable, inputs: Union[dict, List[dict]]): return result -async def process_inputs_dict(built_object: Union[Chain, VectorStore, Runnable], inputs: dict): +async def process_inputs_dict( + built_object: Union[Chain, VectorStore, Runnable], inputs: dict +): if isinstance(built_object, Chain): if inputs is None: raise ValueError("Inputs must be provided for a Chain") @@ -190,7 +195,9 @@ async def process_inputs_list(built_object: Runnable, inputs: List[dict]): return await process_runnable(built_object, inputs) -async def generate_result(built_object: Union[Chain, VectorStore, Runnable], inputs: Union[dict, List[dict]]): +async def generate_result( + built_object: Union[Chain, VectorStore, Runnable], inputs: Union[dict, List[dict]] +): if isinstance(inputs, dict): result = await process_inputs_dict(built_object, inputs) elif isinstance(inputs, List) and isinstance(built_object, Runnable): @@ -222,7 +229,9 @@ async def process_graph_cached( if clear_cache: session_service.clear_session(session_id) if session_id is None: - session_id = session_service.generate_key(session_id=session_id, data_graph=data_graph) + session_id = session_service.generate_key( + session_id=session_id, data_graph=data_graph + ) # Load the graph using SessionService session = await session_service.load_session(session_id, data_graph) graph, 
artifacts = session if session else (None, None) @@ -258,14 +267,34 @@ async def build_graph_and_generate_result( return Result(result=result, session_id=session_id) -def validate_input(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]) -> List[Dict[str, Any]]: +async def run_graph( + graph: Union["Graph", dict], + session_id: str, + inputs: Optional[Union[dict, List[dict]]] = None, + artifacts: Optional[Dict[str, Any]] = None, + session_service: Optional[SessionService] = None, +): + """Run the graph and generate the result""" + if isinstance(graph, dict): + graph = Graph.from_payload(graph) + outputs = await graph.run(inputs) + if session_id and session_service: + session_service.update_session(session_id, (graph, artifacts)) + return outputs + + +def validate_input( + graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]] +) -> List[Dict[str, Any]]: if not isinstance(graph_data, dict) or not isinstance(tweaks, dict): raise ValueError("graph_data and tweaks should be dictionaries") nodes = graph_data.get("data", {}).get("nodes") or graph_data.get("nodes") if not isinstance(nodes, list): - raise ValueError("graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key") + raise ValueError( + "graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key" + ) return nodes @@ -274,7 +303,9 @@ def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None: template_data = node.get("data", {}).get("node", {}).get("template") if not isinstance(template_data, dict): - logger.warning(f"Template data for node {node.get('id')} should be a dictionary") + logger.warning( + f"Template data for node {node.get('id')} should be a dictionary" + ) return for tweak_name, tweak_value in node_tweaks.items(): @@ -289,7 +320,9 @@ def apply_tweaks_on_vertex(vertex: Vertex, node_tweaks: Dict[str, Any]) -> None: vertex.params[tweak_name] = tweak_value -def process_tweaks(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]) -> Dict[str, Any]: +def process_tweaks( + graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]] +) -> Dict[str, Any]: """ This function is used to tweak the graph data using the node id and the tweaks dict. 
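As a sketch of the expected tweaks shape, the dict is keyed by node id and each value maps template field names to override values; the file path, node ids, and field names below are made up for illustration:

    import json

    from langflow.processing.process import process_tweaks

    with open("my_flow.json") as f:  # illustrative path to an exported flow
        graph_data = json.load(f)

    tweaks = {
        "OpenAIModel-kr3fY": {"temperature": 0.2},
        "ChatInput-9bQpT": {"input_value": "Hello!"},
    }
    graph_data = process_tweaks(graph_data, tweaks)  # returns the updated flow dict

Nodes with no entry in the tweaks dict are left untouched.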
@@ -310,7 +343,9 @@ def process_tweaks(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]] if node_tweaks := tweaks.get(node_id): apply_tweaks(node, node_tweaks) else: - logger.warning("Each node should be a dictionary with an 'id' key of type str") + logger.warning( + "Each node should be a dictionary with an 'id' key of type str" + ) return graph_data @@ -322,6 +357,8 @@ def process_tweaks_on_graph(graph: Graph, tweaks: Dict[str, Dict[str, Any]]): if node_tweaks := tweaks.get(node_id): apply_tweaks_on_vertex(vertex, node_tweaks) else: - logger.warning("Each node should be a Vertex with an 'id' attribute of type str") + logger.warning( + "Each node should be a Vertex with an 'id' attribute of type str" + ) return graph From 31fc5c4be0a3b14f52914c381bafa2064273e598 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 13:45:02 -0300 Subject: [PATCH 32/98] Add session_id check in Vertex class --- src/backend/langflow/graph/vertex/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index e87389c7b..fde96d9a5 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -53,6 +53,7 @@ class Vertex: output_component_name in self.id for output_component_name in OUTPUT_COMPONENTS ) + self.has_session_id = None self._custom_component = None self.has_external_input = False self.has_external_output = False @@ -223,6 +224,8 @@ class Vertex: if isinstance(value, dict) } + self.has_session_id = "session_id" in template_dicts + self.required_inputs = [ template_dicts[key]["type"] for key, value in template_dicts.items() From c26175aede4b94dcf61cea20c8e0ff3cae2d4946 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 13:46:00 -0300 Subject: [PATCH 33/98] Add new run_flow_with_caching endpoint --- src/backend/langflow/api/v1/endpoints.py | 139 ++++++++++++++++++++--- 1 file changed, 124 insertions(+), 15 deletions(-) diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index 8a3f67ddf..2dc79e85a 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -3,11 +3,15 @@ from typing import Annotated, Any, List, Optional, Union import sqlalchemy as sa from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile, status +from loguru import logger +from sqlmodel import select + from langflow.api.utils import update_frontend_node_with_template_values from langflow.api.v1.schemas import ( CustomComponentCode, PreloadResponse, ProcessResponse, + RunResponse, TaskResponse, TaskStatusResponse, UploadFileResponse, @@ -15,15 +19,23 @@ from langflow.api.v1.schemas import ( from langflow.interface.custom.custom_component import CustomComponent from langflow.interface.custom.directory_reader import DirectoryReader from langflow.interface.custom.utils import build_custom_component_template -from langflow.processing.process import build_graph_and_generate_result, process_graph_cached, process_tweaks +from langflow.processing.process import ( + build_graph_and_generate_result, + process_graph_cached, + process_tweaks, + run_graph, +) from langflow.services.auth.utils import api_key_security, get_current_active_user from langflow.services.cache.utils import save_uploaded_file from langflow.services.database.models.flow import Flow from langflow.services.database.models.user.model import User -from langflow.services.deps 
import get_session, get_session_service, get_settings_service, get_task_service +from langflow.services.deps import ( + get_session, + get_session_service, + get_settings_service, + get_task_service, +) from langflow.services.session.service import SessionService -from loguru import logger -from sqlmodel import select try: from langflow.worker import process_graph_cached_task @@ -33,9 +45,10 @@ except ImportError: raise NotImplementedError("Celery is not installed") -from langflow.services.task.service import TaskService from sqlmodel import Session +from langflow.services.task.service import TaskService + # build router router = APIRouter(tags=["Base"]) @@ -80,9 +93,15 @@ async def process_graph_data( ) if session_id is None: # Generate a session ID - session_id = get_session_service().generate_key(session_id=session_id, data_graph=graph_data) + session_id = get_session_service().generate_key( + session_id=session_id, data_graph=graph_data + ) task_id, task = await task_service.launch_task( - process_graph_cached_task if task_service.use_celery else process_graph_cached, + ( + process_graph_cached_task + if task_service.use_celery + else process_graph_cached + ), graph_data, inputs, clear_cache, @@ -176,7 +195,11 @@ async def preload_flow( else: if session_id is None: session_id = flow_id - flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first() + flow = session.exec( + select(Flow) + .where(Flow.id == flow_id) + .where(Flow.user_id == api_key_user.id) + ).first() if flow is None: raise ValueError(f"Flow {flow_id} not found") @@ -197,6 +220,76 @@ async def preload_flow( raise HTTPException(status_code=500, detail=str(exc)) from exc +@router.post("/run/{flow_id}", response_model=ProcessResponse) +async def run_flow_with_caching( + session: Annotated[Session, Depends(get_session)], + flow_id: str, + inputs: Optional[Union[List[dict], dict]] = None, + tweaks: Optional[dict] = None, + session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821 + api_key_user: User = Depends(api_key_security), + session_service: SessionService = Depends(get_session_service), +): + try: + if session_id: + session_data = await session_service.load_session(session_id) + graph, artifacts = session_data if session_data else (None, None) + task_result: Any = None + task_status = None + if not graph: + raise ValueError("Graph not found in the session") + task_result = await run_graph( + graph, + session_id, + inputs, + artifacts=artifacts, + session_service=session_service, + ) + + else: + # Get the flow that matches the flow_id and belongs to the user + # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() + flow = session.exec( + select(Flow) + .where(Flow.id == flow_id) + .where(Flow.user_id == api_key_user.id) + ).first() + if flow is None: + raise ValueError(f"Flow {flow_id} not found") + + if flow.data is None: + raise ValueError(f"Flow {flow_id} has no data") + graph_data = flow.data + graph_data = process_tweaks(graph_data, tweaks) + task_result = await run_graph( + graph_data, + inputs, + tweaks, + session_id, + session_service=session_service, + ) + + return RunResponse( + outputs=task_result, session_id=session_id, status=task_status + ) + except sa.exc.StatementError as exc: + # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') + if "badly formed hexadecimal UUID string" in str(exc): + # This means the Flow ID is not a valid UUID which means it can't find the 
flow + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=str(exc) + ) from exc + except ValueError as exc: + if f"Flow {flow_id} not found" in str(exc): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=str(exc) + ) from exc + else: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc) + ) from exc + + @router.post( "/predict/{flow_id}", response_model=ProcessResponse, @@ -269,7 +362,11 @@ async def process( # Get the flow that matches the flow_id and belongs to the user # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() - flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first() + flow = session.exec( + select(Flow) + .where(Flow.id == flow_id) + .where(Flow.user_id == api_key_user.id) + ).first() if flow is None: raise ValueError(f"Flow {flow_id} not found") @@ -289,12 +386,18 @@ async def process( # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') if "badly formed hexadecimal UUID string" in str(exc): # This means the Flow ID is not a valid UUID which means it can't find the flow - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=str(exc) + ) from exc except ValueError as exc: if f"Flow {flow_id} not found" in str(exc): - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=str(exc) + ) from exc else: - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc) + ) from exc except Exception as e: # Log stack trace logger.exception(e) @@ -364,12 +467,16 @@ async def custom_component( built_frontend_node = build_custom_component_template(component, user_id=user.id) - built_frontend_node = update_frontend_node_with_template_values(built_frontend_node, raw_code.frontend_node) + built_frontend_node = update_frontend_node_with_template_values( + built_frontend_node, raw_code.frontend_node + ) return built_frontend_node @router.post("/custom_component/reload", status_code=HTTPStatus.OK) -async def reload_custom_component(path: str, user: User = Depends(get_current_active_user)): +async def reload_custom_component( + path: str, user: User = Depends(get_current_active_user) +): from langflow.interface.custom.utils import build_custom_component_template try: @@ -391,6 +498,8 @@ async def custom_component_update( ): component = CustomComponent(code=raw_code.code) - component_node = build_custom_component_template(component, user_id=user.id, update_field=raw_code.field) + component_node = build_custom_component_template( + component, user_id=user.id, update_field=raw_code.field + ) # Update the field return component_node From 0d215c67f5acc0eaaa59ad8ce3f46cd483903168 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 13:46:09 -0300 Subject: [PATCH 34/98] Add RunResponse schema --- src/backend/langflow/api/v1/schemas.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index adb26202a..c1c7e0a61 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -66,6 +66,14 @@ class ProcessResponse(BaseModel): 
backend: Optional[str] = None +class RunResponse(BaseModel): + """Run response schema.""" + + outputs: Optional[List[Any]] = None + status: Optional[str] = None + session_id: Optional[str] = None + + class PreloadResponse(BaseModel): """Preload response schema.""" From d5600f5304cab792a95ae3f17e7e4e4b38b6bed5 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 13:46:17 -0300 Subject: [PATCH 35/98] Update VertexStates enum to inherit from str in base.py --- src/backend/langflow/graph/vertex/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index fde96d9a5..3e1133491 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: from langflow.graph.graph.base import Graph -class VertexStates(Enum): +class VertexStates(str, Enum): """Vertex are related to it being active, inactive, or in an error state.""" ACTIVE = "active" From bf9fd0130e481990d07bbf113e7137cc7943a376 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 13:57:05 -0300 Subject: [PATCH 36/98] Remove commented code in schemas.py --- src/backend/langflow/api/v1/schemas.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index c1c7e0a61..38db8f85a 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -81,9 +81,6 @@ class PreloadResponse(BaseModel): is_clear: Optional[bool] = None -# TaskStatusResponse( -# status=task.status, result=task.result if task.ready() else None -# ) class TaskStatusResponse(BaseModel): """Task status response schema.""" From 6da52d3b13ad492820791e1421dffa4d958c86ad Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Tue, 27 Feb 2024 14:09:40 -0300 Subject: [PATCH 37/98] Feat: add Copy to node toolbar --- .../components/nodeToolbarComponent/index.tsx | 25 ++++++++++++++++--- src/frontend/src/stores/flowStore.ts | 4 +-- src/frontend/src/utils/styleUtils.ts | 2 ++ 3 files changed, 26 insertions(+), 5 deletions(-) diff --git a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx index f5b521dd5..a0136ba85 100644 --- a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx @@ -1,4 +1,4 @@ -import { cloneDeep } from "lodash"; +import _, { cloneDeep } from "lodash"; import { useEffect, useState } from "react"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import CodeAreaComponent from "../../../../components/codeAreaComponent"; @@ -89,7 +89,7 @@ export default function NodeToolbarComponent({ }, [showModalAdvanced]); const updateNodeInternals = useUpdateNodeInternals(); - + const setLastCopiedSelection = useFlowStore(state => state.setLastCopiedSelection); useEffect(() => { setFlowComponent(createFlowComponent(cloneDeep(data), version)); }, [ @@ -143,6 +143,9 @@ export default function NodeToolbarComponent({ case "delete": deleteNode(data.id); break; + case "copy": + const node = nodes.filter(node => node.id === data.id) + setLastCopiedSelection({ nodes: _.cloneDeep(node), edges: [] }) } }; @@ -361,6 +364,22 @@ export default function NodeToolbarComponent({ ) )} + +
+ {" "} + Copy{" "} + + + C + +
+
{hasStore && (
diff --git a/src/frontend/src/stores/flowStore.ts b/src/frontend/src/stores/flowStore.ts index b8b2508ce..9db5c108f 100644 --- a/src/frontend/src/stores/flowStore.ts +++ b/src/frontend/src/stores/flowStore.ts @@ -9,7 +9,7 @@ import { applyNodeChanges, } from "reactflow"; import { create } from "zustand"; -import { FLOW_BUILD_SUCCESS_ALERT } from "../alerts_constants"; +import { FLOW_BUILD_SUCCESS_ALERT, MISSED_ERROR_ALERT } from "../alerts_constants"; import { BuildStatus } from "../constants/enums"; import { getFlowPool, updateFlowInDatabase } from "../controllers/API"; import { VertexBuildTypeAPI } from "../types/api"; @@ -385,7 +385,7 @@ const useFlowStore = create((set, get) => ({ ); if (errors.length > 0) { setErrorData({ - title: "Oops! Looks like you missed something", + title: MISSED_ERROR_ALERT, list: errors, }); get().setIsBuilding(false); diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts index 2f9907965..62e1c6ec7 100644 --- a/src/frontend/src/utils/styleUtils.ts +++ b/src/frontend/src/utils/styleUtils.ts @@ -25,6 +25,7 @@ import { Code, Code2, Combine, + Command, Compass, Copy, Cpu, @@ -426,4 +427,5 @@ export const nodeIconsLucide: iconsType = { AlertCircle, Bot, Delete, + Command, }; From 12d9aea93250d673293d8e09c742e122a282b65d Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Tue, 27 Feb 2024 14:48:23 -0300 Subject: [PATCH 38/98] Fix: left tab dont scroll on chat view --- src/frontend/src/components/IOview/index.tsx | 4 ++-- src/frontend/src/components/codeTabsComponent/index.tsx | 2 +- src/frontend/src/style/applies.css | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/frontend/src/components/IOview/index.tsx b/src/frontend/src/components/IOview/index.tsx index 4ac7bb00d..5fb5ba665 100644 --- a/src/frontend/src/components/IOview/index.tsx +++ b/src/frontend/src/components/IOview/index.tsx @@ -97,14 +97,14 @@ export default function IOView({ children, open, setOpen }): JSX.Element { {selectedTab !== 0 && (
{ setSelectedTab(Number(value)); diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx index 2bea4265f..d6fabb0cf 100644 --- a/src/frontend/src/components/codeTabsComponent/index.tsx +++ b/src/frontend/src/components/codeTabsComponent/index.tsx @@ -181,7 +181,7 @@ export default function CodeTabsComponent({ {tabs.map((tab, idx) => ( {idx < 4 ? ( diff --git a/src/frontend/src/style/applies.css b/src/frontend/src/style/applies.css index 628d74cbd..ab3e8e23d 100644 --- a/src/frontend/src/style/applies.css +++ b/src/frontend/src/style/applies.css @@ -851,7 +851,7 @@ @apply flex items-center justify-between px-2 py-2; } .api-modal-tabs-content { - @apply -mt-1 h-full w-full overflow-hidden px-4 pb-4; + @apply -mt-1 h-full w-full px-4 pb-4; } .api-modal-accordion-display { @apply mt-2 flex h-full w-full; From 3a3ffba6bddef9d46dc9cbe22bef693ce1cfed76 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 14:55:23 -0300 Subject: [PATCH 39/98] Remove unused prompt.py file --- src/backend/langflow/utils/prompt.py | 58 ---------------------------- 1 file changed, 58 deletions(-) delete mode 100644 src/backend/langflow/utils/prompt.py diff --git a/src/backend/langflow/utils/prompt.py b/src/backend/langflow/utils/prompt.py deleted file mode 100644 index 871193f45..000000000 --- a/src/backend/langflow/utils/prompt.py +++ /dev/null @@ -1,58 +0,0 @@ -from typing import Any, Union - -from langchain_core.prompts import PromptTemplate as LCPromptTemplate -from llama_index.prompts import PromptTemplate as LIPromptTemplate - -PromptTemplateTypes = Union[LCPromptTemplate, LIPromptTemplate] - - -class GenericPromptTemplate: - def __init__(self, prompt_template: PromptTemplateTypes): - object.__setattr__(self, "prompt_template", prompt_template) - - @property - def input_keys(self): - prompt_template = object.__getattribute__(self, "prompt_template") - if isinstance(prompt_template, LCPromptTemplate): - return prompt_template.input_variables - elif isinstance(prompt_template, LIPromptTemplate): - return prompt_template.template_vars - else: - raise TypeError(f"Unknown prompt template type {type(prompt_template)}") - - def to_lc_prompt(self): - prompt_template = object.__getattribute__(self, "prompt_template") - if isinstance(prompt_template, LCPromptTemplate): - return prompt_template - elif isinstance(prompt_template, LIPromptTemplate): - return LCPromptTemplate.from_template(prompt_template.get_template()) - else: - raise TypeError(f"Unknown prompt template type {type(prompt_template)}") - - def to_li_prompt(self): - prompt_template = object.__getattribute__(self, "prompt_template") - if isinstance(prompt_template, LIPromptTemplate): - return prompt_template - elif isinstance(prompt_template, LCPromptTemplate): - return LIPromptTemplate(template=prompt_template.template) - else: - raise TypeError(f"Unknown prompt template type {type(prompt_template)}") - - def __or__(self, other): - prompt_template = object.__getattribute__(self, "prompt_template") - if isinstance(prompt_template, LIPromptTemplate): - return self.to_lc_prompt() | other - else: - raise TypeError(f"Unknown prompt template type {type(other)}") - - def __getattribute__(self, name: str) -> Any: - if name in { - "input_keys", - "to_lc_prompt", - "to_li_prompt", - "__or__", - "prompt_template", - }: - return object.__getattribute__(self, name) - prompt_template = object.__getattribute__(self, "prompt_template") - return getattr(prompt_template, name) 
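With the adapter removed, the equivalent conversions can still be written directly against the two libraries; a minimal sketch, assuming both langchain-core and llama-index are installed and using an illustrative template string:

    from langchain_core.prompts import PromptTemplate as LCPromptTemplate
    from llama_index.prompts import PromptTemplate as LIPromptTemplate

    lc_prompt = LCPromptTemplate.from_template("Answer the question: {question}")
    li_prompt = LIPromptTemplate(template=lc_prompt.template)  # LangChain -> LlamaIndex
    lc_again = LCPromptTemplate.from_template(li_prompt.get_template())  # and back

    print(lc_prompt.input_variables)  # ['question']
    print(li_prompt.template_vars)    # ['question']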
From 99803c53f7ebc9fa29c80bc9def3d8cd0585e5a7 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 15:30:47 -0300 Subject: [PATCH 40/98] Add run_id to VerticesOrderResponse --- src/backend/langflow/api/v1/chat.py | 4 +++- src/backend/langflow/api/v1/schemas.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index ca067ce6f..7f85bb211 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -1,5 +1,6 @@ import time from typing import Optional +import uuid from fastapi import ( APIRouter, @@ -120,7 +121,8 @@ async def get_vertices( # Now vertices is a list of lists # We need to get the id of each vertex # and return the same structure but only with the ids - return VerticesOrderResponse(ids=vertices) + run_id = uuid.uuid4() + return VerticesOrderResponse(ids=vertices, run_id=run_id) except Exception as exc: logger.error(f"Error checking build status: {exc}") diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index 38db8f85a..0c33fdb36 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -225,6 +225,7 @@ class ApiKeyCreateRequest(BaseModel): class VerticesOrderResponse(BaseModel): ids: List[List[str]] + run_id: UUID class ResultDataResponse(BaseModel): From ce32f49a2ebc50dbe8492a8aeec7bdfbf9c942df Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 15:34:25 -0300 Subject: [PATCH 41/98] Refactor ChatInput and ChatOutput components --- .../langflow/components/io/ChatInput.py | 52 ++------- .../langflow/components/io/ChatOutput.py | 57 ++-------- src/backend/langflow/io/__init__.py | 0 src/backend/langflow/io/schema.py | 101 ++++++++++++++++++ 4 files changed, 120 insertions(+), 90 deletions(-) create mode 100644 src/backend/langflow/io/__init__.py create mode 100644 src/backend/langflow/io/schema.py diff --git a/src/backend/langflow/components/io/ChatInput.py b/src/backend/langflow/components/io/ChatInput.py index 0666f92d1..653054e0a 100644 --- a/src/backend/langflow/components/io/ChatInput.py +++ b/src/backend/langflow/components/io/ChatInput.py @@ -1,36 +1,14 @@ from typing import Optional, Union -from langflow import CustomComponent from langflow.field_typing import Text +from langflow.io.schema import ChatComponent from langflow.schema import Record -class ChatInput(CustomComponent): +class ChatInput(ChatComponent): display_name = "Chat Input" description = "Used to get user input from the chat." 
- def build_config(self): - return { - "input_value": { - "input_types": ["Text"], - "display_name": "Message", - "multiline": True, - }, - "sender": { - "options": ["Machine", "User"], - "display_name": "Sender Type", - }, - "sender_name": {"display_name": "Sender Name"}, - "session_id": { - "display_name": "Session ID", - "info": "Session ID of the chat history.", - }, - "return_record": { - "display_name": "Return Record", - "info": "Return the message as a record containing the sender, sender_name, and session_id.", - }, - } - def build( self, sender: Optional[str] = "User", @@ -39,22 +17,10 @@ class ChatInput(CustomComponent): session_id: Optional[str] = None, return_record: Optional[bool] = False, ) -> Union[Text, Record]: - if return_record: - if isinstance(input_value, Record): - # Update the data of the record - input_value.data["sender"] = sender - input_value.data["sender_name"] = sender_name - input_value.data["session_id"] = session_id - else: - input_value = Record( - text=input_value, - data={ - "sender": sender, - "sender_name": sender_name, - "session_id": session_id, - }, - ) - if not input_value: - input_value = "" - self.status = input_value - return input_value + return super().build( + sender=sender, + sender_name=sender_name, + input_value=input_value, + session_id=session_id, + return_record=return_record, + ) diff --git a/src/backend/langflow/components/io/ChatOutput.py b/src/backend/langflow/components/io/ChatOutput.py index 72667374f..e4c5d03ee 100644 --- a/src/backend/langflow/components/io/ChatOutput.py +++ b/src/backend/langflow/components/io/ChatOutput.py @@ -9,55 +9,18 @@ class ChatOutput(CustomComponent): display_name = "Chat Output" description = "Used to send a message to the chat." - field_config = { - "code": { - "show": True, - } - } - - def build_config(self): - return { - "input_value": {"input_types": ["Text"], "display_name": "Message"}, - "sender": { - "options": ["Machine", "User"], - "display_name": "Sender Type", - }, - "sender_name": {"display_name": "Sender Name"}, - "session_id": { - "display_name": "Session ID", - "info": "Session ID of the chat history.", - "input_types": ["Text"], - }, - "return_record": { - "display_name": "Return Record", - "info": "Return the message as a record containing the sender, sender_name, and session_id.", - }, - } - def build( self, - sender: Optional[str] = "Machine", - sender_name: Optional[str] = "AI", - session_id: Optional[str] = None, + sender: Optional[str] = "User", + sender_name: Optional[str] = "User", input_value: Optional[str] = None, + session_id: Optional[str] = None, return_record: Optional[bool] = False, ) -> Union[Text, Record]: - if return_record: - if isinstance(input_value, Record): - # Update the data of the record - input_value.data["sender"] = sender - input_value.data["sender_name"] = sender_name - input_value.data["session_id"] = session_id - else: - input_value = Record( - text=input_value, - data={ - "sender": sender, - "sender_name": sender_name, - "session_id": session_id, - }, - ) - if not input_value: - input_value = "" - self.status = input_value - return input_value + return super().build( + sender=sender, + sender_name=sender_name, + input_value=input_value, + session_id=session_id, + return_record=return_record, + ) diff --git a/src/backend/langflow/io/__init__.py b/src/backend/langflow/io/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/io/schema.py b/src/backend/langflow/io/schema.py new file mode 100644 index 
000000000..d83d3472e --- /dev/null +++ b/src/backend/langflow/io/schema.py @@ -0,0 +1,101 @@ +import warnings +from typing import Optional, Union + +from langflow import CustomComponent +from langflow.field_typing import Text +from langflow.memory import add_messages +from langflow.schema import Record + + +class ChatComponent(CustomComponent): + display_name = "Chat Component" + description = "Use as base for chat components." + + def build_config(self): + return { + "input_value": { + "input_types": ["Text"], + "display_name": "Message", + "multiline": True, + }, + "sender": { + "options": ["Machine", "User"], + "display_name": "Sender Type", + }, + "sender_name": {"display_name": "Sender Name"}, + "session_id": { + "display_name": "Session ID", + "info": "If provided, the message will be stored in the memory.", + }, + "return_record": { + "display_name": "Return Record", + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + }, + } + + def store_message( + self, + message: Union[Text, Record], + session_id: Optional[str] = None, + sender: Optional[str] = None, + sender_name: Optional[str] = None, + ) -> list[Record]: + if not message: + warnings.warn("No message provided.") + return [] + + if not session_id or not sender or not sender_name: + raise ValueError( + "All of session_id, sender, and sender_name must be provided." + ) + + if not record: + record = [] + if not session_id or not sender or not sender_name: + raise ValueError + for text in text: + record = Record( + text=text, + data={ + "session_id": session_id, + "sender": sender, + "sender_name": sender_name, + }, + ) + record.append(record) + elif isinstance(record, Record): + record = [record] + + self.status = record + record = add_messages(record) + return record + + def build( + self, + sender: Optional[str] = "User", + sender_name: Optional[str] = "User", + input_value: Optional[str] = None, + session_id: Optional[str] = None, + return_record: Optional[bool] = False, + ) -> Union[Text, Record]: + if return_record: + if isinstance(input_value, Record): + # Update the data of the record + input_value.data["sender"] = sender + input_value.data["sender_name"] = sender_name + input_value.data["session_id"] = session_id + else: + input_value = Record( + text=input_value, + data={ + "sender": sender, + "sender_name": sender_name, + "session_id": session_id, + }, + ) + if not input_value: + input_value = "" + self.status = input_value + if session_id: + self.store_message(input_value, session_id, sender, sender_name) + return input_value From 223db74410a09475c7b773ab16b205de07932c7f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 27 Feb 2024 15:37:56 -0300 Subject: [PATCH 42/98] Add run_id to VerticesOrderTypeAPI --- src/frontend/src/types/api/index.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/frontend/src/types/api/index.ts b/src/frontend/src/types/api/index.ts index 3e9f4a0f5..6663692dc 100644 --- a/src/frontend/src/types/api/index.ts +++ b/src/frontend/src/types/api/index.ts @@ -135,6 +135,7 @@ export type Component = { export type VerticesOrderTypeAPI = { ids: Array>; + run_id: string; }; export type VertexBuildTypeAPI = { From 8461eec1803604e6d32337b69880e9980fc419a0 Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Tue, 27 Feb 2024 15:46:07 -0300 Subject: [PATCH 43/98] Refactor: Move texts to constants files --- .../components/parameterComponent/index.tsx | 6 ++-- .../src/CustomNodes/GenericNode/index.tsx | 6 ++-- 
.../src/alerts/alertDropDown/index.tsx | 3 +- src/frontend/src/components/IOview/index.tsx | 6 ++-- .../chatComponent/buildTrigger/index.tsx | 2 +- .../components/menuBar/index.tsx | 3 +- .../components/inputFileComponent/index.tsx | 2 +- .../newChatView/chatInput/index.tsx | 5 +-- .../src/components/newChatView/index.tsx | 7 ++-- .../components/textAreaComponent/index.tsx | 3 +- .../src/{ => constants}/alerts_constants.tsx | 0 src/frontend/src/constants/constants.ts | 32 +++++++++++++++++++ .../src/modals/SecretKeyModal/index.tsx | 2 +- .../src/modals/StoreApiKeyModal/index.tsx | 11 ++++--- .../src/modals/codeAreaModal/index.tsx | 6 ++-- src/frontend/src/modals/exportModal/index.tsx | 9 +++--- .../src/modals/formModal/chatInput/index.tsx | 7 ++-- src/frontend/src/modals/formModal/index.tsx | 10 +++--- .../src/modals/genericModal/index.tsx | 8 +++-- .../src/pages/AdminPage/LoginPage/index.tsx | 2 +- src/frontend/src/pages/AdminPage/index.tsx | 2 +- src/frontend/src/pages/ApiKeysPage/index.tsx | 2 +- .../components/PageComponent/index.tsx | 2 +- .../extraSidebarComponent/index.tsx | 2 +- .../MainPage/components/components/index.tsx | 2 +- src/frontend/src/pages/MainPage/index.tsx | 6 ++-- .../src/pages/ProfileSettingsPage/index.tsx | 2 +- src/frontend/src/pages/StorePage/index.tsx | 7 ++-- src/frontend/src/pages/loginPage/index.tsx | 2 +- src/frontend/src/pages/signUpPage/index.tsx | 2 +- src/frontend/src/stores/flowStore.ts | 2 +- src/frontend/src/utils/reactflowUtils.ts | 3 +- 32 files changed, 104 insertions(+), 60 deletions(-) rename src/frontend/src/{ => constants}/alerts_constants.tsx (100%) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 0aae88857..9006039d1 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -19,6 +19,8 @@ import { Button } from "../../../../components/ui/button"; import { LANGFLOW_SUPPORTED_TYPES, TOOLTIP_EMPTY, + inputHandleHover, + outputHandleHover, } from "../../../../constants/constants"; import { postCustomComponentUpdate } from "../../../../controllers/API"; import useAlertStore from "../../../../stores/alertStore"; @@ -182,8 +184,8 @@ export default function ParameterComponent({ {index === 0 && ( {left - ? "Avaliable input components:" - : "Avaliable output components:"} + ? inputHandleHover + : outputHandleHover} )} Building... + {statusBuilding} ) : !validationStatus ? ( - Build to validate status. + {statusBuild} ) : (
{typeof validationStatus.params === "string" diff --git a/src/frontend/src/alerts/alertDropDown/index.tsx b/src/frontend/src/alerts/alertDropDown/index.tsx index 967d82174..cf3fa13dc 100644 --- a/src/frontend/src/alerts/alertDropDown/index.tsx +++ b/src/frontend/src/alerts/alertDropDown/index.tsx @@ -8,6 +8,7 @@ import { import useAlertStore from "../../stores/alertStore"; import { AlertDropdownType } from "../../types/alerts"; import SingleAlert from "./components/singleAlertComponent"; +import { zeroNotifications } from "../../constants/constants"; export default function AlertDropdown({ children, @@ -68,7 +69,7 @@ export default function AlertDropdown({ )) ) : (
- No new notifications + {zeroNotifications}
)}
diff --git a/src/frontend/src/components/IOview/index.tsx b/src/frontend/src/components/IOview/index.tsx index 5fb5ba665..a8edfa9b6 100644 --- a/src/frontend/src/components/IOview/index.tsx +++ b/src/frontend/src/components/IOview/index.tsx @@ -1,6 +1,6 @@ import { cloneDeep } from "lodash"; import { useEffect, useState } from "react"; -import { CHAT_FORM_DIALOG_SUBTITLE } from "../../constants/constants"; +import { CHAT_FORM_DIALOG_SUBTITLE, outputsModalTitle, textInputModalTitle } from "../../constants/constants"; import BaseModal from "../../modals/baseModal"; import useAlertStore from "../../stores/alertStore"; import useFlowStore from "../../stores/flowStore"; @@ -127,7 +127,7 @@ export default function IOView({ children, open, setOpen }): JSX.Element { >
- Text Inputs + {textInputModalTitle}
{nodes .filter((node) => @@ -188,7 +188,7 @@ export default function IOView({ children, open, setOpen }): JSX.Element { >
- Prompt Outputs + {outputsModalTitle}
{nodes .filter((node) => diff --git a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx index db5a81758..2db2e0c3b 100644 --- a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx +++ b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx @@ -8,7 +8,7 @@ import useFlowStore from "../../../stores/flowStore"; import { validateNodes } from "../../../utils/reactflowUtils"; import RadialProgressComponent from "../../RadialProgress"; import IconComponent from "../../genericIconComponent"; -import { MISSED_ERROR_ALERT } from "../../../alerts_constants"; +import { MISSED_ERROR_ALERT } from "../../../constants/alerts_constants"; export default function BuildTrigger({ open, diff --git a/src/frontend/src/components/headerComponent/components/menuBar/index.tsx b/src/frontend/src/components/headerComponent/components/menuBar/index.tsx index db6fa5b2c..7dcc62ee2 100644 --- a/src/frontend/src/components/headerComponent/components/menuBar/index.tsx +++ b/src/frontend/src/components/headerComponent/components/menuBar/index.tsx @@ -17,6 +17,7 @@ import { cn } from "../../../../utils/utils"; import ShadTooltip from "../../../ShadTooltipComponent"; import IconComponent from "../../../genericIconComponent"; import { Button } from "../../../ui/button"; +import { savedHover } from "../../../../constants/constants"; export const MenuBar = ({ removeFunction, @@ -127,7 +128,7 @@ export const MenuBar = ({
diff --git a/src/frontend/src/components/newChatView/index.tsx b/src/frontend/src/components/newChatView/index.tsx index 94231d4b2..86b8ace6a 100644 --- a/src/frontend/src/components/newChatView/index.tsx +++ b/src/frontend/src/components/newChatView/index.tsx @@ -14,7 +14,8 @@ import { import { classNames } from "../../utils/utils"; import ChatInput from "./chatInput"; import ChatMessage from "./chatMessage"; -import { INFO_MISSING_ALERT, NOCHATOUTPUT_NOTICE_ALERT } from "../../alerts_constants"; +import { INFO_MISSING_ALERT, NOCHATOUTPUT_NOTICE_ALERT } from "../../constants/alerts_constants"; +import { chatFirstInitialText, chatSecondInitialText } from "../../constants/constants"; export default function NewChatView({ sendMessage, @@ -178,14 +179,14 @@ export default function NewChatView({
- Start a conversation and click the agent's thoughts{" "} + {chatFirstInitialText}{" "} {" "} - to inspect the chaining process. + {chatSecondInitialText}
diff --git a/src/frontend/src/components/textAreaComponent/index.tsx b/src/frontend/src/components/textAreaComponent/index.tsx index 7b41f3757..14ba7c67e 100644 --- a/src/frontend/src/components/textAreaComponent/index.tsx +++ b/src/frontend/src/components/textAreaComponent/index.tsx @@ -4,6 +4,7 @@ import GenericModal from "../../modals/genericModal"; import { TextAreaComponentType } from "../../types/components"; import IconComponent from "../genericIconComponent"; import { Input } from "../ui/input"; +import { editTextModalTitle } from "../../constants/constants"; export default function TextAreaComponent({ value, @@ -37,7 +38,7 @@ export default function TextAreaComponent({ { onChange(value); diff --git a/src/frontend/src/alerts_constants.tsx b/src/frontend/src/constants/alerts_constants.tsx similarity index 100% rename from src/frontend/src/alerts_constants.tsx rename to src/frontend/src/constants/alerts_constants.tsx diff --git a/src/frontend/src/constants/constants.ts b/src/frontend/src/constants/constants.ts index d6eb6eb56..2c0df8fad 100644 --- a/src/frontend/src/constants/constants.ts +++ b/src/frontend/src/constants/constants.ts @@ -682,3 +682,35 @@ export const priorityFields = new Set(["code", "template"]); export const INPUT_TYPES = new Set(["ChatInput", "TextInput"]); export const OUTPUT_TYPES = new Set(["ChatOutput", "TextOutput"]); + +export const chatFirstInitialText = "Start a conversation and click the agent's thoughts"; + +export const chatSecondInitialText = "to inspect the chaining process."; + +export const zeroNotifications = "No new notifications"; + +export const successBuild = "Built sucessfully ✨"; + +export const alertSaveWApi = "Caution: Uncheck this box only removes API keys from fields specifically designated for API keys." + +export const saveWApiCheckbox = "Save with my API keys"; +export const editTextModalTitle = "Edit Text" +export const editTextPlaceholder = "Type message here." +export const inputHandleHover = "Avaliable input components:"; +export const outputHandleHover = "Avaliable output components:"; +export const textInputModalTitle = "Text Inputs"; +export const outputsModalTitle = "Prompt Outputs" +export const langflowChatTitle = "Langflow Chat" +export const chatInputPlaceholder = "No chat input variables found. Click to run your flow." +export const chatInputPlaceholderSend = "Send a message..." +export const editCodeTitle = "Edit Code" +export const myCollectionDesc = "Manage your personal projects. Download or upload your collection." +export const storeDesc = "Search flows and components from the community."; +export const storeTitle = "Langflow Store" +export const noApi = "You don't have an API key. " +export const insertApi = "Insert your Langflow API key." +export const invalidApi = "Your API key is not valid. " +export const createApi = `Don’t have an API key? Sign up at` +export const statusBuild = "Build to validate status." +export const statusBuilding = "Building..." 
+export const savedHover = "Last saved at " \ No newline at end of file diff --git a/src/frontend/src/modals/SecretKeyModal/index.tsx b/src/frontend/src/modals/SecretKeyModal/index.tsx index a09e58c36..a03ce8262 100644 --- a/src/frontend/src/modals/SecretKeyModal/index.tsx +++ b/src/frontend/src/modals/SecretKeyModal/index.tsx @@ -8,7 +8,7 @@ import useAlertStore from "../../stores/alertStore"; import { ApiKeyType } from "../../types/components"; import { nodeIconsLucide } from "../../utils/styleUtils"; import BaseModal from "../baseModal"; -import { COPIED_NOTICE_ALERT } from "../../alerts_constants"; +import { COPIED_NOTICE_ALERT } from "../../constants/alerts_constants"; export default function SecretKeyModal({ title, diff --git a/src/frontend/src/modals/StoreApiKeyModal/index.tsx b/src/frontend/src/modals/StoreApiKeyModal/index.tsx index 038e33402..168fc79fc 100644 --- a/src/frontend/src/modals/StoreApiKeyModal/index.tsx +++ b/src/frontend/src/modals/StoreApiKeyModal/index.tsx @@ -9,7 +9,8 @@ import useAlertStore from "../../stores/alertStore"; import { useStoreStore } from "../../stores/storeStore"; import { StoreApiKeyType } from "../../types/components"; import BaseModal from "../baseModal"; -import { API_ERROR_ALERT, API_SUCCESS_ALERT } from "../../alerts_constants"; +import { API_ERROR_ALERT, API_SUCCESS_ALERT } from "../../constants/alerts_constants"; +import { createApi, insertApi, invalidApi, noApi } from "../../constants/constants"; export default function StoreApiKeyModal({ children, @@ -60,10 +61,10 @@ export default function StoreApiKeyModal({ API Key @@ -98,7 +99,7 @@ export default function StoreApiKeyModal({
- Caution: Uncheck this box only removes API keys from fields - specifically designated for API keys. + {alertSaveWApi} diff --git a/src/frontend/src/modals/formModal/chatInput/index.tsx b/src/frontend/src/modals/formModal/chatInput/index.tsx index 242ce6532..bf39740d4 100644 --- a/src/frontend/src/modals/formModal/chatInput/index.tsx +++ b/src/frontend/src/modals/formModal/chatInput/index.tsx @@ -3,6 +3,7 @@ import IconComponent from "../../../components/genericIconComponent"; import { Textarea } from "../../../components/ui/textarea"; import { chatInputType } from "../../../types/components"; import { classNames } from "../../../utils/utils"; +import { chatInputPlaceholder, chatInputPlaceholderSend } from "../../../constants/constants"; export default function ChatInput({ lockChat, @@ -51,7 +52,7 @@ export default function ChatInput({ ? "Thinking..." : typeof chatValue === "object" && Object.keys(chatValue)?.length === 0 - ? "No chat input variables found. Click to run your flow." + ? chatInputPlaceholder : chatValue } onChange={(event): void => { @@ -68,8 +69,8 @@ export default function ChatInput({ )} placeholder={ noInput - ? "No chat input variables found. Click to run your flow." - : "Send a message..." + ? chatInputPlaceholder + : chatInputPlaceholderSend } />
diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx index f18eff977..1d000ee31 100644 --- a/src/frontend/src/modals/formModal/index.tsx +++ b/src/frontend/src/modals/formModal/index.tsx @@ -20,14 +20,14 @@ import { DialogTrigger, } from "../../components/ui/dialog"; import { Textarea } from "../../components/ui/textarea"; -import { CHAT_FORM_DIALOG_SUBTITLE } from "../../constants/constants"; +import { CHAT_FORM_DIALOG_SUBTITLE, chatFirstInitialText, chatSecondInitialText, langflowChatTitle } from "../../constants/constants"; import { AuthContext } from "../../contexts/authContext"; import { getBuildStatus } from "../../controllers/API"; import useAlertStore from "../../stores/alertStore"; import useFlowStore from "../../stores/flowStore"; import { FlowState } from "../../types/tabs"; import { validateNodes } from "../../utils/reactflowUtils"; -import { CHAT_ERROR_ALERT, INFO_MISSING_ALERT, MSG_ERROR_ALERT } from "../../alerts_constants"; +import { CHAT_ERROR_ALERT, INFO_MISSING_ALERT, MSG_ERROR_ALERT } from "../../constants/alerts_constants"; export default function FormModal({ flow, @@ -585,20 +585,20 @@ export default function FormModal({ 👋{" "} - Langflow Chat + {langflowChatTitle}
- Start a conversation and click the agent's thoughts{" "} + {chatFirstInitialText} {" "} {" "} - to inspect the chaining process. + {chatSecondInitialText}
diff --git a/src/frontend/src/modals/genericModal/index.tsx b/src/frontend/src/modals/genericModal/index.tsx index 54ea08edb..b79431ac9 100644 --- a/src/frontend/src/modals/genericModal/index.tsx +++ b/src/frontend/src/modals/genericModal/index.tsx @@ -10,6 +10,8 @@ import { MAX_WORDS_HIGHLIGHT, PROMPT_DIALOG_SUBTITLE, TEXT_DIALOG_SUBTITLE, + editTextModalTitle, + editTextPlaceholder, regexHighlight, } from "../../constants/constants"; import { TypeModal } from "../../constants/enums"; @@ -19,7 +21,7 @@ import { genericModalPropsType } from "../../types/components"; import { handleKeyDown } from "../../utils/reactflowUtils"; import { classNames, varHighlightHTML } from "../../utils/utils"; import BaseModal from "../baseModal"; -import { BUG_ALERT, PROMPT_ERROR_ALERT, PROMPT_SUCCESS_ALERT, TEMP_NOTICE_ALERT } from "../../alerts_constants"; +import { BUG_ALERT, PROMPT_ERROR_ALERT, PROMPT_SUCCESS_ALERT, TEMP_NOTICE_ALERT } from "../../constants/alerts_constants"; export default function GenericModal({ field_name = "", @@ -211,7 +213,7 @@ export default function GenericModal({ setInputValue(event.target.value); checkVariables(event.target.value); }} - placeholder="Type message here." + placeholder={editTextPlaceholder} onKeyDown={(e) => { handleKeyDown(e, inputValue, ""); }} @@ -233,7 +235,7 @@ export default function GenericModal({ onChange={(event) => { setInputValue(event.target.value); }} - placeholder="Type message here." + placeholder={editTextPlaceholder} onKeyDown={(e) => { handleKeyDown(e, value, ""); }} diff --git a/src/frontend/src/pages/AdminPage/LoginPage/index.tsx b/src/frontend/src/pages/AdminPage/LoginPage/index.tsx index c94474ab8..335cf2a6a 100644 --- a/src/frontend/src/pages/AdminPage/LoginPage/index.tsx +++ b/src/frontend/src/pages/AdminPage/LoginPage/index.tsx @@ -11,7 +11,7 @@ import { inputHandlerEventType, loginInputStateType, } from "../../../types/components"; -import { SIGNIN_ERROR_ALERT } from "../../../alerts_constants"; +import { SIGNIN_ERROR_ALERT } from "../../../constants/alerts_constants"; export default function LoginAdminPage() { const navigate = useNavigate(); diff --git a/src/frontend/src/pages/AdminPage/index.tsx b/src/frontend/src/pages/AdminPage/index.tsx index 037fce720..16cce09da 100644 --- a/src/frontend/src/pages/AdminPage/index.tsx +++ b/src/frontend/src/pages/AdminPage/index.tsx @@ -33,7 +33,7 @@ import useAlertStore from "../../stores/alertStore"; import useFlowsManagerStore from "../../stores/flowsManagerStore"; import { Users } from "../../types/api"; import { UserInputType } from "../../types/components"; -import { USER_ADD_ERROR_ALERT, USER_ADD_SUCCESS_ALERT, USER_DEL_ERROR_ALERT, USER_DEL_SUCCESS_ALERT, USER_EDIT_ERROR_ALERT, USER_EDIT_SUCCESS_ALERT } from "../../alerts_constants"; +import { USER_ADD_ERROR_ALERT, USER_ADD_SUCCESS_ALERT, USER_DEL_ERROR_ALERT, USER_DEL_SUCCESS_ALERT, USER_EDIT_ERROR_ALERT, USER_EDIT_SUCCESS_ALERT } from "../../constants/alerts_constants"; export default function AdminPage() { const [inputValue, setInputValue] = useState(""); diff --git a/src/frontend/src/pages/ApiKeysPage/index.tsx b/src/frontend/src/pages/ApiKeysPage/index.tsx index 3143c52a6..bf9c62e7f 100644 --- a/src/frontend/src/pages/ApiKeysPage/index.tsx +++ b/src/frontend/src/pages/ApiKeysPage/index.tsx @@ -26,7 +26,7 @@ import { } from "../../constants/constants"; import useAlertStore from "../../stores/alertStore"; import { ApiKey } from "../../types/components"; -import { DEL_KEY_ERROR_ALERT, DEL_KEY_SUCCESS_ALERT } from "../../alerts_constants"; 
+import { DEL_KEY_ERROR_ALERT, DEL_KEY_SUCCESS_ALERT } from "../../constants/alerts_constants"; export default function ApiKeysPage() { const [loadingKeys, setLoadingKeys] = useState(true); diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx index d7358b27a..ba12a25ea 100644 --- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx @@ -31,7 +31,7 @@ import { getRandomName, isWrappedWithClass } from "../../../../utils/utils"; import ConnectionLineComponent from "../ConnectionLineComponent"; import SelectionMenu from "../SelectionMenuComponent"; import ExtraSidebar from "../extraSidebarComponent"; -import { INVALID_SELECTION_ERROR_ALERT, UPLOAD_ALERT_LIST, UPLOAD_ERROR_ALERT, WRONG_FILE_ERROR_ALERT } from "../../../../alerts_constants"; +import { INVALID_SELECTION_ERROR_ALERT, UPLOAD_ALERT_LIST, UPLOAD_ERROR_ALERT, WRONG_FILE_ERROR_ALERT } from "../../../../constants/alerts_constants"; const nodeTypes = { genericNode: GenericNode, diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index 84446eaad..a756ae8c3 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -25,7 +25,7 @@ import { } from "../../../../utils/utils"; import DisclosureComponent from "../DisclosureComponent"; import SidebarDraggableComponent from "./sideBarDraggableComponent"; -import { UPLOAD_ERROR_ALERT } from "../../../../alerts_constants"; +import { UPLOAD_ERROR_ALERT } from "../../../../constants/alerts_constants"; export default function ExtraSidebar(): JSX.Element { const data = useTypesStore((state) => state.data); diff --git a/src/frontend/src/pages/MainPage/components/components/index.tsx b/src/frontend/src/pages/MainPage/components/components/index.tsx index 43e351e65..d93811d5b 100644 --- a/src/frontend/src/pages/MainPage/components/components/index.tsx +++ b/src/frontend/src/pages/MainPage/components/components/index.tsx @@ -9,7 +9,7 @@ import { Button } from "../../../../components/ui/button"; import useAlertStore from "../../../../stores/alertStore"; import useFlowsManagerStore from "../../../../stores/flowsManagerStore"; import { FlowType } from "../../../../types/flow"; -import { CONSOLE_ERROR_MSG, UPLOAD_ALERT_LIST, WRONG_FILE_ERROR_ALERT } from "../../../../alerts_constants"; +import { CONSOLE_ERROR_MSG, UPLOAD_ALERT_LIST, WRONG_FILE_ERROR_ALERT } from "../../../../constants/alerts_constants"; export default function ComponentsComponent({ is_component = true, diff --git a/src/frontend/src/pages/MainPage/index.tsx b/src/frontend/src/pages/MainPage/index.tsx index fb636a4d5..e8f0f4136 100644 --- a/src/frontend/src/pages/MainPage/index.tsx +++ b/src/frontend/src/pages/MainPage/index.tsx @@ -6,11 +6,11 @@ import IconComponent from "../../components/genericIconComponent"; import PageLayout from "../../components/pageLayout"; import SidebarNav from "../../components/sidebarComponent"; import { Button } from "../../components/ui/button"; -import { USER_PROJECTS_HEADER } from "../../constants/constants"; +import { USER_PROJECTS_HEADER, myCollectionDesc } from "../../constants/constants"; import useAlertStore from "../../stores/alertStore"; import useFlowsManagerStore from "../../stores/flowsManagerStore"; 
import { downloadFlows } from "../../utils/reactflowUtils"; -import { CONSOLE_ERROR_MSG } from "../../alerts_constants"; +import { CONSOLE_ERROR_MSG } from "../../constants/alerts_constants"; export default function HomePage(): JSX.Element { const addFlow = useFlowsManagerStore((state) => state.addFlow); const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow); @@ -72,7 +72,7 @@ export default function HomePage(): JSX.Element { return (
diff --git a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx index a0136ba85..f7ee2eba4 100644 --- a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx @@ -1,5 +1,6 @@ import _, { cloneDeep } from "lodash"; import { useEffect, useState } from "react"; +import { useUpdateNodeInternals } from "reactflow"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import CodeAreaComponent from "../../../../components/codeAreaComponent"; import IconComponent from "../../../../components/genericIconComponent"; @@ -26,7 +27,6 @@ import { updateFlowPosition, } from "../../../../utils/reactflowUtils"; import { classNames, cn } from "../../../../utils/utils"; -import { useUpdateNodeInternals } from "reactflow"; export default function NodeToolbarComponent({ data, @@ -89,7 +89,9 @@ export default function NodeToolbarComponent({ }, [showModalAdvanced]); const updateNodeInternals = useUpdateNodeInternals(); - const setLastCopiedSelection = useFlowStore(state => state.setLastCopiedSelection); + const setLastCopiedSelection = useFlowStore( + (state) => state.setLastCopiedSelection + ); useEffect(() => { setFlowComponent(createFlowComponent(cloneDeep(data), version)); }, [ @@ -144,8 +146,8 @@ export default function NodeToolbarComponent({ deleteNode(data.id); break; case "copy": - const node = nodes.filter(node => node.id === data.id) - setLastCopiedSelection({ nodes: _.cloneDeep(node), edges: [] }) + const node = nodes.filter((node) => node.id === data.id); + setLastCopiedSelection({ nodes: _.cloneDeep(node), edges: [] }); } }; @@ -233,7 +235,7 @@ export default function NodeToolbarComponent({ id={"code-input-node-toolbar-" + name} /> - + ) : ( @@ -371,13 +373,11 @@ export default function NodeToolbarComponent({ className="relative top-0.5 mr-2 h-4 w-4 " />{" "} Copy{" "} - - - C - + + C {hasStore && ( @@ -450,7 +450,7 @@ export default function NodeToolbarComponent({ diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts index 62e1c6ec7..4abcecdaf 100644 --- a/src/frontend/src/utils/styleUtils.ts +++ b/src/frontend/src/utils/styleUtils.ts @@ -219,7 +219,7 @@ export const nodeColors: { [char: string]: string } = { wrappers: "#E6277A", utilities: "#31A3CC", output_parsers: "#E6A627", - str: "#049524", + str: "#31a3cc", retrievers: "#e6b25a", unknown: "#9CA3AF", custom_components: "#ab11ab",