From a9c7fc0a6983c942b96b45f0158cbaecad11f74f Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 30 May 2023 00:38:14 -0300 Subject: [PATCH 01/43] =?UTF-8?q?=F0=9F=94=A7=20chore(config):=20add=20Vec?= =?UTF-8?q?torStoreToolkit=20to=20toolkits=20list=20=F0=9F=90=9B=20fix(bas?= =?UTF-8?q?e.py):=20remove=20deepcopy=20for=20VectorStore=20and=20VectorSt?= =?UTF-8?q?oreRouter=20agents=20=F0=9F=90=9B=20fix(nodes.py):=20remove=20d?= =?UTF-8?q?eepcopy=20for=20VectorStore=20and=20VectorStoreRouter=20agents?= =?UTF-8?q?=20=F0=9F=94=A7=20chore(loading.py):=20comment=20out=20unused?= =?UTF-8?q?=20code=20for=20loading=20toolkits=20=F0=9F=90=9B=20fix(toolkit?= =?UTF-8?q?s/base.py):=20add=20Tool=20to=20base=5Fclasses=20in=20get=5Fsig?= =?UTF-8?q?nature=20method=20The=20changes=20to=20the=20config=20file=20ad?= =?UTF-8?q?d=20the=20VectorStoreToolkit=20to=20the=20list=20of=20toolkits.?= =?UTF-8?q?=20The=20deepcopy=20for=20VectorStore=20and=20VectorStoreRouter?= =?UTF-8?q?=20agents=20was=20causing=20issues,=20so=20it=20was=20removed?= =?UTF-8?q?=20from=20the=20base.py=20and=20nodes.py=20files.=20The=20loadi?= =?UTF-8?q?ng.py=20file=20had=20some=20unused=20code=20for=20loading=20too?= =?UTF-8?q?lkits,=20so=20it=20was=20commented=20out.=20Finally,=20the=20ba?= =?UTF-8?q?se.py=20file=20had=20a=20bug=20where=20the=20Tool=20class=20was?= =?UTF-8?q?=20not=20being=20added=20to=20the=20base=5Fclasses=20list=20in?= =?UTF-8?q?=20the=20get=5Fsignature=20method,=20so=20it=20was=20added.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/config.yaml | 1 + src/backend/langflow/graph/base.py | 14 +------------- src/backend/langflow/graph/nodes.py | 5 +---- src/backend/langflow/interface/loading.py | 7 +++++-- src/backend/langflow/interface/toolkits/base.py | 15 +++++++++------ 5 files changed, 17 insertions(+), 25 deletions(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 
02b17cd85..b073ed544 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -74,6 +74,7 @@ toolkits: - JsonToolkit - VectorStoreInfo - VectorStoreRouterToolkit + - VectorStoreToolkit tools: - Search - PAL-MATH diff --git a/src/backend/langflow/graph/base.py b/src/backend/langflow/graph/base.py index 187d2983e..5b64885fb 100644 --- a/src/backend/langflow/graph/base.py +++ b/src/backend/langflow/graph/base.py @@ -212,19 +212,7 @@ class Node: if not self._built or force: self._build() - #! Deepcopy is breaking for vectorstores - if self.base_type in [ - "vectorstores", - "VectorStoreRouterAgent", - "VectorStoreAgent", - "VectorStoreInfo", - ] or self.node_type in [ - "VectorStoreInfo", - "VectorStoreRouterToolkit", - "SQLDatabase", - ]: - return self._built_object - return deepcopy(self._built_object) + return self._built_object def add_edge(self, edge: "Edge") -> None: self.edges.append(edge) diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index 189e40b5c..7d9b05366 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -14,7 +14,7 @@ class AgentNode(Node): def _set_tools_and_chains(self) -> None: for edge in self.edges: source_node = edge.source - if isinstance(source_node, ToolNode): + if isinstance(source_node, (ToolNode, ToolkitNode)): self.tools.append(source_node) elif isinstance(source_node, ChainNode): self.chains.append(source_node) @@ -32,9 +32,6 @@ class AgentNode(Node): self._build() - #! 
Cannot deepcopy VectorStore, VectorStoreRouter, or SQL agents - if self.node_type in ["VectorStoreAgent", "VectorStoreRouterAgent", "SQLAgent"]: - return self._built_object return self._built_object diff --git a/src/backend/langflow/interface/loading.py b/src/backend/langflow/interface/loading.py index cd6898a7f..d720c6b0c 100644 --- a/src/backend/langflow/interface/loading.py +++ b/src/backend/langflow/interface/loading.py @@ -101,8 +101,11 @@ def instantiate_tool(node_type, class_object, params): def instantiate_toolkit(node_type, class_object, params): loaded_toolkit = class_object(**params) - if toolkits_creator.has_create_function(node_type): - return load_toolkits_executor(node_type, loaded_toolkit, params) + # Commenting this out for now to use toolkits as normal tools + # if toolkits_creator.has_create_function(node_type): + # return load_toolkits_executor(node_type, loaded_toolkit, params) + if isinstance(loaded_toolkit, BaseToolkit): + return loaded_toolkit.get_tools() return loaded_toolkit diff --git a/src/backend/langflow/interface/toolkits/base.py b/src/backend/langflow/interface/toolkits/base.py index cbe625f0d..9f01b2bb2 100644 --- a/src/backend/langflow/interface/toolkits/base.py +++ b/src/backend/langflow/interface/toolkits/base.py @@ -42,24 +42,27 @@ class ToolkitCreator(LangChainTypeCreator): def get_signature(self, name: str) -> Optional[Dict]: try: - return build_template_from_class(name, self.type_to_loader_dict) + template = build_template_from_class(name, self.type_to_loader_dict) + # add Tool to base_classes + if template: + template["base_classes"].append("Tool") + return template except ValueError as exc: - raise ValueError("Prompt not found") from exc + raise ValueError("Toolkit not found") from exc except AttributeError as exc: - logger.error(f"Prompt {name} not loaded: {exc}") + logger.error(f"Toolkit {name} not loaded: {exc}") return None def to_list(self) -> List[str]: return list(self.type_to_loader_dict.keys()) def 
get_create_function(self, name: str) -> Callable: - if loader_name := self.create_functions.get(name, None): - # import loader + if loader_name := self.create_functions.get(name): return import_module( f"from langchain.agents.agent_toolkits import {loader_name[0]}" ) else: - raise ValueError("Loader not found") + raise ValueError("Toolkit not found") def has_create_function(self, name: str) -> bool: # check if the function list is not empty From ad3bb997eed981c9426e0b76b16f84309b18ae74 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 30 May 2023 01:11:27 -0300 Subject: [PATCH 02/43] =?UTF-8?q?=F0=9F=90=9B=20fix(base.py):=20extend=20l?= =?UTF-8?q?ist=20only=20if=20key=20exists=20and=20is=20a=20list=20?= =?UTF-8?q?=F0=9F=90=9B=20fix(nodes.py):=20flatten=20list=20of=20tools=20i?= =?UTF-8?q?f=20it=20is=20a=20list=20of=20lists=20=F0=9F=90=9B=20fix(toolki?= =?UTF-8?q?ts/base.py):=20add=20"toolkit"=20check=20to=20avoid=20adding=20?= =?UTF-8?q?"Tool"=20to=20non-toolkit=20classes=20=F0=9F=93=9D=20docs(agent?= =?UTF-8?q?s.py):=20update=20node=20descriptions=20to=20reflect=20CSV=20an?= =?UTF-8?q?d=20zero=20shot=20agents=20The=20changes=20in=20base.py=20and?= =?UTF-8?q?=20nodes.py=20ensure=20that=20the=20code=20works=20as=20intende?= =?UTF-8?q?d=20and=20avoids=20errors=20when=20extending=20lists.=20The=20c?= =?UTF-8?q?hange=20in=20toolkits/base.py=20ensures=20that=20"Tool"=20is=20?= =?UTF-8?q?only=20added=20to=20classes=20that=20are=20toolkits.=20The=20ch?= =?UTF-8?q?anges=20in=20agents.py=20update=20the=20node=20descriptions=20t?= =?UTF-8?q?o=20reflect=20that=20the=20CSVAgentNode=20constructs=20a=20CSV?= =?UTF-8?q?=20agent=20and=20the=20InitializeAgentNode=20constructs=20a=20z?= =?UTF-8?q?ero=20shot=20agent.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/graph/base.py | 6 ++++++ src/backend/langflow/graph/nodes.py | 5 +++++ src/backend/langflow/interface/toolkits/base.py | 2 +- 
src/backend/langflow/template/frontend_node/agents.py | 4 ++-- 4 files changed, 14 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/graph/base.py b/src/backend/langflow/graph/base.py index 5b64885fb..08b255441 100644 --- a/src/backend/langflow/graph/base.py +++ b/src/backend/langflow/graph/base.py @@ -175,6 +175,12 @@ class Node: # turn result which is a function into a coroutine # so that it can be awaited self.params["coroutine"] = sync_to_async(result) + if isinstance(result, list): + # If the result is a list, then we need to extend the list + # with the result but first check if the key exists + # if it doesn't, then we need to create a new list + if isinstance(self.params[key], list): + self.params[key].extend(result) self.params[key] = result elif isinstance(value, list) and all( diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index 7d9b05366..ea94e10b8 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -62,6 +62,11 @@ class PromptNode(Node): if tools is not None else [] ) + # flatten the list of tools if it is a list of lists + # first check if it is a list + if isinstance(tools, list) and isinstance(tools[0], list): + tools = [tool for sublist in tools for tool in sublist] + self.params["tools"] = tools prompt_params = [ key diff --git a/src/backend/langflow/interface/toolkits/base.py b/src/backend/langflow/interface/toolkits/base.py index 9f01b2bb2..be2345c02 100644 --- a/src/backend/langflow/interface/toolkits/base.py +++ b/src/backend/langflow/interface/toolkits/base.py @@ -44,7 +44,7 @@ class ToolkitCreator(LangChainTypeCreator): try: template = build_template_from_class(name, self.type_to_loader_dict) # add Tool to base_classes - if template: + if "toolkit" in name.lower() and template: template["base_classes"].append("Tool") return template except ValueError as exc: diff --git a/src/backend/langflow/template/frontend_node/agents.py 
b/src/backend/langflow/template/frontend_node/agents.py index e4fe40187..451dd7eca 100644 --- a/src/backend/langflow/template/frontend_node/agents.py +++ b/src/backend/langflow/template/frontend_node/agents.py @@ -146,7 +146,7 @@ class CSVAgentNode(FrontendNode): ), ], ) - description: str = """Construct a json agent from a CSV and tools.""" + description: str = """Construct a CSV agent from a CSV and tools.""" base_classes: list[str] = ["AgentExecutor"] def to_dict(self): @@ -194,7 +194,7 @@ class InitializeAgentNode(FrontendNode): ), ], ) - description: str = """Construct a json agent from an LLM and tools.""" + description: str = """Construct a zero shot agent from an LLM and tools.""" base_classes: list[str] = ["AgentExecutor", "function"] def to_dict(self): From 1a8d5561e9dbbecd636495ca3a75aecdaa2f3623 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 30 May 2023 01:15:48 -0300 Subject: [PATCH 03/43] fix --- src/backend/langflow/graph/nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index ea94e10b8..5f136b3c7 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -64,7 +64,7 @@ class PromptNode(Node): ) # flatten the list of tools if it is a list of lists # first check if it is a list - if isinstance(tools, list) and isinstance(tools[0], list): + if tools and isinstance(tools, list) and isinstance(tools[0], list): tools = [tool for sublist in tools for tool in sublist] self.params["tools"] = tools From 4b6a8595df8e519a24fe1c064baf85c04ebe345b Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 30 May 2023 21:34:23 -0300 Subject: [PATCH 04/43] =?UTF-8?q?=F0=9F=90=9B=20fix(nodes.py):=20change=20?= =?UTF-8?q?type=20hint=20of=20tools=20list=20to=20include=20ToolkitNode=20?= =?UTF-8?q?The=20type=20hint=20of=20the=20tools=20list=20in=20the=20AgentN?= 
=?UTF-8?q?ode=20class=20has=20been=20updated=20to=20include=20the=20Toolk?= =?UTF-8?q?itNode=20class.=20This=20is=20because=20the=20tools=20list=20ca?= =?UTF-8?q?n=20now=20contain=20instances=20of=20the=20ToolkitNode=20class?= =?UTF-8?q?=20in=20addition=20to=20the=20ToolNode=20class.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/graph/nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index 5f136b3c7..9db6260e9 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -8,7 +8,7 @@ class AgentNode(Node): def __init__(self, data: Dict): super().__init__(data, base_type="agents") - self.tools: List[ToolNode] = [] + self.tools: List[Union[ToolNode, ToolkitNode]] = [] self.chains: List[ChainNode] = [] def _set_tools_and_chains(self) -> None: From 041748b2fb29a1641e889973baddf56941611fe9 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 30 May 2023 21:47:42 -0300 Subject: [PATCH 05/43] =?UTF-8?q?=F0=9F=94=A8=20refactor(nodes.py):=20extr?= =?UTF-8?q?act=20flatten=5Flist=20function=20to=20utils=20module=20and=20u?= =?UTF-8?q?se=20it=20in=20PromptNode.build=20method=20=F0=9F=90=9B=20fix(n?= =?UTF-8?q?odes.py):=20change=20tools=20parameter=20type=20hint=20in=20Pro?= =?UTF-8?q?mptNode.build=20method=20to=20accept=20a=20list=20of=20Union[To?= =?UTF-8?q?olNode,=20ToolkitNode]=20The=20flatten=5Flist=20function=20was?= =?UTF-8?q?=20extracted=20from=20the=20PromptNode.build=20method=20and=20m?= =?UTF-8?q?oved=20to=20the=20utils=20module=20to=20improve=20code=20reusab?= =?UTF-8?q?ility.=20The=20PromptNode.build=20method=20now=20uses=20the=20f?= =?UTF-8?q?latten=5Flist=20function=20to=20flatten=20the=20list=20of=20too?= =?UTF-8?q?ls=20if=20it=20is=20a=20list=20of=20lists.=20The=20tools=20para?= =?UTF-8?q?meter=20type=20hint=20was=20changed=20to=20accept=20a=20list=20?= 
=?UTF-8?q?of=20Union[ToolNode,=20ToolkitNode]=20to=20improve=20type=20saf?= =?UTF-8?q?ety.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/graph/nodes.py | 62 ++++++++++++++--------------- src/backend/langflow/graph/utils.py | 12 ++++++ 2 files changed, 41 insertions(+), 33 deletions(-) diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index 9db6260e9..21fe0f673 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -1,7 +1,12 @@ from typing import Any, Dict, List, Optional, Union from langflow.graph.base import Node -from langflow.graph.utils import extract_input_variables_from_prompt +from langflow.graph.utils import extract_input_variables_from_prompt, flatten_list + + +class ToolkitNode(Node): + def __init__(self, data: Dict): + super().__init__(data, base_type="toolkits") class AgentNode(Node): @@ -47,7 +52,7 @@ class PromptNode(Node): def build( self, force: bool = False, - tools: Optional[Union[List[Node], List[ToolNode]]] = None, + tools: Optional[List[Union[ToolNode, ToolkitNode]]] = None, ) -> Any: if not self._built or force: if ( @@ -65,8 +70,7 @@ class PromptNode(Node): # flatten the list of tools if it is a list of lists # first check if it is a list if tools and isinstance(tools, list) and isinstance(tools[0], list): - tools = [tool for sublist in tools for tool in sublist] - + tools = flatten_list(tools) self.params["tools"] = tools prompt_params = [ key @@ -85,30 +89,6 @@ class PromptNode(Node): return self._built_object -class ChainNode(Node): - def __init__(self, data: Dict): - super().__init__(data, base_type="chains") - - def build( - self, - force: bool = False, - tools: Optional[Union[List[Node], List[ToolNode]]] = None, - ) -> Any: - if not self._built or force: - # Check if the chain requires a PromptNode - for key, value in self.params.items(): - if isinstance(value, PromptNode): - # Build the 
PromptNode, passing the tools if available - self.params[key] = value.build(tools=tools, force=force) - - self._build() - - #! Cannot deepcopy SQLDatabaseChain - if self.node_type in ["SQLDatabaseChain"]: - return self._built_object - return self._built_object - - class LLMNode(Node): built_node_type = None class_built_object = None @@ -130,11 +110,6 @@ class LLMNode(Node): return self._built_object -class ToolkitNode(Node): - def __init__(self, data: Dict): - super().__init__(data, base_type="toolkits") - - class FileToolNode(ToolNode): def __init__(self, data: Dict): super().__init__(data) @@ -193,3 +168,24 @@ class TextSplitterNode(Node): if self._built_object: return f"""{self.node_type}({len(self._built_object)} documents)\nDocuments: {self._built_object[:3]}...""" return f"{self.node_type}()" + + +class ChainNode(Node): + def __init__(self, data: Dict): + super().__init__(data, base_type="chains") + + def build( + self, + force: bool = False, + tools: Optional[List[Union[ToolNode, ToolkitNode]]] = None, + ) -> Any: + if not self._built or force: + # Check if the chain requires a PromptNode + for key, value in self.params.items(): + if isinstance(value, PromptNode): + # Build the PromptNode, passing the tools if available + self.params[key] = value.build(tools=tools, force=force) + + self._build() + + return self._built_object diff --git a/src/backend/langflow/graph/utils.py b/src/backend/langflow/graph/utils.py index 6d56e933e..e22b27cf5 100644 --- a/src/backend/langflow/graph/utils.py +++ b/src/backend/langflow/graph/utils.py @@ -1,4 +1,5 @@ import re +from typing import Any, Union def validate_prompt(prompt: str): @@ -17,3 +18,14 @@ def fix_prompt(prompt: str): def extract_input_variables_from_prompt(prompt: str) -> list[str]: """Extract input variables from prompt.""" return re.findall(r"{(.*?)}", prompt) + + +def flatten_list(list_of_lists: list[Union[list, Any]]) -> list: + """Flatten list of lists.""" + new_list = [] + for item in list_of_lists: + if 
isinstance(item, list): + new_list.extend(item) + else: + new_list.append(item) + return new_list From 25d77eaf87b9e778b235626841e036f046fcf2e8 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 May 2023 11:44:12 -0300 Subject: [PATCH 06/43] =?UTF-8?q?=F0=9F=90=9B=20fix(custom.py):=20fix=20Js?= =?UTF-8?q?onAgent.from=5Ftoolkit=5Fand=5Fllm=20method=20to=20handle=20bot?= =?UTF-8?q?h=20list=20and=20JsonToolkit=20input=20=F0=9F=94=A5=20chore(tes?= =?UTF-8?q?t=5Fgraph.py):=20remove=20unused=20openapi=5Fgraph=20parameter?= =?UTF-8?q?=20from=20test=5Fbuild=20method=20The=20JsonAgent.from=5Ftoolki?= =?UTF-8?q?t=5Fand=5Fllm=20method=20was=20failing=20when=20a=20list=20was?= =?UTF-8?q?=20passed=20as=20input=20instead=20of=20a=20JsonToolkit=20objec?= =?UTF-8?q?t.=20The=20fix=20now=20handles=20both=20cases.=20The=20openapi?= =?UTF-8?q?=5Fgraph=20parameter=20was=20removed=20from=20the=20test=5Fbuil?= =?UTF-8?q?d=20method=20as=20it=20was=20unused.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/agents/custom.py | 2 +- tests/test_graph.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/langflow/interface/agents/custom.py index 4654ef7cb..3aaa132d4 100644 --- a/src/backend/langflow/interface/agents/custom.py +++ b/src/backend/langflow/interface/agents/custom.py @@ -69,7 +69,7 @@ class JsonAgent(CustomAgentExecutor): @classmethod def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel): - tools = toolkit.get_tools() + tools = toolkit if isinstance(toolkit, list) else toolkit.get_tools() tool_names = {tool.name for tool in tools} prompt = ZeroShotAgent.create_prompt( tools, diff --git a/tests/test_graph.py b/tests/test_graph.py index a0f5945fc..b92457b81 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -237,11 +237,10 @@ def test_build_params(basic_graph): assert 
"memory" in root.params -def test_build(basic_graph, complex_graph, openapi_graph): +def test_build(basic_graph, complex_graph): """Test Node's build method""" assert_agent_was_built(basic_graph) assert_agent_was_built(complex_graph) - assert_agent_was_built(openapi_graph) def assert_agent_was_built(graph): From 98b1b13d3a7bd4d557c6912f7266325e2e8dc24c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 May 2023 12:26:55 -0300 Subject: [PATCH 07/43] =?UTF-8?q?=F0=9F=9A=80=20feat(pyproject.toml):=20ad?= =?UTF-8?q?d=20faiss-cpu=20dependency=20=E2=9C=A8=20feat(config.yaml):=20a?= =?UTF-8?q?dd=20FAISS=20to=20vectorstores=20The=20faiss-cpu=20dependency?= =?UTF-8?q?=20has=20been=20added=20to=20the=20project=20to=20enable=20the?= =?UTF-8?q?=20use=20of=20FAISS.=20FAISS=20has=20been=20added=20to=20the=20?= =?UTF-8?q?list=20of=20vectorstores=20in=20the=20config.yaml=20file=20to?= =?UTF-8?q?=20allow=20for=20the=20use=20of=20FAISS=20in=20the=20project.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 37 +++++++++++++++++++++++++++++++- pyproject.toml | 1 + src/backend/langflow/config.yaml | 1 + 3 files changed, 38 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index a414aab16..b13535d12 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1192,6 +1192,41 @@ files = [ [package.extras] tests = ["asttokens", "littleutils", "pytest", "rich"] +[[package]] +name = "faiss-cpu" +version = "1.7.4" +description = "A library for efficient similarity search and clustering of dense vectors." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "faiss-cpu-1.7.4.tar.gz", hash = "sha256:265dc31b0c079bf4433303bf6010f73922490adff9188b915e2d3f5e9c82dd0a"}, + {file = "faiss_cpu-1.7.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50d4ebe7f1869483751c558558504f818980292a9b55be36f9a1ee1009d9a686"}, + {file = "faiss_cpu-1.7.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7b1db7fae7bd8312aeedd0c41536bcd19a6e297229e1dce526bde3a73ab8c0b5"}, + {file = "faiss_cpu-1.7.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17b7fa7194a228a84929d9e6619d0e7dbf00cc0f717e3462253766f5e3d07de8"}, + {file = "faiss_cpu-1.7.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dca531952a2e3eac56f479ff22951af4715ee44788a3fe991d208d766d3f95f3"}, + {file = "faiss_cpu-1.7.4-cp310-cp310-win_amd64.whl", hash = "sha256:7173081d605e74766f950f2e3d6568a6f00c53f32fd9318063e96728c6c62821"}, + {file = "faiss_cpu-1.7.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0bbd6f55d7940cc0692f79e32a58c66106c3c950cee2341b05722de9da23ea3"}, + {file = "faiss_cpu-1.7.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e13c14280376100f143767d0efe47dcb32618f69e62bbd3ea5cd38c2e1755926"}, + {file = "faiss_cpu-1.7.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c521cb8462f3b00c0c7dfb11caff492bb67816528b947be28a3b76373952c41d"}, + {file = "faiss_cpu-1.7.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afdd9fe1141117fed85961fd36ee627c83fc3b9fd47bafb52d3c849cc2f088b7"}, + {file = "faiss_cpu-1.7.4-cp311-cp311-win_amd64.whl", hash = "sha256:2ff7f57889ea31d945e3b87275be3cad5d55b6261a4e3f51c7aba304d76b81fb"}, + {file = "faiss_cpu-1.7.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:eeaf92f27d76249fb53c1adafe617b0f217ab65837acf7b4ec818511caf6e3d8"}, + {file = "faiss_cpu-1.7.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:102b1bd763e9b0c281ac312590af3eaf1c8b663ccbc1145821fe6a9f92b8eaaf"}, + {file = "faiss_cpu-1.7.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5512da6707c967310c46ff712b00418b7ae28e93cb609726136e826e9f2f14fa"}, + {file = "faiss_cpu-1.7.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0c2e5b9d8c28c99f990e87379d5bbcc6c914da91ebb4250166864fd12db5755b"}, + {file = "faiss_cpu-1.7.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f67f325393145d360171cd98786fcea6120ce50397319afd3bb78be409fb8a"}, + {file = "faiss_cpu-1.7.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6a4e4af194b8fce74c4b770cad67ad1dd1b4673677fc169723e4c50ba5bd97a8"}, + {file = "faiss_cpu-1.7.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31bfb7b9cffc36897ae02a983e04c09fe3b8c053110a287134751a115334a1df"}, + {file = "faiss_cpu-1.7.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52d7de96abef2340c0d373c1f5cbc78026a3cebb0f8f3a5920920a00210ead1f"}, + {file = "faiss_cpu-1.7.4-cp38-cp38-win_amd64.whl", hash = "sha256:699feef85b23c2c729d794e26ca69bebc0bee920d676028c06fd0e0becc15c7e"}, + {file = "faiss_cpu-1.7.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:559a0133f5ed44422acb09ee1ac0acffd90c6666d1bc0d671c18f6e93ad603e2"}, + {file = "faiss_cpu-1.7.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1d71539fe3dc0f1bed41ef954ca701678776f231046bf0ca22ccea5cf5bef6"}, + {file = "faiss_cpu-1.7.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12d45e0157024eb3249842163162983a1ac8b458f1a8b17bbf86f01be4585a99"}, + {file = "faiss_cpu-1.7.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f0eab359e066d32c874f51a7d4bf6440edeec068b7fe47e6d803c73605a8b4c"}, + {file = "faiss_cpu-1.7.4-cp39-cp39-win_amd64.whl", hash = "sha256:98459ceeeb735b9df1a5b94572106ffe0a6ce740eb7e4626715dd218657bb4dc"}, +] + [[package]] name = "fake-useragent" version = "1.1.3" @@ -6178,4 +6213,4 @@ 
deploy = ["langchain-serve"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "9ce165d2decf2190d7ce69be608872b3ed9abe705a276045623706d01665754b" +content-hash = "0c15df0da26611ffa09238660bc024b485ffb5f1b06890808751947a8467ae58" diff --git a/pyproject.toml b/pyproject.toml index 79d09b422..0f7360de2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ jina = "3.15.2" sentence-transformers = "^2.2.2" ctransformers = "^0.2.2" cohere = "^4.6.0" +faiss-cpu = "^1.7.4" [tool.poetry.group.dev.dependencies] diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 4060d1f3e..3b8554360 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -118,6 +118,7 @@ vectorstores: - Chroma - Qdrant - Weaviate + - FAISS wrappers: - RequestsWrapper # - ChatPromptTemplate From b72f46c37afb93a3f7fc69dc833a2ee4a21c1900 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 May 2023 12:27:57 -0300 Subject: [PATCH 08/43] =?UTF-8?q?=F0=9F=94=96=20chore(pyproject.toml):=20b?= =?UTF-8?q?ump=20up=20version=20to=200.0.80=20The=20version=20number=20in?= =?UTF-8?q?=20the=20pyproject.toml=20file=20has=20been=20updated=20from=20?= =?UTF-8?q?0.0.79=20to=200.0.80.=20This=20is=20a=20chore=20commit=20as=20i?= =?UTF-8?q?t=20does=20not=20introduce=20any=20new=20features=20or=20fix=20?= =?UTF-8?q?any=20bugs,=20but=20it=20is=20necessary=20to=20keep=20track=20o?= =?UTF-8?q?f=20the=20version=20number=20of=20the=20package.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 56 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/poetry.lock b/poetry.lock index b13535d12..6256be508 100644 --- a/poetry.lock +++ b/poetry.lock @@ -862,31 +862,31 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "40.0.2" +version = "41.0.0" description = 
"cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, - {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, - {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, - {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, + {file = "cryptography-41.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8"}, + {file = "cryptography-41.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0"}, + {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d"}, + {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46"}, + {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237"}, + {file = 
"cryptography-41.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4"}, + {file = "cryptography-41.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75"}, + {file = "cryptography-41.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d"}, + {file = "cryptography-41.0.0-cp37-abi3-win32.whl", hash = "sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928"}, + {file = "cryptography-41.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be"}, + {file = "cryptography-41.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5"}, + {file = "cryptography-41.0.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb"}, + {file = "cryptography-41.0.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be"}, + {file = "cryptography-41.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9"}, + {file = "cryptography-41.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2"}, + {file = "cryptography-41.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d"}, + {file = "cryptography-41.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895"}, + {file = "cryptography-41.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55"}, + {file = 
"cryptography-41.0.0.tar.gz", hash = "sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78"}, ] [package.dependencies] @@ -895,23 +895,23 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff"] -sdist = ["setuptools-rust (>=0.11.4)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -tox = ["tox"] [[package]] name = "ctransformers" -version = "0.2.2" +version = "0.2.3" description = "Python bindings for the Transformer models implemented in C/C++ using GGML library." 
category = "main" optional = false python-versions = "*" files = [ - {file = "ctransformers-0.2.2-py3-none-any.whl", hash = "sha256:bf682dd0293dd87911c9a9a1169a4873ff55baebc16d465c6029c77f11b18cf6"}, - {file = "ctransformers-0.2.2.tar.gz", hash = "sha256:1fc36b3fde36d9fd3cb69e48993315bb1f5f54ae552720eb909dc4b3a131c743"}, + {file = "ctransformers-0.2.3-py3-none-any.whl", hash = "sha256:5043b0808839cd34b0c7d1b897b81ac7e3d4778674b6226aef18b622be4b75c9"}, + {file = "ctransformers-0.2.3.tar.gz", hash = "sha256:87fc9966b62fbdadb01b91b6373287e1af50e176b5dd409f4f2d1ff0fa9f7c99"}, ] [package.dependencies] diff --git a/pyproject.toml b/pyproject.toml index 0f7360de2..c95d29bc9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.0.79" +version = "0.0.80" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ From dedbe3c906807f9f1ede4d4b941960b0b34e05dc Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 May 2023 15:40:20 -0300 Subject: [PATCH 09/43] =?UTF-8?q?=F0=9F=8E=A8=20refactor(complex=5Fexample?= =?UTF-8?q?.json):=20rename=20"PythonFunction"=20to=20"PythonFunctionTool"?= =?UTF-8?q?=20and=20add=20"description"=20and=20"name"=20fields=20The=20"P?= =?UTF-8?q?ythonFunction"=20type=20has=20been=20renamed=20to=20"PythonFunc?= =?UTF-8?q?tionTool"=20to=20better=20reflect=20its=20purpose.=20Additional?= =?UTF-8?q?ly,=20two=20new=20fields=20have=20been=20added:=20"description"?= =?UTF-8?q?=20and=20"name".=20These=20fields=20are=20required=20and=20allo?= =?UTF-8?q?w=20for=20a=20more=20detailed=20description=20of=20the=20tool?= =?UTF-8?q?=20and=20its=20name.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/data/complex_example.json | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/tests/data/complex_example.json b/tests/data/complex_example.json index 
8d46a97d6..89a3b9324 100644 --- a/tests/data/complex_example.json +++ b/tests/data/complex_example.json @@ -197,7 +197,7 @@ "y": 136.29836646158452 }, "data": { - "type": "PythonFunction", + "type": "PythonFunctionTool", "node": { "template": { "code": { @@ -210,6 +210,26 @@ "type": "str", "list": false }, + "description": { + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "value": "My description", + "name": "description", + "type": "str", + "list": false + }, + "name": { + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "value": "My Tool", + "name": "name", + "type": "str", + "list": false + }, "_type": "python_function" }, "description": "Python function to be executed.", From be0984908146cc39ddef30bd1f1e27b67fcbb934 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 May 2023 15:41:16 -0300 Subject: [PATCH 10/43] =?UTF-8?q?=F0=9F=9A=80=20feat(langflow):=20rename?= =?UTF-8?q?=20PythonFunction=20to=20PythonFunctionTool=20for=20better=20se?= =?UTF-8?q?mantics=20=F0=9F=9A=80=20feat(langflow):=20add=20PythonFunction?= =?UTF-8?q?ToolNode=20to=20the=20frontend=20node=20tools=20=F0=9F=9A=80=20?= =?UTF-8?q?feat(langflow):=20add=20PythonFunctionTool=20to=20the=20custom?= =?UTF-8?q?=20tools=20=F0=9F=9A=80=20feat(langflow):=20add=20get=5Ffunctio?= =?UTF-8?q?n=20to=20importing=20utils=20to=20get=20the=20function=20from?= =?UTF-8?q?=20code=20=F0=9F=9A=80=20feat(langflow):=20add=20func=20paramet?= =?UTF-8?q?er=20to=20PythonFunctionTool=20to=20store=20the=20function=20?= =?UTF-8?q?=F0=9F=9A=80=20feat(langflow):=20add=20name=20and=20description?= =?UTF-8?q?=20parameters=20to=20PythonFunctionTool=20=F0=9F=9A=80=20feat(l?= =?UTF-8?q?angflow):=20update=20instantiate=5Ftool=20to=20use=20PythonFunc?= =?UTF-8?q?tionTool=20instead=20of=20PythonFunction=20=F0=9F=9A=80=20feat(?= =?UTF-8?q?langflow):=20update=20constants=20to=20use=20PythonFunctionTool?= 
=?UTF-8?q?=20instead=20of=20PythonFunction=20=F0=9F=9A=80=20feat(langflow?= =?UTF-8?q?):=20update=20custom.py=20to=20use=20PythonFunctionTool=20inste?= =?UTF-8?q?ad=20of=20PythonFunction=20=F0=9F=9A=80=20feat(langflow):=20upd?= =?UTF-8?q?ate=20loading.py=20to=20use=20get=5Ffunction=20and=20PythonFunc?= =?UTF-8?q?tionTool=20=F0=9F=9A=80=20feat(langflow):=20update=20utils.py?= =?UTF-8?q?=20to=20use=20get=5Ffunction=20=F0=9F=9A=80=20feat(langflow):?= =?UTF-8?q?=20update=20test=5Fcustom=5Ftypes.py=20to=20use=20get=5Ffunctio?= =?UTF-8?q?n=20and=20PythonFunctionTool=20=F0=9F=9A=80=20feat(langflow):?= =?UTF-8?q?=20update=20test=5Fgraph.py=20to=20use=20PythonFunctionTool=20i?= =?UTF-8?q?nstead=20of=20PythonFunction=20The=20changes=20rename=20PythonF?= =?UTF-8?q?unction=20to=20PythonFunctionTool=20for=20better=20semantics.?= =?UTF-8?q?=20The=20frontend=20node=20tools,=20custom=20tools,=20and=20con?= =?UTF-8?q?stants=20are=20updated=20to=20use=20PythonFunctionTool=20instea?= =?UTF-8?q?d=20of=20PythonFunction.=20The=20get=5Ffunction=20function=20is?= =?UTF-8?q?=20added=20to=20importing=20utils=20to=20get=20the=20function?= =?UTF-8?q?=20from=20code.=20The=20PythonFunctionTool=20is=20updated=20to?= =?UTF-8?q?=20store=20the=20function=20in=20the=20func=20parameter=20and?= =?UTF-8?q?=20to=20have=20name=20and=20description=20parameters.=20The=20i?= =?UTF-8?q?nstantiate=5Ftool,=20loading.py,=20and=20utils.py=20are=20updat?= =?UTF-8?q?ed=20to=20use=20get=5Ffunction=20and=20PythonFunctionTool.=20Th?= =?UTF-8?q?e=20test=5Fcustom=5Ftypes.py=20and=20test=5Fgraph.py=20are=20up?= =?UTF-8?q?dated=20to=20use=20PythonFunctionTool=20instead=20of=20PythonFu?= =?UTF-8?q?nction.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/config.yaml | 2 +- src/backend/langflow/custom/customs.py | 2 +- .../langflow/interface/importing/utils.py | 8 +++++ src/backend/langflow/interface/loading.py | 12 +++---- 
src/backend/langflow/interface/tools/base.py | 3 +- .../langflow/interface/tools/constants.py | 4 +-- .../langflow/interface/tools/custom.py | 22 +++++++------ .../langflow/template/frontend_node/tools.py | 32 ++++++++++++++++--- tests/test_custom_types.py | 15 ++++++--- tests/test_graph.py | 4 ++- 10 files changed, 73 insertions(+), 31 deletions(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 3b8554360..fe05eb406 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -79,7 +79,7 @@ tools: - Calculator - Serper Search - Tool - - PythonFunction + - PythonFunctionTool - JsonSpec - News API - TMDB API diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py index ee266b0ee..f7a82e4a3 100644 --- a/src/backend/langflow/custom/customs.py +++ b/src/backend/langflow/custom/customs.py @@ -4,7 +4,7 @@ from langflow.template import frontend_node CUSTOM_NODES = { "prompts": {"ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode()}, "tools": { - "PythonFunction": frontend_node.tools.PythonFunctionNode(), + "PythonFunctionTool": frontend_node.tools.PythonFunctionToolNode(), "Tool": frontend_node.tools.ToolNode(), }, "agents": { diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index d08e52999..f65376d48 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/langflow/interface/importing/utils.py @@ -9,6 +9,7 @@ from langchain.base_language import BaseLanguageModel from langchain.chains.base import Chain from langchain.chat_models.base import BaseChatModel from langchain.tools import BaseTool +from langflow.utils import validate def import_module(module_path: str) -> Any: @@ -147,3 +148,10 @@ def import_utility(utility: str) -> Any: if utility == "SQLDatabase": return import_class(f"langchain.sql_database.{utility}") return import_class(f"langchain.utilities.{utility}") + + 
+def get_function(code): + """Get the function""" + function_name = validate.extract_function_name(code) + + return validate.create_function(code, function_name) diff --git a/src/backend/langflow/interface/loading.py b/src/backend/langflow/interface/loading.py index 69c697823..05ca348a7 100644 --- a/src/backend/langflow/interface/loading.py +++ b/src/backend/langflow/interface/loading.py @@ -20,12 +20,12 @@ from langchain.llms.loading import load_llm_from_config from pydantic import ValidationError from langflow.interface.agents.custom import CUSTOM_AGENTS -from langflow.interface.importing.utils import import_by_type +from langflow.interface.importing.utils import get_function, import_by_type from langflow.interface.run import fix_memory_inputs from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.types import get_type_list from langflow.interface.utils import load_file_into_dict -from langflow.utils import util, validate +from langflow.utils import util def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any: @@ -99,11 +99,9 @@ def instantiate_tool(node_type, class_object, params): if node_type == "JsonSpec": params["dict_"] = load_file_into_dict(params.pop("path")) return class_object(**params) - elif node_type == "PythonFunction": - function_string = params["code"] - if isinstance(function_string, str): - return validate.eval_function(function_string) - raise ValueError("Function should be a string") + elif node_type == "PythonFunctionTool": + params["func"] = get_function(params.get("code")) + return class_object(**params) elif node_type.lower() == "tool": return class_object(**params) return class_object(**params) diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py index a8e7045c0..d6b114e4c 100644 --- a/src/backend/langflow/interface/tools/base.py +++ b/src/backend/langflow/interface/tools/base.py @@ -71,7 +71,8 @@ class ToolCreator(LangChainTypeCreator): 
for tool, tool_fcn in ALL_TOOLS_NAMES.items(): tool_params = get_tool_params(tool_fcn) - tool_name = tool_params.get("name", tool) + + tool_name = tool_params.get("name") or tool if tool_name in settings.tools or settings.dev: if tool_name == "JsonSpec": diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py index f939d55ad..31c75ec08 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/langflow/interface/tools/constants.py @@ -9,10 +9,10 @@ from langchain.agents.load_tools import ( from langchain.tools.json.tool import JsonSpec from langflow.interface.importing.utils import import_class -from langflow.interface.tools.custom import PythonFunction +from langflow.interface.tools.custom import PythonFunctionTool FILE_TOOLS = {"JsonSpec": JsonSpec} -CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction} +CUSTOM_TOOLS = {"Tool": Tool, "PythonFunctionTool": PythonFunctionTool} OTHER_TOOLS = {tool: import_class(f"langchain.tools.{tool}") for tool in tools.__all__} diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/langflow/interface/tools/custom.py index 4c641f388..b2d43565d 100644 --- a/src/backend/langflow/interface/tools/custom.py +++ b/src/backend/langflow/interface/tools/custom.py @@ -1,13 +1,14 @@ -from typing import Callable, Optional +from typing import Optional +from langflow.interface.importing.utils import get_function from pydantic import BaseModel, validator from langflow.utils import validate +from langchain.agents.tools import Tool class Function(BaseModel): code: str - function: Optional[Callable] = None imports: Optional[str] = None # Eval code and store the function @@ -24,14 +25,17 @@ class Function(BaseModel): return v - def get_function(self): - """Get the function""" - function_name = validate.extract_function_name(self.code) - return validate.create_function(self.code, function_name) - - -class PythonFunction(Function): +class 
PythonFunctionTool(Function, Tool): """Python function""" + name: str = "Custom Tool" + description: str code: str + + def ___init__(self, name: str, description: str, code: str): + self.name = name + self.description = description + self.code = code + self.func = get_function(self.code) + super().__init__(name=name, description=description, func=self.func) diff --git a/src/backend/langflow/template/frontend_node/tools.py b/src/backend/langflow/template/frontend_node/tools.py index 2819be4d9..4e97fec8c 100644 --- a/src/backend/langflow/template/frontend_node/tools.py +++ b/src/backend/langflow/template/frontend_node/tools.py @@ -59,11 +59,33 @@ class ToolNode(FrontendNode): return super().to_dict() -class PythonFunctionNode(FrontendNode): - name: str = "PythonFunction" +class PythonFunctionToolNode(FrontendNode): + name: str = "PythonFunctionTool" template: Template = Template( - type_name="python_function", + type_name="PythonFunctionTool", fields=[ + TemplateField( + field_type="str", + required=True, + placeholder="", + is_list=False, + show=True, + multiline=False, + value="", + name="name", + advanced=False, + ), + TemplateField( + field_type="str", + required=True, + placeholder="", + is_list=False, + show=True, + multiline=False, + value="", + name="description", + advanced=False, + ), TemplateField( field_type="code", required=True, @@ -73,11 +95,11 @@ class PythonFunctionNode(FrontendNode): value=DEFAULT_PYTHON_FUNCTION, name="code", advanced=False, - ) + ), ], ) description: str = "Python function to be executed." 
- base_classes: list[str] = ["function"] + base_classes: list[str] = ["Tool"] def to_dict(self): return super().to_dict() diff --git a/tests/test_custom_types.py b/tests/test_custom_types.py index 399450e2e..7503426ab 100644 --- a/tests/test_custom_types.py +++ b/tests/test_custom_types.py @@ -1,16 +1,23 @@ # Test this: +from langflow.interface.importing.utils import get_function import pytest -from langflow.interface.tools.custom import PythonFunction +from langflow.interface.tools.custom import PythonFunctionTool from langflow.utils import constants def test_python_function(): """Test Python function""" - func = PythonFunction(code=constants.DEFAULT_PYTHON_FUNCTION) - assert func.get_function()("text") == "text" + code = constants.DEFAULT_PYTHON_FUNCTION + func = get_function(code) + func = PythonFunctionTool(name="Test", description="Testing", code=code, func=func) + assert func("text") == "text" # the tool decorator should raise an error if # the function is not str -> str # This raises ValidationError with pytest.raises(SyntaxError): - func = PythonFunction(code=pytest.CODE_WITH_SYNTAX_ERROR) + code = pytest.CODE_WITH_SYNTAX_ERROR + func = get_function(code) + func = PythonFunctionTool( + name="Test", description="Testing", code=code, func=func + ) diff --git a/tests/test_graph.py b/tests/test_graph.py index a0f5945fc..7826870e8 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -156,7 +156,9 @@ def test_get_node_neighbors_complex(complex_graph): tool_neighbors = complex_graph.get_nodes_with_target(tool) assert tool_neighbors is not None # Check if there is a PythonFunction in the tool's neighbors - assert any("PythonFunction" in neighbor.data["type"] for neighbor in tool_neighbors) + assert any( + "PythonFunctionTool" in neighbor.data["type"] for neighbor in tool_neighbors + ) def test_get_node(basic_graph): From 585e94285d89a63f63014015fd4f8f20a16a1fbe Mon Sep 17 00:00:00 2001 From: Cristhian Zanforlin Lousa Date: Thu, 1 Jun 2023 00:27:01 -0300 
Subject: [PATCH 11/43] Adding title and icon of the parent menu on hover of the handle --- .../components/parameterComponent/index.tsx | 37 ++++++++++++++++++- src/frontend/src/types/components/index.ts | 1 + 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 653248763..e26ae589f 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -15,6 +15,8 @@ import InputFileComponent from "../../../../components/inputFileComponent"; import { TabsContext } from "../../../../contexts/tabsContext"; import IntComponent from "../../../../components/intComponent"; import PromptAreaComponent from "../../../../components/promptComponent"; +import { nodeNames, nodeIcons } from "../../../../utils"; +import React from "react"; export default function ParameterComponent({ left, @@ -28,6 +30,8 @@ export default function ParameterComponent({ required = false, }: ParameterComponentType) { const ref = useRef(null); + const refParent = useRef(""); + const refParentIcon = useRef(null); const updateNodeInternals = useUpdateNodeInternals(); const [position, setPosition] = useState(0); useEffect(() => { @@ -48,6 +52,19 @@ export default function ParameterComponent({ let disabled = reactFlowInstance?.getEdges().some((e) => e.targetHandle === id) ?? false; const { save } = useContext(TabsContext); + const [myData, setMyData] = useState(useContext(typesContext).data); + + useEffect(() => { + Object.keys(myData).forEach((d) => { + let keys = Object.keys(myData[d]).filter( + (nd) => nd.toLowerCase() == data.type.toLowerCase() + ); + if (keys.length > 0) { + refParent.current = d; + refParentIcon.current = nodeIcons[d]; + } + }); + }, []); return (
) : ( - + +
+
+ {React.createElement(refParentIcon.current)} +
+ + {nodeNames[refParent?.current] ?? ""} + +
+ + } + > Date: Thu, 1 Jun 2023 15:37:11 -0300 Subject: [PATCH 12/43] refactor: change Node to Vertex --- src/backend/langflow/api/validate.py | 4 +- src/backend/langflow/graph/__init__.py | 54 ++++++++-------- src/backend/langflow/graph/edge/base.py | 10 +-- src/backend/langflow/graph/graph/base.py | 56 ++++++++-------- src/backend/langflow/graph/graph/constants.py | 54 ++++++++-------- .../graph/{node => vertex}/__init__.py | 0 .../langflow/graph/{node => vertex}/base.py | 26 ++++---- .../graph/{node => vertex}/constants.py | 0 .../langflow/graph/{node => vertex}/types.py | 64 ++++++++++--------- tests/test_graph.py | 54 ++++++++-------- 10 files changed, 164 insertions(+), 158 deletions(-) rename src/backend/langflow/graph/{node => vertex}/__init__.py (100%) rename src/backend/langflow/graph/{node => vertex}/base.py (92%) rename src/backend/langflow/graph/{node => vertex}/constants.py (100%) rename src/backend/langflow/graph/{node => vertex}/types.py (78%) diff --git a/src/backend/langflow/api/validate.py b/src/backend/langflow/api/validate.py index 53a7ee350..e90e554f0 100644 --- a/src/backend/langflow/api/validate.py +++ b/src/backend/langflow/api/validate.py @@ -9,7 +9,7 @@ from langflow.api.base import ( PromptValidationResponse, validate_prompt, ) -from langflow.graph.node.types import VectorStoreNode +from langflow.graph.vertex.types import VectorStoreVertex from langflow.interface.run import build_graph from langflow.utils.logger import logger from langflow.utils.validate import validate_code @@ -49,7 +49,7 @@ def post_validate_node(node_id: str, data: dict): node = graph.get_node(node_id) if node is None: raise ValueError(f"Node {node_id} not found") - if not isinstance(node, VectorStoreNode): + if not isinstance(node, VectorStoreVertex): node.build() return json.dumps({"valid": True, "params": str(node._built_object_repr())}) except Exception as e: diff --git a/src/backend/langflow/graph/__init__.py b/src/backend/langflow/graph/__init__.py index 
44859da02..a68e844ee 100644 --- a/src/backend/langflow/graph/__init__.py +++ b/src/backend/langflow/graph/__init__.py @@ -1,35 +1,35 @@ from langflow.graph.edge.base import Edge from langflow.graph.graph.base import Graph -from langflow.graph.node.base import Node -from langflow.graph.node.types import ( - AgentNode, - ChainNode, - DocumentLoaderNode, - EmbeddingNode, - LLMNode, - MemoryNode, - PromptNode, - TextSplitterNode, - ToolNode, - ToolkitNode, - VectorStoreNode, - WrapperNode, +from langflow.graph.vertex.base import Vertex +from langflow.graph.vertex.types import ( + AgentVertex, + ChainVertex, + DocumentLoaderVertex, + EmbeddingVertex, + LLMVertex, + MemoryVertex, + PromptVertex, + TextSplitterVertex, + ToolVertex, + ToolkitVertex, + VectorStoreVertex, + WrapperVertex, ) __all__ = [ "Graph", - "Node", + "Vertex", "Edge", - "AgentNode", - "ChainNode", - "DocumentLoaderNode", - "EmbeddingNode", - "LLMNode", - "MemoryNode", - "PromptNode", - "TextSplitterNode", - "ToolNode", - "ToolkitNode", - "VectorStoreNode", - "WrapperNode", + "AgentVertex", + "ChainVertex", + "DocumentLoaderVertex", + "EmbeddingVertex", + "LLMVertex", + "MemoryVertex", + "PromptVertex", + "TextSplitterVertex", + "ToolVertex", + "ToolkitVertex", + "VectorStoreVertex", + "WrapperVertex", ] diff --git a/src/backend/langflow/graph/edge/base.py b/src/backend/langflow/graph/edge/base.py index 2bf5a1ba4..08f084a5c 100644 --- a/src/backend/langflow/graph/edge/base.py +++ b/src/backend/langflow/graph/edge/base.py @@ -2,13 +2,13 @@ from langflow.utils.logger import logger from typing import TYPE_CHECKING if TYPE_CHECKING: - from langflow.graph.node.base import Node + from langflow.graph.vertex.base import Vertex class Edge: - def __init__(self, source: "Node", target: "Node"): - self.source: "Node" = source - self.target: "Node" = target + def __init__(self, source: "Vertex", target: "Vertex"): + self.source: "Vertex" = source + self.target: "Vertex" = target self.validate_edge() def 
validate_edge(self) -> None: @@ -41,7 +41,7 @@ class Edge: logger.debug(self.target_reqs) if no_matched_type: raise ValueError( - f"Edge between {self.source.node_type} and {self.target.node_type} " + f"Edge between {self.source.vertex_type} and {self.target.vertex_type} " f"has no matched type" ) diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 3ba67837f..020f539ec 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -1,12 +1,12 @@ from typing import Dict, List, Type, Union from langflow.graph.edge.base import Edge -from langflow.graph.graph.constants import NODE_TYPE_MAP -from langflow.graph.node.base import Node -from langflow.graph.node.types import ( - FileToolNode, - LLMNode, - ToolkitNode, +from langflow.graph.graph.constants import VERTEX_TYPE_MAP +from langflow.graph.vertex.base import Vertex +from langflow.graph.vertex.types import ( + FileToolVertex, + LLMVertex, + ToolkitVertex, ) from langflow.interface.tools.constants import FILE_TOOLS from langflow.utils import payload @@ -26,7 +26,7 @@ class Graph: def _build_graph(self) -> None: """Builds the graph from the nodes and edges.""" - self.nodes = self._build_nodes() + self.nodes = self._build_vertices() self.edges = self._build_edges() for edge in self.edges: edge.source.add_edge(edge) @@ -43,12 +43,12 @@ class Graph: llm_node = None for node in self.nodes: node._build_params() - if isinstance(node, LLMNode): + if isinstance(node, LLMVertex): llm_node = node if llm_node: for node in self.nodes: - if isinstance(node, ToolkitNode): + if isinstance(node, ToolkitVertex): node.params["llm"] = llm_node def _remove_invalid_nodes(self) -> None: @@ -60,23 +60,23 @@ class Graph: or (len(self.nodes) == 1 and len(self.edges) == 0) ] - def _validate_node(self, node: Node) -> bool: + def _validate_node(self, node: Vertex) -> bool: """Validates a node.""" # All nodes that do not have edges are invalid return 
len(node.edges) > 0 - def get_node(self, node_id: str) -> Union[None, Node]: + def get_node(self, node_id: str) -> Union[None, Vertex]: """Returns a node by id.""" return next((node for node in self.nodes if node.id == node_id), None) - def get_nodes_with_target(self, node: Node) -> List[Node]: + def get_nodes_with_target(self, node: Vertex) -> List[Vertex]: """Returns the nodes connected to a node.""" - connected_nodes: List[Node] = [ + connected_nodes: List[Vertex] = [ edge.source for edge in self.edges if edge.target == node ] return connected_nodes - def build(self) -> List[Node]: + def build(self) -> List[Vertex]: """Builds the graph.""" # Get root node root_node = payload.get_root_node(self) @@ -84,9 +84,9 @@ class Graph: raise ValueError("No root node found") return root_node.build() - def get_node_neighbors(self, node: Node) -> Dict[Node, int]: + def get_node_neighbors(self, node: Vertex) -> Dict[Vertex, int]: """Returns the neighbors of a node.""" - neighbors: Dict[Node, int] = {} + neighbors: Dict[Vertex, int] = {} for edge in self.edges: if edge.source == node: neighbor = edge.target @@ -117,28 +117,30 @@ class Graph: edges.append(Edge(source, target)) return edges - def _get_node_class(self, node_type: str, node_lc_type: str) -> Type[Node]: + def _get_vertex_class(self, node_type: str, node_lc_type: str) -> Type[Vertex]: """Returns the node class based on the node type.""" if node_type in FILE_TOOLS: - return FileToolNode - if node_type in NODE_TYPE_MAP: - return NODE_TYPE_MAP[node_type] - return NODE_TYPE_MAP[node_lc_type] if node_lc_type in NODE_TYPE_MAP else Node + return FileToolVertex + if node_type in VERTEX_TYPE_MAP: + return VERTEX_TYPE_MAP[node_type] + return ( + VERTEX_TYPE_MAP[node_lc_type] if node_lc_type in VERTEX_TYPE_MAP else Vertex + ) - def _build_nodes(self) -> List[Node]: - """Builds the nodes of the graph.""" - nodes: List[Node] = [] + def _build_vertices(self) -> List[Vertex]: + """Builds the vertices of the graph.""" + nodes: 
List[Vertex] = [] for node in self._nodes: node_data = node["data"] node_type: str = node_data["type"] # type: ignore node_lc_type: str = node_data["node"]["template"]["_type"] # type: ignore - NodeClass = self._get_node_class(node_type, node_lc_type) - nodes.append(NodeClass(node)) + VertexClass = self._get_vertex_class(node_type, node_lc_type) + nodes.append(VertexClass(node)) return nodes - def get_children_by_node_type(self, node: Node, node_type: str) -> List[Node]: + def get_children_by_node_type(self, node: Vertex, node_type: str) -> List[Vertex]: """Returns the children of a node based on the node type.""" children = [] node_types = [node.data["type"]] diff --git a/src/backend/langflow/graph/graph/constants.py b/src/backend/langflow/graph/graph/constants.py index f5bc9b8e3..ff1317d39 100644 --- a/src/backend/langflow/graph/graph/constants.py +++ b/src/backend/langflow/graph/graph/constants.py @@ -1,17 +1,17 @@ -from langflow.graph.node.base import Node -from langflow.graph.node.types import ( - AgentNode, - ChainNode, - DocumentLoaderNode, - EmbeddingNode, - LLMNode, - MemoryNode, - PromptNode, - TextSplitterNode, - ToolNode, - ToolkitNode, - VectorStoreNode, - WrapperNode, +from langflow.graph.vertex.base import Vertex +from langflow.graph.vertex.types import ( + AgentVertex, + ChainVertex, + DocumentLoaderVertex, + EmbeddingVertex, + LLMVertex, + MemoryVertex, + PromptVertex, + TextSplitterVertex, + ToolVertex, + ToolkitVertex, + VectorStoreVertex, + WrapperVertex, ) from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator @@ -33,17 +33,17 @@ from typing import Dict, Type DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"] -NODE_TYPE_MAP: Dict[str, Type[Node]] = { - **{t: PromptNode for t in prompt_creator.to_list()}, - **{t: AgentNode for t in agent_creator.to_list()}, - **{t: ChainNode for t in chain_creator.to_list()}, - **{t: ToolNode for t in tool_creator.to_list()}, - **{t: 
ToolkitNode for t in toolkits_creator.to_list()}, - **{t: WrapperNode for t in wrapper_creator.to_list()}, - **{t: LLMNode for t in llm_creator.to_list()}, - **{t: MemoryNode for t in memory_creator.to_list()}, - **{t: EmbeddingNode for t in embedding_creator.to_list()}, - **{t: VectorStoreNode for t in vectorstore_creator.to_list()}, - **{t: DocumentLoaderNode for t in documentloader_creator.to_list()}, - **{t: TextSplitterNode for t in textsplitter_creator.to_list()}, +VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = { + **{t: PromptVertex for t in prompt_creator.to_list()}, + **{t: AgentVertex for t in agent_creator.to_list()}, + **{t: ChainVertex for t in chain_creator.to_list()}, + **{t: ToolVertex for t in tool_creator.to_list()}, + **{t: ToolkitVertex for t in toolkits_creator.to_list()}, + **{t: WrapperVertex for t in wrapper_creator.to_list()}, + **{t: LLMVertex for t in llm_creator.to_list()}, + **{t: MemoryVertex for t in memory_creator.to_list()}, + **{t: EmbeddingVertex for t in embedding_creator.to_list()}, + **{t: VectorStoreVertex for t in vectorstore_creator.to_list()}, + **{t: DocumentLoaderVertex for t in documentloader_creator.to_list()}, + **{t: TextSplitterVertex for t in textsplitter_creator.to_list()}, } diff --git a/src/backend/langflow/graph/node/__init__.py b/src/backend/langflow/graph/vertex/__init__.py similarity index 100% rename from src/backend/langflow/graph/node/__init__.py rename to src/backend/langflow/graph/vertex/__init__.py diff --git a/src/backend/langflow/graph/node/base.py b/src/backend/langflow/graph/vertex/base.py similarity index 92% rename from src/backend/langflow/graph/node/base.py rename to src/backend/langflow/graph/vertex/base.py index 5076deb9c..4593e0a40 100644 --- a/src/backend/langflow/graph/node/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -1,5 +1,5 @@ from langflow.cache import base as cache_utils -from langflow.graph.node.constants import DIRECT_TYPES +from langflow.graph.vertex.constants import 
DIRECT_TYPES from langflow.interface import loading from langflow.interface.listing import ALL_TYPES_DICT from langflow.utils.logger import logger @@ -17,7 +17,7 @@ if TYPE_CHECKING: from langflow.graph.edge.base import Edge -class Node: +class Vertex: def __init__(self, data: Dict, base_type: Optional[str] = None) -> None: self.id: str = data["id"] self._data = data @@ -48,12 +48,12 @@ class Node: ] template_dict = self.data["node"]["template"] - self.node_type = ( + self.vertex_type = ( self.data["type"] if "Tool" not in self.output else template_dict["_type"] ) if self.base_type is None: for base_type, value in ALL_TYPES_DICT.items(): - if self.node_type in value: + if self.vertex_type in value: self.base_type = base_type break @@ -113,7 +113,7 @@ class Node: if value["required"] and not edges: # If a required parameter is not found, raise an error raise ValueError( - f"Required input {key} for module {self.node_type} not found" + f"Required input {key} for module {self.vertex_type} not found" ) elif value["list"]: # If this is a list parameter, append all sources to a list @@ -128,7 +128,7 @@ class Node: # so we need to check if value has value new_value = value.get("value") if new_value is None: - warnings.warn(f"Value for {key} in {self.node_type} is None. ") + warnings.warn(f"Value for {key} in {self.vertex_type} is None. 
") if value.get("type") == "int": with contextlib.suppress(TypeError, ValueError): new_value = int(new_value) # type: ignore @@ -148,12 +148,12 @@ class Node: # and continue # Another aspect is that the node_type is the class that we need to import # and instantiate with these built params - logger.debug(f"Building {self.node_type}") + logger.debug(f"Building {self.vertex_type}") # Build each node in the params dict for key, value in self.params.copy().items(): # Check if Node or list of Nodes and not self # to avoid recursion - if isinstance(value, Node): + if isinstance(value, Vertex): if value == self: del self.params[key] continue @@ -177,7 +177,7 @@ class Node: self.params[key] = result elif isinstance(value, list) and all( - isinstance(node, Node) for node in value + isinstance(node, Vertex) for node in value ): self.params[key] = [] for node in value: @@ -193,17 +193,17 @@ class Node: try: self._built_object = loading.instantiate_class( - node_type=self.node_type, + node_type=self.vertex_type, base_type=self.base_type, params=self.params, ) except Exception as exc: raise ValueError( - f"Error building node {self.node_type}: {str(exc)}" + f"Error building node {self.vertex_type}: {str(exc)}" ) from exc if self._built_object is None: - raise ValueError(f"Node type {self.node_type} not found") + raise ValueError(f"Node type {self.vertex_type} not found") self._built = True @@ -220,7 +220,7 @@ class Node: return f"Node(id={self.id}, data={self.data})" def __eq__(self, __o: object) -> bool: - return self.id == __o.id if isinstance(__o, Node) else False + return self.id == __o.id if isinstance(__o, Vertex) else False def __hash__(self) -> int: return id(self) diff --git a/src/backend/langflow/graph/node/constants.py b/src/backend/langflow/graph/vertex/constants.py similarity index 100% rename from src/backend/langflow/graph/node/constants.py rename to src/backend/langflow/graph/vertex/constants.py diff --git a/src/backend/langflow/graph/node/types.py 
b/src/backend/langflow/graph/vertex/types.py similarity index 78% rename from src/backend/langflow/graph/node/types.py rename to src/backend/langflow/graph/vertex/types.py index 9b25fd6ee..4a3290c13 100644 --- a/src/backend/langflow/graph/node/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -1,22 +1,22 @@ from typing import Any, Dict, List, Optional, Union -from langflow.graph.node.base import Node +from langflow.graph.vertex.base import Vertex from langflow.graph.utils import extract_input_variables_from_prompt -class AgentNode(Node): +class AgentVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="agents") - self.tools: List[ToolNode] = [] - self.chains: List[ChainNode] = [] + self.tools: List[ToolVertex] = [] + self.chains: List[ChainVertex] = [] def _set_tools_and_chains(self) -> None: for edge in self.edges: source_node = edge.source - if isinstance(source_node, ToolNode): + if isinstance(source_node, ToolVertex): self.tools.append(source_node) - elif isinstance(source_node, ChainNode): + elif isinstance(source_node, ChainVertex): self.chains.append(source_node) def build(self, force: bool = False) -> Any: @@ -33,24 +33,28 @@ class AgentNode(Node): self._build() #! 
Cannot deepcopy VectorStore, VectorStoreRouter, or SQL agents - if self.node_type in ["VectorStoreAgent", "VectorStoreRouterAgent", "SQLAgent"]: + if self.vertex_type in [ + "VectorStoreAgent", + "VectorStoreRouterAgent", + "SQLAgent", + ]: return self._built_object return self._built_object -class ToolNode(Node): +class ToolVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="tools") -class PromptNode(Node): +class PromptVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="prompts") def build( self, force: bool = False, - tools: Optional[Union[List[Node], List[ToolNode]]] = None, + tools: Optional[Union[List[Vertex], List[ToolVertex]]] = None, ) -> Any: if not self._built or force: if ( @@ -59,7 +63,7 @@ class PromptNode(Node): ): self.params["input_variables"] = [] # Check if it is a ZeroShotPrompt and needs a tool - if "ShotPrompt" in self.node_type: + if "ShotPrompt" in self.vertex_type: tools = ( [tool_node.build() for tool_node in tools] if tools is not None @@ -83,31 +87,31 @@ class PromptNode(Node): return self._built_object -class ChainNode(Node): +class ChainVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="chains") def build( self, force: bool = False, - tools: Optional[Union[List[Node], List[ToolNode]]] = None, + tools: Optional[Union[List[Vertex], List[ToolVertex]]] = None, ) -> Any: if not self._built or force: # Check if the chain requires a PromptNode for key, value in self.params.items(): - if isinstance(value, PromptNode): + if isinstance(value, PromptVertex): # Build the PromptNode, passing the tools if available self.params[key] = value.build(tools=tools, force=force) self._build() #! 
Cannot deepcopy SQLDatabaseChain - if self.node_type in ["SQLDatabaseChain"]: + if self.vertex_type in ["SQLDatabaseChain"]: return self._built_object return self._built_object -class LLMNode(Node): +class LLMVertex(Vertex): built_node_type = None class_built_object = None @@ -117,28 +121,28 @@ class LLMNode(Node): def build(self, force: bool = False) -> Any: # LLM is different because some models might take up too much memory # or time to load. So we only load them when we need them.ß - if self.node_type == self.built_node_type: + if self.vertex_type == self.built_node_type: return self.class_built_object if not self._built or force: self._build() - self.built_node_type = self.node_type + self.built_node_type = self.vertex_type self.class_built_object = self._built_object # Avoid deepcopying the LLM # that are loaded from a file return self._built_object -class ToolkitNode(Node): +class ToolkitVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="toolkits") -class FileToolNode(ToolNode): +class FileToolVertex(ToolVertex): def __init__(self, data: Dict): super().__init__(data) -class WrapperNode(Node): +class WrapperVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="wrappers") @@ -150,7 +154,7 @@ class WrapperNode(Node): return self._built_object -class DocumentLoaderNode(Node): +class DocumentLoaderVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="documentloaders") @@ -158,17 +162,17 @@ class DocumentLoaderNode(Node): # This built_object is a list of documents. Maybe we should # show how many documents are in the list? 
if self._built_object: - return f"""{self.node_type}({len(self._built_object)} documents) + return f"""{self.vertex_type}({len(self._built_object)} documents) Documents: {self._built_object[:3]}...""" - return f"{self.node_type}()" + return f"{self.vertex_type}()" -class EmbeddingNode(Node): +class EmbeddingVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="embeddings") -class VectorStoreNode(Node): +class VectorStoreVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="vectorstores") @@ -176,12 +180,12 @@ class VectorStoreNode(Node): return "Vector stores can take time to build. It will build on the first query." -class MemoryNode(Node): +class MemoryVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="memory") -class TextSplitterNode(Node): +class TextSplitterVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="textsplitters") @@ -189,5 +193,5 @@ class TextSplitterNode(Node): # This built_object is a list of documents. Maybe we should # show how many documents are in the list? 
if self._built_object: - return f"""{self.node_type}({len(self._built_object)} documents)\nDocuments: {self._built_object[:3]}...""" - return f"{self.node_type}()" + return f"""{self.vertex_type}({len(self._built_object)} documents)\nDocuments: {self._built_object[:3]}...""" + return f"{self.vertex_type}()" diff --git a/tests/test_graph.py b/tests/test_graph.py index cdbe0ba93..c7b5ddf0c 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,20 +1,20 @@ from typing import Type, Union from langflow.graph.edge.base import Edge -from langflow.graph.node.base import Node +from langflow.graph.vertex.base import Vertex import pytest from langchain.chains.base import Chain from langchain.llms.fake import FakeListLLM from langflow.graph import Graph -from langflow.graph.node.types import ( - AgentNode, - ChainNode, - FileToolNode, - LLMNode, - PromptNode, - ToolkitNode, - ToolNode, - WrapperNode, +from langflow.graph.vertex.types import ( + AgentVertex, + ChainVertex, + FileToolVertex, + LLMVertex, + PromptVertex, + ToolkitVertex, + ToolVertex, + WrapperVertex, ) from langflow.interface.run import get_result_and_thought from langflow.utils.payload import get_root_node @@ -25,7 +25,7 @@ from langflow.utils.payload import get_root_node # BASIC_EXAMPLE_PATH, COMPLEX_EXAMPLE_PATH, OPENAPI_EXAMPLE_PATH -def get_node_by_type(graph, node_type: Type[Node]) -> Union[Node, None]: +def get_node_by_type(graph, node_type: Type[Vertex]) -> Union[Vertex, None]: """Get a node by type""" return next((node for node in graph.nodes if isinstance(node, node_type)), None) @@ -35,7 +35,7 @@ def test_graph_structure(basic_graph): assert len(basic_graph.nodes) > 0 assert len(basic_graph.edges) > 0 for node in basic_graph.nodes: - assert isinstance(node, Node) + assert isinstance(node, Vertex) for edge in basic_graph.edges: assert isinstance(edge, Edge) assert edge.source in basic_graph.nodes @@ -165,7 +165,7 @@ def test_get_node(basic_graph): """Test getting a single node""" node_id = 
basic_graph.nodes[0].id node = basic_graph.get_node(node_id) - assert isinstance(node, Node) + assert isinstance(node, Vertex) assert node.id == node_id @@ -174,7 +174,7 @@ def test_build_nodes(basic_graph): assert len(basic_graph.nodes) == len(basic_graph._nodes) for node in basic_graph.nodes: - assert isinstance(node, Node) + assert isinstance(node, Vertex) def test_build_edges(basic_graph): @@ -182,8 +182,8 @@ def test_build_edges(basic_graph): assert len(basic_graph.edges) == len(basic_graph._edges) for edge in basic_graph.edges: assert isinstance(edge, Edge) - assert isinstance(edge.source, Node) - assert isinstance(edge.target, Node) + assert isinstance(edge.source, Vertex) + assert isinstance(edge.target, Vertex) def test_get_root_node(basic_graph, complex_graph): @@ -191,13 +191,13 @@ def test_get_root_node(basic_graph, complex_graph): assert isinstance(basic_graph, Graph) root = get_root_node(basic_graph) assert root is not None - assert isinstance(root, Node) + assert isinstance(root, Vertex) assert root.data["type"] == "TimeTravelGuideChain" # For complex example, the root node is a ZeroShotAgent too assert isinstance(complex_graph, Graph) root = get_root_node(complex_graph) assert root is not None - assert isinstance(root, Node) + assert isinstance(root, Vertex) assert root.data["type"] == "ZeroShotAgent" @@ -257,14 +257,14 @@ def assert_agent_was_built(graph): def test_agent_node_build(complex_graph): - agent_node = get_node_by_type(complex_graph, AgentNode) + agent_node = get_node_by_type(complex_graph, AgentVertex) assert agent_node is not None built_object = agent_node.build() assert built_object is not None def test_tool_node_build(complex_graph): - tool_node = get_node_by_type(complex_graph, ToolNode) + tool_node = get_node_by_type(complex_graph, ToolVertex) assert tool_node is not None built_object = tool_node.build() assert built_object is not None @@ -272,7 +272,7 @@ def test_tool_node_build(complex_graph): def 
test_chain_node_build(complex_graph): - chain_node = get_node_by_type(complex_graph, ChainNode) + chain_node = get_node_by_type(complex_graph, ChainVertex) assert chain_node is not None built_object = chain_node.build() assert built_object is not None @@ -280,7 +280,7 @@ def test_chain_node_build(complex_graph): def test_prompt_node_build(complex_graph): - prompt_node = get_node_by_type(complex_graph, PromptNode) + prompt_node = get_node_by_type(complex_graph, PromptVertex) assert prompt_node is not None built_object = prompt_node.build() assert built_object is not None @@ -288,7 +288,7 @@ def test_prompt_node_build(complex_graph): def test_llm_node_build(basic_graph): - llm_node = get_node_by_type(basic_graph, LLMNode) + llm_node = get_node_by_type(basic_graph, LLMVertex) assert llm_node is not None built_object = llm_node.build() assert built_object is not None @@ -296,7 +296,7 @@ def test_llm_node_build(basic_graph): def test_toolkit_node_build(openapi_graph): - toolkit_node = get_node_by_type(openapi_graph, ToolkitNode) + toolkit_node = get_node_by_type(openapi_graph, ToolkitVertex) assert toolkit_node is not None built_object = toolkit_node.build() assert built_object is not None @@ -304,7 +304,7 @@ def test_toolkit_node_build(openapi_graph): def test_file_tool_node_build(openapi_graph): - file_tool_node = get_node_by_type(openapi_graph, FileToolNode) + file_tool_node = get_node_by_type(openapi_graph, FileToolVertex) assert file_tool_node is not None built_object = file_tool_node.build() assert built_object is not None @@ -312,7 +312,7 @@ def test_file_tool_node_build(openapi_graph): def test_wrapper_node_build(openapi_graph): - wrapper_node = get_node_by_type(openapi_graph, WrapperNode) + wrapper_node = get_node_by_type(openapi_graph, WrapperVertex) assert wrapper_node is not None built_object = wrapper_node.build() assert built_object is not None @@ -327,7 +327,7 @@ def test_get_result_and_thought(basic_graph): message = "Hello" # Find the node that is an 
LLMNode and change the # _built_object to a FakeListLLM - llm_node = get_node_by_type(basic_graph, LLMNode) + llm_node = get_node_by_type(basic_graph, LLMVertex) assert llm_node is not None llm_node._built_object = FakeListLLM(responses=responses) llm_node._built = True From da5b15fa6899e9a461d72b6d6806a2575f183914 Mon Sep 17 00:00:00 2001 From: Cristhian Zanforlin Lousa Date: Thu, 1 Jun 2023 17:47:27 -0300 Subject: [PATCH 13/43] Bringing the menu options opened on search --- .../pages/FlowPage/components/DisclosureComponent/index.tsx | 5 +++-- .../FlowPage/components/extraSidebarComponent/index.tsx | 3 +++ src/frontend/src/types/components/index.ts | 1 + 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/frontend/src/pages/FlowPage/components/DisclosureComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/DisclosureComponent/index.tsx index 31bf78156..6878ad3c4 100644 --- a/src/frontend/src/pages/FlowPage/components/DisclosureComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/DisclosureComponent/index.tsx @@ -5,6 +5,7 @@ import { DisclosureComponentType } from "../../../../types/components"; export default function DisclosureComponent({ button: { title, Icon, buttons = [] }, children, + openDisc, }: DisclosureComponentType) { return ( @@ -27,14 +28,14 @@ export default function DisclosureComponent({
- + {children} diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index e8dbfbb6b..deeed6c70 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -15,6 +15,7 @@ import { MagnifyingGlassIcon } from "@heroicons/react/24/outline"; export default function ExtraSidebar() { const { data } = useContext(typesContext); const [dataFilter, setFilterData] = useState(data); + const [search, setSearch] = useState(""); function onDragStart( event: React.DragEvent, @@ -58,6 +59,7 @@ export default function ExtraSidebar() { className="dark:text-white focus:outline-none block w-full rounded-md py-1.5 ps-3 pr-9 text-gray-900 shadow-sm ring-1 ring-inset ring-gray-300 placeholder:text-gray-400 sm:text-sm sm:leading-6 dark:ring-0 dark:bg-[#2d3747] dark:focus:outline-none" onChange={(e) => { handleSearchInput(e.target.value); + setSearch(e.target.value); }} />
@@ -71,6 +73,7 @@ export default function ExtraSidebar() { .map((d: keyof APIObjectType, i) => Object.keys(dataFilter[d]).length > 0 ? ( >; From 76bbf6247502bf67fa7575d78902e613544f3e6c Mon Sep 17 00:00:00 2001 From: Jacob Lee Date: Thu, 1 Jun 2023 18:04:56 -0500 Subject: [PATCH 14/43] Surface an error message when a flow has an unknown node type. This improves the user experience for issues like #283. --- src/frontend/src/contexts/tabsContext.tsx | 38 +++++++++++++---------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/src/frontend/src/contexts/tabsContext.tsx b/src/frontend/src/contexts/tabsContext.tsx index 1d7ad43d5..acc7011bc 100644 --- a/src/frontend/src/contexts/tabsContext.tsx +++ b/src/frontend/src/contexts/tabsContext.tsx @@ -47,7 +47,7 @@ export const TabsContext = createContext( ); export function TabsProvider({ children }: { children: ReactNode }) { - const { setNoticeData } = useContext(alertContext); + const { setErrorData, setNoticeData } = useContext(alertContext); const [tabIndex, setTabIndex] = useState(0); const [flows, setFlows] = useState>([]); const [id, setId] = useState(uuidv4()); @@ -95,25 +95,25 @@ export function TabsProvider({ children }: { children: ReactNode }) { edge.style = { stroke: "#555555" }; }); flow.data.nodes.forEach((node) => { - if (Object.keys(templates[node.data.type]["template"]).length > 0) { - node.data.node.base_classes = - templates[node.data.type]["base_classes"]; + const template = templates[node.data.type]; + if (!template) { + setErrorData({ title: `Unknown node type: ${node.data.type}` }); + return; + } + if (Object.keys(template["template"]).length > 0) { + node.data.node.base_classes = template["base_classes"]; flow.data.edges.forEach((edge) => { if (edge.source === node.id) { edge.sourceHandle = edge.sourceHandle .split("|") .slice(0, 2) - .concat(templates[node.data.type]["base_classes"]) + .concat(template["base_classes"]) .join("|"); } }); - node.data.node.description = - 
templates[node.data.type]["description"]; + node.data.node.description = template["description"]; node.data.node.template = updateTemplate( - templates[node.data.type][ - "template" - ] as unknown as APITemplateType, - + template["template"] as unknown as APITemplateType, node.data.node.template as APITemplateType ); } @@ -316,21 +316,25 @@ export function TabsProvider({ children }: { children: ReactNode }) { edge.animated = edge.targetHandle.split("|")[0] === "Text"; }); data.nodes.forEach((node) => { - if (Object.keys(templates[node.data.type]["template"]).length > 0) { - node.data.node.base_classes = - templates[node.data.type]["base_classes"]; + const template = templates[node.data.type]; + if (!template) { + setErrorData({ title: `Unknown node type: ${node.data.type}` }); + return; + } + if (Object.keys(template["template"]).length > 0) { + node.data.node.base_classes = template["base_classes"]; flow.data.edges.forEach((edge) => { if (edge.source === node.id) { edge.sourceHandle = edge.sourceHandle .split("|") .slice(0, 2) - .concat(templates[node.data.type]["base_classes"]) + .concat(template["base_classes"]) .join("|"); } }); - node.data.node.description = templates[node.data.type]["description"]; + node.data.node.description = template["description"]; node.data.node.template = updateTemplate( - templates[node.data.type]["template"] as unknown as APITemplateType, + template["template"] as unknown as APITemplateType, node.data.node.template as APITemplateType ); } From 8a049187df86a524aeb3cfd8260fb17042874986 Mon Sep 17 00:00:00 2001 From: Jim White Date: Thu, 1 Jun 2023 22:55:02 -0700 Subject: [PATCH 15/43] Add Anthropic to LLMs including an icon. 
--- poetry.lock | 369 ++++-------------- pyproject.toml | 1 + src/backend/langflow/config.yaml | 1 + .../src/icons/Anthropic/anthropic.svg | 19 + .../src/icons/Anthropic/anthropic_box.svg | 11 + src/frontend/src/icons/Anthropic/index.tsx | 9 + src/frontend/src/utils.ts | 2 + 7 files changed, 121 insertions(+), 291 deletions(-) create mode 100644 src/frontend/src/icons/Anthropic/anthropic.svg create mode 100644 src/frontend/src/icons/Anthropic/anthropic_box.svg create mode 100644 src/frontend/src/icons/Anthropic/index.tsx diff --git a/poetry.lock b/poetry.lock index 5e53d5282..fde4fcc7d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aiofiles" version = "23.1.0" description = "File support for asyncio." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -16,7 +15,6 @@ files = [ name = "aiohttp" version = "3.8.4" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -125,7 +123,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -140,7 +137,6 @@ frozenlist = ">=1.1.0" name = "aiostream" version = "0.4.5" description = "Generator-based operators for asynchronous iteration" -category = "main" optional = false python-versions = "*" files = [ @@ -148,11 +144,30 @@ files = [ {file = "aiostream-0.4.5.tar.gz", hash = "sha256:3ecbf87085230fbcd9605c32ca20c4fb41af02c71d076eab246ea22e35947d88"}, ] +[[package]] +name = "anthropic" +version = "0.2.9" +description = "Library for accessing the anthropic API" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"anthropic-0.2.9-py3-none-any.whl", hash = "sha256:e7cce215cf6c446de29280deb31b07b5587993d48e84850eaad3fc69bd1fec0a"}, + {file = "anthropic-0.2.9.tar.gz", hash = "sha256:2d44564d362cced6e8e662366e4de7f94dcdc6cb61346a5e528359b0afc1f2f3"}, +] + +[package.dependencies] +aiohttp = "*" +httpx = "*" +requests = "*" +tokenizers = "*" + +[package.extras] +dev = ["black (>=22.3.0)", "pytest"] + [[package]] name = "anyio" version = "3.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -174,7 +189,6 @@ trio = ["trio (<0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = "*" files = [ @@ -186,7 +200,6 @@ files = [ name = "argilla" version = "1.3.2" description = "Open-source tool for exploring, labeling, and monitoring data for NLP projects." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -214,7 +227,6 @@ server = ["Deprecated (>=1.2.0,<1.3.0)", "PyYAML (>=5.4.1,<6.1.0)", "aiofiles (> name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -232,7 +244,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -250,7 +261,6 @@ test = ["astroid", "pytest"] name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -262,7 +272,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -281,7 +290,6 @@ tests-no-zope = ["cloudpickle", 
"hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "authlib" version = "1.2.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." -category = "main" optional = false python-versions = "*" files = [ @@ -296,7 +304,6 @@ cryptography = ">=3.2" name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -308,7 +315,6 @@ files = [ name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -320,7 +326,6 @@ files = [ name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -339,7 +344,6 @@ lxml = ["lxml"] name = "black" version = "23.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -389,7 +393,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -401,7 +404,6 @@ files = [ name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -413,7 +415,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." 
-category = "main" optional = false python-versions = "*" files = [ @@ -490,7 +491,6 @@ pycparser = "*" name = "chardet" version = "5.1.0" description = "Universal encoding detector for Python 3" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -502,7 +502,6 @@ files = [ name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -587,7 +586,6 @@ files = [ name = "chromadb" version = "0.3.25" description = "Chroma." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -616,7 +614,6 @@ uvicorn = {version = ">=0.18.3", extras = ["standard"]} name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -631,7 +628,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "clickhouse-connect" version = "0.5.25" description = "ClickHouse core driver, SqlAlchemy, and Superset libraries" -category = "main" optional = false python-versions = "~=3.7" files = [ @@ -719,14 +715,13 @@ superset = ["apache-superset (>=1.4.1)"] [[package]] name = "cohere" -version = "4.6.0" +version = "4.6.1" description = "" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "cohere-4.6.0-py3-none-any.whl", hash = "sha256:fc60fa73a2d96bdb9f70da4a290d3ede320b74ac01a24c229011049d7cb3511f"}, - {file = "cohere-4.6.0.tar.gz", hash = "sha256:43218a0a40f6fc023e068732994fb631ce5d160a0bc9f9a3a22524b5932f34ea"}, + {file = "cohere-4.6.1-py3-none-any.whl", hash = "sha256:bee44741e91b22f49a36c9c4f014e2a26eb752251fb860176a0462c6a1ae4e79"}, + {file = "cohere-4.6.1.tar.gz", hash = "sha256:69f0cf0a6202f89de04358028e68b33aa7f8e11ea64fb4f5a28904632d2aa672"}, ] [package.dependencies] @@ -738,7 +733,6 @@ requests = 
">=2.0,<3.0" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -750,7 +744,6 @@ files = [ name = "coloredlogs" version = "15.0.1" description = "Colored terminal output for Python's logging module" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -768,7 +761,6 @@ cron = ["capturer (>=2.4)"] name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -788,7 +780,6 @@ typing = ["mypy (>=0.990)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -862,31 +853,30 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "40.0.2" +version = "41.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, - {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, - {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, - {file = 
"cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, - {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, 
+ {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, + {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, + {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, + {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, ] [package.dependencies] @@ -895,23 +885,22 @@ cffi 
= ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff"] -sdist = ["setuptools-rust (>=0.11.4)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -tox = ["tox"] [[package]] name = "ctransformers" -version = "0.2.2" +version = "0.2.4" description = "Python bindings for the Transformer models implemented in C/C++ using GGML library." -category = "main" optional = false python-versions = "*" files = [ - {file = "ctransformers-0.2.2-py3-none-any.whl", hash = "sha256:bf682dd0293dd87911c9a9a1169a4873ff55baebc16d465c6029c77f11b18cf6"}, - {file = "ctransformers-0.2.2.tar.gz", hash = "sha256:1fc36b3fde36d9fd3cb69e48993315bb1f5f54ae552720eb909dc4b3a131c743"}, + {file = "ctransformers-0.2.4-py3-none-any.whl", hash = "sha256:0476764b50863ab5032f75c5fefbfb8bebe19e5aadfa103bc9e0706edfc530be"}, + {file = "ctransformers-0.2.4.tar.gz", hash = "sha256:e569260cee73b3ffb24aa1473a56539f75bfbca54a89c83844b3fbcb952f00a8"}, ] [package.dependencies] @@ -924,7 +913,6 @@ tests = ["pytest"] name = "dataclasses-json" version = "0.5.7" description = "Easily serialize dataclasses to and from JSON" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -944,7 +932,6 @@ dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest ( name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -972,7 +959,6 @@ files = [ name = 
"decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -984,7 +970,6 @@ files = [ name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1002,7 +987,6 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "dill" version = "0.3.6" description = "serialize all of python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1017,7 +1001,6 @@ graph = ["objgraph (>=1.7.2)"] name = "docarray" version = "0.21.0" description = "The data structure for unstructured data" -category = "main" optional = false python-versions = "*" files = [ @@ -1044,14 +1027,13 @@ weaviate = ["weaviate-client (>=3.9.0,<3.10.0)"] [[package]] name = "docker" -version = "6.1.2" +version = "6.1.3" description = "A Python library for the Docker Engine API." 
-category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "docker-6.1.2-py3-none-any.whl", hash = "sha256:134cd828f84543cbf8e594ff81ca90c38288df3c0a559794c12f2e4b634ea19e"}, - {file = "docker-6.1.2.tar.gz", hash = "sha256:dcc088adc2ec4e7cfc594e275d8bd2c9738c56c808de97476939ef67db5af8c2"}, + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, ] [package.dependencies] @@ -1068,7 +1050,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.15" description = "Parse Python docstrings in reST, Google and Numpydoc format" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -1080,7 +1061,6 @@ files = [ name = "duckdb" version = "0.8.0" description = "DuckDB embedded database" -category = "main" optional = false python-versions = "*" files = [ @@ -1137,7 +1117,6 @@ files = [ name = "ecdsa" version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1156,7 +1135,6 @@ gmpy2 = ["gmpy2"] name = "et-xmlfile" version = "1.1.0" description = "An implementation of lxml.xmlfile for the standard library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1168,7 +1146,6 @@ files = [ name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1183,7 +1160,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -1198,7 +1174,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "fake-useragent" 
version = "1.1.3" description = "Up-to-date simple useragent faker with real world database" -category = "main" optional = false python-versions = "*" files = [ @@ -1213,7 +1188,6 @@ importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""} name = "fastapi" version = "0.95.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1235,7 +1209,6 @@ test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==23.1.0)", "coverage[toml] (>=6 name = "filelock" version = "3.12.0" description = "A platform independent file lock." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1251,7 +1224,6 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "p name = "flatbuffers" version = "23.5.26" description = "The FlatBuffers serialization format for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1263,7 +1235,6 @@ files = [ name = "frozenlist" version = "1.3.3" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1347,7 +1318,6 @@ files = [ name = "google-api-core" version = "2.11.0" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1370,7 +1340,6 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] name = "google-api-python-client" version = "2.88.0" description = "Google API Client Library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1379,7 +1348,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.19.0,<3.0.0dev" google-auth-httplib2 = ">=0.1.0" httplib2 = ">=0.15.0,<1dev" @@ -1387,14 +1356,13 @@ uritemplate = 
">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.19.0" +version = "2.19.1" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "google-auth-2.19.0.tar.gz", hash = "sha256:f39d528077ac540793dd3c22a8706178f157642a67d874db25c640b7fead277e"}, - {file = "google_auth-2.19.0-py2.py3-none-any.whl", hash = "sha256:be617bfaf77774008e9d177573f782e109188c8a64ae6e744285df5cea3e7df6"}, + {file = "google-auth-2.19.1.tar.gz", hash = "sha256:a9cfa88b3e16196845e64a3658eb953992129d13ac7337b064c6546f77c17183"}, + {file = "google_auth-2.19.1-py2.py3-none-any.whl", hash = "sha256:ea165e014c7cbd496558796b627c271aa8c18b4cba79dc1cc962b24c5efdfb85"}, ] [package.dependencies] @@ -1415,7 +1383,6 @@ requests = ["requests (>=2.20.0,<3.0.0dev)"] name = "google-auth-httplib2" version = "0.1.0" description = "Google Authentication Library: httplib2 transport" -category = "main" optional = false python-versions = "*" files = [ @@ -1432,7 +1399,6 @@ six = "*" name = "google-search-results" version = "2.4.2" description = "Scrape and search localized results from Google, Bing, Baidu, Yahoo, Yandex, Ebay, Homedepot, youtube at scale using SerpApi.com" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1446,7 +1412,6 @@ requests = "*" name = "googleapis-common-protos" version = "1.59.0" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1464,7 +1429,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0dev)"] name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -1538,7 +1502,6 @@ test = ["objgraph", "psutil"] name = "grpcio" version = "1.47.5" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1600,7 +1563,6 
@@ protobuf = ["grpcio-tools (>=1.47.5)"] name = "grpcio-health-checking" version = "1.47.5" description = "Standard Health Checking Service for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1616,7 +1578,6 @@ protobuf = ">=3.12.0" name = "grpcio-reflection" version = "1.47.5" description = "Standard Protobuf Reflection Service for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1632,7 +1593,6 @@ protobuf = ">=3.12.0" name = "grpcio-tools" version = "1.47.5" description = "Protobuf code generator for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1693,7 +1653,6 @@ setuptools = "*" name = "gunicorn" version = "20.1.0" description = "WSGI HTTP Server for UNIX" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1714,7 +1673,6 @@ tornado = ["tornado (>=0.2)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1726,7 +1684,6 @@ files = [ name = "h2" version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -1742,7 +1699,6 @@ hyperframe = ">=6.0,<7" name = "hnswlib" version = "0.7.0" description = "hnswlib" -category = "main" optional = false python-versions = "*" files = [ @@ -1756,7 +1712,6 @@ numpy = "*" name = "hpack" version = "4.0.0" description = "Pure-Python HPACK header compression" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -1768,7 +1723,6 @@ files = [ name = "httpcore" version = "0.16.3" description = "A minimal low-level HTTP client." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1780,17 +1734,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httplib2" version = "0.22.0" description = "A comprehensive HTTP client library." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1805,7 +1758,6 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 name = "httptools" version = "0.5.0" description = "A collection of framework independent HTTP protocol utils." -category = "main" optional = false python-versions = ">=3.5.0" files = [ @@ -1859,7 +1811,6 @@ test = ["Cython (>=0.29.24,<0.30.0)"] name = "httpx" version = "0.23.3" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1876,15 +1827,14 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" version = "0.13.4" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -1915,7 +1865,6 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t name = "humanfriendly" version = "10.0" description = "Human friendly output for text interfaces using Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1930,7 +1879,6 @@ pyreadline3 = {version = "*", markers = 
"sys_platform == \"win32\" and python_ve name = "hyperframe" version = "6.0.1" description = "HTTP/2 framing layer for Python" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -1942,7 +1890,6 @@ files = [ name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1954,7 +1901,6 @@ files = [ name = "importlib-metadata" version = "6.0.1" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1974,7 +1920,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1993,7 +1938,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2005,7 +1949,6 @@ files = [ name = "ipykernel" version = "6.23.1" description = "IPython Kernel for Jupyter" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2019,7 +1962,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -2039,7 +1982,6 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.13.2" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -2079,7 +2021,6 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "jcloud" version = "0.2.10" 
description = "Simplify deploying and managing Jina projects on Jina Cloud" -category = "main" optional = false python-versions = "*" files = [ @@ -2102,7 +2043,6 @@ test = ["black (==22.3.0)", "jina (>=3.7.0)", "mock", "pytest", "pytest-asyncio" name = "jedi" version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2122,7 +2062,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jina" version = "3.15.2" description = "Build multimodal AI services via cloud native technologies · Neural Search · Generative AI · MLOps" -category = "main" optional = false python-versions = "*" files = [ @@ -2240,7 +2179,6 @@ websockets = ["websockets"] name = "jina-hubble-sdk" version = "0.38.0" description = "SDK for Hubble API at Jina AI." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2266,7 +2204,6 @@ full = ["aiohttp", "black (==22.3.0)", "docker", "filelock", "flake8 (==4.0.1)", name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2284,7 +2221,6 @@ i18n = ["Babel (>=2.7)"] name = "joblib" version = "1.2.0" description = "Lightweight pipelining with Python functions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2296,7 +2232,6 @@ files = [ name = "jupyter-client" version = "8.2.0" description = "Jupyter protocol implementation and client libraries" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2306,7 +2241,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -2320,7 +2255,6 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-core" version = "5.3.0" description = "Jupyter core package. A base package on which Jupyter projects rely." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2341,7 +2275,6 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "langchain" version = "0.0.186" description = "Building applications with LLMs through composability" -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -2376,13 +2309,12 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] [[package]] name = "langchain-serve" -version = "0.0.40" +version = "0.0.41" description = "Langchain Serve - serve your langchain apps on Jina AI Cloud." 
-category = "main" optional = true python-versions = "*" files = [ - {file = "langchain-serve-0.0.40.tar.gz", hash = "sha256:c60b173fcf0b682fbb70d34e8f485ce168e2229f55cb5c4ffbc26a5206af1c06"}, + {file = "langchain-serve-0.0.41.tar.gz", hash = "sha256:fcf0d3ac9e48b5b24825ae2e1d8a383795c5744b18ef61cb74a99020a9e5d46a"}, ] [package.dependencies] @@ -2405,7 +2337,6 @@ test = ["psutil", "pytest", "pytest-asyncio"] name = "linkify-it-py" version = "2.0.2" description = "Links recognition library with FULL unicode support." -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2426,7 +2357,6 @@ test = ["coverage", "pytest", "pytest-cov"] name = "llama-cpp-python" version = "0.1.55" description = "A Python wrapper for llama.cpp" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2440,7 +2370,6 @@ typing-extensions = ">=4.5.0,<5.0.0" name = "lxml" version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -2533,7 +2462,6 @@ source = ["Cython (>=0.29.7)"] name = "lz4" version = "4.3.2" description = "LZ4 Bindings for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2583,7 +2511,6 @@ tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] name = "markdown" version = "3.4.3" description = "Python implementation of John Gruber's Markdown." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2601,7 +2528,6 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2628,7 +2554,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2688,7 +2613,6 @@ files = [ name = "marshmallow" version = "3.19.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2709,7 +2633,6 @@ tests = ["pytest", "pytz", "simplejson"] name = "marshmallow-enum" version = "1.5.1" description = "Enum field for Marshmallow" -category = "main" optional = false python-versions = "*" files = [ @@ -2724,7 +2647,6 @@ marshmallow = ">=2.0.0" name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2739,7 +2661,6 @@ traitlets = "*" name = "mdit-py-plugins" version = "0.3.5" description = "Collection of plugins for markdown-it-py" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2759,7 +2680,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2771,7 +2691,6 @@ files = [ name = "monotonic" version = "1.6" description = "An implementation of time.monotonic() for Python 2 & < 3.3" -category = "main" optional = false python-versions = "*" files = [ @@ -2783,7 +2702,6 @@ files = [ name = "mpmath" version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" -category = "main" optional = false python-versions = "*" files = [ @@ -2801,7 +2719,6 @@ tests = ["pytest (>=4.6)"] name = "msg-parser" version = "1.2.0" description = "This 
module enables reading, parsing and converting Microsoft Outlook MSG E-Mail files." -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -2819,7 +2736,6 @@ rtf = ["compressed-rtf (>=1.0.5)"] name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2903,7 +2819,6 @@ files = [ name = "mypy" version = "1.3.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2950,7 +2865,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2962,7 +2876,6 @@ files = [ name = "nest-asyncio" version = "1.5.6" description = "Patch asyncio to allow nested event loops" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2974,7 +2887,6 @@ files = [ name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2993,7 +2905,6 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nltk" version = "3.8.1" description = "Natural Language Toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3019,7 +2930,6 @@ twitter = ["twython"] name = "numexpr" version = "2.8.4" description = "Fast numerical expression evaluator for NumPy" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3062,7 +2972,6 @@ numpy = ">=1.13.3" name = "numpy" version = "1.23.5" description = "NumPy is the fundamental package for array computing with Python." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3100,7 +3009,6 @@ files = [ name = "olefile" version = "0.46" description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3111,7 +3019,6 @@ files = [ name = "onnxruntime" version = "1.15.0" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -category = "main" optional = false python-versions = "*" files = [ @@ -3153,7 +3060,6 @@ sympy = "*" name = "openai" version = "0.27.7" description = "Python client library for the OpenAI API" -category = "main" optional = false python-versions = ">=3.7.1" files = [ @@ -3168,7 +3074,7 @@ tqdm = "*" [package.extras] datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] @@ -3176,7 +3082,6 @@ wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1 name = "openapi-schema-pydantic" version = "1.2.4" description = "OpenAPI (v3) specification schema as pydantic class" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -3191,7 +3096,6 @@ pydantic = ">=1.8.2" name = "openpyxl" version = "3.1.2" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3206,7 +3110,6 @@ et-xmlfile = "*" name = "opentelemetry-api" version = 
"1.18.0" description = "OpenTelemetry Python API" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3223,7 +3126,6 @@ setuptools = ">=16.0" name = "opentelemetry-exporter-otlp" version = "1.18.0" description = "OpenTelemetry Collector Exporters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3239,7 +3141,6 @@ opentelemetry-exporter-otlp-proto-http = "1.18.0" name = "opentelemetry-exporter-otlp-proto-common" version = "1.18.0" description = "OpenTelemetry Protobuf encoding" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3254,7 +3155,6 @@ opentelemetry-proto = "1.18.0" name = "opentelemetry-exporter-otlp-proto-grpc" version = "1.18.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3279,7 +3179,6 @@ test = ["pytest-grpc"] name = "opentelemetry-exporter-otlp-proto-http" version = "1.18.0" description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3304,7 +3203,6 @@ test = ["responses (==0.22.0)"] name = "opentelemetry-exporter-prometheus" version = "1.12.0rc1" description = "Prometheus Metric Exporter for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3321,7 +3219,6 @@ prometheus-client = ">=0.5.0,<1.0.0" name = "opentelemetry-instrumentation" version = "0.39b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3338,7 +3235,6 @@ wrapt = ">=1.0.0,<2.0.0" name = "opentelemetry-instrumentation-aiohttp-client" version = "0.39b0" description = "OpenTelemetry aiohttp client instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3361,7 +3257,6 @@ test = ["opentelemetry-instrumentation-aiohttp-client[instruments]"] name = 
"opentelemetry-instrumentation-asgi" version = "0.39b0" description = "ASGI instrumentation for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3384,7 +3279,6 @@ test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-u name = "opentelemetry-instrumentation-fastapi" version = "0.39b0" description = "OpenTelemetry FastAPI Instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3407,7 +3301,6 @@ test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instrument name = "opentelemetry-instrumentation-grpc" version = "0.39b0" description = "OpenTelemetry gRPC instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3430,7 +3323,6 @@ test = ["opentelemetry-instrumentation-grpc[instruments]", "opentelemetry-sdk (> name = "opentelemetry-proto" version = "1.18.0" description = "OpenTelemetry Python Proto" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3445,7 +3337,6 @@ protobuf = ">=3.19,<5.0" name = "opentelemetry-sdk" version = "1.18.0" description = "OpenTelemetry Python SDK" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3463,7 +3354,6 @@ typing-extensions = ">=3.7.4" name = "opentelemetry-semantic-conventions" version = "0.39b0" description = "OpenTelemetry Semantic Conventions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3475,7 +3365,6 @@ files = [ name = "opentelemetry-util-http" version = "0.39b0" description = "Web util for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3487,7 +3376,6 @@ files = [ name = "overrides" version = "7.3.1" description = "A decorator to automatically detect mismatch when overriding a method." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3499,7 +3387,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3511,7 +3398,6 @@ files = [ name = "pandas" version = "1.5.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3560,7 +3446,6 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] name = "pandas-stubs" version = "2.0.1.230501" description = "Type annotations for pandas" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3575,7 +3460,6 @@ types-pytz = ">=2022.1.1" name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3591,7 +3475,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3603,7 +3486,6 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -3618,7 +3500,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -3630,7 +3511,6 @@ files = [ name = "pillow" version = "9.5.0" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3710,7 +3590,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "platformdirs" version = "3.5.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3726,7 +3605,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3742,7 +3620,6 @@ testing = ["pytest", "pytest-benchmark"] name = "portalocker" version = "2.7.0" description = "Wraps the portalocker recipe for easy usage" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3762,7 +3639,6 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p name = "posthog" version = "3.0.1" description = "Integrate PostHog into any python application." -category = "main" optional = false python-versions = "*" files = [ @@ -3786,7 +3662,6 @@ test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint" name = "prometheus-client" version = "0.17.0" description = "Python client for the Prometheus monitoring system." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3801,7 +3676,6 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.38" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -3816,7 +3690,6 @@ wcwidth = "*" name = "protobuf" version = "3.20.3" description = "Protocol Buffers" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3848,7 +3721,6 @@ files = [ name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3875,7 +3747,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2-binary" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3947,7 +3818,6 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -3959,7 +3829,6 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -3974,7 +3843,6 @@ tests = ["pytest"] name = "pyarrow" version = "11.0.0" description = "Python library for Apache Arrow" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4012,7 +3880,6 @@ numpy = ">=1.16.6" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -4024,7 +3891,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols 
modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -4039,7 +3905,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4051,7 +3916,6 @@ files = [ name = "pydantic" version = "1.10.8" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4104,7 +3968,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4119,7 +3982,6 @@ plugins = ["importlib-metadata"] name = "pypandoc" version = "1.11" description = "Thin wrapper for pandoc." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4131,7 +3993,6 @@ files = [ name = "pyparsing" version = "3.0.9" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -4146,7 +4007,6 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pypdf" version = "3.9.0" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4168,7 +4028,6 @@ image = ["Pillow"] name = "pyreadline3" version = "3.4.1" description = "A python implementation of GNU readline." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4180,7 +4039,6 @@ files = [ name = "pysrt" version = "1.1.2" description = "SubRip (.srt) subtitle parser and writer" -category = "main" optional = false python-versions = "*" files = [ @@ -4194,7 +4052,6 @@ chardet = "*" name = "pytest" version = "7.3.1" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4217,7 +4074,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4236,7 +4092,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -4251,7 +4106,6 @@ six = ">=1.5" name = "python-docx" version = "0.8.11" description = "Create and update Microsoft Word .docx files." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4265,7 +4119,6 @@ lxml = ">=2.3.2" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4280,7 +4133,6 @@ cli = ["click (>=5.0)"] name = "python-jose" version = "3.3.0" description = "JOSE implementation in Python" -category = "main" optional = false python-versions = "*" files = [ @@ -4302,7 +4154,6 @@ pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] name = "python-magic" version = "0.4.27" description = "File type identification using libmagic" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -4314,7 +4165,6 @@ files = [ name = "python-multipart" version = "0.0.6" description = "A streaming multipart parser for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4329,7 +4179,6 @@ dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatc name = "python-pptx" version = "0.6.21" description = "Generate and manipulate Open XML PowerPoint (.pptx) files" -category = "main" optional = false python-versions = "*" files = [ @@ -4345,7 +4194,6 @@ XlsxWriter = ">=0.5.7" name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -4357,7 +4205,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -4381,7 +4228,6 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4431,7 +4277,6 @@ files = [ name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" -category = "dev" 
optional = false python-versions = ">=3.6" files = [ @@ -4521,7 +4366,6 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "qdrant-client" version = "1.2.0" description = "Client library for the Qdrant vector search engine" -category = "main" optional = false python-versions = ">=3.7,<3.12" files = [ @@ -4543,7 +4387,6 @@ urllib3 = ">=1.26.14,<2.0.0" name = "regex" version = "2023.5.5" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4641,7 +4484,6 @@ files = [ name = "requests" version = "2.28.2" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7, <4" files = [ @@ -4663,7 +4505,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3986" version = "1.5.0" description = "Validating URI References per RFC 3986" -category = "main" optional = false python-versions = "*" files = [ @@ -4679,14 +4520,13 @@ idna2008 = ["idna"] [[package]] name = "rich" -version = "13.3.5" +version = "13.4.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.3.5-py3-none-any.whl", hash = "sha256:69cdf53799e63f38b95b9bf9c875f8c90e78dd62b2f00c13a911c7a3b9fa4704"}, - {file = "rich-13.3.5.tar.gz", hash = "sha256:2d11b9b8dd03868f09b4fffadc84a6a8cda574e40dc90821bd845720ebb8e89c"}, + {file = "rich-13.4.1-py3-none-any.whl", hash = "sha256:d204aadb50b936bf6b1a695385429d192bc1fdaf3e8b907e8e26f4c4e4b5bf75"}, + {file = "rich-13.4.1.tar.gz", hash = "sha256:76f6b65ea7e5c5d924ba80e322231d7cb5b5981aa60bfc1e694f1bc097fe6fe1"}, ] [package.dependencies] @@ -4700,7 +4540,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -4715,7 +4554,6 
@@ pyasn1 = ">=0.1.3" name = "ruff" version = "0.0.254" description = "An extremely fast Python linter, written in Rust." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4742,7 +4580,6 @@ files = [ name = "scikit-learn" version = "1.2.2" description = "A set of python modules for machine learning and data mining" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4785,7 +4622,6 @@ tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy ( name = "scipy" version = "1.10.1" description = "Fundamental algorithms for scientific computing in Python" -category = "main" optional = false python-versions = "<3.12,>=3.8" files = [ @@ -4824,7 +4660,6 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "sentence-transformers" version = "2.2.2" description = "Multilingual text embeddings" -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -4847,7 +4682,6 @@ transformers = ">=4.6.0,<5.0.0" name = "sentencepiece" version = "0.1.99" description = "SentencePiece python wrapper" -category = "main" optional = false python-versions = "*" files = [ @@ -4902,7 +4736,6 @@ files = [ name = "setuptools" version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4919,7 +4752,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -4931,7 +4763,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4943,7 +4774,6 @@ files = [ name = "soupsieve" version = "2.4.1" description = 
"A modern CSS selector implementation for Beautiful Soup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4955,7 +4785,6 @@ files = [ name = "sqlalchemy" version = "2.0.15" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5003,7 +4832,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} typing-extensions = ">=4.2.0" [package.extras] @@ -5033,7 +4862,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -5053,7 +4881,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "starlette" version = "0.27.0" description = "The little ASGI library that shines." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5072,7 +4899,6 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam name = "sympy" version = "1.12" description = "Computer algebra system (CAS) in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5087,7 +4913,6 @@ mpmath = ">=0.19" name = "tenacity" version = "8.2.2" description = "Retry code until it succeeds" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5100,14 +4925,13 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "textual" -version = "0.26.0" +version = "0.27.0" description = "Modern Text User Interface framework" -category = "main" optional = true python-versions = ">=3.7,<4.0" files = [ - {file = "textual-0.26.0-py3-none-any.whl", hash = "sha256:1efd04e9f61b3e95fd1c65436d3262f99e3f86cdeb524d13045bb551eb615c02"}, - {file = "textual-0.26.0.tar.gz", hash = "sha256:78094c83017d2836b726513abdf434cc034a0e68cc45e63b3b056c9b8b7fa673"}, + {file = "textual-0.27.0-py3-none-any.whl", hash = "sha256:dc45eaf7da330686c56d6f76f59d05fd216ce6aad90fa44ee269881efc622151"}, + {file = "textual-0.27.0.tar.gz", hash = "sha256:8bdcb09dc35a706ef939b1276ccfdec10eaaee6147b41cb7587cf33298a8dd33"}, ] [package.dependencies] @@ -5123,7 +4947,6 @@ dev = ["aiohttp (>=3.8.1)", "click (>=8.1.2)", "msgpack (>=1.0.3)"] name = "threadpoolctl" version = "3.1.0" description = "threadpoolctl" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5135,7 +4958,6 @@ files = [ name = "tiktoken" version = "0.3.3" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5181,7 +5003,6 @@ blobfile = ["blobfile (>=2)"] name = "tokenizers" version = "0.13.3" description = "Fast and Customizable Tokenizers" -category = "main" optional = false python-versions = "*" files = [ @@ -5236,7 +5057,6 @@ testing 
= ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" optional = true python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -5248,7 +5068,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5260,7 +5079,6 @@ files = [ name = "torch" version = "2.0.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -category = "main" optional = false python-versions = ">=3.8.0" files = [ @@ -5300,7 +5118,6 @@ opt-einsum = ["opt-einsum (>=3.3)"] name = "torchvision" version = "0.15.2" description = "image and video datasets and models for torch deep learning" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5328,7 +5145,7 @@ files = [ [package.dependencies] numpy = "*" -pillow = ">=5.3.0,<8.3.0 || >=8.4.0" +pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" requests = "*" torch = "2.0.1" @@ -5339,7 +5156,6 @@ scipy = ["scipy"] name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -5360,7 +5176,6 @@ files = [ name = "tqdm" version = "4.65.0" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5381,7 +5196,6 @@ telegram = ["requests"] name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5397,7 +5211,6 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "transformers" version = "4.29.0" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -5465,7 +5278,6 @@ vision = ["Pillow"] name = "typer" version = "0.7.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5486,7 +5298,6 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
name = "types-pillow" version = "9.5.0.4" description = "Typing stubs for Pillow" -category = "dev" optional = false python-versions = "*" files = [ @@ -5498,7 +5309,6 @@ files = [ name = "types-pytz" version = "2023.3.0.0" description = "Typing stubs for pytz" -category = "dev" optional = false python-versions = "*" files = [ @@ -5510,7 +5320,6 @@ files = [ name = "types-pyyaml" version = "6.0.12.10" description = "Typing stubs for PyYAML" -category = "main" optional = false python-versions = "*" files = [ @@ -5522,7 +5331,6 @@ files = [ name = "types-requests" version = "2.31.0.1" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ @@ -5537,7 +5345,6 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.13" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ @@ -5549,7 +5356,6 @@ files = [ name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5561,7 +5367,6 @@ files = [ name = "typing-inspect" version = "0.8.0" description = "Runtime inspection utilities for typing module." -category = "main" optional = false python-versions = "*" files = [ @@ -5577,7 +5382,6 @@ typing-extensions = ">=3.7.4" name = "uc-micro-py" version = "1.0.2" description = "Micro subset of unicode data files for linkify-it-py projects." -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -5592,7 +5396,6 @@ test = ["coverage", "pytest", "pytest-cov"] name = "unstructured" version = "0.5.13" description = "A library that prepares raw documents for downstream ML tasks." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -5632,7 +5435,6 @@ wikipedia = ["wikipedia"] name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5644,7 +5446,6 @@ files = [ name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -5661,7 +5462,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uvicorn" version = "0.20.0" description = "The lightning-fast ASGI server." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5676,7 +5476,7 @@ h11 = ">=0.8" httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} @@ -5687,7 +5487,6 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", name = "uvloop" version = "0.17.0" description = "Fast implementation of asyncio event loop on top of libuv" -category = "main" optional = false python-versions = ">=3.7" 
files = [ @@ -5732,7 +5531,6 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -5749,7 +5547,6 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "watchfiles" version = "0.19.0" description = "Simple, modern and high performance file watching and code reload in python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5784,7 +5581,6 @@ anyio = ">=3.0.0" name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -5796,7 +5592,6 @@ files = [ name = "weaviate-client" version = "3.19.2" description = "A python native weaviate client" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5817,7 +5612,6 @@ grpc = ["grpcio", "grpcio-tools"] name = "websocket-client" version = "1.5.2" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5834,7 +5628,6 @@ test = ["websockets"] name = "websockets" version = "11.0.3" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5914,7 +5707,6 @@ files = [ name = "wikipedia" version = "1.4.0" description = "Wikipedia API for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -5929,7 +5721,6 @@ requests = ">=2.0.0,<3.0.0" name = "wrapt" version = "1.14.1" description = "Module for decorators, wrappers and monkey patching." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -6003,7 +5794,6 @@ files = [ name = "xlsxwriter" version = "3.1.2" description = "A Python module for creating Excel XLSX files." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -6015,7 +5805,6 @@ files = [ name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6103,7 +5892,6 @@ multidict = ">=4.0" name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6119,7 +5907,6 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zstandard" version = "0.21.0" description = "Zstandard bindings for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6180,4 +5967,4 @@ deploy = ["langchain-serve"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "2bf357ad30f79c68751b34c991b4a73767ceb628657f4133228d4eb487d8a6fb" +content-hash = "7c5d90d962ae4ee5d725de2b5986d09f5aba60d68b327e0f96653dbba113d10e" diff --git a/pyproject.toml b/pyproject.toml index 78104f327..15e6ea6ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ jina = "3.15.2" sentence-transformers = "^2.2.2" ctransformers = "^0.2.2" cohere = "^4.6.0" +anthropic = "^0.2.9" [tool.poetry.group.dev.dependencies] diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 4060d1f3e..f2833ae26 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -55,6 +55,7 @@ llms: - LlamaCpp - CTransformers - Cohere + - Anthropic memories: - ConversationBufferMemory - ConversationSummaryMemory diff --git a/src/frontend/src/icons/Anthropic/anthropic.svg b/src/frontend/src/icons/Anthropic/anthropic.svg new file 
mode 100644 index 000000000..1cb8c0185 --- /dev/null +++ b/src/frontend/src/icons/Anthropic/anthropic.svg @@ -0,0 +1,19 @@ + + + + + + + \ No newline at end of file diff --git a/src/frontend/src/icons/Anthropic/anthropic_box.svg b/src/frontend/src/icons/Anthropic/anthropic_box.svg new file mode 100644 index 000000000..fa9923ed7 --- /dev/null +++ b/src/frontend/src/icons/Anthropic/anthropic_box.svg @@ -0,0 +1,11 @@ + + + + + + + + + \ No newline at end of file diff --git a/src/frontend/src/icons/Anthropic/index.tsx b/src/frontend/src/icons/Anthropic/index.tsx new file mode 100644 index 000000000..4cdf8f910 --- /dev/null +++ b/src/frontend/src/icons/Anthropic/index.tsx @@ -0,0 +1,9 @@ +import React, { forwardRef } from "react"; +import { ReactComponent as AnthropicSVG } from "./anthropic_box.svg"; + +export const AnthropicIcon = forwardRef< + SVGSVGElement, + React.PropsWithChildren<{}> +>((props, ref) => { + return ; +}); diff --git a/src/frontend/src/utils.ts b/src/frontend/src/utils.ts index 559080f3f..0e7289dd0 100644 --- a/src/frontend/src/utils.ts +++ b/src/frontend/src/utils.ts @@ -21,6 +21,7 @@ import { FlowType, NodeType } from "./types/flow"; import { APITemplateType, TemplateVariableType } from "./types/api"; import _ from "lodash"; import { ChromaIcon } from "./icons/ChromaIcon"; +import { AnthropicIcon } from "./icons/Anthropic"; import { AirbyteIcon } from "./icons/Airbyte"; import { AzIcon } from "./icons/AzLogo"; import { BingIcon } from "./icons/Bing"; @@ -153,6 +154,7 @@ export const nodeIcons: { AirbyteJSONLoader: AirbyteIcon, // SerpAPIWrapper: SerperIcon, // AZLyricsLoader: AzIcon, + Anthropic: AnthropicIcon, BingSearchAPIWrapper: BingIcon, BingSearchRun: BingIcon, Cohere: CohereIcon, From 4f152bc7efe9174e6adb3280d45a1eab29fcb661 Mon Sep 17 00:00:00 2001 From: Jim White Date: Thu, 1 Jun 2023 23:28:44 -0700 Subject: [PATCH 16/43] Fix view for regular Anthropic logo --- src/frontend/src/icons/Anthropic/anthropic.svg | 14 ++------------ 1 file 
changed, 2 insertions(+), 12 deletions(-) diff --git a/src/frontend/src/icons/Anthropic/anthropic.svg b/src/frontend/src/icons/Anthropic/anthropic.svg index 1cb8c0185..67ae02ea5 100644 --- a/src/frontend/src/icons/Anthropic/anthropic.svg +++ b/src/frontend/src/icons/Anthropic/anthropic.svg @@ -1,19 +1,9 @@ - + - + \ No newline at end of file From 7d49319696c1d0c4191772a473fe882b0cc9446c Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Fri, 2 Jun 2023 11:23:55 -0300 Subject: [PATCH 17/43] saving darkMode on localStorage --- src/frontend/src/contexts/darkContext.tsx | 5 ++++- .../pages/FlowPage/components/tabsManagerComponent/index.tsx | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/frontend/src/contexts/darkContext.tsx b/src/frontend/src/contexts/darkContext.tsx index f612fe381..2a76d0e53 100644 --- a/src/frontend/src/contexts/darkContext.tsx +++ b/src/frontend/src/contexts/darkContext.tsx @@ -13,13 +13,16 @@ const initialValue = { export const darkContext = createContext(initialValue); export function DarkProvider({ children }) { - const [dark, setDark] = useState(false); + const [dark, setDark] = useState( + JSON.parse(window.localStorage.getItem("isDark")) ?? false + ); useEffect(() => { if (dark) { document.getElementById("body").classList.add("dark"); } else { document.getElementById("body").classList.remove("dark"); } + window.localStorage.setItem("isDark", dark.toString()); }, [dark]); return (
-
+
{flows[tabIndex] ? ( From b7286afd323983f88c1085c40294e516c25be525 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 2 Jun 2023 12:14:03 -0300 Subject: [PATCH 18/43] =?UTF-8?q?=F0=9F=8E=A8=20style(types.py):=20improve?= =?UTF-8?q?=20readability=20of=20return=20statement=20in=20TextSplitterVer?= =?UTF-8?q?tex=20The=20return=20statement=20in=20TextSplitterVertex=20was?= =?UTF-8?q?=20improved=20to=20be=20more=20readable=20by=20adding=20a=20new?= =?UTF-8?q?=20line=20before=20the=20Documents=20field.=20This=20makes=20it?= =?UTF-8?q?=20easier=20to=20read=20and=20understand=20the=20output=20of=20?= =?UTF-8?q?the=20function.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/graph/vertex/types.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index 4a3290c13..7d61f2393 100644 --- a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -193,5 +193,6 @@ class TextSplitterVertex(Vertex): # This built_object is a list of documents. Maybe we should # show how many documents are in the list? 
if self._built_object: - return f"""{self.vertex_type}({len(self._built_object)} documents)\nDocuments: {self._built_object[:3]}...""" + return f"""{self.vertex_type}({len(self._built_object)} documents) + \nDocuments: {self._built_object[:3]}...""" return f"{self.vertex_type}()" From 534f47f1532ffc183a0d9d8c15cc5db830a0ac5b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 2 Jun 2023 14:14:47 -0300 Subject: [PATCH 19/43] =?UTF-8?q?=F0=9F=94=A8=20refactor(types.py):=20impo?= =?UTF-8?q?rt=20flatten=5Flist=20function=20from=20graph.utils=20module=20?= =?UTF-8?q?This=20commit=20simply=20imports=20the=20flatten=5Flist=20funct?= =?UTF-8?q?ion=20from=20the=20graph.utils=20module=20to=20be=20used=20in?= =?UTF-8?q?=20the=20AgentVertex=20class.=20This=20improves=20the=20readabi?= =?UTF-8?q?lity=20of=20the=20code=20and=20reduces=20the=20number=20of=20li?= =?UTF-8?q?nes=20of=20code.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/graph/vertex/types.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index 5b4e01ede..0b0d0923f 100644 --- a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -1,15 +1,15 @@ from typing import Any, Dict, List, Optional, Union from langflow.graph.vertex.base import Vertex -from langflow.graph.utils import extract_input_variables_from_prompt +from langflow.graph.utils import extract_input_variables_from_prompt, flatten_list class AgentVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="agents") - self.tools: List[Union[ToolNode, ToolkitNode]] = [] - self.chains: List[ChainNode] = [] + self.tools: List[Union[ToolVertex, ToolkitVertex]] = [] + self.chains: List[ChainVertex] = [] def _set_tools_and_chains(self) -> None: for edge in self.edges: @@ -94,10 +94,10 @@ class 
ChainVertex(Vertex): tools: Optional[Union[List[Vertex], List[ToolVertex]]] = None, ) -> Any: if not self._built or force: - # Check if the chain requires a PromptNode + # Check if the chain requires a PromptVertex for key, value in self.params.items(): if isinstance(value, PromptVertex): - # Build the PromptNode, passing the tools if available + # Build the PromptVertex, passing the tools if available self.params[key] = value.build(tools=tools, force=force) self._build() From 430043098dd280143b271168a7c87e52af2a1ac6 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Fri, 2 Jun 2023 14:15:06 -0300 Subject: [PATCH 20/43] fix dropdown value with wrong color --- src/frontend/src/components/dropdownComponent/index.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/frontend/src/components/dropdownComponent/index.tsx b/src/frontend/src/components/dropdownComponent/index.tsx index eebbe1554..0a214eb4e 100644 --- a/src/frontend/src/components/dropdownComponent/index.tsx +++ b/src/frontend/src/components/dropdownComponent/index.tsx @@ -25,7 +25,9 @@ export default function Dropdown({ <>
- {internalValue} + + {internalValue} + Date: Fri, 2 Jun 2023 14:21:38 -0300 Subject: [PATCH 21/43] =?UTF-8?q?=F0=9F=94=A8=20refactor(types.py):=20reor?= =?UTF-8?q?der=20class=20definitions=20to=20match=20the=20order=20of=20the?= =?UTF-8?q?ir=20usage=20in=20the=20code=20The=20order=20of=20the=20class?= =?UTF-8?q?=20definitions=20in=20the=20file=20has=20been=20changed=20to=20?= =?UTF-8?q?match=20the=20order=20of=20their=20usage=20in=20the=20code.=20T?= =?UTF-8?q?his=20improves=20the=20readability=20of=20the=20code=20and=20ma?= =?UTF-8?q?kes=20it=20easier=20to=20understand=20the=20relationships=20bet?= =?UTF-8?q?ween=20the=20classes.=20No=20functionality=20has=20been=20chang?= =?UTF-8?q?ed.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/graph/vertex/types.py | 137 ++++++++++----------- 1 file changed, 67 insertions(+), 70 deletions(-) diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index 0b0d0923f..b81e72439 100644 --- a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -8,13 +8,13 @@ class AgentVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="agents") - self.tools: List[Union[ToolVertex, ToolkitVertex]] = [] + self.tools: List[Union[ToolkitVertex, ToolVertex]] = [] self.chains: List[ChainVertex] = [] def _set_tools_and_chains(self) -> None: for edge in self.edges: source_node = edge.source - if isinstance(source_node, ToolVertex): + if isinstance(source_node, (ToolVertex, ToolkitVertex)): self.tools.append(source_node) elif isinstance(source_node, ChainVertex): self.chains.append(source_node) @@ -40,74 +40,6 @@ class ToolVertex(Vertex): super().__init__(data, base_type="tools") -class PromptVertex(Vertex): - def __init__(self, data: Dict): - super().__init__(data, base_type="prompts") - - def build( - self, - force: bool = False, - tools: Optional[Union[List[Vertex], 
List[ToolVertex]]] = None, - ) -> Any: - if not self._built or force: - if ( - "input_variables" not in self.params - or self.params["input_variables"] is None - ): - self.params["input_variables"] = [] - # Check if it is a ZeroShotPrompt and needs a tool - if "ShotPrompt" in self.vertex_type: - tools = ( - [tool_node.build() for tool_node in tools] - if tools is not None - else [] - ) - # flatten the list of tools if it is a list of lists - # first check if it is a list - if tools and isinstance(tools, list) and isinstance(tools[0], list): - tools = flatten_list(tools) - self.params["tools"] = tools - prompt_params = [ - key - for key, value in self.params.items() - if isinstance(value, str) and key != "format_instructions" - ] - else: - prompt_params = ["template"] - for param in prompt_params: - prompt_text = self.params[param] - variables = extract_input_variables_from_prompt(prompt_text) - self.params["input_variables"].extend(variables) - self.params["input_variables"] = list(set(self.params["input_variables"])) - - self._build() - return self._built_object - - -class ChainVertex(Vertex): - def __init__(self, data: Dict): - super().__init__(data, base_type="chains") - - def build( - self, - force: bool = False, - tools: Optional[Union[List[Vertex], List[ToolVertex]]] = None, - ) -> Any: - if not self._built or force: - # Check if the chain requires a PromptVertex - for key, value in self.params.items(): - if isinstance(value, PromptVertex): - # Build the PromptVertex, passing the tools if available - self.params[key] = value.build(tools=tools, force=force) - - self._build() - - #! 
Cannot deepcopy SQLDatabaseChain - if self.vertex_type in ["SQLDatabaseChain"]: - return self._built_object - return self._built_object - - class LLMVertex(Vertex): built_node_type = None class_built_object = None @@ -193,3 +125,68 @@ class TextSplitterVertex(Vertex): return f"""{self.vertex_type}({len(self._built_object)} documents) \nDocuments: {self._built_object[:3]}...""" return f"{self.vertex_type}()" + + +class ChainVertex(Vertex): + def __init__(self, data: Dict): + super().__init__(data, base_type="chains") + + def build( + self, + force: bool = False, + tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None, + ) -> Any: + if not self._built or force: + # Check if the chain requires a PromptVertex + for key, value in self.params.items(): + if isinstance(value, PromptVertex): + # Build the PromptVertex, passing the tools if available + self.params[key] = value.build(tools=tools, force=force) + + self._build() + + return self._built_object + + +class PromptVertex(Vertex): + def __init__(self, data: Dict): + super().__init__(data, base_type="prompts") + + def build( + self, + force: bool = False, + tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None, + ) -> Any: + if not self._built or force: + if ( + "input_variables" not in self.params + or self.params["input_variables"] is None + ): + self.params["input_variables"] = [] + # Check if it is a ZeroShotPrompt and needs a tool + if "ShotPrompt" in self.vertex_type: + tools = ( + [tool_node.build() for tool_node in tools] + if tools is not None + else [] + ) + # flatten the list of tools if it is a list of lists + # first check if it is a list + if tools and isinstance(tools, list) and isinstance(tools[0], list): + tools = flatten_list(tools) + self.params["tools"] = tools + prompt_params = [ + key + for key, value in self.params.items() + if isinstance(value, str) and key != "format_instructions" + ] + else: + prompt_params = ["template"] + for param in prompt_params: + prompt_text = 
self.params[param] + variables = extract_input_variables_from_prompt(prompt_text) + self.params["input_variables"].extend(variables) + self.params["input_variables"] = list(set(self.params["input_variables"])) + + self._build() + return self._built_object From 07afc08129a2fed81c452e243be931102ea250a5 Mon Sep 17 00:00:00 2001 From: Jim White Date: Fri, 2 Jun 2023 12:08:57 -0700 Subject: [PATCH 22/43] Add model names for Anthropic and add ChatAnthropic (preferred over the deprecated Anthropic LLM). --- src/backend/langflow/config.yaml | 1 + src/backend/langflow/interface/custom_lists.py | 2 ++ .../langflow/template/frontend_node/base.py | 3 +++ src/backend/langflow/utils/constants.py | 14 ++++++++++++++ src/backend/langflow/utils/util.py | 4 +++- src/frontend/src/utils.ts | 1 + 6 files changed, 24 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index f2833ae26..329d3321e 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -56,6 +56,7 @@ llms: - CTransformers - Cohere - Anthropic + - ChatAnthropic memories: - ConversationBufferMemory - ConversationSummaryMemory diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py index 0fea838b6..a944363ae 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/langflow/interface/custom_lists.py @@ -11,12 +11,14 @@ from langchain import ( text_splitter, ) from langchain.agents import agent_toolkits +from langchain.chat_models import ChatAnthropic from langchain.chat_models import ChatOpenAI from langflow.interface.importing.utils import import_class ## LLMs llm_type_to_cls_dict = llms.type_to_cls_dict +llm_type_to_cls_dict["anthropic-chat"] = ChatAnthropic # type: ignore llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore ## Chains diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py 
index a64195813..a97c7b8b0 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/langflow/template/frontend_node/base.py @@ -125,6 +125,9 @@ class FrontendNode(BaseModel): elif name == "ChatOpenAI" and key == "model_name": field.options = constants.CHAT_OPENAI_MODELS field.is_list = True + elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name": + field.options = constants.ANTHROPIC_MODELS + field.is_list = True if "api_key" in key and "OpenAI" in str(name): field.display_name = "OpenAI API Key" field.required = False diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py index 2d101ab98..d41cd7dfc 100644 --- a/src/backend/langflow/utils/constants.py +++ b/src/backend/langflow/utils/constants.py @@ -7,6 +7,20 @@ OPENAI_MODELS = [ ] CHAT_OPENAI_MODELS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"] +ANTHROPIC_MODELS = [ + "claude-v1", # largest model, ideal for a wide range of more complex tasks. + "claude-v1-100k", # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window. + "claude-instant-v1", # A smaller model with far lower latency, sampling at roughly 40 words/sec! + "claude-instant-v1-100k", # An enhanced version of claude-instant-v1 with a 100,000 token context window that retains its performance. + # Specific sub-versions of the above models: + "claude-v1.3", # Compared to claude-v1.2, it's more robust against red-team inputs, better at precise instruction-following, better at code, and better and non-English dialogue and writing. + "claude-v1.3-100k", # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window. + "claude-v1.2", # An improved version of claude-v1. It is slightly improved at general helpfulness, instruction following, coding, and other tasks. It is also considerably better with non-English languages. 
This model also has the ability to role play (in harmless ways) more consistently, and it defaults to writing somewhat longer and more thorough responses. + "claude-v1.0", # An earlier version of claude-v1. + "claude-instant-v1.1", # latest version of claude-instant-v1. It is better than claude-instant-v1.0 at a wide variety of tasks including writing, coding, and instruction following. + "claude-instant-v1.1-100k", # An enhanced version of claude-instant-v1.1 with a 100,000 token context window that retains its lightning fast 40 word/sec performance. + "claude-instant-v1.0", # An earlier version of claude-instant-v1. +] DEFAULT_PYTHON_FUNCTION = """ def python_function(text: str) -> str: diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py index 293d31154..f4e4927d8 100644 --- a/src/backend/langflow/utils/util.py +++ b/src/backend/langflow/utils/util.py @@ -302,7 +302,9 @@ def format_dict(d, name: Optional[str] = None): elif name == "ChatOpenAI" and key == "model_name": value["options"] = constants.CHAT_OPENAI_MODELS value["list"] = True - + elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name": + value["options"] = constants.ANTHROPIC_MODELS + value["list"] = True return d diff --git a/src/frontend/src/utils.ts b/src/frontend/src/utils.ts index 0e7289dd0..119383667 100644 --- a/src/frontend/src/utils.ts +++ b/src/frontend/src/utils.ts @@ -155,6 +155,7 @@ export const nodeIcons: { // SerpAPIWrapper: SerperIcon, // AZLyricsLoader: AzIcon, Anthropic: AnthropicIcon, + ChatAnthropic: AnthropicIcon, BingSearchAPIWrapper: BingIcon, BingSearchRun: BingIcon, Cohere: CohereIcon, From cce41189e9a2ba66c560c1c5bd57172cc32ff9d7 Mon Sep 17 00:00:00 2001 From: Jim White Date: Fri, 2 Jun 2023 12:18:38 -0700 Subject: [PATCH 23/43] Trim too long comments about Anthropic models. 
--- src/backend/langflow/utils/constants.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py index d41cd7dfc..1b6bbdcc3 100644 --- a/src/backend/langflow/utils/constants.py +++ b/src/backend/langflow/utils/constants.py @@ -11,14 +11,14 @@ ANTHROPIC_MODELS = [ "claude-v1", # largest model, ideal for a wide range of more complex tasks. "claude-v1-100k", # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window. "claude-instant-v1", # A smaller model with far lower latency, sampling at roughly 40 words/sec! - "claude-instant-v1-100k", # An enhanced version of claude-instant-v1 with a 100,000 token context window that retains its performance. + "claude-instant-v1-100k", # Like claude-instant-v1 with a 100,000 token context window but retains its performance. # Specific sub-versions of the above models: - "claude-v1.3", # Compared to claude-v1.2, it's more robust against red-team inputs, better at precise instruction-following, better at code, and better and non-English dialogue and writing. + "claude-v1.3", # Vs claude-v1.2: better instruction-following, code, and non-English dialogue and writing. "claude-v1.3-100k", # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window. - "claude-v1.2", # An improved version of claude-v1. It is slightly improved at general helpfulness, instruction following, coding, and other tasks. It is also considerably better with non-English languages. This model also has the ability to role play (in harmless ways) more consistently, and it defaults to writing somewhat longer and more thorough responses. + "claude-v1.2", # Vs claude-v1.1: small adv in general helpfulness, instruction following, coding, and other tasks. "claude-v1.0", # An earlier version of claude-v1. - "claude-instant-v1.1", # latest version of claude-instant-v1. 
It is better than claude-instant-v1.0 at a wide variety of tasks including writing, coding, and instruction following. - "claude-instant-v1.1-100k", # An enhanced version of claude-instant-v1.1 with a 100,000 token context window that retains its lightning fast 40 word/sec performance. + "claude-instant-v1.1", # Latest version of claude-instant-v1. Better than claude-instant-v1.0 at most tasks. + "claude-instant-v1.1-100k", # Version of claude-instant-v1.1 with a 100K token context window. "claude-instant-v1.0", # An earlier version of claude-instant-v1. ] From 02cb94709d2e6b4a82f7334199ab746c55e104b4 Mon Sep 17 00:00:00 2001 From: Cristhian Zanforlin Lousa Date: Fri, 2 Jun 2023 17:15:59 -0300 Subject: [PATCH 24/43] Adding shad tooltip and grouping by class on edges --- src/frontend/package-lock.json | 467 +++++++++++++++++- src/frontend/package.json | 6 + .../components/parameterComponent/index.tsx | 85 ++-- .../src/CustomNodes/GenericNode/index.tsx | 19 +- .../ReactTooltipComponent/index.tsx | 4 +- .../components/ShadTooltipComponent/index.tsx | 25 + src/frontend/src/components/ui/tooltip.tsx | 29 ++ src/frontend/src/index.css | 84 ++++ .../extraSidebarComponent/index.tsx | 14 +- src/frontend/src/utils.ts | 61 +++ src/frontend/tailwind.config.js | 85 +++- src/frontend/tsconfig.json | 3 +- 12 files changed, 812 insertions(+), 70 deletions(-) create mode 100644 src/frontend/src/components/ShadTooltipComponent/index.tsx create mode 100644 src/frontend/src/components/ui/tooltip.tsx diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json index 374236168..9000c133f 100644 --- a/src/frontend/package-lock.json +++ b/src/frontend/package-lock.json @@ -13,6 +13,7 @@ "@headlessui/react": "^1.7.10", "@heroicons/react": "^2.0.15", "@mui/material": "^5.11.9", + "@radix-ui/react-tooltip": "^1.0.6", "@tabler/icons-react": "^2.18.0", "@tailwindcss/forms": "^0.5.3", "@tailwindcss/line-clamp": "^0.4.4", @@ -20,7 +21,10 @@ "ansi-to-html": "^0.7.2", "axios": 
"^1.3.2", "base64-js": "^1.5.1", + "class-variance-authority": "^0.6.0", + "clsx": "^1.2.1", "lodash": "^4.17.21", + "lucide-react": "^0.233.0", "react": "^18.2.0", "react-ace": "^10.1.0", "react-cookie": "^4.1.1", @@ -37,6 +41,8 @@ "rehype-mathjax": "^4.0.2", "remark-gfm": "^3.0.1", "remark-math": "^5.1.1", + "tailwind-merge": "^1.13.0", + "tailwindcss-animate": "^1.0.5", "uuid": "^9.0.0", "vite-plugin-svgr": "^3.2.0", "web-vitals": "^2.1.4" @@ -911,6 +917,18 @@ "@floating-ui/core": "^1.2.6" } }, + "node_modules/@floating-ui/react-dom": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.0.tgz", + "integrity": "sha512-Ke0oU3SeuABC2C4OFu2mSAwHIP5WUiV98O9YWoHV4Q5aT6E9k06DV0Khi5uYspR8xmmBk08t8ZDcz3TR3ARkEg==", + "dependencies": { + "@floating-ui/dom": "^1.2.7" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, "node_modules/@headlessui/react": { "version": "1.7.10", "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.10.tgz", @@ -1274,6 +1292,407 @@ "url": "https://opencollective.com/popperjs" } }, + "node_modules/@radix-ui/primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.1.tgz", + "integrity": "sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw==", + "dependencies": { + "@babel/runtime": "^7.13.10" + } + }, + "node_modules/@radix-ui/react-arrow": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.0.3.tgz", + "integrity": "sha512-wSP+pHsB/jQRaL6voubsQ/ZlrGBHHrOjmBnr19hxYgtS0WvAFwZhK2WP/YY5yF9uKECCEEDGxuLxq1NBK51wFA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-primitive": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + 
"@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-compose-refs": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz", + "integrity": "sha512-fDSBgd44FKHa1FRMU59qBMPFcl2PZE+2nmqunj+BWFyYYjnhIDWL2ItDs3rrbJDQOtzt5nIebLCQc4QRfz6LJw==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-context": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.1.tgz", + "integrity": "sha512-ebbrdFoYTcuZ0v4wG5tedGnp9tzcV8awzsxYph7gXUyvnNLuTIcCk1q17JEbnVhXAKG9oX3KtchwiMIAYp9NLg==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz", + "integrity": "sha512-7UpBa/RKMoHJYjie1gkF1DlK8l1fdU/VKDpoS3rCCo8YBJR294GwcEHyxHw72yvphJ7ld0AXEcSLAzY2F/WyCg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-escape-keydown": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + 
"node_modules/@radix-ui/react-id": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.0.1.tgz", + "integrity": "sha512-tI7sT/kqYp8p96yGWY1OAnLHrqDgzHefRBKQ2YAkBS5ja7QLcZ9Z/uY7bEjPUatf8RomoXM8/1sMj1IJaE5UzQ==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-use-layout-effect": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.2.tgz", + "integrity": "sha512-1CnGGfFi/bbqtJZZ0P/NQY20xdG3E0LALJaLUEoKwPLwl6PPPfbeiCqMVQnhoFRAxjJj4RpBRJzDmUgsex2tSg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.0.3", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-layout-effect": "1.0.1", + "@radix-ui/react-use-rect": "1.0.1", + "@radix-ui/react-use-size": "1.0.1", + "@radix-ui/rect": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-portal": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.3.tgz", + "integrity": "sha512-xLYZeHrWoPmA5mEKEfZZevoVRK/Q43GfzRXkWV6qawIWWK8t6ifIiLQdd7rmQ4Vk1bmI21XhqF9BN3jWf+phpA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-primitive": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 
|| ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-presence": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.0.1.tgz", + "integrity": "sha512-UXLW4UAbIY5ZjcvzjfRFo5gxva8QirC9hF7wRE4U5gz+TP0DbRk+//qyuAQ1McDxBt1xNMBTaciFGvEmJvAZCg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-use-layout-effect": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-1.0.3.tgz", + "integrity": "sha512-yi58uVyoAcK/Nq1inRY56ZSjKypBNKTa/1mcL8qdl6oJeEaDbOldlzrGn7P6Q3Id5d+SYNGc5AJgc4vGhjs5+g==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-slot": "1.0.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-slot": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.2.tgz", + "integrity": "sha512-YeTpuq4deV+6DusvVUW4ivBgnkHwECUu0BiN43L5UCDFgdhsRUWAghhTF5MbvNTPzmiFOx90asDSUjWuCNapwg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 
|| ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.0.6.tgz", + "integrity": "sha512-DmNFOiwEc2UDigsYj6clJENma58OelxD24O4IODoZ+3sQc3Zb+L8w1EP+y9laTuKCLAysPw4fD6/v0j4KNV8rg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-popper": "1.1.2", + "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-presence": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-slot": "1.0.2", + "@radix-ui/react-use-controllable-state": "1.0.1", + "@radix-ui/react-visually-hidden": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-callback-ref": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.1.tgz", + "integrity": "sha512-D94LjX4Sp0xJFVaoQOd3OO9k7tpBYNOXdVhkltUbGv2Qb9OXdrg/CpsjlZv7ia14Sylv398LswWBVVu5nqKzAQ==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.1.tgz", + "integrity": 
"sha512-Svl5GY5FQeN758fWKrjM6Qb7asvXeiZltlT4U2gVfl8Gx5UAv2sMR0LWo8yhsIZh2oQ0eFdZ59aoOOMV7b47VA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-use-callback-ref": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-escape-keydown": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.3.tgz", + "integrity": "sha512-vyL82j40hcFicA+M4Ex7hVkB9vHgSse1ZWomAqV2Je3RleKGO5iM8KMOEtfoSB0PnIelMd2lATjTGMYqN5ylTg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-use-callback-ref": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.1.tgz", + "integrity": "sha512-v/5RegiJWYdoCvMnITBkNNx6bCj20fiaJnWtRkU18yITptraXjffz5Qbn05uOiQnOvi+dbkznkoaMltz1GnszQ==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-rect": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.0.1.tgz", + "integrity": "sha512-Cq5DLuSiuYVKNU8orzJMbl15TXilTnJKUCltMVQg53BQOF1/C5toAaGrowkgksdBQ9H+SRL23g0HDmg9tvmxXw==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/rect": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + 
} + } + }, + "node_modules/@radix-ui/react-use-size": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.0.1.tgz", + "integrity": "sha512-ibay+VqrgcaI6veAojjofPATwledXiSmX+C0KrBk/xgpX9rBzPV3OsfwlhQdUOFbh+LKQorLYT+xTXW9V8yd0g==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-use-layout-effect": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-visually-hidden": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.0.3.tgz", + "integrity": "sha512-D4w41yN5YRKtu464TLnByKzMDG/JlMPHtfZgQAu9v6mNakUqGUI9vUrfQKz8NK41VMm/xbZbh76NUTVtIYqOMA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-primitive": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/rect": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.0.1.tgz", + "integrity": "sha512-fyrgCaedtvMg9NK3en0pnOYJdtfwxUcNolezkNPUsoX57X8oQk+NkqcvzHXD2uKNij6GXmWU9NDru2IWjrO4BQ==", + "dependencies": { + "@babel/runtime": "^7.13.10" + } + }, "node_modules/@reactflow/background": { "version": "11.1.7", "resolved": "https://registry.npmjs.org/@reactflow/background/-/background-11.1.7.tgz", @@ -2422,7 +2841,7 @@ "version": "18.2.4", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.4.tgz", "integrity": "sha512-G2mHoTMTL4yoydITgOGwWdWMVd8sNgyEP85xVmMKAPUBwQWm9wBPQUmvbeF4V3WBY1P7mmL4BkjQ0SqUpf1snw==", - "dev": true, + "devOptional": true, "dependencies": { 
"@types/react": "*" } @@ -2947,6 +3366,25 @@ "node": ">= 6" } }, + "node_modules/class-variance-authority": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.6.0.tgz", + "integrity": "sha512-qdRDgfjx3GRb9fpwpSvn+YaidnT7IUJNe4wt5/SWwM+PmUwJUhQRk/8zAyNro0PmVfmen2635UboTjIBXXxy5A==", + "dependencies": { + "clsx": "1.2.1" + }, + "funding": { + "url": "https://joebell.co.uk" + }, + "peerDependencies": { + "typescript": ">= 4.5.5 < 6" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, "node_modules/classcat": { "version": "5.0.4", "resolved": "https://registry.npmjs.org/classcat/-/classcat-5.0.4.tgz", @@ -4728,6 +5166,14 @@ "yallist": "^3.0.2" } }, + "node_modules/lucide-react": { + "version": "0.233.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.233.0.tgz", + "integrity": "sha512-r0jMHF0vPDq2wBbZ0B3rtIcBjDyWDKpHu+vAjD2OHn2WLUr3HN5IHovtO0EMgQXuSI7YrMZbjsEZWC2uBHr8nQ==", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/lz-string": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.4.4.tgz", @@ -7001,6 +7447,15 @@ "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==" }, + "node_modules/tailwind-merge": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-1.13.0.tgz", + "integrity": "sha512-mUTmDbcU+IhOvJ0c42eLQ/nRkvolTqfpVaVQRSxfJAv9TabS6Y2zW/1wKpKLdKzyL3Gh8j6NTLl6MWNmvOM6kA==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/dcastil" + } + }, "node_modules/tailwindcss": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.3.2.tgz", @@ -7038,6 +7493,14 @@ "node": ">=14.0.0" } }, + "node_modules/tailwindcss-animate": { + "version": 
"1.0.5", + "resolved": "https://registry.npmjs.org/tailwindcss-animate/-/tailwindcss-animate-1.0.5.tgz", + "integrity": "sha512-UU3qrOJ4lFQABY+MVADmBm+0KW3xZyhMdRvejwtXqYOL7YjHYxmuREFAZdmVG5LPe5E9CAst846SLC4j5I3dcw==", + "peerDependencies": { + "tailwindcss": ">=3.0.0 || insiders" + } + }, "node_modules/terser": { "version": "5.16.3", "resolved": "https://registry.npmjs.org/terser/-/terser-5.16.3.tgz", @@ -7156,7 +7619,7 @@ "version": "5.0.4", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", - "dev": true, + "devOptional": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" diff --git a/src/frontend/package.json b/src/frontend/package.json index bd846ce19..c7af46608 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -8,6 +8,7 @@ "@headlessui/react": "^1.7.10", "@heroicons/react": "^2.0.15", "@mui/material": "^5.11.9", + "@radix-ui/react-tooltip": "^1.0.6", "@tabler/icons-react": "^2.18.0", "@tailwindcss/forms": "^0.5.3", "@tailwindcss/line-clamp": "^0.4.4", @@ -15,7 +16,10 @@ "ansi-to-html": "^0.7.2", "axios": "^1.3.2", "base64-js": "^1.5.1", + "class-variance-authority": "^0.6.0", + "clsx": "^1.2.1", "lodash": "^4.17.21", + "lucide-react": "^0.233.0", "react": "^18.2.0", "react-ace": "^10.1.0", "react-cookie": "^4.1.1", @@ -32,6 +36,8 @@ "rehype-mathjax": "^4.0.2", "remark-gfm": "^3.0.1", "remark-math": "^5.1.1", + "tailwind-merge": "^1.13.0", + "tailwindcss-animate": "^1.0.5", "uuid": "^9.0.0", "vite-plugin-svgr": "^3.2.0", "web-vitals": "^2.1.4" diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index e26ae589f..d6b8781a4 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ 
b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -1,6 +1,11 @@ import { Handle, Position, useUpdateNodeInternals } from "reactflow"; import Tooltip from "../../../../components/TooltipComponent"; -import { classNames, isValidConnection } from "../../../../utils"; +import { + classNames, + groupByFamily, + isValidConnection, + toFirstUpperCase, +} from "../../../../utils"; import { useContext, useEffect, useRef, useState } from "react"; import InputComponent from "../../../../components/inputComponent"; import ToggleComponent from "../../../../components/toggleComponent"; @@ -17,6 +22,8 @@ import IntComponent from "../../../../components/intComponent"; import PromptAreaComponent from "../../../../components/promptComponent"; import { nodeNames, nodeIcons } from "../../../../utils"; import React from "react"; +import { nodeColors } from "../../../../utils"; +import ShadTooltip from "../../../../components/ShadTooltipComponent"; export default function ParameterComponent({ left, @@ -30,8 +37,7 @@ export default function ParameterComponent({ required = false, }: ParameterComponentType) { const ref = useRef(null); - const refParent = useRef(""); - const refParentIcon = useRef(null); + const refHtml = useRef(null); const updateNodeInternals = useUpdateNodeInternals(); const [position, setPosition] = useState(0); useEffect(() => { @@ -55,16 +61,45 @@ export default function ParameterComponent({ const [myData, setMyData] = useState(useContext(typesContext).data); useEffect(() => { - Object.keys(myData).forEach((d) => { - let keys = Object.keys(myData[d]).filter( - (nd) => nd.toLowerCase() == data.type.toLowerCase() - ); - if (keys.length > 0) { - refParent.current = d; - refParentIcon.current = nodeIcons[d]; - } - }); - }, []); + const groupedObj = groupByFamily(myData, tooltipTitle); + + refHtml.current = groupedObj.map((item, i) => ( + 0 ? "items-center flex mt-3" : "items-center flex" + )} + > +
+ {React.createElement(nodeIcons[item.family])} +
+ + {nodeNames[item.family] ?? ""}{" "} + + {" "} + -  + {item.type.split(", ").length > 2 + ? item.type.split(", ").map((el, i) => ( + <> + + {i == item.type.split(", ").length - 1 + ? el + : (el += `, `)} + + {i % 2 == 0 && i > 0 &&

} + + )) + : item.type} +
+
+
+ )); + }, [tooltipTitle]); return (
) : ( - -
-
- {React.createElement(refParentIcon.current)} -
- - {nodeNames[refParent?.current] ?? ""} - -
- - } + -
+ )} {left === true && diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index 1a7f93ae5..79a241160 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -28,8 +28,8 @@ import NodeModal from "../../modals/NodeModal"; import { useCallback } from "react"; import { TabsContext } from "../../contexts/tabsContext"; import { debounce } from "../../utils"; -import TooltipReact from "../../components/ReactTooltipComponent"; import Tooltip from "../../components/TooltipComponent"; +import ShadTooltip from "../../components/ShadTooltipComponent"; export default function GenericNode({ data, selected, @@ -115,14 +115,9 @@ export default function GenericNode({ }} />
- +
{data.type}
-
+
@@ -253,11 +248,7 @@ export default function GenericNode({ : toTitleCase(t) } name={t} - tooltipTitle={ - "Type: " + - data.node.template[t].type + - (data.node.template[t].list ? " list" : "") - } + tooltipTitle={data.node.template[t].type} required={data.node.template[t].required} id={data.node.template[t].type + "|" + t + "|" + data.id} left={true} @@ -283,7 +274,7 @@ export default function GenericNode({ data={data} color={nodeColors[types[data.type]] ?? nodeColors.unknown} title={data.type} - tooltipTitle={`Type: ${data.node.base_classes.join(" | ")}`} + tooltipTitle={`${data.node.base_classes.join("\n")}`} id={[data.type, data.id, ...data.node.base_classes].join("|")} type={data.node.base_classes.join("|")} left={false} diff --git a/src/frontend/src/components/ReactTooltipComponent/index.tsx b/src/frontend/src/components/ReactTooltipComponent/index.tsx index aa736c212..cb2a54f7c 100644 --- a/src/frontend/src/components/ReactTooltipComponent/index.tsx +++ b/src/frontend/src/components/ReactTooltipComponent/index.tsx @@ -37,13 +37,15 @@ const TooltipReact: FC = ({ id={selector} content={content} className={classNames( - "!bg-white !text-xs !font-normal !text-gray-700 !shadow-md !opacity-100 z-20", + "!bg-white !text-xs !font-normal !text-gray-700 !shadow-md !opacity-100 z-[9999]", className )} place={position} clickable={clickable} isOpen={disabled ? 
false : undefined} delayShow={delayShow} + positionStrategy="absolute" + float={true} > {htmlContent && htmlContent} diff --git a/src/frontend/src/components/ShadTooltipComponent/index.tsx b/src/frontend/src/components/ShadTooltipComponent/index.tsx new file mode 100644 index 000000000..a360f3ff0 --- /dev/null +++ b/src/frontend/src/components/ShadTooltipComponent/index.tsx @@ -0,0 +1,25 @@ +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "../ui/tooltip"; + +const ShadTooltip = (props) => { + return ( + + + {props.children} + + {props.content} + + + + ); +}; + +export default ShadTooltip; diff --git a/src/frontend/src/components/ui/tooltip.tsx b/src/frontend/src/components/ui/tooltip.tsx new file mode 100644 index 000000000..b2ec0f701 --- /dev/null +++ b/src/frontend/src/components/ui/tooltip.tsx @@ -0,0 +1,29 @@ +"use client"; + +import * as React from "react"; +import * as TooltipPrimitive from "@radix-ui/react-tooltip"; +import { cn } from "../../utils"; + +const TooltipProvider = TooltipPrimitive.Provider; + +const Tooltip = TooltipPrimitive.Root; + +const TooltipTrigger = TooltipPrimitive.Trigger; + +const TooltipContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, sideOffset = 4, ...props }, ref) => ( + +)); +TooltipContent.displayName = TooltipPrimitive.Content.displayName; + +export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }; diff --git a/src/frontend/src/index.css b/src/frontend/src/index.css index 899fb1c87..8809ecc3a 100644 --- a/src/frontend/src/index.css +++ b/src/frontend/src/index.css @@ -2,6 +2,84 @@ @tailwind components; @tailwind utilities; +@layer base { + :root { + --background: 0 0% 100%; + --foreground: 222.2 47.4% 11.2%; + + --muted: 210 40% 96.1%; + --muted-foreground: 215.4 16.3% 46.9%; + + --popover: 0 0% 100%; + --popover-foreground: 222.2 47.4% 11.2%; + + --card: 0 0% 100%; + --card-foreground: 222.2 47.4% 11.2%; + + --border: 214.3 31.8% 
91.4%; + --input: 214.3 31.8% 91.4%; + + --primary: 222.2 47.4% 11.2%; + --primary-foreground: 210 40% 98%; + + --secondary: 210 40% 96.1%; + --secondary-foreground: 222.2 47.4% 11.2%; + + --accent: 210 40% 96.1%; + --accent-foreground: 222.2 47.4% 11.2%; + + --destructive: 0 100% 50%; + --destructive-foreground: 210 40% 98%; + + --ring: 215 20.2% 65.1%; + + --radius: 0.5rem; + } + + .dark { + --background: 224 71% 4%; + --foreground: 213 31% 91%; + + --muted: 223 47% 11%; + --muted-foreground: 215.4 16.3% 56.9%; + + --popover: 224 71% 4%; + --popover-foreground: 215 20.2% 65.1%; + + --card: 224 71% 4%; + --card-foreground: 213 31% 91%; + + --border: 216 34% 17%; + --input: 216 34% 17%; + + --primary: 210 40% 98%; + --primary-foreground: 222.2 47.4% 1.2%; + + --secondary: 222.2 47.4% 11.2%; + --secondary-foreground: 210 40% 98%; + + --accent: 216 34% 17%; + --accent-foreground: 210 40% 98%; + + --destructive: 0 63% 31%; + --destructive-foreground: 210 40% 98%; + + --ring: 216 34% 17%; + + --radius: 0.5rem; + } + } + + @layer base { + * { + @apply border-border; + } + body { + @apply bg-background text-foreground; + font-feature-settings: "rlig" 1, "calt" 1; + } + } + body { margin: 0; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", @@ -15,3 +93,9 @@ code { font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", monospace; } + +/* The style below sets the cursor property of the element with the class .react-flow__pane to the default cursor. +The cursor: default; property value restores the browser's default cursor style for the targeted element. By applying this style, the element will no longer have a custom cursor appearance such as "grab" or any other custom cursor defined elsewhere in the application. Instead, it will revert to the default cursor style determined by the browser, typically an arrow-shaped cursor. 
*/ +.react-flow__pane { + cursor: default; +} \ No newline at end of file diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index e8dbfbb6b..2a8ef1217 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -9,8 +9,8 @@ import { import { useContext, useEffect, useState, useRef } from "react"; import { typesContext } from "../../../../contexts/typesContext"; import { APIClassType, APIObjectType } from "../../../../types/api"; -import TooltipReact from "../../../../components/ReactTooltipComponent"; import { MagnifyingGlassIcon } from "@heroicons/react/24/outline"; +import ShadTooltip from "../../../../components/ShadTooltipComponent"; export default function ExtraSidebar() { const { data } = useContext(typesContext); @@ -81,12 +81,10 @@ export default function ExtraSidebar() { {Object.keys(dataFilter[d]) .sort() .map((t: string, k) => ( -
-
+ ))}
diff --git a/src/frontend/src/utils.ts b/src/frontend/src/utils.ts index 559080f3f..4e1ea4f47 100644 --- a/src/frontend/src/utils.ts +++ b/src/frontend/src/utils.ts @@ -47,6 +47,12 @@ import { WolframIcon } from "./icons/Wolfram"; import { WordIcon } from "./icons/Word"; import { SerperIcon } from "./icons/Serper"; import { v4 as uuidv4 } from "uuid"; +import { clsx, type ClassValue } from "clsx"; +import { twMerge } from "tailwind-merge"; + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)); +} export function classNames(...classes: Array) { return classes.filter(Boolean).join(" "); @@ -632,3 +638,58 @@ export function updateIds(newFlow, getNodeId) { e.targetHandle; }); } + +export function groupByFamily(data, baseClasses) { + let arrOfParent: string[] = []; + let arrOfType: { family: string; type: string }[] = []; + + Object.keys(data).map((d) => { + Object.keys(data[d]).map((n) => { + if ( + data[d][n].base_classes.some((r) => baseClasses.split("\n").includes(r)) + ) { + arrOfParent.push(d); + } + }); + }); + + let uniq = arrOfParent.filter( + (item, index) => arrOfParent.indexOf(item) === index + ); + + Object.keys(data).map((d) => { + Object.keys(data[d]).map((n) => { + baseClasses.split("\n").forEach((tol) => { + data[d][n].base_classes.forEach((data) => { + if (tol == data) { + arrOfType.push({ + family: d, + type: data, + }); + } + }); + }); + }); + }); + + let groupedBy = arrOfType.filter((object, index, self) => { + const foundIndex = self.findIndex( + (o) => o.family === object.family && o.type === object.type + ); + return foundIndex === index; + }); + + let groupedObj = groupedBy.reduce((result, item) => { + const existingGroup = result.find((group) => group.family === item.family); + + if (existingGroup) { + existingGroup.type += `, ${item.type}`; + } else { + result.push({ family: item.family, type: item.type }); + } + + return result; + }, []); + + return groupedObj; +} diff --git a/src/frontend/tailwind.config.js 
b/src/frontend/tailwind.config.js index 8df8b8c1e..5644a21bc 100644 --- a/src/frontend/tailwind.config.js +++ b/src/frontend/tailwind.config.js @@ -1,11 +1,83 @@ /** @type {import('tailwindcss').Config} */ +const { fontFamily } = require("tailwindcss/defaultTheme") + import plugin from "tailwindcss/plugin"; module.exports = { content: ["./index.html", "./src/**/*.{js,ts,tsx,jsx}"], darkMode: "class", important: true, theme: { + container: { + center: true, + padding: "2rem", + screens: { + "2xl": "1400px", + }, + }, extend: { + colors: { + border: "hsl(var(--border))", + input: "hsl(var(--input))", + ring: "hsl(var(--ring))", + background: "hsl(var(--background))", + foreground: "hsl(var(--foreground))", + primary: { + DEFAULT: "hsl(var(--primary))", + foreground: "hsl(var(--primary-foreground))", + }, + secondary: { + DEFAULT: "hsl(var(--secondary))", + foreground: "hsl(var(--secondary-foreground))", + }, + destructive: { + DEFAULT: "hsl(var(--destructive))", + foreground: "hsl(var(--destructive-foreground))", + }, + muted: { + DEFAULT: "hsl(var(--muted))", + foreground: "hsl(var(--muted-foreground))", + }, + accent: { + DEFAULT: "hsl(var(--accent))", + foreground: "hsl(var(--accent-foreground))", + }, + popover: { + DEFAULT: "hsl(var(--popover))", + foreground: "hsl(var(--popover-foreground))", + }, + card: { + DEFAULT: "hsl(var(--card))", + foreground: "hsl(var(--card-foreground))", + }, + }, + borderRadius: { + lg: `var(--radius)`, + md: `calc(var(--radius) - 2px)`, + sm: "calc(var(--radius) - 4px)", + }, + fontFamily: { + sans: ["var(--font-sans)", ...fontFamily.sans], + }, + keyframes: { + "accordion-down": { + from: { height: 0 }, + to: { height: "var(--radix-accordion-content-height)" }, + }, + "accordion-up": { + from: { height: "var(--radix-accordion-content-height)" }, + to: { height: 0 }, + }, + pulseGreen: { + "0%": { boxShadow: "0 0 0 0 rgba(72, 187, 120, 0.7)" }, + "100%": { boxShadow: "0 0 0 10px rgba(72, 187, 120, 0)" }, + }, + }, + animation: { + 
"accordion-down": "accordion-down 0.2s ease-out", + "accordion-up": "accordion-up 0.2s ease-out", + "pulse-green": "pulseGreen 1s linear", + 'spin-once': 'spin 1s linear 0.7' + }, borderColor: { "red-outline": "rgba(255, 0, 0, 0.8)", "green-outline": "rgba(72, 187, 120, 0.7)", @@ -14,17 +86,6 @@ module.exports = { "red-outline": "0 0 5px rgba(255, 0, 0, 0.5)", "green-outline": "0 0 5px rgba(72, 187, 120, 0.7)", }, - - animation: { - "pulse-green": "pulseGreen 1s linear", - 'spin-once': 'spin 1s linear 0.7' - }, - keyframes: { - pulseGreen: { - "0%": { boxShadow: "0 0 0 0 rgba(72, 187, 120, 0.7)" }, - "100%": { boxShadow: "0 0 0 10px rgba(72, 187, 120, 0)" }, - }, - }, }, }, plugins: [ @@ -96,4 +157,4 @@ module.exports = { }), require("@tailwindcss/typography"), ], -}; +}; \ No newline at end of file diff --git a/src/frontend/tsconfig.json b/src/frontend/tsconfig.json index 3a05105de..2ed005aff 100644 --- a/src/frontend/tsconfig.json +++ b/src/frontend/tsconfig.json @@ -19,7 +19,8 @@ "isolatedModules": true, "noEmit": true, "jsx": "react-jsx", - "noImplicitAny": false + "noImplicitAny": false, + "baseUrl": "." 
}, "include": [ "src" From f0ca7eeac8b5cbb37a23d1daf3e20293595228e1 Mon Sep 17 00:00:00 2001 From: Cristhian Zanforlin Lousa Date: Fri, 2 Jun 2023 17:29:22 -0300 Subject: [PATCH 25/43] formatting with codacy --- src/frontend/src/index.css | 92 +++++++++++++++++--------------------- 1 file changed, 40 insertions(+), 52 deletions(-) diff --git a/src/frontend/src/index.css b/src/frontend/src/index.css index 962e3c025..7c41b934e 100644 --- a/src/frontend/src/index.css +++ b/src/frontend/src/index.css @@ -6,70 +6,50 @@ :root { --background: 0 0% 100%; --foreground: 222.2 47.4% 11.2%; - --muted: 210 40% 96.1%; --muted-foreground: 215.4 16.3% 46.9%; - --popover: 0 0% 100%; --popover-foreground: 222.2 47.4% 11.2%; - --card: 0 0% 100%; --card-foreground: 222.2 47.4% 11.2%; - --border: 214.3 31.8% 91.4%; --input: 214.3 31.8% 91.4%; - --primary: 222.2 47.4% 11.2%; --primary-foreground: 210 40% 98%; - --secondary: 210 40% 96.1%; --secondary-foreground: 222.2 47.4% 11.2%; - --accent: 210 40% 96.1%; --accent-foreground: 222.2 47.4% 11.2%; - --destructive: 0 100% 50%; --destructive-foreground: 210 40% 98%; - --ring: 215 20.2% 65.1%; - --radius: 0.5rem; } - + .dark { - --background: 224 71% 4%; - --foreground: 213 31% 91%; - - --muted: 223 47% 11%; - --muted-foreground: 215.4 16.3% 56.9%; - - --popover: 224 71% 4%; - --popover-foreground: 215 20.2% 65.1%; - - --card: 224 71% 4%; - --card-foreground: 213 31% 91%; - - --border: 216 34% 17%; - --input: 216 34% 17%; - - --primary: 210 40% 98%; - --primary-foreground: 222.2 47.4% 1.2%; - - --secondary: 222.2 47.4% 11.2%; - --secondary-foreground: 210 40% 98%; - - --accent: 216 34% 17%; - --accent-foreground: 210 40% 98%; - - --destructive: 0 63% 31%; - --destructive-foreground: 210 40% 98%; - - --ring: 216 34% 17%; - - --radius: 0.5rem; + -background: 224 71% 4%; + -foreground: 213 31% 91%; + -muted: 223 47% 11%; + -muted-foreground: 215.4 16.3% 56.9%; + -popover: 224 71% 4%; + -popover-foreground: 215 20.2% 65.1%; + -card: 224 71% 4%; 
+ -card-foreground: 213 31% 91%; + -border: 216 34% 17%; + -input: 216 34% 17%; + -primary: 210 40% 98%; + -primary-foreground: 222.2 47.4% 1.2%; + -secondary: 222.2 47.4% 11.2%; + -secondary-foreground: 210 40% 98%; + -accent: 216 34% 17%; + -accent-foreground: 210 40% 98%; + -destructive: 0 63% 31%; + -destructive-foreground: 210 40% 98%; + -ring: 216 34% 17%; + -radius: 0.5rem; } } - + @layer base { * { @apply border-border; @@ -80,22 +60,30 @@ } } +@layer base { + * { + @apply border-border; + } + body { + @apply bg-background text-foreground; + font-feature-settings: "rlig" 1, "calt" 1; + } +} + body { - margin: 0; - font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", - "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", - sans-serif; - -webkit-font-smoothing: antialiased; - -moz-osx-font-smoothing: grayscale; + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", + "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", + sans-serif; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; } code { - font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", - monospace; + font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", + monospace; } -/* The style below sets the cursor property of the element with the class .react-flow__pane to the default cursor. -The cursor: default; property value restores the browser's default cursor style for the targeted element. By applying this style, the element will no longer have a custom cursor appearance such as "grab" or any other custom cursor defined elsewhere in the application. Instead, it will revert to the default cursor style determined by the browser, typically an arrow-shaped cursor. 
*/ .react-flow__pane { cursor: default; } From c8665ef42241b8b529568754d7d7b9199fdae9a9 Mon Sep 17 00:00:00 2001 From: Cristhian Zanforlin Lousa Date: Fri, 2 Jun 2023 17:35:30 -0300 Subject: [PATCH 26/43] Fixing css formatting codacy --- src/frontend/src/index.css | 86 +++++++++++++++++++------------------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/src/frontend/src/index.css b/src/frontend/src/index.css index 7c41b934e..14580c658 100644 --- a/src/frontend/src/index.css +++ b/src/frontend/src/index.css @@ -3,52 +3,52 @@ @tailwind utilities; @layer base { - :root { - --background: 0 0% 100%; - --foreground: 222.2 47.4% 11.2%; - --muted: 210 40% 96.1%; - --muted-foreground: 215.4 16.3% 46.9%; - --popover: 0 0% 100%; - --popover-foreground: 222.2 47.4% 11.2%; - --card: 0 0% 100%; - --card-foreground: 222.2 47.4% 11.2%; - --border: 214.3 31.8% 91.4%; - --input: 214.3 31.8% 91.4%; - --primary: 222.2 47.4% 11.2%; - --primary-foreground: 210 40% 98%; - --secondary: 210 40% 96.1%; - --secondary-foreground: 222.2 47.4% 11.2%; - --accent: 210 40% 96.1%; - --accent-foreground: 222.2 47.4% 11.2%; - --destructive: 0 100% 50%; - --destructive-foreground: 210 40% 98%; - --ring: 215 20.2% 65.1%; - --radius: 0.5rem; + :root { + --background: 0 0% 100%; + --foreground: 222.2 47.4% 11.2%; + --muted: 210 40% 96.1%; + --muted-foreground: 215.4 16.3% 46.9%; + --popover: 0 0% 100%; + --popover-foreground: 222.2 47.4% 11.2%; + --card: 0 0% 100%; + --card-foreground: 222.2 47.4% 11.2%; + --border: 214.3 31.8% 91.4%; + --input: 214.3 31.8% 91.4%; + --primary: 222.2 47.4% 11.2%; + --primary-foreground: 210 40% 98%; + --secondary: 210 40% 96.1%; + --secondary-foreground: 222.2 47.4% 11.2%; + --accent: 210 40% 96.1%; + --accent-foreground: 222.2 47.4% 11.2%; + --destructive: 0 100% 50%; + --destructive-foreground: 210 40% 98%; + --ring: 215 20.2% 65.1%; + --radius: 0.5rem; } - .dark { - -background: 224 71% 4%; - -foreground: 213 31% 91%; - -muted: 223 47% 11%; - 
-muted-foreground: 215.4 16.3% 56.9%; - -popover: 224 71% 4%; - -popover-foreground: 215 20.2% 65.1%; - -card: 224 71% 4%; - -card-foreground: 213 31% 91%; - -border: 216 34% 17%; - -input: 216 34% 17%; - -primary: 210 40% 98%; - -primary-foreground: 222.2 47.4% 1.2%; - -secondary: 222.2 47.4% 11.2%; - -secondary-foreground: 210 40% 98%; - -accent: 216 34% 17%; - -accent-foreground: 210 40% 98%; - -destructive: 0 63% 31%; - -destructive-foreground: 210 40% 98%; - -ring: 216 34% 17%; - -radius: 0.5rem; + .dark { + -background: 224 71% 4%; + -foreground: 213 31% 91%; + -muted: 223 47% 11%; + -muted-foreground: 215.4 16.3% 56.9%; + -popover: 224 71% 4%; + -popover-foreground: 215 20.2% 65.1%; + -card: 224 71% 4%; + -card-foreground: 213 31% 91%; + -border: 216 34% 17%; + -input: 216 34% 17%; + -primary: 210 40% 98%; + -primary-foreground: 222.2 47.4% 1.2%; + -secondary: 222.2 47.4% 11.2%; + -secondary-foreground: 210 40% 98%; + -accent: 216 34% 17%; + -accent-foreground: 210 40% 98%; + -destructive: 0 63% 31%; + -destructive-foreground: 210 40% 98%; + -ring: 216 34% 17%; + -radius: 0.5rem; } - } +} @layer base { * { From 1bd3303c52bcf8befd7da89abdb396ba39b14b8b Mon Sep 17 00:00:00 2001 From: Cristhian Zanforlin Lousa Date: Fri, 2 Jun 2023 17:49:53 -0300 Subject: [PATCH 27/43] Change the delay to open to instant --- .../GenericNode/components/parameterComponent/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index d6b8781a4..dc77c2877 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -122,7 +122,7 @@ export default function ParameterComponent({ <> ) : ( From eeee895f6ad3c9cb3d73b54097568ee39bda636c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas 
Almeida Date: Sun, 4 Jun 2023 21:42:14 -0300 Subject: [PATCH 28/43] =?UTF-8?q?=F0=9F=94=92=20chore(pyproject.toml):=20u?= =?UTF-8?q?pdate=20fastapi=20dependency=20to=20version=200.96.0=20FastAPI?= =?UTF-8?q?=20dependency=20has=20been=20updated=20to=20version=200.96.0=20?= =?UTF-8?q?to=20ensure=20that=20the=20application=20is=20using=20the=20lat?= =?UTF-8?q?est=20version=20of=20the=20framework.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 69619220a..58498c4d1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1265,14 +1265,14 @@ importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""} [[package]] name = "fastapi" -version = "0.95.2" +version = "0.96.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "fastapi-0.95.2-py3-none-any.whl", hash = "sha256:d374dbc4ef2ad9b803899bd3360d34c534adc574546e25314ab72c0c4411749f"}, - {file = "fastapi-0.95.2.tar.gz", hash = "sha256:4d9d3e8c71c73f11874bcf5e33626258d143252e329a01002f767306c64fb982"}, + {file = "fastapi-0.96.0-py3-none-any.whl", hash = "sha256:b8e11fe81e81eab4e1504209917338e0b80f783878a42c2b99467e5e1019a1e9"}, + {file = "fastapi-0.96.0.tar.gz", hash = "sha256:71232d47c2787446991c81c41c249f8a16238d52d779c0e6b43927d3773dbe3c"}, ] [package.dependencies] @@ -6234,4 +6234,4 @@ deploy = ["langchain-serve"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "9dbfdba31c45b1d79c6b487cb4e5e9cb43252e2f2f52fe625648162bf4cd4d9a" +content-hash = "4cac7dea0c1222711ba7eed82d5716d5e361d454edb6e0299b387fbb115d2c3d" diff --git a/pyproject.toml b/pyproject.toml index 780aa1b90..cde7ddca5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ langflow = 
"langflow.__main__:main" [tool.poetry.dependencies] python = ">=3.9,<3.12" -fastapi = "^0.95.0" +fastapi = "^0.96.0" uvicorn = "^0.20.0" beautifulsoup4 = "^4.11.2" google-search-results = "^2.4.1" From 5b28bbb7953f96af64f4b7c5fd020f1923956343 Mon Sep 17 00:00:00 2001 From: Jacob Lee Date: Mon, 5 Jun 2023 08:56:44 -0500 Subject: [PATCH 29/43] Initial support for Azure LLM nodes. There are still some rough edges due to underlying langchain and openai API limitations, e.g. hwchase17/langchain#3769 and openai/openai-python#411. Notably, you can't use the Azure and non-Azure node types in the same server, since there's global openai configuration needed to choose between the two. So it's probably best to still leave the Azure node types commented out in the default config. But with this PR, if you uncomment those nodes and start the server with OPENAI_API_TYPE=azure, you will have working Azure nodes. --- src/backend/langflow/config.yaml | 1 + .../langflow/interface/custom_lists.py | 3 +- .../langflow/template/frontend_node/llms.py | 22 +++++- tests/test_llms_template.py | 74 +++++++++++++++++++ 4 files changed, 98 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 963c29549..effeeb001 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -51,6 +51,7 @@ embeddings: llms: - OpenAI # - AzureOpenAI + # - AzureChatOpenAI - ChatOpenAI - LlamaCpp - CTransformers diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py index a944363ae..34bc0103e 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/langflow/interface/custom_lists.py @@ -11,14 +11,15 @@ from langchain import ( text_splitter, ) from langchain.agents import agent_toolkits +from langchain.chat_models import AzureChatOpenAI, ChatOpenAI from langchain.chat_models import ChatAnthropic -from langchain.chat_models import ChatOpenAI from 
langflow.interface.importing.utils import import_class ## LLMs llm_type_to_cls_dict = llms.type_to_cls_dict llm_type_to_cls_dict["anthropic-chat"] = ChatAnthropic # type: ignore +llm_type_to_cls_dict["azure-chat"] = AzureChatOpenAI # type: ignore llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore ## Chains diff --git a/src/backend/langflow/template/frontend_node/llms.py b/src/backend/langflow/template/frontend_node/llms.py index 272770e2e..39e82422f 100644 --- a/src/backend/langflow/template/frontend_node/llms.py +++ b/src/backend/langflow/template/frontend_node/llms.py @@ -12,6 +12,18 @@ class LLMFrontendNode(FrontendNode): field.name.title().replace("Openai", "OpenAI").replace("_", " ") ).replace("Api", "API") + @staticmethod + def format_azure_field(field: TemplateField): + if field.name == "model_name": + field.show = False # Azure uses deployment_name instead of model_name. + if field.name == "openai_api_type": + field.show = False + field.password = False + field.value = "azure" + if field.name == "openai_api_version": + field.password = False + field.value = "2023-03-15-preview" + @staticmethod def format_field(field: TemplateField, name: Optional[str] = None) -> None: display_names_dict = { @@ -43,8 +55,16 @@ class LLMFrontendNode(FrontendNode): field.field_type = "code" field.advanced = True field.show = True - elif field.name in ["model_name", "temperature", "model_file", "model_type"]: + elif field.name in [ + "model_name", + "temperature", + "model_file", + "model_type", + "deployment_name", + ]: field.advanced = False field.show = True LLMFrontendNode.format_openai_field(field) + if "azure" in name.lower(): + LLMFrontendNode.format_azure_field(field) diff --git a/tests/test_llms_template.py b/tests/test_llms_template.py index ccf2f6388..f54b452f1 100644 --- a/tests/test_llms_template.py +++ b/tests/test_llms_template.py @@ -482,3 +482,77 @@ def test_chat_open_ai(client: TestClient): "ChatOpenAI", "BaseLanguageModel", } + + +def 
test_azure_open_ai(client: TestClient): + response = client.get("/all") + assert response.status_code == 200 + json_response = response.json() + language_models = json_response["llms"] + + model = language_models["AzureOpenAI"] + template = model["template"] + + assert template["model_name"].show is False + assert template["deployment_name"] == { + "required": False, + "placeholder": "", + "show": True, + "multiline": False, + "value": "", + "password": False, + "name": "deployment_name", + "advanced": False, + "type": "str", + "list": False, + } + + +def test_azure_chat_open_ai(client: TestClient): + response = client.get("/all") + assert response.status_code == 200 + json_response = response.json() + language_models = json_response["llms"] + + model = language_models["AzureChatOpenAI"] + template = model["template"] + + assert template["model_name"].show is False + assert template["deployment_name"] == { + "required": False, + "placeholder": "", + "show": True, + "multiline": False, + "value": "", + "password": False, + "name": "deployment_name", + "advanced": False, + "type": "str", + "list": False, + } + assert template["openai_api_type"] == { + "required": False, + "placeholder": "", + "show": False, + "multiline": False, + "value": "azure", + "password": False, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": False, + "type": "str", + "list": False, + } + assert template["openai_api_version"] == { + "required": False, + "placeholder": "", + "show": True, + "multiline": False, + "value": "2023-03-15-preview", + "password": False, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": False, + "type": "str", + "list": False, + } From 5a103f0f6f0b5dedb7dd38b81d65e84d011c10ce Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 5 Jun 2023 13:48:17 -0300 Subject: [PATCH 30/43] =?UTF-8?q?=F0=9F=94=A8=20refactor(base.py):=20extra?= 
=?UTF-8?q?ct=20methods=20to=20handle=20model-specific=20and=20API=20key-s?= =?UTF-8?q?pecific=20field=20values=20This=20commit=20refactors=20the=20Fr?= =?UTF-8?q?ontendNode=20class=20by=20extracting=20two=20methods=20to=20han?= =?UTF-8?q?dle=20specific=20field=20values=20related=20to=20models=20and?= =?UTF-8?q?=20API=20keys.=20The=20=5Fhandle=5Fmodel=5Fspecific=5Ffield=5Fv?= =?UTF-8?q?alues=20method=20handles=20the=20options=20and=20is=5Flist=20pr?= =?UTF-8?q?operties=20for=20fields=20related=20to=20models,=20while=20the?= =?UTF-8?q?=20=5Fhandle=5Fapi=5Fkey=5Fspecific=5Ffield=5Fvalues=20method?= =?UTF-8?q?=20handles=20the=20display=5Fname=20and=20required=20properties?= =?UTF-8?q?=20for=20fields=20related=20to=20API=20keys.=20This=20improves?= =?UTF-8?q?=20the=20readability=20and=20maintainability=20of=20the=20code.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/template/frontend_node/base.py | 33 +++++++++++++------ 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py index a97c7b8b0..6d00cead0 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/langflow/template/frontend_node/base.py @@ -117,17 +117,30 @@ class FrontendNode(BaseModel): ) -> None: """Handles specific field values for certain fields.""" if key == "headers": - field.value = """{'Authorization': - 'Bearer '}""" - if name == "OpenAI" and key == "model_name": - field.options = constants.OPENAI_MODELS - field.is_list = True - elif name == "ChatOpenAI" and key == "model_name": - field.options = constants.CHAT_OPENAI_MODELS - field.is_list = True - elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name": - field.options = constants.ANTHROPIC_MODELS + field.value = """{'Authorization': 'Bearer '}""" + FrontendNode._handle_model_specific_field_values(field, key, name) + 
FrontendNode._handle_api_key_specific_field_values(field, key, name) + + @staticmethod + def _handle_model_specific_field_values( + field: TemplateField, key: str, name: Optional[str] = None + ) -> None: + """Handles specific field values related to models.""" + model_dict = { + "OpenAI": constants.OPENAI_MODELS, + "ChatOpenAI": constants.CHAT_OPENAI_MODELS, + "Anthropic": constants.ANTHROPIC_MODELS, + "ChatAnthropic": constants.ANTHROPIC_MODELS, + } + if name in model_dict and key == "model_name": + field.options = model_dict[name] field.is_list = True + + @staticmethod + def _handle_api_key_specific_field_values( + field: TemplateField, key: str, name: Optional[str] = None + ) -> None: + """Handles specific field values related to API keys.""" if "api_key" in key and "OpenAI" in str(name): field.display_name = "OpenAI API Key" field.required = False From 79c677fb090a975a15f42fdea21432206640dddf Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 09:58:30 -0300 Subject: [PATCH 31/43] =?UTF-8?q?=F0=9F=9A=80=20feat(pyproject.toml):=20ad?= =?UTF-8?q?d=20pytest=20configuration=20options=20Added=20pytest=20configu?= =?UTF-8?q?ration=20options=20to=20the=20pyproject.toml=20file.=20The=20mi?= =?UTF-8?q?nimum=20version=20of=20pytest=20is=20set=20to=206.0,=20the=20'-?= =?UTF-8?q?ra'=20option=20is=20added=20to=20addopts=20to=20show=20all=20te?= =?UTF-8?q?st=20results,=20testpaths=20are=20set=20to=20include=20both=20'?= =?UTF-8?q?tests'=20and=20'integration'=20directories,=20console=20output?= =?UTF-8?q?=20style=20is=20set=20to=20'progress',=20and=20DeprecationWarni?= =?UTF-8?q?ng=20is=20ignored.=20log=5Fcli=20is=20set=20to=20true=20to=20en?= =?UTF-8?q?able=20logging=20of=20pytest=20output=20to=20the=20console.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cde7ddca5..87843198b 100644 
--- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,15 @@ types-pillow = "^9.5.0.2" [tool.poetry.extras] deploy = ["langchain-serve"] +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = ["tests", "integration"] +console_output_style = "progress" +filterwarnings = ["ignore::DeprecationWarning"] +log_cli = true + + [tool.ruff] line-length = 120 From 3342e03a2cb75bcbcc36060b5204bee89ba773af Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 09:59:37 -0300 Subject: [PATCH 32/43] =?UTF-8?q?=F0=9F=94=80=20refactor(langflow):=20move?= =?UTF-8?q?=20routers=20to=20a=20single=20file=20and=20add=20health=20chec?= =?UTF-8?q?k=20endpoint=20The=20routers=20for=20the=20langflow=20API=20hav?= =?UTF-8?q?e=20been=20moved=20to=20a=20single=20file=20for=20better=20orga?= =?UTF-8?q?nization=20and=20maintainability.=20The=20routers=20have=20been?= =?UTF-8?q?=20imported=20and=20included=20in=20the=20main.py=20file=20usin?= =?UTF-8?q?g=20the=20new=20file.=20A=20new=20health=20check=20endpoint=20h?= =?UTF-8?q?as=20been=20added=20to=20the=20API=20to=20check=20the=20status?= =?UTF-8?q?=20of=20the=20application.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/__init__.py | 2 +- src/backend/langflow/main.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/__init__.py b/src/backend/langflow/__init__.py index 35fe814d2..17b1d940c 100644 --- a/src/backend/langflow/__init__.py +++ b/src/backend/langflow/__init__.py @@ -1,4 +1,4 @@ from langflow.cache import cache_manager -from langflow.interface.loading import load_flow_from_json +from langflow.processing.process import load_flow_from_json __all__ = ["load_flow_from_json", "cache_manager"] diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 56cc32e46..de39d8750 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -1,9 
+1,7 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware -from langflow.api.chat import router as chat_router -from langflow.api.endpoints import router as endpoints_router -from langflow.api.validate import router as validate_router +from langflow.api import router def create_app(): @@ -14,6 +12,10 @@ def create_app(): "*", ] + @app.get("/health") + def get_health(): + return {"status": "OK"} + app.add_middleware( CORSMiddleware, allow_origins=origins, @@ -22,9 +24,7 @@ def create_app(): allow_headers=["*"], ) - app.include_router(endpoints_router) - app.include_router(validate_router) - app.include_router(chat_router) + app.include_router(router) return app From ac42e8a66c1ce0c31c732cdcbb35980aecb96c69 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:00:15 -0300 Subject: [PATCH 33/43] chore: remove refactored files --- src/backend/langflow/api/base.py | 84 --------- src/backend/langflow/api/callback.py | 32 ---- src/backend/langflow/api/chat.py | 26 --- src/backend/langflow/api/chat_manager.py | 223 ----------------------- src/backend/langflow/api/endpoints.py | 47 ----- src/backend/langflow/api/schemas.py | 70 ------- src/backend/langflow/api/validate.py | 57 ------ 7 files changed, 539 deletions(-) delete mode 100644 src/backend/langflow/api/base.py delete mode 100644 src/backend/langflow/api/callback.py delete mode 100644 src/backend/langflow/api/chat.py delete mode 100644 src/backend/langflow/api/chat_manager.py delete mode 100644 src/backend/langflow/api/endpoints.py delete mode 100644 src/backend/langflow/api/schemas.py delete mode 100644 src/backend/langflow/api/validate.py diff --git a/src/backend/langflow/api/base.py b/src/backend/langflow/api/base.py deleted file mode 100644 index 8cddc52e4..000000000 --- a/src/backend/langflow/api/base.py +++ /dev/null @@ -1,84 +0,0 @@ -from pydantic import BaseModel, validator - -from langflow.graph.utils import extract_input_variables_from_prompt - - 
-class CacheResponse(BaseModel): - data: dict - - -class Code(BaseModel): - code: str - - -class Prompt(BaseModel): - template: str - - -# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}} -class CodeValidationResponse(BaseModel): - imports: dict - function: dict - - @validator("imports") - def validate_imports(cls, v): - return v or {"errors": []} - - @validator("function") - def validate_function(cls, v): - return v or {"errors": []} - - -class PromptValidationResponse(BaseModel): - input_variables: list - - -INVALID_CHARACTERS = { - " ", - ",", - ".", - ":", - ";", - "!", - "?", - "/", - "\\", - "(", - ")", - "[", - "]", - "{", - "}", -} - - -def validate_prompt(template: str): - input_variables = extract_input_variables_from_prompt(template) - - # Check if there are invalid characters in the input_variables - input_variables = check_input_variables(input_variables) - - return PromptValidationResponse(input_variables=input_variables) - - -def check_input_variables(input_variables: list): - invalid_chars = [] - fixed_variables = [] - for variable in input_variables: - new_var = variable - for char in INVALID_CHARACTERS: - if char in variable: - invalid_chars.append(char) - new_var = new_var.replace(char, "") - fixed_variables.append(new_var) - if new_var != variable: - input_variables.remove(variable) - input_variables.append(new_var) - # If any of the input_variables is not in the fixed_variables, then it means that - # there are invalid characters in the input_variables - if any(var not in fixed_variables for var in input_variables): - raise ValueError( - f"Invalid input variables: {input_variables}. Please, use something like {fixed_variables} instead." 
- ) - - return input_variables diff --git a/src/backend/langflow/api/callback.py b/src/backend/langflow/api/callback.py deleted file mode 100644 index d63e107c4..000000000 --- a/src/backend/langflow/api/callback.py +++ /dev/null @@ -1,32 +0,0 @@ -import asyncio -from typing import Any - -from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler - -from langflow.api.schemas import ChatResponse - - -# https://github.com/hwchase17/chat-langchain/blob/master/callback.py -class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): - """Callback handler for streaming LLM responses.""" - - def __init__(self, websocket): - self.websocket = websocket - - async def on_llm_new_token(self, token: str, **kwargs: Any) -> None: - resp = ChatResponse(message=token, type="stream", intermediate_steps="") - await self.websocket.send_json(resp.dict()) - - -class StreamingLLMCallbackHandler(BaseCallbackHandler): - """Callback handler for streaming LLM responses.""" - - def __init__(self, websocket): - self.websocket = websocket - - def on_llm_new_token(self, token: str, **kwargs: Any) -> None: - resp = ChatResponse(message=token, type="stream", intermediate_steps="") - - loop = asyncio.get_event_loop() - coroutine = self.websocket.send_json(resp.dict()) - asyncio.run_coroutine_threadsafe(coroutine, loop) diff --git a/src/backend/langflow/api/chat.py b/src/backend/langflow/api/chat.py deleted file mode 100644 index 4afa6c22f..000000000 --- a/src/backend/langflow/api/chat.py +++ /dev/null @@ -1,26 +0,0 @@ -from fastapi import ( - APIRouter, - WebSocket, - WebSocketDisconnect, - WebSocketException, - status, -) - -from langflow.api.chat_manager import ChatManager -from langflow.utils.logger import logger - -router = APIRouter() -chat_manager = ChatManager() - - -@router.websocket("/chat/{client_id}") -async def websocket_endpoint(client_id: str, websocket: WebSocket): - """Websocket endpoint for chat.""" - try: - await chat_manager.handle_websocket(client_id, 
websocket) - except WebSocketException as exc: - logger.error(exc) - await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc)) - except WebSocketDisconnect as exc: - logger.error(exc) - await websocket.close(code=status.WS_1000_NORMAL_CLOSURE, reason=str(exc)) diff --git a/src/backend/langflow/api/chat_manager.py b/src/backend/langflow/api/chat_manager.py deleted file mode 100644 index 8b1c7a621..000000000 --- a/src/backend/langflow/api/chat_manager.py +++ /dev/null @@ -1,223 +0,0 @@ -import asyncio -import json -from collections import defaultdict -from typing import Dict, List - -from fastapi import WebSocket, status - -from langflow.api.schemas import ChatMessage, ChatResponse, FileResponse -from langflow.cache import cache_manager -from langflow.cache.manager import Subject -from langflow.interface.run import ( - get_result_and_steps, - load_or_build_langchain_object, -) -from langflow.interface.utils import pil_to_base64, try_setting_streaming_options -from langflow.utils.logger import logger - - -class ChatHistory(Subject): - def __init__(self): - super().__init__() - self.history: Dict[str, List[ChatMessage]] = defaultdict(list) - - def add_message(self, client_id: str, message: ChatMessage): - """Add a message to the chat history.""" - - self.history[client_id].append(message) - - if not isinstance(message, FileResponse): - self.notify() - - def get_history(self, client_id: str, filter_messages=True) -> List[ChatMessage]: - """Get the chat history for a client.""" - if history := self.history.get(client_id, []): - if filter_messages: - return [msg for msg in history if msg.type not in ["start", "stream"]] - return history - else: - return [] - - def empty_history(self, client_id: str): - """Empty the chat history for a client.""" - self.history[client_id] = [] - - -class ChatManager: - def __init__(self): - self.active_connections: Dict[str, WebSocket] = {} - self.chat_history = ChatHistory() - self.cache_manager = cache_manager - 
self.cache_manager.attach(self.update) - - def on_chat_history_update(self): - """Send the last chat message to the client.""" - client_id = self.cache_manager.current_client_id - if client_id in self.active_connections: - chat_response = self.chat_history.get_history( - client_id, filter_messages=False - )[-1] - if chat_response.is_bot: - # Process FileResponse - if isinstance(chat_response, FileResponse): - # If data_type is pandas, convert to csv - if chat_response.data_type == "pandas": - chat_response.data = chat_response.data.to_csv() - elif chat_response.data_type == "image": - # Base64 encode the image - chat_response.data = pil_to_base64(chat_response.data) - # get event loop - loop = asyncio.get_event_loop() - - coroutine = self.send_json(client_id, chat_response) - asyncio.run_coroutine_threadsafe(coroutine, loop) - - def update(self): - if self.cache_manager.current_client_id in self.active_connections: - self.last_cached_object_dict = self.cache_manager.get_last() - # Add a new ChatResponse with the data - chat_response = FileResponse( - message=None, - type="file", - data=self.last_cached_object_dict["obj"], - data_type=self.last_cached_object_dict["type"], - ) - - self.chat_history.add_message( - self.cache_manager.current_client_id, chat_response - ) - - async def connect(self, client_id: str, websocket: WebSocket): - await websocket.accept() - self.active_connections[client_id] = websocket - - def disconnect(self, client_id: str): - self.active_connections.pop(client_id, None) - - async def send_message(self, client_id: str, message: str): - websocket = self.active_connections[client_id] - await websocket.send_text(message) - - async def send_json(self, client_id: str, message: ChatMessage): - websocket = self.active_connections[client_id] - await websocket.send_json(message.dict()) - - async def process_message(self, client_id: str, payload: Dict): - # Process the graph data and chat message - chat_message = payload.pop("message", "") - 
chat_message = ChatMessage(message=chat_message) - self.chat_history.add_message(client_id, chat_message) - - graph_data = payload - start_resp = ChatResponse(message=None, type="start", intermediate_steps="") - await self.send_json(client_id, start_resp) - - is_first_message = len(self.chat_history.get_history(client_id=client_id)) <= 1 - # Generate result and thought - try: - logger.debug("Generating result and thought") - - result, intermediate_steps = await process_graph( - graph_data=graph_data, - is_first_message=is_first_message, - chat_message=chat_message, - websocket=self.active_connections[client_id], - ) - except Exception as e: - # Log stack trace - logger.exception(e) - self.chat_history.empty_history(client_id) - raise e - # Send a response back to the frontend, if needed - intermediate_steps = intermediate_steps or "" - history = self.chat_history.get_history(client_id, filter_messages=False) - file_responses = [] - if history: - # Iterate backwards through the history - for msg in reversed(history): - if isinstance(msg, FileResponse): - if msg.data_type == "image": - # Base64 encode the image - msg.data = pil_to_base64(msg.data) - file_responses.append(msg) - if msg.type == "start": - break - - response = ChatResponse( - message=result, - intermediate_steps=intermediate_steps.strip(), - type="end", - files=file_responses, - ) - await self.send_json(client_id, response) - self.chat_history.add_message(client_id, response) - - async def handle_websocket(self, client_id: str, websocket: WebSocket): - await self.connect(client_id, websocket) - - try: - chat_history = self.chat_history.get_history(client_id) - # iterate and make BaseModel into dict - chat_history = [chat.dict() for chat in chat_history] - await websocket.send_json(chat_history) - - while True: - json_payload = await websocket.receive_json() - try: - payload = json.loads(json_payload) - except TypeError: - payload = json_payload - if "clear_history" in payload: - 
self.chat_history.history[client_id] = [] - continue - - with self.cache_manager.set_client_id(client_id): - await self.process_message(client_id, payload) - - except Exception as e: - # Handle any exceptions that might occur - logger.exception(e) - # send a message to the client - await self.active_connections[client_id].close( - code=status.WS_1011_INTERNAL_ERROR, reason=str(e)[:120] - ) - self.disconnect(client_id) - finally: - try: - connection = self.active_connections.get(client_id) - if connection: - await connection.close(code=1000, reason="Client disconnected") - self.disconnect(client_id) - except Exception as e: - logger.exception(e) - self.disconnect(client_id) - - -async def process_graph( - graph_data: Dict, - is_first_message: bool, - chat_message: ChatMessage, - websocket: WebSocket, -): - langchain_object = load_or_build_langchain_object(graph_data, is_first_message) - langchain_object = try_setting_streaming_options(langchain_object, websocket) - logger.debug("Loaded langchain object") - - if langchain_object is None: - # Raise user facing error - raise ValueError( - "There was an error loading the langchain_object. Please, check all the nodes and try again." 
- ) - - # Generate result and thought - try: - logger.debug("Generating result and thought") - result, intermediate_steps = await get_result_and_steps( - langchain_object, chat_message.message or "", websocket=websocket - ) - logger.debug("Generated result and intermediate_steps") - return result, intermediate_steps - except Exception as e: - # Log stack trace - logger.exception(e) - raise e diff --git a/src/backend/langflow/api/endpoints.py b/src/backend/langflow/api/endpoints.py deleted file mode 100644 index 021a81ca8..000000000 --- a/src/backend/langflow/api/endpoints.py +++ /dev/null @@ -1,47 +0,0 @@ -import logging -from importlib.metadata import version - -from fastapi import APIRouter, HTTPException - -from langflow.api.schemas import ( - ExportedFlow, - GraphData, - PredictRequest, - PredictResponse, -) -from langflow.interface.run import process_graph_cached -from langflow.interface.types import build_langchain_types_dict - -# build router -router = APIRouter() -logger = logging.getLogger(__name__) - - -@router.get("/all") -def get_all(): - return build_langchain_types_dict() - - -@router.post("/predict", response_model=PredictResponse) -async def get_load(predict_request: PredictRequest): - try: - exported_flow: ExportedFlow = predict_request.exported_flow - graph_data: GraphData = exported_flow.data - data = graph_data.dict() - response = process_graph_cached(data, predict_request.message) - return PredictResponse(result=response.get("result", "")) - except Exception as e: - # Log stack trace - logger.exception(e) - raise HTTPException(status_code=500, detail=str(e)) from e - - -# get endpoint to return version of langflow -@router.get("/version") -def get_version(): - return {"version": version("langflow")} - - -@router.get("/health") -def get_health(): - return {"status": "OK"} diff --git a/src/backend/langflow/api/schemas.py b/src/backend/langflow/api/schemas.py deleted file mode 100644 index f73b0642d..000000000 --- 
a/src/backend/langflow/api/schemas.py +++ /dev/null @@ -1,70 +0,0 @@ -from typing import Any, Dict, List, Union - -from pydantic import BaseModel, validator - - -class GraphData(BaseModel): - """Data inside the exported flow.""" - - nodes: List[Dict[str, Any]] - edges: List[Dict[str, Any]] - - -class ExportedFlow(BaseModel): - """Exported flow from LangFlow.""" - - description: str - name: str - id: str - data: GraphData - - -class PredictRequest(BaseModel): - """Predict request schema.""" - - message: str - exported_flow: ExportedFlow - - -class PredictResponse(BaseModel): - """Predict response schema.""" - - result: str - - -class ChatMessage(BaseModel): - """Chat message schema.""" - - is_bot: bool = False - message: Union[str, None] = None - type: str = "human" - - -class ChatResponse(ChatMessage): - """Chat response schema.""" - - intermediate_steps: str - type: str - is_bot: bool = True - files: list = [] - - @validator("type") - def validate_message_type(cls, v): - if v not in ["start", "stream", "end", "error", "info", "file"]: - raise ValueError("type must be start, stream, end, error, info, or file") - return v - - -class FileResponse(ChatMessage): - """File response schema.""" - - data: Any - data_type: str - type: str = "file" - is_bot: bool = True - - @validator("data_type") - def validate_data_type(cls, v): - if v not in ["image", "csv"]: - raise ValueError("data_type must be image or csv") - return v diff --git a/src/backend/langflow/api/validate.py b/src/backend/langflow/api/validate.py deleted file mode 100644 index e90e554f0..000000000 --- a/src/backend/langflow/api/validate.py +++ /dev/null @@ -1,57 +0,0 @@ -import json - -from fastapi import APIRouter, HTTPException - -from langflow.api.base import ( - Code, - CodeValidationResponse, - Prompt, - PromptValidationResponse, - validate_prompt, -) -from langflow.graph.vertex.types import VectorStoreVertex -from langflow.interface.run import build_graph -from langflow.utils.logger import logger -from 
langflow.utils.validate import validate_code - -# build router -router = APIRouter(prefix="/validate", tags=["validate"]) - - -@router.post("/code", status_code=200, response_model=CodeValidationResponse) -def post_validate_code(code: Code): - try: - errors = validate_code(code.code) - return CodeValidationResponse( - imports=errors.get("imports", {}), - function=errors.get("function", {}), - ) - except Exception as e: - return HTTPException(status_code=500, detail=str(e)) - - -@router.post("/prompt", status_code=200, response_model=PromptValidationResponse) -def post_validate_prompt(prompt: Prompt): - try: - return validate_prompt(prompt.template) - except Exception as e: - logger.exception(e) - raise HTTPException(status_code=500, detail=str(e)) from e - - -# validate node -@router.post("/node/{node_id}", status_code=200) -def post_validate_node(node_id: str, data: dict): - try: - # build graph - graph = build_graph(data) - # validate node - node = graph.get_node(node_id) - if node is None: - raise ValueError(f"Node {node_id} not found") - if not isinstance(node, VectorStoreVertex): - node.build() - return json.dumps({"valid": True, "params": str(node._built_object_repr())}) - except Exception as e: - logger.exception(e) - return json.dumps({"valid": False, "params": str(e)}) From bdbb4a81279f535538cc55128b358674c5ec7a45 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:00:38 -0300 Subject: [PATCH 34/43] =?UTF-8?q?=F0=9F=9A=80=20feat(api):=20add=20version?= =?UTF-8?q?ing=20to=20the=20API=20and=20restructure=20the=20router=20The?= =?UTF-8?q?=20API=20now=20has=20versioning,=20with=20the=20prefix=20"/api/?= =?UTF-8?q?v1".=20The=20router=20has=20been=20restructured=20to=20include?= =?UTF-8?q?=20the=20chat,=20endpoints,=20and=20validate=20routers.=20This?= =?UTF-8?q?=20improves=20the=20organization=20of=20the=20code=20and=20make?= =?UTF-8?q?s=20it=20easier=20to=20add=20new=20routers=20in=20the=20future.?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/__init__.py | 3 +++ src/backend/langflow/api/router.py | 8 ++++++++ src/backend/langflow/api/v1/__init__.py | 5 +++++ 3 files changed, 16 insertions(+) create mode 100644 src/backend/langflow/api/router.py create mode 100644 src/backend/langflow/api/v1/__init__.py diff --git a/src/backend/langflow/api/__init__.py b/src/backend/langflow/api/__init__.py index e69de29bb..f887c47e1 100644 --- a/src/backend/langflow/api/__init__.py +++ b/src/backend/langflow/api/__init__.py @@ -0,0 +1,3 @@ +from langflow.api.router import router + +__all__ = ["router"] diff --git a/src/backend/langflow/api/router.py b/src/backend/langflow/api/router.py new file mode 100644 index 000000000..23b5aa1c5 --- /dev/null +++ b/src/backend/langflow/api/router.py @@ -0,0 +1,8 @@ +# Router for base api +from fastapi import APIRouter +from langflow.api.v1 import chat_router, endpoints_router, validate_router + +router = APIRouter(prefix="/api/v1", tags=["api"]) +router.include_router(chat_router) +router.include_router(endpoints_router) +router.include_router(validate_router) diff --git a/src/backend/langflow/api/v1/__init__.py b/src/backend/langflow/api/v1/__init__.py new file mode 100644 index 000000000..d835b4535 --- /dev/null +++ b/src/backend/langflow/api/v1/__init__.py @@ -0,0 +1,5 @@ +from langflow.api.v1.endpoints import router as endpoints_router +from langflow.api.v1.validate import router as validate_router +from langflow.api.v1.chat import router as chat_router + +__all__ = ["chat_router", "endpoints_router", "validate_router"] From 3e5878ddc282557315f532605357ff782af13d64 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:01:18 -0300 Subject: [PATCH 35/43] =?UTF-8?q?=F0=9F=8E=89=20feat(langflow):=20add=20ne?= =?UTF-8?q?w=20files=20base.py=20and=20callback.py=20The=20base.py=20file?= 
=?UTF-8?q?=20contains=20the=20following=20classes=20and=20functions:=20-?= =?UTF-8?q?=20CacheResponse:=20a=20pydantic=20BaseModel=20that=20represent?= =?UTF-8?q?s=20a=20response=20containing=20a=20dictionary=20of=20data=20-?= =?UTF-8?q?=20Code:=20a=20pydantic=20BaseModel=20that=20represents=20a=20c?= =?UTF-8?q?ode=20string=20-=20Prompt:=20a=20pydantic=20BaseModel=20that=20?= =?UTF-8?q?represents=20a=20prompt=20template=20string=20-=20CodeValidatio?= =?UTF-8?q?nResponse:=20a=20pydantic=20BaseModel=20that=20represents=20a?= =?UTF-8?q?=20response=20containing=20the=20validation=20results=20of=20co?= =?UTF-8?q?de=20-=20PromptValidationResponse:=20a=20pydantic=20BaseModel?= =?UTF-8?q?=20that=20represents=20a=20response=20containing=20the=20valida?= =?UTF-8?q?tion=20results=20of=20a=20prompt=20-=20validate=5Fprompt:=20a?= =?UTF-8?q?=20function=20that=20validates=20a=20prompt=20template=20string?= =?UTF-8?q?=20and=20returns=20a=20PromptValidationResponse=20object=20-=20?= =?UTF-8?q?check=5Finput=5Fvariables:=20a=20function=20that=20checks=20if?= =?UTF-8?q?=20input=20variables=20contain=20invalid=20characters=20and=20r?= =?UTF-8?q?eturns=20a=20list=20of=20fixed=20input=20variables?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The callback.py file contains the following classes: - AsyncStreamingLLMCallbackHandler: an AsyncCallbackHandler that handles streaming LLM responses asynchronously - StreamingLLMCallbackHandler: a BaseCallbackHandler that handles streaming LLM responses These files were added to provide support for Langflow's backend API. 
--- src/backend/langflow/api/v1/base.py | 84 +++++++++++++++++++++++++ src/backend/langflow/api/v1/callback.py | 32 ++++++++++ 2 files changed, 116 insertions(+) create mode 100644 src/backend/langflow/api/v1/base.py create mode 100644 src/backend/langflow/api/v1/callback.py diff --git a/src/backend/langflow/api/v1/base.py b/src/backend/langflow/api/v1/base.py new file mode 100644 index 000000000..6941bedf3 --- /dev/null +++ b/src/backend/langflow/api/v1/base.py @@ -0,0 +1,84 @@ +from pydantic import BaseModel, validator + +from langflow.interface.utils import extract_input_variables_from_prompt + + +class CacheResponse(BaseModel): + data: dict + + +class Code(BaseModel): + code: str + + +class Prompt(BaseModel): + template: str + + +# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}} +class CodeValidationResponse(BaseModel): + imports: dict + function: dict + + @validator("imports") + def validate_imports(cls, v): + return v or {"errors": []} + + @validator("function") + def validate_function(cls, v): + return v or {"errors": []} + + +class PromptValidationResponse(BaseModel): + input_variables: list + + +INVALID_CHARACTERS = { + " ", + ",", + ".", + ":", + ";", + "!", + "?", + "/", + "\\", + "(", + ")", + "[", + "]", + "{", + "}", +} + + +def validate_prompt(template: str): + input_variables = extract_input_variables_from_prompt(template) + + # Check if there are invalid characters in the input_variables + input_variables = check_input_variables(input_variables) + + return PromptValidationResponse(input_variables=input_variables) + + +def check_input_variables(input_variables: list): + invalid_chars = [] + fixed_variables = [] + for variable in input_variables: + new_var = variable + for char in INVALID_CHARACTERS: + if char in variable: + invalid_chars.append(char) + new_var = new_var.replace(char, "") + fixed_variables.append(new_var) + if new_var != variable: + input_variables.remove(variable) + 
input_variables.append(new_var) + # If any of the input_variables is not in the fixed_variables, then it means that + # there are invalid characters in the input_variables + if any(var not in fixed_variables for var in input_variables): + raise ValueError( + f"Invalid input variables: {input_variables}. Please, use something like {fixed_variables} instead." + ) + + return input_variables diff --git a/src/backend/langflow/api/v1/callback.py b/src/backend/langflow/api/v1/callback.py new file mode 100644 index 000000000..b58393d7b --- /dev/null +++ b/src/backend/langflow/api/v1/callback.py @@ -0,0 +1,32 @@ +import asyncio +from typing import Any + +from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler + +from langflow.api.v1.schemas import ChatResponse + + +# https://github.com/hwchase17/chat-langchain/blob/master/callback.py +class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): + """Callback handler for streaming LLM responses.""" + + def __init__(self, websocket): + self.websocket = websocket + + async def on_llm_new_token(self, token: str, **kwargs: Any) -> None: + resp = ChatResponse(message=token, type="stream", intermediate_steps="") + await self.websocket.send_json(resp.dict()) + + +class StreamingLLMCallbackHandler(BaseCallbackHandler): + """Callback handler for streaming LLM responses.""" + + def __init__(self, websocket): + self.websocket = websocket + + def on_llm_new_token(self, token: str, **kwargs: Any) -> None: + resp = ChatResponse(message=token, type="stream", intermediate_steps="") + + loop = asyncio.get_event_loop() + coroutine = self.websocket.send_json(resp.dict()) + asyncio.run_coroutine_threadsafe(coroutine, loop) From 2bfe93e0b8e44ff82785889e7589928d2cb8799b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:01:44 -0300 Subject: [PATCH 36/43] =?UTF-8?q?=F0=9F=9A=80=20feat(langflow):=20add=20ne?= =?UTF-8?q?w=20API=20endpoints=20for=20chat,=20validation,=20and=20version?= 
=?UTF-8?q?=20This=20commit=20adds=20new=20API=20endpoints=20for=20chat,?= =?UTF-8?q?=20validation,=20and=20version.=20The=20chat=20endpoint=20is=20?= =?UTF-8?q?a=20websocket=20endpoint=20for=20chat.=20The=20validation=20end?= =?UTF-8?q?point=20has=20three=20sub-endpoints=20for=20validating=20code,?= =?UTF-8?q?=20prompt,=20and=20node.=20The=20version=20endpoint=20returns?= =?UTF-8?q?=20the=20version=20of=20LangFlow.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/chat.py | 26 +++++++++ src/backend/langflow/api/v1/endpoints.py | 44 +++++++++++++++ src/backend/langflow/api/v1/schemas.py | 70 ++++++++++++++++++++++++ src/backend/langflow/api/v1/validate.py | 57 +++++++++++++++++++ 4 files changed, 197 insertions(+) create mode 100644 src/backend/langflow/api/v1/chat.py create mode 100644 src/backend/langflow/api/v1/endpoints.py create mode 100644 src/backend/langflow/api/v1/schemas.py create mode 100644 src/backend/langflow/api/v1/validate.py diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py new file mode 100644 index 000000000..7df4c65ed --- /dev/null +++ b/src/backend/langflow/api/v1/chat.py @@ -0,0 +1,26 @@ +from fastapi import ( + APIRouter, + WebSocket, + WebSocketDisconnect, + WebSocketException, + status, +) + +from langflow.chat.manager import ChatManager +from langflow.utils.logger import logger + +router = APIRouter() +chat_manager = ChatManager() + + +@router.websocket("/chat/{client_id}") +async def websocket_endpoint(client_id: str, websocket: WebSocket): + """Websocket endpoint for chat.""" + try: + await chat_manager.handle_websocket(client_id, websocket) + except WebSocketException as exc: + logger.error(exc) + await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc)) + except WebSocketDisconnect as exc: + logger.error(exc) + await websocket.close(code=status.WS_1000_NORMAL_CLOSURE, reason=str(exc)) diff --git 
a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py new file mode 100644 index 000000000..1e9b0deb1 --- /dev/null +++ b/src/backend/langflow/api/v1/endpoints.py @@ -0,0 +1,44 @@ +import logging +from importlib.metadata import version + +from fastapi import APIRouter, HTTPException + +from langflow.api.v1.schemas import ( + ExportedFlow, + GraphData, + PredictRequest, + PredictResponse, +) + +from langflow.interface.types import build_langchain_types_dict + +# build router +router = APIRouter() +logger = logging.getLogger(__name__) + + +@router.get("/all") +def get_all(): + return build_langchain_types_dict() + + +@router.post("/predict", response_model=PredictResponse) +async def get_load(predict_request: PredictRequest): + try: + from langflow.processing.process import process_graph_cached + + exported_flow: ExportedFlow = predict_request.exported_flow + graph_data: GraphData = exported_flow.data + data = graph_data.dict() + response = process_graph_cached(data, predict_request.message) + return PredictResponse(result=response.get("result", "")) + except Exception as e: + # Log stack trace + logger.exception(e) + raise HTTPException(status_code=500, detail=str(e)) from e + + +# get endpoint to return version of langflow +@router.get("/version") +def get_version(): + return {"version": version("langflow")} diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py new file mode 100644 index 000000000..f73b0642d --- /dev/null +++ b/src/backend/langflow/api/v1/schemas.py @@ -0,0 +1,70 @@ +from typing import Any, Dict, List, Union + +from pydantic import BaseModel, validator + + +class GraphData(BaseModel): + """Data inside the exported flow.""" + + nodes: List[Dict[str, Any]] + edges: List[Dict[str, Any]] + + +class ExportedFlow(BaseModel): + """Exported flow from LangFlow.""" + + description: str + name: str + id: str + data: GraphData + + +class PredictRequest(BaseModel): + """Predict request 
schema.""" + + message: str + exported_flow: ExportedFlow + + +class PredictResponse(BaseModel): + """Predict response schema.""" + + result: str + + +class ChatMessage(BaseModel): + """Chat message schema.""" + + is_bot: bool = False + message: Union[str, None] = None + type: str = "human" + + +class ChatResponse(ChatMessage): + """Chat response schema.""" + + intermediate_steps: str + type: str + is_bot: bool = True + files: list = [] + + @validator("type") + def validate_message_type(cls, v): + if v not in ["start", "stream", "end", "error", "info", "file"]: + raise ValueError("type must be start, stream, end, error, info, or file") + return v + + +class FileResponse(ChatMessage): + """File response schema.""" + + data: Any + data_type: str + type: str = "file" + is_bot: bool = True + + @validator("data_type") + def validate_data_type(cls, v): + if v not in ["image", "csv"]: + raise ValueError("data_type must be image or csv") + return v diff --git a/src/backend/langflow/api/v1/validate.py b/src/backend/langflow/api/v1/validate.py new file mode 100644 index 000000000..009cb9a30 --- /dev/null +++ b/src/backend/langflow/api/v1/validate.py @@ -0,0 +1,57 @@ +import json + +from fastapi import APIRouter, HTTPException + +from langflow.api.v1.base import ( + Code, + CodeValidationResponse, + Prompt, + PromptValidationResponse, + validate_prompt, +) +from langflow.graph.vertex.types import VectorStoreVertex +from langflow.graph import Graph +from langflow.utils.logger import logger +from langflow.utils.validate import validate_code + +# build router +router = APIRouter(prefix="/validate", tags=["validate"]) + + +@router.post("/code", status_code=200, response_model=CodeValidationResponse) +def post_validate_code(code: Code): + try: + errors = validate_code(code.code) + return CodeValidationResponse( + imports=errors.get("imports", {}), + function=errors.get("function", {}), + ) + except Exception as e: + return HTTPException(status_code=500, detail=str(e)) + + 
+@router.post("/prompt", status_code=200, response_model=PromptValidationResponse) +def post_validate_prompt(prompt: Prompt): + try: + return validate_prompt(prompt.template) + except Exception as e: + logger.exception(e) + raise HTTPException(status_code=500, detail=str(e)) from e + + +# validate node +@router.post("/node/{node_id}", status_code=200) +def post_validate_node(node_id: str, data: dict): + try: + # build graph + graph = Graph.from_payload(data) + # validate node + node = graph.get_node(node_id) + if node is None: + raise ValueError(f"Node {node_id} not found") + if not isinstance(node, VectorStoreVertex): + node.build() + return json.dumps({"valid": True, "params": str(node._built_object_repr())}) + except Exception as e: + logger.exception(e) + return json.dumps({"valid": False, "params": str(e)}) From 7f4eea1e593f29438986b7d21568b06664bb5c13 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:02:21 -0300 Subject: [PATCH 37/43] =?UTF-8?q?=F0=9F=9A=80=20feat(chat):=20add=20ChatMa?= =?UTF-8?q?nager=20and=20ChatHistory=20classes=20to=20manage=20chat=20hist?= =?UTF-8?q?ory=20and=20active=20connections=20=E2=9C=A8=20feat(utils.py):?= =?UTF-8?q?=20add=20process=5Fgraph=20function=20to=20process=20graph=20da?= =?UTF-8?q?ta=20and=20generate=20result=20and=20thought=20The=20ChatManage?= =?UTF-8?q?r=20class=20manages=20active=20connections=20and=20chat=20histo?= =?UTF-8?q?ry.=20The=20ChatHistory=20class=20manages=20the=20chat=20histor?= =?UTF-8?q?y=20for=20a=20client.=20The=20process=5Fgraph=20function=20proc?= =?UTF-8?q?esses=20graph=20data=20and=20generates=20a=20result=20and=20tho?= =?UTF-8?q?ught.=20This=20function=20is=20used=20in=20the=20ChatManager=20?= =?UTF-8?q?class=20to=20generate=20a=20response=20back=20to=20the=20fronte?= =?UTF-8?q?nd.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/chat/__init__.py | 0 src/backend/langflow/chat/manager.py | 190 
++++++++++++++++++++++++++ src/backend/langflow/chat/utils.py | 41 ++++++ 3 files changed, 231 insertions(+) create mode 100644 src/backend/langflow/chat/__init__.py create mode 100644 src/backend/langflow/chat/manager.py create mode 100644 src/backend/langflow/chat/utils.py diff --git a/src/backend/langflow/chat/__init__.py b/src/backend/langflow/chat/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py new file mode 100644 index 000000000..d24057b68 --- /dev/null +++ b/src/backend/langflow/chat/manager.py @@ -0,0 +1,190 @@ +from collections import defaultdict +from fastapi import WebSocket, status +from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse +from langflow.cache import cache_manager +from langflow.cache.manager import Subject +from langflow.chat.utils import process_graph +from langflow.interface.utils import pil_to_base64 +from langflow.utils.logger import logger + + +import asyncio +import json +from typing import Dict, List + + +class ChatHistory(Subject): + def __init__(self): + super().__init__() + self.history: Dict[str, List[ChatMessage]] = defaultdict(list) + + def add_message(self, client_id: str, message: ChatMessage): + """Add a message to the chat history.""" + + self.history[client_id].append(message) + + if not isinstance(message, FileResponse): + self.notify() + + def get_history(self, client_id: str, filter_messages=True) -> List[ChatMessage]: + """Get the chat history for a client.""" + if history := self.history.get(client_id, []): + if filter_messages: + return [msg for msg in history if msg.type not in ["start", "stream"]] + return history + else: + return [] + + def empty_history(self, client_id: str): + """Empty the chat history for a client.""" + self.history[client_id] = [] + + +class ChatManager: + def __init__(self): + self.active_connections: Dict[str, WebSocket] = {} + self.chat_history = ChatHistory() + 
self.cache_manager = cache_manager + self.cache_manager.attach(self.update) + + def on_chat_history_update(self): + """Send the last chat message to the client.""" + client_id = self.cache_manager.current_client_id + if client_id in self.active_connections: + chat_response = self.chat_history.get_history( + client_id, filter_messages=False + )[-1] + if chat_response.is_bot: + # Process FileResponse + if isinstance(chat_response, FileResponse): + # If data_type is pandas, convert to csv + if chat_response.data_type == "pandas": + chat_response.data = chat_response.data.to_csv() + elif chat_response.data_type == "image": + # Base64 encode the image + chat_response.data = pil_to_base64(chat_response.data) + # get event loop + loop = asyncio.get_event_loop() + + coroutine = self.send_json(client_id, chat_response) + asyncio.run_coroutine_threadsafe(coroutine, loop) + + def update(self): + if self.cache_manager.current_client_id in self.active_connections: + self.last_cached_object_dict = self.cache_manager.get_last() + # Add a new ChatResponse with the data + chat_response = FileResponse( + message=None, + type="file", + data=self.last_cached_object_dict["obj"], + data_type=self.last_cached_object_dict["type"], + ) + + self.chat_history.add_message( + self.cache_manager.current_client_id, chat_response + ) + + async def connect(self, client_id: str, websocket: WebSocket): + await websocket.accept() + self.active_connections[client_id] = websocket + + def disconnect(self, client_id: str): + self.active_connections.pop(client_id, None) + + async def send_message(self, client_id: str, message: str): + websocket = self.active_connections[client_id] + await websocket.send_text(message) + + async def send_json(self, client_id: str, message: ChatMessage): + websocket = self.active_connections[client_id] + await websocket.send_json(message.dict()) + + async def process_message(self, client_id: str, payload: Dict): + # Process the graph data and chat message + chat_message = 
payload.pop("message", "") + chat_message = ChatMessage(message=chat_message) + self.chat_history.add_message(client_id, chat_message) + + graph_data = payload + start_resp = ChatResponse(message=None, type="start", intermediate_steps="") + await self.send_json(client_id, start_resp) + + is_first_message = len(self.chat_history.get_history(client_id=client_id)) <= 1 + # Generate result and thought + try: + logger.debug("Generating result and thought") + + result, intermediate_steps = await process_graph( + graph_data=graph_data, + is_first_message=is_first_message, + chat_message=chat_message, + websocket=self.active_connections[client_id], + ) + except Exception as e: + # Log stack trace + logger.exception(e) + self.chat_history.empty_history(client_id) + raise e + # Send a response back to the frontend, if needed + intermediate_steps = intermediate_steps or "" + history = self.chat_history.get_history(client_id, filter_messages=False) + file_responses = [] + if history: + # Iterate backwards through the history + for msg in reversed(history): + if isinstance(msg, FileResponse): + if msg.data_type == "image": + # Base64 encode the image + msg.data = pil_to_base64(msg.data) + file_responses.append(msg) + if msg.type == "start": + break + + response = ChatResponse( + message=result, + intermediate_steps=intermediate_steps.strip(), + type="end", + files=file_responses, + ) + await self.send_json(client_id, response) + self.chat_history.add_message(client_id, response) + + async def handle_websocket(self, client_id: str, websocket: WebSocket): + await self.connect(client_id, websocket) + + try: + chat_history = self.chat_history.get_history(client_id) + # iterate and make BaseModel into dict + chat_history = [chat.dict() for chat in chat_history] + await websocket.send_json(chat_history) + + while True: + json_payload = await websocket.receive_json() + try: + payload = json.loads(json_payload) + except TypeError: + payload = json_payload + if "clear_history" in 
payload: + self.chat_history.history[client_id] = [] + continue + + with self.cache_manager.set_client_id(client_id): + await self.process_message(client_id, payload) + + except Exception as e: + # Handle any exceptions that might occur + logger.exception(e) + # send a message to the client + await self.active_connections[client_id].close( + code=status.WS_1011_INTERNAL_ERROR, reason=str(e)[:120] + ) + self.disconnect(client_id) + finally: + try: + connection = self.active_connections.get(client_id) + if connection: + await connection.close(code=1000, reason="Client disconnected") + self.disconnect(client_id) + except Exception as e: + logger.exception(e) + self.disconnect(client_id) diff --git a/src/backend/langflow/chat/utils.py b/src/backend/langflow/chat/utils.py new file mode 100644 index 000000000..410a442be --- /dev/null +++ b/src/backend/langflow/chat/utils.py @@ -0,0 +1,41 @@ +from fastapi import WebSocket +from langflow.api.v1.schemas import ChatMessage +from langflow.processing.process import ( + load_or_build_langchain_object, +) +from langflow.processing.base import get_result_and_steps +from langflow.interface.utils import try_setting_streaming_options +from langflow.utils.logger import logger + + +from typing import Dict + + +async def process_graph( + graph_data: Dict, + is_first_message: bool, + chat_message: ChatMessage, + websocket: WebSocket, +): + langchain_object = load_or_build_langchain_object(graph_data, is_first_message) + langchain_object = try_setting_streaming_options(langchain_object, websocket) + logger.debug("Loaded langchain object") + + if langchain_object is None: + # Raise user facing error + raise ValueError( + "There was an error loading the langchain_object. Please, check all the nodes and try again." 
+ ) + + # Generate result and thought + try: + logger.debug("Generating result and thought") + result, intermediate_steps = await get_result_and_steps( + langchain_object, chat_message.message or "", websocket=websocket + ) + logger.debug("Generated result and intermediate_steps") + return result, intermediate_steps + except Exception as e: + # Log stack trace + logger.exception(e) + raise e From 3bfee4d4455af8ae18a947ca31785f7978e1384d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:05:01 -0300 Subject: [PATCH 38/43] =?UTF-8?q?=F0=9F=9A=80=20feat(graph):=20add=20from?= =?UTF-8?q?=5Fpayload=20class=20method=20to=20Graph=20class=20=F0=9F=9A=80?= =?UTF-8?q?=20feat(utils.py):=20import=20extract=5Finput=5Fvariables=5Ffro?= =?UTF-8?q?m=5Fprompt=20from=20langflow.interface.utils=20The=20`from=5Fpa?= =?UTF-8?q?yload`=20class=20method=20is=20added=20to=20the=20`Graph`=20cla?= =?UTF-8?q?ss=20to=20create=20a=20graph=20from=20a=20payload.=20This=20met?= =?UTF-8?q?hod=20takes=20a=20dictionary=20as=20input=20and=20returns=20a?= =?UTF-8?q?=20`Graph`=20object.=20The=20`extract=5Finput=5Fvariables=5Ffro?= =?UTF-8?q?m=5Fprompt`=20function=20is=20imported=20from=20`langflow.inter?= =?UTF-8?q?face.utils`=20to=20extract=20input=20variables=20from=20a=20pro?= =?UTF-8?q?mpt.=20This=20function=20is=20used=20in=20other=20parts=20of=20?= =?UTF-8?q?the=20codebase=20to=20extract=20input=20variables=20from=20prom?= =?UTF-8?q?pts.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/graph/graph/base.py | 21 +++++++++++++++++++++ src/backend/langflow/graph/graph/utils.py | 0 src/backend/langflow/graph/utils.py | 8 ++------ 3 files changed, 23 insertions(+), 6 deletions(-) create mode 100644 src/backend/langflow/graph/graph/utils.py diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 020f539ec..5fd00d09b 100644 --- 
a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -24,6 +24,27 @@ class Graph: self._edges = edges self._build_graph() + @classmethod + @classmethod + def from_payload(cls, payload: Dict) -> "Graph": + """ + Creates a graph from a payload. + + Args: + payload (Dict): The payload to create the graph from. + + Returns: + Graph: The created graph. + """ + if "data" in payload: + payload = payload["data"] + try: + nodes = payload["nodes"] + edges = payload["edges"] + return cls(nodes, edges) + except KeyError as exc: + raise ValueError("Invalid payload") from exc + def _build_graph(self) -> None: """Builds the graph from the nodes and edges.""" self.nodes = self._build_vertices() diff --git a/src/backend/langflow/graph/graph/utils.py b/src/backend/langflow/graph/graph/utils.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/graph/utils.py b/src/backend/langflow/graph/utils.py index e22b27cf5..b78b2f961 100644 --- a/src/backend/langflow/graph/utils.py +++ b/src/backend/langflow/graph/utils.py @@ -1,6 +1,7 @@ -import re from typing import Any, Union +from langflow.interface.utils import extract_input_variables_from_prompt + def validate_prompt(prompt: str): """Validate prompt.""" @@ -15,11 +16,6 @@ def fix_prompt(prompt: str): return prompt + " {input}" -def extract_input_variables_from_prompt(prompt: str) -> list[str]: - """Extract input variables from prompt.""" - return re.findall(r"{(.*?)}", prompt) - - def flatten_list(list_of_lists: list[Union[list, Any]]) -> list: """Flatten list of lists.""" new_list = [] From 228f938cd8371ab5ecd5aa2d5b68622ad40ab03b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:05:46 -0300 Subject: [PATCH 39/43] =?UTF-8?q?=F0=9F=94=A8=20refactor(types.py):=20move?= =?UTF-8?q?=20extract=5Finput=5Fvariables=5Ffrom=5Fprompt=20import=20to=20?= =?UTF-8?q?interface.utils=20module=20=F0=9F=94=A8=20refactor(custom.py,?= 
=?UTF-8?q?=20loading.py,=20prompts/custom.py,=20run.py):=20update=20impor?= =?UTF-8?q?t=20statements=20to=20use=20extract=5Finput=5Fvariables=5Ffrom?= =?UTF-8?q?=5Fprompt=20from=20interface.utils=20module=20=F0=9F=94=A8=20re?= =?UTF-8?q?factor(run.py):=20remove=20unused=20imports=20and=20functions?= =?UTF-8?q?=20=F0=9F=94=A8=20refactor(utils.py):=20add=20type=20hinting=20?= =?UTF-8?q?to=20extract=5Finput=5Fvariables=5Ffrom=5Fprompt=20function=20a?= =?UTF-8?q?nd=20remove=20unused=20imports=20The=20extract=5Finput=5Fvariab?= =?UTF-8?q?les=5Ffrom=5Fprompt=20function=20has=20been=20moved=20to=20the?= =?UTF-8?q?=20interface.utils=20module=20to=20improve=20code=20organizatio?= =?UTF-8?q?n.=20The=20import=20statements=20in=20the=20affected=20modules?= =?UTF-8?q?=20have=20been=20updated=20to=20reflect=20this=20change.=20Unus?= =?UTF-8?q?ed=20imports=20and=20functions=20have=20been=20removed=20from?= =?UTF-8?q?=20the=20run.py=20module.=20Type=20hinting=20has=20been=20added?= =?UTF-8?q?=20to=20the=20extract=5Finput=5Fvariables=5Ffrom=5Fprompt=20fun?= =?UTF-8?q?ction=20in=20the=20interface.utils=20module.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🚀 feat(processing): add processing module with get_result_and_steps and fix_memory_inputs functions The processing module was added to the project with two functions: get_result_and_steps and fix_memory_inputs. The get_result_and_steps function extracts the result and thought from a LangChain object and returns them. The fix_memory_inputs function checks if a LangChain object has a memory attribute and if that memory key exists in the object's input variables. If not, it gets a possible new memory key using the get_memory_key function and updates the memory keys using the update_memory_keys function. 
--- src/backend/langflow/graph/vertex/types.py | 3 +- .../langflow/interface/chains/custom.py | 2 +- src/backend/langflow/interface/loading.py | 33 --- .../langflow/interface/prompts/custom.py | 2 +- src/backend/langflow/interface/run.py | 191 +----------------- src/backend/langflow/interface/utils.py | 6 + src/backend/langflow/processing/__init__.py | 0 src/backend/langflow/processing/base.py | 55 +++++ src/backend/langflow/processing/process.py | 172 ++++++++++++++++ 9 files changed, 238 insertions(+), 226 deletions(-) create mode 100644 src/backend/langflow/processing/__init__.py create mode 100644 src/backend/langflow/processing/base.py create mode 100644 src/backend/langflow/processing/process.py diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index b81e72439..4eb20f416 100644 --- a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -1,7 +1,8 @@ from typing import Any, Dict, List, Optional, Union from langflow.graph.vertex.base import Vertex -from langflow.graph.utils import extract_input_variables_from_prompt, flatten_list +from langflow.graph.utils import flatten_list +from langflow.interface.utils import extract_input_variables_from_prompt class AgentVertex(Vertex): diff --git a/src/backend/langflow/interface/chains/custom.py b/src/backend/langflow/interface/chains/custom.py index cb76a53c8..ba4ba8b62 100644 --- a/src/backend/langflow/interface/chains/custom.py +++ b/src/backend/langflow/interface/chains/custom.py @@ -5,7 +5,7 @@ from langchain.memory.buffer import ConversationBufferMemory from langchain.schema import BaseMemory from pydantic import Field, root_validator -from langflow.graph.utils import extract_input_variables_from_prompt +from langflow.interface.utils import extract_input_variables_from_prompt DEFAULT_SUFFIX = """" Current conversation: diff --git a/src/backend/langflow/interface/loading.py b/src/backend/langflow/interface/loading.py index 
16a7b186c..eb4623f5a 100644 --- a/src/backend/langflow/interface/loading.py +++ b/src/backend/langflow/interface/loading.py @@ -12,7 +12,6 @@ from langchain.agents.load_tools import ( _LLM_TOOLS, ) from langchain.agents.loading import load_agent_from_config -from langflow.graph import Graph from langchain.agents.tools import Tool from langchain.base_language import BaseLanguageModel from langchain.callbacks.base import BaseCallbackManager @@ -22,7 +21,6 @@ from pydantic import ValidationError from langflow.interface.agents.custom import CUSTOM_AGENTS from langflow.interface.importing.utils import get_function, import_by_type -from langflow.interface.run import fix_memory_inputs from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.types import get_type_list from langflow.interface.utils import load_file_into_dict @@ -163,37 +161,6 @@ def instantiate_utility(node_type, class_object, params): return class_object(**params) -def load_flow_from_json(path: str, build=True): - """Load flow from json file""" - # This is done to avoid circular imports - - with open(path, "r", encoding="utf-8") as f: - flow_graph = json.load(f) - data_graph = flow_graph["data"] - nodes = data_graph["nodes"] - # Substitute ZeroShotPrompt with PromptTemplate - # nodes = replace_zero_shot_prompt_with_prompt_template(nodes) - # Add input variables - # nodes = payload.extract_input_variables(nodes) - - # Nodes, edges and root node - edges = data_graph["edges"] - graph = Graph(nodes, edges) - if build: - langchain_object = graph.build() - if hasattr(langchain_object, "verbose"): - langchain_object.verbose = True - - if hasattr(langchain_object, "return_intermediate_steps"): - # https://github.com/hwchase17/langchain/issues/2068 - # Deactivating until we have a frontend solution - # to display intermediate steps - langchain_object.return_intermediate_steps = False - fix_memory_inputs(langchain_object) - return langchain_object - return graph - - def 
replace_zero_shot_prompt_with_prompt_template(nodes): """Replace ZeroShotPrompt with PromptTemplate""" for node in nodes: diff --git a/src/backend/langflow/interface/prompts/custom.py b/src/backend/langflow/interface/prompts/custom.py index b1dbef370..286210271 100644 --- a/src/backend/langflow/interface/prompts/custom.py +++ b/src/backend/langflow/interface/prompts/custom.py @@ -3,7 +3,7 @@ from typing import Dict, List, Optional, Type from langchain.prompts import PromptTemplate from pydantic import root_validator -from langflow.graph.utils import extract_input_variables_from_prompt +from langflow.interface.utils import extract_input_variables_from_prompt # Steps to create a BaseCustomPrompt: # 1. Create a prompt template that endes with: diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index c2483416f..89f71fd8b 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -1,10 +1,3 @@ -import contextlib -import io -from typing import Any, Dict, List, Tuple - -from langchain.schema import AgentAction - -from langflow.api.callback import AsyncStreamingLLMCallbackHandler, StreamingLLMCallbackHandler # type: ignore from langflow.cache.base import compute_dict_hash, load_cache, memoize_dict from langflow.graph import Graph from langflow.utils.logger import logger @@ -24,15 +17,6 @@ def load_langchain_object(data_graph, is_first_message=False): return computed_hash, langchain_object -def load_or_build_langchain_object(data_graph, is_first_message=False): - """ - Load langchain object from cache if it exists, otherwise build it. 
- """ - if is_first_message: - build_langchain_object_with_caching.clear_cache() - return build_langchain_object_with_caching(data_graph) - - @memoize_dict(maxsize=10) def build_langchain_object_with_caching(data_graph): """ @@ -40,16 +24,10 @@ def build_langchain_object_with_caching(data_graph): """ logger.debug("Building langchain object") - graph = build_graph(data_graph) + graph = Graph.from_payload(data_graph) return graph.build() -def build_graph(data_graph): - nodes = data_graph["nodes"] - edges = data_graph["edges"] - return Graph(nodes, edges) - - def build_langchain_object(data_graph): """ Build langchain object from data_graph. @@ -66,29 +44,6 @@ def build_langchain_object(data_graph): return graph.build() -def process_graph_cached(data_graph: Dict[str, Any], message: str): - """ - Process graph by extracting input variables and replacing ZeroShotPrompt - with PromptTemplate,then run the graph and return the result and thought. - """ - # Load langchain object - is_first_message = len(data_graph.get("chatHistory", [])) == 0 - langchain_object = load_or_build_langchain_object(data_graph, is_first_message) - logger.debug("Loaded langchain object") - - if langchain_object is None: - # Raise user facing error - raise ValueError( - "There was an error loading the langchain_object. Please, check all the nodes and try again." - ) - - # Generate result and thought - logger.debug("Generating result and thought") - result, thought = get_result_and_thought(langchain_object, message) - logger.debug("Generated result and thought") - return {"result": str(result), "thought": thought.strip()} - - def get_memory_key(langchain_object): """ Given a LangChain object, this function retrieves the current memory key from the object's memory attribute. 
@@ -124,147 +79,3 @@ def update_memory_keys(langchain_object, possible_new_mem_key): langchain_object.memory.input_key = input_key langchain_object.memory.output_key = output_key langchain_object.memory.memory_key = possible_new_mem_key - - -def fix_memory_inputs(langchain_object): - """ - Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the - object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the - get_memory_key function and updates the memory keys using the update_memory_keys function. - """ - if hasattr(langchain_object, "memory") and langchain_object.memory is not None: - try: - if langchain_object.memory.memory_key in langchain_object.input_variables: - return - except AttributeError: - input_variables = ( - langchain_object.prompt.input_variables - if hasattr(langchain_object, "prompt") - else langchain_object.input_keys - ) - if langchain_object.memory.memory_key in input_variables: - return - - possible_new_mem_key = get_memory_key(langchain_object) - if possible_new_mem_key is not None: - update_memory_keys(langchain_object, possible_new_mem_key) - - -async def get_result_and_steps(langchain_object, message: str, **kwargs): - """Get result and thought from extracted json""" - - try: - if hasattr(langchain_object, "verbose"): - langchain_object.verbose = True - chat_input = None - memory_key = "" - if hasattr(langchain_object, "memory") and langchain_object.memory is not None: - memory_key = langchain_object.memory.memory_key - - if hasattr(langchain_object, "input_keys"): - for key in langchain_object.input_keys: - if key not in [memory_key, "chat_history"]: - chat_input = {key: message} - else: - chat_input = message # type: ignore - - if hasattr(langchain_object, "return_intermediate_steps"): - # https://github.com/hwchase17/langchain/issues/2068 - # Deactivating until we have a frontend solution - # to display intermediate steps - 
langchain_object.return_intermediate_steps = True - - fix_memory_inputs(langchain_object) - try: - async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)] - output = await langchain_object.acall(chat_input, callbacks=async_callbacks) - except Exception as exc: - # make the error message more informative - logger.debug(f"Error: {str(exc)}") - sync_callbacks = [StreamingLLMCallbackHandler(**kwargs)] - output = langchain_object(chat_input, callbacks=sync_callbacks) - - intermediate_steps = ( - output.get("intermediate_steps", []) if isinstance(output, dict) else [] - ) - - result = ( - output.get(langchain_object.output_keys[0]) - if isinstance(output, dict) - else output - ) - thought = format_actions(intermediate_steps) if intermediate_steps else "" - except Exception as exc: - raise ValueError(f"Error: {str(exc)}") from exc - return result, thought - - -def get_result_and_thought(langchain_object, message: str): - """Get result and thought from extracted json""" - try: - if hasattr(langchain_object, "verbose"): - langchain_object.verbose = True - chat_input = None - memory_key = "" - if hasattr(langchain_object, "memory") and langchain_object.memory is not None: - memory_key = langchain_object.memory.memory_key - - if hasattr(langchain_object, "input_keys"): - for key in langchain_object.input_keys: - if key not in [memory_key, "chat_history"]: - chat_input = {key: message} - else: - chat_input = message # type: ignore - - if hasattr(langchain_object, "return_intermediate_steps"): - # https://github.com/hwchase17/langchain/issues/2068 - # Deactivating until we have a frontend solution - # to display intermediate steps - langchain_object.return_intermediate_steps = False - - fix_memory_inputs(langchain_object) - - with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): - try: - # if hasattr(langchain_object, "acall"): - # output = await langchain_object.acall(chat_input) - # else: - output = langchain_object(chat_input) - except 
ValueError as exc: - # make the error message more informative - logger.debug(f"Error: {str(exc)}") - output = langchain_object.run(chat_input) - - intermediate_steps = ( - output.get("intermediate_steps", []) if isinstance(output, dict) else [] - ) - - result = ( - output.get(langchain_object.output_keys[0]) - if isinstance(output, dict) - else output - ) - if intermediate_steps: - thought = format_actions(intermediate_steps) - else: - thought = output_buffer.getvalue() - - except Exception as exc: - raise ValueError(f"Error: {str(exc)}") from exc - return result, thought - - -def format_actions(actions: List[Tuple[AgentAction, str]]) -> str: - """Format a list of (AgentAction, answer) tuples into a string.""" - output = [] - for action, answer in actions: - log = action.log - tool = action.tool - tool_input = action.tool_input - output.append(f"Log: {log}") - if "Action" not in log and "Action Input" not in log: - output.append(f"Tool: {tool}") - output.append(f"Tool Input: {tool_input}") - output.append(f"Answer: {answer}") - output.append("") # Add a blank line - return "\n".join(output) diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py index 2b7c5acd1..32c605654 100644 --- a/src/backend/langflow/interface/utils.py +++ b/src/backend/langflow/interface/utils.py @@ -2,6 +2,7 @@ import base64 import json import os from io import BytesIO +import re import yaml from langchain.base_language import BaseLanguageModel @@ -48,3 +49,8 @@ def try_setting_streaming_options(langchain_object, websocket): llm.streaming = True return langchain_object + + +def extract_input_variables_from_prompt(prompt: str) -> list[str]: + """Extract input variables from prompt.""" + return re.findall(r"{(.*?)}", prompt) diff --git a/src/backend/langflow/processing/__init__.py b/src/backend/langflow/processing/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/processing/base.py 
b/src/backend/langflow/processing/base.py new file mode 100644 index 000000000..97b0d5be0 --- /dev/null +++ b/src/backend/langflow/processing/base.py @@ -0,0 +1,55 @@ +from langflow.api.v1.callback import ( + AsyncStreamingLLMCallbackHandler, + StreamingLLMCallbackHandler, +) +from langflow.processing.process import fix_memory_inputs, format_actions +from langflow.utils.logger import logger + + +async def get_result_and_steps(langchain_object, message: str, **kwargs): + """Get result and thought from extracted json""" + + try: + if hasattr(langchain_object, "verbose"): + langchain_object.verbose = True + chat_input = None + memory_key = "" + if hasattr(langchain_object, "memory") and langchain_object.memory is not None: + memory_key = langchain_object.memory.memory_key + + if hasattr(langchain_object, "input_keys"): + for key in langchain_object.input_keys: + if key not in [memory_key, "chat_history"]: + chat_input = {key: message} + else: + chat_input = message # type: ignore + + if hasattr(langchain_object, "return_intermediate_steps"): + # https://github.com/hwchase17/langchain/issues/2068 + # Deactivating until we have a frontend solution + # to display intermediate steps + langchain_object.return_intermediate_steps = True + + fix_memory_inputs(langchain_object) + try: + async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)] + output = await langchain_object.acall(chat_input, callbacks=async_callbacks) + except Exception as exc: + # make the error message more informative + logger.debug(f"Error: {str(exc)}") + sync_callbacks = [StreamingLLMCallbackHandler(**kwargs)] + output = langchain_object(chat_input, callbacks=sync_callbacks) + + intermediate_steps = ( + output.get("intermediate_steps", []) if isinstance(output, dict) else [] + ) + + result = ( + output.get(langchain_object.output_keys[0]) + if isinstance(output, dict) + else output + ) + thought = format_actions(intermediate_steps) if intermediate_steps else "" + except Exception as exc: + raise 
ValueError(f"Error: {str(exc)}") from exc + return result, thought diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py new file mode 100644 index 000000000..3b8852e00 --- /dev/null +++ b/src/backend/langflow/processing/process.py @@ -0,0 +1,172 @@ +import contextlib +import io +from langchain.schema import AgentAction +import json +from langflow.interface.run import ( + build_langchain_object_with_caching, + get_memory_key, + update_memory_keys, +) +from langflow.utils.logger import logger +from langflow.graph import Graph + + +from typing import Any, Dict, List, Tuple + + +def fix_memory_inputs(langchain_object): + """ + Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the + object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the + get_memory_key function and updates the memory keys using the update_memory_keys function. + """ + if hasattr(langchain_object, "memory") and langchain_object.memory is not None: + try: + if langchain_object.memory.memory_key in langchain_object.input_variables: + return + except AttributeError: + input_variables = ( + langchain_object.prompt.input_variables + if hasattr(langchain_object, "prompt") + else langchain_object.input_keys + ) + if langchain_object.memory.memory_key in input_variables: + return + + possible_new_mem_key = get_memory_key(langchain_object) + if possible_new_mem_key is not None: + update_memory_keys(langchain_object, possible_new_mem_key) + + +def format_actions(actions: List[Tuple[AgentAction, str]]) -> str: + """Format a list of (AgentAction, answer) tuples into a string.""" + output = [] + for action, answer in actions: + log = action.log + tool = action.tool + tool_input = action.tool_input + output.append(f"Log: {log}") + if "Action" not in log and "Action Input" not in log: + output.append(f"Tool: {tool}") + output.append(f"Tool Input: {tool_input}") + 
output.append(f"Answer: {answer}") + output.append("") # Add a blank line + return "\n".join(output) + + +def get_result_and_thought(langchain_object, message: str): + """Get result and thought from extracted json""" + try: + if hasattr(langchain_object, "verbose"): + langchain_object.verbose = True + chat_input = None + memory_key = "" + if hasattr(langchain_object, "memory") and langchain_object.memory is not None: + memory_key = langchain_object.memory.memory_key + + if hasattr(langchain_object, "input_keys"): + for key in langchain_object.input_keys: + if key not in [memory_key, "chat_history"]: + chat_input = {key: message} + else: + chat_input = message # type: ignore + + if hasattr(langchain_object, "return_intermediate_steps"): + # https://github.com/hwchase17/langchain/issues/2068 + # Deactivating until we have a frontend solution + # to display intermediate steps + langchain_object.return_intermediate_steps = False + + fix_memory_inputs(langchain_object) + + with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): + try: + # if hasattr(langchain_object, "acall"): + # output = await langchain_object.acall(chat_input) + # else: + output = langchain_object(chat_input) + except ValueError as exc: + # make the error message more informative + logger.debug(f"Error: {str(exc)}") + output = langchain_object.run(chat_input) + + intermediate_steps = ( + output.get("intermediate_steps", []) if isinstance(output, dict) else [] + ) + + result = ( + output.get(langchain_object.output_keys[0]) + if isinstance(output, dict) + else output + ) + if intermediate_steps: + thought = format_actions(intermediate_steps) + else: + thought = output_buffer.getvalue() + + except Exception as exc: + raise ValueError(f"Error: {str(exc)}") from exc + return result, thought + + +def load_or_build_langchain_object(data_graph, is_first_message=False): + """ + Load langchain object from cache if it exists, otherwise build it. 
+ """ + if is_first_message: + build_langchain_object_with_caching.clear_cache() + return build_langchain_object_with_caching(data_graph) + + +def process_graph_cached(data_graph: Dict[str, Any], message: str): + """ + Process graph by extracting input variables and replacing ZeroShotPrompt + with PromptTemplate,then run the graph and return the result and thought. + """ + # Load langchain object + is_first_message = len(data_graph.get("chatHistory", [])) == 0 + langchain_object = load_or_build_langchain_object(data_graph, is_first_message) + logger.debug("Loaded langchain object") + + if langchain_object is None: + # Raise user facing error + raise ValueError( + "There was an error loading the langchain_object. Please, check all the nodes and try again." + ) + + # Generate result and thought + logger.debug("Generating result and thought") + result, thought = get_result_and_thought(langchain_object, message) + logger.debug("Generated result and thought") + return {"result": str(result), "thought": thought.strip()} + + +def load_flow_from_json(path: str, build=True): + """Load flow from json file""" + # This is done to avoid circular imports + + with open(path, "r", encoding="utf-8") as f: + flow_graph = json.load(f) + data_graph = flow_graph["data"] + nodes = data_graph["nodes"] + # Substitute ZeroShotPrompt with PromptTemplate + # nodes = replace_zero_shot_prompt_with_prompt_template(nodes) + # Add input variables + # nodes = payload.extract_input_variables(nodes) + + # Nodes, edges and root node + edges = data_graph["edges"] + graph = Graph(nodes, edges) + if build: + langchain_object = graph.build() + if hasattr(langchain_object, "verbose"): + langchain_object.verbose = True + + if hasattr(langchain_object, "return_intermediate_steps"): + # https://github.com/hwchase17/langchain/issues/2068 + # Deactivating until we have a frontend solution + # to display intermediate steps + langchain_object.return_intermediate_steps = False + 
fix_memory_inputs(langchain_object) + return langchain_object + return graph From 478bb446c3fd9ee6077c7abad6698a583e075e5b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:06:14 -0300 Subject: [PATCH 40/43] =?UTF-8?q?=F0=9F=90=9B=20fix(frontend):=20add=20mis?= =?UTF-8?q?sing=20api/v1=20prefix=20to=20API=20routes=20=F0=9F=90=9B=20fix?= =?UTF-8?q?(frontend):=20add=20missing=20api/v1=20prefix=20to=20WebSocket?= =?UTF-8?q?=20URL=20=F0=9F=90=9B=20fix(frontend):=20add=20missing=20api/v1?= =?UTF-8?q?=20prefix=20to=20Vite=20proxy=20target=20The=20API=20routes,=20?= =?UTF-8?q?WebSocket=20URL,=20and=20Vite=20proxy=20target=20were=20missing?= =?UTF-8?q?=20the=20"api/v1"=20prefix,=20causing=20the=20frontend=20to=20n?= =?UTF-8?q?ot=20be=20able=20to=20communicate=20with=20the=20backend.=20Thi?= =?UTF-8?q?s=20commit=20adds=20the=20missing=20prefix=20to=20all=20three?= =?UTF-8?q?=20locations=20to=20fix=20the=20issue.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/controllers/API/index.ts | 4 ++-- src/frontend/src/modals/chatModal/index.tsx | 4 ++-- src/frontend/vite.config.ts | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index f6f46404b..0cffd04bf 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -14,13 +14,13 @@ export async function sendAll(data: sendAllProps) { export async function checkCode( code: string ): Promise> { - return await axios.post("/validate/code", { code }); + return await axios.post("api/v1/validate/code", { code }); } export async function checkPrompt( template: string ): Promise> { - return await axios.post("/validate/prompt", { template }); + return await axios.post("api/v1/validate/prompt", { template }); } export async function getExamples(): Promise { diff --git 
a/src/frontend/src/modals/chatModal/index.tsx b/src/frontend/src/modals/chatModal/index.tsx index cf2b52aac..39bb72994 100644 --- a/src/frontend/src/modals/chatModal/index.tsx +++ b/src/frontend/src/modals/chatModal/index.tsx @@ -182,10 +182,10 @@ export default function ChatModal({ try { const urlWs = process.env.NODE_ENV === "development" - ? `ws://localhost:7860/chat/${id.current}` + ? `ws://localhost:7860/api/v1/chat/${id.current}` : `${window.location.protocol === "https:" ? "wss" : "ws"}://${ window.location.host - }/chat/${id.current}`; + }/api/v1/chat/${id.current}`; const newWs = new WebSocket(urlWs); newWs.onopen = () => { console.log("WebSocket connection established!"); diff --git a/src/frontend/vite.config.ts b/src/frontend/vite.config.ts index 172b37733..d4fa2248b 100644 --- a/src/frontend/vite.config.ts +++ b/src/frontend/vite.config.ts @@ -11,7 +11,7 @@ const apiRoutes = [ ]; // Use environment variable to determine the target. -const target = process.env.VITE_PROXY_TARGET || "http://127.0.0.1:7860"; +const target = process.env.VITE_PROXY_TARGET || "http://127.0.0.1:7860/api/v1"; const proxyTargets = apiRoutes.reduce((proxyObj, route) => { proxyObj[route] = { From 6b5539232fa98fcb8323aa66289d6414b4fa1dd2 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:06:40 -0300 Subject: [PATCH 41/43] =?UTF-8?q?=F0=9F=9A=80=20chore(server,=20tests):=20?= =?UTF-8?q?update=20API=20endpoint=20URLs=20to=20include=20version=20numbe?= =?UTF-8?q?r=20The=20API=20endpoint=20URLs=20have=20been=20updated=20to=20?= =?UTF-8?q?include=20the=20version=20number=20to=20improve=20the=20API's?= =?UTF-8?q?=20versioning=20and=20maintainability.=20The=20changes=20were?= =?UTF-8?q?=20made=20to=20the=20server.ts=20file=20and=20the=20tests=20tha?= =?UTF-8?q?t=20use=20the=20API=20endpoints.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🐛 fix(tests): update API endpoint
paths in the test files were outdated and have been updated to reflect the current API version. This ensures that the tests are running against the correct endpoints and that the tests are up-to-date with the current API version. --- tests/test_agents_template.py | 10 +++++----- tests/test_cache.py | 6 +++--- tests/test_chains_template.py | 16 ++++++++-------- tests/test_endpoints.py | 20 ++++++++++---------- tests/test_graph.py | 2 +- tests/test_llms_template.py | 8 ++++---- tests/test_loading.py | 2 +- tests/test_prompts_template.py | 8 ++++---- tests/test_websocket.py | 8 ++++---- 9 files changed, 40 insertions(+), 40 deletions(-) diff --git a/tests/test_agents_template.py b/tests/test_agents_template.py index 7aa8de176..8e181711f 100644 --- a/tests/test_agents_template.py +++ b/tests/test_agents_template.py @@ -5,7 +5,7 @@ from langflow.settings import settings # check that all agents are in settings.agents # are in json_response["agents"] def test_agents_settings(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] @@ -13,7 +13,7 @@ def test_agents_settings(client: TestClient): def test_zero_shot_agent(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] @@ -52,7 +52,7 @@ def test_zero_shot_agent(client: TestClient): def test_json_agent(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] @@ -87,7 +87,7 @@ def test_json_agent(client: TestClient): def test_csv_agent(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] 
@@ -126,7 +126,7 @@ def test_csv_agent(client: TestClient): def test_initialize_agent(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] diff --git a/tests/test_cache.py b/tests/test_cache.py index 3d3e951fc..3214e7d15 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,10 +1,10 @@ import json +from langflow.graph import Graph +from langflow.processing.process import load_or_build_langchain_object import pytest from langflow.interface.run import ( - build_graph, build_langchain_object_with_caching, - load_or_build_langchain_object, ) @@ -62,7 +62,7 @@ def test_build_langchain_object_with_caching(basic_data_graph): # Test build_graph def test_build_graph(basic_data_graph): - graph = build_graph(basic_data_graph) + graph = Graph.from_payload(basic_data_graph) assert graph is not None assert len(graph.nodes) == len(basic_data_graph["nodes"]) assert len(graph.edges) == len(basic_data_graph["edges"]) diff --git a/tests/test_chains_template.py b/tests/test_chains_template.py index c958cf64d..0c7af56ad 100644 --- a/tests/test_chains_template.py +++ b/tests/test_chains_template.py @@ -3,7 +3,7 @@ from langflow.settings import settings def test_chains_settings(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -12,7 +12,7 @@ def test_chains_settings(client: TestClient): # Test the ConversationChain object def test_conversation_chain(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -94,7 +94,7 @@ def test_conversation_chain(client: TestClient): def test_llm_chain(client: TestClient): - response = client.get("/all") + response = 
client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -152,7 +152,7 @@ def test_llm_chain(client: TestClient): def test_llm_checker_chain(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -228,7 +228,7 @@ def test_llm_checker_chain(client: TestClient): def test_llm_math_chain(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -306,7 +306,7 @@ def test_llm_math_chain(client: TestClient): def test_series_character_chain(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -368,7 +368,7 @@ def test_series_character_chain(client: TestClient): def test_mid_journey_prompt_chain(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -407,7 +407,7 @@ def test_mid_journey_prompt_chain(client: TestClient): def test_time_travel_guide_chain(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py index 83f6c62b1..9e07dfb24 100644 --- a/tests/test_endpoints.py +++ b/tests/test_endpoints.py @@ -4,7 +4,7 @@ from langflow.interface.tools.constants import CUSTOM_TOOLS def test_get_all(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() # We need to test 
the custom nodes @@ -21,7 +21,7 @@ import math def square(x): return x ** 2 """ - response1 = client.post("/validate/code", json={"code": code1}) + response1 = client.post("api/v1/validate/code", json={"code": code1}) assert response1.status_code == 200 assert response1.json() == {"imports": {"errors": []}, "function": {"errors": []}} @@ -32,7 +32,7 @@ import non_existent_module def square(x): return x ** 2 """ - response2 = client.post("/validate/code", json={"code": code2}) + response2 = client.post("api/v1/validate/code", json={"code": code2}) assert response2.status_code == 200 assert response2.json() == { "imports": {"errors": ["No module named 'non_existent_module'"]}, @@ -46,7 +46,7 @@ import math def square(x) return x ** 2 """ - response3 = client.post("/validate/code", json={"code": code3}) + response3 = client.post("api/v1/validate/code", json={"code": code3}) assert response3.status_code == 200 assert response3.json() == { "imports": {"errors": []}, @@ -54,11 +54,11 @@ def square(x) } # Test case with invalid JSON payload - response4 = client.post("/validate/code", json={"invalid_key": code1}) + response4 = client.post("api/v1/validate/code", json={"invalid_key": code1}) assert response4.status_code == 422 # Test case with an empty code string - response5 = client.post("/validate/code", json={"code": ""}) + response5 = client.post("api/v1/validate/code", json={"code": ""}) assert response5.status_code == 200 assert response5.json() == {"imports": {"errors": []}, "function": {"errors": []}} @@ -69,7 +69,7 @@ import math def square(x) return x ** 2 """ - response6 = client.post("/validate/code", json={"code": code6}) + response6 = client.post("api/v1/validate/code", json={"code": code6}) assert response6.status_code == 200 assert response6.json() == { "imports": {"errors": []}, @@ -95,13 +95,13 @@ INVALID_PROMPT = "This is an invalid prompt without any input variable." 
def test_valid_prompt(client: TestClient): - response = client.post("/validate/prompt", json={"template": VALID_PROMPT}) + response = client.post("api/v1/validate/prompt", json={"template": VALID_PROMPT}) assert response.status_code == 200 assert response.json() == {"input_variables": ["product"]} def test_invalid_prompt(client: TestClient): - response = client.post("/validate/prompt", json={"template": INVALID_PROMPT}) + response = client.post("api/v1/validate/prompt", json={"template": INVALID_PROMPT}) assert response.status_code == 200 assert response.json() == {"input_variables": []} @@ -116,7 +116,7 @@ def test_invalid_prompt(client: TestClient): ], ) def test_various_prompts(client, prompt, expected_input_variables): - response = client.post("/validate/prompt", json={"template": prompt}) + response = client.post("api/v1/validate/prompt", json={"template": prompt}) assert response.status_code == 200 assert response.json() == { "input_variables": expected_input_variables, diff --git a/tests/test_graph.py b/tests/test_graph.py index 8c6560d54..69a926cc3 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -16,7 +16,7 @@ from langflow.graph.vertex.types import ( ToolVertex, WrapperVertex, ) -from langflow.interface.run import get_result_and_thought +from langflow.processing.process import get_result_and_thought from langflow.utils.payload import get_root_node # Test cases for the graph module diff --git a/tests/test_llms_template.py b/tests/test_llms_template.py index ccf2f6388..da0b94318 100644 --- a/tests/test_llms_template.py +++ b/tests/test_llms_template.py @@ -3,7 +3,7 @@ from langflow.settings import settings def test_llms_settings(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() llms = json_response["llms"] @@ -11,7 +11,7 @@ def test_llms_settings(client: TestClient): # def test_hugging_face_hub(client: TestClient): -# response = 
client.get("/all") +# response = client.get("api/v1/all") # assert response.status_code == 200 # json_response = response.json() # language_models = json_response["llms"] @@ -103,7 +103,7 @@ def test_llms_settings(client: TestClient): def test_openai(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() language_models = json_response["llms"] @@ -333,7 +333,7 @@ def test_openai(client: TestClient): def test_chat_open_ai(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() language_models = json_response["llms"] diff --git a/tests/test_loading.py b/tests/test_loading.py index 872314699..885eb7a82 100644 --- a/tests/test_loading.py +++ b/tests/test_loading.py @@ -2,7 +2,7 @@ import json import pytest from langchain.chains.base import Chain -from langflow import load_flow_from_json +from langflow.processing.process import load_flow_from_json from langflow.graph import Graph from langflow.utils.payload import get_root_node diff --git a/tests/test_prompts_template.py b/tests/test_prompts_template.py index 83da2f14d..a8562898c 100644 --- a/tests/test_prompts_template.py +++ b/tests/test_prompts_template.py @@ -3,7 +3,7 @@ from langflow.settings import settings def test_prompts_settings(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() prompts = json_response["prompts"] @@ -11,7 +11,7 @@ def test_prompts_settings(client: TestClient): def test_prompt_template(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() prompts = json_response["prompts"] @@ -89,7 +89,7 @@ def test_prompt_template(client: TestClient): def test_few_shot_prompt_template(client: 
TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() prompts = json_response["prompts"] @@ -168,7 +168,7 @@ def test_few_shot_prompt_template(client: TestClient): def test_zero_shot_prompt(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() prompts = json_response["prompts"] diff --git a/tests/test_websocket.py b/tests/test_websocket.py index 5b60d0fed..611faff79 100644 --- a/tests/test_websocket.py +++ b/tests/test_websocket.py @@ -5,17 +5,17 @@ from fastapi.testclient import TestClient def test_websocket_connection(client: TestClient): - with client.websocket_connect("/chat/test_client") as websocket: + with client.websocket_connect("api/v1/chat/test_client") as websocket: assert websocket.scope["client"] == ["testclient", 50000] - assert websocket.scope["path"] == "/chat/test_client" + assert websocket.scope["path"] == "/api/v1/chat/test_client" def test_chat_history(client: TestClient): # Mock the process_graph function to return a specific value - with patch("langflow.api.chat_manager.process_graph") as mock_process_graph: + with patch("langflow.chat.manager.process_graph") as mock_process_graph: mock_process_graph.return_value = ("Hello, I'm a mock response!", "") - with client.websocket_connect("/chat/test_client") as websocket: + with client.websocket_connect("api/v1/chat/test_client") as websocket: # First message should be the history history = websocket.receive_json() assert history == [] # Empty history From 1e854fc4694b6404b6f09050be9566f8fd1d6784 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 10:06:51 -0300 Subject: [PATCH 42/43] update endpoint --- tests/test_vectorstore_template.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_vectorstore_template.py 
b/tests/test_vectorstore_template.py index 5b1d7e5bc..0aa823786 100644 --- a/tests/test_vectorstore_template.py +++ b/tests/test_vectorstore_template.py @@ -5,7 +5,7 @@ from langflow.settings import settings # check that all agents are in settings.agents # are in json_response["agents"] def test_vectorstores_settings(client: TestClient): - response = client.get("/all") + response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() vectorstores = json_response["vectorstores"] From 998d1bf5ed071b4152c0472a73ebdc0d9e80a8f2 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 6 Jun 2023 12:55:12 -0300 Subject: [PATCH 43/43] =?UTF-8?q?=F0=9F=9A=80=20chore(Makefile):=20add=20i?= =?UTF-8?q?nstall=5Fbackend=20command=20to=20backend=20target=20The=20Make?= =?UTF-8?q?file=20has=20been=20updated=20to=20include=20the=20`install=5Fb?= =?UTF-8?q?ackend`=20command=20as=20a=20dependency=20of=20the=20`backend`?= =?UTF-8?q?=20target.=20This=20ensures=20that=20the=20backend=20dependenci?= =?UTF-8?q?es=20are=20installed=20before=20running=20the=20backend=20serve?= =?UTF-8?q?r.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index 15337f65b..baf4220ff 100644 --- a/Makefile +++ b/Makefile @@ -43,6 +43,7 @@ install_backend: poetry install backend: + make install_backend poetry run uvicorn langflow.main:app --port 7860 --reload --log-level debug build_frontend: