From 2bd3cd0d8fca77bec86a55459d7df6f30078ca89 Mon Sep 17 00:00:00 2001 From: Jordi Adame Date: Fri, 7 Apr 2023 18:37:52 -0700 Subject: [PATCH 01/17] Update CONTRIBUTING.md s/I/We for consistency --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 98dddaa84..f98336ba9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ # Contributing to LangFlow -Hello there! I appreciate your interest in contributing to LangFlow. +Hello there! We appreciate your interest in contributing to LangFlow. As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infra, or better documentation. From cfd4a229ea464ab7ac9b9786aba7b11ec50d0b69 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Sat, 8 Apr 2023 08:55:22 -0300 Subject: [PATCH 02/17] fix: adding chainfrontendnode --- src/backend/langflow/interface/chains/base.py | 7 ++++++- src/backend/langflow/template/nodes.py | 10 ++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/chains/base.py b/src/backend/langflow/interface/chains/base.py index 0a46d3474..6f90b0e62 100644 --- a/src/backend/langflow/interface/chains/base.py +++ b/src/backend/langflow/interface/chains/base.py @@ -1,9 +1,10 @@ -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Type from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import chain_type_to_cls_dict from langflow.settings import settings +from langflow.template.nodes import ChainFrontendNode from langflow.utils.util import build_template_from_class # Assuming necessary imports for Field, Template, and FrontendNode classes @@ -12,6 +13,10 @@ from langflow.utils.util import build_template_from_class class ChainCreator(LangChainTypeCreator): type_name: str = 
"chains" + @property + def frontend_node_class(self) -> Type[ChainFrontendNode]: + return ChainFrontendNode + @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index c76c56b14..c5d9bf23d 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -289,3 +289,13 @@ class MemoryFrontendNode(FrontendNode): field.field_type = "int" field.value = 10 field.display_name = "Memory Size" + + +class ChainFrontendNode(FrontendNode): + @staticmethod + def format_field(field: TemplateField, name: Optional[str] = None) -> None: + FrontendNode.format_field(field, name) + + if "key" in field.name: + field.password = False + field.show = False From 46b24095269a307bddc6c6e1dbd6feb811348583 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Sat, 8 Apr 2023 08:55:46 -0300 Subject: [PATCH 03/17] fix: templates were not being processed correctly --- src/backend/langflow/graph/base.py | 9 ++++++++- src/backend/langflow/graph/constants.py | 2 +- src/backend/langflow/graph/nodes.py | 3 ++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/graph/base.py b/src/backend/langflow/graph/base.py index fa93ed8ed..cde4622d3 100644 --- a/src/backend/langflow/graph/base.py +++ b/src/backend/langflow/graph/base.py @@ -12,6 +12,7 @@ from langflow.graph.utils import load_file from langflow.interface import loading from langflow.interface.listing import ALL_TYPES_DICT from langflow.utils.logger import logger +import warnings class Node: @@ -119,7 +120,13 @@ class Node: params[key] = edges[0].source elif value["required"] or value.get("value"): - params[key] = value["value"] + # If value does not have value this still passes + # but then gives a keyError + # so we need to check if value has value + new_value = value.get("value") + if new_value is None: + warnings.warn(f"Value for {key} in {self.node_type} is None. 
") + params[key] = new_value # Add _type to params self.params = params diff --git a/src/backend/langflow/graph/constants.py b/src/backend/langflow/graph/constants.py index 095843816..8372e13a7 100644 --- a/src/backend/langflow/graph/constants.py +++ b/src/backend/langflow/graph/constants.py @@ -1 +1 @@ -DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any"] +DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"] diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index bf2f0bcec..a58300854 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -75,7 +75,8 @@ class PromptNode(Node): for param in prompt_params: prompt_text = self.params[param] variables = extract_input_variables_from_prompt(prompt_text) - + if self.params["input_variables"] is None: + self.params["input_variables"] = [] self.params["input_variables"].extend(variables) self.params["input_variables"] = list(set(self.params["input_variables"])) From c5f24e7dfa2e22119f6d89b090530b9bf664a64b Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Sat, 8 Apr 2023 09:02:42 -0300 Subject: [PATCH 04/17] fix: linting --- src/backend/langflow/graph/nodes.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index a58300854..5320adc61 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -55,7 +55,10 @@ class PromptNode(Node): tools: Optional[Union[List[Node], List[ToolNode]]] = None, ) -> Any: if not self._built or force: - if "input_variables" not in self.params: + if ( + "input_variables" not in self.params + or self.params["input_variables"] is None + ): self.params["input_variables"] = [] # Check if it is a ZeroShotPrompt and needs a tool if "ShotPrompt" in self.node_type: @@ -75,8 +78,6 @@ class PromptNode(Node): for param in prompt_params: prompt_text = self.params[param] 
variables = extract_input_variables_from_prompt(prompt_text) - if self.params["input_variables"] is None: - self.params["input_variables"] = [] self.params["input_variables"].extend(variables) self.params["input_variables"] = list(set(self.params["input_variables"])) From cd6ef4a875d1b6f53b2b17f7aec809f790441f8e Mon Sep 17 00:00:00 2001 From: Ibis Prevedello Date: Sat, 8 Apr 2023 13:49:06 -0300 Subject: [PATCH 05/17] refac: fix tool bug --- src/backend/langflow/graph/base.py | 2 +- src/backend/langflow/interface/tools/base.py | 28 +++++++++++++------- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/graph/base.py b/src/backend/langflow/graph/base.py index cde4622d3..8eb78e06c 100644 --- a/src/backend/langflow/graph/base.py +++ b/src/backend/langflow/graph/base.py @@ -4,6 +4,7 @@ # - Build each inner agent first, then build the outer agent import types +import warnings from copy import deepcopy from typing import Any, Dict, List, Optional @@ -12,7 +13,6 @@ from langflow.graph.utils import load_file from langflow.interface import loading from langflow.interface.listing import ALL_TYPES_DICT from langflow.utils.logger import logger -import warnings class Node: diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py index 4989a7d4b..8a89c429d 100644 --- a/src/backend/langflow/interface/tools/base.py +++ b/src/backend/langflow/interface/tools/base.py @@ -73,9 +73,14 @@ class ToolCreator(LangChainTypeCreator): base_classes = ["Tool"] all_tools = {} for tool in self.type_to_loader_dict.keys(): - if tool_params := get_tool_params(get_tool_by_name(tool)): + tool_fcn = get_tool_by_name(tool) + if tool_params := get_tool_params(tool_fcn): tool_name = tool_params.get("name") or str(tool) - all_tools[tool_name] = {"type": tool, "params": tool_params} + all_tools[tool_name] = { + "type": tool, + "params": tool_params, + "fcn": tool_fcn, + } # Raise error if name is not in tools if name not in 
all_tools.keys(): @@ -83,15 +88,21 @@ class ToolCreator(LangChainTypeCreator): tool_type: str = all_tools[name]["type"] # type: ignore - if tool_type in _BASE_TOOLS: + if all_tools[tool_type]["fcn"] in _BASE_TOOLS.values(): params = [] - elif tool_type in _LLM_TOOLS: + elif all_tools[tool_type]["fcn"] in _LLM_TOOLS.values(): params = ["llm"] - elif tool_type in _EXTRA_LLM_TOOLS: - _, extra_keys = _EXTRA_LLM_TOOLS[tool_type] + elif all_tools[tool_type]["fcn"] in [ + val[0] for val in _EXTRA_LLM_TOOLS.values() + ]: + n_dict = {val[0]: val[1] for val in _EXTRA_LLM_TOOLS.values()} + extra_keys = n_dict[all_tools[tool_type]["fcn"]] params = ["llm"] + extra_keys - elif tool_type in _EXTRA_OPTIONAL_TOOLS: - _, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type] + elif all_tools[tool_type]["fcn"] in [ + val[0] for val in _EXTRA_OPTIONAL_TOOLS.values() + ]: + n_dict = {val[0]: val[1] for val in _EXTRA_OPTIONAL_TOOLS.values()} # type: ignore + extra_keys = n_dict[all_tools[tool_type]["fcn"]] params = extra_keys elif tool_type == "Tool": params = ["name", "description", "func"] @@ -104,7 +115,6 @@ class ToolCreator(LangChainTypeCreator): elif tool_type in FILE_TOOLS: params = all_tools[name]["params"] # type: ignore base_classes += [name] - else: params = [] From a9f5da948bddb4845c8d0bbc12d788fb9f3fa524 Mon Sep 17 00:00:00 2001 From: Ibis Prevedello Date: Sat, 8 Apr 2023 13:49:22 -0300 Subject: [PATCH 06/17] Bump version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0eef15a03..bab62904e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.0.54" +version = "0.0.55" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ From 945f160db79cf59f8ee215fa86ed20f6fc28659e Mon Sep 17 00:00:00 2001 From: jacobhrussell Date: Sat, 8 Apr 2023 10:58:15 -0400 Subject: [PATCH 07/17] Add contrib link to readme --- 
README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8de32742a..970496349 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,7 @@ flow("Hey, have you heard of LangFlow?") ## 👋 Contributing -We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our contributing guidelines and help make LangFlow more accessible. +We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make LangFlow more accessible. [![Star History Chart](https://api.star-history.com/svg?repos=logspace-ai/langflow&type=Timeline)](https://star-history.com/#logspace-ai/langflow&Date) From f02f0f9c9826ee44c41213945f4493eb279b9618 Mon Sep 17 00:00:00 2001 From: Yoaz Menda Date: Sat, 8 Apr 2023 08:24:10 +0200 Subject: [PATCH 08/17] Add support to llama as llm --- dev.Dockerfile | 5 ++++- docker-compose.yml | 6 ++++++ poetry.lock | 18 ++++++++++++++++-- pyproject.toml | 1 + src/backend/langflow/config.yaml | 1 + src/backend/langflow/interface/custom_lists.py | 2 ++ src/frontend/package.json | 2 +- 7 files changed, 31 insertions(+), 4 deletions(-) diff --git a/dev.Dockerfile b/dev.Dockerfile index 7e439c69a..1f9a27ccc 100644 --- a/dev.Dockerfile +++ b/dev.Dockerfile @@ -3,7 +3,7 @@ FROM python:3.10-slim WORKDIR /app # Install Poetry -RUN apt-get update && apt-get install gcc curl -y +RUN apt-get update && apt-get install gcc g++ curl -y RUN curl -sSL https://install.python-poetry.org | python3 - # # Add Poetry to PATH ENV PATH="${PATH}:/root/.local/bin" @@ -15,4 +15,7 @@ COPY ./ ./ # Install dependencies RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi +# Set the logging level to DEBUG +ENV LOG_LEVEL=debug + CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload", 
"log-level", "debug"] \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index d9ba84030..f37406757 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,6 +10,12 @@ services: volumes: - ./:/app command: bash -c "uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload" + deploy: + resources: + limits: + cpus: '4' + memory: 16G + frontend: build: diff --git a/poetry.lock b/poetry.lock index 353921ea0..6dd39bcde 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "aiohttp" @@ -1274,6 +1274,20 @@ llms = ["anthropic (>=0.2.4,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,< openai = ["openai (>=0,<1)"] qdrant = ["qdrant-client (>=1.1.1,<2.0.0)"] +[[package]] +name = "llama-cpp-python" +version = "0.1.23" +description = "A Python wrapper for llama.cpp" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "llama_cpp_python-0.1.23.tar.gz", hash = "sha256:323a937e68e04251b5ad1804922e05d15c8b6bfbcf7c3e683a7b39a20e165ebf"}, +] + +[package.dependencies] +typing-extensions = ">=4.5.0,<5.0.0" + [[package]] name = "markdown-it-py" version = "2.2.0" @@ -2763,4 +2777,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "70e86f7d3b5caed792e37ccf9e11ed95008e5078dd8830e4f8b96cc1d35c7b60" +content-hash = "a5f1a33bedd704cea56a6c8d3d97c8d8daad4b78f47765cca068f88face28647" diff --git a/pyproject.toml b/pyproject.toml index bab62904e..75c8374b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,7 @@ dill = "^0.3.6" pandas = "^1.5.3" huggingface-hub = "^0.13.3" rich = "^13.3.3" +llama-cpp-python = "0.1.23" [tool.poetry.group.dev.dependencies] black = "^23.1.0" diff --git 
a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index a00d91bff..0fd8033e0 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -27,6 +27,7 @@ llms: # - AzureOpenAI - ChatOpenAI - HuggingFaceHub + - LlamaCpp tools: - Search diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py index 746c58325..d221901f6 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/langflow/interface/custom_lists.py @@ -12,12 +12,14 @@ from langchain import ( ) from langchain.agents import agent_toolkits from langchain.chat_models import ChatOpenAI +from langchain.llms import LlamaCpp from langflow.interface.importing.utils import import_class ## LLM llm_type_to_cls_dict = llms.type_to_cls_dict llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore +llm_type_to_cls_dict["llamacpp"] = LlamaCpp # type: ignore ## Chain chain_type_to_cls_dict: dict[str, Any] = { diff --git a/src/frontend/package.json b/src/frontend/package.json index b669569e3..84e9ad595 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -59,5 +59,5 @@ "last 1 safari version" ] }, - "proxy": "http://backend:7860" + "proxy": "http://127.0.0.1:5003" } \ No newline at end of file From 7e39aff3ed5e503c3f9b20f860a388588464ef2c Mon Sep 17 00:00:00 2001 From: Yoaz Menda Date: Sat, 8 Apr 2023 08:37:38 +0200 Subject: [PATCH 09/17] revert local dev changes --- dev.Dockerfile | 3 --- docker-compose.yml | 5 ----- src/frontend/package.json | 2 +- 3 files changed, 1 insertion(+), 9 deletions(-) diff --git a/dev.Dockerfile b/dev.Dockerfile index 1f9a27ccc..435fa0818 100644 --- a/dev.Dockerfile +++ b/dev.Dockerfile @@ -15,7 +15,4 @@ COPY ./ ./ # Install dependencies RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi -# Set the logging level to DEBUG -ENV LOG_LEVEL=debug - CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", 
"5003", "--reload", "log-level", "debug"] \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index f37406757..6f4ef74ff 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,11 +10,6 @@ services: volumes: - ./:/app command: bash -c "uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload" - deploy: - resources: - limits: - cpus: '4' - memory: 16G frontend: diff --git a/src/frontend/package.json b/src/frontend/package.json index 84e9ad595..b669569e3 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -59,5 +59,5 @@ "last 1 safari version" ] }, - "proxy": "http://127.0.0.1:5003" + "proxy": "http://backend:7860" } \ No newline at end of file From bf52b1d00a86229d92e68d3ae3c7655ca83e63aa Mon Sep 17 00:00:00 2001 From: Yoaz Menda Date: Sat, 8 Apr 2023 09:09:26 +0200 Subject: [PATCH 10/17] revert local dev changes --- docker-compose.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 6f4ef74ff..d9ba84030 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,7 +11,6 @@ services: - ./:/app command: bash -c "uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload" - frontend: build: context: ./src/frontend From ff42fce86e95082e99ece8d8de6f2bed9793196b Mon Sep 17 00:00:00 2001 From: Yoaz Menda Date: Sun, 9 Apr 2023 11:35:52 +0200 Subject: [PATCH 11/17] remove unnecessary custom llama object initialization --- src/backend/langflow/interface/custom_lists.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py index d221901f6..912393b36 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/langflow/interface/custom_lists.py @@ -19,7 +19,6 @@ from langflow.interface.importing.utils import import_class ## LLM llm_type_to_cls_dict = llms.type_to_cls_dict llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore 
-llm_type_to_cls_dict["llamacpp"] = LlamaCpp # type: ignore ## Chain chain_type_to_cls_dict: dict[str, Any] = { From 4bff12c80fd0a15e79e4e80d262268c30fdae3cc Mon Sep 17 00:00:00 2001 From: Yoaz Menda Date: Sun, 9 Apr 2023 11:37:51 +0200 Subject: [PATCH 12/17] remove unnecessary llama import --- src/backend/langflow/interface/custom_lists.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py index 912393b36..746c58325 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/langflow/interface/custom_lists.py @@ -12,7 +12,6 @@ from langchain import ( ) from langchain.agents import agent_toolkits from langchain.chat_models import ChatOpenAI -from langchain.llms import LlamaCpp from langflow.interface.importing.utils import import_class From abfea7fe8f63b5a906b4a85758822e9118523e64 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Thu, 6 Apr 2023 10:16:03 -0300 Subject: [PATCH 13/17] feat: added LLMFrontendNode --- src/backend/langflow/interface/llms/base.py | 7 +++++- src/backend/langflow/template/nodes.py | 28 +++++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/llms/base.py b/src/backend/langflow/interface/llms/base.py index 85f9035db..91eefd845 100644 --- a/src/backend/langflow/interface/llms/base.py +++ b/src/backend/langflow/interface/llms/base.py @@ -1,14 +1,19 @@ -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import llm_type_to_cls_dict from langflow.settings import settings +from langflow.template.nodes import LLMFrontendNode from langflow.utils.util import build_template_from_class class LLMCreator(LangChainTypeCreator): type_name: str = "llms" + @property + def frontend_node_class(self) -> Type[LLMFrontendNode]: + return LLMFrontendNode + @property 
def type_to_loader_dict(self) -> Dict: if self.type_dict is None: diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index c5d9bf23d..99c180c9e 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -299,3 +299,31 @@ class ChainFrontendNode(FrontendNode): if "key" in field.name: field.password = False field.show = False + + +class LLMFrontendNode(FrontendNode): + @staticmethod + def format_field(field: TemplateField, name: Optional[str] = None) -> None: + display_names_dict = { + "huggingfacehub_api_token": "HuggingFace Hub API Token", + } + FrontendNode.format_field(field, name) + SHOW_FIELDS = ["repo_id", "task", "model_kwargs"] + if field.name in SHOW_FIELDS: + field.show = True + + if "api" in field.name and ("key" in field.name or "token" in field.name): + field.password = True + field.show = True + field.required = True + + if field.name == "task": + field.required = True + field.show = True + field.is_list = True + field.options = ["text-generation", "text2text-generation"] + + if display_name := display_names_dict.get(field.name): + field.display_name = display_name + if field.name == "model_kwargs": + field.field_type = "code" From 8c044513d29575be1b1aedaeea0993fe0145f0ec Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Sat, 8 Apr 2023 18:07:20 -0300 Subject: [PATCH 14/17] feat(nodes.py): add ChainFrontendNode class and format_field method to handle key field display --- src/backend/langflow/template/nodes.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index 99c180c9e..dcb394f39 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -327,3 +327,13 @@ class LLMFrontendNode(FrontendNode): field.display_name = display_name if field.name == "model_kwargs": field.field_type = "code" + + +class ChainFrontendNode(FrontendNode): + 
@staticmethod + def format_field(field: TemplateField, name: Optional[str] = None) -> None: + FrontendNode.format_field(field, name) + + if "key" in field.name: + field.password = False + field.show = False From 2e3c0f3f51f62f3b9ce46bcfe3313cff05232215 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Sat, 8 Apr 2023 18:13:57 -0300 Subject: [PATCH 15/17] style(test_llms_template.py): change boolean values to True in 'show' key for some template fields feat(test_llms_template.py): make 'task' and 'huggingfacehub_api_token' fields required feat(test_llms_template.py): add options to 'task' field and display name to 'huggingfacehub_api_token' field --- tests/test_llms_template.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/tests/test_llms_template.py b/tests/test_llms_template.py index e5bfe7e6e..de1ebfb29 100644 --- a/tests/test_llms_template.py +++ b/tests/test_llms_template.py @@ -53,7 +53,7 @@ def test_hugging_face_hub(client: TestClient): assert template["repo_id"] == { "required": False, "placeholder": "", - "show": False, + "show": True, "multiline": False, "value": "gpt2", "password": False, @@ -62,19 +62,20 @@ def test_hugging_face_hub(client: TestClient): "list": False, } assert template["task"] == { - "required": False, + "required": True, "placeholder": "", - "show": False, + "show": True, "multiline": False, "password": False, + "options": ["text-generation", "text2text-generation"], "name": "task", "type": "str", - "list": False, + "list": True, } assert template["model_kwargs"] == { "required": False, "placeholder": "", - "show": False, + "show": True, "multiline": False, "password": False, "name": "model_kwargs", @@ -82,12 +83,13 @@ def test_hugging_face_hub(client: TestClient): "list": False, } assert template["huggingfacehub_api_token"] == { - "required": False, + "required": True, "placeholder": "", "show": True, "multiline": False, "password": True, "name": "huggingfacehub_api_token", + "display_name": 
"HuggingFace Hub API Token", "type": "str", "list": False, } @@ -231,7 +233,7 @@ def test_openai(client: TestClient): assert template["model_kwargs"] == { "required": False, "placeholder": "", - "show": False, + "show": True, "multiline": False, "password": False, "name": "model_kwargs", @@ -361,7 +363,7 @@ def test_chat_open_ai(client: TestClient): assert template["model_kwargs"] == { "required": False, "placeholder": "", - "show": False, + "show": True, "multiline": False, "password": False, "name": "model_kwargs", From e420f9dd004a220316f03d15a058631b319dcd29 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Sun, 9 Apr 2023 06:57:19 -0300 Subject: [PATCH 16/17] refactor(nodes.py): remove unused ChainFrontendNode class and its format_field method. --- src/backend/langflow/template/nodes.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index dcb394f39..99c180c9e 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -327,13 +327,3 @@ class LLMFrontendNode(FrontendNode): field.display_name = display_name if field.name == "model_kwargs": field.field_type = "code" - - -class ChainFrontendNode(FrontendNode): - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - - if "key" in field.name: - field.password = False - field.show = False From cdf50ebe7a38164cbfe0352e8ba8fd8408ea007b Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Sun, 9 Apr 2023 09:23:47 -0300 Subject: [PATCH 17/17] test(graph.py): add test for get_result_and_thought_using_graph method --- tests/test_graph.py | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/tests/test_graph.py b/tests/test_graph.py index bbdacc7cb..65a9d0ea6 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,4 +1,5 @@ import json +from typing import 
Type, Union import pytest from langchain.agents import AgentExecutor @@ -14,6 +15,8 @@ from langflow.graph.nodes import ( WrapperNode, ) from langflow.utils.payload import build_json, get_root_node +from langflow.interface.run import get_result_and_thought_using_graph +from langchain.llms.fake import FakeListLLM # Test cases for the graph module @@ -53,7 +56,7 @@ def openapi_graph(): return get_graph("openapi") -def get_node_by_type(graph, node_type): +def get_node_by_type(graph, node_type: Type[Node]) -> Union[Node, None]: """Get a node by type""" return next((node for node in graph.nodes if isinstance(node, node_type)), None) @@ -418,3 +421,28 @@ def test_wrapper_node_build(openapi_graph): built_object = wrapper_node.build() assert built_object is not None # Add any further assertions specific to the WrapperNode's build() method + + +def test_get_result_and_thought(basic_graph): + """Test the get_result_and_thought method""" + responses = [ + "Final Answer: I am a response", + ] + message = "Hello" + # Find the node that is an LLMNode and change the + # _built_object to a FakeListLLM + llm_node = get_node_by_type(basic_graph, LLMNode) + assert llm_node is not None + llm_node._built_object = FakeListLLM(responses=responses) + llm_node._built = True + langchain_object = basic_graph.build() + # assert all nodes are built + assert all(node._built for node in basic_graph.nodes) + # now build again and check if FakeListLLM was used + + # Get the result and thought + result, thought = get_result_and_thought_using_graph(langchain_object, message) + # The result should be a str + assert isinstance(result, str) + # The thought should be a Thought + assert isinstance(thought, str)