From 08d60f18ea71d441b982c6bacb7cc0aa0ec54a48 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Mon, 3 Apr 2023 17:06:15 -0300 Subject: [PATCH 01/26] feat: frontend_node_class property and other prompts --- src/backend/langflow/config.yaml | 4 +++ src/backend/langflow/custom/customs.py | 1 + src/backend/langflow/interface/base.py | 8 ++++- .../langflow/interface/prompts/base.py | 33 ++++++++++++------- src/backend/langflow/template/base.py | 2 ++ src/backend/langflow/template/nodes.py | 22 ++++++++++++- 6 files changed, 57 insertions(+), 13 deletions(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 08beaae08..af369f193 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -13,6 +13,10 @@ agents: prompts: - PromptTemplate - FewShotPromptTemplate + - ChatPromptTemplate + - SystemMessagePromptTemplate + - AIMessagePromptTemplate + - HumanMessagePromptTemplate llms: - OpenAI diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py index 112b8db26..22e833362 100644 --- a/src/backend/langflow/custom/customs.py +++ b/src/backend/langflow/custom/customs.py @@ -1,5 +1,6 @@ from langflow.template import nodes +# These should always be instantiated CUSTOM_NODES = { "prompts": {"ZeroShotPrompt": nodes.ZeroShotPromptNode()}, "tools": {"PythonFunction": nodes.PythonFunctionNode(), "Tool": nodes.ToolNode()}, diff --git a/src/backend/langflow/interface/base.py b/src/backend/langflow/interface/base.py index ad8ccfc6a..87f716ac2 100644 --- a/src/backend/langflow/interface/base.py +++ b/src/backend/langflow/interface/base.py @@ -1,4 +1,5 @@ from abc import ABC, abstractmethod +import abc from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel @@ -12,6 +13,11 @@ class LangChainTypeCreator(BaseModel, ABC): type_name: str type_dict: Optional[Dict] = None + @property + def frontend_node_class(self) -> str: + """The class type of the FrontendNode 
created in frontend_node.""" + return FrontendNode + @property @abstractmethod def type_to_loader_dict(self) -> Dict: @@ -62,7 +68,7 @@ class LangChainTypeCreator(BaseModel, ABC): if key != "_type" ] template = Template(type_name=name, fields=fields) - return FrontendNode( + return self.frontend_node_class( template=template, description=signature.get("description", ""), base_classes=signature["base_classes"], diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py index f730481a9..b24522d5c 100644 --- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/langflow/interface/prompts/base.py @@ -1,39 +1,50 @@ from typing import Dict, List from langchain.prompts import loading - +from langchain import prompts from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator +from langflow.interface.importing.utils import import_class from langflow.settings import settings -from langflow.utils.util import build_template_from_function +from langflow.template.nodes import PromptFrontendNode +from langflow.utils.util import build_template_from_class class PromptCreator(LangChainTypeCreator): type_name: str = "prompts" + @property + def frontend_node_class(self) -> str: + return PromptFrontendNode + @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: - self.type_dict = loading.type_to_loader_dict + self.type_dict = { + prompt_name: import_class(f"langchain.prompts.{prompt_name}") + # if prompt_name is not lower case it is a class + for prompt_name in prompts.__all__ + if not prompt_name.islower() and prompt_name in settings.prompts + } return self.type_dict def get_signature(self, name: str) -> Dict | None: try: if name in get_custom_nodes(self.type_name).keys(): return get_custom_nodes(self.type_name)[name] - return build_template_from_function(name, self.type_to_loader_dict) + return build_template_from_class(name, 
self.type_to_loader_dict) except ValueError as exc: raise ValueError("Prompt not found") from exc def to_list(self) -> List[str]: custom_prompts = get_custom_nodes("prompts") - library_prompts = [ - prompt.__annotations__["return"].__name__ - for prompt in self.type_to_loader_dict.values() - if prompt.__annotations__["return"].__name__ in settings.prompts - or settings.dev - ] - return library_prompts + list(custom_prompts.keys()) + # library_prompts = [ + # prompt.__annotations__["return"].__name__ + # for prompt in self.type_to_loader_dict.values() + # if prompt.__annotations__["return"].__name__ in settings.prompts + # or settings.dev + # ] + return list(self.type_to_loader_dict.keys()) + list(custom_prompts.keys()) prompt_creator = PromptCreator() diff --git a/src/backend/langflow/template/base.py b/src/backend/langflow/template/base.py index 887ab187f..da2c8312d 100644 --- a/src/backend/langflow/template/base.py +++ b/src/backend/langflow/template/base.py @@ -219,3 +219,5 @@ class FrontendNode(BaseModel): elif name == "ChatOpenAI" and key == "model_name": field.options = constants.CHAT_OPENAI_MODELS field.is_list = True + + diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index 6bd23d59a..ad408a6c8 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -6,7 +6,17 @@ from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION from langchain.agents import loading -class ZeroShotPromptNode(FrontendNode): +class BasePromptFrontendNode(FrontendNode): + name: str + template: Template + description: str + base_classes: list[str] + + def to_dict(self): + return super().to_dict() + + +class ZeroShotPromptNode(BasePromptFrontendNode): name: str = "ZeroShotPrompt" template: Template = Template( type_name="zero_shot", @@ -227,3 +237,13 @@ class CSVAgentNode(FrontendNode): def to_dict(self): return super().to_dict() + + +class PromptFrontendNode(FrontendNode): + @staticmethod + def 
format_field(field: TemplateField, name: Optional[str] = None) -> None: + # if field.field_type == "StringPromptTemplate" + # change it to str + if field.field_type == "StringPromptTemplate": + field.field_type = "str" + field.multiline = True From 376265cb8f9d37cdd9774296d727cacbac9b680f Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Mon, 3 Apr 2023 20:22:16 -0300 Subject: [PATCH 02/26] fix: change verbose only if possible --- src/backend/langflow/interface/run.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 78974bd24..797e04091 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -76,7 +76,8 @@ def process_graph(data_graph: Dict[str, Any]): def get_result_and_thought_using_graph(loaded_langchain, message: str): """Get result and thought from extracted json""" try: - loaded_langchain.verbose = True + if hasattr(loaded_langchain, "verbose"): + loaded_langchain.verbose = True with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): chat_input = None for key in loaded_langchain.input_keys: From e0c96b2cbbe905f001f68bcfbb34b01a0da01290 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Mon, 3 Apr 2023 20:49:19 -0300 Subject: [PATCH 03/26] feat: messages nodes implemented but deactivated for now --- src/backend/langflow/config.yaml | 9 +++++---- src/backend/langflow/template/constants.py | 21 +++++++++++++++++++++ src/backend/langflow/template/nodes.py | 12 +++++++++++- 3 files changed, 37 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index af369f193..da314b751 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -13,10 +13,11 @@ agents: prompts: - PromptTemplate - FewShotPromptTemplate - - ChatPromptTemplate - - SystemMessagePromptTemplate - - AIMessagePromptTemplate - - 
HumanMessagePromptTemplate + - ZeroShotPrompt + # Wait more tests + # - ChatPromptTemplate + # - SystemMessagePromptTemplate + # - HumanMessagePromptTemplate llms: - OpenAI diff --git a/src/backend/langflow/template/constants.py b/src/backend/langflow/template/constants.py index bb8920a8d..ae08d3691 100644 --- a/src/backend/langflow/template/constants.py +++ b/src/backend/langflow/template/constants.py @@ -9,3 +9,24 @@ FORCE_SHOW_FIELDS = [ "max_value_length", "max_tokens", ] + +DEFAULT_PROMPT = """ +I want you to act as a naming consultant for new companies. + +Here are some examples of good company names: + +- search engine, Google +- social media, Facebook +- video sharing, YouTube + +The name should be short, catchy and easy to remember. + +What is a good name for a company that makes {product}? +""" + +SYSTEM_PROMPT = """ +You are a helpful assistant that talks casually about life in general. +You are a good listener and you can talk about anything. +""" + +HUMAN_PROMPT = "{input}" diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index ad408a6c8..b58775a5c 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -2,6 +2,7 @@ from typing import Optional from langchain.agents.mrkl import prompt from langflow.template.base import FrontendNode, Template, TemplateField +from langflow.template.constants import DEFAULT_PROMPT, HUMAN_PROMPT, SYSTEM_PROMPT from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION from langchain.agents import loading @@ -244,6 +245,15 @@ class PromptFrontendNode(FrontendNode): def format_field(field: TemplateField, name: Optional[str] = None) -> None: # if field.field_type == "StringPromptTemplate" # change it to str - if field.field_type == "StringPromptTemplate": + if field.field_type == "StringPromptTemplate" and "Message" in name: field.field_type = "str" field.multiline = True + field.value = HUMAN_PROMPT if "Human" in field.name else 
SYSTEM_PROMPT + if field.name == "template": + field.value = DEFAULT_PROMPT + + if ( + "Union" in field.field_type + and "BaseMessagePromptTemplate" in field.field_type + ): + field.field_type = "BaseMessagePromptTemplate" From 67bc082569d63433aad6023c75099a0f1744af5b Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 11:52:13 -0300 Subject: [PATCH 04/26] feat: added custom conversation chains and prompts --- src/backend/langflow/config.yaml | 3 + src/backend/langflow/graph/nodes.py | 2 + src/backend/langflow/interface/chains/base.py | 19 +++- .../langflow/interface/chains/custom.py | 100 ++++++++++++++++++ .../langflow/interface/importing/utils.py | 9 ++ .../langflow/interface/prompts/base.py | 4 + .../langflow/interface/prompts/custom.py | 63 ++++++----- src/backend/langflow/interface/run.py | 26 ++++- src/backend/langflow/template/nodes.py | 2 +- 9 files changed, 193 insertions(+), 35 deletions(-) create mode 100644 src/backend/langflow/interface/chains/custom.py diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 85130c9a0..25ad70839 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -3,6 +3,9 @@ chains: - LLMMathChain - LLMCheckerChain - ConversationChain + - SeriesCharacterChain + - MidJourneyPromptChain + - TimeTravelGuideChain agents: - ZeroShotAgent diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index b465e3817..df109d3a8 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -75,7 +75,9 @@ class PromptNode(Node): for param in prompt_params: prompt_text = self.params[param] variables = extract_input_variables_from_prompt(prompt_text) + self.params["input_variables"].extend(variables) + self.params["input_variables"] = list(set(self.params["input_variables"])) self._build() return deepcopy(self._built_object) diff --git a/src/backend/langflow/interface/chains/base.py 
b/src/backend/langflow/interface/chains/base.py index 36542b7d4..45a9b2ddb 100644 --- a/src/backend/langflow/interface/chains/base.py +++ b/src/backend/langflow/interface/chains/base.py @@ -1,4 +1,5 @@ from typing import Dict, List, Optional +from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import chain_type_to_cls_dict @@ -15,19 +16,27 @@ class ChainCreator(LangChainTypeCreator): def type_to_loader_dict(self) -> Dict: if self.type_dict is None: self.type_dict = chain_type_to_cls_dict + from langflow.interface.chains.custom import CUSTOM_CHAINS + + self.type_dict.update(CUSTOM_CHAINS) return self.type_dict def get_signature(self, name: str) -> Optional[Dict]: try: - return build_template_from_class(name, chain_type_to_cls_dict) + if name in get_custom_nodes(self.type_name).keys(): + return get_custom_nodes(self.type_name)[name] + return build_template_from_class(name, self.type_to_loader_dict) except ValueError as exc: - raise ValueError("Memory not found") from exc + raise ValueError("Chain not found") from exc def to_list(self) -> List[str]: + custom_chains = list(get_custom_nodes("chains").keys()) + default_chains = list(self.type_to_loader_dict.keys()) + # Check if the chain is in the settings return [ - chain.__name__ - for chain in self.type_to_loader_dict.values() - if chain.__name__ in settings.chains or settings.dev + chain + for chain in default_chains + custom_chains + if chain in settings.chains or settings.dev ] diff --git a/src/backend/langflow/interface/chains/custom.py b/src/backend/langflow/interface/chains/custom.py new file mode 100644 index 000000000..07e08699f --- /dev/null +++ b/src/backend/langflow/interface/chains/custom.py @@ -0,0 +1,100 @@ +from typing import Optional +from langchain.chains import ConversationChain +from langflow.graph.utils import extract_input_variables_from_prompt +from pydantic import root_validator, Field +from 
langchain.memory.buffer import ConversationBufferMemory +from langchain.schema import BaseMemory + + +DEFAULT_SUFFIX = """" +Current conversation: +{history} +Human: {input} +{ai_prefix}""" + + +class BaseCustomChain(ConversationChain): + """BaseCustomChain is a chain you can use to have a conversation with a custom character.""" + + template: Optional[str] + + ai_prefix_key: Optional[str] + """Field to use as the ai_prefix. It needs to be set and has to be in the template""" + + @root_validator(pre=False) + def build_template(cls, values): + format_dict = {} + input_variables = extract_input_variables_from_prompt(values["template"]) + + if values.get("ai_prefix_key", None) is None: + values["ai_prefix_key"] = values["memory"].ai_prefix + + for key in input_variables: + new_value = values.get(key, f"{{{key}}}") + format_dict[key] = new_value + if key == values.get("ai_prefix_key", None): + values["memory"].ai_prefix = new_value + + values["template"] = values["template"].format(**format_dict) + + values["template"] = values["template"] + values["input_variables"] = extract_input_variables_from_prompt( + values["template"] + ) + values["prompt"].template = values["template"] + values["prompt"].input_variables = values["input_variables"] + return values + + +class SeriesCharacterChain(BaseCustomChain): + """SeriesCharacterChain is a chain you can use to have a conversation with a character from a series.""" + + character: str + series: str + template: Optional[ + str + ] = """I want you to act like {character} from {series}. +I want you to respond and answer like {character}. do not write any explanations. only answer like {character}. +You must know all of the knowledge of {character}. 
+Current conversation: +{history} +Human: {input} +{character}:""" + memory: BaseMemory = Field(default_factory=ConversationBufferMemory) + ai_prefix_key: Optional[str] = "character" + """Default memory store.""" + + +class MidJourneyPromptChain(BaseCustomChain): + """MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts.""" + + template: Optional[ + str + ] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program. + Your job is to provide detailed and creative descriptions that will inspire unique and interesting images from the AI. + Keep in mind that the AI is capable of understanding a wide range of language and can interpret abstract concepts, so feel free to be as imaginative and descriptive as possible. + For example, you could describe a scene from a futuristic city, or a surreal landscape filled with strange creatures. + The more detailed and imaginative your description, the more interesting the resulting image will be. Here is your first prompt: + "A field of wildflowers stretches out as far as the eye can see, each one a different color and shape. In the distance, a massive tree towers over the landscape, its branches reaching up to the sky like tentacles.\" + + Current conversation: + {history} + Human: {input} + AI:""" + + +class TimeTravelGuideChain(BaseCustomChain): + template: Optional[ + str + ] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information. 
+ Current conversation: + {history} + Human: {input} + AI:""" + + +CUSTOM_CHAINS = { + "SeriesCharacterChain": SeriesCharacterChain, + "MidJourneyPromptChain": MidJourneyPromptChain, + "TimeTravelGuideChain": TimeTravelGuideChain, +} diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index 0ada410e4..af4631ed2 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/langflow/interface/importing/utils.py @@ -10,6 +10,7 @@ from langchain.chat_models.base import BaseChatModel from langchain.llms.base import BaseLLM from langchain.tools import BaseTool + from langflow.interface.tools.util import get_tool_by_name @@ -66,9 +67,13 @@ def import_class(class_path: str) -> Any: def import_prompt(prompt: str) -> PromptTemplate: + from langflow.interface.prompts.custom import CUSTOM_PROMPTS + """Import prompt from prompt name""" if prompt == "ZeroShotPrompt": return import_class("langchain.prompts.PromptTemplate") + elif prompt in CUSTOM_PROMPTS: + return CUSTOM_PROMPTS[prompt] return import_class(f"langchain.prompts.{prompt}") @@ -102,4 +107,8 @@ def import_tool(tool: str) -> BaseTool: def import_chain(chain: str) -> Chain: """Import chain from chain name""" + from langflow.interface.chains.custom import CUSTOM_CHAINS + + if chain in CUSTOM_CHAINS: + return CUSTOM_CHAINS[chain] return import_class(f"langchain.chains.{chain}") diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py index a7de7a611..ad289c531 100644 --- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/langflow/interface/prompts/base.py @@ -26,6 +26,10 @@ class PromptCreator(LangChainTypeCreator): for prompt_name in prompts.__all__ if not prompt_name.islower() and prompt_name in settings.prompts } + # Merge CUSTOM_PROMPTS into self.type_dict + from langflow.interface.prompts.custom import CUSTOM_PROMPTS + + self.type_dict.update(CUSTOM_PROMPTS) return 
self.type_dict def get_signature(self, name: str) -> Optional[Dict]: diff --git a/src/backend/langflow/interface/prompts/custom.py b/src/backend/langflow/interface/prompts/custom.py index d1bb98c62..295316fce 100644 --- a/src/backend/langflow/interface/prompts/custom.py +++ b/src/backend/langflow/interface/prompts/custom.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Dict, List, Optional from langchain.prompts import PromptTemplate from pydantic import root_validator @@ -7,43 +7,49 @@ from langflow.graph.utils import extract_input_variables_from_prompt from langflow.template.base import Template, TemplateField from langflow.template.nodes import PromptTemplateNode -CHARACTER_PROMPT = """I want you to act like {character} from {series}. -I want you to respond and answer like {character}. do not write any explanations. only answer like {character}. -You must know all of the knowledge of {character}.""" + +# Steps to create a BaseCustomPrompt: +# 1. Create a prompt template that endes with: +# Current conversation: +# {history} +# Human: {input} +# {ai_prefix}: +# 2. Create a class that inherits from BaseCustomPrompt +# 3. Add the following class attributes: +# template: str = "" +# description: Optional[str] +# ai_prefix: Optional[str] = "{ai_prefix}" +# 3.1. 
The ai_prefix should be a value in input_variables +# SeriesCharacterPrompt is a working example +# If used in a LLMChain, with a Memory module, it will work as expected +# We should consider creating ConversationalChains that expose custom parameters +# That way it will be easier to create custom prompts class BaseCustomPrompt(PromptTemplate): template: str = "" description: Optional[str] - human_text: str = "\n {input}" + ai_prefix: Optional[str] @root_validator(pre=False) def build_template(cls, values): format_dict = {} + ai_prefix_format_dict = {} for key in values.get("input_variables", []): - new_value = values[key] + new_value = values.get(key, f"{{{key}}}") format_dict[key] = new_value + if key in values["ai_prefix"]: + ai_prefix_format_dict[key] = new_value + values["ai_prefix"] = values["ai_prefix"].format(**ai_prefix_format_dict) values["template"] = values["template"].format(**format_dict) - values["template"] = values["template"] + values["human_text"] + values["template"] = values["template"] values["input_variables"] = extract_input_variables_from_prompt( values["template"] ) return values - def build_frontend_node(self) -> PromptTemplateNode: - return PromptTemplateNode( - template=Template( - type_name="test", - fields=[ - TemplateField(name=field, field_type="str", required=True) - for field in self.input_variables - ], - ), - description=self.description or "", - ) - class SeriesCharacterPrompt(BaseCustomPrompt): # Add a very descriptive description for the prompt generator @@ -52,14 +58,21 @@ class SeriesCharacterPrompt(BaseCustomPrompt): ] = "A prompt that asks the AI to act like a character from a series." character: str series: str - human_text: str = "\n {input}" - template: str = CHARACTER_PROMPT + template: str = """I want you to act like {character} from {series}. +I want you to respond and answer like {character}. do not write any explanations. only answer like {character}. +You must know all of the knowledge of {character}. 
+Current conversation: +{history} +Human: {input} +{character}:""" + + ai_prefix: str = "{character}" input_variables: List[str] = ["character", "series"] +CUSTOM_PROMPTS = {"SeriesCharacterPrompt": SeriesCharacterPrompt} + if __name__ == "__main__": - prompt = SeriesCharacterPrompt(character="Walter White", series="Breaking Bad") - user_input = "I am the one who knocks" - full_prompt = prompt.format(input=user_input) - print(full_prompt) + prompt = SeriesCharacterPrompt(character="Harry Potter", series="Harry Potter") + print(prompt.template) diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 0da273722..9e8aaa841 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -52,6 +52,12 @@ def process_graph(data_graph: Dict[str, Any]): ) logger.debug("Loaded langchain object") + if langchain_object is None: + # Raise user facing error + raise ValueError( + "There was an error loading the flow. Please, check all the nodes and try again." + ) + # Generate result and thought logger.debug("Generating result and thought") result, thought = get_result_and_thought_using_graph(langchain_object, message) @@ -73,18 +79,30 @@ def get_result_and_thought_using_graph(loaded_langchain, message: str): loaded_langchain.verbose = True with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): chat_input = None + memory_key = "" + if hasattr(loaded_langchain, "memory"): + mem_vars = loaded_langchain.memory.memory_variables + memory_key = mem_vars[0] if mem_vars else "" + for key in loaded_langchain.input_keys: - if key == "chat_history" and hasattr(loaded_langchain, "memory"): - loaded_langchain.memory.memory_key = "chat_history" - else: + if key != memory_key: chat_input = {key: message} if hasattr(loaded_langchain, "return_intermediate_steps"): # https://github.com/hwchase17/langchain/issues/2068 loaded_langchain.return_intermediate_steps = False + # I'm not sure about this yet. 
+ function_to_call = None + if hasattr(loaded_langchain, "memory"): + elif hasattr(loaded_langchain, "run"): + function_to_call = loaded_langchain.run + function_to_call = loaded_langchain.predict + else: + function_to_call = loaded_langchain + try: - output = loaded_langchain(chat_input) + output = function_to_call(chat_input) except ValueError as exc: logger.debug("Error: %s", str(exc)) output = loaded_langchain.run(chat_input) diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index b28a38842..6be772483 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -251,7 +251,7 @@ class PromptFrontendNode(FrontendNode): field.field_type = "str" field.multiline = True field.value = HUMAN_PROMPT if "Human" in field.name else SYSTEM_PROMPT - if field.name == "template": + if field.name == "template" and field.value == "": field.value = DEFAULT_PROMPT if ( From 5beaf7bd1308058d8f5fc77ac2095bcab099bf5f Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 11:53:13 -0300 Subject: [PATCH 05/26] fix: typo --- src/backend/langflow/interface/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 9e8aaa841..28afb4a27 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -95,9 +95,9 @@ def get_result_and_thought_using_graph(loaded_langchain, message: str): # I'm not sure about this yet. 
function_to_call = None if hasattr(loaded_langchain, "memory"): + function_to_call = loaded_langchain.predict elif hasattr(loaded_langchain, "run"): function_to_call = loaded_langchain.run - function_to_call = loaded_langchain.predict else: function_to_call = loaded_langchain From 743e935becf47f5bb319e20381f69cf27fd172ae Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 19:19:41 -0300 Subject: [PATCH 06/26] feat: added debug option in make dev --- Makefile | 11 ++++++----- docker-compose.debug.yml | 28 ++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 5 deletions(-) create mode 100644 docker-compose.debug.yml diff --git a/Makefile b/Makefile index 70407436b..37317c91d 100644 --- a/Makefile +++ b/Makefile @@ -40,16 +40,17 @@ build: rm -rf src/backend/langflow/frontend dev: - make install_frontend + make install_frontend ifeq ($(build),1) - @echo 'Running docker compose up with build' - docker compose up --build + @echo 'Running docker compose up with build' + docker compose up $(if $(debug),-f docker-compose.debug.yml) --build else - @echo 'Running docker compose up without build' - docker compose up + @echo 'Running docker compose up without build' + docker compose up $(if $(debug),-f docker-compose.debug.yml) endif + publish: make build poetry publish diff --git a/docker-compose.debug.yml b/docker-compose.debug.yml new file mode 100644 index 000000000..581bdc6da --- /dev/null +++ b/docker-compose.debug.yml @@ -0,0 +1,28 @@ +version: '3.4' + +services: + backend: + volumes: + - ./:/app + build: + context: ./ + dockerfile: ./dev.Dockerfile + command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload"] + ports: + - 7860:7860 + - 5678:5678 + restart: on-failure + + frontend: + build: + context: ./src/frontend + dockerfile: ./dev.Dockerfile + args: + - BACKEND_URL=http://backend:7860 + ports: + - "3000:3000" + volumes: 
+ - ./src/frontend/public:/home/node/app/public + - ./src/frontend/src:/home/node/app/src + - ./src/frontend/package.json:/home/node/app/package.json + restart: on-failure \ No newline at end of file From 4de895f834e226cf9c47362c904b06339ffb7d1a Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 19:22:16 -0300 Subject: [PATCH 07/26] bump langchain --- poetry.lock | 19 +++++++++++-------- pyproject.toml | 2 +- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4aa7d59ca..ea47f222f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1199,14 +1199,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "langchain" -version = "0.0.127" +version = "0.0.131" description = "Building applications with LLMs through composability" category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain-0.0.127-py3-none-any.whl", hash = "sha256:04ba053881e6098e80e0f4afc8922f3fe78923b160fd12d856aebce49c261918"}, - {file = "langchain-0.0.127.tar.gz", hash = "sha256:e8a3b67fd86a6f79c4334f0a7588c9476fcb57b27a8fb0e617f47c01eaab8be8"}, + {file = "langchain-0.0.131-py3-none-any.whl", hash = "sha256:3564a759e85095c9d71a78817da9cec1e2a8a0cda1bdd94ef8ac7008e432717a"}, + {file = "langchain-0.0.131.tar.gz", hash = "sha256:61baf67fbec561ce38d187915a46e1c41139270826453600951760fde1a5d98a"}, ] [package.dependencies] @@ -1220,8 +1220,11 @@ SQLAlchemy = ">=1,<2" tenacity = ">=8.1.0,<9.0.0" [package.extras] -all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.4,<0.3.0)", "beautifulsoup4 (>=4,<5)", "boto3 (>=1.26.96,<2.0.0)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk 
(>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.0.4,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"] +all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.4,<0.3.0)", "beautifulsoup4 (>=4,<5)", "boto3 (>=1.26.96,<2.0.0)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.1.1,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"] +cohere = ["cohere (>=3,<4)"] llms = ["anthropic (>=0.2.4,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)"] +openai = ["openai (>=0,<1)"] +qdrant = ["qdrant-client (>=1.1.1,<2.0.0)"] [[package]] name = "markdown-it-py" @@ -1506,14 +1509,14 @@ files = [ [[package]] name = "openai" -version = "0.27.2" +version = "0.27.4" description = "Python client library for the 
OpenAI API" category = "main" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-0.27.2-py3-none-any.whl", hash = "sha256:6df674cf257e9e0504f1fd191c333d3f6a2442b13218d0eccf06230eb24d320e"}, - {file = "openai-0.27.2.tar.gz", hash = "sha256:5869fdfa34b0ec66c39afa22f4a0fb83a135dff81f6505f52834c6ab3113f762"}, + {file = "openai-0.27.4-py3-none-any.whl", hash = "sha256:3b82c867d531e1fd2003d9de2131e1c4bfd4c70b1a3149e0543a555b30807b70"}, + {file = "openai-0.27.4.tar.gz", hash = "sha256:9f9d27d26e62c6068f516c0729449954b5ef6994be1a6cbfe7dbefbc84423a04"}, ] [package.dependencies] @@ -2712,4 +2715,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "2b523f3d737ef8f7082e8156f096bce6f4f84a8bee9d07bd4ed23a29d3dcfab1" +content-hash = "91c68c5a5673f7b2bd0833af35da1262afd21d631cc62ec6ff9c65f69a96af0a" diff --git a/pyproject.toml b/pyproject.toml index ec715385f..c50b5e12e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ google-search-results = "^2.4.1" google-api-python-client = "^2.79.0" typer = "^0.7.0" gunicorn = "^20.1.0" -langchain = "^0.0.127" +langchain = "^0.0.131" openai = "^0.27.2" types-pyyaml = "^6.0.12.8" dill = "^0.3.6" From b82e923fdf469d21775e73e5dec951b3b134c6d3 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 19:25:20 -0300 Subject: [PATCH 08/26] feat: adding AzureOpenAI #85 --- src/backend/langflow/config.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 25ad70839..fafd3fb0d 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -24,7 +24,7 @@ prompts: llms: - OpenAI - - ChatOpenAI + - AzureOpenAI tools: - Search @@ -44,6 +44,7 @@ toolkits: memories: - ConversationBufferMemory + - ConversationSummaryMemory embeddings: [] From 8873a798b0ff1b5a9c23a4dd921441d926512c85 Mon 
Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 19:27:37 -0300 Subject: [PATCH 09/26] feat: adding memory frontend node --- src/backend/langflow/interface/memories/base.py | 6 ++++++ src/backend/langflow/interface/wrappers/base.py | 2 +- src/backend/langflow/template/nodes.py | 14 ++++++++++++++ 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/memories/base.py b/src/backend/langflow/interface/memories/base.py index 1bb4b054b..fee179d21 100644 --- a/src/backend/langflow/interface/memories/base.py +++ b/src/backend/langflow/interface/memories/base.py @@ -3,12 +3,18 @@ from typing import Dict, List, Optional from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import memory_type_to_cls_dict from langflow.settings import settings +from langflow.template.nodes import MemoryFrontendNode from langflow.utils.util import build_template_from_class class MemoryCreator(LangChainTypeCreator): type_name: str = "memories" + @property + def frontend_node_class(self) -> str: + """The class type of the FrontendNode created in frontend_node.""" + return MemoryFrontendNode + @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: diff --git a/src/backend/langflow/interface/wrappers/base.py b/src/backend/langflow/interface/wrappers/base.py index 8c5978013..8d0df8141 100644 --- a/src/backend/langflow/interface/wrappers/base.py +++ b/src/backend/langflow/interface/wrappers/base.py @@ -13,7 +13,7 @@ class WrapperCreator(LangChainTypeCreator): def type_to_loader_dict(self) -> Dict: if self.type_dict is None: self.type_dict = { - wrapper.__name__: wrapper for wrapper in [requests.RequestsWrapper] + wrapper.__name__: wrapper for wrapper in [requests.TextRequestsWrapper] } return self.type_dict diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index 6be772483..0a2c149da 100644 --- a/src/backend/langflow/template/nodes.py +++ 
b/src/backend/langflow/template/nodes.py @@ -259,3 +259,17 @@ class PromptFrontendNode(FrontendNode): and "BaseMessagePromptTemplate" in field.field_type ): field.field_type = "BaseMessagePromptTemplate" + + +class MemoryFrontendNode(FrontendNode): + @staticmethod + def format_field(field: TemplateField, name: Optional[str] = None) -> None: + FrontendNode.format_field(field, name) + + if not isinstance(field.value, str): + field.value = None + if field.name == "k": + field.required = True + field.show = True + field.field_type = "int" + field.value = 10 From f6fc9f2c3b42ca6ca8a7f97aaa3efb83fbfe0766 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 19:35:44 -0300 Subject: [PATCH 10/26] feat: added first display_name --- src/backend/langflow/template/nodes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index 0a2c149da..dae282163 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -273,3 +273,4 @@ class MemoryFrontendNode(FrontendNode): field.show = True field.field_type = "int" field.value = 10 + field.display_name = "Memory Size" From a98c1b54e5a317a059cd7b02185db87fdec63743 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 19:36:08 -0300 Subject: [PATCH 11/26] fix: fixes for memory and better error message --- src/backend/langflow/interface/run.py | 76 +++++++++++++++++---------- 1 file changed, 49 insertions(+), 27 deletions(-) diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 28afb4a27..060308216 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -55,7 +55,7 @@ def process_graph(data_graph: Dict[str, Any]): if langchain_object is None: # Raise user facing error raise ValueError( - "There was an error loading the flow. Please, check all the nodes and try again." 
+ "There was an error loading the langchain_object. Please, check all the nodes and try again." ) # Generate result and thought @@ -72,47 +72,69 @@ def process_graph(data_graph: Dict[str, Any]): return {"result": str(result), "thought": thought.strip()} -def get_result_and_thought_using_graph(loaded_langchain, message: str): +def fix_memory_inputs_for_intermediate_steps(langchain_object): + """ + Fix memory inputs by replacing the memory key with the input key. + """ + langchain_object.return_intermediate_steps = True + langchain_object.memory.memory_key + input_key = [ + key + for key in langchain_object.input_keys + if key != langchain_object.memory.memory_key + ][0] + # get output_key + output_key = [ + key + for key in langchain_object.output_keys + if key != langchain_object.memory.memory_key + ][0] + # set input_key and output_key in memory + langchain_object.memory.input_key = input_key + langchain_object.memory.output_key = output_key + + +def get_result_and_thought_using_graph(langchain_object, message: str): """Get result and thought from extracted json""" try: - if hasattr(loaded_langchain, "verbose"): - loaded_langchain.verbose = True + if hasattr(langchain_object, "verbose"): + langchain_object.verbose = True with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): chat_input = None memory_key = "" - if hasattr(loaded_langchain, "memory"): - mem_vars = loaded_langchain.memory.memory_variables - memory_key = mem_vars[0] if mem_vars else "" + if ( + hasattr(langchain_object, "memory") + and langchain_object.memory is not None + ): + memory_key = langchain_object.memory.memory_key - for key in loaded_langchain.input_keys: - if key != memory_key: + for key in langchain_object.input_keys: + if key not in [memory_key, "chat_history"]: chat_input = {key: message} - if hasattr(loaded_langchain, "return_intermediate_steps"): + if hasattr(langchain_object, "return_intermediate_steps"): # https://github.com/hwchase17/langchain/issues/2068 - 
loaded_langchain.return_intermediate_steps = False - - # I'm not sure about this yet. - function_to_call = None - if hasattr(loaded_langchain, "memory"): - function_to_call = loaded_langchain.predict - elif hasattr(loaded_langchain, "run"): - function_to_call = loaded_langchain.run - else: - function_to_call = loaded_langchain + # Deactivating until we have a frontend solution + # to display intermediate steps + langchain_object.return_intermediate_steps = False + if langchain_object.return_intermediate_steps: + fix_memory_inputs_for_intermediate_steps(langchain_object) try: - output = function_to_call(chat_input) + output = langchain_object(chat_input) except ValueError as exc: - logger.debug("Error: %s", str(exc)) - output = loaded_langchain.run(chat_input) + # make the error message more informative + logger.debug(f"Error: {str(exc)}") + if hasattr(langchain_object, "memory"): + langchain_object.memory.memory_key = memory_key + output = langchain_object(chat_input) intermediate_steps = ( output.get("intermediate_steps", []) if isinstance(output, dict) else [] ) result = ( - output.get(loaded_langchain.output_keys[0]) + output.get(langchain_object.output_keys[0]) if isinstance(output, dict) else output ) @@ -129,16 +151,16 @@ def get_result_and_thought_using_graph(loaded_langchain, message: str): def get_result_and_thought(extracted_json: Dict[str, Any], message: str): """Get result and thought from extracted json""" try: - loaded_langchain = loading.load_langchain_type_from_config( + langchain_object = loading.load_langchain_type_from_config( config=extracted_json ) with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): - output = loaded_langchain(message) + output = langchain_object(message) intermediate_steps = ( output.get("intermediate_steps", []) if isinstance(output, dict) else [] ) result = ( - output.get(loaded_langchain.output_keys[0]) + output.get(langchain_object.output_keys[0]) if isinstance(output, dict) else output ) From 
be80e6007f0f02a4bf4625d2541765c3cf26090f Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 19:36:26 -0300 Subject: [PATCH 12/26] feat: added display_name attr --- src/backend/langflow/template/base.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/backend/langflow/template/base.py b/src/backend/langflow/template/base.py index da2c8312d..f6a5773ec 100644 --- a/src/backend/langflow/template/base.py +++ b/src/backend/langflow/template/base.py @@ -22,6 +22,7 @@ class TemplateFieldCreator(BaseModel, ABC): password: bool = False options: list[str] = [] name: str = "" + display_name: str = "" def to_dict(self): result = self.dict() @@ -219,5 +220,3 @@ class FrontendNode(BaseModel): elif name == "ChatOpenAI" and key == "model_name": field.options = constants.CHAT_OPENAI_MODELS field.is_list = True - - From 272006d7491543ab58e0808deae4a6661326c3e4 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 19:41:08 -0300 Subject: [PATCH 13/26] fix proxy --- src/frontend/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/frontend/package.json b/src/frontend/package.json index 41b9bcdfe..b669569e3 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -59,5 +59,5 @@ "last 1 safari version" ] }, - "proxy": "http://localhost:7860" -} + "proxy": "http://backend:7860" +} \ No newline at end of file From c41957c67e77049d38717d90c8232d4ad3d42d8c Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 20:43:41 -0300 Subject: [PATCH 14/26] feat: adding validate/prompt endpoint --- src/backend/langflow/api/base.py | 20 ++++-- src/backend/langflow/api/endpoints.py | 14 ---- src/backend/langflow/api/validate.py | 38 +++++++++++ src/backend/langflow/main.py | 2 + src/frontend/src/controllers/API/index.ts | 2 +- tests/test_endpoints.py | 79 +++++++++++++++++++++-- 6 files changed, 130 insertions(+), 25 deletions(-) create mode 100644 src/backend/langflow/api/validate.py 
diff --git a/src/backend/langflow/api/base.py b/src/backend/langflow/api/base.py index 3c3e0d8eb..e3c749aab 100644 --- a/src/backend/langflow/api/base.py +++ b/src/backend/langflow/api/base.py @@ -1,16 +1,17 @@ +from langflow.graph.utils import extract_input_variables_from_prompt from pydantic import BaseModel, validator class Code(BaseModel): code: str - @validator("code") - def validate_code(cls, v): - return v + +class Prompt(BaseModel): + template: str # Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}} -class ValidationResponse(BaseModel): +class CodeValidationResponse(BaseModel): imports: dict function: dict @@ -21,3 +22,14 @@ class ValidationResponse(BaseModel): @validator("function") def validate_function(cls, v): return v or {"errors": []} + + +class PromptValidationResponse(BaseModel): + input_variables: list + valid: bool + + +def validate_prompt(template): + # Extract the input variables from template + input_variables = extract_input_variables_from_prompt(template) + return input_variables, len(input_variables) > 0 diff --git a/src/backend/langflow/api/endpoints.py b/src/backend/langflow/api/endpoints.py index c82616d3e..22f548156 100644 --- a/src/backend/langflow/api/endpoints.py +++ b/src/backend/langflow/api/endpoints.py @@ -3,10 +3,8 @@ from typing import Any, Dict from fastapi import APIRouter, HTTPException -from langflow.api.base import Code, ValidationResponse from langflow.interface.run import process_graph from langflow.interface.types import build_langchain_types_dict -from langflow.utils.validate import validate_code # build router router = APIRouter() @@ -26,15 +24,3 @@ def get_load(data: Dict[str, Any]): # Log stack trace logger.exception(e) raise HTTPException(status_code=500, detail=str(e)) from e - - -@router.post("/validate", status_code=200, response_model=ValidationResponse) -def post_validate_code(code: Code): - try: - errors = validate_code(code.code) - return ValidationResponse( - 
imports=errors.get("imports", {}), - function=errors.get("function", {}), - ) - except Exception as e: - return HTTPException(status_code=500, detail=str(e)) diff --git a/src/backend/langflow/api/validate.py b/src/backend/langflow/api/validate.py new file mode 100644 index 000000000..7ea5d6eb7 --- /dev/null +++ b/src/backend/langflow/api/validate.py @@ -0,0 +1,38 @@ +from fastapi import HTTPException +from langflow.api.base import ( + Code, + CodeValidationResponse, + Prompt, + PromptValidationResponse, + validate_prompt, +) + +from langflow.utils.validate import validate_code +from langflow.utils.logger import logger + + +from fastapi import APIRouter, HTTPException + +# build router +router = APIRouter(prefix="/validate", tags=["validate"]) + + +@router.post("/code", status_code=200, response_model=CodeValidationResponse) +def post_validate_code(code: Code): + try: + errors = validate_code(code.code) + return CodeValidationResponse( + imports=errors.get("imports", {}), + function=errors.get("function", {}), + ) + except Exception as e: + return HTTPException(status_code=500, detail=str(e)) + + +@router.post("/prompt", status_code=200, response_model=PromptValidationResponse) +def post_validate_prompt(prompt: Prompt): + try: + input_variables, valid = validate_prompt(prompt.template) + return PromptValidationResponse(input_variables=input_variables, valid=valid) + except Exception as e: + return HTTPException(status_code=500, detail=str(e)) diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 21d17690a..176e46236 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -2,6 +2,7 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from langflow.api.endpoints import router as endpoints_router +from langflow.api.validate import router as validate_router def create_app(): @@ -21,6 +22,7 @@ def create_app(): ) app.include_router(endpoints_router) + app.include_router(validate_router) return 
app diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index c6315d1b3..bad966ea9 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -12,5 +12,5 @@ export async function sendAll(data:sendAllProps) { export async function checkCode(code:string):Promise>{ - return await axios.post('/validate',{code}) + return await axios.post('/validate/code',{code}) } \ No newline at end of file diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py index 570bf554e..55f002dad 100644 --- a/tests/test_endpoints.py +++ b/tests/test_endpoints.py @@ -1,5 +1,25 @@ +import json +from typing import Dict from fastapi.testclient import TestClient from langflow.interface.tools.constants import CUSTOM_TOOLS +from pathlib import Path + +import pytest + + +def test_post_predict(client: TestClient): + with open(Path(__file__).parent / "data" / "Build_error.json") as f: + data = f.read() + json_data = json.loads(data) + data: Dict = json_data["data"] + data["message"] = "I'm Bob" + response = client.post("/predict", json=data) + assert response.status_code == 200 + data["message"] = "What is my name?" 
+ data["chatHistory"] = ["I'm Bob"] + response = client.post("/predict", json=data) + assert response.status_code == 200 + assert "Bob" in response.json()["result"] def test_get_all(client: TestClient): @@ -20,7 +40,7 @@ import math def square(x): return x ** 2 """ - response1 = client.post("/validate", json={"code": code1}) + response1 = client.post("/validate/code", json={"code": code1}) assert response1.status_code == 200 assert response1.json() == {"imports": {"errors": []}, "function": {"errors": []}} @@ -31,7 +51,7 @@ import non_existent_module def square(x): return x ** 2 """ - response2 = client.post("/validate", json={"code": code2}) + response2 = client.post("/validate/code", json={"code": code2}) assert response2.status_code == 200 assert response2.json() == { "imports": {"errors": ["No module named 'non_existent_module'"]}, @@ -45,7 +65,7 @@ import math def square(x) return x ** 2 """ - response3 = client.post("/validate", json={"code": code3}) + response3 = client.post("/validate/code", json={"code": code3}) assert response3.status_code == 200 assert response3.json() == { "imports": {"errors": []}, @@ -53,11 +73,11 @@ def square(x) } # Test case with invalid JSON payload - response4 = client.post("/validate", json={"invalid_key": code1}) + response4 = client.post("/validate/code", json={"invalid_key": code1}) assert response4.status_code == 422 # Test case with an empty code string - response5 = client.post("/validate", json={"code": ""}) + response5 = client.post("/validate/code", json={"code": ""}) assert response5.status_code == 200 assert response5.json() == {"imports": {"errors": []}, "function": {"errors": []}} @@ -68,9 +88,56 @@ import math def square(x) return x ** 2 """ - response6 = client.post("/validate", json={"code": code6}) + response6 = client.post("/validate/code", json={"code": code6}) assert response6.status_code == 200 assert response6.json() == { "imports": {"errors": []}, "function": {"errors": ["expected ':' (, line 4)"]}, } + + 
+VALID_PROMPT = """ +I want you to act as a naming consultant for new companies. + +Here are some examples of good company names: + +- search engine, Google +- social media, Facebook +- video sharing, YouTube + +The name should be short, catchy and easy to remember. + +What is a good name for a company that makes {product}? +""" + +INVALID_PROMPT = "This is an invalid prompt without any input variable." + + +def test_valid_prompt(client: TestClient): + response = client.post("/validate/prompt", json={"template": VALID_PROMPT}) + assert response.status_code == 200 + assert response.json() == {"input_variables": ["product"], "valid": True} + + +def test_invalid_prompt(client: TestClient): + response = client.post("/validate/prompt", json={"template": INVALID_PROMPT}) + assert response.status_code == 200 + assert response.json() == {"input_variables": [], "valid": False} + + +@pytest.mark.parametrize( + "prompt,expected_input_variables,expected_validity", + [ + ("{color} is my favorite color.", ["color"], True), + ("The weather is {weather} today.", ["weather"], True), + ("This prompt has no variables.", [], False), + ("{a}, {b}, and {c} are variables.", ["a", "b", "c"], True), + ], +) +def test_various_prompts(client, prompt, expected_input_variables, expected_validity): + response = client.post("/validate/prompt", json={"template": prompt}) + assert response.status_code == 200 + assert response.json() == { + "input_variables": expected_input_variables, + "valid": expected_validity, + } From 4ff42190a1ca02d4306382047d6919cd286dca4e Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 21:45:26 -0300 Subject: [PATCH 15/26] formatting and linting --- src/backend/langflow/api/base.py | 3 ++- src/backend/langflow/api/validate.py | 9 +++------ src/backend/langflow/interface/base.py | 6 +++--- src/backend/langflow/interface/chains/base.py | 2 +- src/backend/langflow/interface/chains/custom.py | 9 +++++---- src/backend/langflow/interface/importing/utils.py | 7 
+++---- src/backend/langflow/interface/memories/base.py | 5 +++-- src/backend/langflow/interface/prompts/base.py | 3 ++- src/backend/langflow/interface/prompts/custom.py | 7 ++++--- 9 files changed, 26 insertions(+), 25 deletions(-) diff --git a/src/backend/langflow/api/base.py b/src/backend/langflow/api/base.py index e3c749aab..04ae0c3bb 100644 --- a/src/backend/langflow/api/base.py +++ b/src/backend/langflow/api/base.py @@ -1,6 +1,7 @@ -from langflow.graph.utils import extract_input_variables_from_prompt from pydantic import BaseModel, validator +from langflow.graph.utils import extract_input_variables_from_prompt + class Code(BaseModel): code: str diff --git a/src/backend/langflow/api/validate.py b/src/backend/langflow/api/validate.py index 7ea5d6eb7..81d7f8500 100644 --- a/src/backend/langflow/api/validate.py +++ b/src/backend/langflow/api/validate.py @@ -1,4 +1,5 @@ -from fastapi import HTTPException +from fastapi import APIRouter, HTTPException + from langflow.api.base import ( Code, CodeValidationResponse, @@ -6,12 +7,8 @@ from langflow.api.base import ( PromptValidationResponse, validate_prompt, ) - -from langflow.utils.validate import validate_code from langflow.utils.logger import logger - - -from fastapi import APIRouter, HTTPException +from langflow.utils.validate import validate_code # build router router = APIRouter(prefix="/validate", tags=["validate"]) diff --git a/src/backend/langflow/interface/base.py b/src/backend/langflow/interface/base.py index 87f716ac2..24293a478 100644 --- a/src/backend/langflow/interface/base.py +++ b/src/backend/langflow/interface/base.py @@ -1,6 +1,6 @@ -from abc import ABC, abstractmethod import abc -from typing import Any, Dict, List, Optional, Union +from abc import ABC, abstractmethod +from typing import Any, Dict, List, Optional, Type, Union from pydantic import BaseModel @@ -14,7 +14,7 @@ class LangChainTypeCreator(BaseModel, ABC): type_dict: Optional[Dict] = None @property - def frontend_node_class(self) -> str: + 
def frontend_node_class(self) -> Type[FrontendNode]: """The class type of the FrontendNode created in frontend_node.""" return FrontendNode diff --git a/src/backend/langflow/interface/chains/base.py b/src/backend/langflow/interface/chains/base.py index 45a9b2ddb..b4bb58b9b 100644 --- a/src/backend/langflow/interface/chains/base.py +++ b/src/backend/langflow/interface/chains/base.py @@ -1,6 +1,6 @@ from typing import Dict, List, Optional -from langflow.custom.customs import get_custom_nodes +from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import chain_type_to_cls_dict from langflow.settings import settings diff --git a/src/backend/langflow/interface/chains/custom.py b/src/backend/langflow/interface/chains/custom.py index 07e08699f..8a191f594 100644 --- a/src/backend/langflow/interface/chains/custom.py +++ b/src/backend/langflow/interface/chains/custom.py @@ -1,10 +1,11 @@ -from typing import Optional +from typing import Dict, Optional, Type + from langchain.chains import ConversationChain -from langflow.graph.utils import extract_input_variables_from_prompt -from pydantic import root_validator, Field from langchain.memory.buffer import ConversationBufferMemory from langchain.schema import BaseMemory +from pydantic import Field, root_validator +from langflow.graph.utils import extract_input_variables_from_prompt DEFAULT_SUFFIX = """" Current conversation: @@ -93,7 +94,7 @@ class TimeTravelGuideChain(BaseCustomChain): AI:""" -CUSTOM_CHAINS = { +CUSTOM_CHAINS: Dict[str, Type[ConversationChain]] = { "SeriesCharacterChain": SeriesCharacterChain, "MidJourneyPromptChain": MidJourneyPromptChain, "TimeTravelGuideChain": TimeTravelGuideChain, diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index af4631ed2..e322831ad 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ 
b/src/backend/langflow/interface/importing/utils.py @@ -1,7 +1,7 @@ # This module is used to import any langchain class by name. import importlib -from typing import Any +from typing import Any, Type from langchain import PromptTemplate from langchain.agents import Agent @@ -10,7 +10,6 @@ from langchain.chat_models.base import BaseChatModel from langchain.llms.base import BaseLLM from langchain.tools import BaseTool - from langflow.interface.tools.util import get_tool_by_name @@ -66,7 +65,7 @@ def import_class(class_path: str) -> Any: return getattr(module, class_name) -def import_prompt(prompt: str) -> PromptTemplate: +def import_prompt(prompt: str) -> Type[PromptTemplate]: from langflow.interface.prompts.custom import CUSTOM_PROMPTS """Import prompt from prompt name""" @@ -105,7 +104,7 @@ def import_tool(tool: str) -> BaseTool: return get_tool_by_name(tool) -def import_chain(chain: str) -> Chain: +def import_chain(chain: str) -> Type[Chain]: """Import chain from chain name""" from langflow.interface.chains.custom import CUSTOM_CHAINS diff --git a/src/backend/langflow/interface/memories/base.py b/src/backend/langflow/interface/memories/base.py index fee179d21..7db99c0a1 100644 --- a/src/backend/langflow/interface/memories/base.py +++ b/src/backend/langflow/interface/memories/base.py @@ -1,8 +1,9 @@ -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import memory_type_to_cls_dict from langflow.settings import settings +from langflow.template.base import FrontendNode from langflow.template.nodes import MemoryFrontendNode from langflow.utils.util import build_template_from_class @@ -11,7 +12,7 @@ class MemoryCreator(LangChainTypeCreator): type_name: str = "memories" @property - def frontend_node_class(self) -> str: + def frontend_node_class(self) -> Type[FrontendNode]: """The class type of the FrontendNode created in frontend_node.""" 
return MemoryFrontendNode diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py index ad289c531..462fd6257 100644 --- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/langflow/interface/prompts/base.py @@ -1,7 +1,8 @@ from typing import Dict, List, Optional -from langchain.prompts import loading from langchain import prompts +from langchain.prompts import loading + from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class diff --git a/src/backend/langflow/interface/prompts/custom.py b/src/backend/langflow/interface/prompts/custom.py index 295316fce..e80d62312 100644 --- a/src/backend/langflow/interface/prompts/custom.py +++ b/src/backend/langflow/interface/prompts/custom.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Type from langchain.prompts import PromptTemplate from pydantic import root_validator @@ -7,7 +7,6 @@ from langflow.graph.utils import extract_input_variables_from_prompt from langflow.template.base import Template, TemplateField from langflow.template.nodes import PromptTemplateNode - # Steps to create a BaseCustomPrompt: # 1. 
Create a prompt template that endes with: # Current conversation: @@ -71,7 +70,9 @@ Human: {input} input_variables: List[str] = ["character", "series"] -CUSTOM_PROMPTS = {"SeriesCharacterPrompt": SeriesCharacterPrompt} +CUSTOM_PROMPTS: Dict[str, Type[BaseCustomPrompt]] = { + "SeriesCharacterPrompt": SeriesCharacterPrompt +} if __name__ == "__main__": prompt = SeriesCharacterPrompt(character="Harry Potter", series="Harry Potter") From 64fb056ba906bbf6512c5a4ce10543243ef7f25b Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 21:45:45 -0300 Subject: [PATCH 16/26] fix: removed chat agents for now --- src/backend/langflow/template/nodes.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index dae282163..acfc5c2d2 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -7,6 +7,12 @@ from langflow.template.base import FrontendNode, Template, TemplateField from langflow.template.constants import DEFAULT_PROMPT, HUMAN_PROMPT, SYSTEM_PROMPT from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION +NON_CHAT_AGENTS = { + agent_type: agent_class + for agent_type, agent_class in loading.AGENT_TO_CLASS.items() + if "chat" not in agent_type.value +} + class BasePromptFrontendNode(FrontendNode): name: str @@ -176,8 +182,8 @@ class InitializeAgentNode(FrontendNode): is_list=True, show=True, multiline=False, - options=list(loading.AGENT_TO_CLASS.keys()), - value=list(loading.AGENT_TO_CLASS.keys())[0], + options=list(NON_CHAT_AGENTS.keys()), + value=list(NON_CHAT_AGENTS.keys())[0], name="agent", ), TemplateField( @@ -247,7 +253,7 @@ class PromptFrontendNode(FrontendNode): def format_field(field: TemplateField, name: Optional[str] = None) -> None: # if field.field_type == "StringPromptTemplate" # change it to str - if field.field_type == "StringPromptTemplate" and "Message" in name: + if field.field_type == 
"StringPromptTemplate" and "Message" in str(name): field.field_type = "str" field.multiline = True field.value = HUMAN_PROMPT if "Human" in field.name else SYSTEM_PROMPT From fe95790331dd8bba1e5d378d4e3e40a8db4ed502 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 21:46:05 -0300 Subject: [PATCH 17/26] fix: function to try to avoid input keys errors with memory --- src/backend/langflow/interface/run.py | 57 ++++++++++++++++----------- 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 060308216..773b21b7f 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -72,26 +72,41 @@ def process_graph(data_graph: Dict[str, Any]): return {"result": str(result), "thought": thought.strip()} -def fix_memory_inputs_for_intermediate_steps(langchain_object): +def fix_memory_inputs(langchain_object): """ Fix memory inputs by replacing the memory key with the input key. 
""" - langchain_object.return_intermediate_steps = True - langchain_object.memory.memory_key - input_key = [ - key - for key in langchain_object.input_keys - if key != langchain_object.memory.memory_key - ][0] - # get output_key - output_key = [ - key - for key in langchain_object.output_keys - if key != langchain_object.memory.memory_key - ][0] - # set input_key and output_key in memory - langchain_object.memory.input_key = input_key - langchain_object.memory.output_key = output_key + # Possible memory keys + # "chat_history", "history" + # if memory_key is "chat_history" and input_keys has "history" + # we need to replace "chat_history" with "history" + mem_key_dict = { + "chat_history": "history", + "history": "chat_history", + } + memory_key = langchain_object.memory.memory_key + possible_new_mem_key = mem_key_dict.get(memory_key) + if possible_new_mem_key is not None: + # get input_key + input_key = [ + key + for key in langchain_object.input_keys + if key not in [memory_key, possible_new_mem_key] + ][0] + + # get output_key + output_key = [ + key + for key in langchain_object.output_keys + if key not in [memory_key, possible_new_mem_key] + ][0] + + # set input_key and output_key in memory + langchain_object.memory.input_key = input_key + langchain_object.memory.output_key = output_key + for input_key in langchain_object.input_keys: + if input_key == possible_new_mem_key: + langchain_object.memory.memory_key = possible_new_mem_key def get_result_and_thought_using_graph(langchain_object, message: str): @@ -117,17 +132,15 @@ def get_result_and_thought_using_graph(langchain_object, message: str): # Deactivating until we have a frontend solution # to display intermediate steps langchain_object.return_intermediate_steps = False - if langchain_object.return_intermediate_steps: - fix_memory_inputs_for_intermediate_steps(langchain_object) + + fix_memory_inputs(langchain_object) try: output = langchain_object(chat_input) except ValueError as exc: # make the error 
message more informative logger.debug(f"Error: {str(exc)}") - if hasattr(langchain_object, "memory"): - langchain_object.memory.memory_key = memory_key - output = langchain_object(chat_input) + output = langchain_object.run(chat_input) intermediate_steps = ( output.get("intermediate_steps", []) if isinstance(output, dict) else [] From 09412b337ef5f58cf6ff62650b79196351a0bc0f Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 21:46:28 -0300 Subject: [PATCH 18/26] feat: fix formatting in makefile --- Makefile | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/Makefile b/Makefile index 37317c91d..7ab775a98 100644 --- a/Makefile +++ b/Makefile @@ -40,16 +40,14 @@ build: rm -rf src/backend/langflow/frontend dev: - make install_frontend -ifeq ($(build),1) - @echo 'Running docker compose up with build' - docker compose up $(if $(debug),-f docker-compose.debug.yml) --build -else - @echo 'Running docker compose up without build' - docker compose up $(if $(debug),-f docker-compose.debug.yml) -endif - - + make install_frontend + ifeq ($(build),1) + @echo 'Running docker compose up with build' + docker compose up $(if $(debug),-f docker-compose.debug.yml) --build + else + @echo 'Running docker compose up without build' + docker compose up $(if $(debug),-f docker-compose.debug.yml) + endif publish: make build From 8c1783eee59301b57523d26ca6166e159b132c47 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 21:48:23 -0300 Subject: [PATCH 19/26] fix: ignore AgentType enum for now --- src/backend/langflow/interface/agents/custom.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/langflow/interface/agents/custom.py index ad9f6c918..4032eab19 100644 --- a/src/backend/langflow/interface/agents/custom.py +++ b/src/backend/langflow/interface/agents/custom.py @@ -111,7 +111,8 @@ class InitializeAgent(AgentExecutor): return 
initialize_agent( tools=tools, llm=llm, - agent=agent, + # LangChain now uses Enum for agent, but we still support string + agent=agent, # type: ignore memory=memory, return_intermediate_steps=True, ) From 41f05b2e85f25fef66b2d0efbfe9c1ef6cf24684 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 22:07:40 -0300 Subject: [PATCH 20/26] fix: update to validate prompt endpoint --- src/backend/langflow/api/base.py | 7 ----- src/backend/langflow/api/validate.py | 7 ++--- tests/test_endpoints.py | 38 +++++++--------------------- 3 files changed, 13 insertions(+), 39 deletions(-) diff --git a/src/backend/langflow/api/base.py b/src/backend/langflow/api/base.py index 04ae0c3bb..11b31b004 100644 --- a/src/backend/langflow/api/base.py +++ b/src/backend/langflow/api/base.py @@ -27,10 +27,3 @@ class CodeValidationResponse(BaseModel): class PromptValidationResponse(BaseModel): input_variables: list - valid: bool - - -def validate_prompt(template): - # Extract the input variables from template - input_variables = extract_input_variables_from_prompt(template) - return input_variables, len(input_variables) > 0 diff --git a/src/backend/langflow/api/validate.py b/src/backend/langflow/api/validate.py index 81d7f8500..6dea45df0 100644 --- a/src/backend/langflow/api/validate.py +++ b/src/backend/langflow/api/validate.py @@ -5,8 +5,8 @@ from langflow.api.base import ( CodeValidationResponse, Prompt, PromptValidationResponse, - validate_prompt, ) +from langflow.graph.utils import extract_input_variables_from_prompt from langflow.utils.logger import logger from langflow.utils.validate import validate_code @@ -29,7 +29,8 @@ def post_validate_code(code: Code): @router.post("/prompt", status_code=200, response_model=PromptValidationResponse) def post_validate_prompt(prompt: Prompt): try: - input_variables, valid = validate_prompt(prompt.template) - return PromptValidationResponse(input_variables=input_variables, valid=valid) + input_variables = 
extract_input_variables_from_prompt(prompt.template) + return PromptValidationResponse(input_variables=input_variables) except Exception as e: + logger.exception(e) return HTTPException(status_code=500, detail=str(e)) diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py index 55f002dad..83f6c62b1 100644 --- a/tests/test_endpoints.py +++ b/tests/test_endpoints.py @@ -1,25 +1,6 @@ -import json -from typing import Dict +import pytest from fastapi.testclient import TestClient from langflow.interface.tools.constants import CUSTOM_TOOLS -from pathlib import Path - -import pytest - - -def test_post_predict(client: TestClient): - with open(Path(__file__).parent / "data" / "Build_error.json") as f: - data = f.read() - json_data = json.loads(data) - data: Dict = json_data["data"] - data["message"] = "I'm Bob" - response = client.post("/predict", json=data) - assert response.status_code == 200 - data["message"] = "What is my name?" - data["chatHistory"] = ["I'm Bob"] - response = client.post("/predict", json=data) - assert response.status_code == 200 - assert "Bob" in response.json()["result"] def test_get_all(client: TestClient): @@ -116,28 +97,27 @@ INVALID_PROMPT = "This is an invalid prompt without any input variable." 
def test_valid_prompt(client: TestClient): response = client.post("/validate/prompt", json={"template": VALID_PROMPT}) assert response.status_code == 200 - assert response.json() == {"input_variables": ["product"], "valid": True} + assert response.json() == {"input_variables": ["product"]} def test_invalid_prompt(client: TestClient): response = client.post("/validate/prompt", json={"template": INVALID_PROMPT}) assert response.status_code == 200 - assert response.json() == {"input_variables": [], "valid": False} + assert response.json() == {"input_variables": []} @pytest.mark.parametrize( - "prompt,expected_input_variables,expected_validity", + "prompt,expected_input_variables", [ - ("{color} is my favorite color.", ["color"], True), - ("The weather is {weather} today.", ["weather"], True), - ("This prompt has no variables.", [], False), - ("{a}, {b}, and {c} are variables.", ["a", "b", "c"], True), + ("{color} is my favorite color.", ["color"]), + ("The weather is {weather} today.", ["weather"]), + ("This prompt has no variables.", []), + ("{a}, {b}, and {c} are variables.", ["a", "b", "c"]), ], ) -def test_various_prompts(client, prompt, expected_input_variables, expected_validity): +def test_various_prompts(client, prompt, expected_input_variables): response = client.post("/validate/prompt", json={"template": prompt}) assert response.status_code == 200 assert response.json() == { "input_variables": expected_input_variables, - "valid": expected_validity, } From e7b4c2f4b274375b1569cde03bb091933cf4e8f5 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 22:10:30 -0300 Subject: [PATCH 21/26] lint --- src/backend/langflow/interface/agents/base.py | 3 ++- src/backend/langflow/interface/prompts/base.py | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/interface/agents/base.py b/src/backend/langflow/interface/agents/base.py index 94cab1411..e01737624 100644 --- a/src/backend/langflow/interface/agents/base.py +++ 
b/src/backend/langflow/interface/agents/base.py @@ -18,7 +18,8 @@ class AgentCreator(LangChainTypeCreator): self.type_dict = loading.AGENT_TO_CLASS # Add JsonAgent to the list of agents for name, agent in CUSTOM_AGENTS.items(): - self.type_dict[name] = agent + # TODO: validate AgentType + self.type_dict[name] = agent # type: ignore return self.type_dict def get_signature(self, name: str) -> Optional[Dict]: diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py index 462fd6257..42942a0dc 100644 --- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/langflow/interface/prompts/base.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Type from langchain import prompts from langchain.prompts import loading @@ -15,7 +15,7 @@ class PromptCreator(LangChainTypeCreator): type_name: str = "prompts" @property - def frontend_node_class(self) -> str: + def frontend_node_class(self) -> Type[PromptFrontendNode]: return PromptFrontendNode @property From cd95e2500c374657da2642b747f22e35ec58cf0e Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 22:14:34 -0300 Subject: [PATCH 22/26] fix: readded chatgpt and fixed wrapper name --- src/backend/langflow/config.yaml | 1 + tests/data/Openapi.json | 14 +++++++------- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index fafd3fb0d..61bc0486c 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -25,6 +25,7 @@ prompts: llms: - OpenAI - AzureOpenAI + - ChatOpenAI tools: - Search diff --git a/tests/data/Openapi.json b/tests/data/Openapi.json index 1d880aa9d..143dd6ad1 100644 --- a/tests/data/Openapi.json +++ b/tests/data/Openapi.json @@ -74,7 +74,7 @@ "multiline": false, "password": false, "name": "requests_wrapper", - "type": "RequestsWrapper", + "type": "TextRequestsWrapper", "list": false }, 
"_type": "OpenAPIToolkit" @@ -154,7 +154,7 @@ "y": 532.9920887988924 }, "data": { - "type": "RequestsWrapper", + "type": "TextRequestsWrapper", "node": { "template": { "headers": { @@ -178,11 +178,11 @@ "type": "ClientSession", "list": false }, - "_type": "RequestsWrapper" + "_type": "TextRequestsWrapper" }, "description": "Lightweight wrapper around requests library.", "base_classes": [ - "RequestsWrapper" + "TextRequestsWrapper" ] }, "id": "dndnode_34", @@ -405,11 +405,11 @@ }, { "source": "dndnode_34", - "sourceHandle": "RequestsWrapper|dndnode_34|RequestsWrapper", + "sourceHandle": "TextRequestsWrapper|dndnode_34|TextRequestsWrapper", "target": "dndnode_32", - "targetHandle": "RequestsWrapper|requests_wrapper|dndnode_32", + "targetHandle": "TextRequestsWrapper|requests_wrapper|dndnode_32", "className": "animate-pulse", - "id": "reactflow__edge-dndnode_34RequestsWrapper|dndnode_34|RequestsWrapper-dndnode_32RequestsWrapper|requests_wrapper|dndnode_32", + "id": "reactflow__edge-dndnode_34RequestsWrapper|dndnode_34|TextRequestsWrapper-dndnode_32RequestsWrapper|requests_wrapper|dndnode_32", "selected": false }, { From b0a1c89e47143543d2a9dfa9423b61b994eb2313 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 22:15:34 -0300 Subject: [PATCH 23/26] fix: make display_name optional --- src/backend/langflow/template/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/template/base.py b/src/backend/langflow/template/base.py index f6a5773ec..be19e468a 100644 --- a/src/backend/langflow/template/base.py +++ b/src/backend/langflow/template/base.py @@ -22,7 +22,7 @@ class TemplateFieldCreator(BaseModel, ABC): password: bool = False options: list[str] = [] name: str = "" - display_name: str = "" + display_name: Optional[str] def to_dict(self): result = self.dict() From 7fd0e7140b1b7d2b5b780b133c1adb27c90ba337 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 22:17:38 -0300 Subject: [PATCH 24/26] fix 
--- src/backend/langflow/template/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/template/base.py b/src/backend/langflow/template/base.py index be19e468a..0f3c816e3 100644 --- a/src/backend/langflow/template/base.py +++ b/src/backend/langflow/template/base.py @@ -22,7 +22,7 @@ class TemplateFieldCreator(BaseModel, ABC): password: bool = False options: list[str] = [] name: str = "" - display_name: Optional[str] + display_name: Optional[str] = None def to_dict(self): result = self.dict() From f3a56181ced908a9a03755dc604d8150dad109b2 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 22:21:19 -0300 Subject: [PATCH 25/26] fixes --- src/backend/langflow/api/base.py | 1 - src/backend/langflow/interface/base.py | 1 - src/backend/langflow/interface/chains/custom.py | 4 ++-- src/backend/langflow/interface/prompts/base.py | 1 - src/backend/langflow/interface/prompts/custom.py | 2 -- 5 files changed, 2 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/api/base.py b/src/backend/langflow/api/base.py index 11b31b004..66afb654b 100644 --- a/src/backend/langflow/api/base.py +++ b/src/backend/langflow/api/base.py @@ -1,6 +1,5 @@ from pydantic import BaseModel, validator -from langflow.graph.utils import extract_input_variables_from_prompt class Code(BaseModel): diff --git a/src/backend/langflow/interface/base.py b/src/backend/langflow/interface/base.py index 24293a478..2694fd7a5 100644 --- a/src/backend/langflow/interface/base.py +++ b/src/backend/langflow/interface/base.py @@ -1,4 +1,3 @@ -import abc from abc import ABC, abstractmethod from typing import Any, Dict, List, Optional, Type, Union diff --git a/src/backend/langflow/interface/chains/custom.py b/src/backend/langflow/interface/chains/custom.py index 8a191f594..98470d54b 100644 --- a/src/backend/langflow/interface/chains/custom.py +++ b/src/backend/langflow/interface/chains/custom.py @@ -81,7 +81,7 @@ class 
MidJourneyPromptChain(BaseCustomChain): Current conversation: {history} Human: {input} - AI:""" + AI:""" # noqa: E501 class TimeTravelGuideChain(BaseCustomChain): @@ -91,7 +91,7 @@ class TimeTravelGuideChain(BaseCustomChain): Current conversation: {history} Human: {input} - AI:""" + AI:""" # noqa: E501 CUSTOM_CHAINS: Dict[str, Type[ConversationChain]] = { diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py index 42942a0dc..6718d03a0 100644 --- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/langflow/interface/prompts/base.py @@ -1,7 +1,6 @@ from typing import Dict, List, Optional, Type from langchain import prompts -from langchain.prompts import loading from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator diff --git a/src/backend/langflow/interface/prompts/custom.py b/src/backend/langflow/interface/prompts/custom.py index e80d62312..b1dbef370 100644 --- a/src/backend/langflow/interface/prompts/custom.py +++ b/src/backend/langflow/interface/prompts/custom.py @@ -4,8 +4,6 @@ from langchain.prompts import PromptTemplate from pydantic import root_validator from langflow.graph.utils import extract_input_variables_from_prompt -from langflow.template.base import Template, TemplateField -from langflow.template.nodes import PromptTemplateNode # Steps to create a BaseCustomPrompt: # 1. Create a prompt template that endes with: From 0c003f00e309ab4ccf43889e52305a11746ccf57 Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Tue, 4 Apr 2023 22:23:55 -0300 Subject: [PATCH 26/26] format --- src/backend/langflow/api/base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/backend/langflow/api/base.py b/src/backend/langflow/api/base.py index 66afb654b..9096aff1b 100644 --- a/src/backend/langflow/api/base.py +++ b/src/backend/langflow/api/base.py @@ -1,7 +1,6 @@ from pydantic import BaseModel, validator - class Code(BaseModel): code: str