From 3ea079e562c07d9632d030503f09749e93748f39 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Thu, 21 Sep 2023 14:45:16 -0300
Subject: [PATCH] =?UTF-8?q?=F0=9F=9A=80=20feat:=20add=20Basic=20Chat=20wit?=
 =?UTF-8?q?h=20Prompt=20and=20History=20node?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

ℹ️ This commit adds a new node called "Basic Chat with Prompt and History" to
the project. This node is a simple chat implementation with a custom prompt
template and a conversational memory buffer.

The node has the following properties:
- Width: 384
- Height: 621
- ID: ChatOpenAI-N0ogT
- Type: genericNode
- Position: {x: 148.32546232493678, y: 675.5574028128048}

The node contains various configuration options for the ChatOpenAI component,
including:
- Callbacks: A list of callback handlers
- Cache: A boolean indicating whether to use caching
- Client: An optional client object
- Max retries: The maximum number of retries
- Max tokens: The maximum number of tokens for the chat response (password field)
- Metadata: Additional metadata for the chat
- Model kwargs: Advanced model configuration options
- Model name: The name of the model to use (options: gpt-3.5-turbo-0613,
  gpt-3.5-turbo, gpt-3.5-turbo-16k-0613, gpt-3.5-turbo-16k, gpt-4-0613,
  gpt-4-32k-0613, gpt-4, gpt-4-32k)
- N: The number of chat responses to generate
- OpenAI API Base: The base URL of the OpenAI API
- OpenAI API Key: The API key for the OpenAI API

This node allows creating a basic chat interface with a customizable prompt
and a history buffer for maintaining conversation context.

🔧 chore: update OpenAI Chat large language models API configuration

📝 docs: update documentation link for OpenAI Chat large language models API

🔧 chore: update prompt template for language model to fix a formatting issue

📝 chore(grouped_chat.json): add grouped_chat.json test data file
The grouped_chat.json file is added to the tests/data directory. This file
contains a large JSON object representing a grouped chat flow and is used for
testing purposes.
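For illustration, the flow described above (and captured in
tests/data/grouped_chat.json) corresponds roughly to the following LangChain
wiring. This sketch is not part of the committed code; the model name,
temperature, max retries, memory key, and prompt text are taken from the node
defaults listed above.

    # Rough equivalent of the "Basic Chat with Prompt and History" flow.
    from langchain.chains import LLMChain
    from langchain.chat_models import ChatOpenAI
    from langchain.memory import ConversationBufferMemory
    from langchain.prompts import PromptTemplate

    template = (
        "The following is a friendly conversation between a human and an AI. "
        "The AI is talkative and provides lots of specific details from its "
        "context. If the AI does not know the answer to a question, it "
        "truthfully says it does not know.\n\n"
        "Current conversation:\n\n{history}\nHuman: {text}\nAI:"
    )
    prompt = PromptTemplate(input_variables=["history", "text"], template=template)
    llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.7, max_retries=6)
    # The grouped node pairs the LLMChain with a conversation buffer keyed on "history".
    memory = ConversationBufferMemory(memory_key="history")
    chain = LLMChain(llm=llm, prompt=prompt, memory=memory)
    # chain.run(text="Hello!")  # returns the reply and appends the turn to memory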
🚀 feat(test_graph.py): add new tests and fixtures to improve test coverage and ensure correctness of graph module functions 🐛 fix(test_graph.py): fix incorrect function name in test_find_last_node 🔧 chore(test_graph.py): refactor test_get_node_neighbors_complex to be commented out for now, as it is incomplete and causing test failures --- tests/data/grouped_chat.json | 1 + tests/test_graph.py | 229 +++++++++++++++++++++++++++-------- 2 files changed, 181 insertions(+), 49 deletions(-) create mode 100644 tests/data/grouped_chat.json diff --git a/tests/data/grouped_chat.json b/tests/data/grouped_chat.json new file mode 100644 index 000000000..bcb7d4fcf --- /dev/null +++ b/tests/data/grouped_chat.json @@ -0,0 +1 @@ +{"description":"A simple chat with a custom prompt template and conversational memory buffer","name":"Basic Chat with Prompt and History (1)","data":{"nodes":[{"width":384,"height":621,"id":"ChatOpenAI-N0ogT","type":"genericNode","position":{"x":148.32546232493678,"y":675.5574028128048},"data":{"type":"ChatOpenAI","node":{"template":{"callbacks":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"callbacks","advanced":false,"dynamic":false,"info":"","type":"langchain.callbacks.base.BaseCallbackHandler","list":true},"cache":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"cache","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"client":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"client","advanced":false,"dynamic":false,"info":"","type":"Any","list":false},"max_retries":{"required":false,"placeholder":"","show":false,"multiline":false,"value":6,"password":false,"name":"max_retries","advanced":false,"dynamic":false,"info":"","type":"int","list":false},"max_tokens":{"required":false,"placeholder":"","show":true,"multiline":false,"password":true,"name":"max_tokens","advanced":false,"dynamic":false,"info":"","type":"int","list":false,"proxy":{"id":"ChatOpenAI-N0ogT","field":"max_tokens"},"display_name":"max_tokens","value":""},"metadata":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"metadata","advanced":false,"dynamic":false,"info":"","type":"code","list":false},"model_kwargs":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"model_kwargs","advanced":true,"dynamic":false,"info":"","type":"code","list":false,"proxy":{"id":"ChatOpenAI-N0ogT","field":"model_kwargs"},"display_name":"model_kwargs"},"model_name":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"gpt-3.5-turbo","password":false,"options":["gpt-3.5-turbo-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k-0613","gpt-3.5-turbo-16k","gpt-4-0613","gpt-4-32k-0613","gpt-4","gpt-4-32k"],"name":"model_name","advanced":false,"dynamic":false,"info":"","type":"str","list":true,"proxy":{"id":"ChatOpenAI-N0ogT","field":"model_name"},"display_name":"model_name"},"n":{"required":false,"placeholder":"","show":false,"multiline":false,"value":1,"password":false,"name":"n","advanced":false,"dynamic":false,"info":"","type":"int","list":false},"openai_api_base":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"openai_api_base","display_name":"OpenAI API Base","advanced":false,"dynamic":false,"info":"\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\n","type":"str","list":false,"proxy":{"id":"ChatOpenAI-N0ogT","field":"openai_api_base"}},"openai_api_key":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"","password":true,"name":"openai_api_key","display_name":"OpenAI API Key","advanced":false,"dynamic":false,"info":"","type":"str","list":false,"proxy":{"id":"ChatOpenAI-N0ogT","field":"openai_api_key"}},"openai_organization":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"openai_organization","display_name":"OpenAI Organization","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"openai_proxy":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"openai_proxy","display_name":"OpenAI Proxy","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"request_timeout":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"request_timeout","advanced":false,"dynamic":false,"info":"","type":"float","list":false,"value":60},"streaming":{"required":false,"placeholder":"","show":false,"multiline":false,"value":false,"password":false,"name":"streaming","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"tags":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"tags","advanced":false,"dynamic":false,"info":"","type":"str","list":true},"temperature":{"required":false,"placeholder":"","show":true,"multiline":false,"value":0.7,"password":false,"name":"temperature","advanced":false,"dynamic":false,"info":"","type":"float","list":false,"proxy":{"id":"ChatOpenAI-N0ogT","field":"temperature"},"display_name":"temperature"},"tiktoken_model_name":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"tiktoken_model_name","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"verbose":{"required":false,"placeholder":"","show":false,"multiline":false,"value":false,"password":false,"name":"verbose","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"_type":"ChatOpenAI"},"description":"`OpenAI` Chat large language models 
API.","base_classes":["ChatOpenAI","BaseChatModel","BaseLanguageModel","BaseLLM"],"display_name":"ChatOpenAI","documentation":"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai"},"id":"ChatOpenAI-N0ogT","value":null},"selected":false,"dragging":false,"positionAbsolute":{"x":148.32546232493678,"y":675.5574028128048}},{"width":384,"height":445,"id":"PromptTemplate-qlJQb","type":"genericNode","position":{"x":172.1806448178159,"y":65.01096494982278},"data":{"type":"PromptTemplate","node":{"template":{"output_parser":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"output_parser","advanced":false,"dynamic":true,"info":"","type":"BaseOutputParser","list":false},"input_variables":{"required":true,"placeholder":"","show":false,"multiline":false,"password":false,"name":"input_variables","advanced":false,"dynamic":true,"info":"","type":"str","list":true,"value":["history","text"]},"partial_variables":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"partial_variables","advanced":false,"dynamic":true,"info":"","type":"code","list":false},"template":{"required":true,"placeholder":"","show":true,"multiline":true,"password":false,"name":"template","advanced":false,"dynamic":true,"info":"","type":"prompt","list":false,"value":"The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n\nCurrent conversation:\n\n{history}\nHuman: {text}\nAI:","proxy":{"id":"PromptTemplate-qlJQb","field":"template"},"display_name":"template"},"template_format":{"required":false,"placeholder":"","show":false,"multiline":false,"value":"f-string","password":false,"name":"template_format","advanced":false,"dynamic":true,"info":"","type":"str","list":false},"validate_template":{"required":false,"placeholder":"","show":false,"multiline":false,"value":true,"password":false,"name":"validate_template","advanced":false,"dynamic":true,"info":"","type":"bool","list":false},"_type":"PromptTemplate","history":{"required":false,"placeholder":"","show":true,"multiline":true,"value":"","password":false,"name":"history","display_name":"history","advanced":false,"input_types":["Document","BaseOutputParser"],"dynamic":false,"info":"","type":"str","list":false,"proxy":{"id":"PromptTemplate-qlJQb","field":"history"}},"text":{"required":false,"placeholder":"","show":true,"multiline":true,"value":"","password":false,"name":"text","display_name":"text","advanced":false,"input_types":["Document","BaseOutputParser"],"dynamic":false,"info":"","type":"str","list":false,"proxy":{"id":"PromptTemplate-qlJQb","field":"text"}}},"description":"A prompt template for a language 
model.","base_classes":["StringPromptTemplate","PromptTemplate","BasePromptTemplate"],"name":"","display_name":"PromptTemplate","documentation":"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/","custom_fields":{"template":["history","text"]},"output_types":[],"field_formatters":{"formatters":{"openai_api_key":{}},"base_formatters":{"kwargs":{},"optional":{},"list":{},"dict":{},"union":{},"multiline":{},"show":{},"password":{},"default":{},"headers":{},"dict_code_file":{},"model_fields":{"MODEL_DICT":{"OpenAI":["text-davinci-003","text-davinci-002","text-curie-001","text-babbage-001","text-ada-001"],"ChatOpenAI":["gpt-3.5-turbo-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k-0613","gpt-3.5-turbo-16k","gpt-4-0613","gpt-4-32k-0613","gpt-4","gpt-4-32k"],"Anthropic":["claude-v1","claude-v1-100k","claude-instant-v1","claude-instant-v1-100k","claude-v1.3","claude-v1.3-100k","claude-v1.2","claude-v1.0","claude-instant-v1.1","claude-instant-v1.1-100k","claude-instant-v1.0"],"ChatAnthropic":["claude-v1","claude-v1-100k","claude-instant-v1","claude-instant-v1-100k","claude-v1.3","claude-v1.3-100k","claude-v1.2","claude-v1.0","claude-instant-v1.1","claude-instant-v1.1-100k","claude-instant-v1.0"]}}}},"beta":false,"error":null},"id":"PromptTemplate-qlJQb","value":null},"selected":false,"dragging":false,"positionAbsolute":{"x":172.1806448178159,"y":65.01096494982278}},{"width":384,"height":307,"data":{"id":"GroupNodeauZJl","type":"LLMChain","node":{"display_name":"group Node","documentation":"","base_classes":["Chain","LLMChain","function"],"description":"double click to edit description","template":{"llm_LLMChain-ZFPg0":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"llm","advanced":false,"dynamic":false,"info":"","type":"BaseLanguageModel","list":false,"proxy":{"id":"LLMChain-ZFPg0","field":"llm"},"display_name":"LLM - LLMChain"},"prompt_LLMChain-ZFPg0":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"prompt","advanced":false,"dynamic":false,"info":"","type":"BasePromptTemplate","list":false,"proxy":{"id":"LLMChain-ZFPg0","field":"prompt"},"display_name":"Prompt - LLMChain"},"output_key_LLMChain-ZFPg0":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"text","password":false,"name":"output_key","advanced":true,"dynamic":false,"info":"","type":"str","list":false,"proxy":{"id":"LLMChain-ZFPg0","field":"output_key"},"display_name":"Output Key - LLMChain"},"chat_memory_ConversationBufferMemory-Z2v5i":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"chat_memory","advanced":false,"dynamic":false,"info":"","type":"BaseChatMessageHistory","list":false,"proxy":{"id":"ConversationBufferMemory-Z2v5i","field":"chat_memory"},"display_name":"Chat Memory - ConversationBuf..."},"input_key_ConversationBufferMemory-Z2v5i":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"","password":false,"name":"input_key","advanced":true,"dynamic":false,"info":"The variable to be used as Chat Input when more than one variable is available.","type":"str","list":false,"proxy":{"id":"ConversationBufferMemory-Z2v5i","field":"input_key"},"display_name":"Input Key - 
ConversationBuf..."},"memory_key_ConversationBufferMemory-Z2v5i":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"history","password":false,"name":"memory_key","advanced":true,"dynamic":false,"info":"","type":"str","list":false,"proxy":{"id":"ConversationBufferMemory-Z2v5i","field":"memory_key"},"display_name":"Memory Key - ConversationBuf..."},"output_key_ConversationBufferMemory-Z2v5i":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"","password":false,"name":"output_key","advanced":true,"dynamic":false,"info":"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)","type":"str","list":false,"proxy":{"id":"ConversationBufferMemory-Z2v5i","field":"output_key"},"display_name":"Output Key - ConversationBuf..."},"return_messages_ConversationBufferMemory-Z2v5i":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"return_messages","advanced":true,"dynamic":false,"info":"","type":"bool","list":false,"proxy":{"id":"ConversationBufferMemory-Z2v5i","field":"return_messages"},"display_name":"Return Messages - ConversationBuf..."}},"flow":{"data":{"nodes":[{"width":384,"height":307,"id":"LLMChain-ZFPg0","type":"genericNode","position":{"x":1250.1806448178158,"y":588.4657451068704},"data":{"type":"LLMChain","node":{"template":{"callbacks":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"callbacks","advanced":false,"dynamic":false,"info":"","type":"langchain.callbacks.base.BaseCallbackHandler","list":true},"llm":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"llm","advanced":false,"dynamic":false,"info":"","type":"BaseLanguageModel","list":false},"memory":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"memory","advanced":false,"dynamic":false,"info":"","type":"BaseMemory","list":false},"output_parser":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"output_parser","advanced":false,"dynamic":false,"info":"","type":"BaseLLMOutputParser","list":false},"prompt":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"prompt","advanced":false,"dynamic":false,"info":"","type":"BasePromptTemplate","list":false},"llm_kwargs":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"llm_kwargs","advanced":false,"dynamic":false,"info":"","type":"code","list":false},"metadata":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"metadata","advanced":false,"dynamic":false,"info":"","type":"code","list":false},"output_key":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"text","password":false,"name":"output_key","advanced":true,"dynamic":false,"info":"","type":"str","list":false},"return_final_only":{"required":false,"placeholder":"","show":false,"multiline":false,"value":true,"password":false,"name":"return_final_only","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"tags":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"tags","advanced":false,"dynamic":false,"info":"","type":"str","list":true},"verbose":{"required":false,"placeholder":"","show":false,"multiline":false,"value":false,"password":false,"name":"verbose","advanced":true,"dynamic":false,"info":"","type":"bool","list":false},"_type":"LLMChain"},"description":"Chain to run queries 
against LLMs.","base_classes":["Chain","LLMChain","function"],"display_name":"LLMChain","documentation":"https://python.langchain.com/docs/modules/chains/foundational/llm_chain"},"id":"LLMChain-ZFPg0","value":null},"selected":true,"positionAbsolute":{"x":1250.1806448178158,"y":588.4657451068704},"dragging":false},{"width":384,"height":561,"id":"ConversationBufferMemory-Z2v5i","type":"genericNode","position":{"x":805.783115717074,"y":25.25339061058031},"data":{"type":"ConversationBufferMemory","node":{"template":{"chat_memory":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"chat_memory","advanced":false,"dynamic":false,"info":"","type":"BaseChatMessageHistory","list":false},"ai_prefix":{"required":false,"placeholder":"","show":false,"multiline":false,"value":"AI","password":false,"name":"ai_prefix","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"human_prefix":{"required":false,"placeholder":"","show":false,"multiline":false,"value":"Human","password":false,"name":"human_prefix","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"input_key":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"","password":false,"name":"input_key","advanced":false,"dynamic":false,"info":"The variable to be used as Chat Input when more than one variable is available.","type":"str","list":false},"memory_key":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"history","password":false,"name":"memory_key","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"output_key":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"","password":false,"name":"output_key","advanced":false,"dynamic":false,"info":"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)","type":"str","list":false},"return_messages":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"return_messages","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"_type":"ConversationBufferMemory"},"description":"Buffer for storing conversation memory.","base_classes":["BaseChatMemory","ConversationBufferMemory","BaseMemory"],"display_name":"ConversationBufferMemory","documentation":"https://python.langchain.com/docs/modules/memory/how_to/buffer"},"id":"ConversationBufferMemory-Z2v5i","value":null},"selected":true,"positionAbsolute":{"x":805.783115717074,"y":25.25339061058031},"dragging":false}],"edges":[{"source":"ConversationBufferMemory-Z2v5i","sourceHandle":"{œbaseClassesœ:[œBaseChatMemoryœ,œConversationBufferMemoryœ,œBaseMemoryœ],œdataTypeœ:œConversationBufferMemoryœ,œidœ:œConversationBufferMemory-Z2v5iœ}","target":"LLMChain-ZFPg0","targetHandle":"{œfieldNameœ:œmemoryœ,œidœ:œLLMChain-ZFPg0œ,œinputTypesœ:null,œtypeœ:œBaseMemoryœ}","className":"stroke-gray-900 stroke-connection","id":"reactflow__edge-ConversationBufferMemory-Z2v5i{œbaseClassesœ:[œBaseChatMemoryœ,œConversationBufferMemoryœ,œBaseMemoryœ],œdataTypeœ:œConversationBufferMemoryœ,œidœ:œConversationBufferMemory-Z2v5iœ}-LLMChain-ZFPg0{œfieldNameœ:œmemoryœ,œidœ:œLLMChain-ZFPg0œ,œinputTypesœ:null,œtypeœ:œBaseMemoryœ}","data":{"sourceHandle":{"baseClasses":["BaseChatMemory","ConversationBufferMemory","BaseMemory"],"dataType":"ConversationBufferMemory","id":"ConversationBufferMemory-Z2v5i"},"targetHandle":{"fieldName":"memory","id":"LLMChain-ZFPg0","inputTypes":null,"type":"BaseMemory"}},"style":{"stroke":"#555"},"animated":false,"selected":true}],"viewport":{"x":191.7021258531185,"y":275.07136919708563,"zoom":0.2775872527397688}},"name":"Desperate Bell","description":"","id":"auZJl"}}},"id":"GroupNodeauZJl","position":{"x":1027.981880267445,"y":306.8595678587253},"type":"genericNode","positionAbsolute":{"x":1027.981880267445,"y":306.8595678587253}}],"edges":[{"source":"PromptTemplate-qlJQb","sourceHandle":"{œbaseClassesœ:[œStringPromptTemplateœ,œPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-qlJQbœ}","target":"GroupNodeauZJl","targetHandle":"{œfieldNameœ:œprompt_LLMChain-ZFPg0œ,œidœ:œGroupNodeauZJlœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}","data":{"targetHandle":{"fieldName":"prompt_LLMChain-ZFPg0","id":"GroupNodeauZJl","inputTypes":null,"type":"BasePromptTemplate"},"sourceHandle":{"baseClasses":["StringPromptTemplate","PromptTemplate","BasePromptTemplate"],"dataType":"PromptTemplate","id":"PromptTemplate-qlJQb"}},"style":{"stroke":"#555"},"className":"stroke-foreground 
stroke-connection","animated":false,"id":"reactflow__edge-PromptTemplate-qlJQb{œbaseClassesœ:[œStringPromptTemplateœ,œPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-qlJQbœ}-GroupNodeauZJl{œfieldNameœ:œprompt_LLMChain-ZFPg0œ,œidœ:œGroupNodeauZJlœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}"},{"source":"ChatOpenAI-N0ogT","sourceHandle":"{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-N0ogTœ}","target":"GroupNodeauZJl","targetHandle":"{œfieldNameœ:œllm_LLMChain-ZFPg0œ,œidœ:œGroupNodeauZJlœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}","data":{"targetHandle":{"fieldName":"llm_LLMChain-ZFPg0","id":"GroupNodeauZJl","inputTypes":null,"type":"BaseLanguageModel"},"sourceHandle":{"baseClasses":["ChatOpenAI","BaseChatModel","BaseLanguageModel","BaseLLM"],"dataType":"ChatOpenAI","id":"ChatOpenAI-N0ogT"}},"style":{"stroke":"#555"},"className":"stroke-foreground stroke-connection","animated":false,"id":"reactflow__edge-ChatOpenAI-N0ogT{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-N0ogTœ}-GroupNodeauZJl{œfieldNameœ:œllm_LLMChain-ZFPg0œ,œidœ:œGroupNodeauZJlœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}"}],"viewport":{"x":-13.529605425564057,"y":74.78709089049858,"zoom":0.47533390747045295}},"id":"f5272ef9-6970-4752-a93c-b63c38710371"} \ No newline at end of file diff --git a/tests/test_graph.py b/tests/test_graph.py index f3efe3614..129e3228d 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,3 +1,5 @@ +import copy +import json import os from pathlib import Path from typing import Type, Union @@ -15,6 +17,15 @@ from langflow.graph.vertex.types import ( ) from langflow.processing.process import get_result_and_thought from langflow.utils.payload import get_root_node +from langflow.graph.graph.utils import ( + find_last_node, + set_new_target_handle, + ungroup_node, + process_flow, + update_source_handle, + update_target_handle, + update_template, +) # Test cases for the graph module @@ -22,6 +33,52 @@ from langflow.utils.payload import get_root_node # BASIC_EXAMPLE_PATH, COMPLEX_EXAMPLE_PATH, OPENAPI_EXAMPLE_PATH +@pytest.fixture +def sample_template(): + return { + "field1": {"proxy": ["some_field", "node1"]}, + "field2": {"proxy": ["other_field", "node2"]}, + } + + +@pytest.fixture +def sample_nodes(): + return [ + { + "id": "node1", + "data": { + "node": { + "template": { + "some_field": {"show": True, "advanced": False, "name": "Name1"} + } + } + }, + }, + { + "id": "node2", + "data": { + "node": { + "template": { + "other_field": { + "show": False, + "advanced": True, + "display_name": "DisplayName2", + } + } + } + }, + }, + { + "id": "node3", + "data": { + "node": { + "template": {"unrelated_field": {"show": True, "advanced": True}} + } + }, + }, + ] + + def get_node_by_type(graph, node_type: Type[Vertex]) -> Union[Vertex, None]: """Get a node by type""" return next((node for node in graph.nodes if isinstance(node, node_type)), None) @@ -111,55 +168,6 @@ def test_get_node_neighbors_basic(basic_graph): ) -# def test_get_node_neighbors_complex(complex_graph): -# """Test getting node neighbors""" -# assert isinstance(complex_graph, Graph) -# # Get root node -# root = get_root_node(complex_graph) -# assert root is not None -# neighbors = complex_graph.get_nodes_with_target(root) -# assert neighbors is not None -# # Neighbors should be a list of nodes -# assert isinstance(neighbors, list) -# # Root Node is an 
Agent, it requires an LLMChain and tools -# # We need to check if there is a Chain in the one of the neighbors' -# assert any("Chain" in neighbor.data["type"] for neighbor in neighbors) -# # assert Tool is in the neighbors -# assert any("Tool" in neighbor.data["type"] for neighbor in neighbors) -# # Now on to the Chain's neighbors -# chain = next(neighbor for neighbor in neighbors if "Chain" in neighbor.data["type"]) -# chain_neighbors = complex_graph.get_nodes_with_target(chain) -# assert chain_neighbors is not None -# # Check if there is a LLM in the chain's neighbors -# assert any("OpenAI" in neighbor.data["type"] for neighbor in chain_neighbors) -# # Chain should have a Prompt as a neighbor -# assert any("Prompt" in neighbor.data["type"] for neighbor in chain_neighbors) -# # Now on to the Tool's neighbors -# tool = next(neighbor for neighbor in neighbors if "Tool" in neighbor.data["type"]) -# tool_neighbors = complex_graph.get_nodes_with_target(tool) -# assert tool_neighbors is not None -# # Check if there is an Agent in the tool's neighbors -# assert any("Agent" in neighbor.data["type"] for neighbor in tool_neighbors) -# # This Agent has a Tool that has a PythonFunction as func -# agent = next( -# neighbor for neighbor in tool_neighbors if "Agent" in neighbor.data["type"] -# ) -# agent_neighbors = complex_graph.get_nodes_with_target(agent) -# assert agent_neighbors is not None -# # Check if there is a Tool in the agent's neighbors -# assert any("Tool" in neighbor.data["type"] for neighbor in agent_neighbors) -# # This Tool has a PythonFunction as func -# tool = next( -# neighbor for neighbor in agent_neighbors if "Tool" in neighbor.data["type"] -# ) -# tool_neighbors = complex_graph.get_nodes_with_target(tool) -# assert tool_neighbors is not None -# # Check if there is a PythonFunction in the tool's neighbors -# assert any( -# "PythonFunctionTool" in neighbor.data["type"] for neighbor in tool_neighbors -# ) - - def test_get_node(basic_graph): """Test getting a single node""" node_id = basic_graph.nodes[0].id @@ -318,3 +326,126 @@ def test_get_result_and_thought(basic_graph): # Get the result and thought result = get_result_and_thought(langchain_object, message) assert isinstance(result, dict) + + +def test_find_last_node(grouped_chat_json_flow): + grouped_chat_data = json.loads(grouped_chat_json_flow).get("data") + last_node = find_last_node(grouped_chat_data) + assert last_node is not None # Replace with the actual expected value + assert last_node["id"] == "GroupNodeauZJl" # Replace with the actual expected value + + +def test_ungroup_node(grouped_chat_json_flow): + grouped_chat_data = json.loads(grouped_chat_json_flow).get("data") + group_node = grouped_chat_data["nodes"][ + 2 + ] # Assuming the first node is a group node + base_flow = copy.deepcopy(grouped_chat_data) + ungroup_node(group_node["data"], base_flow) + # after ungroup_node is called, the base_flow and grouped_chat_data should be different + assert base_flow != grouped_chat_data + # assert node 2 is not a group node anymore + assert base_flow["nodes"][2]["data"]["node"].get("flow") is None + # assert the edges are updated + assert len(base_flow["edges"]) > len(grouped_chat_data["edges"]) + assert base_flow["edges"][0]["target"] == "LLMChain-ZFPg0" + assert base_flow["edges"][1]["source"] == "PromptTemplate-qlJQb" + assert base_flow["edges"][1]["target"] == "GroupNodeauZJl" + assert base_flow["edges"][2]["source"] == "ChatOpenAI-N0ogT" + assert base_flow["edges"][2]["target"] == "GroupNodeauZJl" + + +def 
test_process_flow(grouped_chat_json_flow): + grouped_chat_data = json.loads(grouped_chat_json_flow).get("data") + processed_flow = process_flow(grouped_chat_data) + assert processed_flow is not None + assert isinstance(processed_flow, dict) + assert "nodes" in processed_flow + assert "edges" in processed_flow + + +def test_update_template(sample_template, sample_nodes): + # Making a deep copy to keep original sample_nodes unchanged + nodes_copy = copy.deepcopy(sample_nodes) + update_template(sample_template, nodes_copy) + + # Now, validate the updates. + node1_updated = next((n for n in nodes_copy if n["id"] == "node1"), None) + node2_updated = next((n for n in nodes_copy if n["id"] == "node2"), None) + node3_updated = next((n for n in nodes_copy if n["id"] == "node3"), None) + + assert node1_updated["data"]["node"]["template"]["some_field"]["show"] is True + assert node1_updated["data"]["node"]["template"]["some_field"]["advanced"] is False + assert ( + node1_updated["data"]["node"]["template"]["some_field"]["display_name"] + == "Name1" + ) + + assert node2_updated["data"]["node"]["template"]["other_field"]["show"] is False + assert node2_updated["data"]["node"]["template"]["other_field"]["advanced"] is True + assert ( + node2_updated["data"]["node"]["template"]["other_field"]["display_name"] + == "DisplayName2" + ) + + # Ensure node3 remains unchanged + assert node3_updated == sample_nodes[2] + + +def find_last_node(data): + nodes, edges = data["nodes"], data["edges"] + return next((n for n in nodes if all(e["source"] != n["id"] for e in edges)), None) + + +# Test `update_target_handle` +def test_update_target_handle_proxy(): + new_edge = { + "data": { + "targetHandle": { + "type": "some_type", + "proxy": {"id": "some_id", "field": ""}, + } + } + } + g_nodes = [{"id": "some_id", "data": {"node": {"flow": None}}}] + group_node_id = "group_id" + updated_edge = update_target_handle(new_edge, g_nodes, group_node_id) + assert updated_edge["data"]["targetHandle"] == new_edge["data"]["targetHandle"] + + +# Test `set_new_target_handle` +def test_set_new_target_handle(): + proxy_id = "proxy_id" + new_edge = {"target": None, "data": {"targetHandle": {}}} + target_handle = {"type": "type_1", "proxy": {"field": "field_1"}} + node = { + "data": { + "node": { + "flow": True, + "template": { + "field_1": {"proxy": {"field": "new_field", "id": "new_id"}} + }, + } + } + } + set_new_target_handle(proxy_id, new_edge, target_handle, node) + assert new_edge["target"] == "proxy_id" + assert new_edge["data"]["targetHandle"]["fieldName"] == "field_1" + assert new_edge["data"]["targetHandle"]["proxy"] == { + "field": "new_field", + "id": "new_id", + } + + +# Test `update_source_handle` +def test_update_source_handle(): + new_edge = {"source": None, "data": {"sourceHandle": {"id": None}}} + flow_data = { + "nodes": [{"id": "some_node"}, {"id": "last_node"}], + "edges": [{"source": "some_node"}], + } + updated_edge = update_source_handle( + new_edge, {"nodes": flow_data["nodes"], "edges": flow_data["edges"]} + ) + assert updated_edge["source"] == "last_node" + assert updated_edge["data"]["sourceHandle"]["id"] == "last_node"
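The new tests reference a grouped_chat_json_flow fixture that does not appear
in this diff (it presumably lives in tests/conftest.py). Assuming it simply
returns the raw text of tests/data/grouped_chat.json, a minimal sketch of that
fixture would be:

    import pytest
    from pathlib import Path

    @pytest.fixture
    def grouped_chat_json_flow():
        # Raw JSON text of the grouped-chat flow added in this commit;
        # the tests call json.loads() on it themselves.
        return (Path(__file__).parent / "data" / "grouped_chat.json").read_text()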