🐛 fix(flows.py): change Flow.from_orm() to Flow.model_validate() to ensure data integrity and validation 🐛 fix(users.py): remove unused import statements to improve code cleanliness and maintainability 🐛 fix(users.py): change User.from_orm() to User.model_validate() to ensure data integrity and validation 🐛 fix(LLMChain.py): remove unused import statements to improve code cleanliness and maintainability 🐛 fix(LLMChain.py): remove unnecessary line breaks to improve code readability 🐛 fix(base.py): remove unused import statements to improve code cleanliness and maintainability 🐛 fix(base.py): remove unnecessary line breaks to improve code readability 🐛 fix(base.py): fix condition to append vertex_id to top_level_vertices to avoid appending non-string values 🐛 fix(vertex/base.py): add parent_node_id attribute to Vertex class to support hierarchical graph structures 🐛 fix(base.py): remove unused import statements to improve code cleanliness and maintainability 🚀 feat(GroupTest): add a new node for a simple chat with a custom prompt template and conversational memory buffer ℹ️ This commit adds a new node to the GroupTest project. 
The node is a genericNode with the following properties: - Width: 384 - Height: 621 - ID: ChatOpenAI-rUJ1b - Type: genericNode - Position: x: 170.87326389541306, y: 465.8628482073749 - Data: - Type: ChatOpenAI - Node: - Template: - Callbacks: - Required: false - Placeholder: "" - Show: false - Multiline: false - Password: false - Name: callbacks - Advanced: false - Dynamic: false - Info: "" - Type: langchain.callbacks.base.BaseCallbackHandler - List: true - Cache: - Required: false - Placeholder: "" - Show: false - Multiline: false - Password: false - Name: cache - Advanced: false - Dynamic: false - Info: "" - Type: bool - List: false - Client: - Required: false - Placeholder: "" - Show: false - Multiline: false - Password: false - Name: client - Advanced: false - Dynamic: false - Info: "" - Type: Any - List: false - Max Retries: - Required: false - Placeholder: "" - Show: false - Multiline: false - Value: 6 - Password: false - Name: max_retries - Advanced: false - Dynamic: false - Info: "" - Type: int - List: false - Max Tokens: - Required: false - Placeholder: "" - Show: true - Multiline: false - Password: true - Name: max_tokens - Advanced: false - Dynamic: false - Info: "" - Type: int - List: false 🔧 chore: fix formatting issue in code 📝 docs: update documentation link for `OpenAI` Chat large language models API 🔧 chore: update prompt template configuration in LLMChain node 📝 docs: add documentation link for PromptTemplate in the description 📝 chore(grouped_chat.json): add grouped_chat.json test data file This commit adds the `grouped_chat.json` file to the `tests/data` directory. The file contains a JSON object representing grouped chat data. This file is necessary for testing and will be used in the test suite. 📝 chore(one_group_chat.json): add one_group_chat.json test data file This commit adds the one_group_chat.json file, which contains a simple chat with a custom prompt template and conversational memory buffer. This file is used for testing purposes. 
🔧 chore: update node configuration for ConversationBufferMemory, ChatOpenAI, and LLMChain 📝 docs: update documentation links for ConversationBufferMemory and LLMChain 🔧 fix: update prompt template in LLMChain to include conversation history and text input variables 🔧 fix: update ConversationBufferMemory node to include description and documentation link 🎨 style: format and organize code for better readability and maintainability 🆕 feat(Vector Store): add Vector Store agent and Vector Store Info node The Vector Store agent allows querying a Vector Store. It can be used to construct an agent from a Vector Store. The Vector Store Info node provides information about a Vector Store. The Vector Store agent and Vector Store Info node are added to support the functionality of querying a Vector Store. 🔧 chore: update configuration options in the OpenAI API client The configuration options in the OpenAI API client have been updated. This commit includes changes to the following options: - `max_tokens`: Removed the `required` flag and set `show` to `true` - `metadata`: Set `show` to `false` - `model_kwargs`: Set `show` to `true` and `advanced` to `true` - `model_name`: Added options `gpt-3.5-turbo-0613`, `gpt-3.5-turbo`, `gpt-3.5-turbo-16k-0613`, `gpt-3.5-turbo-16k`, `gpt-4-0613`, `gpt-4-32k-0613`, `gpt-4`, `gpt-4-32k` - `n`: Removed the `show` flag - `openai_api_base`: Added `display_name` as "OpenAI API Base" and updated `info` with additional details - `openai_api_key`: Removed the `required` flag and set `show` to `true` - `openai_organization`: Removed the `show` flag - `openai_proxy`: Removed the `show` flag - `request_timeout`: Removed the `show` flag - `streaming`: Removed the `show` flag - `tags`: Removed the `show` flag - `temperature`: Removed the `show` flag - `tiktoken_model_name`: Removed the `show` flag - `verbose`: Removed the `show` flag 🔧 chore: update configuration for ChatOpenAI and Chroma nodes The configuration for the ChatOpenAI and Chroma nodes has 
been updated. This includes changes to the allowed_special, disallowed_special, chunk_size, client, deployment, embedding_ctx_length, and max_retries properties. These changes were made to improve the functionality and performance of the nodes. 🔧 chore(config): update OpenAIEmbeddings-YwSvx configuration options The OpenAIEmbeddings-YwSvx configuration options have been updated to include new fields and values. This commit updates the configuration file to reflect these changes. 🔧 chore(config): update configuration options for OpenAIEmbeddings and Chroma 🔧 chore(config): update configuration options for OpenAIEmbeddings and Chroma to improve flexibility and customization 🔧 chore: update configuration options for RecursiveCharacterTextSplitter and WebBaseLoader in flow The configuration options for RecursiveCharacterTextSplitter and WebBaseLoader in the flow have been updated. The changes include: - Persist Directory - Chroma: The persist directory option for Chroma has been modified. - Search Kwargs - Chroma: The search kwargs option for Chroma has been modified. - Chunk Overlap - RecursiveCharacterTextSplitter: The chunk overlap option for RecursiveCharacterTextSplitter has been modified. - Chunk Size - RecursiveCharacterTextSplitter: The chunk size option for RecursiveCharacterTextSplitter has been modified. - Separator Type - RecursiveCharacterTextSplitter: The separator type option for RecursiveCharacterTextSplitter has been modified. - Separator - RecursiveCharacterTextSplitter: The separator option for RecursiveCharacterTextSplitter has been modified. - Metadata - WebBaseLoader: The metadata option for WebBaseLoader has been modified. - Web Page - WebBaseLoader: The web page option for WebBaseLoader has been modified. 
🔧 chore(OpenAIEmbeddings): update OpenAIEmbeddings configuration options The OpenAIEmbeddings node configuration options have been updated to include the following changes: - `allowed_special` and `disallowed_special` now accept a list of values instead of a single value - `chunk_size` now accepts an integer value - `deployment` now accepts a string value - `embedding_ctx_length` now accepts an integer value - `headers` now supports multiline values - `max_retries` now accepts an integer value - `model` now accepts a string value - `model_kwargs` now accepts code input - `openai_api_base` now accepts a password input - `openai_api_key` now accepts a password input - `openai_api_type` now accepts a password input - `openai_api_version` now accepts a password input - `openai_organization` has been removed from the configuration options 🔧 chore: update OpenAIEmbeddings configuration options in the UI The OpenAIEmbeddings configuration options in the UI have been updated to include the following changes: - Added the `openai_organization` option to specify the OpenAI organization. - Added the `openai_proxy` option to configure the OpenAI proxy. - Added the `request_timeout` option to set the request timeout. - Added the `show_progress_bar` option to control the visibility of the progress bar. - Changed the `tiktoken_model_name` option to be a password field. - Updated the documentation link for OpenAIEmbeddings. This commit updates the configuration options to improve the usability and functionality of the OpenAIEmbeddings module in the UI. 🔧 chore: clean up unused code and remove unnecessary fields in the configuration file 📝 docs: update documentation link for the Chroma vectorstore module 🔧 chore: update configuration options for RecursiveCharacterTextSplitter in flow The configuration options for the RecursiveCharacterTextSplitter node in the flow have been updated. The following changes were made: - `chunk_size` option: The default value has been changed to 1000. 
- `separator_type` option: The available options have been updated to include "Text", "cpp", "go", "html", "java", "js", "latex", "markdown", "php", "proto", "python", "rst", "ruby", "rust", "scala", "sol", and "swift". - `separators` option: The default value has been changed to ".". These changes were made to improve the usability and flexibility of the RecursiveCharacterTextSplitter node in the flow. 📝 chore(vector_store_grouped.json): add vector_store_grouped.json test data file 🔀 chore(vector_store_grouped.json): add vector_store_grouped.json test data file 🔨 refactor(test_graph.py): reformat import statements and improve code readability 🔨 refactor(test_prompts_template.py): change dynamic attribute to True for input variables, output parser, partial variables, template, and validate template 🔨 refactor(test_template.py): reformat import statements and remove duplicate import of BaseModel 🔨 refactor(test_template.py): update value for options in format_dict test
285 lines
7.7 KiB
Python
285 lines
7.7 KiB
Python
import importlib
|
|
from typing import Dict, List, Optional
|
|
|
|
import pytest
|
|
from pydantic import BaseModel
|
|
|
|
from langflow.utils.constants import CHAT_OPENAI_MODELS, OPENAI_MODELS
|
|
from langflow.utils.util import (
|
|
build_template_from_class,
|
|
build_template_from_function,
|
|
format_dict,
|
|
get_base_classes,
|
|
get_default_factory,
|
|
)
|
|
|
|
|
|
# Dummy classes for testing purposes
|
|
class Parent(BaseModel):
    """Parent Class"""

    # Single required field; used by the tests below to verify that
    # template building and get_base_classes see inherited members.
    parent_field: str
|
|
|
|
|
|
class Child(Parent):
    """Child Class"""

    # Adds one field on top of Parent so tests can distinguish the
    # subclass from its ancestor in base_classes results.
    child_field: int
|
|
|
|
|
|
class ExampleClass1(BaseModel):
    """Example class 1."""

    # NOTE(review): overriding __init__ on a pydantic BaseModel without
    # calling super().__init__() and without declaring `data` as a model
    # field is unusual — presumably only the signature/docstring is
    # inspected by build_template_from_function; confirm instantiation
    # actually succeeds at runtime.
    def __init__(self, data: Optional[List[int]] = None):
        # Falls back to [1, 2, 3] for None AND for an explicitly-passed
        # empty list, because `or` tests truthiness.
        self.data = data or [1, 2, 3]
|
|
|
|
|
|
class ExampleClass2(BaseModel):
    """Example class 2."""

    # NOTE(review): same caveat as ExampleClass1 — custom __init__ on a
    # pydantic BaseModel with no declared `data` field and no
    # super().__init__() call; verify this is only introspected, not
    # validated, by the code under test.
    def __init__(self, data: Optional[Dict[str, int]] = None):
        # Falls back to the sample mapping for None or an empty dict
        # (truthiness test via `or`).
        self.data = data or {"a": 1, "b": 2, "c": 3}
|
|
|
|
|
|
def example_loader_1() -> ExampleClass1:
    """Example loader function 1."""
    # Factory registered in the type->loader dict used by
    # test_build_template_from_function.
    instance = ExampleClass1()
    return instance
|
|
|
|
|
|
def example_loader_2() -> ExampleClass2:
    """Example loader function 2."""
    # Companion factory to example_loader_1; returns the dict-backed
    # example class instead of the list-backed one.
    instance = ExampleClass2()
    return instance
|
|
|
|
|
|
def test_build_template_from_function():
    """build_template_from_function: happy path, add_function flag, unknown name."""
    loaders = {
        "example1": example_loader_1,
        "example2": example_loader_2,
    }

    # A known class name yields a template dict with all expected sections.
    template = build_template_from_function("ExampleClass1", loaders)
    assert template is not None
    for section in ("template", "description", "base_classes"):
        assert section in template

    # add_function=True advertises Callable among the base classes.
    template_with_fn = build_template_from_function(
        "ExampleClass1", loaders, add_function=True
    )
    assert template_with_fn is not None
    assert "Callable" in template_with_fn["base_classes"]

    # An unknown name raises a descriptive ValueError.
    with pytest.raises(ValueError, match=r".* not found"):
        build_template_from_function("NonExistent", loaders)
|
|
|
|
|
|
# Test build_template_from_class
|
|
def test_build_template_from_class():
    """build_template_from_class resolves inheritance and rejects unknown names."""
    registry: Dict[str, type] = {"parent": Parent, "child": Child}

    # Valid input: template contains the standard sections.
    template = build_template_from_class("Child", registry)
    assert template is not None
    for section in ("template", "description", "base_classes"):
        assert section in template
    # Both the class itself and its ancestor are reported.
    assert "Child" in template["base_classes"]
    assert "Parent" in template["base_classes"]
    # The description comes from the class docstring.
    assert template["description"] == "Child Class"

    # Invalid input raises with the offending name in the message.
    with pytest.raises(ValueError, match="InvalidClass not found."):
        build_template_from_class("InvalidClass", registry)
|
|
|
|
|
|
# Test format_dict
|
|
def test_format_dict():
    """Exercise format_dict's type normalisation and field-decoration rules."""

    def field(type_="str", **extra):
        # Baseline shape format_dict emits for a plain optional field;
        # individual cases override just the keys that differ.
        out = {
            "type": type_,
            "required": False,
            "list": False,
            "show": False,
            "password": False,
            "multiline": False,
        }
        out.update(extra)
        return out

    # Test 1: the Optional[...] wrapper is stripped.
    assert format_dict({"field1": {"type": "Optional[str]", "required": False}}) == {
        "field1": field()
    }

    # Test 2: List[...] unwraps to the element type and sets list=True.
    assert format_dict({"field1": {"type": "List[str]", "required": False}}) == {
        "field1": field(list=True)
    }

    # Test 3: Mapping[...] is rewritten to dict[...].
    assert format_dict({"field1": {"type": "Mapping[str, int]", "required": False}}) == {
        "field1": field("dict[str, int]")
    }

    # Test 4: a "default" entry is surfaced as "value".
    assert format_dict(
        {"field1": {"type": "str", "required": False, "default": "test"}}
    ) == {"field1": field(value="test")}

    # Test 5: api-key-style names become shown password inputs.
    assert format_dict(
        {
            "field1": {"type": "str", "required": False},
            "api_key": {"type": "str", "required": False},
        }
    ) == {"field1": field(), "api_key": field(show=True, password=True)}

    # Test 6: prompt-ish names ("prefix") become shown multiline inputs.
    assert format_dict(
        {
            "field1": {"type": "str", "required": False},
            "prefix": {"type": "str", "required": False},
        }
    ) == {"field1": field(), "prefix": field(show=True, multiline=True)}

    # Test 7: model_name gets class-specific option lists and defaults
    # (same input dict passed twice, mirroring the original test).
    model_input = {"model_name": {"type": "str", "required": False}}
    assert format_dict(model_input, "OpenAI") == {
        "model_name": field(
            list=True, show=True, options=OPENAI_MODELS, value="text-davinci-003"
        )
    }
    assert format_dict(model_input, "ChatOpenAI") == {
        "model_name": field(
            list=True, show=True, options=CHAT_OPENAI_MODELS, value="gpt-4-1106-preview"
        )
    }

    # Test 8: Dict[...] types pass through unchanged.
    assert format_dict({"field1": {"type": "Dict[str, int]", "required": False}}) == {
        "field1": field("Dict[str, int]")
    }
|
|
|
|
|
|
# Test get_base_classes
|
|
def test_get_base_classes():
    """get_base_classes reports the class itself plus its ancestors."""
    parent_bases = get_base_classes(Parent)
    child_bases = get_base_classes(Child)

    assert "Parent" in parent_bases
    # A subclass reports both its own name and every ancestor's name.
    assert "Child" in child_bases
    assert "Parent" in child_bases
|
|
|
|
|
|
# Test get_default_factory
|
|
def test_get_default_factory():
    """get_default_factory resolves a '<function name>' repr to the callable's result.

    A dummy function is temporarily injected into the real
    ``langflow.utils.util`` module so the lookup can find it.
    """
    module_name = "langflow.utils.util"
    function_repr = "<function dummy_function>"

    def dummy_function():
        return "default_value"

    module = importlib.import_module(module_name)
    # Inject the dummy so get_default_factory can resolve it by name.
    setattr(module, "dummy_function", dummy_function)
    try:
        default_value = get_default_factory(module_name, function_repr)
        assert default_value == "default_value"
    finally:
        # Fix: the original test left dummy_function attached to the
        # module, leaking state into every subsequent test in the session.
        delattr(module, "dummy_function")
|