Commit 2fe4b2ac44 ("v2")
Parent: 2078778976
25 changed files with 397 additions and 686 deletions
poetry.lock (generated, 915 changed lines)
File diff suppressed because it is too large.
pyproject.toml

@@ -33,7 +33,7 @@ google-search-results = "^2.4.1"
 google-api-python-client = "^2.79.0"
 typer = "^0.9.0"
 gunicorn = "^21.1.0"
-langchain = "^0.0.256"
+langchain = "^0.0.268"
 openai = "^0.27.8"
 pandas = "^2.0.0"
 chromadb = "^0.3.21"

@@ -60,7 +60,8 @@ sentence-transformers = { version = "^2.2.2", optional = true }
 ctransformers = { version = "^0.2.10", optional = true }
 cohere = "^4.11.0"
 python-multipart = "^0.0.6"
-sqlmodel = "^0.0.8"
+# install sqlmodel using https://github.com/honglei/sqlmodel.git
+sqlmodel = { git = "https://github.com/honglei/sqlmodel.git", branch = "main" }
 faiss-cpu = "^1.7.4"
 anthropic = "^0.3.0"
 orjson = "3.9.3"

@@ -82,6 +83,7 @@ passlib = "^1.7.4"
 bcrypt = "^4.0.1"
 python-jose = "^3.3.0"
 metaphor-python = "^0.1.11"
+pydantic-settings = "^2.0.3"

 [tool.poetry.group.dev.dependencies]
 black = "^23.1.0"

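Note: the dependency changes above line up with the Pydantic v2 migration in the rest of this diff: pydantic-settings is added because BaseSettings now lives in that separate package (see the settings hunks below), and sqlmodel is taken from the honglei fork, apparently for Pydantic v2 compatibility, instead of the released 0.0.8. A quick, illustrative way to confirm the expected stack is installed; this snippet is not part of the commit, and the package list is simply taken from the hunks above:

    # Illustrative only: print installed versions of the packages touched above.
    import importlib.metadata as metadata

    for package in ("pydantic", "pydantic-settings", "langchain", "sqlmodel", "orjson"):
        try:
            print(package, metadata.version(package))
        except metadata.PackageNotFoundError:
            print(package, "not installed")
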
@@ -1,5 +1,5 @@
 from langflow.template.frontend_node.base import FrontendNode
-from pydantic import BaseModel, validator
+from pydantic import field_validator, BaseModel

 from langflow.interface.utils import extract_input_variables_from_prompt
 from langchain.prompts import PromptTemplate

@@ -28,11 +28,13 @@ class CodeValidationResponse(BaseModel):
     imports: dict
     function: dict

-    @validator("imports")
+    @field_validator("imports")
+    @classmethod
     def validate_imports(cls, v):
         return v or {"errors": []}

-    @validator("function")
+    @field_validator("function")
+    @classmethod
     def validate_function(cls, v):
         return v or {"errors": []}

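Note: this is the validator rename that repeats throughout the commit: Pydantic v2 replaces @validator with @field_validator, and the method is explicitly decorated with @classmethod. A minimal standalone sketch of the v2 form (the model and field names here are illustrative, not from the repo):

    from pydantic import BaseModel, field_validator

    class ValidationResultSketch(BaseModel):
        errors: dict

        # v2 spelling: field_validator plus an explicit @classmethod;
        # the first parameter is the class, the second the field value.
        @field_validator("errors")
        @classmethod
        def default_errors(cls, v):
            return v or {"errors": []}

By default field_validator runs after the field's own validation; mode="before" reproduces v1's pre=True, which is what the settings hunk near the end of this diff uses.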
@@ -2,7 +2,7 @@ from enum import Enum
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Union
 from langflow.services.database.models.flow import FlowCreate, FlowRead
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Field
 import json


@@ -66,7 +66,8 @@ class ChatResponse(ChatMessage):
     is_bot: bool = True
     files: list = []

-    @validator("type")
+    @field_validator("type")
+    @classmethod
     def validate_message_type(cls, v):
         if v not in ["start", "stream", "end", "error", "info", "file"]:
             raise ValueError("type must be start, stream, end, error, info, or file")

@@ -76,12 +77,13 @@ class ChatResponse(ChatMessage):
 class FileResponse(ChatMessage):
     """File response schema."""

-    data: Any
+    data: Any = None
     data_type: str
     type: str = "file"
     is_bot: bool = True

-    @validator("data_type")
+    @field_validator("data_type")
+    @classmethod
     def validate_data_type(cls, v):
         if v not in ["image", "csv"]:
             raise ValueError("data_type must be image or csv")

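Note: data: Any becoming data: Any = None reflects another v2 rule this commit has to account for: a field with no default is required, even when its annotation is Optional[...] or Any, so fields that should stay optional need an explicit = None. A small sketch of the difference (illustrative class name):

    from typing import Any, Optional
    from pydantic import BaseModel

    class FileResponseSketch(BaseModel):
        data: Any = None            # optional only because of the explicit default
        note: Optional[str] = None  # Optional alone no longer implies a default in v2
        data_type: str              # no default, so this field stays required
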
@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional
+from typing import ClassVar, Dict, List, Optional

 from langchain.agents import types

@@ -15,7 +15,7 @@ from langflow.utils.util import build_template_from_class, build_template_from_method
 class AgentCreator(LangChainTypeCreator):
     type_name: str = "agents"

-    from_method_nodes = {"ZeroShotAgent": "from_llm_and_tools"}
+    from_method_nodes: ClassVar[Dict] = {"ZeroShotAgent": "from_llm_and_tools"}

     @property
     def frontend_node_class(self) -> type[AgentFrontendNode]:

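Note: the ClassVar[Dict] annotations added here, and in the chain, memory, output-parser, retriever, wrapper, and frontend-node creators further down, keep these class-level lookup tables out of the Pydantic model's fields. Pydantic v2 rejects non-annotated class attributes on a model, while a ClassVar annotation tells it to leave the attribute alone and skip validation. A standalone sketch (illustrative class; a narrower annotation such as ClassVar[Dict[str, str]] works just as well):

    from typing import ClassVar, Dict
    from pydantic import BaseModel

    class CreatorSketch(BaseModel):
        type_name: str = "agents"  # a real model field

        # shared class data, excluded from model_fields and never validated
        from_method_nodes: ClassVar[Dict[str, str]] = {
            "ZeroShotAgent": "from_llm_and_tools",
        }
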
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional, Type
+from typing import Any, ClassVar, Dict, List, Optional, Type

 from langflow.custom.customs import get_custom_nodes
 from langflow.interface.base import LangChainTypeCreator

@@ -9,7 +9,6 @@ from langflow.template.frontend_node.chains import ChainFrontendNode
 from langflow.utils.logger import logger
 from langflow.utils.util import build_template_from_class, build_template_from_method
 from langchain import chains
-from langchain_experimental.sql import SQLDatabaseChain  # type: ignore

 # Assuming necessary imports for Field, Template, and FrontendNode classes

@@ -22,7 +21,7 @@ class ChainCreator(LangChainTypeCreator):
         return ChainFrontendNode

     #! We need to find a better solution for this
-    from_method_nodes = {
+    from_method_nodes: ClassVar[Dict] = {
         "ConversationalRetrievalChain": "from_llm",
         "LLMCheckerChain": "from_llm",
         "SQLDatabaseChain": "from_llm",

@@ -38,7 +37,7 @@ class ChainCreator(LangChainTypeCreator):
         }
         from langflow.interface.chains.custom import CUSTOM_CHAINS

-        self.type_dict["SQLDatabaseChain"] = SQLDatabaseChain
+        # self.type_dict["SQLDatabaseChain"] = SQLDatabaseChain

         self.type_dict.update(CUSTOM_CHAINS)
         # Filter according to settings.chains

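Note: besides the ClassVar change, these ChainCreator hunks drop the hard import of SQLDatabaseChain from langchain_experimental and comment out its registration, so the chain is disabled rather than ported. If it were meant to stay available only when langchain_experimental is installed, a guarded import along these lines would be one option (a sketch, not what the commit does):

    # Sketch only: register SQLDatabaseChain when langchain_experimental is present.
    try:
        from langchain_experimental.sql import SQLDatabaseChain  # type: ignore
    except ImportError:
        SQLDatabaseChain = None

    def register_optional_chains(type_dict: dict) -> None:
        if SQLDatabaseChain is not None:
            type_dict["SQLDatabaseChain"] = SQLDatabaseChain
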
@@ -4,7 +4,7 @@ from langchain.chains import ConversationChain
 from langchain.memory.buffer import ConversationBufferMemory
 from langchain.schema import BaseMemory
 from langflow.interface.base import CustomChain
-from pydantic import Field, root_validator
+from pydantic.v1 import Field, root_validator
 from langchain.chains.question_answering import load_qa_chain
 from langflow.interface.utils import extract_input_variables_from_prompt
 from langchain.base_language import BaseLanguageModel

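Note: rather than rewriting every validator at once, this file (and the prompt creator and tool schema below) switches its imports to pydantic.v1, the compatibility namespace that Pydantic 2.x ships with the old API. root_validator and the v1-style validator keep working unchanged while the rest of the codebase moves to v2. A minimal sketch of the shim (the model is illustrative):

    # Pydantic 2.x exposes the legacy API under pydantic.v1; a model built from it
    # keeps v1 semantics even though the installed package is v2.
    from pydantic.v1 import BaseModel, root_validator

    class LegacyChainConfig(BaseModel):
        prompt: str = ""
        input_variables: list = []

        @root_validator(pre=True)
        def fill_defaults(cls, values):
            # v1-style root validator: receives and returns the raw values dict
            values.setdefault("input_variables", [])
            return values
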
@@ -1,5 +1,5 @@
 import ast
-from typing import Any, Optional
+from typing import Any, ClassVar, Dict, Optional
 from pydantic import BaseModel
 from fastapi import HTTPException

@@ -16,13 +16,13 @@ class ComponentFunctionEntrypointNameNullError(HTTPException):


 class Component(BaseModel):
-    ERROR_CODE_NULL = "Python code must be provided."
-    ERROR_FUNCTION_ENTRYPOINT_NAME_NULL = (
-        "The name of the entrypoint function must be provided."
-    )
+    ERROR_CODE_NULL: ClassVar[Dict] = "Python code must be provided."
+    ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[
+        Dict
+    ] = "The name of the entrypoint function must be provided."

-    code: Optional[str]
-    function_entrypoint_name = "build"
+    code: Optional[str] = None
+    function_entrypoint_name: ClassVar[Dict] = "build"
     field_config: dict = {}

     def __init__(self, **data):

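Note: the constants annotated in this hunk are strings, so ClassVar[Dict] only works because Pydantic does not validate ClassVar attributes at all; the annotation's sole effect is to exclude them from the model's fields. A more precise equivalent (an editorial sketch, not what the commit contains) would be:

    from typing import ClassVar, Optional
    from pydantic import BaseModel

    class ComponentSketch(BaseModel):
        # class-level constants: excluded from fields, typed as what they hold
        ERROR_CODE_NULL: ClassVar[str] = "Python code must be provided."
        function_entrypoint_name: ClassVar[str] = "build"

        # per-instance fields keep explicit defaults under Pydantic v2
        code: Optional[str] = None
        field_config: dict = {}
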
@@ -1,4 +1,4 @@
-from typing import Any, Callable, List, Optional
+from typing import Any, Callable, ClassVar, Dict, List, Optional
 from fastapi import HTTPException
 from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
 from langflow.interface.custom.component import Component

@@ -14,12 +14,14 @@ import yaml


 class CustomComponent(Component, extra=Extra.allow):
-    code: Optional[str]
+    code: Optional[str] = None
     field_config: dict = {}
-    code_class_base_inheritance = "CustomComponent"
-    function_entrypoint_name = "build"
+    code_class_base_inheritance: ClassVar[Dict] = "CustomComponent"
+    function_entrypoint_name: ClassVar[Dict] = "build"
     function: Optional[Callable] = None
-    return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())
+    return_type_valid_list: ClassVar[Dict] = list(
+        CUSTOM_COMPONENT_SUPPORTED_TYPES.keys()
+    )
     repr_value: Optional[str] = ""

     def __init__(self, **data):

@@ -10,7 +10,7 @@ class ClassCodeDetails(BaseModel):
     """

     name: str
-    doc: Optional[str]
+    doc: Optional[str] = None
     bases: list
     attributes: list
     methods: list

@@ -23,7 +23,7 @@ class CallableCodeDetails(BaseModel):
     """

     name: str
-    doc: Optional[str]
+    doc: Optional[str] = None
     args: list
     body: list
-    return_type: Optional[str]
+    return_type: Optional[str] = None

@@ -274,7 +274,7 @@ def instantiate_embedding(node_type, class_object, params: Dict):
     params = {
         key: value
         for key, value in params.items()
-        if key in class_object.__fields__
+        if key in class_object.model_fields
     }
     return class_object(**params)

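Note: __fields__ becoming model_fields is the v2 rename for the mapping of declared fields; the same substitution appears in load_settings_from_yaml and in build_template_from_class/build_template_from_function at the end of the diff (v2 still answers __fields__, but with a deprecation warning). A small sketch of the filtering idiom used here, with an illustrative model and params dict:

    from pydantic import BaseModel

    class EmbeddingConfigSketch(BaseModel):
        model: str = "demo"
        chunk_size: int = 1000

    params = {"model": "demo", "chunk_size": 256, "unknown_key": 1}

    # keep only keys that are declared fields on the model (v2 spelling)
    filtered = {k: v for k, v in params.items() if k in EmbeddingConfigSketch.model_fields}
    instance = EmbeddingConfigSketch(**filtered)
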
@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional, Type
+from typing import ClassVar, Dict, List, Optional, Type

 from langflow.interface.base import LangChainTypeCreator
 from langflow.interface.custom_lists import memory_type_to_cls_dict

@@ -14,7 +14,7 @@ from langflow.custom.customs import get_custom_nodes
 class MemoryCreator(LangChainTypeCreator):
     type_name: str = "memories"

-    from_method_nodes = {
+    from_method_nodes: ClassVar[Dict] = {
         "ZepChatMessageHistory": "__init__",
         "SQLiteEntityStore": "__init__",
     }

@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional, Type
+from typing import ClassVar, Dict, List, Optional, Type

 from langchain import output_parsers

@@ -13,7 +13,7 @@ from langflow.utils.util import build_template_from_class, build_template_from_method

 class OutputParserCreator(LangChainTypeCreator):
     type_name: str = "output_parsers"
-    from_method_nodes = {
+    from_method_nodes: ClassVar[Dict] = {
         "StructuredOutputParser": "from_response_schemas",
     }

@@ -1,7 +1,7 @@
 from typing import Dict, List, Optional, Type

 from langchain.prompts import PromptTemplate
-from pydantic import root_validator
+from pydantic.v1 import root_validator

 from langflow.interface.utils import extract_input_variables_from_prompt

@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional, Type
+from typing import Any, ClassVar, Dict, List, Optional, Type

 from langchain import retrievers

@@ -14,7 +14,10 @@ from langflow.utils.util import build_template_from_method, build_template_from_class
 class RetrieverCreator(LangChainTypeCreator):
     type_name: str = "retrievers"

-    from_method_nodes = {"MultiQueryRetriever": "from_llm", "ZepRetriever": "__init__"}
+    from_method_nodes: ClassVar[Dict] = {
+        "MultiQueryRetriever": "from_llm",
+        "ZepRetriever": "__init__",
+    }

     @property
     def frontend_node_class(self) -> Type[RetrieverFrontendNode]:

@@ -21,6 +21,7 @@ from langflow.template.field.base import TemplateField
 from langflow.template.template.base import Template
 from langflow.utils import util
 from langflow.utils.util import build_template_from_class
+from langflow.utils.logger import logger

 TOOL_INPUTS = {
     "str": TemplateField(

@@ -72,7 +73,11 @@ class ToolCreator(LangChainTypeCreator):
         all_tools = {}

         for tool, tool_fcn in ALL_TOOLS_NAMES.items():
-            tool_params = get_tool_params(tool_fcn)
+            try:
+                tool_params = get_tool_params(tool_fcn)
+            except Exception:
+                logger.error(f"Error getting params for tool {tool}")
+                continue

             tool_name = tool_params.get("name") or tool

@@ -1,7 +1,7 @@
 from typing import Callable, Optional
 from langflow.interface.importing.utils import get_function

-from pydantic import BaseModel, validator
+from pydantic.v1 import BaseModel, validator

 from langflow.utils import validate
 from langchain.agents.tools import Tool

@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional
+from typing import ClassVar, Dict, List, Optional

 from langchain import requests, sql_database

@@ -10,7 +10,7 @@ from langflow.utils.util import build_template_from_class, build_template_from_method
 class WrapperCreator(LangChainTypeCreator):
     type_name: str = "wrappers"

-    from_method_nodes = {"SQLDatabase": "from_uri"}
+    from_method_nodes: ClassVar[Dict] = {"SQLDatabase": "from_uri"}

     @property
     def type_to_loader_dict(self) -> Dict:

@@ -1,5 +1,6 @@
 from sqlmodel import SQLModel
 import orjson
+from pydantic import ConfigDict


 def orjson_dumps(v, *, default):

@@ -8,7 +9,8 @@ def orjson_dumps(v, *, default):


 class SQLModelSerializable(SQLModel):
-    class Config:
-        orm_mode = True
-        json_loads = orjson.loads
-        json_dumps = orjson_dumps
+    # TODO[pydantic]: The following keys were removed: `json_loads`, `json_dumps`.
+    # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
+    model_config = ConfigDict(
+        from_attributes=True, json_loads=orjson.loads, json_dumps=orjson_dumps
+    )

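Note: class Config turning into model_config = ConfigDict(...) is the v2 replacement for nested config classes, with orm_mode renamed to from_attributes. The auto-generated TODO left in the hunk is worth taking seriously: json_loads and json_dumps are no longer config options in v2, so keeping them inside ConfigDict has no effect and orjson-based (de)serialization has to be wired up some other way. The v2-only shape of this config, written against plain pydantic for clarity (a sketch, not the committed code, which keeps SQLModel as the base class via the forked sqlmodel pinned above):

    from pydantic import BaseModel, ConfigDict

    class SerializableSketch(BaseModel):
        # from_attributes=True is the v2 name for orm_mode=True
        model_config = ConfigDict(from_attributes=True)
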
@@ -1,7 +1,6 @@
 # Path: src/backend/langflow/database/models/flow.py

 from langflow.services.database.models.base import SQLModelSerializable
-from pydantic import validator
 from sqlmodel import Field, JSON, Column
 from uuid import UUID, uuid4
 from typing import Dict, Optional

@@ -14,8 +13,9 @@ class FlowBase(SQLModelSerializable):
     description: Optional[str] = Field(index=True)
     data: Optional[Dict] = Field(default=None)

-    @validator("data")
-    def validate_json(v):
+    @field_validator("data")
+    @classmethod
+    def validate_json(cls, v):
         # dict_keys(['description', 'name', 'id', 'data'])
         if not v:
             return v

@@ -7,7 +7,8 @@ from typing import Optional, List
 from pathlib import Path

 import yaml
-from pydantic import BaseSettings, root_validator, validator
+from pydantic_settings import SettingsConfigDict, BaseSettings
+from pydantic import field_validator
 from langflow.utils.logger import logger

 # BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")

@@ -109,7 +110,7 @@ class Settings(BaseSettings):

         return value

-    @validator("COMPONENTS_PATH", pre=True)
+    @field_validator("COMPONENTS_PATH", mode="before")
     def set_components_path(cls, value):
         if os.getenv("LANGFLOW_COMPONENTS_PATH"):
             logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")

@@ -141,17 +142,17 @@ class Settings(BaseSettings):
         logger.debug(f"Components path: {value}")
         return value

-    class Config:
-        validate_assignment = True
-        extra = "ignore"
-        env_prefix = "LANGFLOW_"
+    model_config = SettingsConfigDict(
+        validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_"
+    )

-    @root_validator(allow_reuse=True)
-    def validate_lists(cls, values):
-        for key, value in values.items():
-            if key != "dev" and not value:
-                values[key] = []
-        return values
+    # @model_validator()
+    # @classmethod
+    # def validate_lists(cls, values):
+    #     for key, value in values.items():
+    #         if key != "dev" and not value:
+    #             values[key] = []
+    #     return values

     def update_from_yaml(self, file_path: str, dev: bool = False):
         new_settings = load_settings_from_yaml(file_path)

@@ -225,7 +226,7 @@ def load_settings_from_yaml(file_path: str) -> Settings:
     settings_dict = {k.upper(): v for k, v in settings_dict.items()}

     for key in settings_dict:
-        if key not in Settings.__fields__.keys():
+        if key not in Settings.model_fields.keys():
             raise KeyError(f"Key {key} not found in settings")
         logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")

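Note: the settings module collects several of the v2 moves in one place: BaseSettings now comes from the separate pydantic-settings package (hence the new dependency in pyproject.toml), the nested Config class becomes model_config = SettingsConfigDict(...), validator(..., pre=True) becomes field_validator(..., mode="before"), and the old root_validator is left commented out pending a model_validator rewrite. A compact standalone sketch of the same shape (the settings class and field are illustrative):

    import os

    from pydantic import field_validator
    from pydantic_settings import BaseSettings, SettingsConfigDict

    class SettingsSketch(BaseSettings):
        model_config = SettingsConfigDict(
            validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_"
        )

        COMPONENTS_PATH: str = ""

        # mode="before" is the v2 equivalent of pre=True: it sees the raw value.
        @field_validator("COMPONENTS_PATH", mode="before")
        @classmethod
        def set_components_path(cls, value):
            return os.getenv("LANGFLOW_COMPONENTS_PATH", value)

The commented-out validate_lists would map onto @model_validator in v2; the commit defers that rewrite.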
@@ -1,6 +1,6 @@
 from collections import defaultdict
 import re
-from typing import List, Optional
+from typing import ClassVar, DefaultDict, Dict, List, Optional

 from pydantic import BaseModel, Field

@@ -15,10 +15,10 @@ from langflow.utils import constants


 class FieldFormatters(BaseModel):
-    formatters = {
+    formatters: ClassVar[Dict] = {
         "openai_api_key": field_formatters.OpenAIAPIKeyFormatter(),
     }
-    base_formatters = {
+    base_formatters: ClassVar[Dict] = {
         "kwargs": field_formatters.KwargsFormatter(),
         "optional": field_formatters.RemoveOptionalFormatter(),
         "list": field_formatters.ListTypeFormatter(),

@@ -49,7 +49,7 @@ class FrontendNode(BaseModel):
     name: str = ""
     display_name: str = ""
     documentation: str = ""
-    custom_fields: defaultdict = defaultdict(list)
+    custom_fields: Optional[DefaultDict[str, List[str]]] = defaultdict(list)
     output_types: List[str] = []
     field_formatters: FieldFormatters = Field(default_factory=FieldFormatters)
     beta: bool = False

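Note: custom_fields gains a fully parameterized Optional[DefaultDict[str, List[str]]] annotation, which tells Pydantic exactly what to validate the keys and values as, instead of the bare defaultdict it had before. Field(default_factory=...) is the more explicit way to give each instance its own fresh container; both spellings appear in this sketch (the class and the second field are illustrative):

    from collections import defaultdict
    from typing import DefaultDict, List, Optional

    from pydantic import BaseModel, Field

    class FrontendNodeSketch(BaseModel):
        # as in the hunk above: parameterized annotation with an inline default
        custom_fields: Optional[DefaultDict[str, List[str]]] = defaultdict(list)

        # alternative spelling: build a fresh container per instance
        output_map: DefaultDict[str, List[str]] = Field(
            default_factory=lambda: defaultdict(list)
        )
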
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import ClassVar, Dict, Optional
 from langflow.template.field.base import TemplateField
 from langflow.template.frontend_node.base import FrontendNode

@@ -23,7 +23,7 @@ class DocumentLoaderFrontNode(FrontendNode):
         self.base_classes = ["Document"]
         self.output_types = ["Document"]

-    file_path_templates = {
+    file_path_templates: ClassVar[Dict] = {
         "AirbyteJSONLoader": build_file_field(suffixes=[".json"], fileTypes=["json"]),
         "CoNLLULoader": build_file_field(suffixes=[".csv"], fileTypes=["csv"]),
         "CSVLoader": build_file_field(suffixes=[".csv"], fileTypes=["csv"]),

@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import ClassVar, Dict, Optional
 from langflow.template.field.base import TemplateField
 from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS
 from langflow.template.frontend_node.formatter.base import FieldFormatter

@@ -21,7 +21,7 @@ class OpenAIAPIKeyFormatter(FieldFormatter):


 class ModelSpecificFieldFormatter(FieldFormatter):
-    MODEL_DICT = {
+    MODEL_DICT: ClassVar[Dict] = {
         "OpenAI": OPENAI_MODELS,
         "ChatOpenAI": CHAT_OPENAI_MODELS,
         "Anthropic": ANTHROPIC_MODELS,

@@ -86,7 +86,7 @@ class UnionTypeFormatter(FieldFormatter):


 class SpecialFieldFormatter(FieldFormatter):
-    SPECIAL_FIELD_HANDLERS = {
+    SPECIAL_FIELD_HANDLERS: ClassVar[Dict] = {
         "allowed_tools": lambda field: "Tool",
         "max_value_length": lambda field: "int",
     }

@@ -31,7 +31,7 @@ def build_template_from_function(
     docs = parse(_class.__doc__)

     variables = {"_type": _type}
-    for class_field_items, value in _class.__fields__.items():
+    for class_field_items, value in _class.model_fields.items():
         if class_field_items in ["callback_manager"]:
             continue
         variables[class_field_items] = {}

@@ -84,8 +84,8 @@ def build_template_from_class(

     variables = {"_type": _type}

-    if "__fields__" in _class.__dict__:
-        for class_field_items, value in _class.__fields__.items():
+    if "model_fields" in _class.__dict__:
+        for class_field_items, value in _class.model_fields.items():
             if class_field_items in ["callback_manager"]:
                 continue
             variables[class_field_items] = {}