feat: Add ClassCodeExtractor and is_valid_class_template functions, and custom_component endpoint

This commit adds a new file called `extract_info_from_class.py` containing a `ClassCodeExtractor` class that can extract information (imports, class details and functions) from a Python class code. It also adds a function called `is_valid_class_template` that checks if a given Python class code matches a certain template. Additionally, the commit adds a new endpoint in the `/custom_component` route of `endpoints.py` that returns a dictionary of all `langchain` types created by a specific creator. Finally, the commit adds a new section to `types.py` named `template_node`, which describes a default dictionary that can be used to define a new node for Langflow's template editor.
This commit is contained in:
gustavoschaedler 2023-06-23 02:54:43 +01:00
commit cf7cd979dc
5 changed files with 350 additions and 2 deletions

View file

@ -0,0 +1,70 @@
import ast
class ClassCodeExtractor:
    """Walk a Python source string's AST and collect its imports,
    class metadata (name, bases, ``__init__`` arguments) and the
    signatures of the remaining methods."""

    def __init__(self, code):
        self.code = code
        # Result skeleton; filled in by extract_class_info().
        self.data = {
            "imports": [],
            "class": {"inherited_classes": "", "name": "", "init": ""},
            "functions": [],
        }

    def _handle_import(self, node):
        """Record each imported name, prefixed with its module for `from` imports."""
        # ast.Import has no `module` attribute; ast.ImportFrom does.
        prefix = getattr(node, "module", None)
        for alias in node.names:
            entry = f"{prefix}.{alias.name}" if prefix else alias.name
            self.data["imports"].append(entry)

    def _handle_class(self, node):
        """Record the class name/bases and visit its method definitions."""
        info = self.data["class"]
        info["name"] = node.name
        info["inherited_classes"] = [ast.unparse(base) for base in node.bases]
        for member in node.body:
            if isinstance(member, ast.FunctionDef):
                self._handle_function(member)

    def _handle_function(self, node):
        """Record one method's name, argument list and return annotation."""
        signature = ast.unparse(node.args)
        arguments = signature.split(", ") if signature else []
        if node.name == "__init__":
            # The constructor's argument list is stored on the class entry
            # rather than in the functions list.
            self.data["class"]["init"] = arguments
        else:
            self.data["functions"].append(
                {
                    "name": node.name,
                    "arguments": arguments,
                    "return_type": ast.unparse(node.returns) if node.returns else "None",
                }
            )

    def extract_class_info(self):
        """Parse self.code and return the populated info dictionary."""
        tree = ast.parse(self.code)
        for top in tree.body:
            if isinstance(top, (ast.Import, ast.ImportFrom)):
                self._handle_import(top)
            elif isinstance(top, ast.ClassDef):
                self._handle_class(top)
        return self.data
def is_valid_class_template(code: dict) -> bool:
    """Return True when extracted class info matches the expected template:
    the class is named ``PythonFunction`` and it defines exactly one
    function called ``run``."""
    name_matches = code["class"]["name"] == "PythonFunction"
    run_defs = [f for f in code["functions"] if f["name"] == "run"]
    return name_matches and len(run_defs) == 1

View file

@ -7,9 +7,13 @@ from fastapi import APIRouter, Depends, HTTPException
from langflow.api.v1.schemas import (
PredictRequest,
PredictResponse,
CustomComponentResponse,
)
from langflow.interface.types import build_langchain_types_dict
from langflow.interface.types import (
build_langchain_types_dict,
build_langchain_types_dict_by_creator
)
from langflow.database.base import get_session
from sqlmodel import Session
@ -62,3 +66,9 @@ def get_version():
from langflow import __version__
return {"version": __version__}
# @router.post("/custom_component", response_model=CustomComponentResponse, status_code=200)
@router.post("/custom_component", status_code=200)
def custom_component(code: dict):
    """POST /custom_component — return langchain type data for a custom component.

    NOTE(review): the ``code`` payload is accepted but currently ignored, and
    the creator name is hard-coded to "a"; the typed ``response_model``
    decorator above is commented out. This looks like a work-in-progress
    stub — confirm intended behavior before relying on it.
    """
    return build_langchain_types_dict_by_creator("a")

View file

@ -69,7 +69,8 @@ class ChatResponse(ChatMessage):
@validator("type")
def validate_message_type(cls, v):
    """Reject any message type outside the supported set; return it unchanged otherwise."""
    allowed = ("start", "stream", "end", "error", "info", "file")
    if v in allowed:
        return v
    raise ValueError(
        "type must be start, stream, end, error, info, or file")
@ -110,3 +111,8 @@ class StreamData(BaseModel):
def __str__(self) -> str:
    """Render this object as one Server-Sent Events message block."""
    payload = json.dumps(self.data)
    return f"event: {self.event}\ndata: {payload}\n\n"
class CustomComponentResponse(BaseModel):
    """Response schema for the /custom_component endpoint.

    NOTE(review): both fields default to empty strings; the semantics of
    `model` and `step` are not established anywhere in this file — confirm
    against the endpoint implementation before documenting further.
    """
    model: str = ""
    step: str = ""

View file

@ -52,3 +52,28 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
if created_types[creator.type_name].values():
all_types.update(created_types)
return all_types
def build_langchain_types_dict_by_creator(creator: str):
    """Return the node template for the types built by ``creator``.

    NOTE(review): currently a stub — the ``creator`` argument is ignored
    and the ``ConversationChain`` template from ``chain_creator`` is always
    returned. The original body also built a list of all creators and never
    used it (dead code, removed here); implementing a real per-creator
    lookup is still TODO.

    :param creator: name of the creator whose types are requested
        (presently unused).
    :return: the template dict for ``ConversationChain``.
    """
    return chain_creator.to_dict()["chains"]["ConversationChain"]

View file

@ -4,6 +4,239 @@ from fastapi.middleware.cors import CORSMiddleware
from langflow.api import router
from langflow.database.base import create_db_and_tables
template_node = {
"template": {
"code": {
"required": True,
"placeholder": "",
"show": True,
"multiline": True,
"value": "\ndef my_user_python_function(text: str) -> str:\n \"\"\"This is a default python function that returns the input text\"\"\"\n return text.upper()\n",
"password": False,
"name": "code",
"advanced": False,
"type": "code",
"list": False
},
"lc_kwargs": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"password": False,
"name": "lc_kwargs",
"advanced": True,
"type": "code",
"list": False
},
"verbose": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"value": False,
"password": False,
"name": "verbose",
"advanced": False,
"type": "bool",
"list": False
},
"callbacks": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"password": False,
"name": "callbacks",
"advanced": False,
"type": "langchain.callbacks.base.BaseCallbackHandler",
"list": True
},
"tags": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"password": False,
"name": "tags",
"advanced": False,
"type": "str",
"list": True
},
"client": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"password": False,
"name": "client",
"advanced": False,
"type": "Any",
"list": False
},
"model_name": {
"required": False,
"placeholder": "",
"show": True,
"multiline": False,
"value": "gpt-3.5-turbo",
"password": False,
"options": [
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-16k",
"gpt-4-0613",
"gpt-4-32k-0613",
"gpt-4",
"gpt-4-32k"
],
"name": "model_name",
"advanced": False,
"type": "str",
"list": True
},
"temperature": {
"required": False,
"placeholder": "",
"show": True,
"multiline": False,
"value": 0.7,
"password": False,
"name": "temperature",
"advanced": False,
"type": "float",
"list": False
},
"model_kwargs": {
"required": False,
"placeholder": "",
"show": True,
"multiline": False,
"password": False,
"name": "model_kwargs",
"advanced": True,
"type": "code",
"list": False
},
"openai_api_key": {
"required": False,
"placeholder": "",
"show": True,
"multiline": False,
"value": "",
"password": True,
"name": "openai_api_key",
"display_name": "OpenAI API Key",
"advanced": False,
"type": "str",
"list": False
},
"openai_api_base": {
"required": False,
"placeholder": "",
"show": True,
"multiline": False,
"password": False,
"name": "openai_api_base",
"display_name": "OpenAI API Base",
"advanced": False,
"type": "str",
"list": False
},
"openai_organization": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"password": False,
"name": "openai_organization",
"display_name": "OpenAI Organization",
"advanced": False,
"type": "str",
"list": False
},
"openai_proxy": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"password": False,
"name": "openai_proxy",
"display_name": "OpenAI Proxy",
"advanced": False,
"type": "str",
"list": False
},
"request_timeout": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"password": False,
"name": "request_timeout",
"advanced": False,
"type": "float",
"list": False
},
"max_retries": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"value": 6,
"password": False,
"name": "max_retries",
"advanced": False,
"type": "int",
"list": False
},
"streaming": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"value": False,
"password": False,
"name": "streaming",
"advanced": False,
"type": "bool",
"list": False
},
"n": {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"value": 1,
"password": False,
"name": "n",
"advanced": False,
"type": "int",
"list": False
},
"max_tokens": {
"required": False,
"placeholder": "",
"show": True,
"multiline": False,
"password": True,
"name": "max_tokens",
"advanced": False,
"type": "int",
"list": False
},
"_type": "ChatOpenAI"
},
"base_classes": [
"BaseChatModel",
"Serializable",
"BaseLanguageModel",
"ChatOpenAI"
],
"description": "Wrapper around OpenAI Chat large language models."
}
def create_app():
"""Create the FastAPI app and include the router."""
@ -18,6 +251,10 @@ def create_app():
def get_health():
return {"status": "OK"}
@app.get("/dynamic_node")
def get_dynamic_node():
    """GET /dynamic_node — return the static ``template_node`` dict defined at module level.

    Fixes the typo in the handler name (was ``get_dynamic_nome``); the route
    path is set by the decorator, so the URL is unchanged.
    """
    return template_node
app.add_middleware(
CORSMiddleware,
allow_origins=origins,