Merge branch 'merge_repo' into dev

This commit is contained in:
Ibis Prevedello 2023-02-28 20:27:52 -03:00
commit 28a8f23620
16 changed files with 6314 additions and 0 deletions

View file

@ -0,0 +1,34 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/universal
{
"name": "Default Linux Universal",
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
"image": "mcr.microsoft.com/devcontainers/universal:2-linux",
"features": {
"ghcr.io/devcontainers/features/aws-cli:1": {}
},
"customizations": {
"vscode": {"extensions": [
"actboy168.tasks",
"GitHub.copilot",
"ms-python.python",
"sourcery.sourcery",
"eamodio.gitlens"
]}
}
// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "uname -a",
// Configure tool-specific properties.
// "customizations": {},
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}

130
.gitignore vendored
View file

@ -102,3 +102,133 @@ dist
# TernJS port file
.tern-port
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
notebooks
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/

130
backend/.gitignore vendored Normal file
View file

@ -0,0 +1,130 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
notebooks
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/

59
backend/Dockerfile Normal file
View file

@ -0,0 +1,59 @@
# `python-base` sets up all our shared environment variables
FROM python:3.10-slim

# python
ENV PYTHONUNBUFFERED=1 \
    # prevents python creating .pyc files
    PYTHONDONTWRITEBYTECODE=1 \
    \
    # pip
    PIP_NO_CACHE_DIR=off \
    PIP_DISABLE_PIP_VERSION_CHECK=on \
    PIP_DEFAULT_TIMEOUT=100 \
    \
    # poetry
    # https://python-poetry.org/docs/configuration/#using-environment-variables
    POETRY_VERSION=1.3.2 \
    # make poetry install to this location
    POETRY_HOME="/opt/poetry" \
    # make poetry create the virtual environment in the project's root
    # it gets named `.venv`
    POETRY_VIRTUALENVS_IN_PROJECT=true \
    # do not ask any interactive question
    POETRY_NO_INTERACTION=1 \
    \
    # paths
    # this is where our requirements + virtual environment will live
    PYSETUP_PATH="/opt/pysetup" \
    VENV_PATH="/opt/pysetup/.venv"

# prepend poetry and venv to path
ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"

# install OS deps and drop the apt lists in the SAME layer so the
# package index cache does not bloat the image
RUN apt-get update \
    && apt-get install --no-install-recommends -y \
    # deps for installing poetry
    curl \
    # deps for building python deps
    build-essential libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# install poetry - respects $POETRY_VERSION & $POETRY_HOME
RUN curl -sSL https://install.python-poetry.org | python3 -

# copy project requirement files here to ensure they will be cached.
WORKDIR $PYSETUP_PATH
COPY poetry.lock pyproject.toml ./

# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally
# NOTE(review): `--only-root` installs only the root package without its
# dependencies; `--no-root` (deps only, matching the comment above) looks
# like the intent -- confirm before changing.
RUN poetry install --only-root
RUN poetry add "git+https://github.com/ibiscp/langchain.git@ibis"

WORKDIR /app
ENV PATH="/opt/pysetup/.venv/bin:$PATH"
COPY src/ .

# EXPOSE is documentation only; uvicorn binds 0.0.0.0:80 below.
# NOTE(review): container runs as root; consider adding a USER directive.
EXPOSE 80
CMD [ "uvicorn", "--host", "0.0.0.0", "--port", "80", "app:app" ]

5
backend/build_and_push Executable file
View file

@ -0,0 +1,5 @@
#!/bin/bash
# Build and push the backend image. The local editable langchain
# dependency is removed for the build (so the Dockerfile resolves its
# own pinned copy) and restored afterwards.
set -u

IMAGE="ibiscp/expert:v0.0.15"

poetry remove langchain
# always restore the local editable dependency, even if build/push fails
trap 'poetry add --editable ../langchain' EXIT
docker build -t "$IMAGE" . && docker push "$IMAGE"

2170
backend/poetry.lock generated Normal file

File diff suppressed because it is too large Load diff

25
backend/pyproject.toml Normal file
View file

@ -0,0 +1,25 @@
[tool.poetry]
name = "expert-backend"
version = "0.0.3"
description = ""
authors = ["Ibis Prevedello <ibiscp@gmail.com>"]
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.10"
openai = "^0.26.5"
fastapi = "^0.91.0"
uvicorn = "^0.20.0"
beautifulsoup4 = "^4.11.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
langchain = {path = "../langchain", develop = true}
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"
ipykernel = "^6.21.2"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

37
backend/src/app.py Normal file
View file

@ -0,0 +1,37 @@
from fastapi import FastAPI
from endpoints import router as endpoints_router
from list_endpoints import router as list_router
from signature import router as signatures_router
from fastapi.middleware.cors import CORSMiddleware
def create_app():
    """Build the FastAPI application with CORS and all routers attached."""
    application = FastAPI()

    # local development origins allowed to call the API
    allowed_origins = [
        "http://localhost",
        "http://localhost:8080",
        "http://localhost:3000",
    ]
    application.add_middleware(
        CORSMiddleware,
        allow_origins=allowed_origins,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # component routers: generic endpoints, listings, and signatures
    for router in (endpoints_router, list_router, signatures_router):
        application.include_router(router)
    return application


app = create_app()

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=5003)

107
backend/src/endpoints.py Normal file
View file

@ -0,0 +1,107 @@
from fastapi import APIRouter
import signature
import list_endpoints
import payload
from langchain.agents.loading import load_agent_executor_from_config
from langchain.chains.loading import load_chain_from_config
from langchain.llms.loading import load_llm_from_config
from langchain.prompts.loading import load_prompt_from_config
from typing import Any
# build router
router = APIRouter()
def get_type_list():
    """Map each component category to the list of its `_type` identifiers."""
    components = get_all()
    # tools carry no `_type` in their templates, so they are excluded
    components.pop("tools")
    return {
        category: [entry["template"]["_type"] for entry in entries.values()]
        for category, entries in components.items()
    }
@router.get("/")
def get_all():
    """Return the signature template for every known component, by category."""
    # (getter, lister) pair per exposed category; utilities, document
    # loaders, vectorstores and docstores are not wired up yet
    signature_sources = {
        "chains": (signature.get_chain, list_endpoints.list_chains),
        "agents": (signature.get_agent, list_endpoints.list_agents),
        "prompts": (signature.get_prompt, list_endpoints.list_prompts),
        "llms": (signature.get_llm, list_endpoints.list_llms),
        "memories": (signature.get_memory, list_endpoints.list_memories),
        "tools": (signature.get_tool, list_endpoints.list_tools),
    }
    return {
        category: {name: get_signature(name) for name in list_names()}
        for category, (get_signature, list_names) in signature_sources.items()
    }
@router.post("/predict")
def get_load(data: dict[str, Any]):
    """Assemble a langchain object from the graph payload and run the message."""
    type_list = get_type_list()

    # resolve `{placeholder}` input variables inside prompt templates
    data = payload.extract_input_variables(data)

    message = data["message"]
    nodes = data["nodes"]
    edges = data["edges"]
    root = payload.get_root_node(data)
    config = payload.build_json(root, nodes, edges)

    # dispatch on the config `_type`: loader + how to invoke the result
    runners = {
        "agents": (load_agent_executor_from_config, lambda obj: obj.run(message)),
        "chains": (load_chain_from_config, lambda obj: obj.run(message)),
        "llms": (load_llm_from_config, lambda obj: obj(message)),
    }
    for category, (loader, invoke) in runners.items():
        if config["_type"] in type_list[category]:
            return {"result": invoke(loader(config))}
    return {"result": "Error: Type should be either agent, chain or llm"}
    # NOTE: prompt types are recognized but not executable here yet.

View file

@ -0,0 +1,104 @@
from fastapi import APIRouter
from langchain import chains
from langchain import agents
from langchain import prompts
from langchain import llms
from langchain.chains.conversation import memory as memories
from langchain.agents.load_tools import get_all_tool_names
import util
# build router
router = APIRouter(
prefix="/list",
tags=["list"],
)
@router.get("/")
def read_items():
    """List all components"""
    # categories currently exposed; utilities, document_loaders,
    # vectorstores and docstores are not wired up yet
    return ["chains", "agents", "prompts", "llms", "memories", "tools"]
@router.get("/chains")
def list_chains():
    """List all chain types"""
    # each loader's return annotation names the concrete chain class
    loaders = chains.loading.type_to_loader_dict.values()
    return [loader.__annotations__["return"].__name__ for loader in loaders]
@router.get("/agents")
def list_agents():
    """List all agent types"""
    # expose class names rather than the registry's string keys
    return [cls.__name__ for cls in agents.loading.AGENT_TO_CLASS.values()]
@router.get("/prompts")
def list_prompts():
    """List all prompt types"""
    # each loader's return annotation names the concrete prompt class
    loaders = prompts.loading.type_to_loader_dict.values()
    return [loader.__annotations__["return"].__name__ for loader in loaders]
@router.get("/llms")
def list_llms():
    """List all llm types"""
    return [cls.__name__ for cls in llms.type_to_cls_dict.values()]
@router.get("/memories")
def list_memories():
    """List all memory types"""
    return [cls.__name__ for cls in memories.type_to_cls_dict.values()]
# @router.get("/utilities")
# def list_utilities():
# """List all utility types"""
# return list(utilities.__all__)
# @router.get("/document_loaders")
# def list_document_loaders():
# """List all document loader types"""
# return list(document_loaders.__all__)
# @router.get("/vectorstores")
# def list_vectorstores():
# """List all vector store types"""
# return list(vectorstores.__all__)
# @router.get("/docstores")
# def list_docstores():
# """List all document store types"""
# return list(docstore.__all__)
@router.get("/tools")
def list_tools():
    """List all load tools"""
    # resolve each registry key to the user-facing tool name
    return [
        util.get_tool_params(util.get_tools_dict(name))["name"]
        for name in get_all_tool_names()
    ]

90
backend/src/payload.py Normal file
View file

@ -0,0 +1,90 @@
import re
def extract_input_variables(data):
    """Populate each prompt-like node's ``input_variables`` field.

    For ``prompt`` and ``few_shot`` templates the variables are the
    ``{placeholder}`` names found in the template text; any other type
    gets an empty list. Malformed nodes are skipped (best effort).

    Args:
        data: payload dict with a ``nodes`` list (mutated in place).

    Returns:
        The same ``data`` dict, with ``input_variables['value']`` filled in.
    """
    for node in data["nodes"]:
        try:
            template = node["data"]["node"]["template"]
            if "input_variables" not in template:
                continue
            if template["_type"] == "prompt":
                text = template["template"]["value"]
            elif template["_type"] == "few_shot":
                text = template["prefix"]["value"] + template["suffix"]["value"]
            else:
                text = ""
            # `{name}` placeholders become the prompt's input variables
            template["input_variables"]["value"] = re.findall(r"\{(.*?)\}", text)
        except (KeyError, TypeError, AttributeError):
            # narrowed from a bare `except: pass`: still best-effort for
            # malformed nodes, but no longer hides unrelated errors
            continue
    return data
def get_root_node(data):
    """Return the terminal node of the graph, or None if there is none.

    Nodes that appear as an edge ``source`` feed into another node; the
    root is the first node that is never a source.
    """
    source_ids = {edge["source"] for edge in data["edges"]}
    return next(
        (node for node in data["nodes"] if node["id"] not in source_ids),
        None,
    )
def build_json(root, nodes, edges):
    """Recursively assemble the langchain config dict for ``root``.

    Children of ``root`` are the nodes with an edge pointing into it. Each
    template field resolves from its inline value (primitive types) or from
    the child node(s) whose type / base classes match the field's type.

    Args:
        root: the node to serialize.
        nodes: all nodes of the graph.
        edges: all edges of the graph (``source`` -> ``target``).

    Returns:
        A dict of field values plus the ``_type`` discriminator.

    Raises:
        ValueError: if a required field has no matching child node.
    """
    child_ids = [edge["source"] for edge in edges if edge["target"] == root["id"]]
    children = [node for node in nodes if node["id"] in child_ids]

    # pass-through nodes carry no template; descend into their single child
    if "node" not in root["data"]:
        return build_json(children[0], nodes, edges)

    final_dict = root["data"]["node"]["template"].copy()
    for field, spec in final_dict.items():
        if field == "_type":
            continue
        field_type = spec["type"]
        if field_type in ["str", "bool", "int", "float", "Any"]:
            # primitive fields carry their value inline
            value = spec["value"]
        elif "dict" in field_type:
            # dict-typed fields are not editable yet; emit an empty mapping
            value = {}
        else:
            # object field: match children by type or any of their base classes
            # (removed a stray debug `print(key)` left in this branch)
            matches = []
            for child in children:
                candidate_types = [child["data"]["type"]]
                if "node" in child["data"]:
                    candidate_types += child["data"]["node"]["base_classes"]
                if field_type in candidate_types:
                    matches.append(child)
            if spec["required"] and not matches:
                raise ValueError(f"No child with type {field_type} found")
            built = [build_json(child, nodes, edges) for child in matches]
            value = built if spec["list"] else next(iter(built), None)
        final_dict[field] = value
    return final_dict

226
backend/src/signature.py Normal file
View file

@ -0,0 +1,226 @@
from fastapi import APIRouter, HTTPException
from langchain import agents, chains, llms, prompts
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
get_all_tool_names,
)
from langchain.chains.conversation import memory as memories
import util
# build router
router = APIRouter(
prefix="/signatures",
tags=["signatures"],
)
def build_template_from_function(name: str, type_to_loader_dict: dict):
    """Build the frontend template for the loader whose return class is ``name``.

    Raises:
        ValueError: if ``name`` matches no loader's return class.
    """
    known = [
        loader.__annotations__["return"].__name__
        for loader in type_to_loader_dict.values()
    ]
    if name not in known:
        raise ValueError(f"{name} not found")

    for _type, loader in type_to_loader_dict.items():
        if loader.__annotations__["return"].__name__ != name:
            continue
        _class = loader.__annotations__["return"]
        docs = util.get_class_doc(_class)

        variables = {"_type": _type}
        for field_name, field in _class.__fields__.items():
            # these fields hold runtime-only objects the frontend cannot edit
            if field_name in ["callback_manager", "requests_wrapper"]:
                continue
            entry = {}
            for attr, attr_value in field.__repr_args__():
                if attr == "default_factory":
                    try:
                        entry["default"] = util.get_default_factory(
                            module=_class.__base__.__module__, function=attr_value
                        )
                    except Exception:
                        entry["default"] = None
                elif attr not in ["name"]:
                    entry[attr] = attr_value
            # the attribute docstring text becomes the input placeholder
            entry["placeholder"] = docs["Attributes"].get(field_name, "")
            variables[field_name] = entry

        return {
            "template": util.format_dict(variables),
            "description": docs["Description"],
            "base_classes": util.get_base_classes(_class),
        }
def build_template_from_class(name: str, type_to_cls_dict: dict):
    """Build the frontend template for the registered class named ``name``.

    Raises:
        ValueError: if ``name`` matches no registered class.
    """
    known = [cls.__name__ for cls in type_to_cls_dict.values()]
    if name not in known:
        raise ValueError(f"{name} not found.")

    for _type, cls in type_to_cls_dict.items():
        if cls.__name__ != name:
            continue
        docs = util.get_class_doc(cls)

        variables = {"_type": _type}
        for field_name, field in cls.__fields__.items():
            # callback managers are runtime-only and not user-editable
            if field_name in ["callback_manager"]:
                continue
            entry = {}
            for attr, attr_value in field.__repr_args__():
                if attr == "default_factory":
                    try:
                        entry["default"] = util.get_default_factory(
                            module=cls.__base__.__module__, function=attr_value
                        )
                    except Exception:
                        entry["default"] = None
                elif attr not in ["name"]:
                    entry[attr] = attr_value
            # the attribute docstring text becomes the input placeholder
            entry["placeholder"] = docs["Attributes"].get(field_name, "")
            variables[field_name] = entry

        return {
            "template": util.format_dict(variables),
            "description": docs["Description"],
            "base_classes": util.get_base_classes(cls),
        }
@router.get("/chain")
def get_chain(name: str):
    """Get the signature of a chain."""
    try:
        template = build_template_from_function(
            name, chains.loading.type_to_loader_dict
        )
    except ValueError as exc:
        # unknown name -> 404 for the frontend
        raise HTTPException(status_code=404, detail="Chain not found") from exc
    return template
@router.get("/agent")
def get_agent(name: str):
    """Get the signature of an agent."""
    try:
        template = build_template_from_class(name, agents.loading.AGENT_TO_CLASS)
    except ValueError as exc:
        # unknown name -> 404 for the frontend
        raise HTTPException(status_code=404, detail="Agent not found") from exc
    return template
@router.get("/prompt")
def get_prompt(name: str):
    """Get the signature of a prompt."""
    try:
        template = build_template_from_function(
            name, prompts.loading.type_to_loader_dict
        )
    except ValueError as exc:
        # unknown name -> 404 for the frontend
        raise HTTPException(status_code=404, detail="Prompt not found") from exc
    return template
@router.get("/llm")
def get_llm(name: str):
    """Get the signature of an llm."""
    try:
        template = build_template_from_class(name, llms.type_to_cls_dict)
    except ValueError as exc:
        # unknown name -> 404 for the frontend
        raise HTTPException(status_code=404, detail="LLM not found") from exc
    return template
# @router.get("/utility")
# def utility(name: str):
# # Raise error if name is not in utilities
# if name not in utilities.__all__:
# raise Exception(f"Prompt {name} not found.")
# _class = getattr(utilities, name)
# return {
# name: {name: value for (name, value) in value.__repr_args__() if name != "name"}
# for name, value in _class.__fields__.items()
# }
@router.get("/memory")
def get_memory(name: str):
    """Get the signature of a memory."""
    try:
        template = build_template_from_class(name, memories.type_to_cls_dict)
    except ValueError as exc:
        # unknown name -> 404 for the frontend
        raise HTTPException(status_code=404, detail="Memory not found") from exc
    return template
# @router.get("/document_loader")
# def document_loader(name: str):
# # Raise error if name is not in document_loader
# if name not in document_loaders.__all__:
# raise Exception(f"Prompt {name} not found.")
# _class = getattr(document_loaders, name)
# return {
# name: {name: value for (name, value) in value.__repr_args__() if name != "name"}
# for name, value in _class.__fields__.items()
# }
@router.get("/tool")
def get_tool(name: str):
    """Get the signature of a tool.

    Args:
        name: user-facing tool name (as returned by ``/list/tools``).

    Returns:
        A template dict describing the tool's parameters plus its metadata.

    Raises:
        HTTPException: 404 if the tool name is unknown.
    """
    # map the user-facing tool name back to its registry key
    all_tools = {
        util.get_tool_params(util.get_tools_dict(tool))["name"]: tool
        for tool in get_all_tool_names()
    }
    if name not in all_tools.keys():
        raise HTTPException(status_code=404, detail=f"Tool {name} not found.")

    # field templates: plain string parameters and an LLM reference
    type_dict = {
        "str": {
            "type": "str",
            "required": True,
            "list": False,
            "show": True,
            "placeholder": "",
            "value": "",
        },
        "llm": {"type": "BaseLLM", "required": True, "list": False, "show": True},
    }

    tool_type = all_tools[name]
    # which constructor parameters the tool needs depends on its registry
    if tool_type in _BASE_TOOLS:
        params = []
    elif tool_type in _LLM_TOOLS:
        params = ["llm"]
    elif tool_type in _EXTRA_LLM_TOOLS:
        _, extra_keys = _EXTRA_LLM_TOOLS[tool_type]
        params = ["llm"] + extra_keys
    elif tool_type in _EXTRA_OPTIONAL_TOOLS:
        _, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type]
        params = extra_keys
    else:
        # previously fell through with `params` unbound (NameError);
        # an unrecognized registry entry now yields a parameterless template
        params = []

    template = {
        param: (type_dict[param] if param == "llm" else type_dict["str"])
        for param in params
    }
    template["_type"] = tool_type

    return {
        "template": template,
        **util.get_tool_params(util.get_tools_dict(tool_type)),
        "base_classes": ["Tool"],
    }
# {"template": signature.tool(tool), **values}
# for tool, values in tools.items()
# }
# return {k: util.get_tool_params(v) for k, v in merged_dict.items()}

181
backend/src/util.py Normal file
View file

@ -0,0 +1,181 @@
import ast
import inspect
import re
import importlib
from langchain.agents.load_tools import (
_BASE_TOOLS,
_LLM_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
)
from typing import Optional
def get_base_classes(cls):
    """Return the names of all ancestor classes, depth-first.

    Bases from pydantic/abc modules are skipped as frontend noise.
    """
    names = []
    for base in cls.__bases__:
        if "pydantic" in base.__module__ or "abc" in base.__module__:
            continue
        names.append(base.__name__)
        # recurse so grandparents are included as well
        names.extend(get_base_classes(base))
    return names
def get_default_factory(module: str, function: str):
    """Resolve a ``<function name>`` repr string and call the factory.

    Returns the factory's result, or None when ``function`` does not look
    like a function repr.
    """
    match = re.search(r"<function (\w+)>", function)
    if match is None:
        return None
    # import the owning module and invoke the zero-arg factory
    factory = getattr(importlib.import_module(module), match[1])
    return factory()
def get_tools_dict(name: Optional[str] = None):
    """Return the merged tool registry, or a single entry when ``name`` is given."""
    merged = dict(_BASE_TOOLS)
    merged.update(_LLM_TOOLS)
    # the *extra* registries map name -> (factory, extra_keys); keep the factory
    merged.update({key: value[0] for key, value in _EXTRA_LLM_TOOLS.items()})
    merged.update({key: value[0] for key, value in _EXTRA_OPTIONAL_TOOLS.items()})
    return merged[name] if name else merged
def get_tool_params(func):
    """Extract the ``name`` and ``description`` passed to ``Tool(...)``.

    Parses ``func``'s source and inspects the first ``return Tool(...)``
    statement found.

    Args:
        func: a tool factory function whose source is available to
            ``inspect.getsource``.

    Returns:
        Dict of the keyword-supplied ``name``/``description`` (keyword call),
        dict of positional args 0 and 2 (positional call), or None when no
        ``return Tool(...)`` statement is found.
    """
    # Parse the function code into an abstract syntax tree
    tree = ast.parse(inspect.getsource(func))

    # Iterate over the statements in the abstract syntax tree
    for node in ast.walk(tree):
        # Find the first return statement
        if not isinstance(node, ast.Return):
            continue
        tool = node.value
        # guard: only a direct `Tool(...)` call is understood; attribute
        # calls (e.g. `module.Tool(...)`) used to raise AttributeError here
        if (
            isinstance(tool, ast.Call)
            and isinstance(tool.func, ast.Name)
            and tool.func.id == "Tool"
        ):
            if tool.keywords:
                tool_params = {}
                for keyword in tool.keywords:
                    if keyword.arg == "name":
                        tool_params["name"] = ast.literal_eval(keyword.value)
                    elif keyword.arg == "description":
                        tool_params["description"] = ast.literal_eval(keyword.value)
                return tool_params
            # positional form: Tool(name, func, description)
            return {
                "name": ast.literal_eval(tool.args[0]),
                "description": ast.literal_eval(tool.args[2]),
            }

    # Return None if no return statement was found
    return None
def get_class_doc(class_name):
    """
    Extracts information from the docstring of a given class.

    Args:
        class_name: the class to extract information from

    Returns:
        A dictionary containing the extracted information, with keys
        for 'Description', 'Parameters', 'Attributes', and 'Returns'.
    """
    # Get the class docstring; tolerate classes without one
    docstring = class_name.__doc__ or ""

    # Parse the docstring to extract information
    lines = docstring.split("\n")
    data = {
        "Description": "",
        "Parameters": {},
        "Attributes": {},
        "Example": [],
        "Returns": {},
    }
    current_section = "Description"
    for line in lines:
        line = line.strip()
        if not line:
            continue
        # a lone "SectionName:" line switches the current section
        if (
            line.startswith(tuple(data.keys()))
            and len(line.split()) == 1
            and line.endswith(":")
        ):
            current_section = line[:-1]
            continue
        if current_section in ["Description", "Example"]:
            data[current_section] += line
        else:
            # split only on the FIRST colon so descriptions containing
            # colons (e.g. URLs) no longer raise ValueError
            param, _, desc = line.partition(":")
            data[current_section][param.strip()] = desc.strip()
    return data
def format_dict(d):
    """
    Formats a dictionary by removing certain keys and modifying the
    values of other keys.

    Args:
        d: the template dict to format; each value (except ``_type``) is a
            field spec dict with at least ``type`` and ``required`` keys.

    Returns:
        The same dictionary, mutated in place, with type names normalized
        and ``list``/``show``/``multiline``/``value`` fields filled in.
    """
    # (removed a dead `if key == "examples": pass` debug leftover)
    for key, value in d.items():
        if key == "_type":
            continue

        _type = value["type"]

        # Remove 'Optional' wrapper
        if "Optional" in _type:
            _type = _type.replace("Optional[", "")[:-1]

        # Check for list type
        if "List" in _type:
            _type = _type.replace("List[", "")[:-1]
            value["list"] = True
        else:
            value["list"] = False

        # Replace 'Mapping' with 'dict'
        if "Mapping" in _type:
            _type = _type.replace("Mapping", "dict")

        # allowed_tools is rendered as a Tool connector in the UI
        value["type"] = "Tool" if key == "allowed_tools" else _type

        # Show if required, plus a small allowlist of always-shown fields
        value["show"] = bool(
            (value["required"] and key not in ["input_variables"])
            or key
            in ["allowed_tools", "verbose", "Memory", "memory", "prefix", "examples"]
            or "api_key" in key
        )

        # long text fields get a multiline editor
        value["multiline"] = key in ["suffix", "prefix", "template", "examples"]

        # Replace default value with actual value
        if "default" in value:
            value["value"] = value["default"]
            value.pop("default")
    return d

0
docker-compose.yml Normal file
View file

4
requirements.txt Normal file
View file

@ -0,0 +1,4 @@
langchain==0.0.82
openai==0.26.5
fastapi==0.91.0

3012
response.json Normal file

File diff suppressed because it is too large Load diff