From 7d483eb7c954f7c228525f7e12fb99c8539778bf Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 28 Jun 2023 16:32:48 +0100 Subject: [PATCH 001/221] Add ClassCodeExtractor and related functions to extract and handle information from a class. Also add the utility functions is_valid_class_template, get_entrypoint_function_args_and_return_type, find_class_type, and build_langchain_template_custom_component. --- .../langflow/api/extract_info_from_class.py | 101 ++++++++++++++++++ src/backend/langflow/api/v1/endpoints.py | 27 +++++ src/backend/langflow/api/v1/schemas.py | 7 +- src/backend/langflow/interface/types.py | 43 ++++++++ 4 files changed, 177 insertions(+), 1 deletion(-) create mode 100644 src/backend/langflow/api/extract_info_from_class.py diff --git a/src/backend/langflow/api/extract_info_from_class.py b/src/backend/langflow/api/extract_info_from_class.py new file mode 100644 index 000000000..b1923068b --- /dev/null +++ b/src/backend/langflow/api/extract_info_from_class.py @@ -0,0 +1,101 @@ +import ast + + +class ClassCodeExtractor: + def __init__(self, code): + self.code = code + self.function_entrypoint_name = "build" + self.data = { + "imports": [], + "class": { + "inherited_classes": "", + "name": "", + "init": "" + }, + "functions": [] + } + + def _handle_import(self, node): + for alias in node.names: + module_name = getattr(node, 'module', None) + self.data['imports'].append( + f"{module_name}.{alias.name}" if module_name else alias.name) + + def _handle_class(self, node): + self.data['class'].update({ + 'name': node.name, + 'inherited_classes': [ast.unparse(base) for base in node.bases] + }) + + for inner_node in node.body: + if isinstance(inner_node, ast.FunctionDef): + self._handle_function(inner_node) + + def _handle_function(self, node): + function_name = node.name + function_args_str = ast.unparse(node.args) + function_args = function_args_str.split( + ", ") if function_args_str else [] + + return_type = ast.unparse(node.returns) if 
node.returns else "None" + + function_data = { + "name": function_name, + "arguments": function_args, + "return_type": return_type + } + + if function_name == "__init__": + self.data['class']['init'] = function_args_str.split( + ", ") if function_args_str else [] + else: + self.data["functions"].append(function_data) + + def extract_class_info(self): + module = ast.parse(self.code) + + for node in module.body: + if isinstance(node, (ast.Import, ast.ImportFrom)): + self._handle_import(node) + elif isinstance(node, ast.ClassDef): + self._handle_class(node) + + return self.data + + def get_entrypoint_function_args_and_return_type(self): + data = self.extract_class_info() + functions = data.get("functions", []) + + build_function = next( + (f for f in functions if f["name"] == + self.function_entrypoint_name), None + ) + + funtion_args = build_function.get("arguments", None) + return_type = build_function.get("return_type", None) + + return funtion_args, return_type + + +def is_valid_class_template(code: dict): + function_entrypoint_name = "build" + return_type_valid_list = ["ConversationChain", "Tool"] + + class_name = code.get("class", {}).get("name", None) + if not class_name: # this will also check for None, empty string, etc. 
+ return False + + functions = code.get("functions", []) + # use a generator and next to find if a function matching the criteria exists + build_function = next( + (f for f in functions if f["name"] == function_entrypoint_name), None + ) + + if not build_function: + return False + + # Check if the return type of the build function is valid + if build_function.get("return_type") not in return_type_valid_list: + return False + + return True diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index 1114412c5..48a7a6261 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -6,14 +6,22 @@ from langflow.utils.logger import logger from fastapi import APIRouter, Depends, HTTPException, UploadFile +from langflow.api.extract_info_from_class import ( + ClassCodeExtractor, + is_valid_class_template +) + from langflow.api.v1.schemas import ( ProcessResponse, UploadFileResponse, + CustomComponentCode, ) from langflow.interface.types import ( build_langchain_types_dict, + build_langchain_template_custom_component ) + from langflow.database.base import get_session from sqlmodel import Session @@ -83,3 +91,22 @@ def get_version(): from langflow import __version__ return {"version": __version__} + + +# @router.post("/custom_component", response_model=CustomComponentResponse, status_code=200) +@router.post("/custom_component", status_code=200) +def custom_component( + raw_code: CustomComponentCode, + session: Session = Depends(get_session), +): + extractor = ClassCodeExtractor(raw_code.code) + data = extractor.extract_class_info() + valid = is_valid_class_template(data) + + function_args, function_return_type = extractor.get_entrypoint_function_args_and_return_type() + + return build_langchain_template_custom_component( + raw_code.code, + function_args, + function_return_type + ) diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index 
ed5bf8b3b..6448f07bb 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -58,7 +58,8 @@ class ChatResponse(ChatMessage): @validator("type") def validate_message_type(cls, v): if v not in ["start", "stream", "end", "error", "info", "file"]: - raise ValueError("type must be start, stream, end, error, info, or file") + raise ValueError( + "type must be start, stream, end, error, info, or file") return v @@ -106,3 +107,7 @@ class StreamData(BaseModel): def __str__(self) -> str: return f"event: {self.event}\ndata: {json.dumps(self.data)}\n\n" + + +class CustomComponentCode(BaseModel): + code: str diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 085537756..17c1562e4 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -52,3 +52,46 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union if created_types[creator.type_name].values(): all_types.update(created_types) return all_types + + +def find_class_type(class_name, classes_dict): + return next( + ( + {"type": class_type, "class": class_name} + for class_type, class_list in classes_dict.items() + if class_name in class_list + ), + {"error": "class not found"}, + ) + + +def build_langchain_template_custom_component(raw_code, function_args, function_return_type): + type_list = get_type_list() + type_and_class = find_class_type("Tool", type_list) + + # Field with the Python code to allow update + code_field = { + "code": { + "required": True, + "placeholder": "", + "show": True, + "multiline": True, + "value": raw_code, + "password": False, + "name": "code", + "advanced": False, + "type": "code", + "list": False + } + } + + # TODO: Add extra fields + + # TODO: Build template result + template = chain_creator.to_dict()['chains']['ConversationChain'] + + template.get('template')['code'] = code_field.get('code') + + return template + # return 
globals()['tool_creator'].to_dict()[type_and_class['type']][type_and_class['class']] + # return chain_creator.to_dict()['chains']['ConversationChain'] From cd4c73ac283bfdc1dbe5da12338c47a96f00546a Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 28 Jun 2023 17:37:48 +0100 Subject: [PATCH 002/221] Import random module and HTTPStatus from http module in endpoints.py. Add CustomComponentResponseError to the import in schemas.py. Change the status_code in create_upload_file endpoint to use the HTTPStatus.CREATED constant. Change the status_code in custom_component endpoint to use the HTTPStatus.OK constant. Add a new endpoint for custom_component_error that returns a random error response. --- src/backend/langflow/api/v1/endpoints.py | 37 +++- src/backend/langflow/api/v1/schemas.py | 5 + src/backend/langflow/main.py | 242 +---------------------- 3 files changed, 43 insertions(+), 241 deletions(-) diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index 48a7a6261..7104a2002 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -1,3 +1,6 @@ +import random + +from http import HTTPStatus from typing import Optional from langflow.cache.utils import save_uploaded_file from langflow.database.models.flow import Flow @@ -15,6 +18,7 @@ from langflow.api.v1.schemas import ( ProcessResponse, UploadFileResponse, CustomComponentCode, + CustomComponentResponseError, ) from langflow.interface.types import ( @@ -70,7 +74,7 @@ async def process_flow( raise HTTPException(status_code=500, detail=str(e)) from e -@router.post("/upload/{flow_id}", response_model=UploadFileResponse, status_code=201) +@router.post("/upload/{flow_id}", response_model=UploadFileResponse, status_code=HTTPStatus.CREATED) async def create_upload_file(file: UploadFile, flow_id: str): # Cache file try: @@ -93,11 +97,9 @@ def get_version(): return {"version": __version__} -# @router.post("/custom_component", 
response_model=CustomComponentResponse, status_code=200) -@router.post("/custom_component", status_code=200) -def custom_component( +@router.post("/custom_component", status_code=HTTPStatus.OK) +async def custom_component( raw_code: CustomComponentCode, - session: Session = Depends(get_session), ): extractor = ClassCodeExtractor(raw_code.code) data = extractor.extract_class_info() @@ -110,3 +112,28 @@ def custom_component( function_args, function_return_type ) + + +# TODO: Just for test - will be remove +@router.get("/custom_component_error", + response_model=CustomComponentResponseError, + status_code=HTTPStatus.BAD_REQUEST) +async def custom_component_error(): + error1 = { + "detail": "'int' object has no attribute 'get'", + "traceback": "Traceback (most recent call last):\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/errors.py\", line 162, in __call__\n await self.app(scope, receive, _send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/cors.py\", line 83, in __call__\n await self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/exceptions.py\", line 79, in __call__\n raise exc\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/exceptions.py\", line 68, in __call__\n await self.app(scope, receive, sender)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py\", line 20, in __call__\n raise e\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py\", line 17, in __call__\n await 
self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 718, in __call__\n await route.handle(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 276, in handle\n await self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 66, in app\n response = await func(request)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/routing.py\", line 241, in app\n raw_response = await run_endpoint_function(\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/routing.py\", line 167, in run_endpoint_function\n return await dependant.call(**values)\n File \"/Users/gustavopoa/Documents/Langspace/langflow/src/backend/langflow/api/v1/endpoints.py\", line 124, in custom_component_error\n c = x.get(\"a\")\nAttributeError: 'int' object has no attribute 'get'\n" + } + + error2 = { + "detail": "division by zero", + "traceback": "Traceback (most recent call last):\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/errors.py\", line 162, in __call__\n await self.app(scope, receive, _send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/cors.py\", line 83, in __call__\n await self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/exceptions.py\", line 79, in __call__\n raise exc\n File 
\"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/exceptions.py\", line 68, in __call__\n await self.app(scope, receive, sender)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py\", line 20, in __call__\n raise e\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py\", line 17, in __call__\n await self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 718, in __call__\n await route.handle(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 276, in handle\n await self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 66, in app\n response = await func(request)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/routing.py\", line 241, in app\n raw_response = await run_endpoint_function(\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/routing.py\", line 167, in run_endpoint_function\n return await dependant.call(**values)\n File \"/Users/gustavopoa/Documents/Langspace/langflow/src/backend/langflow/api/v1/endpoints.py\", line 130, in custom_component_error\n return 1/0\nZeroDivisionError: division by zero\n" + } + + error3 = { + "detail": "name 'CreateObject' is not defined", + "traceback": "Traceback (most recent call last):\n File 
\"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/errors.py\", line 162, in __call__\n await self.app(scope, receive, _send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/cors.py\", line 83, in __call__\n await self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/exceptions.py\", line 79, in __call__\n raise exc\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/middleware/exceptions.py\", line 68, in __call__\n await self.app(scope, receive, sender)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py\", line 20, in __call__\n raise e\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py\", line 17, in __call__\n await self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 718, in __call__\n await route.handle(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 276, in handle\n await self.app(scope, receive, send)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/starlette/routing.py\", line 66, in app\n response = await func(request)\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/routing.py\", line 241, in app\n raw_response = await 
run_endpoint_function(\n File \"/Users/gustavopoa/Library/Caches/pypoetry/virtualenvs/langflow-3LyDxlRJ-py3.10/lib/python3.10/site-packages/fastapi/routing.py\", line 167, in run_endpoint_function\n return await dependant.call(**values)\n File \"/Users/gustavopoa/Documents/Langspace/langflow/src/backend/langflow/api/v1/endpoints.py\", line 130, in custom_component_error\n error3 = CreateObject()\nNameError: name 'CreateObject' is not defined\n" + } + + error = [error1, error2, error3] + + return error[random.randint(0, 2)] diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index eac732575..1d38998b2 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -111,3 +111,8 @@ class StreamData(BaseModel): class CustomComponentCode(BaseModel): code: str + + +class CustomComponentResponseError(BaseModel): + detail: str + traceback: str diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 12d70bfb4..e6594742c 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -5,238 +5,12 @@ from langflow.api import router from langflow.database.base import create_db_and_tables from langflow.interface.utils import setup_llm_caching -template_node = { - "template": { - "code": { - "required": True, - "placeholder": "", - "show": True, - "multiline": True, - "value": "\ndef my_user_python_function(text: str) -> str:\n \"\"\"This is a default python function that returns the input text\"\"\"\n return text.upper()\n", - "password": False, - "name": "code", - "advanced": False, - "type": "code", - "list": False - }, - "lc_kwargs": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "lc_kwargs", - "advanced": True, - "type": "code", - "list": False - }, - "verbose": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": False, - "password": False, - "name": 
"verbose", - "advanced": False, - "type": "bool", - "list": False - }, - "callbacks": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "callbacks", - "advanced": False, - "type": "langchain.callbacks.base.BaseCallbackHandler", - "list": True - }, - "tags": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "tags", - "advanced": False, - "type": "str", - "list": True - }, - "client": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "client", - "advanced": False, - "type": "Any", - "list": False - }, - "model_name": { - "required": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": "gpt-3.5-turbo", - "password": False, - "options": [ - "gpt-3.5-turbo-0613", - "gpt-3.5-turbo", - "gpt-3.5-turbo-16k-0613", - "gpt-3.5-turbo-16k", - "gpt-4-0613", - "gpt-4-32k-0613", - "gpt-4", - "gpt-4-32k" - ], - "name": "model_name", - "advanced": False, - "type": "str", - "list": True - }, - "temperature": { - "required": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": 0.7, - "password": False, - "name": "temperature", - "advanced": False, - "type": "float", - "list": False - }, - "model_kwargs": { - "required": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "model_kwargs", - "advanced": True, - "type": "code", - "list": False - }, - "openai_api_key": { - "required": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": "", - "password": True, - "name": "openai_api_key", - "display_name": "OpenAI API Key", - "advanced": False, - "type": "str", - "list": False - }, - "openai_api_base": { - "required": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "openai_api_base", - "display_name": "OpenAI API Base", - "advanced": False, 
- "type": "str", - "list": False - }, - "openai_organization": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "openai_organization", - "display_name": "OpenAI Organization", - "advanced": False, - "type": "str", - "list": False - }, - "openai_proxy": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "openai_proxy", - "display_name": "OpenAI Proxy", - "advanced": False, - "type": "str", - "list": False - }, - "request_timeout": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "request_timeout", - "advanced": False, - "type": "float", - "list": False - }, - "max_retries": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 6, - "password": False, - "name": "max_retries", - "advanced": False, - "type": "int", - "list": False - }, - "streaming": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": False, - "password": False, - "name": "streaming", - "advanced": False, - "type": "bool", - "list": False - }, - "n": { - "required": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 1, - "password": False, - "name": "n", - "advanced": False, - "type": "int", - "list": False - }, - "max_tokens": { - "required": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": True, - "name": "max_tokens", - "advanced": False, - "type": "int", - "list": False - }, - "_type": "ChatOpenAI" - }, - "base_classes": [ - "BaseChatModel", - "Serializable", - "BaseLanguageModel", - "ChatOpenAI" - ], - "description": "Wrapper around OpenAI Chat large language models." 
-} +from pydantic import BaseModel + + +class ErrorMessage(BaseModel): + detail: str + traceback: str def create_app(): @@ -252,10 +26,6 @@ def create_app(): def get_health(): return {"status": "OK"} - @app.get("/dynamic_node") - def get_dynamic_nome(): - return template_node - app.add_middleware( CORSMiddleware, allow_origins=origins, From 82b76840e31d67f58d7c3de0a0828a714ff09788 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Thu, 29 Jun 2023 01:57:53 +0100 Subject: [PATCH 003/221] feat: Add CustomComponent tool to Langflow API - Added support for the CustomComponent tool in the Langflow API. - The tool has been added to the config.yaml file. - The CustomComponentNode class has been implemented in the frontend nodes. - The code changes include modifications in various files for the implementation of the CustomComponent tool. - The code changes include the addition of a new field "code" in the TemplateField class. - The build_langchain_template_custom_component function has been implemented to build the template for the CustomComponent tool. - New custom fields "my_id", "year", and "other_field" have been added to the template for the CustomComponent tool. 
--- .../langflow/api/extract_info_from_class.py | 10 ++- src/backend/langflow/config.yaml | 1 + src/backend/langflow/custom/customs.py | 1 + .../langflow/interface/initialize/loading.py | 8 ++- .../langflow/interface/tools/constants.py | 10 ++- .../langflow/interface/tools/custom.py | 6 ++ src/backend/langflow/interface/types.py | 63 +++++++++++++++++-- .../langflow/template/frontend_node/tools.py | 29 ++++++++- src/backend/langflow/utils/constants.py | 42 +++++++++---- 9 files changed, 144 insertions(+), 26 deletions(-) diff --git a/src/backend/langflow/api/extract_info_from_class.py b/src/backend/langflow/api/extract_info_from_class.py index b1923068b..032652b44 100644 --- a/src/backend/langflow/api/extract_info_from_class.py +++ b/src/backend/langflow/api/extract_info_from_class.py @@ -71,10 +71,14 @@ class ClassCodeExtractor: self.function_entrypoint_name), None ) - funtion_args = build_function.get("arguments", None) - return_type = build_function.get("return_type", None) + if build_function: + function_args = build_function.get("arguments", None) + return_type = build_function.get("return_type", None) + else: + function_args = None + return_type = None - return funtion_args, return_type + return function_args, return_type def is_valid_class_template(code: dict): diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 26fec2be3..0896bb77d 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -95,6 +95,7 @@ tools: - Calculator - Serper Search - Tool + - CustomComponent - PythonFunctionTool - PythonFunction - JsonSpec diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py index fb6c1da16..a6ecb75f7 100644 --- a/src/backend/langflow/custom/customs.py +++ b/src/backend/langflow/custom/customs.py @@ -8,6 +8,7 @@ CUSTOM_NODES = { "tools": { "PythonFunctionTool": frontend_node.tools.PythonFunctionToolNode(), "PythonFunction": 
frontend_node.tools.PythonFunctionNode(), + "CustomComponent": frontend_node.tools.CustomComponentNode(), "Tool": frontend_node.tools.ToolNode(), }, "agents": { diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index bbaa1f131..1ddae19b7 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -131,7 +131,7 @@ def instantiate_tool(node_type, class_object, params): if node_type == "JsonSpec": params["dict_"] = load_file_into_dict(params.pop("path")) return class_object(**params) - elif node_type == "PythonFunctionTool": + elif node_type in ["PythonFunctionTool", "CustomComponent"]: params["func"] = get_function(params.get("code")) return class_object(**params) # For backward compatibility @@ -243,7 +243,8 @@ def replace_zero_shot_prompt_with_prompt_template(nodes): if tool["type"] != "chatOutputNode" and "Tool" in tool["data"]["node"]["base_classes"] ] - node["data"] = build_prompt_template(prompt=node["data"], tools=tools) + node["data"] = build_prompt_template( + prompt=node["data"], tools=tools) break return nodes @@ -260,7 +261,8 @@ def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs) tool_names = [tool.name for tool in allowed_tools] # Agent class requires an output_parser but Agent classes # have a default output_parser. 
- agent = agent_class(allowed_tools=tool_names, llm_chain=llm_chain) # type: ignore + agent = agent_class(allowed_tools=tool_names, + llm_chain=llm_chain) # type: ignore return AgentExecutor.from_agent_and_tools( agent=agent, tools=allowed_tools, diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py index fea3c5237..fa2ce87a4 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/langflow/interface/tools/constants.py @@ -9,16 +9,22 @@ from langchain.agents.load_tools import ( from langchain.tools.json.tool import JsonSpec from langflow.interface.importing.utils import import_class -from langflow.interface.tools.custom import PythonFunctionTool, PythonFunction +from langflow.interface.tools.custom import ( + PythonFunctionTool, + PythonFunction, + CustomComponent +) FILE_TOOLS = {"JsonSpec": JsonSpec} CUSTOM_TOOLS = { "Tool": Tool, + "CustomComponent": CustomComponent, "PythonFunctionTool": PythonFunctionTool, "PythonFunction": PythonFunction, } -OTHER_TOOLS = {tool: import_class(f"langchain.tools.{tool}") for tool in tools.__all__} +OTHER_TOOLS = {tool: import_class(f"langchain.tools.{tool}") + for tool in tools.__all__} ALL_TOOLS_NAMES = { **_BASE_TOOLS, diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/langflow/interface/tools/custom.py index 0e2e5ff57..6c6703f36 100644 --- a/src/backend/langflow/interface/tools/custom.py +++ b/src/backend/langflow/interface/tools/custom.py @@ -52,3 +52,9 @@ class PythonFunction(Function): """Python function""" code: str + + +class CustomComponent(Function): + """Python function""" + + code: str diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 17c1562e4..28c3a8840 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -12,6 +12,9 @@ from langflow.interface.utilities.base import utility_creator from 
langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator +from langflow.template.field.base import TemplateField +from langflow.template.frontend_node.tools import CustomComponentNode + def get_type_list(): """Get a list of all langchain types""" @@ -54,6 +57,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union return all_types +# TODO: Move to correct place def find_class_type(class_name, classes_dict): return next( ( @@ -65,10 +69,23 @@ def find_class_type(class_name, classes_dict): ) -def build_langchain_template_custom_component(raw_code, function_args, function_return_type): - type_list = get_type_list() - type_and_class = find_class_type("Tool", type_list) +# TODO: Move to correct place +def add_new_custom_field(template, field_name: str, field_type: str): + new_field = TemplateField( + name=field_name, + field_type=field_type, + show=True, + advanced=False + ) + template.get('template')[field_name] = new_field.to_dict() + template.get('custom_fields').append(field_name) + return template + +# TODO: Move to correct place + + +def add_code_field(template, raw_code): # Field with the Python code to allow update code_field = { "code": { @@ -84,13 +101,47 @@ def build_langchain_template_custom_component(raw_code, function_args, function_ "list": False } } + template.get('template')['code'] = code_field.get('code') + + return template + + +def build_langchain_template_custom_component(raw_code, function_args, function_return_type): + # type_list = get_type_list() + # type_and_class = find_class_type("Tool", type_list) + # node = get_custom_nodes(node_type: str) + + # TODO: Build base template + template = llm_creator.to_dict()['llms']['ChatOpenAI'] + + template = CustomComponentNode().to_dict().get('CustomComponent') # TODO: Add extra fields + template = add_new_custom_field( + template, + "my_id", + "str" + ) - # TODO: Build template result - template = 
chain_creator.to_dict()['chains']['ConversationChain'] + template = add_new_custom_field( + template, + "year", + "int" + ) - template.get('template')['code'] = code_field.get('code') + template = add_new_custom_field( + template, + "other_field", + "bool" + ) + + template = add_code_field( + template, + raw_code + ) + + # criar um vertex + # olhar loading.py return template # return globals()['tool_creator'].to_dict()[type_and_class['type']][type_and_class['class']] diff --git a/src/backend/langflow/template/frontend_node/tools.py b/src/backend/langflow/template/frontend_node/tools.py index fa3942bd2..bfde54cb5 100644 --- a/src/backend/langflow/template/frontend_node/tools.py +++ b/src/backend/langflow/template/frontend_node/tools.py @@ -1,7 +1,10 @@ from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template -from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION +from langflow.utils.constants import ( + DEFAULT_PYTHON_FUNCTION, + DEFAULT_CUSTOM_COMPONENT_CODE +) class ToolNode(FrontendNode): @@ -137,3 +140,27 @@ class PythonFunctionNode(FrontendNode): def to_dict(self): return super().to_dict() + + +class CustomComponentNode(FrontendNode): + name: str = "CustomComponent" + template: Template = Template( + type_name="CustomComponent", + fields=[ + TemplateField( + field_type="code", + required=True, + placeholder="", + is_list=False, + show=True, + value=DEFAULT_CUSTOM_COMPONENT_CODE, + name="code", + advanced=False, + ) + ], + ) + description: str = "Python Class to be executed." 
+ base_classes: list[str] = [] + + def to_dict(self): + return super().to_dict() diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py index 44103c2b7..70ad06ee3 100644 --- a/src/backend/langflow/utils/constants.py +++ b/src/backend/langflow/utils/constants.py @@ -17,18 +17,30 @@ CHAT_OPENAI_MODELS = [ ] ANTHROPIC_MODELS = [ - "claude-v1", # largest model, ideal for a wide range of more complex tasks. - "claude-v1-100k", # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window. - "claude-instant-v1", # A smaller model with far lower latency, sampling at roughly 40 words/sec! - "claude-instant-v1-100k", # Like claude-instant-v1 with a 100,000 token context window but retains its performance. + # largest model, ideal for a wide range of more complex tasks. + "claude-v1", + # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window. + "claude-v1-100k", + # A smaller model with far lower latency, sampling at roughly 40 words/sec! + "claude-instant-v1", + # Like claude-instant-v1 with a 100,000 token context window but retains its performance. + "claude-instant-v1-100k", + # Specific sub-versions of the above models: - "claude-v1.3", # Vs claude-v1.2: better instruction-following, code, and non-English dialogue and writing. - "claude-v1.3-100k", # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window. - "claude-v1.2", # Vs claude-v1.1: small adv in general helpfulness, instruction following, coding, and other tasks. - "claude-v1.0", # An earlier version of claude-v1. - "claude-instant-v1.1", # Latest version of claude-instant-v1. Better than claude-instant-v1.0 at most tasks. - "claude-instant-v1.1-100k", # Version of claude-instant-v1.1 with a 100K token context window. - "claude-instant-v1.0", # An earlier version of claude-instant-v1. 
+ # Vs claude-v1.2: better instruction-following, code, and non-English dialogue and writing. + "claude-v1.3", + # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window. + "claude-v1.3-100k", + # Vs claude-v1.1: small adv in general helpfulness, instruction following, coding, and other tasks. + "claude-v1.2", + # An earlier version of claude-v1. + "claude-v1.0", + # Latest version of claude-instant-v1. Better than claude-instant-v1.0 at most tasks. + "claude-instant-v1.1", + # Version of claude-instant-v1.1 with a 100K token context window. + "claude-instant-v1.1-100k", + # An earlier version of claude-instant-v1. + "claude-instant-v1.0", ] DEFAULT_PYTHON_FUNCTION = """ @@ -36,4 +48,12 @@ def python_function(text: str) -> str: \"\"\"This is a default python function that returns the input text\"\"\" return text """ + +DEFAULT_CUSTOM_COMPONENT_CODE = """ +def custom_component(text: str) -> str: + \"\"\"This is a default custom component function that returns the input text\"\"\" + \"\"\"TODO: Add a Class template\"\"\" + return text +""" + DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"] From 97d59963538a43011b41f86abc11fc92b7309926 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Thu, 29 Jun 2023 09:50:25 -0300 Subject: [PATCH 004/221] fix(codeAreaModal): fix tabs rendering issue and add support for multiple tabs in CodeAreaModal component feat(codeAreaModal): add functionality to switch between "code" and "errors" tabs in CodeAreaModal component --- .../src/modals/codeAreaModal/index.tsx | 63 ++++++++++++++----- 1 file changed, 46 insertions(+), 17 deletions(-) diff --git a/src/frontend/src/modals/codeAreaModal/index.tsx b/src/frontend/src/modals/codeAreaModal/index.tsx index 3c2da7eea..1ed8b3a2c 100644 --- a/src/frontend/src/modals/codeAreaModal/index.tsx +++ b/src/frontend/src/modals/codeAreaModal/index.tsx @@ -22,6 +22,12 @@ import { Button } from "../../components/ui/button"; import { 
CODE_PROMPT_DIALOG_SUBTITLE } from "../../constants"; import { TerminalSquare } from "lucide-react"; import { APIClassType } from "../../types/api"; +import { + Tabs, + TabsContent, + TabsList, + TabsTrigger, +} from "../../components/ui/tabs"; export default function CodeAreaModal({ value, @@ -40,6 +46,7 @@ export default function CodeAreaModal({ const { dark } = useContext(darkContext); const { setErrorData, setSuccessData } = useContext(alertContext); const { closePopUp } = useContext(PopUpContext); + const [activeTab, setActiveTab] = useState("0"); const ref = useRef(); function setModalOpen(x: boolean) { setOpen(x); @@ -97,6 +104,7 @@ export default function CodeAreaModal({ } }); } + const tabs = [{ name: "code" }, { name: "errors" }] return ( @@ -112,24 +120,45 @@ export default function CodeAreaModal({ {CODE_PROMPT_DIALOG_SUBTITLE} + setActiveTab(value)} + > +
+ + {tabs.map((tab, index) => ( + {tab.name} + ))} + + {tabs.map((tab, index) => ( + + {tab.name === "code" ?
+ { + setCode(value); + }} + className="w-full rounded-lg h-full custom-scroll border-[1px] border-gray-300 dark:border-gray-600" + /> +
:
errors
} +
)) + } +
+
+ -
- { - setCode(value); - }} - className="w-full rounded-lg h-[300px] custom-scroll border-[1px] border-gray-300 dark:border-gray-600" - /> -
+ + + + ); } From f92fefba46c252b8812ca8fc5b6e7d6d05b62dda Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Thu, 6 Jul 2023 00:21:35 +0100 Subject: [PATCH 039/221] Refactor code for CustomComponent class and build_langchain_template_custom_component function The code in the CustomComponent class has been refactored to improve readability and maintainability. The `_handle_function` method now handles function arguments correctly. Additionally, the `is_valid_class_template` method has been renamed to `_class_template_validation`, and it now raises an HTTPException with a detailed error message when the main class or the build function contains invalid information. In the `build_langchain_template_custom_component` function, base classes are now retrieved from the `return_type` and added to the `template.base_classes` list. A try-except block is used to handle possible KeyError or AttributeError exceptions, and an HTTPException is raised with the corresponding error message and traceback if an error occurs. These changes ensure more accurate validation and handle potential errors more gracefully. --- .../langflow/interface/tools/custom.py | 20 ++++++++++++--- src/backend/langflow/interface/types.py | 25 ++++++++++++++----- 2 files changed, 35 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/langflow/interface/tools/custom.py index 5b0b589da..60541960e 100644 --- a/src/backend/langflow/interface/tools/custom.py +++ b/src/backend/langflow/interface/tools/custom.py @@ -185,10 +185,16 @@ class CustomComponent(BaseModel): return function_args, return_type - def is_valid_class_template(self, code: dict): + def _class_template_validation(self, code: dict): class_name = code.get("class", {}).get("name", None) if not class_name: # this will also check for None, empty string, etc. 
- return False + raise HTTPException( + status_code=400, + detail={ + "error": "The main class must have a valid name.", + "traceback": "", + }, + ) functions = code.get("functions", []) if build_function := next( @@ -198,7 +204,13 @@ class CustomComponent(BaseModel): # Check if the return type of the build function is valid return build_function.get("return_type") in self.return_type_valid_list else: - return False + raise HTTPException( + status_code=400, + detail={ + "error": f"The class return [{str(build_function.get('return_type'))}] needs to be an item from this list. [{str(self.return_type_valid_list)}]", + "traceback": "", + }, + ) def get_function(self): return validate.create_function(self.code, self.function_entrypoint_name) @@ -209,7 +221,7 @@ class CustomComponent(BaseModel): @property def is_valid(self): - return self.is_valid_class_template(self.data) + return self._class_template_validation(self.data) @property def args_and_return_type(self): diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 854e0a9ac..58238958c 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -18,6 +18,13 @@ from langflow.template.field.base import TemplateField from langflow.template.frontend_node.tools import CustomComponentNode from langflow.interface.retrievers.base import retriever_creator +from langflow.utils.util import get_base_classes + +from fastapi import HTTPException +import traceback + +# Used to get the base_classes list + def get_type_list(): """Get a list of all langchain types""" @@ -117,12 +124,18 @@ def build_langchain_template_custom_component(extractor: CustomComponent): template = add_code_field(template, raw_code) - # TODO: Get base classes from "return_type" and add to template.base_classes - template.get("base_classes").append("ConversationChain") - template.get("base_classes").append("LLMChain") - template.get("base_classes").append("Chain") - 
template.get("base_classes").append("Serializable") - template.get("base_classes").append("function") + # Get base classes from "return_type" and add to template.base_classes + try: + return_type_instance = globals()[return_type] + base_classes = get_base_classes(return_type_instance) + except (KeyError, AttributeError) as err: + raise HTTPException( + status_code=400, + detail={"error": type(err).__name__, "traceback": traceback.format_exc()}, + ) from err + + for base_class in base_classes: + template.get("base_classes").append(base_class) return template From 11cece92b84806ff5ff6f20d4e7567d40c42bcf6 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Thu, 6 Jul 2023 01:02:34 +0100 Subject: [PATCH 040/221] Refactor code for CustomComponent class - Refactored code to replace the deprecated is_valid property with the is_check_valid method. - Added validation for the entrypoint function name and return type. - Modified the error messages to provide more specific details. This commit implements the necessary changes to refactor the CustomComponent class in the endpoints.py file. The is_valid property has been replaced with the is_check_valid method to check the validity of the custom component. Additionally, validation has been added to ensure the presence of a valid entrypoint function and a valid return type. If any of the validation checks fail, appropriate error messages are raised to provide detailed traceback information. 
--- src/backend/langflow/api/v1/endpoints.py | 5 +--- .../langflow/interface/tools/custom.py | 28 +++++++++++++------ 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index 6909a299f..c9f671bcf 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -100,9 +100,6 @@ async def custom_component( raw_code: CustomComponentCode, ): extractor = CustomComponent(code=raw_code.code) - - if not extractor.is_valid: - print("ERROR") - # TODO: Raise error + extractor.is_check_valid() return build_langchain_template_custom_component(extractor) diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/langflow/interface/tools/custom.py index 60541960e..d18577599 100644 --- a/src/backend/langflow/interface/tools/custom.py +++ b/src/backend/langflow/interface/tools/custom.py @@ -197,21 +197,32 @@ class CustomComponent(BaseModel): ) functions = code.get("functions", []) - if build_function := next( + build_function = next( (f for f in functions if f["name"] == self.function_entrypoint_name), None, - ): - # Check if the return type of the build function is valid - return build_function.get("return_type") in self.return_type_valid_list - else: + ) + + if not build_function: raise HTTPException( status_code=400, detail={ - "error": f"The class return [{str(build_function.get('return_type'))}] needs to be an item from this list. 
[{str(self.return_type_valid_list)}]", - "traceback": "", + "error": "Invalid entrypoint function name", + "traceback": f"There needs to be at least one entrypoint function named '{self.function_entrypoint_name}' and it needs to return one of the types from this list {str(self.return_type_valid_list)}.", }, ) + return_type = build_function.get("return_type") + if return_type not in self.return_type_valid_list: + raise HTTPException( + status_code=400, + detail={ + "error": "Invalid entrypoint function return", + "traceback": f"The entrypoint function return '{return_type}' needs to be an item from this list {str(self.return_type_valid_list)}.", + }, + ) + + return True + def get_function(self): return validate.create_function(self.code, self.function_entrypoint_name) @@ -219,8 +230,7 @@ class CustomComponent(BaseModel): def data(self): return self.extract_class_info() - @property - def is_valid(self): + def is_check_valid(self): return self._class_template_validation(self.data) @property From 38da7992242a72704b61bfed85673b61f9c49055 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Thu, 6 Jul 2023 01:05:56 +0100 Subject: [PATCH 041/221] Add langchain imports and format error detail message - Import new modules for langchain functionality - Format error detail message for better readability and organization --- src/backend/langflow/interface/types.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 58238958c..f331c3416 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -24,6 +24,9 @@ from fastapi import HTTPException import traceback # Used to get the base_classes list +from langchain.chains import ConversationChain # noqa: F401 +from langchain.llms.base import BaseLLM # noqa: F401 +from langchain.tools import Tool # noqa: F401 def get_type_list(): From 2459e4ecb97105f78bde0eb5c487b197ad9c913e Mon Sep 17 00:00:00 2001 
From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 17:42:20 -0300 Subject: [PATCH 042/221] =?UTF-8?q?=F0=9F=94=80=20chore(config.yaml):=20re?= =?UTF-8?q?move=20unused=20CustomComponent=20entry=20and=20add=20custom=20?= =?UTF-8?q?configuration=20for=20CustomComponent=20=F0=9F=94=80=20fix(conf?= =?UTF-8?q?ig.yaml):=20fix=20indentation=20issue=20in=20the=20custom=20con?= =?UTF-8?q?figuration=20section=20The=20unused=20CustomComponent=20entry?= =?UTF-8?q?=20has=20been=20removed=20from=20the=20tools=20section=20to=20c?= =?UTF-8?q?lean=20up=20the=20configuration=20file.=20Additionally,=20a=20c?= =?UTF-8?q?ustom=20configuration=20section=20has=20been=20added=20for=20th?= =?UTF-8?q?e=20CustomComponent,=20allowing=20for=20specific=20configuratio?= =?UTF-8?q?n=20options=20for=20this=20component.=20The=20indentation=20iss?= =?UTF-8?q?ue=20in=20the=20custom=20configuration=20section=20has=20also?= =?UTF-8?q?=20been=20fixed=20for=20consistency.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/config.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 93664f460..5d3f69dc1 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -189,8 +189,6 @@ tools: documentation: "" Calculator: documentation: "" - CustomComponent: - documentation: "" Serper Search: documentation: "" Tool: @@ -289,3 +287,6 @@ output_parsers: documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured" ResponseSchema: documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured" +custom: + CustomComponent: + documentation: "" From 0733b56b8f5ad5765eb9d2c375468d3658b69b0d Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Thu, 6 Jul 2023 19:30:37 -0300 Subject: [PATCH 043/221] refactor(codeAreaModal/v2.tsx): improve code readability and error display in 
CodeAreaModal component The changes in this commit include: - Added `overflow-y-scroll` and `overflow-x-clip` classes to the error message container to enable vertical scrolling and prevent horizontal scrolling. - Added `break-all` class to the error message and traceback to ensure long lines of text are broken and wrapped within the container. - Added `whitespace-pre-wrap` class to the error message to preserve whitespace and line breaks. These changes were made to improve the readability of the code and enhance the display of error messages in the CodeAreaModal component. --- src/frontend/src/modals/codeAreaModal/v2.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/frontend/src/modals/codeAreaModal/v2.tsx b/src/frontend/src/modals/codeAreaModal/v2.tsx index 965acf05b..13d522039 100644 --- a/src/frontend/src/modals/codeAreaModal/v2.tsx +++ b/src/frontend/src/modals/codeAreaModal/v2.tsx @@ -141,9 +141,9 @@ export default function CodeAreaModal({ />
-
+

{error?.detail?.error}

-
{error?.detail?.traceback}
+
{error?.detail?.traceback}
From a06b47c9a820265238aa7071e5edfbd5cd092a7d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:51:07 -0300 Subject: [PATCH 044/221] =?UTF-8?q?=F0=9F=90=9B=20fix(GenericNode/index.ts?= =?UTF-8?q?x):=20remove=20unnecessary=20console.log=20statement=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(GenericNode/index.tsx):=20optimize=20rende?= =?UTF-8?q?ring=20of=20GenericNode=20component=20by=20removing=20unused=20?= =?UTF-8?q?useEffect=20dependency=20The=20console.log=20statement=20was=20?= =?UTF-8?q?removed=20as=20it=20was=20no=20longer=20needed.=20The=20useEffe?= =?UTF-8?q?ct=20dependency=20was=20optimized=20by=20removing=20the=20unuse?= =?UTF-8?q?d=20data.node.template=20dependency,=20which=20improves=20the?= =?UTF-8?q?=20performance=20of=20the=20component=20rendering.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🔧 chore(utils.ts): add support for custom components in nodeColors and nodeNames The nodeColors and nodeNames objects were updated to include support for custom components. The custom_components key was added to both objects with the corresponding color and display name. This allows for consistent styling and labeling of custom components throughout the application. 🔧 chore(utils.ts): add Sparkles icon for custom_components in nodeIconsLucide The nodeIconsLucide object was updated to include the Sparkles icon for the custom_components key. This ensures that the custom components are visually represented with an appropriate icon in the application. 🔧 chore(utils.ts): optimize groupByFamily function in utils.ts The groupByFamily function in utils.ts was optimized to improve performance and readability. The code was refactored to eliminate unnecessary code duplication and improve variable naming. The function now correctly groups the data based on the specified criteria and returns the desired result. 
--- .../src/CustomNodes/GenericNode/index.tsx | 10 +- src/frontend/src/utils.ts | 127 ++++++++++-------- 2 files changed, 73 insertions(+), 64 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index e9ab81a81..c2226a126 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -66,7 +66,6 @@ export default function GenericNode({ deleteNode(data.id); return; } - console.log(data.node.template); useEffect(() => {}, [closePopUp, data.node.template]); return ( <> @@ -121,10 +120,11 @@ export default function GenericNode({ "Validating..." ) : (
- {validationStatus.params || - "" - .split("\n") - .map((line, index) =>
{line}
)} + {validationStatus.params + ? validationStatus.params + .split("\n") + .map((line, index) =>
{line}
) + : ""}
) } diff --git a/src/frontend/src/utils.ts b/src/frontend/src/utils.ts index 8be2ca07a..ce091bd7b 100644 --- a/src/frontend/src/utils.ts +++ b/src/frontend/src/utils.ts @@ -46,6 +46,7 @@ import { TerminalSquare, Wand2, Wrench, + Sparkles, } from "lucide-react"; import { SupabaseIcon } from "./icons/supabase"; import { MongoDBIcon } from "./icons/MongoDB"; @@ -128,6 +129,7 @@ export const nodeColors: { [char: string]: string } = { output_parsers: "#E6A627", str: "#049524", retrievers: "#e6b25a", + custom_components: "#ab11ab", unknown: "#9CA3AF", }; @@ -149,6 +151,7 @@ export const nodeNames: { [char: string]: string } = { retrievers: "Retrievers", utilities: "Utilities", output_parsers: "Output Parsers", + custom_components: "Custom", unknown: "Unknown", }; @@ -304,12 +307,15 @@ export const nodeIconsLucide: { retrievers: FileSearch as React.ForwardRefExoticComponent< ComponentType> >, + custom_components: Sparkles as React.ForwardRefExoticComponent< + ComponentType> + >, unknown: HelpCircle as React.ForwardRefExoticComponent< ComponentType> >, custom: Edit as React.ForwardRefExoticComponent< - ComponentType> ->, + ComponentType> + >, }; export const gradients = [ @@ -796,36 +802,36 @@ export function groupByFamily(data, baseClasses, left, type) { let parentOutput: string; let arrOfParent: string[] = []; let arrOfType: { family: string; type: string; component: string }[] = []; - let arrOfLength: { length: number; type: string; }[] = []; + let arrOfLength: { length: number; type: string }[] = []; let lastType = ""; Object.keys(data).map((d) => { - Object.keys(data[d]).map((n) => { - try { - if ( - data[d][n].base_classes.some((r) => - baseClasses.split("\n").includes(r) - ) - ) { - arrOfParent.push(d); - } - if (n === type) { - parentOutput = d; - } - - if (d !== lastType) { - arrOfLength.push({ - length: Object.keys(data[d]).length, - type: d - }); - - lastType = d; - } - } catch (e) { - console.log(e); - } - }); + Object.keys(data[d]).map((n) => { + try { + if ( 
+ data[d][n].base_classes.some((r) => + baseClasses.split("\n").includes(r) + ) + ) { + arrOfParent.push(d); + } + if (n === type) { + parentOutput = d; + } + + if (d !== lastType) { + arrOfLength.push({ + length: Object.keys(data[d]).length, + type: d, + }); + + lastType = d; + } + } catch (e) { + console.log(e); + } + }); }); - + Object.keys(data).map((d) => { Object.keys(data[d]).map((n) => { try { @@ -835,7 +841,7 @@ export function groupByFamily(data, baseClasses, left, type) { arrOfType.push({ family: d, type: data, - component: n + component: n, }); } }); @@ -846,61 +852,64 @@ export function groupByFamily(data, baseClasses, left, type) { }); }); - if(left == false){ + if (left == false) { let groupedBy = arrOfType.filter((object, index, self) => { const foundIndex = self.findIndex( (o) => o.family === object.family && o.type === object.type ); return foundIndex === index; }); - + return groupedBy.reduce((result, item) => { - const existingGroup = result.find((group) => group.family === item.family); - + const existingGroup = result.find( + (group) => group.family === item.family + ); + if (existingGroup) { existingGroup.type += `, ${item.type}`; } else { - result.push({ family: item.family, type: item.type, component: item.component }); + result.push({ + family: item.family, + type: item.type, + component: item.component, + }); } - + if (left == false) { let resFil = result.filter((group) => group.family === parentOutput); result = resFil; } - + return result; }, []); - } - - else{ + } else { const groupedArray = []; const groupedData = {}; - + arrOfType.forEach((item) => { - const { family, type, component } = item; - const key = `${family}-${type}`; - - if (!groupedData[key]) { - groupedData[key] = { family, type, component: [component] }; - } else { - groupedData[key].component.push(component); - } + const { family, type, component } = item; + const key = `${family}-${type}`; + + if (!groupedData[key]) { + groupedData[key] = { family, type, component: 
[component] }; + } else { + groupedData[key].component.push(component); + } }); - + for (const key in groupedData) { - groupedArray.push(groupedData[key]); + groupedArray.push(groupedData[key]); } groupedArray.forEach((object, index, self) => { - const findObj = arrOfLength.find(x => x.type == object.family); - if(object.component.length == findObj.length){ - self[index]['type'] = ""; + const findObj = arrOfLength.find((x) => x.type == object.family); + if (object.component.length == findObj.length) { + self[index]["type"] = ""; + } else { + self[index]["type"] = object.component.join(", "); } - else{ - self[index]['type'] = object.component.join(', '); - } - }) - return groupedArray + }); + return groupedArray; } } From a29b83e4fa3b50f5b123c3496788167325873d34 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:52:43 -0300 Subject: [PATCH 045/221] =?UTF-8?q?=F0=9F=94=A7=20chore(settings.py):=20ad?= =?UTF-8?q?d=20support=20for=20custom=20components=20in=20the=20settings?= =?UTF-8?q?=20=E2=9C=A8=20feat(settings.py):=20allow=20updating=20custom?= =?UTF-8?q?=20components=20in=20the=20settings=20The=20`custom=5Fcomponent?= =?UTF-8?q?s`=20attribute=20has=20been=20added=20to=20the=20`Settings`=20c?= =?UTF-8?q?lass=20to=20support=20custom=20components=20in=20the=20applicat?= =?UTF-8?q?ion.=20This=20allows=20users=20to=20define=20and=20use=20their?= =?UTF-8?q?=20own=20components=20in=20addition=20to=20the=20built-in=20one?= =?UTF-8?q?s.=20The=20`update=5Fsettings`=20method=20has=20been=20updated?= =?UTF-8?q?=20to=20include=20the=20`custom=5Fcomponents`=20attribute=20whe?= =?UTF-8?q?n=20updating=20the=20settings.=20This=20change=20improves=20the?= =?UTF-8?q?=20flexibility=20and=20extensibility=20of=20the=20application.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/settings.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/backend/langflow/settings.py 
b/src/backend/langflow/settings.py index 5be06292e..d2b9ab938 100644 --- a/src/backend/langflow/settings.py +++ b/src/backend/langflow/settings.py @@ -21,6 +21,8 @@ class Settings(BaseSettings): textsplitters: dict = {} utilities: dict = {} output_parsers: dict = {} + custom_components: dict = {} + dev: bool = False database_url: str cache: str = "InMemoryCache" @@ -66,6 +68,7 @@ class Settings(BaseSettings): self.documentloaders = new_settings.documentloaders or {} self.retrievers = new_settings.retrievers or {} self.output_parsers = new_settings.output_parsers or {} + self.custom_components = new_settings.custom_components or {} self.dev = dev def update_settings(self, **kwargs): From bed962e51359cefe91eee5470305dba147c2601e Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:52:59 -0300 Subject: [PATCH 046/221] =?UTF-8?q?=F0=9F=94=80=20refactor(endpoints.py):?= =?UTF-8?q?=20fix=20import=20statement=20for=20CustomComponent=20The=20imp?= =?UTF-8?q?ort=20statement=20for=20CustomComponent=20has=20been=20updated?= =?UTF-8?q?=20to=20reflect=20the=20correct=20module=20path.=20This=20ensur?= =?UTF-8?q?es=20that=20the=20correct=20CustomComponent=20class=20is=20impo?= =?UTF-8?q?rted=20and=20used=20in=20the=20code.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/endpoints.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index c9f671bcf..e12f2076e 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -7,7 +7,7 @@ from langflow.utils.logger import logger from fastapi import APIRouter, Depends, HTTPException, UploadFile -from langflow.interface.tools.custom import CustomComponent +from langflow.interface.custom.custom import CustomComponent from langflow.api.v1.schemas import ( ProcessResponse, From 
fc615bf319d043cb895c3a28d4e4c91061d5022a Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:53:27 -0300 Subject: [PATCH 047/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(customs.py):=20?= =?UTF-8?q?remove=20unused=20"CustomComponent"=20node=20from=20CUSTOM=5FNO?= =?UTF-8?q?DES=20dictionary=20The=20"CustomComponent"=20node=20was=20remov?= =?UTF-8?q?ed=20from=20the=20CUSTOM=5FNODES=20dictionary=20as=20it=20was?= =?UTF-8?q?=20no=20longer=20being=20used.=20This=20improves=20code=20clean?= =?UTF-8?q?liness=20and=20removes=20unnecessary=20clutter.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/custom/customs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py index 68dedbaad..1c51ae32f 100644 --- a/src/backend/langflow/custom/customs.py +++ b/src/backend/langflow/custom/customs.py @@ -8,7 +8,6 @@ CUSTOM_NODES = { "tools": { "PythonFunctionTool": frontend_node.tools.PythonFunctionToolNode(), "PythonFunction": frontend_node.tools.PythonFunctionNode(), - "CustomComponent": frontend_node.tools.CustomComponentNode(), "Tool": frontend_node.tools.ToolNode(), }, "agents": { @@ -31,6 +30,9 @@ CUSTOM_NODES = { "MidJourneyPromptChain": frontend_node.chains.MidJourneyPromptChainNode(), "load_qa_chain": frontend_node.chains.CombineDocsChainNode(), }, + "custom_components": { + "CustomComponent": frontend_node.custom_components.CustomComponentFrontendNode(), + }, } From f41cd1905f48e8e631f6d090b25e9cea439cdb10 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:53:38 -0300 Subject: [PATCH 048/221] =?UTF-8?q?=F0=9F=9A=80=20feat(listing.py):=20add?= =?UTF-8?q?=20support=20for=20custom=20components=20in=20the=20type=20dict?= =?UTF-8?q?ionary=20The=20type=20dictionary=20now=20includes=20a=20new=20k?= 
=?UTF-8?q?ey=20"custom=5Fcomponents"=20which=20contains=20a=20list=20of?= =?UTF-8?q?=20custom=20components=20created=20using=20the=20custom=5Fcompo?= =?UTF-8?q?nent=5Fcreator.=20This=20allows=20for=20better=20organization?= =?UTF-8?q?=20and=20management=20of=20custom=20components=20within=20the?= =?UTF-8?q?=20application.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/listing.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py index 0893f855a..fe3090f65 100644 --- a/src/backend/langflow/interface/listing.py +++ b/src/backend/langflow/interface/listing.py @@ -13,6 +13,7 @@ from langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator from langflow.interface.output_parsers.base import output_parser_creator from langflow.interface.retrievers.base import retriever_creator +from langflow.interface.custom.base import custom_component_creator def get_type_dict(): @@ -32,6 +33,7 @@ def get_type_dict(): "utilities": utility_creator.to_list(), "outputParsers": output_parser_creator.to_list(), "retrievers": retriever_creator.to_list(), + "custom_components": custom_component_creator.to_list(), } From 024cd3398a9d5a0baecf8370eb44bb4d51865be4 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:56:04 -0300 Subject: [PATCH 049/221] =?UTF-8?q?=F0=9F=94=A7=20fix(types.py):=20import?= =?UTF-8?q?=20correct=20module=20for=20custom=20component=20creator=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(types.py):=20fix=20import=20for=20CustomComp?= =?UTF-8?q?onent=20class=20=F0=9F=94=A7=20fix(types.py):=20remove=20unused?= =?UTF-8?q?=20imports=20=F0=9F=94=A7=20fix(types.py):=20fix=20function=20s?= =?UTF-8?q?ignature=20for=20add=5Fnew=5Fcustom=5Ffield=20=F0=9F=94=A7=20fi?= 
=?UTF-8?q?x(types.py):=20fix=20function=20signature=20for=20build=5Flangc?= =?UTF-8?q?hain=5Ftemplate=5Fcustom=5Fcomponent=20=F0=9F=94=A7=20fix(types?= =?UTF-8?q?.py):=20fix=20return=20type=20validation=20and=20error=20handli?= =?UTF-8?q?ng=20in=20build=5Flangchain=5Ftemplate=5Fcustom=5Fcomponent=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(types.py):=20fix=20appending=20base=20classe?= =?UTF-8?q?s=20to=20frontend=5Fnode=20in=20build=5Flangchain=5Ftemplate=5F?= =?UTF-8?q?custom=5Fcomponent=20=F0=9F=94=A7=20fix(types.py):=20fix=20retu?= =?UTF-8?q?rn=20statement=20in=20build=5Flangchain=5Ftemplate=5Fcustom=5Fc?= =?UTF-8?q?omponent=20The=20changes=20in=20this=20commit=20fix=20import=20?= =?UTF-8?q?statements,=20function=20signatures,=20and=20error=20handling?= =?UTF-8?q?=20in=20the=20types.py=20file.=20The=20correct=20module=20is=20?= =?UTF-8?q?now=20imported=20for=20the=20custom=20component=20creator.=20Th?= =?UTF-8?q?e=20import=20for=20the=20CustomComponent=20class=20is=20fixed.?= =?UTF-8?q?=20Unused=20imports=20are=20removed.=20The=20function=20signatu?= =?UTF-8?q?re=20for=20add=5Fnew=5Fcustom=5Ffield=20is=20fixed=20to=20inclu?= =?UTF-8?q?de=20the=20field=5Fconfig=20parameter.=20The=20function=20signa?= =?UTF-8?q?ture=20for=20build=5Flangchain=5Ftemplate=5Fcustom=5Fcomponent?= =?UTF-8?q?=20is=20fixed=20to=20include=20the=20field=5Fconfig=20parameter?= =?UTF-8?q?.=20The=20return=20type=20validation=20and=20error=20handling?= =?UTF-8?q?=20in=20build=5Flangchain=5Ftemplate=5Fcustom=5Fcomponent=20are?= =?UTF-8?q?=20fixed=20to=20handle=20invalid=20return=20types.=20The=20base?= =?UTF-8?q?=20classes=20are=20correctly=20appended=20to=20the=20frontend?= =?UTF-8?q?=5Fnode=20in=20build=5Flangchain=5Ftemplate=5Fcustom=5Fcomponen?= =?UTF-8?q?t.=20The=20return=20statement=20in=20build=5Flangchain=5Ftempla?= =?UTF-8?q?te=5Fcustom=5Fcomponent=20is=20fixed=20to=20return=20the=20fron?= =?UTF-8?q?tend=5Fnode.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/types.py | 70 +++++++++++++++++-------- 1 file changed, 47 insertions(+), 23 deletions(-) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index f331c3416..c91abcfbe 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -1,5 +1,6 @@ from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator +from langflow.interface.custom.constants import LANGCHAIN_BASE_TYPES from langflow.interface.document_loaders.base import documentloader_creator from langflow.interface.embeddings.base import embedding_creator from langflow.interface.llms.base import llm_creator @@ -12,7 +13,8 @@ from langflow.interface.utilities.base import utility_creator from langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator from langflow.interface.output_parsers.base import output_parser_creator -from langflow.interface.tools.custom import CustomComponent +from langflow.interface.custom.base import custom_component_creator +from langflow.interface.custom.custom import CustomComponent from langflow.template.field.base import TemplateField from langflow.template.frontend_node.tools import CustomComponentNode @@ -24,9 +26,6 @@ from fastapi import HTTPException import traceback # Used to get the base_classes list -from langchain.chains import ConversationChain # noqa: F401 -from langchain.llms.base import BaseLLM # noqa: F401 -from langchain.tools import Tool # noqa: F401 def get_type_list(): @@ -62,6 +61,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union utility_creator, output_parser_creator, retriever_creator, + custom_component_creator, ] all_types = {} @@ -73,9 +73,16 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union # TODO: Move to correct place -def 
add_new_custom_field(template, field_name: str, field_type: str): +def add_new_custom_field( + template, field_name: str, field_type: str, field_config: dict +): new_field = TemplateField( - name=field_name, field_type=field_type, show=True, required=True, advanced=False + name=field_name, + field_type=field_type, + show=True, + required=True, + advanced=False, + **field_config, ) template.get("template")[field_name] = new_field.to_dict() template.get("custom_fields")[field_name] = None @@ -108,28 +115,45 @@ def add_code_field(template, raw_code): def build_langchain_template_custom_component(extractor: CustomComponent): # Build base "CustomComponent" template - template = CustomComponentNode().to_dict().get(type(extractor).__name__) + frontend_node = CustomComponentNode().to_dict().get(type(extractor).__name__) - function_args, return_type = extractor.args_and_return_type + function_args, return_type, template_config = extractor.args_and_return_type + + if "display_name" in template_config and frontend_node is not None: + frontend_node["display_name"] = template_config["display_name"] raw_code = extractor.code + field_config = template_config.get("field_config", {}) + if function_args is not None: + # Add extra fields + for extra_field in function_args: + def_field = extra_field[0] + def_type = extra_field[1] - # Add extra fields - for extra_field in function_args: - def_field = extra_field[0] - def_type = extra_field[1] + if def_field != "self": + # TODO: Validate type - if is possible to render into frontend + if not def_type: + def_type = "str" + config = field_config.get(def_field, {}) + frontend_node = add_new_custom_field( + frontend_node, def_field, def_type, config + ) - if def_field != "self": - # TODO: Validate type - if is possible to render into frontend - if not def_type: - def_type = "str" - - template = add_new_custom_field(template, def_field, def_type) - - template = add_code_field(template, raw_code) + frontend_node = 
add_code_field(frontend_node, raw_code) # Get base classes from "return_type" and add to template.base_classes try: - return_type_instance = globals()[return_type] + if return_type not in LANGCHAIN_BASE_TYPES or return_type is None: + raise HTTPException( + status_code=400, + detail={ + "error": ( + "Invalid return type should be one of: " + f"{list(LANGCHAIN_BASE_TYPES.keys())}" + ), + "traceback": traceback.format_exc(), + }, + ) + return_type_instance = LANGCHAIN_BASE_TYPES.get(return_type) base_classes = get_base_classes(return_type_instance) except (KeyError, AttributeError) as err: raise HTTPException( @@ -138,9 +162,9 @@ def build_langchain_template_custom_component(extractor: CustomComponent): ) from err for base_class in base_classes: - template.get("base_classes").append(base_class) + frontend_node.get("base_classes").append(base_class) - return template + return frontend_node langchain_types_dict = build_langchain_types_dict() From 5e4507852c07fbd3ff1a704a1464258958113645 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:56:21 -0300 Subject: [PATCH 050/221] =?UTF-8?q?=E2=9C=A8=20feat(constants.py):=20add?= =?UTF-8?q?=20constants=20file=20for=20langflow=20interface=20custom=20typ?= =?UTF-8?q?es=20This=20commit=20adds=20a=20new=20file=20`constants.py`=20t?= =?UTF-8?q?o=20the=20`src/backend/langflow/interface/custom`=20directory.?= =?UTF-8?q?=20The=20file=20defines=20a=20dictionary=20`LANGCHAIN=5FBASE=5F?= =?UTF-8?q?TYPES`=20which=20maps=20string=20names=20to=20corresponding=20l?= =?UTF-8?q?angflow=20interface=20custom=20types.=20This=20file=20will=20be?= =?UTF-8?q?=20used=20to=20store=20and=20access=20the=20custom=20types=20us?= =?UTF-8?q?ed=20in=20the=20langflow=20interface.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/constants.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 
src/backend/langflow/interface/custom/constants.py diff --git a/src/backend/langflow/interface/custom/constants.py b/src/backend/langflow/interface/custom/constants.py new file mode 100644 index 000000000..920c2bc09 --- /dev/null +++ b/src/backend/langflow/interface/custom/constants.py @@ -0,0 +1,23 @@ +from langchain import PromptTemplate +from langchain.chains.base import Chain +from langchain.document_loaders.base import BaseLoader +from langchain.embeddings.base import Embeddings +from langchain.llms.base import BaseLLM +from langchain.schema import BaseRetriever, Document +from langchain.text_splitter import TextSplitter +from langchain.tools import Tool +from langchain.vectorstores.base import VectorStore + + +LANGCHAIN_BASE_TYPES = { + "Chain": Chain, + "Tool": Tool, + "BaseLLM": BaseLLM, + "PromptTemplate": PromptTemplate, + "BaseLoader": BaseLoader, + "Document": Document, + "TextSplitter": TextSplitter, + "VectorStore": VectorStore, + "Embeddings": Embeddings, + "BaseRetriever": BaseRetriever, +} From 692994f100405e10f27192490a94bd22661baa31 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:56:49 -0300 Subject: [PATCH 051/221] =?UTF-8?q?=E2=9C=A8=20feat(custom):=20add=20custo?= =?UTF-8?q?m=20component=20interface=20and=20base=20classes=20=F0=9F=94=A7?= =?UTF-8?q?=20chore(custom):=20create=20a=20custom=20component=20creator?= =?UTF-8?q?=20class=20to=20handle=20custom=20component=20creation=20and=20?= =?UTF-8?q?loading?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The commit adds two new files: `__init__.py` and `base.py` under the `src/backend/langflow/interface/custom` directory. The `__init__.py` file imports the `CustomComponentCreator` and `CustomComponent` classes from the `base.py` file. The `base.py` file defines the `CustomComponentCreator` class, which is responsible for creating and loading custom components. 
It also includes necessary imports and a `CustomComponentFrontendNode` class. The addition of these files is necessary to support custom components in the application. The `CustomComponentCreator` class provides a way to create and load custom components, and the `CustomComponent` class represents a custom component. This allows for the dynamic creation and usage of custom components in the application. --- .../langflow/interface/custom/__init__.py | 4 ++ src/backend/langflow/interface/custom/base.py | 43 +++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 src/backend/langflow/interface/custom/__init__.py create mode 100644 src/backend/langflow/interface/custom/base.py diff --git a/src/backend/langflow/interface/custom/__init__.py b/src/backend/langflow/interface/custom/__init__.py new file mode 100644 index 000000000..48672e52b --- /dev/null +++ b/src/backend/langflow/interface/custom/__init__.py @@ -0,0 +1,4 @@ +from langflow.interface.custom.base import CustomComponentCreator +from langflow.interface.custom.custom import CustomComponent + +__all__ = ["CustomComponentCreator", "CustomComponent"] diff --git a/src/backend/langflow/interface/custom/base.py b/src/backend/langflow/interface/custom/base.py new file mode 100644 index 000000000..359b799ce --- /dev/null +++ b/src/backend/langflow/interface/custom/base.py @@ -0,0 +1,43 @@ +from typing import Any, Dict, List, Optional, Type + +from langflow.custom.customs import get_custom_nodes +from langflow.interface.base import LangChainTypeCreator +from langflow.interface.custom.custom import CustomComponent +from langflow.template.frontend_node.custom_components import ( + CustomComponentFrontendNode, +) +from langflow.utils.logger import logger + +# Assuming necessary imports for Field, Template, and FrontendNode classes + + +class CustomComponentCreator(LangChainTypeCreator): + type_name: str = "custom_components" + + @property + def frontend_node_class(self) -> 
Type[CustomComponentFrontendNode]: + return CustomComponentFrontendNode + + @property + def type_to_loader_dict(self) -> Dict: + if self.type_dict is None: + self.type_dict: dict[str, Any] = { + "CustomComponent": CustomComponent, + } + return self.type_dict + + def get_signature(self, name: str) -> Optional[Dict]: + try: + if name in get_custom_nodes(self.type_name).keys(): + return get_custom_nodes(self.type_name)[name] + except ValueError as exc: + raise ValueError(f"CustomComponent {name} not found: {exc}") from exc + except AttributeError as exc: + logger.error(f"CustomComponent {name} not loaded: {exc}") + return None + + def to_list(self) -> List[str]: + return list(self.type_to_loader_dict.keys()) + + +custom_component_creator = CustomComponentCreator() From 97399632e21bf98c924e8520d9b48b72d34584e9 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:57:04 -0300 Subject: [PATCH 052/221] =?UTF-8?q?=E2=9C=A8=20feat(custom.py):=20add=20Cu?= =?UTF-8?q?stomComponent=20class=20to=20handle=20custom=20code=20component?= =?UTF-8?q?s=20=F0=9F=90=9B=20fix(custom.py):=20fix=20typo=20in=20function?= =?UTF-8?q?=5Fentrypoint=5Fname=20variable=20assignment=20The=20CustomComp?= =?UTF-8?q?onent=20class=20is=20added=20to=20handle=20custom=20code=20comp?= =?UTF-8?q?onents.=20It=20includes=20methods=20to=20handle=20imports,=20cl?= =?UTF-8?q?asses,=20and=20functions=20in=20the=20provided=20code.=20The=20?= =?UTF-8?q?class=20also=20has=20methods=20to=20extract=20class=20informati?= =?UTF-8?q?on,=20get=20entrypoint=20function=20arguments=20and=20return=20?= =?UTF-8?q?type,=20build=20a=20template=20configuration,=20validate=20the?= =?UTF-8?q?=20class=20template,=20and=20get=20the=20entrypoint=20function.?= =?UTF-8?q?=20A=20typo=20in=20the=20assignment=20of=20the=20function=5Fent?= =?UTF-8?q?rypoint=5Fname=20variable=20is=20fixed.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
.../langflow/interface/custom/custom.py | 192 ++++++++++++++++++ 1 file changed, 192 insertions(+) create mode 100644 src/backend/langflow/interface/custom/custom.py diff --git a/src/backend/langflow/interface/custom/custom.py b/src/backend/langflow/interface/custom/custom.py new file mode 100644 index 000000000..c47e67edd --- /dev/null +++ b/src/backend/langflow/interface/custom/custom.py @@ -0,0 +1,192 @@ +import ast +import traceback +from typing import Callable, Optional +from fastapi import HTTPException +from langflow.interface.custom.constants import LANGCHAIN_BASE_TYPES + +from langflow.utils import validate +from pydantic import BaseModel + + +class CustomComponent(BaseModel): + field_config: dict = {} + code: str + function: Optional[Callable] = None + function_entrypoint_name = "build" + return_type_valid_list = list(LANGCHAIN_BASE_TYPES.keys()) + class_template = { + "imports": [], + "class": {"inherited_classes": "", "name": "", "init": "", "attributes": {}}, + "functions": [], + } + + def __init__(self, **data): + super().__init__(**data) + + def _handle_import(self, node): + for alias in node.names: + module_name = getattr(node, "module", None) + self.class_template["imports"].append( + f"{module_name}.{alias.name}" if module_name else alias.name + ) + + def _handle_class(self, node): + self.class_template["class"].update( + { + "name": node.name, + "inherited_classes": [ast.unparse(base) for base in node.bases], + } + ) + + attributes = {} # To store the attributes and their values + + for inner_node in node.body: + if isinstance(inner_node, ast.Assign): # An assignment + for target in inner_node.targets: # Targets of the assignment + if isinstance(target, ast.Name): # A simple variable + # Add the attribute and its value to the dictionary + attributes[target.id] = ast.unparse(inner_node.value) + elif isinstance(inner_node, ast.FunctionDef): + self._handle_function(inner_node) + + # You can add these attributes to your class_template if you want + 
self.class_template["class"]["attributes"] = attributes + + def _handle_function(self, node): + function_name = node.name + function_args_str = ast.unparse(node.args) + function_args = function_args_str.split(", ") if function_args_str else [] + + return_type = ast.unparse(node.returns) if node.returns else "None" + + function_data = { + "name": function_name, + "arguments": function_args, + "return_type": return_type, + } + + if function_name == "__init__": + self.class_template["class"]["init"] = ( + function_args_str.split(", ") if function_args_str else [] + ) + else: + self.class_template["functions"].append(function_data) + + def transform_list(self, input_list): + output_list = [] + for item in input_list: + # Split each item on ':' to separate variable name and type + split_item = item.split(":") + + # If there is a type, strip any leading/trailing spaces from it + if len(split_item) > 1: + split_item[1] = split_item[1].strip() + # If there isn't a type, append None + else: + split_item.append(None) + output_list.append(split_item) + + return output_list + + def extract_class_info(self): + try: + module = ast.parse(self.code) + except SyntaxError as err: + raise HTTPException( + status_code=400, + detail={"error": err.msg, "traceback": traceback.format_exc()}, + ) from err + + for node in module.body: + if isinstance(node, (ast.Import, ast.ImportFrom)): + self._handle_import(node) + elif isinstance(node, ast.ClassDef): + self._handle_class(node) + + return self.class_template + + def get_entrypoint_function_args_and_return_type(self): + data = self.extract_class_info() + attributes = data.get("class", {}).get("attributes", {}) + functions = data.get("functions", []) + template_config = self._build_template_config(attributes) + if build_function := next( + (f for f in functions if f["name"] == self.function_entrypoint_name), + None, + ): + function_args = build_function.get("arguments", None) + function_args = self.transform_list(function_args) + + 
return_type = build_function.get("return_type", None) + else: + function_args = None + return_type = None + + return function_args, return_type, template_config + + def _build_template_config(self, attributes): + template_config = {} + if "field_config" in attributes: + template_config["field_config"] = ast.literal_eval( + attributes["field_config"] + ) + if "display_name" in attributes: + template_config["display_name"] = ast.literal_eval( + attributes["display_name"] + ) + return template_config + + def _class_template_validation(self, code: dict): + class_name = code.get("class", {}).get("name", None) + if not class_name: # this will also check for None, empty string, etc. + raise HTTPException( + status_code=400, + detail={ + "error": "The main class must have a valid name.", + "traceback": "", + }, + ) + + functions = code.get("functions", []) + build_function = next( + (f for f in functions if f["name"] == self.function_entrypoint_name), + None, + ) + + if not build_function: + raise HTTPException( + status_code=400, + detail={ + "error": "Invalid entrypoint function name", + "traceback": f"There needs to be at least one entrypoint function named '{self.function_entrypoint_name}' and it needs to return one of the types from this list {str(self.return_type_valid_list)}.", + }, + ) + + return_type = build_function.get("return_type") + if return_type not in self.return_type_valid_list: + raise HTTPException( + status_code=400, + detail={ + "error": "Invalid entrypoint function return", + "traceback": f"The entrypoint function return '{return_type}' needs to be an item from this list {str(self.return_type_valid_list)}.", + }, + ) + + return True + + def get_function(self): + return validate.create_function(self.code, self.function_entrypoint_name) + + def build(self): + pass + + @property + def data(self): + return self.extract_class_info() + + def is_check_valid(self): + return self._class_template_validation(self.data) + + @property + def 
args_and_return_type(self): + return self.get_entrypoint_function_args_and_return_type() From 97572bea25f79391ad7ff89477189fa87a6be10e Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:57:17 -0300 Subject: [PATCH 053/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(constants.py):?= =?UTF-8?q?=20remove=20unused=20import=20and=20unused=20CustomComponent=20?= =?UTF-8?q?from=20CUSTOM=5FTOOLS=20dictionary=20The=20import=20statement?= =?UTF-8?q?=20for=20CustomComponent=20is=20no=20longer=20needed=20as=20it?= =?UTF-8?q?=20is=20not=20used=20in=20the=20CUSTOM=5FTOOLS=20dictionary.=20?= =?UTF-8?q?Removing=20the=20unused=20import=20and=20the=20unused=20CustomC?= =?UTF-8?q?omponent=20entry=20from=20the=20dictionary=20improves=20code=20?= =?UTF-8?q?cleanliness=20and=20reduces=20potential=20confusion.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/tools/constants.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py index cac65826b..dc1bfe0c1 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/langflow/interface/tools/constants.py @@ -12,13 +12,11 @@ from langflow.interface.importing.utils import import_class from langflow.interface.tools.custom import ( PythonFunctionTool, PythonFunction, - CustomComponent, ) FILE_TOOLS = {"JsonSpec": JsonSpec} CUSTOM_TOOLS = { "Tool": Tool, - "CustomComponent": CustomComponent, "PythonFunctionTool": PythonFunctionTool, "PythonFunction": PythonFunction, } From 2775789ccb178b178ff357cc06b20a1735b97d70 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:57:28 -0300 Subject: [PATCH 054/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(custom.py):=20r?= =?UTF-8?q?emove=20unused=20imports=20and=20commented=20out=20code=20?= 
=?UTF-8?q?=F0=9F=9A=80=20feat(custom.py):=20refactor=20CustomComponent=20?= =?UTF-8?q?class=20to=20remove=20unused=20code=20and=20improve=20code=20or?= =?UTF-8?q?ganization=20The=20changes=20in=20this=20commit=20remove=20unus?= =?UTF-8?q?ed=20imports=20and=20commented=20out=20code=20from=20the=20`cus?= =?UTF-8?q?tom.py`=20file.=20The=20`CustomComponent`=20class=20has=20been?= =?UTF-8?q?=20refactored=20to=20remove=20the=20`CustomComponent=5Fold`=20c?= =?UTF-8?q?lass=20and=20unused=20methods.=20The=20code=20has=20been=20reor?= =?UTF-8?q?ganized=20to=20improve=20readability=20and=20maintainability.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/tools/custom.py | 162 ------------------ 1 file changed, 162 deletions(-) diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/langflow/interface/tools/custom.py index d18577599..a0ed5d378 100644 --- a/src/backend/langflow/interface/tools/custom.py +++ b/src/backend/langflow/interface/tools/custom.py @@ -1,6 +1,3 @@ -import ast -import traceback - from typing import Callable, Optional from langflow.interface.importing.utils import get_function @@ -9,8 +6,6 @@ from pydantic import BaseModel, validator from langflow.utils import validate from langchain.agents.tools import Tool -from fastapi import HTTPException - class Function(BaseModel): code: str @@ -79,160 +74,3 @@ class CustomComponent_old(BaseModel): function_name = validate.extract_function_name(self.code) return validate.create_function(self.code, function_name) - - -class CustomComponent(BaseModel): - code: str - function: Optional[Callable] = None - function_entrypoint_name = "build" - return_type_valid_list = ["ConversationChain", "BaseLLM", "Tool"] - class_template = { - "imports": [], - "class": {"inherited_classes": "", "name": "", "init": ""}, - "functions": [], - } - - def __init__(self, **data): - super().__init__(**data) - - def _handle_import(self, node): - for alias in 
node.names: - module_name = getattr(node, "module", None) - self.class_template["imports"].append( - f"{module_name}.{alias.name}" if module_name else alias.name - ) - - def _handle_class(self, node): - self.class_template["class"].update( - { - "name": node.name, - "inherited_classes": [ast.unparse(base) for base in node.bases], - } - ) - - for inner_node in node.body: - if isinstance(inner_node, ast.FunctionDef): - self._handle_function(inner_node) - - def _handle_function(self, node): - function_name = node.name - function_args_str = ast.unparse(node.args) - function_args = function_args_str.split(", ") if function_args_str else [] - - return_type = ast.unparse(node.returns) if node.returns else "None" - - function_data = { - "name": function_name, - "arguments": function_args, - "return_type": return_type, - } - - if function_name == "__init__": - self.class_template["class"]["init"] = ( - function_args_str.split(", ") if function_args_str else [] - ) - else: - self.class_template["functions"].append(function_data) - - def transform_list(self, input_list): - output_list = [] - for item in input_list: - # Split each item on ':' to separate variable name and type - split_item = item.split(":") - - # If there is a type, strip any leading/trailing spaces from it - if len(split_item) > 1: - split_item[1] = split_item[1].strip() - # If there isn't a type, append None - else: - split_item.append(None) - output_list.append(split_item) - - return output_list - - def extract_class_info(self): - try: - module = ast.parse(self.code) - except SyntaxError as err: - raise HTTPException( - status_code=400, - detail={"error": err.msg, "traceback": traceback.format_exc()}, - ) from err - - for node in module.body: - if isinstance(node, (ast.Import, ast.ImportFrom)): - self._handle_import(node) - elif isinstance(node, ast.ClassDef): - self._handle_class(node) - - return self.class_template - - def get_entrypoint_function_args_and_return_type(self): - data = 
self.extract_class_info() - functions = data.get("functions", []) - - if build_function := next( - (f for f in functions if f["name"] == self.function_entrypoint_name), - None, - ): - function_args = build_function.get("arguments", None) - function_args = self.transform_list(function_args) - - return_type = build_function.get("return_type", None) - else: - function_args = None - return_type = None - - return function_args, return_type - - def _class_template_validation(self, code: dict): - class_name = code.get("class", {}).get("name", None) - if not class_name: # this will also check for None, empty string, etc. - raise HTTPException( - status_code=400, - detail={ - "error": "The main class must have a valid name.", - "traceback": "", - }, - ) - - functions = code.get("functions", []) - build_function = next( - (f for f in functions if f["name"] == self.function_entrypoint_name), - None, - ) - - if not build_function: - raise HTTPException( - status_code=400, - detail={ - "error": "Invalid entrypoint function name", - "traceback": f"There needs to be at least one entrypoint function named '{self.function_entrypoint_name}' and it needs to return one of the types from this list {str(self.return_type_valid_list)}.", - }, - ) - - return_type = build_function.get("return_type") - if return_type not in self.return_type_valid_list: - raise HTTPException( - status_code=400, - detail={ - "error": "Invalid entrypoint function return", - "traceback": f"The entrypoint function return '{return_type}' needs to be an item from this list {str(self.return_type_valid_list)}.", - }, - ) - - return True - - def get_function(self): - return validate.create_function(self.code, self.function_entrypoint_name) - - @property - def data(self): - return self.extract_class_info() - - def is_check_valid(self): - return self._class_template_validation(self.data) - - @property - def args_and_return_type(self): - return self.get_entrypoint_function_args_and_return_type() From 
93dbf552f01eba2fc7b9270a3a4a4cc8a5c47ce7 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:57:47 -0300 Subject: [PATCH 055/221] =?UTF-8?q?=E2=9C=A8=20feat(frontend=5Fnode):=20ad?= =?UTF-8?q?d=20custom=5Fcomponents=20to=20the=20list=20of=20exported=20mod?= =?UTF-8?q?ules=20The=20custom=5Fcomponents=20module=20is=20now=20included?= =?UTF-8?q?=20in=20the=20list=20of=20exported=20modules=20in=20the=20front?= =?UTF-8?q?end=5Fnode=20package.=20This=20change=20allows=20other=20module?= =?UTF-8?q?s=20or=20packages=20to=20import=20and=20use=20the=20custom=20co?= =?UTF-8?q?mponents=20provided=20by=20the=20frontend=5Fnode=20package.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/template/frontend_node/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/backend/langflow/template/frontend_node/__init__.py b/src/backend/langflow/template/frontend_node/__init__.py index c36234364..e13aa1ded 100644 --- a/src/backend/langflow/template/frontend_node/__init__.py +++ b/src/backend/langflow/template/frontend_node/__init__.py @@ -9,6 +9,7 @@ from langflow.template.frontend_node import ( vectorstores, documentloaders, textsplitters, + custom_components, ) __all__ = [ @@ -22,4 +23,5 @@ __all__ = [ "vectorstores", "documentloaders", "textsplitters", + "custom_components", ] From 790d8b56691886996e8130f27988bb3a488d8ab4 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 6 Jul 2023 23:57:57 -0300 Subject: [PATCH 056/221] =?UTF-8?q?=E2=9C=A8=20feat(custom=5Fcomponents.py?= =?UTF-8?q?):=20add=20CustomComponentFrontendNode=20class=20to=20create=20?= =?UTF-8?q?a=20custom=20component=20The=20CustomComponentFrontendNode=20cl?= =?UTF-8?q?ass=20is=20added=20to=20the=20custom=5Fcomponents.py=20file.=20?= =?UTF-8?q?This=20class=20represents=20a=20custom=20component=20in=20the?= =?UTF-8?q?=20frontend=20of=20the=20application.=20It=20has=20properties?= 
=?UTF-8?q?=20such=20as=20name,=20display=5Fname,=20template,=20descriptio?= =?UTF-8?q?n,=20and=20base=5Fclasses.=20The=20template=20property=20define?= =?UTF-8?q?s=20the=20structure=20of=20the=20custom=20component,=20includin?= =?UTF-8?q?g=20a=20code=20field=20with=20default=20value.=20The=20to=5Fdic?= =?UTF-8?q?t()=20method=20is=20also=20implemented=20to=20convert=20the=20c?= =?UTF-8?q?lass=20instance=20to=20a=20dictionary.=20This=20allows=20the=20?= =?UTF-8?q?custom=20component=20to=20be=20serialized=20and=20used=20in=20o?= =?UTF-8?q?ther=20parts=20of=20the=20application.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../frontend_node/custom_components.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 src/backend/langflow/template/frontend_node/custom_components.py diff --git a/src/backend/langflow/template/frontend_node/custom_components.py b/src/backend/langflow/template/frontend_node/custom_components.py new file mode 100644 index 000000000..6d360ce96 --- /dev/null +++ b/src/backend/langflow/template/frontend_node/custom_components.py @@ -0,0 +1,30 @@ +from langflow.template.field.base import TemplateField +from langflow.template.frontend_node.base import FrontendNode +from langflow.template.template.base import Template +from langflow.utils.constants import DEFAULT_CUSTOM_COMPONENT_CODE + + +class CustomComponentFrontendNode(FrontendNode): + name: str = "CustomComponent" + display_name: str = "Custom Component" + template: Template = Template( + type_name="CustomComponent", + fields=[ + TemplateField( + field_type="code", + required=True, + placeholder="", + is_list=False, + show=True, + value=DEFAULT_CUSTOM_COMPONENT_CODE, + name="code", + advanced=False, + dynamic=True, + ) + ], + ) + description: str = "Create any custom component you want!" 
+ base_classes: list[str] = [] + + def to_dict(self): + return super().to_dict() From ae59104b3af75ed80cee15f6c253d50cad6df383 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 00:41:35 -0300 Subject: [PATCH 057/221] =?UTF-8?q?=F0=9F=94=A8=20refactor(formatter/base.?= =?UTF-8?q?py):=20make=20FieldFormatter=20inherit=20from=20pydantic.BaseMo?= =?UTF-8?q?del=20for=20improved=20type=20checking=20=F0=9F=94=A5=20chore(c?= =?UTF-8?q?onstants.py):=20remove=20unused=20DEFAULT=5FCUSTOM=5FCOMPONENT?= =?UTF-8?q?=5FCODE=20constant=20The=20FieldFormatter=20class=20now=20inher?= =?UTF-8?q?its=20from=20pydantic.BaseModel,=20which=20allows=20for=20impro?= =?UTF-8?q?ved=20type=20checking=20and=20validation=20of=20the=20format=20?= =?UTF-8?q?method=20arguments.=20The=20unused=20DEFAULT=5FCUSTOM=5FCOMPONE?= =?UTF-8?q?NT=5FCODE=20constant=20has=20been=20removed=20to=20clean=20up?= =?UTF-8?q?=20the=20codebase.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../template/frontend_node/formatter/base.py | 3 ++- src/backend/langflow/utils/constants.py | 20 ------------------- 2 files changed, 2 insertions(+), 21 deletions(-) diff --git a/src/backend/langflow/template/frontend_node/formatter/base.py b/src/backend/langflow/template/frontend_node/formatter/base.py index 67e906593..f582bc298 100644 --- a/src/backend/langflow/template/frontend_node/formatter/base.py +++ b/src/backend/langflow/template/frontend_node/formatter/base.py @@ -2,9 +2,10 @@ from abc import ABC, abstractmethod from typing import Optional from langflow.template.field.base import TemplateField +from pydantic import BaseModel -class FieldFormatter(ABC): +class FieldFormatter(BaseModel, ABC): @abstractmethod def format(self, field: TemplateField, name: Optional[str]) -> None: pass diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py index 7d81c5f03..e473d855b 100644 --- 
a/src/backend/langflow/utils/constants.py +++ b/src/backend/langflow/utils/constants.py @@ -48,24 +48,4 @@ def python_function(text: str) -> str: return text """ -DEFAULT_CUSTOM_COMPONENT_CODE = """ -from langchain.llms import OpenAI -from langchain.chains import ConversationChain -from langchain.memory import ConversationBufferMemory - - -class MyPythonClass: - def my_conversation(self, openai_api_key): - llm = OpenAI( - openai_api_key=openai_api_key, - temperature=0 - ) - return ConversationChain( - llm=llm, verbose=True, memory=ConversationBufferMemory() - ) - - def build(self, openai_api_key: str) -> ConversationChain: - return self.my_conversation(openai_api_key) -""" - DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"] From daad3bb4e74a02e0ea0f1a0ae874972aff1f1d85 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 00:42:16 -0300 Subject: [PATCH 058/221] =?UTF-8?q?=F0=9F=90=9B=20fix(custom.py):=20add=20?= =?UTF-8?q?support=20for=20parsing=20annotated=20assignments=20in=20Custom?= =?UTF-8?q?Component=20class=20=F0=9F=90=9B=20fix(custom=5Fcomponents.py):?= =?UTF-8?q?=20update=20import=20statement=20for=20DEFAULT=5FCUSTOM=5FCOMPO?= =?UTF-8?q?NENT=5FCODE=20constant=20=F0=9F=90=9B=20fix(tools.py):=20update?= =?UTF-8?q?=20import=20statement=20for=20DEFAULT=5FCUSTOM=5FCOMPONENT=5FCO?= =?UTF-8?q?DE=20constant?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The `CustomComponent` class now supports parsing annotated assignments, allowing attributes with annotated values to be added to the `attributes` dictionary. This improves the flexibility and extensibility of the class. The import statement for the `DEFAULT_CUSTOM_COMPONENT_CODE` constant in `custom_components.py` and `tools.py` has been updated to reflect the correct location of the constant in the `langflow.interface.custom.constants` module. This ensures that the correct value is imported and used in the code. 
--- src/backend/langflow/interface/custom/custom.py | 6 ++++++ .../langflow/template/frontend_node/custom_components.py | 2 +- src/backend/langflow/template/frontend_node/tools.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/interface/custom/custom.py b/src/backend/langflow/interface/custom/custom.py index c47e67edd..8985945a9 100644 --- a/src/backend/langflow/interface/custom/custom.py +++ b/src/backend/langflow/interface/custom/custom.py @@ -46,6 +46,10 @@ class CustomComponent(BaseModel): if isinstance(target, ast.Name): # A simple variable # Add the attribute and its value to the dictionary attributes[target.id] = ast.unparse(inner_node.value) + elif isinstance(inner_node, ast.AnnAssign): # An annotated assignment + if isinstance(inner_node.target, ast.Name) and inner_node.value: + attributes[inner_node.target.id] = ast.unparse(inner_node.value) + elif isinstance(inner_node, ast.FunctionDef): self._handle_function(inner_node) @@ -134,6 +138,8 @@ class CustomComponent(BaseModel): template_config["display_name"] = ast.literal_eval( attributes["display_name"] ) + if "description" in attributes: + template_config["description"] = ast.literal_eval(attributes["description"]) return template_config def _class_template_validation(self, code: dict): diff --git a/src/backend/langflow/template/frontend_node/custom_components.py b/src/backend/langflow/template/frontend_node/custom_components.py index 6d360ce96..8a3474d24 100644 --- a/src/backend/langflow/template/frontend_node/custom_components.py +++ b/src/backend/langflow/template/frontend_node/custom_components.py @@ -1,7 +1,7 @@ from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template -from langflow.utils.constants import DEFAULT_CUSTOM_COMPONENT_CODE +from langflow.interface.custom.constants import DEFAULT_CUSTOM_COMPONENT_CODE class 
CustomComponentFrontendNode(FrontendNode): diff --git a/src/backend/langflow/template/frontend_node/tools.py b/src/backend/langflow/template/frontend_node/tools.py index f2e77eb9f..07db16e3c 100644 --- a/src/backend/langflow/template/frontend_node/tools.py +++ b/src/backend/langflow/template/frontend_node/tools.py @@ -1,9 +1,9 @@ +from langflow.interface.custom.constants import DEFAULT_CUSTOM_COMPONENT_CODE from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template from langflow.utils.constants import ( DEFAULT_PYTHON_FUNCTION, - DEFAULT_CUSTOM_COMPONENT_CODE, ) From 1f4e94751672c9728e0efa1535862bc66d485eed Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 00:42:36 -0300 Subject: [PATCH 059/221] =?UTF-8?q?=F0=9F=94=A7=20fix(base.py):=20remove?= =?UTF-8?q?=20unused=20import=20statement=20=F0=9F=94=A7=20fix(constants.p?= =?UTF-8?q?y):=20remove=20unused=20import=20statements=20=F0=9F=94=A7=20fi?= =?UTF-8?q?x(types.py):=20refactor=20add=5Fnew=5Fcustom=5Ffield=20function?= =?UTF-8?q?=20to=20update=20field=5Fconfig=20values?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The import statement for `get_custom_nodes` in `base.py` is removed as it is not being used in the code. Similarly, the import statements for `requests` and `LLMChain` in `constants.py` are removed as they are not being used either. In `types.py`, the `add_new_custom_field` function is refactored to update the values of `field_config` if any of the keys are present in it. This allows for more flexibility in customizing the field configuration for a custom component. 
--- src/backend/langflow/interface/custom/base.py | 4 +++- .../langflow/interface/custom/constants.py | 17 +++++++++++++++++ src/backend/langflow/interface/types.py | 14 ++++++++++++-- 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/interface/custom/base.py b/src/backend/langflow/interface/custom/base.py index 359b799ce..bf5ca80d9 100644 --- a/src/backend/langflow/interface/custom/base.py +++ b/src/backend/langflow/interface/custom/base.py @@ -1,6 +1,6 @@ from typing import Any, Dict, List, Optional, Type -from langflow.custom.customs import get_custom_nodes + from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom.custom import CustomComponent from langflow.template.frontend_node.custom_components import ( @@ -27,6 +27,8 @@ class CustomComponentCreator(LangChainTypeCreator): return self.type_dict def get_signature(self, name: str) -> Optional[Dict]: + from langflow.custom.customs import get_custom_nodes + try: if name in get_custom_nodes(self.type_name).keys(): return get_custom_nodes(self.type_name)[name] diff --git a/src/backend/langflow/interface/custom/constants.py b/src/backend/langflow/interface/custom/constants.py index 920c2bc09..9e7b27750 100644 --- a/src/backend/langflow/interface/custom/constants.py +++ b/src/backend/langflow/interface/custom/constants.py @@ -21,3 +21,20 @@ LANGCHAIN_BASE_TYPES = { "Embeddings": Embeddings, "BaseRetriever": BaseRetriever, } +DEFAULT_CUSTOM_COMPONENT_CODE = """ +from langchain.chains import LLMChain +from langflow.interface.custom import CustomComponent +from langchain.schema import Document +import requests + +class YourComponent(CustomComponent): + display_name: str = "Your Component" + description: str = "Your description" + field_config = { "url": { "multiline": True, "required": True } } + + def build(self, url: str, llm: BaseLLM, prompt: prompt) -> Document: + response = requests.get(url) + chain = LLMChain(llm=llm, prompt=prompt) + result = 
chain.run(response.text) + return Document(page_content=str(result)) +""" diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index c91abcfbe..f9d4f72ba 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -76,12 +76,20 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union def add_new_custom_field( template, field_name: str, field_type: str, field_config: dict ): + # Check field_config if any of the keys are in it + # if it is, update the value + name = field_config.pop("name", field_name) + field_type = field_config.pop("field_type", field_type) + required = field_config.pop("required", True) + placeholder = field_config.pop("placeholder", "") + new_field = TemplateField( - name=field_name, + name=name, field_type=field_type, show=True, - required=True, + required=required, advanced=False, + placeholder=placeholder, **field_config, ) template.get("template")[field_name] = new_field.to_dict() @@ -121,6 +129,8 @@ def build_langchain_template_custom_component(extractor: CustomComponent): if "display_name" in template_config and frontend_node is not None: frontend_node["display_name"] = template_config["display_name"] + if "description" in template_config and frontend_node is not None: + frontend_node["description"] = template_config["description"] raw_code = extractor.code field_config = template_config.get("field_config", {}) if function_args is not None: From 34be9577c2bb0086da29bbd8ab173854f354d618 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 01:33:15 -0300 Subject: [PATCH 060/221] =?UTF-8?q?=F0=9F=94=80=20refactor(constants.py):?= =?UTF-8?q?=20refactor=20YourComponent=20class=20to=20improve=20readabilit?= =?UTF-8?q?y=20and=20remove=20unused=20imports=20=F0=9F=94=80=20refactor(c?= =?UTF-8?q?onstants.py):=20refactor=20build=20method=20in=20YourComponent?= 
=?UTF-8?q?=20class=20to=20use=20PromptTemplate=20and=20limit=20response?= =?UTF-8?q?=20text=20length=20The=20YourComponent=20class=20in=20constants?= =?UTF-8?q?.py=20has=20been=20refactored=20to=20improve=20readability=20an?= =?UTF-8?q?d=20remove=20unused=20imports.=20The=20build=20method=20now=20u?= =?UTF-8?q?ses=20PromptTemplate=20to=20handle=20the=20template=20parameter?= =?UTF-8?q?=20and=20limits=20the=20length=20of=20the=20response=20text=20t?= =?UTF-8?q?o=20300=20characters=20to=20avoid=20potential=20issues.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/constants.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/interface/custom/constants.py b/src/backend/langflow/interface/custom/constants.py index 9e7b27750..b99dd7bfb 100644 --- a/src/backend/langflow/interface/custom/constants.py +++ b/src/backend/langflow/interface/custom/constants.py @@ -21,20 +21,30 @@ LANGCHAIN_BASE_TYPES = { "Embeddings": Embeddings, "BaseRetriever": BaseRetriever, } + + DEFAULT_CUSTOM_COMPONENT_CODE = """ +from langflow import Prompt +from langchain.llms.base import BaseLLM from langchain.chains import LLMChain from langflow.interface.custom import CustomComponent +from langchain import PromptTemplate from langchain.schema import Document import requests -class YourComponent(CustomComponent): +class YourComponent: display_name: str = "Your Component" description: str = "Your description" field_config = { "url": { "multiline": True, "required": True } } - def build(self, url: str, llm: BaseLLM, prompt: prompt) -> Document: + def build(self, url: str, llm: BaseLLM, template: Prompt) -> Document: response = requests.get(url) + prompt = PromptTemplate.from_template(template) chain = LLMChain(llm=llm, prompt=prompt) - result = chain.run(response.text) + result = chain.run(response.text[:300]) return Document(page_content=str(result)) """ + + +# Create a 
new class that can be used as a type +# that returns type "prompt" if we get a certain param From 2815f5a1399c0b0f31dd448a5f41e7dcd5c6bc44 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 01:33:36 -0300 Subject: [PATCH 061/221] =?UTF-8?q?=F0=9F=9A=80=20feat(langflow):=20add=20?= =?UTF-8?q?Prompt=20class=20to=20types=20module=20The=20Prompt=20class=20i?= =?UTF-8?q?s=20added=20to=20the=20types=20module=20in=20the=20langflow=20p?= =?UTF-8?q?ackage.=20This=20class=20will=20be=20used=20for=20defining=20pr?= =?UTF-8?q?ompts=20in=20the=20language=20flow=20processing.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/__init__.py | 3 ++- src/backend/langflow/utils/types.py | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 src/backend/langflow/utils/types.py diff --git a/src/backend/langflow/__init__.py b/src/backend/langflow/__init__.py index 9b80c2ea7..d6c645486 100644 --- a/src/backend/langflow/__init__.py +++ b/src/backend/langflow/__init__.py @@ -1,6 +1,7 @@ from importlib import metadata from langflow.cache import cache_manager from langflow.processing.process import load_flow_from_json +from langflow.utils.types import Prompt try: __version__ = metadata.version(__package__) @@ -9,4 +10,4 @@ except metadata.PackageNotFoundError: __version__ = "" del metadata # optional, avoids polluting the results of dir(__package__) -__all__ = ["load_flow_from_json", "cache_manager"] +__all__ = ["load_flow_from_json", "cache_manager", "Prompt"] diff --git a/src/backend/langflow/utils/types.py b/src/backend/langflow/utils/types.py new file mode 100644 index 000000000..3657d550e --- /dev/null +++ b/src/backend/langflow/utils/types.py @@ -0,0 +1,2 @@ +class Prompt: + pass From 93892df8058e1e7ab45d16745b65f453e6a72942 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 01:34:14 -0300 Subject: [PATCH 062/221] 
=?UTF-8?q?=F0=9F=94=A7=20chore(validate.py):=20re?= =?UTF-8?q?move=20unnecessary=20contextlib.suppress=20block=20The=20contex?= =?UTF-8?q?tlib.suppress=20block=20was=20suppressing=20import=20errors,=20?= =?UTF-8?q?but=20it=20is=20no=20longer=20needed=20as=20the=20import=20erro?= =?UTF-8?q?rs=20are=20handled=20elsewhere=20in=20the=20code.=20Removing=20?= =?UTF-8?q?this=20block=20improves=20code=20readability=20and=20removes=20?= =?UTF-8?q?unnecessary=20complexity.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/utils/validate.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/utils/validate.py b/src/backend/langflow/utils/validate.py index 819a01c28..f8a9c1d1d 100644 --- a/src/backend/langflow/utils/validate.py +++ b/src/backend/langflow/utils/validate.py @@ -204,8 +204,9 @@ def create_class(code, class_name): code_obj = compile( ast.Module(body=[class_code], type_ignores=[]), "", "exec" ) - with contextlib.suppress(Exception): - exec(code_obj, exec_globals, locals()) + # This suppresses import errors + # with contextlib.suppress(Exception): + exec(code_obj, exec_globals, locals()) exec_globals[class_name] = locals()[class_name] # Return a function that imports necessary modules and creates an instance of the target class From 92217155e4d0ed5f5ce9a6e4e429f54d5a911ed3 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 01:34:26 -0300 Subject: [PATCH 063/221] =?UTF-8?q?=F0=9F=94=A8=20refactor(loading.py):=20?= =?UTF-8?q?reorganize=20code=20and=20remove=20duplicate=20import=5Fby=5Fty?= =?UTF-8?q?pe=20function=20=F0=9F=90=9B=20fix(loading.py):=20fix=20instant?= =?UTF-8?q?iation=20of=20custom=20components=20and=20handle=20single=20doc?= =?UTF-8?q?ument=20input=20in=20instantiate=5Ftextsplitter=20function=20Th?= =?UTF-8?q?e=20code=20in=20loading.py=20has=20been=20reorganized=20to=20im?= 
=?UTF-8?q?prove=20readability=20and=20remove=20duplicate=20import=5Fby=5F?= =?UTF-8?q?type=20function.=20The=20instantiation=20of=20custom=20componen?= =?UTF-8?q?ts=20has=20been=20fixed=20to=20correctly=20build=20the=20class?= =?UTF-8?q?=20object.=20Additionally,=20the=20instantiate=5Ftextsplitter?= =?UTF-8?q?=20function=20now=20handles=20single=20document=20input=20by=20?= =?UTF-8?q?converting=20it=20to=20a=20list=20before=20processing.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/initialize/loading.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index 17f55bb1d..2c2bd3e9d 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -15,8 +15,8 @@ from pydantic import ValidationError from langflow.interface.importing.utils import ( get_function, - import_by_type, get_function_custom, + import_by_type, ) from langflow.interface.custom_lists import CUSTOM_NODES from langflow.interface.toolkits.base import toolkits_creator @@ -92,10 +92,17 @@ def instantiate_based_on_type(class_object, base_type, node_type, params): return instantiate_retriever(node_type, class_object, params) elif base_type == "memory": return instantiate_memory(node_type, class_object, params) + elif base_type == "custom_components": + return instantiate_custom_component(node_type, class_object, params) else: return class_object(**params) +def instantiate_custom_component(node_type, class_object, params): + class_object = get_function_custom(params.pop("code")) + return class_object().build(**params) + + def instantiate_output_parser(node_type, class_object, params): if node_type in output_parser_creator.from_method_nodes: method = output_parser_creator.from_method_nodes[node_type] @@ -229,9 +236,6 @@ def 
instantiate_tool(node_type, class_object: Type[BaseTool], params: Dict): elif node_type == "PythonFunctionTool": params["func"] = get_function(params.get("code")) return class_object(**params) - elif node_type == "CustomComponent": - class_object = get_function_custom(params.pop("code")) - return class_object().build(**params) # For backward compatibility elif node_type == "PythonFunction": function_string = params["code"] @@ -322,6 +326,8 @@ def instantiate_textsplitter( ): try: documents = params.pop("documents") + if not isinstance(documents, list): + documents = [documents] except KeyError as exc: raise ValueError( "The source you provided did not load correctly or was empty." From 99bc9f01ba2b19170f76e12618e74d3bf5f27a94 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 01:34:37 -0300 Subject: [PATCH 064/221] =?UTF-8?q?=F0=9F=90=9B=20fix(custom.py):=20evalua?= =?UTF-8?q?te=20items=20in=20split=5Fitem=20list=20using=20ast.literal=5Fe?= =?UTF-8?q?val=20to=20handle=20cases=20where=20items=20are=20not=20strings?= =?UTF-8?q?=20The=20code=20now=20uses=20ast.literal=5Feval=20to=20evaluate?= =?UTF-8?q?=20each=20item=20in=20the=20split=5Fitem=20list.=20This=20is=20?= =?UTF-8?q?done=20to=20handle=20cases=20where=20the=20items=20are=20not=20?= =?UTF-8?q?strings=20and=20cannot=20be=20directly=20converted=20to=20their?= =?UTF-8?q?=20respective=20types.=20This=20ensures=20that=20the=20output?= =?UTF-8?q?=5Flist=20contains=20correctly=20evaluated=20items.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/custom.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/backend/langflow/interface/custom/custom.py b/src/backend/langflow/interface/custom/custom.py index 8985945a9..05d9b732a 100644 --- a/src/backend/langflow/interface/custom/custom.py +++ b/src/backend/langflow/interface/custom/custom.py @@ -88,6 +88,14 @@ class CustomComponent(BaseModel): # 
If there isn't a type, append None else: split_item.append(None) + for i in range(len(split_item)): + try: + # Try to evaluate the item + split_item[i] = ast.literal_eval(split_item[i]) + except ValueError: + # If it fails, just pass + pass + output_list.append(split_item) return output_list From c49b764fc004db61a01ad99ad119a9a4a0af2ed8 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 7 Jul 2023 01:34:51 -0300 Subject: [PATCH 065/221] =?UTF-8?q?=F0=9F=94=A7=20fix(utils.py):=20import?= =?UTF-8?q?=20CustomComponent=20class=20and=20add=20import=5Fcustom=5Fcomp?= =?UTF-8?q?onent=20function=20=F0=9F=94=A7=20fix(types.py):=20process=20fi?= =?UTF-8?q?eld=5Ftype=20to=20convert=20"Prompt"=20to=20"prompt"=20The=20`u?= =?UTF-8?q?tils.py`=20file=20was=20missing=20an=20import=20statement=20for?= =?UTF-8?q?=20the=20`CustomComponent`=20class=20from=20the=20`langflow.int?= =?UTF-8?q?erface.custom.custom`=20module.=20This=20import=20statement=20h?= =?UTF-8?q?as=20been=20added=20to=20the=20top=20of=20the=20file.=20Additio?= =?UTF-8?q?nally,=20a=20new=20function=20`import=5Fcustom=5Fcomponent`=20h?= =?UTF-8?q?as=20been=20added=20to=20import=20a=20custom=20component=20base?= =?UTF-8?q?d=20on=20its=20name.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In the `types.py` file, a new function `process_type` has been added to convert the field_type "Prompt" to "prompt". This is done to ensure consistency in the field types used throughout the codebase. 
--- src/backend/langflow/interface/importing/utils.py | 7 +++++++ src/backend/langflow/interface/types.py | 6 ++++++ 2 files changed, 13 insertions(+) diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index 5c40ed34c..bfcd18caa 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/langflow/interface/importing/utils.py @@ -9,6 +9,7 @@ from langchain.base_language import BaseLanguageModel from langchain.chains.base import Chain from langchain.chat_models.base import BaseChatModel from langchain.tools import BaseTool +from langflow.interface.custom.custom import CustomComponent from langflow.utils import validate @@ -46,6 +47,7 @@ def import_by_type(_type: str, name: str) -> Any: "utilities": import_utility, "output_parsers": import_output_parser, "retrievers": import_retriever, + "custom_components": import_custom_component, } if _type == "llms": key = "chat" if "chat" in name.lower() else "llm" @@ -56,6 +58,11 @@ def import_by_type(_type: str, name: str) -> Any: return loaded_func(name) +def import_custom_component(custom_component: str) -> CustomComponent: + """Import custom component from custom component name""" + return import_class(f"langflow.interface.custom.custom.{custom_component}") + + def import_output_parser(output_parser: str) -> Any: """Import output parser from output parser name""" return import_module(f"from langchain.output_parsers import {output_parser}") diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index f9d4f72ba..9b8b8cebe 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -72,6 +72,10 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union return all_types +def process_type(field_type: str): + return "prompt" if field_type == "Prompt" else field_type + + # TODO: Move to correct place def add_new_custom_field( template, 
field_name: str, field_type: str, field_config: dict @@ -80,6 +84,8 @@ def add_new_custom_field( # if it is, update the value name = field_config.pop("name", field_name) field_type = field_config.pop("field_type", field_type) + field_type = process_type(field_type) + required = field_config.pop("required", True) placeholder = field_config.pop("placeholder", "") From 7744ee3639761a41f32f517536ea5d1f7df81b5a Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Fri, 7 Jul 2023 23:12:32 +0100 Subject: [PATCH 066/221] Add postCustomComponent API controller for uploading custom components. This commit adds the postCustomComponent function to the API controller. It allows for uploading custom components by sending a POST request to `/api/v1/custom_component` with the code as a parameter. --- src/frontend/src/controllers/API/index.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index 2d6f951f0..d2456eb40 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -339,3 +339,10 @@ export async function uploadFile( formData.append("file", file); return await axios.post(`/api/v1/upload/${id}`, formData); } + +export async function postCustomComponent( + code: string, + apiClass: APIClassType +): Promise> { + return await axios.post(`/api/v1/custom_component`, { code }); +} \ No newline at end of file From a55088d7cbeef0dd11d05a0a9764e7e7eb3d2aa8 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Fri, 7 Jul 2023 19:38:38 -0300 Subject: [PATCH 067/221] fix(codeAreaModal/index.tsx): add useEffect import to fix missing dependency warning feat(codeAreaModal/index.tsx): add postCustomComponent import to enable posting custom components --- src/frontend/src/modals/codeAreaModal/index.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/frontend/src/modals/codeAreaModal/index.tsx 
b/src/frontend/src/modals/codeAreaModal/index.tsx index e2c69c8cc..f310a219c 100644 --- a/src/frontend/src/modals/codeAreaModal/index.tsx +++ b/src/frontend/src/modals/codeAreaModal/index.tsx @@ -1,5 +1,5 @@ // organize-imports-ignore -import { useContext, useRef, useState } from "react"; +import { useContext, useEffect, useRef, useState } from "react"; import { PopUpContext } from "../../contexts/popUpContext"; import AceEditor from "react-ace"; import "ace-builds/src-noconflict/ext-language_tools"; @@ -23,7 +23,7 @@ import { import { CODE_PROMPT_DIALOG_SUBTITLE } from "../../constants"; import { alertContext } from "../../contexts/alertContext"; import { darkContext } from "../../contexts/darkContext"; -import { postValidateCode } from "../../controllers/API"; +import { postCustomComponent, postValidateCode } from "../../controllers/API"; import { APIClassType } from "../../types/api"; import { Tabs, From 19b84247cff426dfcee48b8e2118a6c979645808 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 9 Jul 2023 11:08:50 -0300 Subject: [PATCH 068/221] =?UTF-8?q?=F0=9F=90=9B=20fix(custom.py):=20improv?= =?UTF-8?q?e=20error=20message=20formatting=20for=20invalid=20entrypoint?= =?UTF-8?q?=20function=20name=20and=20return=20type?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/custom.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/interface/custom/custom.py b/src/backend/langflow/interface/custom/custom.py index 05d9b732a..1ff79ca68 100644 --- a/src/backend/langflow/interface/custom/custom.py +++ b/src/backend/langflow/interface/custom/custom.py @@ -172,7 +172,10 @@ class CustomComponent(BaseModel): status_code=400, detail={ "error": "Invalid entrypoint function name", - "traceback": f"There needs to be at least one entrypoint function named '{self.function_entrypoint_name}' and it needs to return one of the types 
from this list {str(self.return_type_valid_list)}.", + "traceback": ( + f"There needs to be at least one entrypoint function named '{self.function_entrypoint_name}'" + f" and it needs to return one of the types from this list {str(self.return_type_valid_list)}.", + ), }, ) @@ -182,7 +185,10 @@ class CustomComponent(BaseModel): status_code=400, detail={ "error": "Invalid entrypoint function return", - "traceback": f"The entrypoint function return '{return_type}' needs to be an item from this list {str(self.return_type_valid_list)}.", + "traceback": ( + f"The entrypoint function return '{return_type}' needs to be an item " + f"from this list {str(self.return_type_valid_list)}." + ), }, ) From dc80513691178c0aa28f123877a417b0851d9f1f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 9 Jul 2023 11:09:19 -0300 Subject: [PATCH 069/221] =?UTF-8?q?=F0=9F=90=9B=20fix(types.py):=20change?= =?UTF-8?q?=20variable=20name=20from=20'name'=20to=20'display=5Fname'=20in?= =?UTF-8?q?=20add=5Fnew=5Fcustom=5Ffield=20function=20to=20improve=20clari?= =?UTF-8?q?ty=20=F0=9F=94=A7=20chore(types.py):=20add=20warning=20when=20'?= =?UTF-8?q?name'=20key=20is=20used=20in=20field=5Fconfig=20to=20inform=20t?= =?UTF-8?q?hat=20it=20can't=20be=20changed?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/types.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 9b8b8cebe..8b6d78f9a 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -21,7 +21,7 @@ from langflow.template.frontend_node.tools import CustomComponentNode from langflow.interface.retrievers.base import retriever_creator from langflow.utils.util import get_base_classes - +import warnings from fastapi import HTTPException import traceback @@ -82,20 +82,26 @@ def 
add_new_custom_field( ): # Check field_config if any of the keys are in it # if it is, update the value - name = field_config.pop("name", field_name) + display_name = field_config.pop("display_name", field_name) field_type = field_config.pop("field_type", field_type) field_type = process_type(field_type) + if "name" in field_config: + warnings.warn( + "The 'name' key in field_config is used to build the object and can't be changed." + ) + field_config.pop("name", None) required = field_config.pop("required", True) placeholder = field_config.pop("placeholder", "") new_field = TemplateField( - name=name, + name=field_name, field_type=field_type, show=True, required=required, advanced=False, placeholder=placeholder, + display_name=display_name, **field_config, ) template.get("template")[field_name] = new_field.to_dict() From 2f24fca1f275a1a7062e74c6d7cb02a554212d08 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 9 Jul 2023 11:09:44 -0300 Subject: [PATCH 070/221] =?UTF-8?q?=F0=9F=90=9B=20fix(base.py):=20fix=20is?= =?UTF-8?q?sue=20where=20CustomComponentCreator.to=5Flist()=20method=20alw?= =?UTF-8?q?ays=20returns=20None?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/backend/langflow/interface/custom/base.py b/src/backend/langflow/interface/custom/base.py index bf5ca80d9..8dfa127cc 100644 --- a/src/backend/langflow/interface/custom/base.py +++ b/src/backend/langflow/interface/custom/base.py @@ -37,6 +37,7 @@ class CustomComponentCreator(LangChainTypeCreator): except AttributeError as exc: logger.error(f"CustomComponent {name} not loaded: {exc}") return None + return None def to_list(self) -> List[str]: return list(self.type_to_loader_dict.keys()) From d2750dd3dca48f70391a6a3ea15f7ed32ba2eeb0 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 9 Jul 2023 11:10:07 -0300 Subject: [PATCH 
071/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(base.py):=20rem?= =?UTF-8?q?ove=20unused=20method=20`process=5Fbase=5Fclasses`=20from=20Fro?= =?UTF-8?q?ntendNode=20class?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The `process_base_classes` method in the `FrontendNode` class is no longer used and can be safely removed to improve code readability and maintainability. --- src/backend/langflow/template/frontend_node/base.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py index f8b879a22..6241e709e 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/langflow/template/frontend_node/base.py @@ -52,14 +52,6 @@ class FrontendNode(BaseModel): output_types: List[str] = [] field_formatters: FieldFormatters = Field(default_factory=FieldFormatters) - def process_base_classes(self) -> None: - """Removes unwanted base classes from the list of base classes.""" - self.base_classes = [ - base_class - for base_class in self.base_classes - if base_class not in CLASSES_TO_REMOVE - ] - # field formatters is an instance attribute but it is not used in the class # so we need to create a method to get it @staticmethod From 5ece7e2fbc0ecf288b73ea55f6693ff399addb35 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 9 Jul 2023 11:10:35 -0300 Subject: [PATCH 072/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(loading.py):=20?= =?UTF-8?q?remove=20unused=20imports=20and=20reorganize=20import=20stateme?= =?UTF-8?q?nts=20for=20better=20readability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/initialize/loading.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index a33ac8b05..2ec59e3b5 100644 --- 
a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -19,7 +19,6 @@ from langflow.interface.importing.utils import ( import_by_type, ) from langflow.interface.custom_lists import CUSTOM_NODES -from langflow.interface.importing.utils import get_function, import_by_type from langflow.interface.agents.base import agent_creator from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.chains.base import chain_creator From 352cb8bc8545d099a2727f0017ac0cb6c806e4b6 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 9 Jul 2023 11:12:41 -0300 Subject: [PATCH 073/221] formatting --- .../components/parameterComponent/index.tsx | 4 +- .../components/inputFileComponent/index.tsx | 1 - src/frontend/src/controllers/API/index.ts | 2 +- .../src/modals/EditNodeModal/index.tsx | 10 +- .../NodeModal/components/ModalField/index.tsx | 2 +- src/frontend/src/modals/baseModal/index.tsx | 121 ++++---- src/frontend/src/modals/codeAreaModal/v2.tsx | 274 +++++++++--------- src/frontend/src/utils.ts | 2 +- 8 files changed, 211 insertions(+), 205 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 0e4e42cd7..06b7ebaa8 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -268,9 +268,9 @@ export default function ParameterComponent({ >
) : left === true && type === "code" ? ( -
+
{ data.node = nodeClass; }} diff --git a/src/frontend/src/components/inputFileComponent/index.tsx b/src/frontend/src/components/inputFileComponent/index.tsx index ab8fd3fd1..fc69e5f95 100644 --- a/src/frontend/src/components/inputFileComponent/index.tsx +++ b/src/frontend/src/components/inputFileComponent/index.tsx @@ -91,7 +91,6 @@ export default function InputFileComponent({ input.click(); }; - return (
diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index d2456eb40..fdfe721a9 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -345,4 +345,4 @@ export async function postCustomComponent( apiClass: APIClassType ): Promise> { return await axios.post(`/api/v1/custom_component`, { code }); -} \ No newline at end of file +} diff --git a/src/frontend/src/modals/EditNodeModal/index.tsx b/src/frontend/src/modals/EditNodeModal/index.tsx index 2c84d7e56..2a5a686a0 100644 --- a/src/frontend/src/modals/EditNodeModal/index.tsx +++ b/src/frontend/src/modals/EditNodeModal/index.tsx @@ -66,7 +66,7 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) { } } - useEffect(() => { }, [closePopUp, data.node.template]); + useEffect(() => {}, [closePopUp, data.node.template]); function changeAdvanced(node): void { Object.keys(data.node.template).filter((n, i) => { @@ -140,7 +140,7 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) { {data.node.template[n].type === "str" && - !data.node.template[n].options ? ( + !data.node.template[n].options ? (
{data.node.template[n].list ? ( { data.node = nodeClass; }} diff --git a/src/frontend/src/modals/NodeModal/components/ModalField/index.tsx b/src/frontend/src/modals/NodeModal/components/ModalField/index.tsx index dce616132..db63adc39 100644 --- a/src/frontend/src/modals/NodeModal/components/ModalField/index.tsx +++ b/src/frontend/src/modals/NodeModal/components/ModalField/index.tsx @@ -63,7 +63,7 @@ export default function ModalField({ disabled={false} value={ !data.node.template[name].value || - data.node.template[name].value === "" + data.node.template[name].value === "" ? [""] : data.node.template[name].value } diff --git a/src/frontend/src/modals/baseModal/index.tsx b/src/frontend/src/modals/baseModal/index.tsx index e4d01e06b..13893d8c1 100644 --- a/src/frontend/src/modals/baseModal/index.tsx +++ b/src/frontend/src/modals/baseModal/index.tsx @@ -1,78 +1,69 @@ -import { ReactNode, useContext, useEffect, useRef } from "react"; +import { ReactNode, useContext } from "react"; -import _ from "lodash"; -import { - Dialog, - DialogContent, - DialogDescription, - DialogHeader, - DialogTitle, - DialogTrigger, -} from "../../components/ui/dialog"; import React from "react"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "../../components/ui/dialog"; import { PopUpContext } from "../../contexts/popUpContext"; -type ContentProps = {children:ReactNode}; -type HeaderProps = {children:ReactNode,description:string}; - +type ContentProps = { children: ReactNode }; +type HeaderProps = { children: ReactNode; description: string }; const Content: React.FC = ({ children }) => { - return ( -
- {children} -
) -} + return
{children}
; +}; -const Header: React.FC<{ children: ReactNode, description:string }> = ({ children,description }) => { - return ( - - - {children} - - {description} - - ) -} +const Header: React.FC<{ children: ReactNode; description: string }> = ({ + children, + description, +}) => { + return ( + + {children} + {description} + + ); +}; interface BaseModalProps { - children: [React.ReactElement, React.ReactElement]; - open: boolean; - setOpen: (open: boolean) => void; + children: [React.ReactElement, React.ReactElement]; + open: boolean; + setOpen: (open: boolean) => void; +} +function BaseModal({ open, setOpen, children }: BaseModalProps) { + const { closePopUp, setCloseEdit } = useContext(PopUpContext); + + function setModalOpen(x: boolean) { + setOpen(x); + if (x === false) { + setTimeout(() => { + setCloseEdit("editcode"); + closePopUp(); + }, 300); + } } -function BaseModal({ - open, - setOpen, - children, -}: BaseModalProps) { - const {closePopUp, setCloseEdit} = useContext(PopUpContext) - - function setModalOpen(x: boolean) { - setOpen(x); - if (x === false) { - setTimeout(() => { - setCloseEdit("editcode"); - closePopUp(); - }, 300); - } - } - const headerChild = React.Children.toArray(children).find((child) => (child as React.ReactElement).type === Header); - const ContentChild = React.Children.toArray(children).find( - (child) => (child as React.ReactElement).type === Content - ); - //UPDATE COLORS AND STYLE CLASSSES - return ( - - - - {headerChild} -
- {ContentChild} -
-
-
- ); - + const headerChild = React.Children.toArray(children).find( + (child) => (child as React.ReactElement).type === Header + ); + const ContentChild = React.Children.toArray(children).find( + (child) => (child as React.ReactElement).type === Content + ); + //UPDATE COLORS AND STYLE CLASSSES + return ( + + + + {headerChild} +
{ContentChild}
+
+
+ ); } BaseModal.Content = Content; BaseModal.Header = Header; -export default BaseModal; \ No newline at end of file +export default BaseModal; diff --git a/src/frontend/src/modals/codeAreaModal/v2.tsx b/src/frontend/src/modals/codeAreaModal/v2.tsx index 13d522039..9655945f7 100644 --- a/src/frontend/src/modals/codeAreaModal/v2.tsx +++ b/src/frontend/src/modals/codeAreaModal/v2.tsx @@ -1,13 +1,13 @@ +// organize-imports-ignore import { useContext, useEffect, useRef, useState } from "react"; import { PopUpContext } from "../../contexts/popUpContext"; -import 'ace-builds/src-noconflict/ace'; +import "ace-builds/src-noconflict/ace"; import { darkContext } from "../../contexts/darkContext"; import { postCustomComponent, postValidateCode } from "../../controllers/API"; import { alertContext } from "../../contexts/alertContext"; import { Button } from "../../components/ui/button"; import { CODE_PROMPT_DIALOG_SUBTITLE } from "../../constants"; import { APIClassType } from "../../types/api"; -import TwoColumnsModal from "../baseModal"; import { DialogTitle } from "@radix-ui/react-dialog"; import { TerminalSquare } from "lucide-react"; import AceEditor from "react-ace"; @@ -15,143 +15,157 @@ import "ace-builds/src-noconflict/mode-python"; import "ace-builds/src-noconflict/theme-github"; import "ace-builds/src-noconflict/theme-twilight"; import "ace-builds/src-noconflict/ext-language_tools"; -import 'ace-builds/src-noconflict/ace'; -import { XCircle } from 'lucide-react'; +import "ace-builds/src-noconflict/ace"; import BaseModal from "../baseModal"; export default function CodeAreaModal({ - value, - setValue, - nodeClass, - setNodeClass, - dynamic + value, + setValue, + nodeClass, + setNodeClass, + dynamic, }: { - setValue: (value: string) => void; - value: string; - nodeClass: APIClassType; - setNodeClass: (Class: APIClassType) => void; - dynamic?: boolean; + setValue: (value: string) => void; + value: string; + nodeClass: APIClassType; + setNodeClass: (Class: 
APIClassType) => void; + dynamic?: boolean; }) { - const [open, setOpen] = useState(true); - const [code, setCode] = useState(value); - const [loading, setLoading] = useState(false); - const { dark } = useContext(darkContext); - const { setErrorData, setSuccessData } = useContext(alertContext); - const [activeTab, setActiveTab] = useState("0"); - const [error, setError] = useState<{ detail: { error: string, traceback: string } }>(null) - const { closePopUp, setCloseEdit } = useContext(PopUpContext); - const ref = useRef(); - function setModalOpen(x: boolean) { - setOpen(x); - if (x === false) { - setTimeout(() => { - setCloseEdit("editcode"); - closePopUp(); - }, 300); - } + const [open, setOpen] = useState(true); + const [code, setCode] = useState(value); + const [loading, setLoading] = useState(false); + const { dark } = useContext(darkContext); + const { setErrorData, setSuccessData } = useContext(alertContext); + const [activeTab, setActiveTab] = useState("0"); + const [error, setError] = useState<{ + detail: { error: string; traceback: string }; + }>(null); + const { closePopUp, setCloseEdit } = useContext(PopUpContext); + const ref = useRef(); + function setModalOpen(x: boolean) { + setOpen(x); + if (x === false) { + setTimeout(() => { + setCloseEdit("editcode"); + closePopUp(); + }, 300); } - useEffect(() => { - setValue(code); - }, [code, setValue]) + } + useEffect(() => { + setValue(code); + }, [code, setValue]); - function handleClick() { - setLoading(true); - if (!dynamic) { - postValidateCode(code) - .then((apiReturn) => { - setLoading(false); - if (apiReturn.data) { - let importsErrors = apiReturn.data.imports.errors; - let funcErrors = apiReturn.data.function.errors; - if (funcErrors.length === 0 && importsErrors.length === 0) { - setSuccessData({ - title: "Code is ready to run", - }); - // setValue(code); - } else { - if (funcErrors.length !== 0) { - setErrorData({ - title: "There is an error in your function", - list: funcErrors, - }); - } - if 
(importsErrors.length !== 0) { - setErrorData({ - title: "There is an error in your imports", - list: importsErrors, - }); - } - } - } else { - setErrorData({ - title: "Something went wrong, please try again", - }); - } - }) - .catch((_) => { - setLoading(false); - setErrorData({ - title: "There is something wrong with this code, please review it", - }); + function handleClick() { + setLoading(true); + if (!dynamic) { + postValidateCode(code) + .then((apiReturn) => { + setLoading(false); + if (apiReturn.data) { + let importsErrors = apiReturn.data.imports.errors; + let funcErrors = apiReturn.data.function.errors; + if (funcErrors.length === 0 && importsErrors.length === 0) { + setSuccessData({ + title: "Code is ready to run", + }); + // setValue(code); + } else { + if (funcErrors.length !== 0) { + setErrorData({ + title: "There is an error in your function", + list: funcErrors, }); - } - else { - postCustomComponent(code, nodeClass).then((apiReturn) => { - const { data } = apiReturn; - if (data) { - setNodeClass(data); - setModalOpen(false); - } - }).catch((err) => { - setError(err.response.data); + } + if (importsErrors.length !== 0) { + setErrorData({ + title: "There is an error in your imports", + list: importsErrors, + }); + } + } + } else { + setErrorData({ + title: "Something went wrong, please try again", }); - } - + } + }) + .catch((_) => { + setLoading(false); + setErrorData({ + title: "There is something wrong with this code, please review it", + }); + }); + } else { + postCustomComponent(code, nodeClass) + .then((apiReturn) => { + const { data } = apiReturn; + if (data) { + setNodeClass(data); + setModalOpen(false); + } + }) + .catch((err) => { + setError(err.response.data); + }); } - const tabs = [{ name: "code" }, { name: "errors" }] + } + const tabs = [{ name: "code" }, { name: "errors" }]; - return ( - - - - Edit Code - - -
-
- { - setCode(value); - }} - className="w-full rounded-lg h-full custom-scroll border-[1px] border-gray-300 dark:border-gray-600" - /> -
-
-
-

{error?.detail?.error}

-
{error?.detail?.traceback}
-
-
-
-
-
-
-
- ); + return ( + + + + Edit Code + + + +
+
+ { + setCode(value); + }} + className="h-full w-full rounded-lg border-[1px] border-gray-300 custom-scroll dark:border-gray-600" + /> +
+
+
+

+ {error?.detail?.error} +

+
+
+                  {error?.detail?.traceback}
+                
+
+
+
+
+ +
+
+
+
+ ); } diff --git a/src/frontend/src/utils.ts b/src/frontend/src/utils.ts index 710c27971..d9be31ad5 100644 --- a/src/frontend/src/utils.ts +++ b/src/frontend/src/utils.ts @@ -17,10 +17,10 @@ import { Paperclip, Rocket, Scissors, + Sparkles, TerminalSquare, Wand2, Wrench, - Sparkles, } from "lucide-react"; import { ComponentType, SVGProps } from "react"; import { Connection, Edge, Node, ReactFlowInstance } from "reactflow"; From 6122521783de17e7dc02c4af0fd182489b0ed9be Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Mon, 10 Jul 2023 19:34:36 +0100 Subject: [PATCH 074/221] =?UTF-8?q?=F0=9F=90=9B=20fix(custom.py):=20remove?= =?UTF-8?q?=20unused=20imports=20and=20unused=20code=20block=20to=20improv?= =?UTF-8?q?e=20code=20cleanliness=20and=20performance=20=E2=9C=A8=20feat(c?= =?UTF-8?q?ustom.py):=20add=20NotImplementedError=20to=20the=20build=20met?= =?UTF-8?q?hod=20to=20indicate=20that=20it=20needs=20to=20be=20implemented?= =?UTF-8?q?=20in=20subclasses=20=F0=9F=9A=A7=20chore(test=5Fcustom=5Fcompo?= =?UTF-8?q?nent.py):=20add=20test=20cases=20for=20various=20methods=20in?= =?UTF-8?q?=20the=20CustomComponent=20class=20to=20improve=20test=20covera?= =?UTF-8?q?ge=20and=20ensure=20code=20correctness?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/custom.py | 8 +- tests/test_custom_component.py | 180 ++++++++++++++++++ 2 files changed, 183 insertions(+), 5 deletions(-) create mode 100644 tests/test_custom_component.py diff --git a/src/backend/langflow/interface/custom/custom.py b/src/backend/langflow/interface/custom/custom.py index 1ff79ca68..91ec84e84 100644 --- a/src/backend/langflow/interface/custom/custom.py +++ b/src/backend/langflow/interface/custom/custom.py @@ -1,4 +1,5 @@ import ast +import contextlib import traceback from typing import Callable, Optional from fastapi import HTTPException @@ -89,12 +90,9 @@ class CustomComponent(BaseModel): else: split_item.append(None) for i in 
range(len(split_item)): - try: + with contextlib.suppress(ValueError): # Try to evaluate the item split_item[i] = ast.literal_eval(split_item[i]) - except ValueError: - # If it fails, just pass - pass output_list.append(split_item) @@ -198,7 +196,7 @@ class CustomComponent(BaseModel): return validate.create_function(self.code, self.function_entrypoint_name) def build(self): - pass + raise NotImplementedError @property def data(self): diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py new file mode 100644 index 000000000..fcb5a03eb --- /dev/null +++ b/tests/test_custom_component.py @@ -0,0 +1,180 @@ +import ast +import pytest +from fastapi import HTTPException +from langflow.interface.custom.custom import CustomComponent +from langflow.interface.custom.constants import DEFAULT_CUSTOM_COMPONENT_CODE + + +# Test the __init__ method +def test_init(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + assert isinstance(component, CustomComponent) + assert component.code == DEFAULT_CUSTOM_COMPONENT_CODE + + +# Test the _handle_import method +def test_handle_import(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + node = ast.parse("import math").body[0] + component._handle_import(node) + assert "math" in component.class_template["imports"] + + +# Test the _handle_class method +def test_handle_class(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + node = ast.parse("class Test: pass").body[0] + component._handle_class(node) + assert component.class_template["class"]["name"] == "Test" + + +# Test the _handle_function method +def test_handle_function(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + node = ast.parse("def func(): pass").body[0] + component._handle_function(node) + function_data = {"name": "func", "arguments": [], "return_type": "None"} + assert function_data in 
component.class_template["functions"] + + +# Test the transform_list method +def test_transform_list(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + input_list = ["var1: int", "var2: str", "var3"] + output_list = [["var1", "int"], ["var2", "str"], ["var3", None]] + assert component.transform_list(input_list) == output_list + + +# Test the extract_class_info method with valid code +def test_extract_class_info(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + class_info = component.extract_class_info() + assert "requests" in class_info["imports"] + assert class_info["class"]["name"] == "YourComponent" + function_data = { + "name": "build", + "arguments": ["self", "url: str", "llm: BaseLLM", "template: Prompt"], + "return_type": "Document", + } + assert function_data in class_info["functions"] + + +# Test the extract_class_info method with invalid code +def test_extract_class_info_invalid_code(): + component = CustomComponent(field_config={}, code="invalid code") + with pytest.raises(HTTPException) as e: + component.extract_class_info() + + exception = e.value + assert exception.status_code == 400 + assert exception.detail["error"] == "invalid syntax" + + +# Test the get_entrypoint_function_args_and_return_type method +def test_get_entrypoint_function_args_and_return_type(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + ( + function_args, + return_type, + template_config, + ) = component.get_entrypoint_function_args_and_return_type() + assert function_args == [ + ["self", None], + ["url", "str"], + ["llm", "BaseLLM"], + ["template", "Prompt"], + ] + assert return_type == "Document" + assert template_config == { + "description": "Your description", + "display_name": "Your Component", + "field_config": {"url": {"multiline": True, "required": True}}, + } + + +# Test the _build_template_config method +def test__build_template_config(): + attributes = { + 
"field_config": "'field_config_value'", + "display_name": "'display_name_value'", + "description": "'description_value'", + } + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + template_config = component._build_template_config(attributes) + + assert template_config == { + "field_config": "field_config_value", + "display_name": "display_name_value", + "description": "description_value", + } + + +# Test the _class_template_validation method with a valid class template +def test__class_template_validation_valid(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + assert component._class_template_validation(code=component.data) is True + + +# Test the _class_template_validation method with an invalid class template +def test__class_template_validation_invalid(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + class_template = {} + + with pytest.raises(Exception) as e: + component._class_template_validation(class_template) + + exception = e.value + assert exception.status_code == 400 + assert exception.detail["error"] == "The main class must have a valid name." 
+ + +# Test the build method +def test_build(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + with pytest.raises(Exception) as e: + component.build() + + assert e.type == NotImplementedError + + +# Test the data property +def test_data(): + code = DEFAULT_CUSTOM_COMPONENT_CODE + component = CustomComponent(field_config={}, code=code) + class_info = component.data + assert "requests" in class_info["imports"] + assert class_info["class"]["name"] == "YourComponent" + function_data = { + "name": "build", + "arguments": ["self", "url: str", "llm: BaseLLM", "template: Prompt"], + "return_type": "Document", + } + assert function_data in class_info["functions"] + + +# Test the is_check_valid method +def test_is_check_valid(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + + assert component.is_check_valid() is True + + +# Test the args_and_return_type property +def test_args_and_return_type(): + component = CustomComponent(field_config={}, code=DEFAULT_CUSTOM_COMPONENT_CODE) + + function_args, return_type, template_config = component.args_and_return_type + + assert function_args == [ + ["self", None], + ["url", "str"], + ["llm", "BaseLLM"], + ["template", "Prompt"], + ] + + assert return_type == "Document" + assert template_config == { + "description": "Your description", + "display_name": "Your Component", + "field_config": {"url": {"multiline": True, "required": True}}, + } From 719015b5bb82f806347eecfa4c7f47f8d152c3a5 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Mon, 10 Jul 2023 23:38:01 +0100 Subject: [PATCH 075/221] =?UTF-8?q?=F0=9F=90=9B=20fix(custom.py):=20import?= =?UTF-8?q?=20re=20module=20to=20fix=20NameError=20when=20using=20re.split?= =?UTF-8?q?=20=F0=9F=90=9B=20fix(custom.py):=20fix=20indentation=20of=20cl?= =?UTF-8?q?ass=5Ftemplate=20dictionary=20to=20improve=20readability=20?= =?UTF-8?q?=F0=9F=90=9B=20fix(custom.py):=20fix=20indentation=20of=20class?= 
=?UTF-8?q?=20dictionary=20to=20improve=20readability=20=F0=9F=90=9B=20fix?= =?UTF-8?q?(custom.py):=20fix=20indentation=20of=20=5Fhandle=5Ffunction=20?= =?UTF-8?q?method=20to=20improve=20readability=20=F0=9F=90=9B=20fix(custom?= =?UTF-8?q?.py):=20fix=20indentation=20of=20transform=5Flist=20method=20to?= =?UTF-8?q?=20improve=20readability=20=F0=9F=90=9B=20fix(custom.py):=20fix?= =?UTF-8?q?=20indentation=20of=20extract=5Fclass=5Finfo=20method=20to=20im?= =?UTF-8?q?prove=20readability=20=F0=9F=90=9B=20fix(custom.py):=20fix=20in?= =?UTF-8?q?dentation=20of=20=5Fclass=5Ftemplate=5Fvalidation=20method=20to?= =?UTF-8?q?=20improve=20readability=20=F0=9F=90=9B=20fix(custom.py):=20fix?= =?UTF-8?q?=20indentation=20of=20build=5Flangchain=5Ftemplate=5Fcustom=5Fc?= =?UTF-8?q?omponent=20method=20to=20improve=20readability=20=F0=9F=90=9B?= =?UTF-8?q?=20fix(custom.py):=20fix=20indentation=20of=20add=5Fnew=5Fcusto?= =?UTF-8?q?m=5Ffield=20method=20to=20improve=20readability=20=F0=9F=90=9B?= =?UTF-8?q?=20fix(custom.py):=20fix=20indentation=20of=20add=5Fcode=5Ffiel?= =?UTF-8?q?d=20method=20to=20improve=20readability=20=F0=9F=90=9B=20fix(cu?= =?UTF-8?q?stom.py):=20fix=20indentation=20of=20extract=5Ftype=5Ffrom=5Fop?= =?UTF-8?q?tional=20method=20to=20improve=20readability=20=F0=9F=90=9B=20f?= =?UTF-8?q?ix(custom.py):=20fix=20indentation=20of=20build=5Flangchain=5Ft?= =?UTF-8?q?emplate=5Fcustom=5Fcomponent=20method=20to=20improve=20readabil?= =?UTF-8?q?ity=20=F0=9F=94=A5=20chore(custom.py):=20remove=20unused=20impo?= =?UTF-8?q?rts=20and=20variables=20=E2=9C=A8=20feat(custom.py):=20add=20su?= =?UTF-8?q?pport=20for=20splitting=20a=20string=20by=20':'=20or=20'=3D'=20?= =?UTF-8?q?and=20padding=20with=20None=20until=20length=20is=203=20in=20?= =?UTF-8?q?=5Fsplit=5Fstring=20method=20=E2=9C=A8=20feat(custom.py):=20add?= =?UTF-8?q?=20support=20for=20transforming=20a=20list=20of=20strings=20by?= =?UTF-8?q?=20splitting=20each=20string=20and=20padding=20with=20None=20in?= 
=?UTF-8?q?=20transform=5Flist=20method=20=E2=9C=A8=20feat(custom.py):=20a?= =?UTF-8?q?dd=20support=20for=20extracting=20the=20type=20from=20a=20strin?= =?UTF-8?q?g=20formatted=20as=20"Optional[]"=20in=20extract=5Ftype?= =?UTF-8?q?=5Ffrom=5Foptional=20method=20=E2=9C=A8=20feat(custom.py):=20ad?= =?UTF-8?q?d=20support=20for=20passing=20field=5Fvalue=20and=20field=5Freq?= =?UTF-8?q?uired=20parameters=20to=20add=5Fnew=5Fcustom=5Ffield=20method?= =?UTF-8?q?=20=E2=9C=A8=20feat(custom.py):=20add=20support=20for=20passing?= =?UTF-8?q?=20field=5Fvalue=20and=20field=5Frequired=20parameters=20to=20b?= =?UTF-8?q?uild=5Flangchain=5Ftemplate=5Fcustom=5Fcomponent=20method=20?= =?UTF-8?q?=E2=9C=A8=20feat(custom.py):=20add=20support=20for=20passing=20?= =?UTF-8?q?field=5Fvalue=20and=20field=5Frequired=20parameters=20to=20add?= =?UTF-8?q?=5Fnew=5Fcustom=5Ffield=20method=20=E2=9C=A8=20feat(custom.py):?= =?UTF-8?q?=20add=20support=20for=20passing=20field=5Fvalue=20and=20field?= =?UTF-8?q?=5Frequired=20parameters=20to=20build=5Flangchain=5Ftemplate=5F?= =?UTF-8?q?custom=5Fcomponent=20method=20=E2=9C=A8=20feat(custom.py):=20ad?= =?UTF-8?q?d=20support=20for=20passing=20field=5Fvalue=20and=20field=5Freq?= =?UTF-8?q?uired=20parameters=20to=20add=5Fnew=5Fcustom=5Ffield=20method?= =?UTF-8?q?=20=E2=9C=A8=20feat(custom.py):=20add=20support=20for=20passing?= =?UTF-8?q?=20field=5Fvalue=20and=20field=5Frequired=20parameters=20to=20b?= =?UTF-8?q?uild=5Flangchain=5Ftemplate=5Fcustom=5Fcomponent=20method=20?= =?UTF-8?q?=E2=9C=A8=20feat(custom.py):=20add=20support=20for=20passing=20?= =?UTF-8?q?field=5Fvalue=20and=20field=5Frequired=20parameters=20to=20add?= =?UTF-8?q?=5Fnew=5Fcustom=5Ffield=20method=20=E2=9C=A8=20feat(custom.py):?= =?UTF-8?q?=20add=20support=20for?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/custom.py | 46 ++++++++------ src/backend/langflow/interface/types.py | 62 +++++++++++++++---- 2 files changed, 
77 insertions(+), 31 deletions(-) diff --git a/src/backend/langflow/interface/custom/custom.py b/src/backend/langflow/interface/custom/custom.py index 91ec84e84..6d46c5d18 100644 --- a/src/backend/langflow/interface/custom/custom.py +++ b/src/backend/langflow/interface/custom/custom.py @@ -1,5 +1,5 @@ +import re import ast -import contextlib import traceback from typing import Callable, Optional from fastapi import HTTPException @@ -77,26 +77,34 @@ class CustomComponent(BaseModel): else: self.class_template["functions"].append(function_data) + def _split_string(self, text): + """ + Split a string by ':' or '=' and append None until the resulting list has 3 items. + + Parameters: + text (str): The string to be split. + + Returns: + list: A list of strings resulting from the split operation, + padded with None until its length is 3. + """ + items = [item.strip() for item in re.split(r"[:=]", text) if item.strip()] + while len(items) < 3: + items.append(None) + + return items + def transform_list(self, input_list): - output_list = [] - for item in input_list: - # Split each item on ':' to separate variable name and type - split_item = item.split(":") + """ + Transform a list of strings by splitting each string and padding with None. - # If there is a type, strip any leading/trailing spaces from it - if len(split_item) > 1: - split_item[1] = split_item[1].strip() - # If there isn't a type, append None - else: - split_item.append(None) - for i in range(len(split_item)): - with contextlib.suppress(ValueError): - # Try to evaluate the item - split_item[i] = ast.literal_eval(split_item[i]) + Parameters: + input_list (list): The list of strings to be transformed. - output_list.append(split_item) - - return output_list + Returns: + list: A list of lists, each containing the result of the split operation. 
+ """ + return [self._split_string(item) for item in input_list] def extract_class_info(self): try: @@ -120,6 +128,7 @@ class CustomComponent(BaseModel): attributes = data.get("class", {}).get("attributes", {}) functions = data.get("functions", []) template_config = self._build_template_config(attributes) + if build_function := next( (f for f in functions if f["name"] == self.function_entrypoint_name), None, @@ -146,6 +155,7 @@ class CustomComponent(BaseModel): ) if "description" in attributes: template_config["description"] = ast.literal_eval(attributes["description"]) + return template_config def _class_template_validation(self, code: dict): diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 8b6d78f9a..08bfe77c7 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -20,14 +20,14 @@ from langflow.template.field.base import TemplateField from langflow.template.frontend_node.tools import CustomComponentNode from langflow.interface.retrievers.base import retriever_creator -from langflow.utils.util import get_base_classes +import re import warnings -from fastapi import HTTPException import traceback +from fastapi import HTTPException +from langflow.utils.util import get_base_classes + # Used to get the base_classes list - - def get_type_list(): """Get a list of all langchain types""" all_types = build_langchain_types_dict() @@ -69,6 +69,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union created_types = creator.to_dict() if created_types[creator.type_name].values(): all_types.update(created_types) + return all_types @@ -78,25 +79,35 @@ def process_type(field_type: str): # TODO: Move to correct place def add_new_custom_field( - template, field_name: str, field_type: str, field_config: dict + template, + field_name: str, + field_type: str, + field_value: str, + field_required: bool, + field_config: dict, ): # Check field_config if any of the 
keys are in it # if it is, update the value display_name = field_config.pop("display_name", field_name) field_type = field_config.pop("field_type", field_type) field_type = process_type(field_type) + + if field_value is not None: + field_value = field_value.replace("'", "").replace('"', "") + if "name" in field_config: warnings.warn( "The 'name' key in field_config is used to build the object and can't be changed." ) field_config.pop("name", None) - required = field_config.pop("required", True) + required = field_config.pop("required", field_required) placeholder = field_config.pop("placeholder", "") new_field = TemplateField( name=field_name, field_type=field_type, + value=field_value, show=True, required=required, advanced=False, @@ -133,6 +144,20 @@ def add_code_field(template, raw_code): return template +def extract_type_from_optional(field_type): + """ + Extract the type from a string formatted as "Optional[]". + + Parameters: + field_type (str): The string from which to extract the type. + + Returns: + str: The extracted type, or an empty string if no type was found. 
+ """ + match = re.search(r"\[(.*?)\]", field_type) + return match[1] if match else None + + def build_langchain_template_custom_component(extractor: CustomComponent): # Build base "CustomComponent" template frontend_node = CustomComponentNode().to_dict().get(type(extractor).__name__) @@ -145,19 +170,30 @@ def build_langchain_template_custom_component(extractor: CustomComponent): frontend_node["description"] = template_config["description"] raw_code = extractor.code field_config = template_config.get("field_config", {}) + if function_args is not None: # Add extra fields for extra_field in function_args: - def_field = extra_field[0] - def_type = extra_field[1] + field_required = True + field_name, field_type, field_value = extra_field - if def_field != "self": + if field_name != "self": # TODO: Validate type - if is possible to render into frontend - if not def_type: - def_type = "str" - config = field_config.get(def_field, {}) + if "optional" in field_type.lower(): + field_type = extract_type_from_optional(field_type) + field_required = False + + if not field_type: + field_type = "str" + + config = field_config.get(field_name, {}) frontend_node = add_new_custom_field( - frontend_node, def_field, def_type, config + frontend_node, + field_name, + field_type, + field_value, + field_required, + config, ) frontend_node = add_code_field(frontend_node, raw_code) From fb91b17c512e249dcade2346d879e42f70805531 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 12 Jul 2023 00:08:52 +0100 Subject: [PATCH 076/221] =?UTF-8?q?=F0=9F=93=A6=20chore(router.py):=20add?= =?UTF-8?q?=20component=5Frouter=20to=20APIRouter=20to=20include=20compone?= =?UTF-8?q?nt=20routes=20=F0=9F=93=A6=20chore(=5F=5Finit=5F=5F.py):=20add?= =?UTF-8?q?=20component=5Frouter=20to=20=5F=5Fall=5F=5F=20list=20to=20expo?= =?UTF-8?q?se=20component=20routes=20=F0=9F=93=A6=20feat(components.py):?= =?UTF-8?q?=20add=20routes=20for=20creating,=20reading,=20updating,=20and?= 
=?UTF-8?q?=20deleting=20components=20=F0=9F=93=A6=20chore(endpoints.py):?= =?UTF-8?q?=20import=20Component=20model=20from=20database.models.componen?= =?UTF-8?q?t=20=F0=9F=93=A6=20chore(schemas.py):=20add=20ComponentListCrea?= =?UTF-8?q?te=20and=20ComponentListRead=20schemas=20=F0=9F=93=A6=20feat(mo?= =?UTF-8?q?dels/component.py):=20add=20Component=20model=20with=20fields?= =?UTF-8?q?=20for=20name,=20description,=20and=20data=20=F0=9F=93=A6=20fea?= =?UTF-8?q?t(models/component.py):=20add=20ComponentCreate,=20ComponentRea?= =?UTF-8?q?d,=20and=20ComponentUpdate=20models=20for=20CRUD=20operations?= =?UTF-8?q?=20on=20components?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 1108 ++++++----------- src/backend/langflow/api/router.py | 2 + src/backend/langflow/api/v1/__init__.py | 2 + src/backend/langflow/api/v1/components.py | 83 ++ src/backend/langflow/api/v1/schemas.py | 8 + .../langflow/database/models/component.py | 52 + 6 files changed, 561 insertions(+), 694 deletions(-) create mode 100644 src/backend/langflow/api/v1/components.py create mode 100644 src/backend/langflow/database/models/component.py diff --git a/poetry.lock b/poetry.lock index 5977a7d22..cac5bb037 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aiofiles" version = "23.1.0" description = "File support for asyncio." 
-category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -16,7 +15,6 @@ files = [ name = "aiohttp" version = "3.8.4" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -125,7 +123,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -140,7 +137,6 @@ frozenlist = ">=1.1.0" name = "aiostream" version = "0.4.5" description = "Generator-based operators for asynchronous iteration" -category = "main" optional = false python-versions = "*" files = [ @@ -150,29 +146,27 @@ files = [ [[package]] name = "anthropic" -version = "0.3.2" +version = "0.3.4" description = "Client library for the anthropic API" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "anthropic-0.3.2-py3-none-any.whl", hash = "sha256:43ad86df406bf91419e3c651e20dcc69ae273c932c92c26973a1621a72ff1d86"}, - {file = "anthropic-0.3.2.tar.gz", hash = "sha256:f968e970bb0dfa38b1ec59db7bb4162fd1e0f2bef95c3203e926effe62bfcf38"}, + {file = "anthropic-0.3.4-py3-none-any.whl", hash = "sha256:7b0396f663b0e4eaaf485ae59a0be014cddfc0f0b8f4dad79bb35d8f28439097"}, + {file = "anthropic-0.3.4.tar.gz", hash = "sha256:36184840bd33184697666d4f1ec951d78ef5da22e87d936cd3c04b611d84e93c"}, ] [package.dependencies] -anyio = ">=3.5.0" -distro = ">=1.7.0" -httpx = ">=0.23.0" +anyio = ">=3.5.0,<4" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" pydantic = ">=1.9.0,<2.0.0" tokenizers = ">=0.13.0" -typing-extensions = ">=4.1.1" +typing-extensions = ">=4.1.1,<5" [[package]] name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -194,7 +188,6 @@ trio = ["trio (<0.22)"] name = 
"appdirs" version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" optional = false python-versions = "*" files = [ @@ -206,7 +199,6 @@ files = [ name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = "*" files = [ @@ -218,7 +210,6 @@ files = [ name = "argilla" version = "0.0.1" description = "" -category = "main" optional = false python-versions = "*" files = [ @@ -230,7 +221,6 @@ files = [ name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -248,7 +238,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -266,7 +255,6 @@ test = ["astroid", "pytest"] name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -278,7 +266,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -297,7 +284,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "authlib" version = "1.2.1" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
-category = "main" optional = false python-versions = "*" files = [ @@ -312,7 +298,6 @@ cryptography = ">=3.2" name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -324,7 +309,6 @@ files = [ name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -336,7 +320,6 @@ files = [ name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -353,37 +336,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.3.0" +version = "23.7.0" description = "The uncompromising code formatter." -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, - {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, - {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, - {file = 
"black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, - {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, - {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, - {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, - {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, - {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, - {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, - {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = 
"sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, - {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, - {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, - {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, - {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = 
"black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, ] [package.dependencies] @@ -405,7 +384,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -424,7 +402,6 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -436,7 +413,6 @@ files = [ name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -448,7 +424,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -525,7 +500,6 @@ pycparser = "*" name = "chardet" version = "5.1.0" description = "Universal encoding detector for Python 3" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -535,94 +509,92 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", 
hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = 
"charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] [[package]] name = "chromadb" version = "0.3.26" description = "Chroma." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -650,14 +622,13 @@ uvicorn = {version = ">=0.18.3", extras = ["standard"]} [[package]] name = "click" -version = "8.1.3" +version = "8.1.4" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.4-py3-none-any.whl", hash = "sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3"}, + {file = "click-8.1.4.tar.gz", hash = "sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37"}, ] [package.dependencies] @@ -667,7 +638,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-log" version = "0.4.0" description = "Logging integration for Click" -category = "main" optional = false python-versions = "*" files = [ @@ -680,81 +650,81 @@ click = "*" [[package]] name = "clickhouse-connect" -version = "0.6.4" +version = "0.6.6" description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" -category = "main" optional = false python-versions = "~=3.7" files = [ - {file = "clickhouse-connect-0.6.4.tar.gz", hash = "sha256:0afe555e7a20df2e06341d00935b4298b6a5a1eabee3db43a897719a9bf7f047"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:387f4c3bc4a988ba6b233de642bc849718fc6e142130f3ff62529b7b093e4242"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3483a9945ecfbab9d498422d7a0e7e600c3c7e2e7a6178852e355fbda9871ac6"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db39119dc1905487a80be8b9a8505b45dae98f39d8f2ebfa355f9489d6a9958d"}, - {file = 
"clickhouse_connect-0.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:025438e7420d8f610d34e5743c17c273fda74a72741c561767896632f896709a"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29ffc92bea0c8ed2eb9e45a79bb708816f9bb5041c23fca0a44b4c73a79d9d53"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:040602c1be63c6bdbd9c5b03218c3aca60ce33ee22871b56f810671665e31d27"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50b037c41453dcb7a47160dca8f3c05f4817e49d5d4ed01aace2c619c0109cfd"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca5da538f50771004e931508f0d84680299df35576a05665edd39400b6d1d486"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-win32.whl", hash = "sha256:2286687bcff686c68df349686694557f3142e2792506ef0ca41664ee54b48122"}, - {file = "clickhouse_connect-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:49fd9b6fa03025b3e04d6ddc3c3443e4383f44b63dcf551d8fddd8a149c06993"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:82fb4120968fb007408e41b6e799a389e1e5f94144362dd25640c89633424295"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b6a4e0f5c84b516142353feb315a7c93fee2fe732536cc1f0e3f994001d6771"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04c77676f0afcb45f8443e051849d9d34d88d7925adcb2d14a5320188e3d9ad3"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbc27ca039da6260f749b81740e3ecff4f6d251f39f3c507510a4bd06455b49a"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ec2f5ab36dea0037d9cc783b80c4994f176b38aed419c32f6ac7168be76e7667"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:167f87309d0384ca290bb5891293e9be1f89fa2557b7642a9d3cbeeb423271cc"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9e3b422e64661820d47f98eebe7e27d26082c6eba82a83d82e1682d33e6b92b0"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fbc6b3a39dd0d55e0723f0394b8a6608ebd864c0e4775e40a65acbd13fbb07cc"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-win32.whl", hash = "sha256:744bb4e40834b026f7422b990f5e1c9dc0cb3c9b6da9d79e9479edd53dd873fb"}, - {file = "clickhouse_connect-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:cde51f67054fd465925928fa3eb40a23ee691057c55ae58ccf8fae6903abcbe7"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76d4e4c388f6ca1bc310f6d40791ee84f7ddba8e06d8f737d21669a6e4f58f0f"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:785b6386ed1a43912db2badd2f1f480fe4817e87f8b88296c335243288aa1077"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66a4baa63a073c843a86fbff64b4c500f636bf8978beddf3c0181491d57d5c8a"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:896d9c812e7429581ee99e920bd0064c004b2cce258a3548c124ad95b2ac46d8"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c236b8db0a382f4998a564b47504727c815d276c9527f6ea43128c323742f6f5"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8dfa63f063a3c75236e499ea8582b1d3d4d56180dad316cc3644967c03db4f36"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:aa46ca9c76bef313e67b9eae0774517b76d2e22bfe4df7e092bad3838b82dee8"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-win32.whl", hash = "sha256:c20f83955356302250e1830408654cf665ca9101794621dd67301529540715e1"}, - {file = "clickhouse_connect-0.6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:10193393835a28a7211bb16daeb1a3d98e4dd9eba649279faad68de328d79136"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b3ec2ea59da24177f8128ce75421cf498d8d647006c2134f388f4437f9171149"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ddf71c7014b5dc2ec08e5301892b8025eb254a063f5a339ec9c3f956a3e11135"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d00b581608440f8356a3f51a25dbc00526108126811f79b9271f4e0cacc5db"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7fbb81d1b68ddff43cc1b117884030cf28ad8b0668e96703a63c4b1780f26d"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9eb0ea6af4a44f6a9b264ba6416ced81de6e250fd1fc6b6903ce0c20b457520"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:626d9fbc9cc787afe3c234f2db762571756e0114829e5c36ff0dd2f949720827"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:59a947df5301175432ad436022ef74fc8864de7201f438de96772250fd8fb749"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adc0c122a38fae3cd02f76a81be0a7feb2d12d290003a7c3c314525c910bbb04"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-win32.whl", hash = "sha256:5391a21e7d3c44c49d05fff06384d84f85db64be3a399bb07d8acf043e8caad9"}, - {file = "clickhouse_connect-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:abeca87c81965a3e18e62608c98f8436615409c5a3669203b266f2f6f23ee16f"}, - {file 
= "clickhouse_connect-0.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:097bfc3da261c9a33df1a0dcb74351b150cb0fed5c570f0dbdf9fb010e820897"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:44cf2ac9ee2b996ef3c2946ad1321d8536fe97b4ddbeeaf2d36f6a1f9d5a53ba"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86c5edb9c6f2e8d2093420747d4f1e5f2d4f901a9cf47c276d400b75e5e07b0e"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eae52f4167beb961c11f462abba49c3d06037cba126c1febc414ee42aab0b23"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73999d748089f4cb28917d63446b80ebffb8939dc3728ede86e3a580494ee7a1"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e8ad1765c78b5e4e8936b8e6044b3da4f31cd24cf15b6e6f1adca542072abc50"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dfca12b0eb0c4f2e60815abae3f15e38ff5d22c48d89ed8b8914d83dc23f6404"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:203e9fc0315373996c32e4f9be3012ee0caed6b92b404653bf7c432318c3107a"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-win32.whl", hash = "sha256:fac7c375b4644f9866310a11bb13299e0070f38c5a975222b7eb5bc330ef753a"}, - {file = "clickhouse_connect-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:2efece8266091df991bb343c5ab1d29ec4e164791b60d4c62f508b1f46086c9a"}, - {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9de809b027e2b1451e117478933e023ee56c48fbc049aef28ef09ab570e0e203"}, - {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b69fb748f4c5fa60eca91f9782be8506d5d2d197ec324a9586f693ae0c1cc94"}, - {file = 
"clickhouse_connect-0.6.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1edadb0e68fa5d834ab2e4a778f99e803261cd1f0ebc513f60d0f8f5044f7b1a"}, - {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e26d2fa4a86c98a69d0dee057b5d5e0317208971da6ee8ff0765f50b1b267db"}, - {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76f63eedbec2e603f93f25022cc821d147339be482221213949ea0f0b5915eb5"}, - {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a9e6710a7b61e08c8ce64091624af9e37f23804211eebfb647623f33804ccdd"}, - {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab7266c8019e6fede18cd69161934ffba4a1c1910175300492b6ee1da47785b5"}, - {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7df49434c4330dc58d749c8685a76ee441bf1c5776230dbf57952af0409194"}, - {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:549691452128eb3035021b9b7be4f3bdc6c4b9192213b167de2ea3dfae87d01c"}, - {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:387b82f38b1977c2a38545172838504591ad123d87c09a82758d87e76453beb0"}, - {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f1ea1a89e6696a9905157b1a65884dd33dc88d7d50e74434c5bd650f8cfe1701"}, - {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b697e0e97f3e3404cf5b7d515adb8c025ba21083ed6c1dd4b7b1c789a10343bc"}, - {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22bdad905a6fe6cea576f03cc948b719f44b94bcdc5a00728621d0d3082c724c"}, - {file = 
"clickhouse_connect-0.6.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52fb56fcc82825b3e4760878ca447b5ffbde0fef60c9048ac8c04b3f40fd773a"}, - {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2577ab779607839570472f889957ad85e71c27afba975d47f3906665865113c9"}, + {file = "clickhouse-connect-0.6.6.tar.gz", hash = "sha256:28d261b95fe9818f4d8bc4ad48087cbff3c9f0b6574ff04d234ed5bca6619474"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:31187a9947f5771c9e2a4c5d5c33d8c42f1c0f83b1223277c8faf47da0fcd1dc"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1e1713d1f9f294c0cf05ded6f7eff227dde2b19f0d19423fbbeb05fbf5d7c484"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:961c463de6f0de93fc11f1c1f81efc1ec5b5895481cfdf79b3f832e0e242e7e1"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18900f1a13b3b120252fc3583ca1e0fc4d3a33ea98fcf63d33d168a469561056"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4cbbea1a943e742ea649c82f85109b9a9928e61b038923de2813977966acd76"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2714ab61f063a65419278b97f8785ce2440fdb1ef46d9a6703cef9cd38517521"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:823756569f6bea58ff9286cf494abaca5db8652e33ee4a6e7ecb40efbf945088"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11aff145aacfae92b941b95ec5943fb62ea241ec2225b8ecefc4cadadf699893"}, + {file = "clickhouse_connect-0.6.6-cp310-cp310-win32.whl", hash = "sha256:4f5f9e3dcece211dc711088a5b264e66e8198b878bdf99619a3a7c54976c118d"}, + {file = 
"clickhouse_connect-0.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:8268927ef8d476ef4c81d9562d049f38bc534c4d1d441e072cf8428f08ff6eaa"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5f9cb2ebe0deaa78c942888aad32fa42beb4e75c2377e8784baf3d737c23e5f1"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d502b7f35008facf2774f411eed6b35010923acaac254a8c5683fdf8a11abd62"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87e0f2afe464be0947947d98482eb12b25be8857ae1a31c1aaa17a67f616174d"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69f2c517943eeb7663a9d42bd9b737b8ec5513ddcf58f2372f8b2074a315bae2"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa6c2b488cf9558c2b71a2599d812fe4368d5199edaa011731a8bc7bfe019751"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:df9e80d0b3f5614d38026e7e2e7e7412dec942df8d765c082177879b37e678e2"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a20351fb2ae47aac1ae9b1de0585949616baedd6dbdee5272f466a2aea6ec4dd"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af40eaa20998d96198563748a6fd9796843b6f22e9e95b2136aabd917db33fff"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-win32.whl", hash = "sha256:9591a9bfa58ace467544227f83226b22a1554e2db4cfcf658f25f43c9d94e960"}, + {file = "clickhouse_connect-0.6.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b6f6159f8eddb0cad4d7e0cbad5944e97e0146ee9f416fc663f7bd3d4e9ea46"}, + {file = "clickhouse_connect-0.6.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8b941c85fe9ddd5e5edf6fc7458563d9e51ad900d95fe0b87b0458be166693a1"}, + {file = 
"clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c642696a758fa726c86ca624dd40acded100d79a9f4bd9f5b56ba0ea4dc44099"}, + {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57b6b36b316451c1bdc4450f9418c017af84af57d52d03cd4deb85480819a934"}, + {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17cfb1d103b47350c3ba824641fb5ba730e6e29274077a6f8975a3394a1abadb"}, + {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d52c7e7560666b93c078bf082e4ed87689fd283e6295a6d8d1dd491d4d7b6072"}, + {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0a6d498b689aa09e9d1b0051480a04ecc3509002f54bfb82998d030b4675bb24"}, + {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28c876f7a4713662af2ded7350a0262756ec4da9262bb76cc85cfe2e88015b74"}, + {file = "clickhouse_connect-0.6.6-cp37-cp37m-win32.whl", hash = "sha256:74bf0a95c7c5644948be0ba9c0abcad7615b806fd2545501862526dbe684db71"}, + {file = "clickhouse_connect-0.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:0aaa4194d11cb7513de69b791911ff60b3ad8b86f125446a37347208e9b9ae6d"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b873d138dfedbe761f2d66ad1257ea253394c4f8dcffd6ff34dfb990f13a18b"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7140705d05a05ac39eecf86727ab55985e5dba9d1734df8921cc417853a18b7f"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69085fa0f4e5da5cef4ae5249e19f10d91e57ae78628e49e8853b71b6003dbae"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e6ec081d87cc37be3ecf60b88002c58add76a72b4124525cb5cd28539e7d488"}, + {file = 
"clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe04eb239b72bc9fa4f1999cd292f82af507cbe1f07546f26a3332c50a294b"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:244bbf7ad92f1f030378412358c47cd377aa6d469b548dba2406a7894c8da2ab"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:69e91bdb25166b6fa4eb55601d86fa57dee82070bce9b97a858c8973615ab8b8"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d2627c8a9625e1c9058cfb5b231a0d0180ed9215d901b601d367de598f27a90d"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-win32.whl", hash = "sha256:87fb937b34b561703eaba5781404736120bab691f4525096d5dfb4b99d4890a6"}, + {file = "clickhouse_connect-0.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:366c5765e6b7863b3a8d565d5a3b27f9f8731f6f4b016048fa172c6ad6485594"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c1b0d8bee6399f5b68bb0832fae51fd0f5e4bcb539bae2df36d8433b6e38a0b"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f7e3ead1429ec82b9cd0cf7b807bacf69d895042f75276f63d732378344376"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36df02ebfbfa4dbe3667bf5b3402ff0193d0f682b9aa09d71469c15745473d8e"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa636b0cbbff52c9fafe287d1d818fc9947feaa840c951b8bfd8f8d4d1ee45a0"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4968b6b48baae43d62c241bee9e1c8f680ee3d054254e3959c2d2fb7d370ee"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a4156de52fe1f9b19f8c3a820d57c012a55644c56a87c8d31ecff89115959d60"}, + {file = 
"clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fccbe34878e6202ff5715284cbe57e748d36f4c8ad6217f9c80f84a086013fb9"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:70bfe48c0e4340ccf234b691fbd52f32db74649cb84ca28b98a211cc3e30b30c"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-win32.whl", hash = "sha256:9f80b64e2268293a918721e1c122c54e2a1592bb74824fdd70e9add9fbcea31a"}, + {file = "clickhouse_connect-0.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:04a5030b76ee930b18eb3aeb7847146c2fa29da0feb0ec7dd3a0564a3de944f1"}, + {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:75e84c827c8180d5dc66b0e99dba422a3ffd2c7d8ee5ba80e00b9c942dff8a36"}, + {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e627061336142d02e9c900a96bcd87372e88f05755bf19b158e68472b99a921"}, + {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:194f72e8f3f24c207aa87113b8d11674dab12b35232fd8b7b19b97257796be45"}, + {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf755b46089ee6a7f1ab3e24fc6fbacefc54cfefceb0ed81ebf198abf6937dac"}, + {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:39e58756a13872a24304b1987fafb7d5112ea88469eb55303b1183ebdd7a0be5"}, + {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1e29de1264ffa26eb822e57c5715974c9818ae8e16bb114e54352d66947cdf7f"}, + {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74ed74427aaf10d2e8f7697b8ec53479f6068287ea695a5f3d3927db40be3c3"}, + {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:abc910b0f6c93d0d703809fd92cf19b71dcaf8c6d5f328deddae1709061a0aa2"}, + {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23b17236e08da8b5d737ccd983db56a2d2222955a49c4b312b12e4a2b4a06c9b"}, + {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d4d76560d0ce84d0ba550918433dd1f8da6983edabe2685cd84679cd7a90c179"}, + {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:815bd0d5f40174716ffdf1adab066cd0e36c82c81b227224fb7281bdf8734eb6"}, + {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82abd319ba51e0c5c2d123e2cf30b1604b0d46f4de694096aa911ddd63701f60"}, + {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa3eea5dac3a7cd52523b556ecd05940c4710c96b6e39ec5a05ed7859bddc7f6"}, + {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bbc28cdf903b4b2805199ce7d4580814a8b9bb4766ddd835cab46a81e6fcd63"}, + {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5fc4deda5a97e672135b4330d81109b443266aa948b09a24a02db58c0fc96bc1"}, ] [package.dependencies] certifi = "*" +importlib-metadata = "*" lz4 = "*" pytz = "*" urllib3 = ">=1.26" @@ -769,14 +739,13 @@ sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] [[package]] name = "cohere" -version = "4.11.2" +version = "4.12.1" description = "" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "cohere-4.11.2-py3-none-any.whl", hash = "sha256:c5032f4a2aafbcfdf1cacd5b49121c8cc4804fbd121d4a7ac0dfea499398ea28"}, - {file = "cohere-4.11.2.tar.gz", hash = "sha256:4d3e663a306e6fcb87c41cded2195257ebc6992d361a70417f6616f045c4ec47"}, + {file = "cohere-4.12.1-py3-none-any.whl", hash = 
"sha256:80d17ae928873cdf63883a338618e477de5c71b3d510d7891af7dfdabc25186e"}, + {file = "cohere-4.12.1.tar.gz", hash = "sha256:2e93a094757576d6c8d42e76363aa7841eb4166c5b0de8e5ed7272783982d2a4"}, ] [package.dependencies] @@ -789,7 +758,6 @@ requests = ">=2.0,<3.0" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -801,7 +769,6 @@ files = [ name = "coloredlogs" version = "15.0.1" description = "Colored terminal output for Python's logging module" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -819,7 +786,6 @@ cron = ["capturer (>=2.4)"] name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -839,7 +805,6 @@ typing = ["mypy (>=0.990)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -913,31 +878,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.1" +version = "41.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, - {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, - {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, - {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, - {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, - {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, - {file = 
"cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, - {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, 
+ {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, + {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, + {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, + {file = 
"cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, + {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, ] [package.dependencies] @@ -957,7 +925,6 @@ test-randomorder = ["pytest-randomly"] name = "ctransformers" version = "0.2.11" description = "Python bindings for the Transformer models implemented in C/C++ using GGML library." -category = "main" optional = false python-versions = "*" files = [ @@ -975,7 +942,6 @@ tests = ["pytest"] name = "dataclasses-json" version = "0.5.9" description = "Easily serialize dataclasses to and from JSON" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -995,7 +961,6 @@ dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest ( name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1023,7 +988,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1035,7 +999,6 @@ files = [ name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1053,7 +1016,6 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" -category = "main" optional = false python-versions = "*" files = [ @@ -1068,7 +1030,6 @@ packaging = "*" name = "dill" version = "0.3.6" description = "serialize all of python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1079,11 +1040,21 @@ files = [ [package.extras] graph = ["objgraph (>=1.7.2)"] +[[package]] +name = "diskcache" +version = "5.6.1" +description = "Disk Cache -- Disk and file backed persistent cache." +optional = false +python-versions = ">=3" +files = [ + {file = "diskcache-5.6.1-py3-none-any.whl", hash = "sha256:558c6a2d5d7c721bb00e40711803d6804850c9f76c426ed81ecc627fe9d2ce2d"}, + {file = "diskcache-5.6.1.tar.gz", hash = "sha256:e4c978532feff5814c4cc00fe1e11e40501985946643d73220d41ee7737c72c3"}, +] + [[package]] name = "distro" version = "1.8.0" description = "Distro - an OS platform information API" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1095,7 +1066,6 @@ files = [ name = "dnspython" version = "2.3.0" description = "DNS toolkit" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1116,7 +1086,6 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docarray" version = "0.21.1" description = "The data structure for unstructured data" -category = "main" optional = false python-versions = "*" files = [ @@ -1145,7 +1114,6 @@ weaviate = ["weaviate-client (>=3.9.0,<3.10.0)"] name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1167,7 +1135,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.15" description = "Parse Python docstrings in reST, Google and Numpydoc format" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -1179,7 +1146,6 @@ files = [ name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1191,7 +1157,6 @@ files = [ name = "dotty-dict" version = "1.3.1" description = "Dictionary wrapper for quick access to deeply nested keys." -category = "main" optional = false python-versions = ">=3.5,<4.0" files = [ @@ -1203,7 +1168,6 @@ files = [ name = "duckdb" version = "0.8.1" description = "DuckDB embedded database" -category = "main" optional = false python-versions = "*" files = [ @@ -1265,7 +1229,6 @@ files = [ name = "ecdsa" version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1284,7 +1247,6 @@ gmpy2 = ["gmpy2"] name = "et-xmlfile" version = "1.1.0" description = "An implementation of lxml.xmlfile for the standard library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1296,7 +1258,6 @@ files = [ name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1311,7 +1272,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -1326,7 +1286,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "faiss-cpu" version = "1.7.4" description = "A library for efficient similarity search and clustering 
of dense vectors." -category = "main" optional = false python-versions = "*" files = [ @@ -1361,7 +1320,6 @@ files = [ name = "fake-useragent" version = "1.1.3" description = "Up-to-date simple useragent faker with real world database" -category = "main" optional = false python-versions = "*" files = [ @@ -1376,7 +1334,6 @@ importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""} name = "fastapi" version = "0.99.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1396,7 +1353,6 @@ all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" name = "filelock" version = "3.12.2" description = "A platform independent file lock." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1412,7 +1368,6 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p name = "filetype" version = "1.2.0" description = "Infer file type and MIME type of any file/buffer. No external dependencies." 
-category = "main" optional = false python-versions = "*" files = [ @@ -1424,7 +1379,6 @@ files = [ name = "flatbuffers" version = "23.5.26" description = "The FlatBuffers serialization format for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1436,7 +1390,6 @@ files = [ name = "frozenlist" version = "1.3.3" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1520,7 +1473,6 @@ files = [ name = "fsspec" version = "2023.6.0" description = "File-system specification" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1556,7 +1508,6 @@ tqdm = ["tqdm"] name = "gitdb" version = "4.0.10" description = "Git Object Database" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1569,14 +1520,13 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.31" +version = "3.1.32" description = "GitPython is a Python library used to interact with Git repositories" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, - {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, + {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"}, + {file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"}, ] [package.dependencies] @@ -1586,7 +1536,6 @@ gitdb = ">=4.0.1,<5" name = "google-api-core" version = "2.11.1" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1609,18 +1558,17 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = 
"2.92.0" +version = "2.93.0" description = "Google API Client Library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.92.0.tar.gz", hash = "sha256:f38a6e106a7417719715506d36f0a233ec253335e422bda311352866a86c4187"}, - {file = "google_api_python_client-2.92.0-py2.py3-none-any.whl", hash = "sha256:e0b74ed5fa9bdb07a66fb030d3f4cae550ed1c07e23600d86450d3c3c5efae51"}, + {file = "google-api-python-client-2.93.0.tar.gz", hash = "sha256:62ee28e96031a10a1c341f226a75ac6a4f16bdb1d888dc8222b2cdca133d0031"}, + {file = "google_api_python_client-2.93.0-py2.py3-none-any.whl", hash = "sha256:f34abb671afd488bd19d30721ea20fb30d3796ddd825d6f91f26d8c718a9f07d"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" google-auth = ">=1.19.0,<3.0.0.dev0" google-auth-httplib2 = ">=0.1.0" httplib2 = ">=0.15.0,<1.dev0" @@ -1628,14 +1576,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.21.0" +version = "2.22.0" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "google-auth-2.21.0.tar.gz", hash = "sha256:b28e8048e57727e7cf0e5bd8e7276b212aef476654a09511354aa82753b45c66"}, - {file = "google_auth-2.21.0-py2.py3-none-any.whl", hash = "sha256:da3f18d074fa0f5a7061d99b9af8cee3aa6189c987af7c1b07d94566b6b11268"}, + {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, + {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, ] [package.dependencies] @@ -1656,7 +1603,6 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-auth-httplib2" version = "0.1.0" description = "Google Authentication Library: httplib2 transport" -category = "main" optional = false python-versions = "*" files = 
[ @@ -1671,18 +1617,17 @@ six = "*" [[package]] name = "google-cloud-aiplatform" -version = "1.27.0" +version = "1.28.0" description = "Vertex AI API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-aiplatform-1.27.0.tar.gz", hash = "sha256:ba9724e51eefddd284547ed632afcf23573b9040ea8ca5fd668a9ea093d5dcec"}, - {file = "google_cloud_aiplatform-1.27.0-py2.py3-none-any.whl", hash = "sha256:501a1dd8ad0012d73da3f3938140113f163d7c75b442bfc2e5f9a8889aca4119"}, + {file = "google-cloud-aiplatform-1.28.0.tar.gz", hash = "sha256:810339254f354f9a0084f020aab43a56f710348910c177821f7d962b461244a0"}, + {file = "google_cloud_aiplatform-1.28.0-py2.py3-none-any.whl", hash = "sha256:9fff957e193cc6de88a189b5a967f0cbd358c1da9b7faf36f3b7141fc0486243"}, ] [package.dependencies] -google-api-core = {version = ">=1.32.0,<2.0.0 || >=2.8.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.32.0,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-bigquery = ">=1.15.0,<4.0.0dev" google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" google-cloud-storage = ">=1.32.0,<3.0.0dev" @@ -1711,7 +1656,6 @@ xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] name = "google-cloud-bigquery" version = "3.11.3" description = "Google BigQuery API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1720,7 +1664,7 @@ files = [ ] [package.dependencies] -google-api-core = {version = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.6.0,<3.0.0dev" google-resumable-media = ">=0.6.0,<3.0dev" grpcio = ">=1.47.0,<2.0dev" @@ -1742,18 +1686,17 @@ tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] name = "google-cloud-core" -version = "2.3.2" +version = "2.3.3" description = "Google Cloud API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ - 
{file = "google-cloud-core-2.3.2.tar.gz", hash = "sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a"}, - {file = "google_cloud_core-2.3.2-py2.py3-none-any.whl", hash = "sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe"}, + {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, + {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, ] [package.dependencies] -google-api-core = ">=1.31.6,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] @@ -1763,7 +1706,6 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)"] name = "google-cloud-resource-manager" version = "1.10.2" description = "Google Cloud Resource Manager API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1772,7 +1714,7 @@ files = [ ] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" proto-plus = ">=1.22.0,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1781,7 +1723,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 name = "google-cloud-storage" version = "2.10.0" description = "Google Cloud Storage API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1790,7 +1731,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" google-cloud-core = 
">=2.3.0,<3.0dev" google-resumable-media = ">=2.3.2" @@ -1803,7 +1744,6 @@ protobuf = ["protobuf (<5.0.0dev)"] name = "google-crc32c" version = "1.5.0" description = "A python wrapper of the C library 'Google CRC32C'" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1884,7 +1824,6 @@ testing = ["pytest"] name = "google-resumable-media" version = "2.5.0" description = "Utilities for Google Media Downloads and Resumable Uploads" -category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -1903,7 +1842,6 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] name = "google-search-results" version = "2.4.2" description = "Scrape and search localized results from Google, Bing, Baidu, Yahoo, Yandex, Ebay, Homedepot, youtube at scale using SerpApi.com" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1917,7 +1855,6 @@ requests = "*" name = "googleapis-common-protos" version = "1.59.1" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1936,7 +1873,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "gotrue" version = "1.0.2" description = "Python Client Library for GoTrue" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1952,7 +1888,6 @@ pydantic = ">=1.10.0,<2.0.0" name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -2026,7 +1961,6 @@ test = ["objgraph", "psutil"] name = "grpc-google-iam-v1" version = "0.12.6" description = "IAM API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2043,7 +1977,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 name = "grpcio" version = "1.47.5" description = "HTTP/2-based RPC framework" -category = "main" optional = false 
python-versions = ">=3.6" files = [ @@ -2105,7 +2038,6 @@ protobuf = ["grpcio-tools (>=1.47.5)"] name = "grpcio-health-checking" version = "1.47.5" description = "Standard Health Checking Service for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2121,7 +2053,6 @@ protobuf = ">=3.12.0" name = "grpcio-reflection" version = "1.47.5" description = "Standard Protobuf Reflection Service for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2137,7 +2068,6 @@ protobuf = ">=3.12.0" name = "grpcio-status" version = "1.47.5" description = "Status proto mapping for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2154,7 +2084,6 @@ protobuf = ">=3.12.0" name = "grpcio-tools" version = "1.47.5" description = "Protobuf code generator for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2215,7 +2144,6 @@ setuptools = "*" name = "gunicorn" version = "20.1.0" description = "WSGI HTTP Server for UNIX" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2236,7 +2164,6 @@ tornado = ["tornado (>=0.2)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2248,7 +2175,6 @@ files = [ name = "h2" version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -2264,7 +2190,6 @@ hyperframe = ">=6.0,<7" name = "hnswlib" version = "0.7.0" description = "hnswlib" -category = "main" optional = false python-versions = "*" files = [ @@ -2278,7 +2203,6 @@ numpy = "*" name = "hpack" version = "4.0.0" description = "Pure-Python HPACK header compression" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -2290,7 +2214,6 @@ files = [ name = "httpcore" version = "0.16.3" description = "A minimal 
low-level HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2302,17 +2225,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httplib2" version = "0.22.0" description = "A comprehensive HTTP client library." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2325,53 +2247,46 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 [[package]] name = "httptools" -version = "0.5.0" +version = "0.6.0" description = "A collection of framework independent HTTP protocol utils." -category = "main" optional = false python-versions = ">=3.5.0" files = [ - {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f470c79061599a126d74385623ff4744c4e0f4a0997a353a44923c0b561ee51"}, - {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e90491a4d77d0cb82e0e7a9cb35d86284c677402e4ce7ba6b448ccc7325c5421"}, - {file = "httptools-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1d2357f791b12d86faced7b5736dea9ef4f5ecdc6c3f253e445ee82da579449"}, - {file = "httptools-0.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f90cd6fd97c9a1b7fe9215e60c3bd97336742a0857f00a4cb31547bc22560c2"}, - {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5230a99e724a1bdbbf236a1b58d6e8504b912b0552721c7c6b8570925ee0ccde"}, - {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a47a34f6015dd52c9eb629c0f5a8a5193e47bf2a12d9a3194d231eaf1bc451a"}, - {file = "httptools-0.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:24bb4bb8ac3882f90aa95403a1cb48465de877e2d5298ad6ddcfdebec060787d"}, - {file 
= "httptools-0.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e67d4f8734f8054d2c4858570cc4b233bf753f56e85217de4dfb2495904cf02e"}, - {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e5eefc58d20e4c2da82c78d91b2906f1a947ef42bd668db05f4ab4201a99f49"}, - {file = "httptools-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0297822cea9f90a38df29f48e40b42ac3d48a28637368f3ec6d15eebefd182f9"}, - {file = "httptools-0.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:557be7fbf2bfa4a2ec65192c254e151684545ebab45eca5d50477d562c40f986"}, - {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:54465401dbbec9a6a42cf737627fb0f014d50dc7365a6b6cd57753f151a86ff0"}, - {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4d9ebac23d2de960726ce45f49d70eb5466725c0087a078866043dad115f850f"}, - {file = "httptools-0.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8a34e4c0ab7b1ca17b8763613783e2458e77938092c18ac919420ab8655c8c1"}, - {file = "httptools-0.5.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f659d7a48401158c59933904040085c200b4be631cb5f23a7d561fbae593ec1f"}, - {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1616b3ba965cd68e6f759eeb5d34fbf596a79e84215eeceebf34ba3f61fdc7"}, - {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3625a55886257755cb15194efbf209584754e31d336e09e2ffe0685a76cb4b60"}, - {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:72ad589ba5e4a87e1d404cc1cb1b5780bfcb16e2aec957b88ce15fe879cc08ca"}, - {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:850fec36c48df5a790aa735417dca8ce7d4b48d59b3ebd6f83e88a8125cde324"}, - {file = "httptools-0.5.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:f222e1e9d3f13b68ff8a835574eda02e67277d51631d69d7cf7f8e07df678c86"}, - {file = "httptools-0.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3cb8acf8f951363b617a8420768a9f249099b92e703c052f9a51b66342eea89b"}, - {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550059885dc9c19a072ca6d6735739d879be3b5959ec218ba3e013fd2255a11b"}, - {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04fe458a4597aa559b79c7f48fe3dceabef0f69f562daf5c5e926b153817281"}, - {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d0c1044bce274ec6711f0770fd2d5544fe392591d204c68328e60a46f88843b"}, - {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c6eeefd4435055a8ebb6c5cc36111b8591c192c56a95b45fe2af22d9881eee25"}, - {file = "httptools-0.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5b65be160adcd9de7a7e6413a4966665756e263f0d5ddeffde277ffeee0576a5"}, - {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fe9c766a0c35b7e3d6b6939393c8dfdd5da3ac5dec7f971ec9134f284c6c36d6"}, - {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:85b392aba273566c3d5596a0a490978c085b79700814fb22bfd537d381dd230c"}, - {file = "httptools-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5e3088f4ed33947e16fd865b8200f9cfae1144f41b64a8cf19b599508e096bc"}, - {file = "httptools-0.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c2a56b6aad7cc8f5551d8e04ff5a319d203f9d870398b94702300de50190f63"}, - {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b571b281a19762adb3f48a7731f6842f920fa71108aff9be49888320ac3e24d"}, - {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:aa47ffcf70ba6f7848349b8a6f9b481ee0f7637931d91a9860a1838bfc586901"}, - {file = "httptools-0.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:bede7ee075e54b9a5bde695b4fc8f569f30185891796b2e4e09e2226801d09bd"}, - {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:64eba6f168803a7469866a9c9b5263a7463fa8b7a25b35e547492aa7322036b6"}, - {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b098e4bb1174096a93f48f6193e7d9aa7071506a5877da09a783509ca5fff42"}, - {file = "httptools-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9423a2de923820c7e82e18980b937893f4aa8251c43684fa1772e341f6e06887"}, - {file = "httptools-0.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca1b7becf7d9d3ccdbb2f038f665c0f4857e08e1d8481cbcc1a86a0afcfb62b2"}, - {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:50d4613025f15f4b11f1c54bbed4761c0020f7f921b95143ad6d58c151198142"}, - {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ffce9d81c825ac1deaa13bc9694c0562e2840a48ba21cfc9f3b4c922c16f372"}, - {file = "httptools-0.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:1af91b3650ce518d226466f30bbba5b6376dbd3ddb1b2be8b0658c6799dd450b"}, - {file = "httptools-0.5.0.tar.gz", hash = "sha256:295874861c173f9101960bba332429bb77ed4dcd8cdf5cee9922eb00e4f6bc09"}, + {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:818325afee467d483bfab1647a72054246d29f9053fd17cc4b86cda09cc60339"}, + {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72205730bf1be875003692ca54a4a7c35fac77b4746008966061d9d41a61b0f5"}, + {file = "httptools-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33eb1d4e609c835966e969a31b1dedf5ba16b38cab356c2ce4f3e33ffa94cad3"}, + {file = 
"httptools-0.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdc6675ec6cb79d27e0575750ac6e2b47032742e24eed011b8db73f2da9ed40"}, + {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:463c3bc5ef64b9cf091be9ac0e0556199503f6e80456b790a917774a616aff6e"}, + {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82f228b88b0e8c6099a9c4757ce9fdbb8b45548074f8d0b1f0fc071e35655d1c"}, + {file = "httptools-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:0781fedc610293a2716bc7fa142d4c85e6776bc59d617a807ff91246a95dea35"}, + {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:721e503245d591527cddd0f6fd771d156c509e831caa7a57929b55ac91ee2b51"}, + {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:274bf20eeb41b0956e34f6a81f84d26ed57c84dd9253f13dcb7174b27ccd8aaf"}, + {file = "httptools-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:259920bbae18740a40236807915def554132ad70af5067e562f4660b62c59b90"}, + {file = "httptools-0.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfd2ae8a2d532952ac54445a2fb2504c804135ed28b53fefaf03d3a93eb1fd"}, + {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f959e4770b3fc8ee4dbc3578fd910fab9003e093f20ac8c621452c4d62e517cb"}, + {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e22896b42b95b3237eccc42278cd72c0df6f23247d886b7ded3163452481e38"}, + {file = "httptools-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:38f3cafedd6aa20ae05f81f2e616ea6f92116c8a0f8dcb79dc798df3356836e2"}, + {file = "httptools-0.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47043a6e0ea753f006a9d0dd076a8f8c99bc0ecae86a0888448eb3076c43d717"}, + {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:35a541579bed0270d1ac10245a3e71e5beeb1903b5fbbc8d8b4d4e728d48ff1d"}, + {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65d802e7b2538a9756df5acc062300c160907b02e15ed15ba035b02bce43e89c"}, + {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:26326e0a8fe56829f3af483200d914a7cd16d8d398d14e36888b56de30bec81a"}, + {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e41ccac9e77cd045f3e4ee0fc62cbf3d54d7d4b375431eb855561f26ee7a9ec4"}, + {file = "httptools-0.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e748fc0d5c4a629988ef50ac1aef99dfb5e8996583a73a717fc2cac4ab89932"}, + {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cf8169e839a0d740f3d3c9c4fa630ac1a5aaf81641a34575ca6773ed7ce041a1"}, + {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5dcc14c090ab57b35908d4a4585ec5c0715439df07be2913405991dbb37e049d"}, + {file = "httptools-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0b0571806a5168013b8c3d180d9f9d6997365a4212cb18ea20df18b938aa0b"}, + {file = "httptools-0.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb4a608c631f7dcbdf986f40af7a030521a10ba6bc3d36b28c1dc9e9035a3c0"}, + {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93f89975465133619aea8b1952bc6fa0e6bad22a447c6d982fc338fbb4c89649"}, + {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:73e9d66a5a28b2d5d9fbd9e197a31edd02be310186db423b28e6052472dc8201"}, + {file = "httptools-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:22c01fcd53648162730a71c42842f73b50f989daae36534c818b3f5050b54589"}, + {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f96d2a351b5625a9fd9133c95744e8ca06f7a4f8f0b8231e4bbaae2c485046a"}, + {file = 
"httptools-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72ec7c70bd9f95ef1083d14a755f321d181f046ca685b6358676737a5fecd26a"}, + {file = "httptools-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b703d15dbe082cc23266bf5d9448e764c7cb3fcfe7cb358d79d3fd8248673ef9"}, + {file = "httptools-0.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82c723ed5982f8ead00f8e7605c53e55ffe47c47465d878305ebe0082b6a1755"}, + {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b0a816bb425c116a160fbc6f34cece097fd22ece15059d68932af686520966bd"}, + {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dea66d94e5a3f68c5e9d86e0894653b87d952e624845e0b0e3ad1c733c6cc75d"}, + {file = "httptools-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:23b09537086a5a611fad5696fc8963d67c7e7f98cb329d38ee114d588b0b74cd"}, + {file = "httptools-0.6.0.tar.gz", hash = "sha256:9fc6e409ad38cbd68b177cd5158fc4042c796b82ca88d99ec78f07bed6c6b796"}, ] [package.extras] @@ -2381,7 +2296,6 @@ test = ["Cython (>=0.29.24,<0.30.0)"] name = "httpx" version = "0.23.3" description = "The next generation HTTP client." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2398,15 +2312,14 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" version = "0.15.1" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2438,7 +2351,6 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t name = "humanfriendly" version = "10.0" description = "Human friendly output for text interfaces using Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2453,7 +2365,6 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve name = "hyperframe" version = "6.0.1" description = "HTTP/2 framing layer for Python" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -2465,7 +2376,6 @@ files = [ name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2477,7 +2387,6 @@ files = [ name = "importlib-metadata" version = "6.0.1" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2495,28 +2404,26 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag [[package]] name = "importlib-resources" -version = "5.12.0" +version = "6.0.0" description = "Read resources from Python packages" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = 
"importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, - {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, + {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = "sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"}, + {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2528,7 +2435,6 @@ files = [ name = "invoke" version = "1.7.3" description = "Pythonic task execution" -category = "main" optional = false python-versions = "*" files = [ @@ -2540,7 +2446,6 @@ files = [ name = "ipykernel" version = "6.24.0" description = "IPython Kernel for Jupyter" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2554,7 +2459,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" 
nest-asyncio = "*" packaging = "*" @@ -2574,7 +2479,6 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.14.0" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -2612,28 +2516,26 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa [[package]] name = "jaraco-classes" -version = "3.2.3" +version = "3.3.0" description = "Utility functions for Python class constructs" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jaraco.classes-3.2.3-py3-none-any.whl", hash = "sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158"}, - {file = "jaraco.classes-3.2.3.tar.gz", hash = "sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a"}, + {file = "jaraco.classes-3.3.0-py3-none-any.whl", hash = "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb"}, + {file = "jaraco.classes-3.3.0.tar.gz", hash = "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"}, ] [package.dependencies] more-itertools = "*" [package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [[package]] name = "jcloud" version = "0.2.12" description = "Simplify deploying and managing Jina projects on Jina Cloud" -category = "main" optional = false python-versions = "*" files = [ @@ -2656,7 
+2558,6 @@ test = ["black (==22.3.0)", "jina (>=3.7.0)", "mock", "pytest", "pytest-asyncio" name = "jedi" version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2676,7 +2577,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2692,7 +2592,6 @@ trio = ["async_generator", "trio"] name = "jina" version = "3.15.2" description = "Build multimodal AI services via cloud native technologies · Neural Search · Generative AI · MLOps" -category = "main" optional = false python-versions = "*" files = [ @@ -2810,7 +2709,6 @@ websockets = ["websockets"] name = "jina-hubble-sdk" version = "0.39.0" description = "SDK for Hubble API at Jina AI." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2836,7 +2734,6 @@ full = ["aiohttp", "black (==22.3.0)", "docker", "filelock", "flake8 (==4.0.1)", name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2854,7 +2751,6 @@ i18n = ["Babel (>=2.7)"] name = "joblib" version = "1.3.1" description = "Lightweight pipelining with Python functions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2866,7 +2762,6 @@ files = [ name = "jupyter-client" version = "8.3.0" description = "Jupyter protocol implementation and client libraries" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2876,7 +2771,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -2890,7 +2785,6 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-core" version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2911,7 +2805,6 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "keyring" version = "24.2.0" description = "Store and access your passwords safely." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2935,7 +2828,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "langchain" version = "0.0.219" description = "Building applications with LLMs through composability" -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -2973,18 +2865,17 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] [[package]] name = "langchain-serve" -version = "0.0.52" +version = "0.0.54" description = "Langchain Serve - serve your langchain apps on Jina AI Cloud." 
-category = "main" optional = true python-versions = "*" files = [ - {file = "langchain-serve-0.0.52.tar.gz", hash = "sha256:e69dcf6022423279059ab7ebda025e252aba4d9fd8e3e49776300355dbf85d1c"}, + {file = "langchain-serve-0.0.54.tar.gz", hash = "sha256:5cbc980886c81f3bac7ed3337adeb0b94fc9f3645e4501dd7f0702f90766bbaa"}, ] [package.dependencies] click = "*" -jcloud = ">=0.2.8" +jcloud = ">=0.2.8,<=0.2.12" jina = "3.15.2" jina-hubble-sdk = "*" langchain = "*" @@ -3001,7 +2892,6 @@ test = ["psutil", "pytest", "pytest-asyncio"] name = "langchainplus-sdk" version = "0.0.20" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -3018,7 +2908,6 @@ tenacity = ">=8.1.0,<9.0.0" name = "linkify-it-py" version = "2.0.2" description = "Links recognition library with FULL unicode support." -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -3037,23 +2926,26 @@ test = ["coverage", "pytest", "pytest-cov"] [[package]] name = "llama-cpp-python" -version = "0.1.55" +version = "0.1.70" description = "A Python wrapper for llama.cpp" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "llama_cpp_python-0.1.55.tar.gz", hash = "sha256:1bc749f314a979c601b2dae22eb1f2d63fe791bc1237cce24d36b4f856be8ca2"}, + {file = "llama_cpp_python-0.1.70.tar.gz", hash = "sha256:616ea7ad87417eba9c76d6ffe060b855af39ab7e795032dcf19fc49a7e73806b"}, ] [package.dependencies] -typing-extensions = ">=4.5.0,<5.0.0" +diskcache = ">=5.6.1" +numpy = ">=1.20.0" +typing-extensions = ">=4.5.0" + +[package.extras] +server = ["fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", "sse-starlette (>=1.6.1)", "uvicorn (>=0.22.1)"] [[package]] name = "loguru" version = "0.7.0" description = "Python logging made (stupidly) simple" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3072,17 +2964,19 @@ dev = ["Sphinx (==5.3.0)", 
"colorama (==0.4.5)", "colorama (==0.4.6)", "freezegu name = "lxml" version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, 
{file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, @@ -3091,6 +2985,7 @@ files = [ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, @@ -3110,6 +3005,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = 
"lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, @@ -3119,6 +3015,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, @@ -3128,6 +3025,7 @@ files = [ {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, {file = 
"lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, @@ -3137,6 +3035,7 @@ files = [ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, {file = 
"lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, @@ -3147,13 +3046,16 @@ files = [ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, {file = 
"lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, @@ -3169,7 +3071,6 @@ source = ["Cython (>=0.29.35)"] name = "lz4" version = "4.3.2" description = "LZ4 Bindings for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3219,7 +3120,6 @@ tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] name = "markdown" version = "3.4.3" description = "Python implementation of John Gruber's Markdown." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3237,7 +3137,6 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3264,7 +3163,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3324,7 +3222,6 @@ files = [ name = "marshmallow" version = "3.19.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3345,7 +3242,6 @@ tests = ["pytest", "pytz", "simplejson"] name = "marshmallow-enum" version = "1.5.1" description = "Enum field for Marshmallow" -category = "main" optional = false python-versions = "*" files = [ @@ -3360,7 +3256,6 @@ marshmallow = ">=2.0.0" name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -3375,7 +3270,6 @@ traitlets = "*" name = "mdit-py-plugins" version = "0.4.0" description = "Collection of plugins for markdown-it-py" -category = "main" optional = true python-versions = ">=3.8" files = [ @@ -3395,7 +3289,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3407,7 +3300,6 @@ files = [ name = "monotonic" version = "1.6" description = "An implementation of time.monotonic() for Python 2 & < 3.3" -category = "main" optional = false python-versions = "*" files = [ @@ -3419,7 +3311,6 @@ files = [ name = "more-itertools" version = "9.1.0" description = "More routines for operating on iterables, beyond itertools" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3431,7 +3322,6 @@ files = [ name = "mpmath" version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" -category = "main" optional = false python-versions = "*" files = [ @@ -3449,7 +3339,6 @@ tests = ["pytest (>=4.6)"] name = "msg-parser" version = "1.2.0" description = "This module enables reading, parsing and converting Microsoft Outlook MSG E-Mail files." 
-category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3467,7 +3356,6 @@ rtf = ["compressed-rtf (>=1.0.5)"] name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3551,7 +3439,6 @@ files = [ name = "multiprocess" version = "0.70.14" description = "better multiprocessing and multithreading in python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3578,7 +3465,6 @@ dill = ">=0.3.6" name = "mypy" version = "1.4.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3625,7 +3511,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3637,7 +3522,6 @@ files = [ name = "nest-asyncio" version = "1.5.6" description = "Patch asyncio to allow nested event loops" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3649,7 +3533,6 @@ files = [ name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3668,7 +3551,6 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nltk" version = "3.8.1" description = "Natural Language Toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3694,7 +3576,6 @@ twitter = ["twython"] name = "numexpr" version = "2.8.4" description = "Fast numerical expression evaluator for NumPy" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3735,44 +3616,42 @@ numpy = ">=1.13.3" [[package]] name = "numpy" -version = "1.25.0" +version = "1.25.1" description = "Fundamental package for array computing in Python" -category = "main" 
optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.25.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8aa130c3042052d656751df5e81f6d61edff3e289b5994edcf77f54118a8d9f4"}, - {file = "numpy-1.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e3f2b96e3b63c978bc29daaa3700c028fe3f049ea3031b58aa33fe2a5809d24"}, - {file = "numpy-1.25.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6b267f349a99d3908b56645eebf340cb58f01bd1e773b4eea1a905b3f0e4208"}, - {file = "numpy-1.25.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aedd08f15d3045a4e9c648f1e04daca2ab1044256959f1f95aafeeb3d794c16"}, - {file = "numpy-1.25.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6d183b5c58513f74225c376643234c369468e02947b47942eacbb23c1671f25d"}, - {file = "numpy-1.25.0-cp310-cp310-win32.whl", hash = "sha256:d76a84998c51b8b68b40448ddd02bd1081bb33abcdc28beee6cd284fe11036c6"}, - {file = "numpy-1.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0dc071017bc00abb7d7201bac06fa80333c6314477b3d10b52b58fa6a6e38f6"}, - {file = "numpy-1.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c69fe5f05eea336b7a740e114dec995e2f927003c30702d896892403df6dbf0"}, - {file = "numpy-1.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c7211d7920b97aeca7b3773a6783492b5b93baba39e7c36054f6e749fc7490c"}, - {file = "numpy-1.25.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecc68f11404930e9c7ecfc937aa423e1e50158317bf67ca91736a9864eae0232"}, - {file = "numpy-1.25.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e559c6afbca484072a98a51b6fa466aae785cfe89b69e8b856c3191bc8872a82"}, - {file = "numpy-1.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6c284907e37f5e04d2412950960894b143a648dea3f79290757eb878b91acbd1"}, - {file = "numpy-1.25.0-cp311-cp311-win32.whl", hash = "sha256:95367ccd88c07af21b379be1725b5322362bb83679d36691f124a16357390153"}, - {file 
= "numpy-1.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:b76aa836a952059d70a2788a2d98cb2a533ccd46222558b6970348939e55fc24"}, - {file = "numpy-1.25.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b792164e539d99d93e4e5e09ae10f8cbe5466de7d759fc155e075237e0c274e4"}, - {file = "numpy-1.25.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7cd981ccc0afe49b9883f14761bb57c964df71124dcd155b0cba2b591f0d64b9"}, - {file = "numpy-1.25.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa48bebfb41f93043a796128854b84407d4df730d3fb6e5dc36402f5cd594c0"}, - {file = "numpy-1.25.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5177310ac2e63d6603f659fadc1e7bab33dd5a8db4e0596df34214eeab0fee3b"}, - {file = "numpy-1.25.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0ac6edfb35d2a99aaf102b509c8e9319c499ebd4978df4971b94419a116d0790"}, - {file = "numpy-1.25.0-cp39-cp39-win32.whl", hash = "sha256:7412125b4f18aeddca2ecd7219ea2d2708f697943e6f624be41aa5f8a9852cc4"}, - {file = "numpy-1.25.0-cp39-cp39-win_amd64.whl", hash = "sha256:26815c6c8498dc49d81faa76d61078c4f9f0859ce7817919021b9eba72b425e3"}, - {file = "numpy-1.25.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b1b90860bf7d8a8c313b372d4f27343a54f415b20fb69dd601b7efe1029c91e"}, - {file = "numpy-1.25.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85cdae87d8c136fd4da4dad1e48064d700f63e923d5af6c8c782ac0df8044542"}, - {file = "numpy-1.25.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cc3fda2b36482891db1060f00f881c77f9423eead4c3579629940a3e12095fe8"}, - {file = "numpy-1.25.0.tar.gz", hash = "sha256:f1accae9a28dc3cda46a91de86acf69de0d1b5f4edd44a9b0c3ceb8036dfff19"}, + {file = "numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"}, + {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"}, + {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"}, + {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"}, + {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"}, + {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"}, + {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"}, + {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"}, + {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"}, + {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"}, + {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"}, + {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"}, + {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"}, + {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"}, + {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"}, + {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"}, + {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"}, + {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"}, + {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"}, + {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"}, + {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"}, + {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"}, ] [[package]] name = "olefile" version = "0.46" description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3783,7 +3662,6 @@ files = [ name = "onnxruntime" version = "1.15.1" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -category = 
"main" optional = false python-versions = "*" files = [ @@ -3825,7 +3703,6 @@ sympy = "*" name = "openai" version = "0.27.8" description = "Python client library for the OpenAI API" -category = "main" optional = false python-versions = ">=3.7.1" files = [ @@ -3840,7 +3717,7 @@ tqdm = "*" [package.extras] datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] @@ -3848,7 +3725,6 @@ wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1 name = "openapi-schema-pydantic" version = "1.2.4" description = "OpenAPI (v3) specification schema as pydantic class" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -3863,7 +3739,6 @@ pydantic = ">=1.8.2" name = "openpyxl" version = "3.1.2" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3878,7 +3753,6 @@ et-xmlfile = "*" name = "opentelemetry-api" version = "1.18.0" description = "OpenTelemetry Python API" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3895,7 +3769,6 @@ setuptools = ">=16.0" name = "opentelemetry-exporter-otlp" version = "1.18.0" description = "OpenTelemetry Collector Exporters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3911,7 +3784,6 @@ opentelemetry-exporter-otlp-proto-http = "1.18.0" name = "opentelemetry-exporter-otlp-proto-common" version = "1.18.0" description = "OpenTelemetry Protobuf encoding" -category = "main" 
optional = false python-versions = ">=3.7" files = [ @@ -3926,7 +3798,6 @@ opentelemetry-proto = "1.18.0" name = "opentelemetry-exporter-otlp-proto-grpc" version = "1.18.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3951,7 +3822,6 @@ test = ["pytest-grpc"] name = "opentelemetry-exporter-otlp-proto-http" version = "1.18.0" description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3976,7 +3846,6 @@ test = ["responses (==0.22.0)"] name = "opentelemetry-exporter-prometheus" version = "1.12.0rc1" description = "Prometheus Metric Exporter for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3993,7 +3862,6 @@ prometheus-client = ">=0.5.0,<1.0.0" name = "opentelemetry-instrumentation" version = "0.39b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4010,7 +3878,6 @@ wrapt = ">=1.0.0,<2.0.0" name = "opentelemetry-instrumentation-aiohttp-client" version = "0.39b0" description = "OpenTelemetry aiohttp client instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4033,7 +3900,6 @@ test = ["opentelemetry-instrumentation-aiohttp-client[instruments]"] name = "opentelemetry-instrumentation-asgi" version = "0.39b0" description = "ASGI instrumentation for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4056,7 +3922,6 @@ test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-u name = "opentelemetry-instrumentation-fastapi" version = "0.39b0" description = "OpenTelemetry FastAPI Instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4079,7 +3944,6 @@ test = ["httpx (>=0.22,<1.0)", 
"opentelemetry-instrumentation-fastapi[instrument name = "opentelemetry-instrumentation-grpc" version = "0.39b0" description = "OpenTelemetry gRPC instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4102,7 +3966,6 @@ test = ["opentelemetry-instrumentation-grpc[instruments]", "opentelemetry-sdk (> name = "opentelemetry-proto" version = "1.18.0" description = "OpenTelemetry Python Proto" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4117,7 +3980,6 @@ protobuf = ">=3.19,<5.0" name = "opentelemetry-sdk" version = "1.18.0" description = "OpenTelemetry Python SDK" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4135,7 +3997,6 @@ typing-extensions = ">=3.7.4" name = "opentelemetry-semantic-conventions" version = "0.39b0" description = "OpenTelemetry Semantic Conventions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4147,7 +4008,6 @@ files = [ name = "opentelemetry-util-http" version = "0.39b0" description = "Web util for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4157,65 +4017,63 @@ files = [ [[package]] name = "orjson" -version = "3.9.1" +version = "3.9.2" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "orjson-3.9.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4434b7b786fdc394b95d029fb99949d7c2b05bbd4bf5cb5e3906be96ffeee3b"}, - {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09faf14f74ed47e773fa56833be118e04aa534956f661eb491522970b7478e3b"}, - {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:503eb86a8d53a187fe66aa80c69295a3ca35475804da89a9547e4fce5f803822"}, - {file = 
"orjson-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20f2804b5a1dbd3609c086041bd243519224d47716efd7429db6c03ed28b7cc3"}, - {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fd828e0656615a711c4cc4da70f3cac142e66a6703ba876c20156a14e28e3fa"}, - {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec53d648176f873203b9c700a0abacab33ca1ab595066e9d616f98cdc56f4434"}, - {file = "orjson-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e186ae76b0d97c505500664193ddf508c13c1e675d9b25f1f4414a7606100da6"}, - {file = "orjson-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d4edee78503016f4df30aeede0d999b3cb11fb56f47e9db0e487bce0aaca9285"}, - {file = "orjson-3.9.1-cp310-none-win_amd64.whl", hash = "sha256:a4cc5d21e68af982d9a2528ac61e604f092c60eed27aef3324969c68f182ec7e"}, - {file = "orjson-3.9.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:761b6efd33c49de20dd73ce64cc59da62c0dab10aa6015f582680e0663cc792c"}, - {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31229f9d0b8dc2ef7ee7e4393f2e4433a28e16582d4b25afbfccc9d68dc768f8"}, - {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b7ab18d55ecb1de543d452f0a5f8094b52282b916aa4097ac11a4c79f317b86"}, - {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db774344c39041f4801c7dfe03483df9203cbd6c84e601a65908e5552228dd25"}, - {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae47ef8c0fe89c4677db7e9e1fb2093ca6e66c3acbee5442d84d74e727edad5e"}, - {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:103952c21575b9805803c98add2eaecd005580a1e746292ed2ec0d76dd3b9746"}, - {file = 
"orjson-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2cb0121e6f2c9da3eddf049b99b95fef0adf8480ea7cb544ce858706cdf916eb"}, - {file = "orjson-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:24d4ddaa2876e657c0fd32902b5c451fd2afc35159d66a58da7837357044b8c2"}, - {file = "orjson-3.9.1-cp311-none-win_amd64.whl", hash = "sha256:0b53b5f72cf536dd8aa4fc4c95e7e09a7adb119f8ff8ee6cc60f735d7740ad6a"}, - {file = "orjson-3.9.1-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4b68d01a506242316a07f1d2f29fb0a8b36cee30a7c35076f1ef59dce0890c1"}, - {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9dd4abe6c6fd352f00f4246d85228f6a9847d0cc14f4d54ee553718c225388f"}, - {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e20bca5e13041e31ceba7a09bf142e6d63c8a7467f5a9c974f8c13377c75af2"}, - {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8ae0467d01eb1e4bcffef4486d964bfd1c2e608103e75f7074ed34be5df48cc"}, - {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:06f6ab4697fab090517f295915318763a97a12ee8186054adf21c1e6f6abbd3d"}, - {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8515867713301fa065c58ec4c9053ba1a22c35113ab4acad555317b8fd802e50"}, - {file = "orjson-3.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:393d0697d1dfa18d27d193e980c04fdfb672c87f7765b87952f550521e21b627"}, - {file = "orjson-3.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d96747662d3666f79119e5d28c124e7d356c7dc195cd4b09faea4031c9079dc9"}, - {file = "orjson-3.9.1-cp37-none-win_amd64.whl", hash = "sha256:6d173d3921dd58a068c88ec22baea7dbc87a137411501618b1292a9d6252318e"}, - {file = "orjson-3.9.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:d1c2b0b4246c992ce2529fc610a446b945f1429445ece1c1f826a234c829a918"}, - {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19f70ba1f441e1c4bb1a581f0baa092e8b3e3ce5b2aac2e1e090f0ac097966da"}, - {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:375d65f002e686212aac42680aed044872c45ee4bc656cf63d4a215137a6124a"}, - {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4751cee4a7b1daeacb90a7f5adf2170ccab893c3ab7c5cea58b45a13f89b30b3"}, - {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d9a2a4b2302d5ebc3695498ebc305c3568e5ad4f3501eb30a6405a32d8af22"}, - {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46b4facc32643b2689dfc292c0c463985dac4b6ab504799cf51fc3c6959ed668"}, - {file = "orjson-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec7c8a0f1bf35da0d5fd14f8956f3b82a9a6918a3c6963d718dfd414d6d3b604"}, - {file = "orjson-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3a40b0fbe06ccd4d6a99e523d20b47985655bcada8d1eba485b1b32a43e4904"}, - {file = "orjson-3.9.1-cp38-none-win_amd64.whl", hash = "sha256:402f9d3edfec4560a98880224ec10eba4c5f7b4791e4bc0d4f4d8df5faf2a006"}, - {file = "orjson-3.9.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:49c0d78dcd34626e2e934f1192d7c052b94e0ecadc5f386fd2bda6d2e03dadf5"}, - {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:125f63e56d38393daa0a1a6dc6fedefca16c538614b66ea5997c3bd3af35ef26"}, - {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08927970365d2e1f3ce4894f9ff928a7b865d53f26768f1bbdd85dd4fee3e966"}, - {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f9a744e212d4780ecd67f4b6b128b2e727bee1df03e7059cddb2dfe1083e7dc4"}, - {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1dbf36db7240c61eec98c8d21545d671bce70be0730deb2c0d772e06b71af3"}, - {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a1e384626f76b66df615f7bb622a79a25c166d08c5d2151ffd41f24c4cc104"}, - {file = "orjson-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:15d28872fb055bf17ffca913826e618af61b2f689d2b170f72ecae1a86f80d52"}, - {file = "orjson-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e4d905338f9ef32c67566929dfbfbb23cc80287af8a2c38930fb0eda3d40b76"}, - {file = "orjson-3.9.1-cp39-none-win_amd64.whl", hash = "sha256:48a27da6c7306965846565cc385611d03382bbd84120008653aa2f6741e2105d"}, - {file = "orjson-3.9.1.tar.gz", hash = "sha256:db373a25ec4a4fccf8186f9a72a1b3442837e40807a736a815ab42481e83b7d0"}, + {file = "orjson-3.9.2-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7323e4ca8322b1ecb87562f1ec2491831c086d9faa9a6c6503f489dadbed37d7"}, + {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1272688ea1865f711b01ba479dea2d53e037ea00892fd04196b5875f7021d9d3"}, + {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b9a26f1d1427a9101a1e8910f2e2df1f44d3d18ad5480ba031b15d5c1cb282e"}, + {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a5ca55b0d8f25f18b471e34abaee4b175924b6cd62f59992945b25963443141"}, + {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:877872db2c0f41fbe21f852ff642ca842a43bc34895b70f71c9d575df31fffb4"}, + {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a39c2529d75373b7167bf84c814ef9b8f3737a339c225ed6c0df40736df8748"}, + {file = 
"orjson-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:84ebd6fdf138eb0eb4280045442331ee71c0aab5e16397ba6645f32f911bfb37"}, + {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a60a1cfcfe310547a1946506dd4f1ed0a7d5bd5b02c8697d9d5dcd8d2e9245e"}, + {file = "orjson-3.9.2-cp310-none-win_amd64.whl", hash = "sha256:c290c4f81e8fd0c1683638802c11610b2f722b540f8e5e858b6914b495cf90c8"}, + {file = "orjson-3.9.2-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:02ef014f9a605e84b675060785e37ec9c0d2347a04f1307a9d6840ab8ecd6f55"}, + {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:992af54265ada1c1579500d6594ed73fe333e726de70d64919cf37f93defdd06"}, + {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a40958f7af7c6d992ee67b2da4098dca8b770fc3b4b3834d540477788bfa76d3"}, + {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93864dec3e3dd058a2dbe488d11ac0345214a6a12697f53a63e34de7d28d4257"}, + {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16fdf5a82df80c544c3c91516ab3882cd1ac4f1f84eefeafa642e05cef5f6699"}, + {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275b5a18fd9ed60b2720543d3ddac170051c43d680e47d04ff5203d2c6d8ebf1"}, + {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b9aea6dcb99fcbc9f6d1dd84fca92322fda261da7fb014514bb4689c7c2097a8"}, + {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d74ae0e101d17c22ef67b741ba356ab896fc0fa64b301c2bf2bb0a4d874b190"}, + {file = "orjson-3.9.2-cp311-none-win_amd64.whl", hash = "sha256:6320b28e7bdb58c3a3a5efffe04b9edad3318d82409e84670a9b24e8035a249d"}, + {file = "orjson-3.9.2-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:368e9cc91ecb7ac21f2aa475e1901204110cf3e714e98649c2502227d248f947"}, + {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58e9e70f0dcd6a802c35887f306b555ff7a214840aad7de24901fc8bd9cf5dde"}, + {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00c983896c2e01c94c0ef72fd7373b2aa06d0c0eed0342c4884559f812a6835b"}, + {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ee743e8890b16c87a2f89733f983370672272b61ee77429c0a5899b2c98c1a7"}, + {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7b065942d362aad4818ff599d2f104c35a565c2cbcbab8c09ec49edba91da75"}, + {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e46e9c5b404bb9e41d5555762fd410d5466b7eb1ec170ad1b1609cbebe71df21"}, + {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8170157288714678ffd64f5de33039e1164a73fd8b6be40a8a273f80093f5c4f"}, + {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e3e2f087161947dafe8319ea2cfcb9cea4bb9d2172ecc60ac3c9738f72ef2909"}, + {file = "orjson-3.9.2-cp37-none-win_amd64.whl", hash = "sha256:d7de3dbbe74109ae598692113cec327fd30c5a30ebca819b21dfa4052f7b08ef"}, + {file = "orjson-3.9.2-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8cd4385c59bbc1433cad4a80aca65d2d9039646a9c57f8084897549b55913b17"}, + {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74036aab1a80c361039290cdbc51aa7adc7ea13f56e5ef94e9be536abd227bd"}, + {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1aaa46d7d4ae55335f635eadc9be0bd9bcf742e6757209fc6dc697e390010adc"}, + {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2e52c67ed6bb368083aa2078ea3ccbd9721920b93d4b06c43eb4e20c4c860046"}, + {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a6cdfcf9c7dd4026b2b01fdff56986251dc0cc1e980c690c79eec3ae07b36e7"}, + {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1882a70bb69595b9ec5aac0040a819e94d2833fe54901e2b32f5e734bc259a8b"}, + {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc05e060d452145ab3c0b5420769e7356050ea311fc03cb9d79c481982917cca"}, + {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f8bc2c40d9bb26efefb10949d261a47ca196772c308babc538dd9f4b73e8d386"}, + {file = "orjson-3.9.2-cp38-none-win_amd64.whl", hash = "sha256:3164fc20a585ec30a9aff33ad5de3b20ce85702b2b2a456852c413e3f0d7ab09"}, + {file = "orjson-3.9.2-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7a6ccadf788531595ed4728aa746bc271955448d2460ff0ef8e21eb3f2a281ba"}, + {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3245d230370f571c945f69aab823c279a868dc877352817e22e551de155cb06c"}, + {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:205925b179550a4ee39b8418dd4c94ad6b777d165d7d22614771c771d44f57bd"}, + {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0325fe2d69512187761f7368c8cda1959bcb75fc56b8e7a884e9569112320e57"}, + {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:806704cd58708acc66a064a9a58e3be25cf1c3f9f159e8757bd3f515bfabdfa1"}, + {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03fb36f187a0c19ff38f6289418863df8b9b7880cdbe279e920bef3a09d8dab1"}, + {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20925d07a97c49c6305bff1635318d9fc1804aa4ccacb5fb0deb8a910e57d97a"}, + {file = 
"orjson-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eebfed53bec5674e981ebe8ed2cf00b3f7bcda62d634733ff779c264307ea505"}, + {file = "orjson-3.9.2-cp39-none-win_amd64.whl", hash = "sha256:869b961df5fcedf6c79f4096119b35679b63272362e9b745e668f0391a892d39"}, + {file = "orjson-3.9.2.tar.gz", hash = "sha256:24257c8f641979bf25ecd3e27251b5cc194cdd3a6e96004aac8446f5e63d9664"}, ] [[package]] name = "overrides" version = "7.3.1" description = "A decorator to automatically detect mismatch when overriding a method." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4227,7 +4085,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4239,7 +4096,6 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4306,7 +4162,6 @@ xml = ["lxml (>=4.6.3)"] name = "pandas-stubs" version = "2.0.2.230605" description = "Type annotations for pandas" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4322,7 +4177,6 @@ types-pytz = ">=2022.1.1" name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4338,7 +4192,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4350,7 +4203,6 @@ files = [ name = "pdf2image" version = "1.16.3" description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4365,7 +4217,6 @@ pillow = "*" name = "pdfminer-six" version = "20221105" description = "PDF parser and analyzer" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4386,7 +4237,6 @@ image = ["Pillow"] name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." -category = "dev" optional = false python-versions = "*" files = [ @@ -4401,7 +4251,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -4413,7 +4262,6 @@ files = [ name = "pillow" version = "10.0.0" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4435,6 +4283,7 @@ files = [ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, + {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, @@ -4444,6 +4293,7 @@ files = [ {file = 
"Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, + {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, @@ -4481,7 +4331,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pinecone-client" version = "2.2.2" description = "Pinecone client and SDK" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4507,7 +4356,6 @@ grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv name = "pkginfo" version = "1.9.6" description = "Query metadata from sdists / bdists / installed packages." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4520,14 +4368,13 @@ testing = ["pytest", "pytest-cov"] [[package]] name = "platformdirs" -version = "3.8.0" +version = "3.8.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, - {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, + {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, + {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, ] [package.extras] @@ -4538,7 +4385,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4554,7 +4400,6 @@ testing = ["pytest", "pytest-benchmark"] name = "portalocker" version = "2.7.0" description = "Wraps the portalocker recipe for easy usage" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4574,7 +4419,6 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p name = "postgrest" version = "0.10.6" description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -4592,7 +4436,6 @@ strenum = ">=0.4.9,<0.5.0" name = "posthog" version = "3.0.1" description = "Integrate PostHog into any python application." -category = "main" optional = false python-versions = "*" files = [ @@ -4614,14 +4457,13 @@ test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint" [[package]] name = "prometheus-client" -version = "0.17.0" +version = "0.17.1" description = "Python client for the Prometheus monitoring system." 
-category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "prometheus_client-0.17.0-py3-none-any.whl", hash = "sha256:a77b708cf083f4d1a3fb3ce5c95b4afa32b9c521ae363354a4a910204ea095ce"}, - {file = "prometheus_client-0.17.0.tar.gz", hash = "sha256:9c3b26f1535945e85b8934fb374678d263137b78ef85f305b1156c7c881cd11b"}, + {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, + {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, ] [package.extras] @@ -4631,7 +4473,6 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -4646,7 +4487,6 @@ wcwidth = "*" name = "proto-plus" version = "1.22.3" description = "Beautiful, Pythonic protocol buffers." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4664,7 +4504,6 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] name = "protobuf" version = "3.20.3" description = "Protocol Buffers" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4696,7 +4535,6 @@ files = [ name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4723,7 +4561,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg" version = "3.1.9" description = "PostgreSQL database adapter for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4747,7 +4584,6 @@ test = ["anyio (>=3.6.2)", "mypy (>=1.2)", "pproxy (>=2.7)", "pytest (>=6.2.5)", name = "psycopg-binary" version = "3.1.9" description = "PostgreSQL database adapter for Python -- C optimisation distribution" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4811,7 +4647,6 @@ files = [ name = "psycopg2-binary" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4883,7 +4718,6 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -4895,7 +4729,6 @@ files = [ name = "pulsar-client" version = "3.2.0" description = "Apache Pulsar Python client library" -category = "main" optional = false python-versions = "*" files = [ @@ -4943,7 +4776,6 @@ functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.8.2)", "prometh name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -4958,7 +4790,6 @@ tests = ["pytest"] name = "pyarrow" version = "12.0.1" description = "Python library for Apache Arrow" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4996,7 +4827,6 @@ numpy = ">=1.16.6" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ 
-5008,7 +4838,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -5023,7 +4852,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -5035,7 +4863,6 @@ files = [ name = "pydantic" version = "1.10.11" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5088,7 +4915,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5103,7 +4929,6 @@ plugins = ["importlib-metadata"] name = "pymongo" version = "4.4.0" description = "Python driver for MongoDB " -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5198,7 +5023,6 @@ zstd = ["zstandard"] name = "pypandoc" version = "1.11" description = "Thin wrapper for pandoc." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5210,7 +5034,6 @@ files = [ name = "pyparsing" version = "3.1.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -5223,14 +5046,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pypdf" -version = "3.12.0" +version = "3.12.1" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "pypdf-3.12.0-py3-none-any.whl", hash = "sha256:826ad4681660394d7a5742fe8380168cf13058e27b826b7f5b798e994cb77b38"}, - {file = "pypdf-3.12.0.tar.gz", hash = "sha256:cebac920db0698369f49c389018858a5436862bf3c45b64b10c55c008878db95"}, + {file = "pypdf-3.12.1-py3-none-any.whl", hash = "sha256:74aa287c83e9aad2ce4a3627458dad729e39b5deae52175fe9f97bfffdde41bc"}, + {file = "pypdf-3.12.1.tar.gz", hash = "sha256:68bf9e089caaab356518410168df9ed90f0a6109e29adac168449d4054fa0094"}, ] [package.dependencies] @@ -5247,7 +5069,6 @@ image = ["Pillow"] name = "pyreadline3" version = "3.4.1" description = "A python implementation of GNU readline." -category = "main" optional = false python-versions = "*" files = [ @@ -5259,7 +5080,6 @@ files = [ name = "pysrt" version = "1.1.2" description = "SubRip (.srt) subtitle parser and writer" -category = "main" optional = false python-versions = "*" files = [ @@ -5273,7 +5093,6 @@ chardet = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5296,7 +5115,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5315,7 +5133,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -5330,7 +5147,6 @@ six = ">=1.5" name = "python-docx" version = "0.8.11" description = "Create and update Microsoft Word .docx files." -category = "main" optional = false python-versions = "*" files = [ @@ -5344,7 +5160,6 @@ lxml = ">=2.3.2" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5359,7 +5174,6 @@ cli = ["click (>=5.0)"] name = "python-gitlab" version = "3.15.0" description = "Interact with GitLab API" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -5379,7 +5193,6 @@ yaml = ["PyYaml (>=5.2)"] name = "python-jose" version = "3.3.0" description = "JOSE implementation in Python" -category = "main" optional = false python-versions = "*" files = [ @@ -5401,7 +5214,6 @@ pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] name = "python-magic" version = "0.4.27" description = "File type identification using libmagic" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -5413,7 +5225,6 @@ files = [ name = "python-multipart" version = "0.0.6" description = "A streaming multipart parser for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5428,7 +5239,6 @@ dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatc name = "python-pptx" version = "0.6.21" description = "Generate and manipulate Open XML PowerPoint (.pptx) files" -category = "main" optional = false 
python-versions = "*" files = [ @@ -5444,7 +5254,6 @@ XlsxWriter = ">=0.5.7" name = "python-semantic-release" version = "7.33.2" description = "Automatic Semantic Versioning for Python projects" -category = "main" optional = false python-versions = "*" files = [ @@ -5476,7 +5285,6 @@ test = ["coverage (>=5,<6)", "mock (==1.3.0)", "pytest (>=7,<8)", "pytest-mock ( name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -5488,7 +5296,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -5512,7 +5319,6 @@ files = [ name = "pywin32-ctypes" version = "0.2.2" description = "A (partial) reimplementation of pywin32 using ctypes/cffi" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5524,7 +5330,6 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5574,7 +5379,6 @@ files = [ name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -5664,7 +5468,6 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "qdrant-client" version = "1.3.1" description = "Client library for the Qdrant vector search engine" -category = "main" optional = false python-versions = ">=3.7,<3.12" files = [ @@ -5686,7 +5489,6 @@ urllib3 = ">=1.26.14,<2.0.0" name = "readme-renderer" version = "40.0" description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5706,7 +5508,6 @@ md = ["cmarkgfm (>=0.8.0)"] name = "realtime" version = "1.0.0" description = "" -category = "main" optional = false python-versions = 
">=3.8,<4.0" files = [ @@ -5723,7 +5524,6 @@ websockets = ">=10.3,<11.0" name = "regex" version = "2023.6.3" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5821,7 +5621,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5843,7 +5642,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -5858,7 +5656,6 @@ requests = ">=2.0.1,<3.0.0" name = "rfc3986" version = "1.5.0" description = "Validating URI References per RFC 3986" -category = "main" optional = false python-versions = "*" files = [ @@ -5876,7 +5673,6 @@ idna2008 = ["idna"] name = "rich" version = "13.4.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -5895,7 +5691,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -5910,7 +5705,6 @@ pyasn1 = ">=0.1.3" name = "ruff" version = "0.0.254" description = "An extremely fast Python linter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5937,7 +5731,6 @@ files = [ name = "safetensors" version = "0.3.1" description = "Fast and Safe Tensor serialization" -category = "main" optional = false python-versions = "*" files = [ @@ -5998,7 +5791,6 @@ torch = ["torch (>=1.10)"] name = "scikit-learn" version = "1.3.0" description = "A set of python modules for machine learning and data mining" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -6041,7 +5833,6 @@ tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc ( name = "scipy" version = "1.11.1" description = "Fundamental algorithms for scientific computing in Python" -category = "main" optional = false python-versions = "<3.13,>=3.9" files = [ @@ -6078,7 +5869,6 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "secretstorage" version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -6094,7 +5884,6 @@ jeepney = ">=0.6" name = "semver" version = "2.13.0" description = "Python helper for Semantic Versioning (http://semver.org/)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -6106,7 +5895,6 @@ files = [ name = "sentence-transformers" version = "2.2.2" description = "Multilingual text embeddings" -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -6129,7 +5917,6 @@ transformers = ">=4.6.0,<5.0.0" name = "sentencepiece" version = "0.1.99" description = "SentencePiece python wrapper" -category = "main" optional = false python-versions = "*" files = [ @@ -6184,7 +5971,6 @@ files = [ name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6201,7 +5987,6 @@ 
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "shapely" version = "1.8.5.post1" description = "Geometric objects, predicates, and operations" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -6258,7 +6043,6 @@ vectorized = ["numpy"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -6270,7 +6054,6 @@ files = [ name = "slack-bolt" version = "1.18.0" description = "The Bolt Framework for Python" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -6292,7 +6075,6 @@ testing-without-asyncio = ["Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "Werk name = "slack-sdk" version = "3.21.3" description = "The Slack API Platform SDK for Python" -category = "main" optional = true python-versions = ">=3.6.0" files = [ @@ -6308,7 +6090,6 @@ testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "We name = "smmap" version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -6320,7 +6101,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6332,7 +6112,6 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6344,7 +6123,6 @@ files = [ name = "sqlalchemy" version = "1.4.41" description = "Database Abstraction Library" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -6392,7 +6170,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] @@ -6417,14 +6195,13 @@ sqlcipher = ["sqlcipher3-binary"] [[package]] name = "sqlalchemy2-stubs" -version = "0.0.2a34" +version = "0.0.2a35" description = "Typing Stubs for SQLAlchemy 1.4" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "sqlalchemy2-stubs-0.0.2a34.tar.gz", hash = "sha256:2432137ab2fde1a608df4544f6712427b0b7ff25990cfbbc5a9d1db6c8c6f489"}, - {file = "sqlalchemy2_stubs-0.0.2a34-py3-none-any.whl", hash = "sha256:a313220ac793404349899faf1272e821a62dbe1d3a029bd444faa8d3e966cd07"}, + {file = "sqlalchemy2-stubs-0.0.2a35.tar.gz", hash = "sha256:bd5d530697d7e8c8504c7fe792ef334538392a5fb7aa7e4f670bfacdd668a19d"}, + {file = "sqlalchemy2_stubs-0.0.2a35-py3-none-any.whl", hash = 
"sha256:593784ff9fc0dc2ded1895e3322591689db3be06f3ca006e3ef47640baf2d38a"}, ] [package.dependencies] @@ -6434,7 +6211,6 @@ typing-extensions = ">=3.7.4" name = "sqlmodel" version = "0.0.8" description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." -category = "main" optional = false python-versions = ">=3.6.1,<4.0.0" files = [ @@ -6451,7 +6227,6 @@ sqlalchemy2-stubs = "*" name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -6471,7 +6246,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "starlette" version = "0.27.0" description = "The little ASGI library that shines." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6490,7 +6264,6 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam name = "storage3" version = "0.5.2" description = "Supabase Storage client for Python." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -6507,7 +6280,6 @@ typing-extensions = ">=4.2.0,<5.0.0" name = "strenum" version = "0.4.15" description = "An Enum that inherits from str." -category = "main" optional = false python-versions = "*" files = [ @@ -6524,7 +6296,6 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] name = "supabase" version = "1.0.3" description = "Supabase client for Python." 
-category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -6545,7 +6316,6 @@ supafunc = ">=0.2.2,<0.3.0" name = "supafunc" version = "0.2.2" description = "Library for Supabase Functions" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -6560,7 +6330,6 @@ httpx = ">=0.23.0,<0.24.0" name = "sympy" version = "1.12" description = "Computer algebra system (CAS) in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -6575,7 +6344,6 @@ mpmath = ">=0.19" name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6590,7 +6358,6 @@ widechars = ["wcwidth"] name = "tenacity" version = "8.2.2" description = "Retry code until it succeeds" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -6605,7 +6372,6 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] name = "textual" version = "0.29.0" description = "Modern Text User Interface framework" -category = "main" optional = true python-versions = ">=3.7,<4.0" files = [ @@ -6623,7 +6389,6 @@ typing-extensions = ">=4.4.0,<5.0.0" name = "threadpoolctl" version = "3.1.0" description = "threadpoolctl" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -6635,7 +6400,6 @@ files = [ name = "tiktoken" version = "0.4.0" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -6681,7 +6445,6 @@ blobfile = ["blobfile (>=2)"] name = "tokenizers" version = "0.13.3" description = "Fast and Customizable Tokenizers" -category = "main" optional = false python-versions = "*" files = [ @@ -6736,7 +6499,6 @@ testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" optional = true python-versions = ">=2.6, 
!=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -6748,7 +6510,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6760,7 +6521,6 @@ files = [ name = "tomlkit" version = "0.11.8" description = "Style preserving TOML library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6772,7 +6532,6 @@ files = [ name = "torch" version = "2.0.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -category = "main" optional = false python-versions = ">=3.8.0" files = [ @@ -6812,7 +6571,6 @@ opt-einsum = ["opt-einsum (>=3.3)"] name = "torchvision" version = "0.15.2" description = "image and video datasets and models for torch deep learning" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -6840,7 +6598,7 @@ files = [ [package.dependencies] numpy = "*" -pillow = ">=5.3.0,<8.3.0 || >=8.4.0" +pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" requests = "*" torch = "2.0.1" @@ -6851,7 +6609,6 @@ scipy = ["scipy"] name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -6872,7 +6629,6 @@ files = [ name = "tqdm" version = "4.65.0" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6893,7 +6649,6 @@ telegram = ["requests"] name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6909,7 +6664,6 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "transformers" version = "4.30.2" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -6979,7 +6733,6 @@ vision = ["Pillow"] name = "twine" version = "3.8.0" description = "Collection of utilities for publishing packages on PyPI" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -7003,7 +6756,6 @@ urllib3 = ">=1.26.0" name = "typer" version = "0.9.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -7025,7 +6777,6 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
name = "types-appdirs" version = "1.4.3.5" description = "Typing stubs for appdirs" -category = "dev" optional = false python-versions = "*" files = [ @@ -7037,7 +6788,6 @@ files = [ name = "types-cachetools" version = "5.3.0.5" description = "Typing stubs for cachetools" -category = "main" optional = false python-versions = "*" files = [ @@ -7049,7 +6799,6 @@ files = [ name = "types-pillow" version = "9.5.0.6" description = "Typing stubs for Pillow" -category = "dev" optional = false python-versions = "*" files = [ @@ -7061,7 +6810,6 @@ files = [ name = "types-pytz" version = "2023.3.0.0" description = "Typing stubs for pytz" -category = "dev" optional = false python-versions = "*" files = [ @@ -7073,7 +6821,6 @@ files = [ name = "types-pyyaml" version = "6.0.12.10" description = "Typing stubs for PyYAML" -category = "dev" optional = false python-versions = "*" files = [ @@ -7085,7 +6832,6 @@ files = [ name = "types-requests" version = "2.31.0.1" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ @@ -7100,7 +6846,6 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.13" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ @@ -7112,7 +6857,6 @@ files = [ name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7124,7 +6868,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." 
-category = "main" optional = false python-versions = "*" files = [ @@ -7140,7 +6883,6 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -7152,7 +6894,6 @@ files = [ name = "uc-micro-py" version = "1.0.2" description = "Micro subset of unicode data files for linkify-it-py projects." -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -7167,7 +6908,6 @@ test = ["coverage", "pytest", "pytest-cov"] name = "unstructured" version = "0.7.12" description = "A library that prepares raw documents for downstream ML tasks." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -7215,7 +6955,6 @@ wikipedia = ["wikipedia"] name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -7227,7 +6966,6 @@ files = [ name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -7244,7 +6982,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uvicorn" version = "0.22.0" description = "The lightning-fast ASGI server." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7259,7 +6996,7 @@ h11 = ">=0.8" httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} @@ -7270,7 +7007,6 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", name = "uvloop" version = "0.17.0" description = "Fast implementation of asyncio event loop on top of libuv" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7315,7 +7051,6 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -7332,7 +7067,6 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "watchfiles" version = "0.19.0" description = "Simple, modern and high performance file watching and code reload in python." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7367,7 +7101,6 @@ anyio = ">=3.0.0" name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -7377,14 +7110,13 @@ files = [ [[package]] name = "weaviate-client" -version = "3.21.0" +version = "3.22.1" description = "A python native Weaviate client" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "weaviate-client-3.21.0.tar.gz", hash = "sha256:ec94ac554883c765e94da8b2947c4f0fa4a0378ed3bbe9f3653df3a5b1745a6d"}, - {file = "weaviate_client-3.21.0-py3-none-any.whl", hash = "sha256:420444ded7106fb000f4f8b2321b5f5fa2387825aa7a303d702accf61026f9d2"}, + {file = "weaviate-client-3.22.1.tar.gz", hash = "sha256:aff61bd3f5d74df20a62328443e3aa9c860d5330fdfb19c4d8ddc44cb604032f"}, + {file = "weaviate_client-3.22.1-py3-none-any.whl", hash = "sha256:01843a4899a227300e570409e77628e9d1b28476313f94943c37aee3f75112e1"}, ] [package.dependencies] @@ -7400,7 +7132,6 @@ grpc = ["grpcio", "grpcio-tools"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "main" optional = false python-versions = "*" files = [ @@ -7412,7 +7143,6 @@ files = [ name = "websocket-client" version = "1.6.1" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7429,7 +7159,6 @@ test = ["websockets"] name = "websockets" version = "10.4" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7508,7 +7237,6 @@ files = [ name = "wheel" version = "0.40.0" description = "A built-package format for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7523,7 +7251,6 @@ test = ["pytest (>=6.0.0)"] name = 
"wikipedia" version = "1.4.0" description = "Wikipedia API for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -7538,7 +7265,6 @@ requests = ">=2.0.0,<3.0.0" name = "win32-setctime" version = "1.1.0" description = "A small Python utility to set file creation time on Windows" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -7553,7 +7279,6 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -7638,7 +7363,6 @@ files = [ name = "xlrd" version = "2.0.1" description = "Library for developers to extract data from Microsoft Excel (tm) .xls spreadsheet files" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -7655,7 +7379,6 @@ test = ["pytest", "pytest-cov"] name = "xlsxwriter" version = "3.1.2" description = "A Python module for creating Excel XLSX files." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -7667,7 +7390,6 @@ files = [ name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7753,25 +7475,23 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.15.0" +version = "3.16.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.16.0-py3-none-any.whl", hash = "sha256:5dadc3ad0a1f825fe42ce1bce0f2fc5a13af2e6b2d386af5b0ff295bc0a287d3"}, + {file = "zipp-3.16.0.tar.gz", hash = "sha256:1876cb065531855bbe83b6c489dcf69ecc28f1068d8e95959fe8bbc77774c941"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [[package]] name = "zstandard" version = "0.21.0" description = "Zstandard bindings for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7832,4 +7552,4 @@ deploy = ["langchain-serve"] [metadata] 
lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "c0143f38de64a4ce537edc800380802537b6591202182e3bf9dc647ba666e8b4" +content-hash = "e25e43fde8f96f57beab702ac4c51cb3e569b81f85c540a7b4b5fb7b6388d04e" diff --git a/src/backend/langflow/api/router.py b/src/backend/langflow/api/router.py index f090abe74..b9c51c11e 100644 --- a/src/backend/langflow/api/router.py +++ b/src/backend/langflow/api/router.py @@ -6,6 +6,7 @@ from langflow.api.v1 import ( validate_router, flows_router, flow_styles_router, + component_router, ) router = APIRouter( @@ -14,5 +15,6 @@ router = APIRouter( router.include_router(chat_router) router.include_router(endpoints_router) router.include_router(validate_router) +router.include_router(component_router) router.include_router(flows_router) router.include_router(flow_styles_router) diff --git a/src/backend/langflow/api/v1/__init__.py b/src/backend/langflow/api/v1/__init__.py index f18f90e42..f001152a9 100644 --- a/src/backend/langflow/api/v1/__init__.py +++ b/src/backend/langflow/api/v1/__init__.py @@ -3,10 +3,12 @@ from langflow.api.v1.validate import router as validate_router from langflow.api.v1.chat import router as chat_router from langflow.api.v1.flows import router as flows_router from langflow.api.v1.flow_styles import router as flow_styles_router +from langflow.api.v1.components import router as component_router __all__ = [ "chat_router", "endpoints_router", + "component_router", "validate_router", "flows_router", "flow_styles_router", diff --git a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py new file mode 100644 index 000000000..8ca808eef --- /dev/null +++ b/src/backend/langflow/api/v1/components.py @@ -0,0 +1,83 @@ +from uuid import UUID +from langflow.settings import settings +from langflow.api.utils import remove_api_keys +from langflow.database.models.component import ( + Component, + ComponentCreate, + ComponentRead, + ComponentUpdate, +) +from langflow.database.base import 
get_session +from sqlmodel import Session, select +from fastapi import APIRouter, Depends, HTTPException +from fastapi.encoders import jsonable_encoder + + +COMPONENT_NOT_FOUND = "Component not found" + +router = APIRouter(prefix="/components", tags=["Components"]) + + +@router.post("/", response_model=ComponentRead, status_code=201) +def create(*, session: Session = Depends(get_session), component: ComponentCreate): + db = Component.from_orm(component) + session.add(db) + session.commit() + session.refresh(db) + + return db + + +@router.get("/", response_model=list[ComponentRead], status_code=200) +def read_all(*, session: Session = Depends(get_session)): + try: + sql = select(Component) + components = session.exec(sql).all() + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + return [jsonable_encoder(component) for component in components] + + +@router.get("/{id}", response_model=ComponentRead, status_code=200) +def read(*, session: Session = Depends(get_session), id: UUID): + if component := session.get(Component, id): + return component + else: + raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + + +@router.patch("/{id}", response_model=ComponentRead, status_code=200) +def update( + *, session: Session = Depends(get_session), id: UUID, component: ComponentUpdate +): + db = session.get(Component, id) + if not db: + raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + + data = component.dict(exclude_unset=True) + + if settings.remove_api_keys: + data = remove_api_keys(data) + + for key, value in data.items(): + setattr(db, key, value) + + session.add(db) + session.commit() + session.refresh(db) + + return db + + +@router.delete("/{id}", status_code=200) +def delete(*, session: Session = Depends(get_session), id: UUID): + component = session.get(Component, id) + + if not component: + raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + + session.delete(component) + session.commit() + 
+ return {"message": "Component deleted successfully"} diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index 7b1ba4774..0148dac6d 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -125,3 +125,11 @@ class CustomComponentCode(BaseModel): class CustomComponentResponseError(BaseModel): detail: str traceback: str + + +class ComponentListCreate(BaseModel): + flows: List[FlowCreate] + + +class ComponentListRead(BaseModel): + flows: List[FlowRead] diff --git a/src/backend/langflow/database/models/component.py b/src/backend/langflow/database/models/component.py new file mode 100644 index 000000000..ddac8309b --- /dev/null +++ b/src/backend/langflow/database/models/component.py @@ -0,0 +1,52 @@ +from uuid import UUID, uuid4 +from pydantic import validator +from typing import Dict, Optional +from sqlmodel import Field, JSON, Column + +from langflow.database.models.base import SQLModelSerializable + + +class ComponentBase(SQLModelSerializable): + name: str = Field(index=True) + description: Optional[str] = Field(index=True) + data: Optional[Dict] = Field(default=None) + + @validator("data") + def validate_json(v): + # dict_keys(['description', 'name', 'id', 'data']) + if not v: + return v + if not isinstance(v, dict): + raise ValueError("Flow must be a valid JSON") + + # data must contain nodes and edges + if "nodes" not in v.keys(): + raise ValueError("Flow must have nodes") + if "edges" not in v.keys(): + raise ValueError("Flow must have edges") + + return v + + +class Component(ComponentBase, table=True): + id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) + data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) + # style: Optional["FlowStyle"] = Relationship( + # back_populates="flow", + # # use "uselist=False" to make it a one-to-one relationship + # sa_relationship_kwargs={"uselist": False}, + # ) + + +class ComponentCreate(ComponentBase): + 
pass + + +class ComponentRead(ComponentBase): + id: UUID + + +class ComponentUpdate(SQLModelSerializable): + name: Optional[str] = None + description: Optional[str] = None + data: Optional[Dict] = None From 6c1a6f3bd928d26aa3f58d182d940a2ebafad378 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 12 Jul 2023 13:02:38 +0100 Subject: [PATCH 077/221] =?UTF-8?q?=F0=9F=94=A7=20fix(components.py):=20re?= =?UTF-8?q?move=20unused=20imports=20and=20commented=20out=20code=20?= =?UTF-8?q?=E2=9C=A8=20feat(components.py):=20refactor=20create=5Fcomponen?= =?UTF-8?q?t=20endpoint=20to=20use=20the=20Component=20model=20directly=20?= =?UTF-8?q?instead=20of=20ComponentCreate=20model=20=E2=9C=A8=20feat(compo?= =?UTF-8?q?nents.py):=20refactor=20read=5Fcomponent=20endpoint=20to=20retu?= =?UTF-8?q?rn=20a=20Component=20model=20instead=20of=20ComponentRead=20mod?= =?UTF-8?q?el=20=E2=9C=A8=20feat(components.py):=20refactor=20read=5Fcompo?= =?UTF-8?q?nents=20endpoint=20to=20return=20a=20list=20of=20Component=20mo?= =?UTF-8?q?dels=20instead=20of=20a=20list=20of=20ComponentRead=20models=20?= =?UTF-8?q?=E2=9C=A8=20feat(components.py):=20refactor=20update=5Fcomponen?= =?UTF-8?q?t=20endpoint=20to=20use=20the=20Component=20model=20directly=20?= =?UTF-8?q?instead=20of=20ComponentUpdate=20model=20=E2=9C=A8=20feat(compo?= =?UTF-8?q?nents.py):=20refactor=20delete=5Fcomponent=20endpoint=20to=20us?= =?UTF-8?q?e=20the=20Component=20model=20directly=20instead=20of=20Compone?= =?UTF-8?q?ntUpdate=20model=20=F0=9F=94=A7=20fix(component.py):=20remove?= =?UTF-8?q?=20unused=20imports=20and=20commented=20out=20code=20=E2=9C=A8?= =?UTF-8?q?=20feat(component.py):=20add=20missing=20import=20for=20orjson?= =?UTF-8?q?=20=E2=9C=A8=20feat(component.py):=20add=20missing=20import=20f?= =?UTF-8?q?or=20FastAPI,=20HTTPException,=20and=20Depends=20=E2=9C=A8=20fe?= =?UTF-8?q?at(component.py):=20add=20missing=20import=20for=20List=20and?= =?UTF-8?q?=20Optional=20=E2=9C=A8=20feat(component.py):=20add=20missing?= 
=?UTF-8?q?=20import=20for=20datetime=20=E2=9C=A8=20feat(component.py):=20?= =?UTF-8?q?add=20missing=20import=20for=20uuid=20=E2=9C=A8=20feat(componen?= =?UTF-8?q?t.py):=20add=20missing=20import=20for=20StaticPool=20=E2=9C=A8?= =?UTF-8?q?=20feat(component.py):=20add=20missing=20import=20for=20create?= =?UTF-8?q?=5Fengine=20=E2=9C=A8=20feat(component.py):=20add=20missing=20i?= =?UTF-8?q?mport=20for=20select=20=E2=9C=A8=20feat(component.py):=20add=20?= =?UTF-8?q?missing=20import=20for=20orjson=20=E2=9C=A8=20feat(component.py?= =?UTF-8?q?):=20add=20missing=20import=20for=20SQLModel=20=E2=9C=A8=20feat?= =?UTF-8?q?(component.py):=20add=20missing=20import=20for=20Session=20?= =?UTF-8?q?=E2=9C=A8=20feat(component.py):=20add=20missing=20import=20for?= =?UTF-8?q?=20orjson=20=E2=9C=A8=20feat(component.py):=20add=20missing=20i?= =?UTF-8?q?mport=20for=20orjson=5Fdumps=20function=20=E2=9C=A8=20feat(comp?= =?UTF-8?q?onent.py):=20add=20missing=20import=20for=20Component=20model?= =?UTF-8?q?=20=E2=9C=A8=20feat(component.py):=20add=20missing=20import=20f?= =?UTF-8?q?or=20Field=20=E2=9C=A8=20feat(component.py):=20add=20missing=20?= =?UTF-8?q?import=20for=20SQLModel=20=E2=9C=A8=20feat(component.py):=20add?= =?UTF-8?q?=20missing=20import=20for=20Session=20=E2=9C=A8=20feat(componen?= =?UTF-8?q?t.py):=20add=20missing=20import=20for=20create=5Fengine=20?= =?UTF-8?q?=E2=9C=A8=20feat(component.py):=20add=20missing=20import=20for?= =?UTF-8?q?=20select=20=E2=9C=A8=20feat(component.py):=20add=20missing=20i?= =?UTF-8?q?mport=20for=20Optional=20=E2=9C=A8=20feat(component.py):=20add?= =?UTF-8?q?=20missing=20import=20for=20List=20=E2=9C=A8=20feat(component.p?= =?UTF-8?q?y):=20add=20missing=20import=20for=20datetime=20=E2=9C=A8=20fea?= =?UTF-8?q?t(component.py):=20add=20missing=20import=20for=20uuid=20?= =?UTF-8?q?=E2=9C=A8=20feat(component.py):=20add=20missing=20import=20for?= =?UTF-8?q?=20StaticPool=20=E2=9C=A8=20feat(component.py):=20add=20missing?= 
=?UTF-8?q?=20import=20for=20orjson=20=E2=9C=A8=20feat(component.py):=20ad?= =?UTF-8?q?d=20missing=20import=20for=20orjson=5Fdumps=20function=20?= =?UTF-8?q?=E2=9C=A8=20feat(component.py):=20add=20missing=20import=20for?= =?UTF-8?q?=20Component=20model=20=E2=9C=A8=20feat(component?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/components.py | 153 +++++++++++------- .../langflow/database/models/component.py | 109 ++++++++----- 2 files changed, 166 insertions(+), 96 deletions(-) diff --git a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py index 8ca808eef..299f78371 100644 --- a/src/backend/langflow/api/v1/components.py +++ b/src/backend/langflow/api/v1/components.py @@ -1,16 +1,9 @@ +from typing import List from uuid import UUID -from langflow.settings import settings -from langflow.api.utils import remove_api_keys -from langflow.database.models.component import ( - Component, - ComponentCreate, - ComponentRead, - ComponentUpdate, -) +from langflow.database.models.component import Component from langflow.database.base import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException -from fastapi.encoders import jsonable_encoder COMPONENT_NOT_FOUND = "Component not found" @@ -18,66 +11,112 @@ COMPONENT_NOT_FOUND = "Component not found" router = APIRouter(prefix="/components", tags=["Components"]) -@router.post("/", response_model=ComponentRead, status_code=201) -def create(*, session: Session = Depends(get_session), component: ComponentCreate): - db = Component.from_orm(component) - session.add(db) - session.commit() - session.refresh(db) - - return db +@router.post("/", response_model=Component) +def create_component(component: Component, db: Session = Depends(get_session)): + db.add(component) + db.commit() + db.refresh(component) + return component -@router.get("/", response_model=list[ComponentRead], 
status_code=200) -def read_all(*, session: Session = Depends(get_session)): - try: - sql = select(Component) - components = session.exec(sql).all() - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) from e - - return [jsonable_encoder(component) for component in components] - - -@router.get("/{id}", response_model=ComponentRead, status_code=200) -def read(*, session: Session = Depends(get_session), id: UUID): - if component := session.get(Component, id): +@router.get("/{component_id}", response_model=Component) +def read_component(component_id: UUID, db: Session = Depends(get_session)): + if component := db.get(Component, component_id): return component else: raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) -@router.patch("/{id}", response_model=ComponentRead, status_code=200) -def update( - *, session: Session = Depends(get_session), id: UUID, component: ComponentUpdate +@router.get("/", response_model=List[Component]) +def read_components(skip: int = 0, limit: int = 50, db: Session = Depends(get_session)): + return db.execute(select(Component).offset(skip).limit(limit)).fetchall() + + +@router.patch("/{component_id}", response_model=Component) +def update_component( + component_id: UUID, component: Component, db: Session = Depends(get_session) ): - db = session.get(Component, id) - if not db: + db_component = db.get(Component, component_id) + if not db_component: raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) - - data = component.dict(exclude_unset=True) - - if settings.remove_api_keys: - data = remove_api_keys(data) - - for key, value in data.items(): - setattr(db, key, value) - - session.add(db) - session.commit() - session.refresh(db) - - return db + component_data = component.dict(exclude_unset=True) + for key, value in component_data.items(): + setattr(db_component, key, value) + db.commit() + db.refresh(db_component) + return db_component -@router.delete("/{id}", status_code=200) -def delete(*, 
session: Session = Depends(get_session), id: UUID): - component = session.get(Component, id) - +@router.delete("/{component_id}") +def delete_component(component_id: UUID, db: Session = Depends(get_session)): + component = db.get(Component, component_id) if not component: raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + db.delete(component) + db.commit() + return {"detail": "Component deleted"} - session.delete(component) - session.commit() - return {"message": "Component deleted successfully"} +# @router.post("/", response_model=ComponentRead, status_code=201) +# def create(*, session: Session = Depends(get_session), component: ComponentCreate): +# db = Component.from_orm(component) +# session.add(db) +# session.commit() +# session.refresh(db) + +# return db + + +# @router.get("/", response_model=list[ComponentRead], status_code=200) +# def read_all(*, session: Session = Depends(get_session)): +# try: +# sql = select(Component) +# components = session.exec(sql).all() +# except Exception as e: +# raise HTTPException(status_code=500, detail=str(e)) from e + +# return [jsonable_encoder(component) for component in components] + + +# @router.get("/{id}", response_model=ComponentRead, status_code=200) +# def read(*, session: Session = Depends(get_session), id: UUID): +# if component := session.get(Component, id): +# return component +# else: +# raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + + +# @router.patch("/{id}", response_model=ComponentRead, status_code=200) +# def update( +# *, session: Session = Depends(get_session), id: UUID, component: ComponentUpdate +# ): +# db = session.get(Component, id) +# if not db: +# raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + +# data = component.dict(exclude_unset=True) + +# if settings.remove_api_keys: +# data = remove_api_keys(data) + +# for key, value in data.items(): +# setattr(db, key, value) + +# session.add(db) +# session.commit() +# session.refresh(db) + +# return db 
+ + +# @router.delete("/{id}", status_code=200) +# def delete(*, session: Session = Depends(get_session), id: UUID): +# component = session.get(Component, id) + +# if not component: +# raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + +# session.delete(component) +# session.commit() + +# return {"message": "Component deleted successfully"} diff --git a/src/backend/langflow/database/models/component.py b/src/backend/langflow/database/models/component.py index ddac8309b..bad2d7a54 100644 --- a/src/backend/langflow/database/models/component.py +++ b/src/backend/langflow/database/models/component.py @@ -1,52 +1,83 @@ -from uuid import UUID, uuid4 -from pydantic import validator -from typing import Dict, Optional -from sqlmodel import Field, JSON, Column - from langflow.database.models.base import SQLModelSerializable +from sqlmodel import Field +from typing import Optional +from datetime import datetime +import uuid + +# def orjson_dumps(v, *, default): +# # orjson.dumps returns bytes, to match standard json.dumps we need to decode +# return orjson.dumps(v, default=default).decode() + +# class SQLModelSerializable(SQLModel): +# class Config: +# orm_mode = True +# json_loads = orjson.loads +# json_dumps = orjson_dumps + +# DATABASE_URL = "sqlite+pysqlite:///./database.db" + +# engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False}, poolclass=StaticPool) -class ComponentBase(SQLModelSerializable): +class Component(SQLModelSerializable, table=True): + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + id_frontend_node: uuid.UUID = Field(index=True) name: str = Field(index=True) description: Optional[str] = Field(index=True) - data: Optional[Dict] = Field(default=None) - - @validator("data") - def validate_json(v): - # dict_keys(['description', 'name', 'id', 'data']) - if not v: - return v - if not isinstance(v, dict): - raise ValueError("Flow must be a valid JSON") - - # data must contain nodes and edges - if 
"nodes" not in v.keys(): - raise ValueError("Flow must have nodes") - if "edges" not in v.keys(): - raise ValueError("Flow must have edges") - - return v + code_python: Optional[str] = Field(default=None) + return_type: Optional[str] = Field(index=True) + create_at: datetime = Field(default_factory=datetime.utcnow) + update_at: datetime = Field(default_factory=datetime.utcnow) + is_disabled: bool = Field(default=False) + is_read_only: bool = Field(default=False) -class Component(ComponentBase, table=True): - id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) - # style: Optional["FlowStyle"] = Relationship( - # back_populates="flow", - # # use "uselist=False" to make it a one-to-one relationship - # sa_relationship_kwargs={"uselist": False}, - # ) +# app = FastAPI() +# def get_db(): +# with Session(engine) as session: +# yield session -class ComponentCreate(ComponentBase): - pass +# @app.on_event("startup") +# def on_startup(): +# SQLModel.metadata.create_all(engine) +# @app.post("/components/", response_model=Component) +# def create_component(component: Component, db: Session = Depends(get_db)): +# db.add(component) +# db.commit() +# db.refresh(component) +# return component -class ComponentRead(ComponentBase): - id: UUID +# @app.get("/components/{component_id}", response_model=Component) +# def read_component(component_id: uuid.UUID, db: Session = Depends(get_db)): +# component = db.get(Component, component_id) +# if not component: +# raise HTTPException(status_code=404, detail="Component not found") +# return component +# @app.get("/components/", response_model=List[Component]) +# def read_components(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)): +# components = db.execute(select(Component).offset(skip).limit(limit)).fetchall() +# return components -class ComponentUpdate(SQLModelSerializable): - name: Optional[str] = None - description: Optional[str] = None 
- data: Optional[Dict] = None +# @app.put("/components/{component_id}", response_model=Component) +# def update_component(component_id: uuid.UUID, component: Component, db: Session = Depends(get_db)): +# db_component = db.get(Component, component_id) +# if not db_component: +# raise HTTPException(status_code=404, detail="Component not found") +# component_data = component.dict(exclude_unset=True) +# for key, value in component_data.items(): +# setattr(db_component, key, value) +# db.commit() +# db.refresh(db_component) +# return db_component + +# @app.delete("/components/{component_id}") +# def delete_component(component_id: uuid.UUID, db: Session = Depends(get_db)): +# component = db.get(Component, component_id) +# if not component: +# raise HTTPException(status_code=404, detail="Component not found") +# db.delete(component) +# db.commit() +# return {"detail": "Component deleted"} From 587b203b669494744a3b13779d95ed6e803feaa7 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 12 Jul 2023 13:12:52 +0100 Subject: [PATCH 078/221] =?UTF-8?q?=F0=9F=90=9B=20fix(components.py):=20ha?= =?UTF-8?q?ndle=20IntegrityError=20when=20creating=20a=20component=20to=20?= =?UTF-8?q?avoid=20duplicate=20entries=20=E2=9C=A8=20feat(components.py):?= =?UTF-8?q?=20add=20error=20handling=20for=20creating=20a=20component=20wi?= =?UTF-8?q?th=20the=20same=20id=20to=20return=20a=20400=20status=20code=20?= =?UTF-8?q?and=20a=20detailed=20error=20message?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/components.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py index 299f78371..f9cd8bce8 100644 --- a/src/backend/langflow/api/v1/components.py +++ b/src/backend/langflow/api/v1/components.py @@ -4,6 +4,7 @@ from langflow.database.models.component import Component from langflow.database.base import 
get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.exc import IntegrityError COMPONENT_NOT_FOUND = "Component not found" @@ -13,9 +14,16 @@ router = APIRouter(prefix="/components", tags=["Components"]) @router.post("/", response_model=Component) def create_component(component: Component, db: Session = Depends(get_session)): - db.add(component) - db.commit() - db.refresh(component) + try: + db.add(component) + db.commit() + db.refresh(component) + except IntegrityError as e: + db.rollback() + raise HTTPException( + status_code=400, + detail="A component with the same id already exists.", + ) from e return component From 2f4e98477c3be6dd5dd9f045c09729e8cfacef86 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 12 Jul 2023 15:10:37 +0100 Subject: [PATCH 079/221] =?UTF-8?q?=F0=9F=90=9B=20fix(components.py):=20ch?= =?UTF-8?q?ange=20variable=20name=20from=20`component`=20to=20`component?= =?UTF-8?q?=5Fmodel`=20in=20create=5Fcomponent=20and=20update=5Fcomponent?= =?UTF-8?q?=20functions=20for=20better=20readability=20=E2=9C=A8=20feat(co?= =?UTF-8?q?mponents.py):=20add=20support=20for=20`create=5Fat`=20and=20`up?= =?UTF-8?q?date=5Fat`=20fields=20in=20Component=20model=20to=20track=20cre?= =?UTF-8?q?ation=20and=20update=20timestamps=20=F0=9F=94=A5=20refactor(com?= =?UTF-8?q?ponents.py):=20remove=20unused=20imports=20and=20variables=20in?= =?UTF-8?q?=20components.py=20=F0=9F=94=A5=20refactor(component.py):=20rem?= =?UTF-8?q?ove=20unused=20imports=20and=20variables=20in=20component.py=20?= =?UTF-8?q?=F0=9F=94=A5=20refactor(component.py):=20remove=20commented=20o?= =?UTF-8?q?ut=20code=20in=20component.py?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/components.py | 94 ++++--------------- .../langflow/database/models/component.py | 86 ++++------------- 2 files changed, 36 insertions(+), 144 deletions(-) diff --git 
a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py index f9cd8bce8..646fcb3f6 100644 --- a/src/backend/langflow/api/v1/components.py +++ b/src/backend/langflow/api/v1/components.py @@ -1,30 +1,35 @@ from typing import List from uuid import UUID -from langflow.database.models.component import Component +from langflow.database.models.component import Component, ComponentModel from langflow.database.base import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.exc import IntegrityError +from datetime import datetime COMPONENT_NOT_FOUND = "Component not found" +COMPONENT_ALREADY_EXISTS = "A component with the same id already exists." +COMPONENT_DELETED = "Component deleted" + router = APIRouter(prefix="/components", tags=["Components"]) @router.post("/", response_model=Component) -def create_component(component: Component, db: Session = Depends(get_session)): +def create_component(component: ComponentModel, db: Session = Depends(get_session)): + db_component = Component(**component.dict()) try: - db.add(component) + db.add(db_component) db.commit() - db.refresh(component) + db.refresh(db_component) except IntegrityError as e: db.rollback() raise HTTPException( status_code=400, - detail="A component with the same id already exists.", + detail=COMPONENT_ALREADY_EXISTS, ) from e - return component + return db_component @router.get("/{component_id}", response_model=Component) @@ -37,19 +42,25 @@ def read_component(component_id: UUID, db: Session = Depends(get_session)): @router.get("/", response_model=List[Component]) def read_components(skip: int = 0, limit: int = 50, db: Session = Depends(get_session)): - return db.execute(select(Component).offset(skip).limit(limit)).fetchall() + query = select(Component) + query = query.offset(skip).limit(limit) + + return db.execute(query).fetchall() @router.patch("/{component_id}", response_model=Component) def update_component( - 
component_id: UUID, component: Component, db: Session = Depends(get_session) + component_id: UUID, component: ComponentModel, db: Session = Depends(get_session) ): db_component = db.get(Component, component_id) if not db_component: raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) component_data = component.dict(exclude_unset=True) + for key, value in component_data.items(): setattr(db_component, key, value) + + db_component.update_at = datetime.utcnow() db.commit() db.refresh(db_component) return db_component @@ -62,69 +73,4 @@ def delete_component(component_id: UUID, db: Session = Depends(get_session)): raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) db.delete(component) db.commit() - return {"detail": "Component deleted"} - - -# @router.post("/", response_model=ComponentRead, status_code=201) -# def create(*, session: Session = Depends(get_session), component: ComponentCreate): -# db = Component.from_orm(component) -# session.add(db) -# session.commit() -# session.refresh(db) - -# return db - - -# @router.get("/", response_model=list[ComponentRead], status_code=200) -# def read_all(*, session: Session = Depends(get_session)): -# try: -# sql = select(Component) -# components = session.exec(sql).all() -# except Exception as e: -# raise HTTPException(status_code=500, detail=str(e)) from e - -# return [jsonable_encoder(component) for component in components] - - -# @router.get("/{id}", response_model=ComponentRead, status_code=200) -# def read(*, session: Session = Depends(get_session), id: UUID): -# if component := session.get(Component, id): -# return component -# else: -# raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) - - -# @router.patch("/{id}", response_model=ComponentRead, status_code=200) -# def update( -# *, session: Session = Depends(get_session), id: UUID, component: ComponentUpdate -# ): -# db = session.get(Component, id) -# if not db: -# raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) - 
-# data = component.dict(exclude_unset=True) - -# if settings.remove_api_keys: -# data = remove_api_keys(data) - -# for key, value in data.items(): -# setattr(db, key, value) - -# session.add(db) -# session.commit() -# session.refresh(db) - -# return db - - -# @router.delete("/{id}", status_code=200) -# def delete(*, session: Session = Depends(get_session), id: UUID): -# component = session.get(Component, id) - -# if not component: -# raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) - -# session.delete(component) -# session.commit() - -# return {"message": "Component deleted successfully"} + return {"detail": COMPONENT_DELETED} diff --git a/src/backend/langflow/database/models/component.py b/src/backend/langflow/database/models/component.py index bad2d7a54..bb2408cdb 100644 --- a/src/backend/langflow/database/models/component.py +++ b/src/backend/langflow/database/models/component.py @@ -1,83 +1,29 @@ -from langflow.database.models.base import SQLModelSerializable +from langflow.database.models.base import SQLModelSerializable, SQLModel from sqlmodel import Field from typing import Optional from datetime import datetime import uuid -# def orjson_dumps(v, *, default): -# # orjson.dumps returns bytes, to match standard json.dumps we need to decode -# return orjson.dumps(v, default=default).decode() - -# class SQLModelSerializable(SQLModel): -# class Config: -# orm_mode = True -# json_loads = orjson.loads -# json_dumps = orjson_dumps - -# DATABASE_URL = "sqlite+pysqlite:///./database.db" - -# engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False}, poolclass=StaticPool) - class Component(SQLModelSerializable, table=True): id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - id_frontend_node: uuid.UUID = Field(index=True) + frontend_node_id: uuid.UUID = Field(index=True) name: str = Field(index=True) - description: Optional[str] = Field(index=True) - code_python: Optional[str] = Field(default=None) - return_type: 
Optional[str] = Field(index=True) - create_at: datetime = Field(default_factory=datetime.utcnow) - update_at: datetime = Field(default_factory=datetime.utcnow) + description: Optional[str] = Field(default=None) + python_code: Optional[str] = Field(default=None) + return_type: Optional[str] = Field(default=None) is_disabled: bool = Field(default=False) is_read_only: bool = Field(default=False) + create_at: datetime = Field(default_factory=datetime.utcnow) + update_at: datetime = Field(default_factory=datetime.utcnow) -# app = FastAPI() - -# def get_db(): -# with Session(engine) as session: -# yield session - -# @app.on_event("startup") -# def on_startup(): -# SQLModel.metadata.create_all(engine) - -# @app.post("/components/", response_model=Component) -# def create_component(component: Component, db: Session = Depends(get_db)): -# db.add(component) -# db.commit() -# db.refresh(component) -# return component - -# @app.get("/components/{component_id}", response_model=Component) -# def read_component(component_id: uuid.UUID, db: Session = Depends(get_db)): -# component = db.get(Component, component_id) -# if not component: -# raise HTTPException(status_code=404, detail="Component not found") -# return component - -# @app.get("/components/", response_model=List[Component]) -# def read_components(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)): -# components = db.execute(select(Component).offset(skip).limit(limit)).fetchall() -# return components - -# @app.put("/components/{component_id}", response_model=Component) -# def update_component(component_id: uuid.UUID, component: Component, db: Session = Depends(get_db)): -# db_component = db.get(Component, component_id) -# if not db_component: -# raise HTTPException(status_code=404, detail="Component not found") -# component_data = component.dict(exclude_unset=True) -# for key, value in component_data.items(): -# setattr(db_component, key, value) -# db.commit() -# db.refresh(db_component) -# return 
db_component - -# @app.delete("/components/{component_id}") -# def delete_component(component_id: uuid.UUID, db: Session = Depends(get_db)): -# component = db.get(Component, component_id) -# if not component: -# raise HTTPException(status_code=404, detail="Component not found") -# db.delete(component) -# db.commit() -# return {"detail": "Component deleted"} +class ComponentModel(SQLModel): + id: uuid.UUID = Field(default_factory=uuid.uuid4) + frontend_node_id: uuid.UUID = Field(default=uuid.uuid4()) + name: str = Field(default="") + description: Optional[str] = None + python_code: Optional[str] = None + return_type: Optional[str] = None + is_disabled: bool = False + is_read_only: bool = False From e8c844a75fdf68b7d32c7ca616ea90726be33a31 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 12 Jul 2023 21:54:15 +0100 Subject: [PATCH 080/221] =?UTF-8?q?=F0=9F=90=9B=20fix(types.py):=20handle?= =?UTF-8?q?=20case=20when=20field=5Ftype=20is=20None=20by=20assigning=20an?= =?UTF-8?q?=20empty=20string=20to=20improve=20code=20robustness=20?= =?UTF-8?q?=F0=9F=90=9B=20fix(types.py):=20reformat=20detail=20dictionary?= =?UTF-8?q?=20in=20raise=20HTTPException=20to=20improve=20readability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/types.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 08bfe77c7..41343089c 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -177,6 +177,9 @@ def build_langchain_template_custom_component(extractor: CustomComponent): field_required = True field_name, field_type, field_value = extra_field + if not field_type: + field_type = "" + if field_name != "self": # TODO: Validate type - if is possible to render into frontend if "optional" in field_type.lower(): From 79d2d551ff98b35da58b6801c475e9376e97b09a Mon Sep 17 00:00:00 2001 
From: gustavoschaedler Date: Fri, 14 Jul 2023 04:49:42 +0100 Subject: [PATCH 081/221] =?UTF-8?q?=F0=9F=94=80=20refactor(langflow):=20re?= =?UTF-8?q?name=20custom.py=20to=20custom=5Fcomponent.py=20for=20clarity?= =?UTF-8?q?=20=F0=9F=94=A5=20remove(langflow):=20delete=20custom.py=20as?= =?UTF-8?q?=20it's=20replaced=20by=20custom=5Fcomponent.py=20=F0=9F=93=A6?= =?UTF-8?q?=20feat(langflow):=20add=20code=5Fparser.py=20to=20parse=20Pyth?= =?UTF-8?q?on=20source=20code=20=F0=9F=90=9B=20fix(langflow):=20update=20i?= =?UTF-8?q?mport=20paths=20due=20to=20file=20renaming=20=F0=9F=8E=A8=20sty?= =?UTF-8?q?le(langflow):=20improve=20code=20formatting=20for=20readability?= =?UTF-8?q?=20=F0=9F=90=9B=20fix(langflow):=20correct=20handling=20of=20fu?= =?UTF-8?q?nction=20arguments=20and=20return=20types=20in=20custom=20compo?= =?UTF-8?q?nents=20=F0=9F=94=A7=20chore(langflow):=20update=20function=20c?= =?UTF-8?q?alls=20due=20to=20changes=20in=20custom=20components?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/components.py | 3 +- src/backend/langflow/api/v1/endpoints.py | 2 +- .../langflow/interface/custom/__init__.py | 2 +- src/backend/langflow/interface/custom/base.py | 4 +- .../langflow/interface/custom/code_parser.py | 178 ++++++++++++++ .../langflow/interface/custom/component.py | 53 +++++ .../langflow/interface/custom/custom.py | 220 ------------------ .../interface/custom/custom_component.py | 119 ++++++++++ .../langflow/interface/importing/utils.py | 7 +- src/backend/langflow/interface/types.py | 40 ++-- 10 files changed, 379 insertions(+), 249 deletions(-) create mode 100644 src/backend/langflow/interface/custom/code_parser.py create mode 100644 src/backend/langflow/interface/custom/component.py delete mode 100644 src/backend/langflow/interface/custom/custom.py create mode 100644 src/backend/langflow/interface/custom/custom_component.py diff --git a/src/backend/langflow/api/v1/components.py 
b/src/backend/langflow/api/v1/components.py index 646fcb3f6..1e34da2aa 100644 --- a/src/backend/langflow/api/v1/components.py +++ b/src/backend/langflow/api/v1/components.py @@ -1,3 +1,4 @@ +from datetime import timezone from typing import List from uuid import UUID from langflow.database.models.component import Component, ComponentModel @@ -60,7 +61,7 @@ def update_component( for key, value in component_data.items(): setattr(db_component, key, value) - db_component.update_at = datetime.utcnow() + db_component.update_at = datetime.now(timezone.utc) db.commit() db.refresh(db_component) return db_component diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index e12f2076e..c51f9ce78 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -7,7 +7,7 @@ from langflow.utils.logger import logger from fastapi import APIRouter, Depends, HTTPException, UploadFile -from langflow.interface.custom.custom import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent from langflow.api.v1.schemas import ( ProcessResponse, diff --git a/src/backend/langflow/interface/custom/__init__.py b/src/backend/langflow/interface/custom/__init__.py index 48672e52b..5b87e9fa3 100644 --- a/src/backend/langflow/interface/custom/__init__.py +++ b/src/backend/langflow/interface/custom/__init__.py @@ -1,4 +1,4 @@ from langflow.interface.custom.base import CustomComponentCreator -from langflow.interface.custom.custom import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent __all__ = ["CustomComponentCreator", "CustomComponent"] diff --git a/src/backend/langflow/interface/custom/base.py b/src/backend/langflow/interface/custom/base.py index 8dfa127cc..06e874fa7 100644 --- a/src/backend/langflow/interface/custom/base.py +++ b/src/backend/langflow/interface/custom/base.py @@ -2,7 +2,9 @@ from typing import Any, Dict, List, Optional, Type from 
langflow.interface.base import LangChainTypeCreator -from langflow.interface.custom.custom import CustomComponent + +# from langflow.interface.custom.custom import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent from langflow.template.frontend_node.custom_components import ( CustomComponentFrontendNode, ) diff --git a/src/backend/langflow/interface/custom/code_parser.py b/src/backend/langflow/interface/custom/code_parser.py new file mode 100644 index 000000000..8a67fa733 --- /dev/null +++ b/src/backend/langflow/interface/custom/code_parser.py @@ -0,0 +1,178 @@ +import ast +import traceback + +from typing import Dict, Any, Union +from fastapi import HTTPException + + +class CodeSyntaxError(HTTPException): + pass + + +class CodeParser: + """ + A parser for Python source code, extracting code details. + """ + + def __init__(self, code: str) -> None: + """ + Initializes the parser with the provided code. + """ + self.code = code + self.data: Dict[str, Any] = { + "imports": [], + "functions": [], + "classes": [], + "global_vars": [], + } + self.handlers = { + ast.Import: self.parse_imports, + ast.ImportFrom: self.parse_imports, + ast.FunctionDef: self.parse_functions, + ast.ClassDef: self.parse_classes, + ast.Assign: self.parse_global_vars, + } + + def __get_tree(self): + """ + Parses the provided code to validate its syntax. + It tries to parse the code into an abstract syntax tree (AST). + """ + try: + tree = ast.parse(self.code) + except SyntaxError as err: + raise CodeSyntaxError( + status_code=400, + detail={"error": err.msg, "traceback": traceback.format_exc()}, + ) from err + + return tree + + def parse_node(self, node: ast.AST) -> None: + """ + Parses an AST node and updates the data + dictionary with the relevant information. + """ + if handler := self.handlers.get(type(node)): + handler(node) + + def parse_imports(self, node: Union[ast.Import, ast.ImportFrom]) -> None: + """ + Extracts "imports" from the code. 
+ """ + if isinstance(node, ast.Import): + module = node.names[0].name + self.data["imports"].append(module) + elif isinstance(node, ast.ImportFrom): + module = node.module + names = [alias.name for alias in node.names] + self.data["imports"].append((module, names)) + + def parse_functions(self, node: ast.FunctionDef) -> None: + """ + Extracts "functions" from the code. + """ + self.data["functions"].append(self.parse_callable_details(node)) + + def parse_arg(self, arg, default): + """ + Parses an argument and its default value. + """ + arg_dict = {"name": arg.arg, "default": default} + if arg.annotation: + arg_dict["type"] = ast.unparse(arg.annotation) + return arg_dict + + def parse_callable_details(self, node: ast.FunctionDef) -> Dict[str, Any]: + """ + Extracts details from a single function or method node. + """ + func = { + "name": node.name, + "doc": ast.get_docstring(node), + "args": [], + "body": [], + "return_type": ast.unparse(node.returns) if node.returns else None, + } + + # Handle positional arguments with default values + defaults = [None] * (len(node.args.args) - len(node.args.defaults)) + [ + ast.unparse(default) for default in node.args.defaults + ] + + for arg, default in zip(node.args.args, defaults): + func["args"].append(self.parse_arg(arg, default)) + + # Handle *args + if node.args.vararg: + func["args"].append(self.parse_arg(node.args.vararg, None)) + + # Handle keyword-only arguments with default values + kw_defaults = [None] * ( + len(node.args.kwonlyargs) - len(node.args.kw_defaults) + ) + [ + ast.unparse(default) if default else None + for default in node.args.kw_defaults + ] + + for arg, default in zip(node.args.kwonlyargs, kw_defaults): + func["args"].append(self.parse_arg(arg, default)) + + # Handle **kwargs + if node.args.kwarg: + func["args"].append(self.parse_arg(node.args.kwarg, None)) + + for line in node.body: + func["body"].append(ast.unparse(line)) + return func + + def parse_classes(self, node: ast.ClassDef) -> None: + """ + 
Extracts "classes" from the code, including + inheritance and init methods. + """ + class_dict = { + "name": node.name, + "doc": ast.get_docstring(node), + "bases": [ast.unparse(base) for base in node.bases], + "attributes": [], + "methods": [], + } + + for stmt in node.body: + if isinstance(stmt, ast.AnnAssign): + attr = {"name": stmt.target.id, "type": ast.unparse(stmt.annotation)} + class_dict["attributes"].append(attr) + elif isinstance(stmt, ast.Assign): + attr = {"name": stmt.targets[0].id, "value": ast.unparse(stmt.value)} + class_dict["attributes"].append(attr) + elif isinstance(stmt, ast.FunctionDef): + method = self.parse_callable_details(stmt) + if stmt.name == "__init__": + class_dict["init"] = method + else: + class_dict["methods"].append(method) + + self.data["classes"].append(class_dict) + + def parse_global_vars(self, node: ast.Assign) -> None: + """ + Extracts global variables from the code. + """ + global_var = { + "targets": [ + t.id if hasattr(t, "id") else ast.dump(t) for t in node.targets + ], + "value": ast.unparse(node.value), + } + self.data["global_vars"].append(global_var) + + def parse_code(self) -> Dict[str, Any]: + """ + Runs all parsing operations and returns the resulting data. + """ + tree = self.__get_tree() + + for node in ast.walk(tree): + self.parse_node(node) + return self.data diff --git a/src/backend/langflow/interface/custom/component.py b/src/backend/langflow/interface/custom/component.py new file mode 100644 index 000000000..a0f99fa38 --- /dev/null +++ b/src/backend/langflow/interface/custom/component.py @@ -0,0 +1,53 @@ + +from pydantic import BaseModel +from fastapi import HTTPException + +from langflow.utils import validate +from langflow.interface.custom.code_parser import CodeParser + + +class ComponentCodeNullError(HTTPException): + pass + + +class ComponentFunctionEntrypointNameNullError(HTTPException): + pass + + +class Component(BaseModel): + ERROR_CODE_NULL = "Python code must be provided." 
+ ERROR_FUNCTION_ENTRYPOINT_NAME_NULL = ( + "The name of the entrypoint function must be provided." + ) + + code: str + function_entrypoint_name = "build" + field_config: dict = {} + + def __init__(self, **data): + super().__init__(**data) + + def get_code_tree(self, code: str): + parser = CodeParser(code) + return parser.parse_code() + + def get_function(self): + if not self.code: + raise ComponentCodeNullError( + status_code=400, + detail={"error": self.ERROR_CODE_NULL, "traceback": ""}, + ) + + if not self.function_entrypoint_name: + raise ComponentFunctionEntrypointNameNullError( + status_code=400, + detail={ + "error": self.ERROR_FUNCTION_ENTRYPOINT_NAME_NULL, + "traceback": "", + }, + ) + + return validate.create_function(self.code, self.function_entrypoint_name) + + def build(self): + raise NotImplementedError diff --git a/src/backend/langflow/interface/custom/custom.py b/src/backend/langflow/interface/custom/custom.py deleted file mode 100644 index 6d46c5d18..000000000 --- a/src/backend/langflow/interface/custom/custom.py +++ /dev/null @@ -1,220 +0,0 @@ -import re -import ast -import traceback -from typing import Callable, Optional -from fastapi import HTTPException -from langflow.interface.custom.constants import LANGCHAIN_BASE_TYPES - -from langflow.utils import validate -from pydantic import BaseModel - - -class CustomComponent(BaseModel): - field_config: dict = {} - code: str - function: Optional[Callable] = None - function_entrypoint_name = "build" - return_type_valid_list = list(LANGCHAIN_BASE_TYPES.keys()) - class_template = { - "imports": [], - "class": {"inherited_classes": "", "name": "", "init": "", "attributes": {}}, - "functions": [], - } - - def __init__(self, **data): - super().__init__(**data) - - def _handle_import(self, node): - for alias in node.names: - module_name = getattr(node, "module", None) - self.class_template["imports"].append( - f"{module_name}.{alias.name}" if module_name else alias.name - ) - - def _handle_class(self, node): 
- self.class_template["class"].update( - { - "name": node.name, - "inherited_classes": [ast.unparse(base) for base in node.bases], - } - ) - - attributes = {} # To store the attributes and their values - - for inner_node in node.body: - if isinstance(inner_node, ast.Assign): # An assignment - for target in inner_node.targets: # Targets of the assignment - if isinstance(target, ast.Name): # A simple variable - # Add the attribute and its value to the dictionary - attributes[target.id] = ast.unparse(inner_node.value) - elif isinstance(inner_node, ast.AnnAssign): # An annotated assignment - if isinstance(inner_node.target, ast.Name) and inner_node.value: - attributes[inner_node.target.id] = ast.unparse(inner_node.value) - - elif isinstance(inner_node, ast.FunctionDef): - self._handle_function(inner_node) - - # You can add these attributes to your class_template if you want - self.class_template["class"]["attributes"] = attributes - - def _handle_function(self, node): - function_name = node.name - function_args_str = ast.unparse(node.args) - function_args = function_args_str.split(", ") if function_args_str else [] - - return_type = ast.unparse(node.returns) if node.returns else "None" - - function_data = { - "name": function_name, - "arguments": function_args, - "return_type": return_type, - } - - if function_name == "__init__": - self.class_template["class"]["init"] = ( - function_args_str.split(", ") if function_args_str else [] - ) - else: - self.class_template["functions"].append(function_data) - - def _split_string(self, text): - """ - Split a string by ':' or '=' and append None until the resulting list has 3 items. - - Parameters: - text (str): The string to be split. - - Returns: - list: A list of strings resulting from the split operation, - padded with None until its length is 3. 
- """ - items = [item.strip() for item in re.split(r"[:=]", text) if item.strip()] - while len(items) < 3: - items.append(None) - - return items - - def transform_list(self, input_list): - """ - Transform a list of strings by splitting each string and padding with None. - - Parameters: - input_list (list): The list of strings to be transformed. - - Returns: - list: A list of lists, each containing the result of the split operation. - """ - return [self._split_string(item) for item in input_list] - - def extract_class_info(self): - try: - module = ast.parse(self.code) - except SyntaxError as err: - raise HTTPException( - status_code=400, - detail={"error": err.msg, "traceback": traceback.format_exc()}, - ) from err - - for node in module.body: - if isinstance(node, (ast.Import, ast.ImportFrom)): - self._handle_import(node) - elif isinstance(node, ast.ClassDef): - self._handle_class(node) - - return self.class_template - - def get_entrypoint_function_args_and_return_type(self): - data = self.extract_class_info() - attributes = data.get("class", {}).get("attributes", {}) - functions = data.get("functions", []) - template_config = self._build_template_config(attributes) - - if build_function := next( - (f for f in functions if f["name"] == self.function_entrypoint_name), - None, - ): - function_args = build_function.get("arguments", None) - function_args = self.transform_list(function_args) - - return_type = build_function.get("return_type", None) - else: - function_args = None - return_type = None - - return function_args, return_type, template_config - - def _build_template_config(self, attributes): - template_config = {} - if "field_config" in attributes: - template_config["field_config"] = ast.literal_eval( - attributes["field_config"] - ) - if "display_name" in attributes: - template_config["display_name"] = ast.literal_eval( - attributes["display_name"] - ) - if "description" in attributes: - template_config["description"] = 
ast.literal_eval(attributes["description"]) - - return template_config - - def _class_template_validation(self, code: dict): - class_name = code.get("class", {}).get("name", None) - if not class_name: # this will also check for None, empty string, etc. - raise HTTPException( - status_code=400, - detail={ - "error": "The main class must have a valid name.", - "traceback": "", - }, - ) - - functions = code.get("functions", []) - build_function = next( - (f for f in functions if f["name"] == self.function_entrypoint_name), - None, - ) - - if not build_function: - raise HTTPException( - status_code=400, - detail={ - "error": "Invalid entrypoint function name", - "traceback": ( - f"There needs to be at least one entrypoint function named '{self.function_entrypoint_name}'" - f" and it needs to return one of the types from this list {str(self.return_type_valid_list)}.", - ), - }, - ) - - return_type = build_function.get("return_type") - if return_type not in self.return_type_valid_list: - raise HTTPException( - status_code=400, - detail={ - "error": "Invalid entrypoint function return", - "traceback": ( - f"The entrypoint function return '{return_type}' needs to be an item " - f"from this list {str(self.return_type_valid_list)}." 
- ), - }, - ) - - return True - - def get_function(self): - return validate.create_function(self.code, self.function_entrypoint_name) - - def build(self): - raise NotImplementedError - - @property - def data(self): - return self.extract_class_info() - - def is_check_valid(self): - return self._class_template_validation(self.data) - - @property - def args_and_return_type(self): - return self.get_entrypoint_function_args_and_return_type() diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py new file mode 100644 index 000000000..5a9ddecbb --- /dev/null +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -0,0 +1,119 @@ +import ast +from typing import Callable, Optional +from fastapi import HTTPException +from langflow.interface.custom.constants import LANGCHAIN_BASE_TYPES +from langflow.interface.custom.component import Component + +from langflow.utils import validate + + +class CustomComponent(Component): + code: str + field_config: dict = {} + code_class_base_inheritance = "CustomComponent" + function_entrypoint_name = "build" + function: Optional[Callable] = None + return_type_valid_list = list(LANGCHAIN_BASE_TYPES.keys()) + + def __init__(self, **data): + super().__init__(**data) + + def _class_template_validation(self, code: str) -> bool: + if not code: + raise HTTPException( + status_code=400, + detail={ + "error": self.ERROR_CODE_NULL, + "traceback": "", + }, + ) + + # TODO: build logic + return True + + def is_check_valid(self) -> bool: + return self._class_template_validation(self.code) + + def get_code_tree(self, code: str): + return super().get_code_tree(code) + + @property + def get_function_entrypoint_args(self) -> str: + tree = self.get_code_tree(self.code) + + component_classes = [ + cls + for cls in tree["classes"] + if self.code_class_base_inheritance in cls["bases"] + ] + if not component_classes: + return "" + + # Assume the first Component class is the one 
we're interested in + component_class = component_classes[0] + build_methods = [ + method + for method in component_class["methods"] + if method["name"] == self.function_entrypoint_name + ] + + if not build_methods: + return "" + + build_method = build_methods[0] + + return build_method["args"] + + @property + def get_function_entrypoint_return_type(self) -> str: + tree = self.get_code_tree(self.code) + + component_classes = [ + cls + for cls in tree["classes"] + if self.code_class_base_inheritance in cls["bases"] + ] + if not component_classes: + return "" + + # Assume the first Component class is the one we're interested in + component_class = component_classes[0] + build_methods = [ + method + for method in component_class["methods"] + if method["name"] == self.function_entrypoint_name + ] + + if not build_methods: + return "" + + build_method = build_methods[0] + + return build_method["return_type"] + + @property + def get_template_config(self) -> dict: + extra_attributes = {} # self.get_extra_attributes + template_config = {} + + if "field_config" in extra_attributes: + template_config["field_config"] = ast.literal_eval( + extra_attributes["field_config"] + ) + if "display_name" in extra_attributes: + template_config["display_name"] = ast.literal_eval( + extra_attributes["display_name"] + ) + if "description" in extra_attributes: + template_config["description"] = ast.literal_eval( + extra_attributes["description"] + ) + + return template_config + + @property + def get_function(self): + return validate.create_function(self.code, self.function_entrypoint_name) + + def build(self): + raise NotImplementedError diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index 04ee8aba1..0acb2cff5 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/langflow/interface/importing/utils.py @@ -9,7 +9,7 @@ from langchain.base_language import BaseLanguageModel from langchain.chains.base 
import Chain from langchain.chat_models.base import BaseChatModel from langchain.tools import BaseTool -from langflow.interface.custom.custom import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent from langflow.utils import validate from langflow.interface.wrappers.base import wrapper_creator @@ -61,7 +61,9 @@ def import_by_type(_type: str, name: str) -> Any: def import_custom_component(custom_component: str) -> CustomComponent: """Import custom component from custom component name""" - return import_class(f"langflow.interface.custom.custom.{custom_component}") + return import_class( + f"langflow.interface.custom.custom_component.{custom_component}" + ) def import_output_parser(output_parser: str) -> Any: @@ -183,5 +185,4 @@ def get_function(code): def get_function_custom(code): class_name = validate.extract_class_name(code) - return validate.create_class(code, class_name) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 41343089c..892b70260 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -14,7 +14,7 @@ from langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator from langflow.interface.output_parsers.base import output_parser_creator from langflow.interface.custom.base import custom_component_creator -from langflow.interface.custom.custom import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent from langflow.template.field.base import TemplateField from langflow.template.frontend_node.tools import CustomComponentNode @@ -92,9 +92,6 @@ def add_new_custom_field( field_type = field_config.pop("field_type", field_type) field_type = process_type(field_type) - if field_value is not None: - field_value = field_value.replace("'", "").replace('"', "") - if "name" in field_config: warnings.warn( "The 'name' key in 
field_config is used to build the object and can't be changed." @@ -158,29 +155,27 @@ def extract_type_from_optional(field_type): return match[1] if match else None -def build_langchain_template_custom_component(extractor: CustomComponent): +def build_langchain_template_custom_component(custom_component: CustomComponent): # Build base "CustomComponent" template - frontend_node = CustomComponentNode().to_dict().get(type(extractor).__name__) + frontend_node = CustomComponentNode().to_dict().get(type(custom_component).__name__) - function_args, return_type, template_config = extractor.args_and_return_type - - if "display_name" in template_config and frontend_node is not None: - frontend_node["display_name"] = template_config["display_name"] - if "description" in template_config and frontend_node is not None: - frontend_node["description"] = template_config["description"] - raw_code = extractor.code - field_config = template_config.get("field_config", {}) + function_args = custom_component.get_function_entrypoint_args + return_type = custom_component.get_function_entrypoint_return_type + # template_config = custom_component.get_template_config if function_args is not None: # Add extra fields for extra_field in function_args: - field_required = True - field_name, field_type, field_value = extra_field - - if not field_type: - field_type = "" + field_name = extra_field.get("name") if "name" in extra_field else "" if field_name != "self": + field_type = extra_field.get("type") if "type" in extra_field else "" + field_value = ( + extra_field.get("default") if "default" in extra_field else "" + ) + field_required = True + field_config = {} + # TODO: Validate type - if is possible to render into frontend if "optional" in field_type.lower(): field_type = extract_type_from_optional(field_type) @@ -189,17 +184,16 @@ def build_langchain_template_custom_component(extractor: CustomComponent): if not field_type: field_type = "str" - config = field_config.get(field_name, {}) 
frontend_node = add_new_custom_field( frontend_node, field_name, field_type, field_value, field_required, - config, + field_config, ) - frontend_node = add_code_field(frontend_node, raw_code) + frontend_node = add_code_field(frontend_node, custom_component.code) # Get base classes from "return_type" and add to template.base_classes try: @@ -214,8 +208,10 @@ def build_langchain_template_custom_component(extractor: CustomComponent): "traceback": traceback.format_exc(), }, ) + return_type_instance = LANGCHAIN_BASE_TYPES.get(return_type) base_classes = get_base_classes(return_type_instance) + except (KeyError, AttributeError) as err: raise HTTPException( status_code=400, From 75452fb758101f21259865ae48fed2e0f733f116 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Fri, 14 Jul 2023 05:08:12 +0100 Subject: [PATCH 082/221] =?UTF-8?q?=F0=9F=94=A7=20refactor(custom=5Fcompon?= =?UTF-8?q?ent.py):=20make=20'code'=20attribute=20optional=20in=20CustomCo?= =?UTF-8?q?mponent=20class=20to=20allow=20instances=20without=20explicit?= =?UTF-8?q?=20code=20value?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/custom_component.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index 5a9ddecbb..8fb5af62c 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -8,7 +8,7 @@ from langflow.utils import validate class CustomComponent(Component): - code: str + code: Optional[str] field_config: dict = {} code_class_base_inheritance = "CustomComponent" function_entrypoint_name = "build" From e90fd26984dbc6ebb28085ad5abcffec5072723d Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Fri, 14 Jul 2023 05:12:05 +0100 Subject: [PATCH 083/221] =?UTF-8?q?=F0=9F=94=A5=20remove(component.py):=20?= 
=?UTF-8?q?unnecessary=20blank=20line=20at=20the=20start=20of=20the=20file?= =?UTF-8?q?=20for=20cleaner=20code=20structure?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/component.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/backend/langflow/interface/custom/component.py b/src/backend/langflow/interface/custom/component.py index a0f99fa38..f6ef62802 100644 --- a/src/backend/langflow/interface/custom/component.py +++ b/src/backend/langflow/interface/custom/component.py @@ -1,4 +1,3 @@ - from pydantic import BaseModel from fastapi import HTTPException From 10c0b3871c60db216b09b1d001ac5039989b2ced Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 14 Jul 2023 13:10:06 -0300 Subject: [PATCH 084/221] =?UTF-8?q?=E2=9C=A8=20feat(conftest.py):=20add=20?= =?UTF-8?q?custom=5Fchain=20fixture=20to=20provide=20a=20custom=20chain=20?= =?UTF-8?q?for=20testing=20purposes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/conftest.py | 119 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 119 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index f893533ac..ca0bb1dc0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -116,3 +116,122 @@ def client_fixture(session: Session): # yield TestClient(app) app.dependency_overrides.clear() # + + +@pytest.fixture +def custom_chain(): + return '''from __future__ import annotations + + from typing import Any, Dict, List, Optional + + from pydantic import Extra + + from langchain.schema import BaseLanguageModel, Document + from langchain.callbacks.manager import ( + AsyncCallbackManagerForChainRun, + CallbackManagerForChainRun, + ) + from langchain.chains.base import Chain + from langchain.prompts import StringPromptTemplate + from langflow.interface.custom.base import CustomComponent + + class MyCustomChain(Chain): + """ + An example of a custom 
chain. + """ + + prompt: StringPromptTemplate + """Prompt object to use.""" + llm: BaseLanguageModel + output_key: str = "text" #: :meta private: + + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.forbid + arbitrary_types_allowed = True + + @property + def input_keys(self) -> List[str]: + """Will be whatever keys the prompt expects. + + :meta private: + """ + return self.prompt.input_variables + + @property + def output_keys(self) -> List[str]: + """Will always return text key. + + :meta private: + """ + return [self.output_key] + + def _call( + self, + inputs: Dict[str, Any], + run_manager: Optional[CallbackManagerForChainRun] = None, + ) -> Dict[str, str]: + # Your custom chain logic goes here + # This is just an example that mimics LLMChain + prompt_value = self.prompt.format_prompt(**inputs) + + # Whenever you call a language model, or another chain, you should pass + # a callback manager to it. This allows the inner run to be tracked by + # any callbacks that are registered on the outer run. + # You can always obtain a callback manager for this by calling + # `run_manager.get_child()` as shown below. + response = self.llm.generate_prompt( + [prompt_value], + callbacks=run_manager.get_child() if run_manager else None, + ) + + # If you want to log something about this run, you can do so by calling + # methods on the `run_manager`, as shown below. This will trigger any + # callbacks that are registered for that event. 
+ if run_manager: + run_manager.on_text("Log something about this run") + + return {self.output_key: response.generations[0][0].text} + + async def _acall( + self, + inputs: Dict[str, Any], + run_manager: Optional[AsyncCallbackManagerForChainRun] = None, + ) -> Dict[str, str]: + # Your custom chain logic goes here + # This is just an example that mimics LLMChain + prompt_value = self.prompt.format_prompt(**inputs) + + # Whenever you call a language model, or another chain, you should pass + # a callback manager to it. This allows the inner run to be tracked by + # any callbacks that are registered on the outer run. + # You can always obtain a callback manager for this by calling + # `run_manager.get_child()` as shown below. + response = await self.llm.agenerate_prompt( + [prompt_value], + callbacks=run_manager.get_child() if run_manager else None, + ) + + # If you want to log something about this run, you can do so by calling + # methods on the `run_manager`, as shown below. This will trigger any + # callbacks that are registered for that event. 
+ if run_manager: + await run_manager.on_text("Log something about this run") + + return {self.output_key: response.generations[0][0].text} + + @property + def _chain_type(self) -> str: + return "my_custom_chain" + + class CustomChain(CustomComponent): + display_name: str = "Custom Chain" + field_config = { + "prompt": {"field_type": "prompt"}, + "llm": {"field_type": "BaseLanguageModel"}, + } + + def build(self, prompt, llm, input: str) -> Document: + chain = MyCustomChain(prompt=prompt, llm=llm) + return chain(input)''' From 99ef7c728dc997aa578f70ba9ddb5346680c403b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 14 Jul 2023 14:05:57 -0300 Subject: [PATCH 085/221] =?UTF-8?q?=F0=9F=94=A8=20refactor(conftest.py):?= =?UTF-8?q?=20reformat=20code=20for=20better=20readability=20and=20maintai?= =?UTF-8?q?nability=20=E2=9C=A8=20feat(conftest.py):=20add=20MyCustomChain?= =?UTF-8?q?=20class=20as=20an=20example=20of=20a=20custom=20chain=20?= =?UTF-8?q?=E2=9C=A8=20feat(conftest.py):=20add=20CustomChain=20class=20as?= =?UTF-8?q?=20a=20custom=20component=20for=20building=20a=20document=20?= =?UTF-8?q?=E2=9C=A8=20feat(conftest.py):=20add=20CSVLoaderComponent=20cla?= =?UTF-8?q?ss=20as=20a=20custom=20component=20for=20loading=20CSV=20files?= =?UTF-8?q?=20and=20converting=20rows=20to=20documents?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/conftest.py | 210 ++++++++++++++++++++++++++-------------------- 1 file changed, 120 insertions(+), 90 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ca0bb1dc0..328a168ad 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -122,116 +122,146 @@ def client_fixture(session: Session): # def custom_chain(): return '''from __future__ import annotations - from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional - from pydantic import Extra +from pydantic import Extra - from langchain.schema import 
BaseLanguageModel, Document - from langchain.callbacks.manager import ( - AsyncCallbackManagerForChainRun, - CallbackManagerForChainRun, - ) - from langchain.chains.base import Chain - from langchain.prompts import StringPromptTemplate - from langflow.interface.custom.base import CustomComponent +from langchain.schema import BaseLanguageModel, Document +from langchain.callbacks.manager import ( + AsyncCallbackManagerForChainRun, + CallbackManagerForChainRun, +) +from langchain.chains.base import Chain +from langchain.prompts import StringPromptTemplate +from langflow.interface.custom.base import CustomComponent - class MyCustomChain(Chain): +class MyCustomChain(Chain): + """ + An example of a custom chain. + """ + + prompt: StringPromptTemplate + """Prompt object to use.""" + llm: BaseLanguageModel + output_key: str = "text" #: :meta private: + + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.forbid + arbitrary_types_allowed = True + + @property + def input_keys(self) -> List[str]: + """Will be whatever keys the prompt expects. + + :meta private: """ - An example of a custom chain. + return self.prompt.input_variables + + @property + def output_keys(self) -> List[str]: + """Will always return text key. + + :meta private: """ + return [self.output_key] - prompt: StringPromptTemplate - """Prompt object to use.""" - llm: BaseLanguageModel - output_key: str = "text" #: :meta private: + def _call( + self, + inputs: Dict[str, Any], + run_manager: Optional[CallbackManagerForChainRun] = None, + ) -> Dict[str, str]: + # Your custom chain logic goes here + # This is just an example that mimics LLMChain + prompt_value = self.prompt.format_prompt(**inputs) - class Config: - """Configuration for this pydantic object.""" + # Whenever you call a language model, or another chain, you should pass + # a callback manager to it. This allows the inner run to be tracked by + # any callbacks that are registered on the outer run. 
+ # You can always obtain a callback manager for this by calling + # `run_manager.get_child()` as shown below. + response = self.llm.generate_prompt( + [prompt_value], + callbacks=run_manager.get_child() if run_manager else None, + ) - extra = Extra.forbid - arbitrary_types_allowed = True + # If you want to log something about this run, you can do so by calling + # methods on the `run_manager`, as shown below. This will trigger any + # callbacks that are registered for that event. + if run_manager: + run_manager.on_text("Log something about this run") - @property - def input_keys(self) -> List[str]: - """Will be whatever keys the prompt expects. + return {self.output_key: response.generations[0][0].text} - :meta private: - """ - return self.prompt.input_variables + async def _acall( + self, + inputs: Dict[str, Any], + run_manager: Optional[AsyncCallbackManagerForChainRun] = None, + ) -> Dict[str, str]: + # Your custom chain logic goes here + # This is just an example that mimics LLMChain + prompt_value = self.prompt.format_prompt(**inputs) - @property - def output_keys(self) -> List[str]: - """Will always return text key. + # Whenever you call a language model, or another chain, you should pass + # a callback manager to it. This allows the inner run to be tracked by + # any callbacks that are registered on the outer run. + # You can always obtain a callback manager for this by calling + # `run_manager.get_child()` as shown below. + response = await self.llm.agenerate_prompt( + [prompt_value], + callbacks=run_manager.get_child() if run_manager else None, + ) - :meta private: - """ - return [self.output_key] + # If you want to log something about this run, you can do so by calling + # methods on the `run_manager`, as shown below. This will trigger any + # callbacks that are registered for that event. 
+ if run_manager: + await run_manager.on_text("Log something about this run") - def _call( - self, - inputs: Dict[str, Any], - run_manager: Optional[CallbackManagerForChainRun] = None, - ) -> Dict[str, str]: - # Your custom chain logic goes here - # This is just an example that mimics LLMChain - prompt_value = self.prompt.format_prompt(**inputs) + return {self.output_key: response.generations[0][0].text} - # Whenever you call a language model, or another chain, you should pass - # a callback manager to it. This allows the inner run to be tracked by - # any callbacks that are registered on the outer run. - # You can always obtain a callback manager for this by calling - # `run_manager.get_child()` as shown below. - response = self.llm.generate_prompt( - [prompt_value], - callbacks=run_manager.get_child() if run_manager else None, - ) + @property + def _chain_type(self) -> str: + return "my_custom_chain" - # If you want to log something about this run, you can do so by calling - # methods on the `run_manager`, as shown below. This will trigger any - # callbacks that are registered for that event. - if run_manager: - run_manager.on_text("Log something about this run") +class CustomChain(CustomComponent): + display_name: str = "Custom Chain" + field_config = { + "prompt": {"field_type": "prompt"}, + "llm": {"field_type": "BaseLanguageModel"}, + } - return {self.output_key: response.generations[0][0].text} + def build(self, prompt, llm, input: str) -> Document: + chain = MyCustomChain(prompt=prompt, llm=llm) + return chain(input)''' - async def _acall( - self, - inputs: Dict[str, Any], - run_manager: Optional[AsyncCallbackManagerForChainRun] = None, - ) -> Dict[str, str]: - # Your custom chain logic goes here - # This is just an example that mimics LLMChain - prompt_value = self.prompt.format_prompt(**inputs) - # Whenever you call a language model, or another chain, you should pass - # a callback manager to it. 
This allows the inner run to be tracked by - # any callbacks that are registered on the outer run. - # You can always obtain a callback manager for this by calling - # `run_manager.get_child()` as shown below. - response = await self.llm.agenerate_prompt( - [prompt_value], - callbacks=run_manager.get_child() if run_manager else None, - ) +@pytest.fixture +def data_processing(): + return """import pandas as pd +from langchain.schema import Document +from langflow.interface.custom.base import CustomComponent - # If you want to log something about this run, you can do so by calling - # methods on the `run_manager`, as shown below. This will trigger any - # callbacks that are registered for that event. - if run_manager: - await run_manager.on_text("Log something about this run") +class CSVLoaderComponent(CustomComponent): + display_name: str = "CSV Loader" + field_config = { + "filename": {"field_type": "str", "required": True}, + "column_name": {"field_type": "str", "required": True}, + } - return {self.output_key: response.generations[0][0].text} + def build(self, filename: str, column_name: str) -> List[Document]: + # Load the CSV file + df = pd.read_csv(filename) - @property - def _chain_type(self) -> str: - return "my_custom_chain" + # Verify the column exists + if column_name not in df.columns: + raise ValueError(f"Column '{column_name}' not found in the CSV file") - class CustomChain(CustomComponent): - display_name: str = "Custom Chain" - field_config = { - "prompt": {"field_type": "prompt"}, - "llm": {"field_type": "BaseLanguageModel"}, - } + # Convert each row of the specified column to a document object + documents = [] + for content in df[column_name]: + metadata = {"filename": filename} + documents.append(Document(page_content=str(content), metadata=metadata)) - def build(self, prompt, llm, input: str) -> Document: - chain = MyCustomChain(prompt=prompt, llm=llm) - return chain(input)''' + return documents""" From 49029d6cdab54a841618e34460628af71046f8da 
Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 14 Jul 2023 14:13:27 -0300 Subject: [PATCH 086/221] =?UTF-8?q?=F0=9F=94=A7=20chore(base.py):=20refact?= =?UTF-8?q?or=20TemplateFieldCreator=20class=20to=20improve=20code=20reada?= =?UTF-8?q?bility=20and=20maintainability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/template/field/base.py | 36 ++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/template/field/base.py b/src/backend/langflow/template/field/base.py index dcdb3ea92..31c68d094 100644 --- a/src/backend/langflow/template/field/base.py +++ b/src/backend/langflow/template/field/base.py @@ -6,24 +6,58 @@ from pydantic import BaseModel class TemplateFieldCreator(BaseModel, ABC): field_type: str = "str" + """The type of field this is. Default is a string.""" + required: bool = False + """Specifies if the field is required. Defaults to False.""" + placeholder: str = "" + """A placeholder string for the field. Default is an empty string.""" + is_list: bool = False + """Defines if the field is a list. Default is False.""" + show: bool = True + """Should the field be shown. Defaults to True.""" + multiline: bool = False + """Defines if the field will allow the user to open a text editor. Default is False.""" + value: Any = None + """The value of the field. Default is None.""" + suffixes: list[str] = [] - fileTypes: list[str] = [] + """List of suffixes for a file field. Default is an empty list.""" + file_types: list[str] = [] + """List of file types associated with the field. Default is an empty list. (duplicate)""" + file_path: Union[str, None] = None + """The file path of the field if it is a file. Defaults to None.""" + password: bool = False + """Specifies if the field is a password. Defaults to False.""" + options: list[str] = [] + """List of options for the field. Only used when is_list=True. 
Default is an empty list.""" + name: str = "" + """Name of the field. Default is an empty string.""" + display_name: Optional[str] = None + """Display name of the field. Defaults to None.""" + advanced: bool = False + """Specifies if the field will an advanced parameter (hidden). Defaults to False.""" + input_types: list[str] = [] + """List of input types for the handle when the field has more than one type. Default is an empty list.""" + dynamic: bool = False + """Specifies if the field is dynamic. Defaults to False.""" + info: Optional[str] = "" + """Additional information about the field to be shown in the tooltip. Defaults to an empty string.""" def to_dict(self): result = self.dict() From 1863d463d0deab90f8418d0f84be31ba21d7e13c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 14 Jul 2023 14:20:09 -0300 Subject: [PATCH 087/221] =?UTF-8?q?=F0=9F=94=A8=20refactor(conftest.py):?= =?UTF-8?q?=20change=20return=20type=20of=20`build`=20method=20in=20`CSVLo?= =?UTF-8?q?aderComponent`=20from=20List[Document]=20to=20Document=20to=20m?= =?UTF-8?q?atch=20the=20actual=20return=20type=20=F0=9F=94=A7=20chore(conf?= =?UTF-8?q?test.py):=20add=20new=20fixture=20`filter=5Fdocs`=20for=20testi?= =?UTF-8?q?ng=20`DocumentFilterByLengthComponent`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/conftest.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 328a168ad..79704e3b6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -250,7 +250,7 @@ class CSVLoaderComponent(CustomComponent): "column_name": {"field_type": "str", "required": True}, } - def build(self, filename: str, column_name: str) -> List[Document]: + def build(self, filename: str, column_name: str) -> Document: # Load the CSV file df = pd.read_csv(filename) @@ -265,3 +265,23 @@ class CSVLoaderComponent(CustomComponent): 
documents.append(Document(page_content=str(content), metadata=metadata)) return documents""" + + +@pytest.fixture +def filter_docs(): + return """from langchain.schema import Document +from langflow.interface.custom.base import CustomComponent +from typing import List + +class DocumentFilterByLengthComponent(CustomComponent): + display_name: str = "Document Filter By Length" + field_config = { + "documents": {"field_type": "Document", "required": True}, + "max_length": {"field_type": "int", "required": True}, + } + + def build(self, documents: List[Document], max_length: int) -> List[Document]: + # Filter the documents by length + filtered_documents = [doc for doc in documents if len(doc.page_content) <= max_length] + + return filtered_documents""" From cc2dbe149063341f10d9f5964b39b912635ac1d2 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Fri, 14 Jul 2023 18:29:54 +0100 Subject: [PATCH 088/221] =?UTF-8?q?=F0=9F=94=A7=20refactor(constants.py,?= =?UTF-8?q?=20conftest.py):=20improve=20code=20structure=20and=20readabili?= =?UTF-8?q?ty?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🔨 refactor(constants.py): make YourComponent inherit from CustomComponent for better code reuse and structure 🔨 refactor(conftest.py): restructure custom_chain fixture for better readability and maintainability --- .../langflow/interface/custom/constants.py | 16 +- tests/conftest.py | 197 +++++++++--------- 2 files changed, 106 insertions(+), 107 deletions(-) diff --git a/src/backend/langflow/interface/custom/constants.py b/src/backend/langflow/interface/custom/constants.py index b99dd7bfb..c07d3a080 100644 --- a/src/backend/langflow/interface/custom/constants.py +++ b/src/backend/langflow/interface/custom/constants.py @@ -25,17 +25,19 @@ LANGCHAIN_BASE_TYPES = { DEFAULT_CUSTOM_COMPONENT_CODE = """ from langflow import Prompt +from langflow.interface.custom.custom_component import CustomComponent + from langchain.llms.base import BaseLLM 
from langchain.chains import LLMChain -from langflow.interface.custom import CustomComponent from langchain import PromptTemplate from langchain.schema import Document + import requests -class YourComponent: - display_name: str = "Your Component" - description: str = "Your description" - field_config = { "url": { "multiline": True, "required": True } } +class YourComponent(CustomComponent): + #display_name: str = "Your Component" + #description: str = "Your description" + #field_config = { "url": { "multiline": True, "required": True } } def build(self, url: str, llm: BaseLLM, template: Prompt) -> Document: response = requests.get(url) @@ -44,7 +46,3 @@ class YourComponent: result = chain.run(response.text[:300]) return Document(page_content=str(result)) """ - - -# Create a new class that can be used as a type -# that returns type "prompt" if we get a certain param diff --git a/tests/conftest.py b/tests/conftest.py index ca0bb1dc0..3cabc0520 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -120,118 +120,119 @@ def client_fixture(session: Session): # @pytest.fixture def custom_chain(): - return '''from __future__ import annotations + return ''' +from __future__ import annotations +from typing import Any, Dict, List, Optional - from typing import Any, Dict, List, Optional +from pydantic import Extra - from pydantic import Extra +from langchain.schema import BaseLanguageModel, Document +from langchain.callbacks.manager import ( + AsyncCallbackManagerForChainRun, + CallbackManagerForChainRun, +) +from langchain.chains.base import Chain +from langchain.prompts import StringPromptTemplate +from langflow.interface.custom.base import CustomComponent - from langchain.schema import BaseLanguageModel, Document - from langchain.callbacks.manager import ( - AsyncCallbackManagerForChainRun, - CallbackManagerForChainRun, - ) - from langchain.chains.base import Chain - from langchain.prompts import StringPromptTemplate - from langflow.interface.custom.base import 
CustomComponent +class MyCustomChain(Chain): + """ + An example of a custom chain. + """ - class MyCustomChain(Chain): + prompt: StringPromptTemplate + """Prompt object to use.""" + llm: BaseLanguageModel + output_key: str = "text" #: :meta private: + + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.forbid + arbitrary_types_allowed = True + + @property + def input_keys(self) -> List[str]: + """Will be whatever keys the prompt expects. + + :meta private: """ - An example of a custom chain. + return self.prompt.input_variables + + @property + def output_keys(self) -> List[str]: + """Will always return text key. + + :meta private: """ + return [self.output_key] - prompt: StringPromptTemplate - """Prompt object to use.""" - llm: BaseLanguageModel - output_key: str = "text" #: :meta private: + def _call( + self, + inputs: Dict[str, Any], + run_manager: Optional[CallbackManagerForChainRun] = None, + ) -> Dict[str, str]: + # Your custom chain logic goes here + # This is just an example that mimics LLMChain + prompt_value = self.prompt.format_prompt(**inputs) - class Config: - """Configuration for this pydantic object.""" + # Whenever you call a language model, or another chain, you should pass + # a callback manager to it. This allows the inner run to be tracked by + # any callbacks that are registered on the outer run. + # You can always obtain a callback manager for this by calling + # `run_manager.get_child()` as shown below. + response = self.llm.generate_prompt( + [prompt_value], + callbacks=run_manager.get_child() if run_manager else None, + ) - extra = Extra.forbid - arbitrary_types_allowed = True + # If you want to log something about this run, you can do so by calling + # methods on the `run_manager`, as shown below. This will trigger any + # callbacks that are registered for that event. 
+ if run_manager: + run_manager.on_text("Log something about this run") - @property - def input_keys(self) -> List[str]: - """Will be whatever keys the prompt expects. + return {self.output_key: response.generations[0][0].text} - :meta private: - """ - return self.prompt.input_variables + async def _acall( + self, + inputs: Dict[str, Any], + run_manager: Optional[AsyncCallbackManagerForChainRun] = None, + ) -> Dict[str, str]: + # Your custom chain logic goes here + # This is just an example that mimics LLMChain + prompt_value = self.prompt.format_prompt(**inputs) - @property - def output_keys(self) -> List[str]: - """Will always return text key. + # Whenever you call a language model, or another chain, you should pass + # a callback manager to it. This allows the inner run to be tracked by + # any callbacks that are registered on the outer run. + # You can always obtain a callback manager for this by calling + # `run_manager.get_child()` as shown below. + response = await self.llm.agenerate_prompt( + [prompt_value], + callbacks=run_manager.get_child() if run_manager else None, + ) - :meta private: - """ - return [self.output_key] + # If you want to log something about this run, you can do so by calling + # methods on the `run_manager`, as shown below. This will trigger any + # callbacks that are registered for that event. + if run_manager: + await run_manager.on_text("Log something about this run") - def _call( - self, - inputs: Dict[str, Any], - run_manager: Optional[CallbackManagerForChainRun] = None, - ) -> Dict[str, str]: - # Your custom chain logic goes here - # This is just an example that mimics LLMChain - prompt_value = self.prompt.format_prompt(**inputs) + return {self.output_key: response.generations[0][0].text} - # Whenever you call a language model, or another chain, you should pass - # a callback manager to it. This allows the inner run to be tracked by - # any callbacks that are registered on the outer run. 
- # You can always obtain a callback manager for this by calling - # `run_manager.get_child()` as shown below. - response = self.llm.generate_prompt( - [prompt_value], - callbacks=run_manager.get_child() if run_manager else None, - ) + @property + def _chain_type(self) -> str: + return "my_custom_chain" - # If you want to log something about this run, you can do so by calling - # methods on the `run_manager`, as shown below. This will trigger any - # callbacks that are registered for that event. - if run_manager: - run_manager.on_text("Log something about this run") +class CustomChain(CustomComponent): + display_name: str = "Custom Chain" + field_config = { + "prompt": {"field_type": "prompt"}, + "llm": {"field_type": "BaseLanguageModel"}, + } - return {self.output_key: response.generations[0][0].text} - - async def _acall( - self, - inputs: Dict[str, Any], - run_manager: Optional[AsyncCallbackManagerForChainRun] = None, - ) -> Dict[str, str]: - # Your custom chain logic goes here - # This is just an example that mimics LLMChain - prompt_value = self.prompt.format_prompt(**inputs) - - # Whenever you call a language model, or another chain, you should pass - # a callback manager to it. This allows the inner run to be tracked by - # any callbacks that are registered on the outer run. - # You can always obtain a callback manager for this by calling - # `run_manager.get_child()` as shown below. - response = await self.llm.agenerate_prompt( - [prompt_value], - callbacks=run_manager.get_child() if run_manager else None, - ) - - # If you want to log something about this run, you can do so by calling - # methods on the `run_manager`, as shown below. This will trigger any - # callbacks that are registered for that event. 
- if run_manager: - await run_manager.on_text("Log something about this run") - - return {self.output_key: response.generations[0][0].text} - - @property - def _chain_type(self) -> str: - return "my_custom_chain" - - class CustomChain(CustomComponent): - display_name: str = "Custom Chain" - field_config = { - "prompt": {"field_type": "prompt"}, - "llm": {"field_type": "BaseLanguageModel"}, - } - - def build(self, prompt, llm, input: str) -> Document: - chain = MyCustomChain(prompt=prompt, llm=llm) - return chain(input)''' + def build(self, prompt: StringPromptTemplate, llm: BaseLanguageModel, input: str) -> Document: + chain = MyCustomChain(prompt=prompt, llm=llm) + return chain(input) +''' From 405191de22e07884cd5665fb9e2cbeecf40b5d9d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 14 Jul 2023 15:04:30 -0300 Subject: [PATCH 089/221] =?UTF-8?q?=E2=9C=A8=20feat(conftest.py):=20add=20?= =?UTF-8?q?fixtures=20for=20GET=20and=20POST=20request=20components=20to?= =?UTF-8?q?=20be=20used=20in=20tests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/conftest.py | 55 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 7a87f908b..ae78483e6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -307,3 +307,58 @@ class DocumentFilterByLengthComponent(CustomComponent): return filtered_documents """ + + +@pytest.fixture +def get_request(): + return """import requests +from typing import Dict, Union +from langchain.schema import Document +from langflow.interface.custom.base import CustomComponent + +class GetRequestComponent(CustomComponent): + display_name: str = "GET Request" + field_config = { + "url": {"field_type": "str", "required": True}, + } + + def build(self, url: str) -> Document: + # Send a GET request to the URL + response = requests.get(url) + + # Raise an exception if the request was not successful + if 
response.status_code != 200: + raise ValueError(f"GET request failed: {response.status_code} status code") + + # Create a document with the response text and the URL as metadata + document = Document(page_content=response.text, metadata={"url": url}) + + return document""" + + +@pytest.fixture +def post_request(): + return """import requests +from typing import Dict, Union +from langchain.schema import Document +from langflow.interface.custom.base import CustomComponent + +class PostRequestComponent(CustomComponent): + display_name: str = "POST Request" + field_config = { + "url": {"field_type": "str", "required": True}, + "data": {"field_type": "dict", "required": True}, + } + + def build(self, url: str, data: Dict[str, Union[str, int]]) -> Document: + # Send a POST request to the URL + response = requests.post(url, data=data) + + # Raise an exception if the request was not successful + if response.status_code != 200: + raise ValueError(f"POST request failed: {response.status_code} status code") + + # Create a document with the response text and the URL and data as metadata + document = Document(page_content=response.text, metadata={"url": url, "data": data}) + + return document""" From 7c37c6d74fc1899a7e3c1bd79c99cce42fa37919 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Fri, 14 Jul 2023 19:19:42 +0100 Subject: [PATCH 090/221] =?UTF-8?q?=F0=9F=94=A7=20refactor(conftest.py):?= =?UTF-8?q?=20remove=20unnecessary=20comments=20for=20cleaner=20code=20?= =?UTF-8?q?=F0=9F=94=A7=20refactor(conftest.py):=20reformat=20code=20for?= =?UTF-8?q?=20better=20readability=20=F0=9F=94=A7=20refactor(conftest.py):?= =?UTF-8?q?=20move=20import=20statements=20to=20new=20lines=20for=20better?= =?UTF-8?q?=20readability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/conftest.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 
ae78483e6..8be738632 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -93,8 +93,8 @@ def json_flow(): return f.read() -@pytest.fixture(name="session") # -def session_fixture(): # +@pytest.fixture(name="session") +def session_fixture(): engine = create_engine( "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool ) @@ -103,19 +103,19 @@ def session_fixture(): # yield session -@pytest.fixture(name="client") # -def client_fixture(session: Session): # - def get_session_override(): # +@pytest.fixture(name="client") +def client_fixture(session: Session): + def get_session_override(): return session from langflow.main import create_app app = create_app() - app.dependency_overrides[get_session] = get_session_override # + app.dependency_overrides[get_session] = get_session_override yield TestClient(app) - app.dependency_overrides.clear() # + app.dependency_overrides.clear() @pytest.fixture @@ -311,7 +311,8 @@ class DocumentFilterByLengthComponent(CustomComponent): @pytest.fixture def get_request(): - return """import requests + return """ +import requests from typing import Dict, Union from langchain.schema import Document from langflow.interface.custom.base import CustomComponent @@ -333,12 +334,14 @@ class GetRequestComponent(CustomComponent): # Create a document with the response text and the URL as metadata document = Document(page_content=response.text, metadata={"url": url}) - return document""" + return document +""" @pytest.fixture def post_request(): - return """import requests + return """ +import requests from typing import Dict, Union from langchain.schema import Document from langflow.interface.custom.base import CustomComponent @@ -361,4 +364,5 @@ class PostRequestComponent(CustomComponent): # Create a document with the response text and the URL and data as metadata document = Document(page_content=response.text, metadata={"url": url, "data": data}) - return document""" + return document +""" From 
cd94c47b0e9844c78fb86eb97ed6ff4719f144cc Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Fri, 14 Jul 2023 18:11:29 -0300 Subject: [PATCH 091/221] fix bug of scroll on ace Editor --- src/frontend/src/modals/codeAreaModal/v2.tsx | 23 +++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/src/frontend/src/modals/codeAreaModal/v2.tsx b/src/frontend/src/modals/codeAreaModal/v2.tsx index 9655945f7..2f61b67c4 100644 --- a/src/frontend/src/modals/codeAreaModal/v2.tsx +++ b/src/frontend/src/modals/codeAreaModal/v2.tsx @@ -37,11 +37,11 @@ export default function CodeAreaModal({ const { dark } = useContext(darkContext); const { setErrorData, setSuccessData } = useContext(alertContext); const [activeTab, setActiveTab] = useState("0"); + const [height, setHeight] = useState(null); const [error, setError] = useState<{ detail: { error: string; traceback: string }; }>(null); const { closePopUp, setCloseEdit } = useContext(PopUpContext); - const ref = useRef(); function setModalOpen(x: boolean) { setOpen(x); if (x === false) { @@ -109,7 +109,23 @@ export default function CodeAreaModal({ }); } } - const tabs = [{ name: "code" }, { name: "errors" }]; + + /// use effect to update ace editor on error to handle right scroll + useEffect(() => { + // Function to be executed after the state changes + const delayedFunction = setTimeout(() => { + if (error?.detail.error !== undefined) { + //trigger to update the height, does not really apply any height + setHeight("90%"); + } + //600 to happen after the transition of 500ms + }, 600); + + // Cleanup function to clear the timeout if the component unmounts or the state changes again + return () => { + clearTimeout(delayedFunction); + }; + }, [error, setHeight]); return ( @@ -129,6 +145,7 @@ export default function CodeAreaModal({ -
+

{error?.detail?.error}

From 0aab360629f9f3bd5bb4743878074947bf589147 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Sat, 15 Jul 2023 00:41:31 +0100 Subject: [PATCH 092/221] =?UTF-8?q?=F0=9F=94=A8=20refactor(langflow):=20im?= =?UTF-8?q?prove=20code=20parsing=20and=20custom=20component=20handling?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Refactor code parsing in `code_parser.py` to handle imports, function definitions, and class attributes more robustly and cleanly. - Add new methods in `component.py` to parse Assign and AnnAssign statements, and FunctionDef statements. - Refactor `custom_component.py` to improve the handling of custom components, including better extraction of main class name and template configuration. - Update `types.py` to better handle the building of custom component templates, including handling of field configurations and error handling. - Minor formatting fix in `conftest.py` test fixture. These changes improve the robustness and readability of the code, and provide better handling and validation of custom components. --- .../langflow/interface/custom/code_parser.py | 59 ++++++++++++++----- .../langflow/interface/custom/component.py | 19 ++++++ .../interface/custom/custom_component.py | 46 +++++++++------ src/backend/langflow/interface/types.py | 17 +++++- tests/conftest.py | 3 +- 5 files changed, 106 insertions(+), 38 deletions(-) diff --git a/src/backend/langflow/interface/custom/code_parser.py b/src/backend/langflow/interface/custom/code_parser.py index 8a67fa733..e86c12cef 100644 --- a/src/backend/langflow/interface/custom/code_parser.py +++ b/src/backend/langflow/interface/custom/code_parser.py @@ -61,12 +61,11 @@ class CodeParser: Extracts "imports" from the code. 
""" if isinstance(node, ast.Import): - module = node.names[0].name - self.data["imports"].append(module) + for alias in node.names: + self.data["imports"].append(alias.name) elif isinstance(node, ast.ImportFrom): - module = node.module - names = [alias.name for alias in node.names] - self.data["imports"].append((module, names)) + for alias in node.names: + self.data["imports"].append((node.module, alias.name)) def parse_functions(self, node: ast.FunctionDef) -> None: """ @@ -97,7 +96,7 @@ class CodeParser: # Handle positional arguments with default values defaults = [None] * (len(node.args.args) - len(node.args.defaults)) + [ - ast.unparse(default) for default in node.args.defaults + ast.unparse(default) if default else None for default in node.args.defaults ] for arg, default in zip(node.args.args, defaults): @@ -126,10 +125,38 @@ class CodeParser: func["body"].append(ast.unparse(line)) return func + def parse_assign(self, stmt): + """ + Parses an Assign statement and returns a dictionary + with the target's name and value. + """ + for target in stmt.targets: + if isinstance(target, ast.Name): + return {"name": target.id, "value": ast.unparse(stmt.value)} + + def parse_ann_assign(self, stmt): + """ + Parses an AnnAssign statement and returns a dictionary + with the target's name, value, and annotation. + """ + if isinstance(stmt.target, ast.Name): + return { + "name": stmt.target.id, + "value": ast.unparse(stmt.value) if stmt.value else None, + "annotation": ast.unparse(stmt.annotation), + } + + def parse_function_def(self, stmt): + """ + Parses a FunctionDef statement and returns the parsed + method and a boolean indicating if it's an __init__ method. + """ + method = self.parse_callable_details(stmt) + return (method, True) if stmt.name == "__init__" else (method, False) + def parse_classes(self, node: ast.ClassDef) -> None: """ - Extracts "classes" from the code, including - inheritance and init methods. 
+ Extracts "classes" from the code, including inheritance and init methods. """ class_dict = { "name": node.name, @@ -140,15 +167,15 @@ class CodeParser: } for stmt in node.body: - if isinstance(stmt, ast.AnnAssign): - attr = {"name": stmt.target.id, "type": ast.unparse(stmt.annotation)} - class_dict["attributes"].append(attr) - elif isinstance(stmt, ast.Assign): - attr = {"name": stmt.targets[0].id, "value": ast.unparse(stmt.value)} - class_dict["attributes"].append(attr) + if isinstance(stmt, ast.Assign): + if attr := self.parse_assign(stmt): + class_dict["attributes"].append(attr) + elif isinstance(stmt, ast.AnnAssign): + if attr := self.parse_ann_assign(stmt): + class_dict["attributes"].append(attr) elif isinstance(stmt, ast.FunctionDef): - method = self.parse_callable_details(stmt) - if stmt.name == "__init__": + method, is_init = self.parse_function_def(stmt) + if is_init: class_dict["init"] = method else: class_dict["methods"].append(method) diff --git a/src/backend/langflow/interface/custom/component.py b/src/backend/langflow/interface/custom/component.py index f6ef62802..5e84c235e 100644 --- a/src/backend/langflow/interface/custom/component.py +++ b/src/backend/langflow/interface/custom/component.py @@ -1,3 +1,4 @@ +import ast from pydantic import BaseModel from fastapi import HTTPException @@ -48,5 +49,23 @@ class Component(BaseModel): return validate.create_function(self.code, self.function_entrypoint_name) + def build_template_config(self, attributes) -> dict: + template_config = {} + + for item in attributes: + item_name = item.get("name") + + if item_value := item.get("value"): + if "langflow_display_name" in item_name: + template_config["display_name"] = ast.literal_eval(item_value) + + elif "langflow_description" in item_name: + template_config["description"] = ast.literal_eval(item_value) + + elif "langflow_field_config" in item_name: + template_config["field_config"] = ast.literal_eval(item_value) + + return template_config + def build(self): 
raise NotImplementedError diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index 8fb5af62c..c439b0d2a 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -1,4 +1,3 @@ -import ast from typing import Callable, Optional from fastapi import HTTPException from langflow.interface.custom.constants import LANGCHAIN_BASE_TYPES @@ -28,7 +27,7 @@ class CustomComponent(Component): }, ) - # TODO: build logic + # TODO: Create the logic to validate what the Custom Component should have as a prerequisite to be able to execute return True def is_check_valid(self) -> bool: @@ -92,24 +91,35 @@ class CustomComponent(Component): return build_method["return_type"] @property - def get_template_config(self) -> dict: - extra_attributes = {} # self.get_extra_attributes - template_config = {} + def get_main_class_name(self): + tree = self.get_code_tree(self.code) - if "field_config" in extra_attributes: - template_config["field_config"] = ast.literal_eval( - extra_attributes["field_config"] - ) - if "display_name" in extra_attributes: - template_config["display_name"] = ast.literal_eval( - extra_attributes["display_name"] - ) - if "description" in extra_attributes: - template_config["description"] = ast.literal_eval( - extra_attributes["description"] - ) + base_name = self.code_class_base_inheritance + method_name = self.function_entrypoint_name - return template_config + classes = [] + for item in tree.get("classes"): + if base_name in item["bases"]: + method_names = [method["name"] for method in item["methods"]] + if method_name in method_names: + classes.append(item["name"]) + + # Get just the first item + return next(iter(classes), "") + + @property + def build_template_config(self): + tree = self.get_code_tree(self.code) + + attributes = [ + main_class["attributes"] + for main_class in tree.get("classes") + if 
main_class["name"] == self.get_main_class_name + ] + # Get just the first item + attributes = next(iter(attributes), []) + + return super().build_template_config(attributes) @property def get_function(self): diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 892b70260..b1ba8573f 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -161,7 +161,18 @@ def build_langchain_template_custom_component(custom_component: CustomComponent) function_args = custom_component.get_function_entrypoint_args return_type = custom_component.get_function_entrypoint_return_type - # template_config = custom_component.get_template_config + template_config = custom_component.build_template_config + + # Rewrite diplay_name and description values + if frontend_node: + if "display_name" in template_config: + frontend_node["display_name"] = template_config["display_name"] + + elif "description" in template_config: + frontend_node["description"] = template_config["description"] + + # Rewrite field configurations + field_config = template_config.get("field_config", {}) if function_args is not None: # Add extra fields @@ -174,7 +185,6 @@ def build_langchain_template_custom_component(custom_component: CustomComponent) extra_field.get("default") if "default" in extra_field else "" ) field_required = True - field_config = {} # TODO: Validate type - if is possible to render into frontend if "optional" in field_type.lower(): @@ -184,13 +194,14 @@ def build_langchain_template_custom_component(custom_component: CustomComponent) if not field_type: field_type = "str" + config = field_config.get(field_name, {}) frontend_node = add_new_custom_field( frontend_node, field_name, field_type, field_value, field_required, - field_config, + config, ) frontend_node = add_code_field(frontend_node, custom_component.code) diff --git a/tests/conftest.py b/tests/conftest.py index 8be738632..1773ebf23 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -252,7 +252,8 @@ class CustomChain(CustomComponent): def build(self, prompt, llm, input: str) -> Document: chain = MyCustomChain(prompt=prompt, llm=llm) - return chain(input)''' + return chain(input) +''' @pytest.fixture From d7a3c10d3affcfacf12fbf62a023067e501dd5b2 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Sat, 15 Jul 2023 01:02:52 +0100 Subject: [PATCH 093/221] =?UTF-8?q?=F0=9F=94=A7=20refactor(test=5Fcustom?= =?UTF-8?q?=5Fcomponent.py):=20improve=20readability=20by=20breaking=20lon?= =?UTF-8?q?g=20lines=20of=20code=20=F0=9F=94=A8=20refactor(test=5Fcustom?= =?UTF-8?q?=5Fcomponent.py):=20update=20import=20statement=20to=20reflect?= =?UTF-8?q?=20new=20file=20structure?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/test_custom_component.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py index fcb5a03eb..b73a80d69 100644 --- a/tests/test_custom_component.py +++ b/tests/test_custom_component.py @@ -1,7 +1,7 @@ import ast import pytest from fastapi import HTTPException -from langflow.interface.custom.custom import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent from langflow.interface.custom.constants import DEFAULT_CUSTOM_COMPONENT_CODE From 034a8f0641d97cac0503aa7f923be515008032c4 Mon Sep 17 00:00:00 2001 From: Lucas Oliveira Date: Sun, 16 Jul 2023 15:55:22 -0300 Subject: [PATCH 094/221] Fixed tooltip not working on CustomComponent --- src/frontend/src/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/frontend/src/utils.ts b/src/frontend/src/utils.ts index 55bd72af3..e3de62d1e 100644 --- a/src/frontend/src/utils.ts +++ b/src/frontend/src/utils.ts @@ -876,7 +876,7 @@ export function groupByFamily(data, baseClasses, left, type) { }); } - if (left === false) { + if(parentOutput !== "custom_components"){ let 
resFil = result.filter((group) => group.family === parentOutput); result = resFil; } From dc07b3c69007f31f793be865ec78c6ee114044d9 Mon Sep 17 00:00:00 2001 From: Lucas Oliveira Date: Sun, 16 Jul 2023 16:03:08 -0300 Subject: [PATCH 095/221] Removed output handle if baseclasses not present --- .../src/CustomNodes/GenericNode/index.tsx | 31 +++++++++---------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index ee62461bd..3b5ffc15d 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -242,22 +242,21 @@ export default function GenericNode({ > {" "}
- {/*
- Output -
*/} - 0 - ? data.node.output_types.join("|") - : data.type - } - tooltipTitle={data.node.base_classes.join("\n")} - id={[data.type, data.id, ...data.node.base_classes].join("|")} - type={data.node.base_classes.join("|")} - left={false} - /> + {data.node.base_classes?.length > 0 && ( + 0 + ? data.node.output_types.join("|") + : data.type + } + tooltipTitle={data.node.base_classes.join("\n")} + id={[data.type, data.id, ...data.node.base_classes].join("|")} + type={data.node.base_classes.join("|")} + left={false} + /> + )}
From 43b23e4ef61fe32b5e648912560a5699309c80a6 Mon Sep 17 00:00:00 2001 From: Lucas Oliveira Date: Sun, 16 Jul 2023 16:18:40 -0300 Subject: [PATCH 096/221] Added check to execute code when first rendering modal. To work, it must merge with ModalRefactor --- src/frontend/src/modals/codeAreaModal/v2.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/frontend/src/modals/codeAreaModal/v2.tsx b/src/frontend/src/modals/codeAreaModal/v2.tsx index 2f61b67c4..a677fe444 100644 --- a/src/frontend/src/modals/codeAreaModal/v2.tsx +++ b/src/frontend/src/modals/codeAreaModal/v2.tsx @@ -36,7 +36,6 @@ export default function CodeAreaModal({ const [loading, setLoading] = useState(false); const { dark } = useContext(darkContext); const { setErrorData, setSuccessData } = useContext(alertContext); - const [activeTab, setActiveTab] = useState("0"); const [height, setHeight] = useState(null); const [error, setError] = useState<{ detail: { error: string; traceback: string }; @@ -55,6 +54,10 @@ export default function CodeAreaModal({ setValue(code); }, [code, setValue]); + useEffect(() => { + handleClick(); + }, []) + function handleClick() { setLoading(true); if (!dynamic) { From e4f5df9bad18ec6865b2861e27ae481553bf7f6c Mon Sep 17 00:00:00 2001 From: Lucas Oliveira Date: Sun, 16 Jul 2023 17:10:09 -0300 Subject: [PATCH 097/221] Removed unused file --- src/frontend/src/modals/codeAreaModal/v2.tsx | 191 ------------------- 1 file changed, 191 deletions(-) delete mode 100644 src/frontend/src/modals/codeAreaModal/v2.tsx diff --git a/src/frontend/src/modals/codeAreaModal/v2.tsx b/src/frontend/src/modals/codeAreaModal/v2.tsx deleted file mode 100644 index a677fe444..000000000 --- a/src/frontend/src/modals/codeAreaModal/v2.tsx +++ /dev/null @@ -1,191 +0,0 @@ -// organize-imports-ignore -import { useContext, useEffect, useRef, useState } from "react"; -import { PopUpContext } from "../../contexts/popUpContext"; -import "ace-builds/src-noconflict/ace"; -import { 
darkContext } from "../../contexts/darkContext"; -import { postCustomComponent, postValidateCode } from "../../controllers/API"; -import { alertContext } from "../../contexts/alertContext"; -import { Button } from "../../components/ui/button"; -import { CODE_PROMPT_DIALOG_SUBTITLE } from "../../constants"; -import { APIClassType } from "../../types/api"; -import { DialogTitle } from "@radix-ui/react-dialog"; -import { TerminalSquare } from "lucide-react"; -import AceEditor from "react-ace"; -import "ace-builds/src-noconflict/mode-python"; -import "ace-builds/src-noconflict/theme-github"; -import "ace-builds/src-noconflict/theme-twilight"; -import "ace-builds/src-noconflict/ext-language_tools"; -import "ace-builds/src-noconflict/ace"; -import BaseModal from "../baseModal"; - -export default function CodeAreaModal({ - value, - setValue, - nodeClass, - setNodeClass, - dynamic, -}: { - setValue: (value: string) => void; - value: string; - nodeClass: APIClassType; - setNodeClass: (Class: APIClassType) => void; - dynamic?: boolean; -}) { - const [open, setOpen] = useState(true); - const [code, setCode] = useState(value); - const [loading, setLoading] = useState(false); - const { dark } = useContext(darkContext); - const { setErrorData, setSuccessData } = useContext(alertContext); - const [height, setHeight] = useState(null); - const [error, setError] = useState<{ - detail: { error: string; traceback: string }; - }>(null); - const { closePopUp, setCloseEdit } = useContext(PopUpContext); - function setModalOpen(x: boolean) { - setOpen(x); - if (x === false) { - setTimeout(() => { - setCloseEdit("editcode"); - closePopUp(); - }, 300); - } - } - useEffect(() => { - setValue(code); - }, [code, setValue]); - - useEffect(() => { - handleClick(); - }, []) - - function handleClick() { - setLoading(true); - if (!dynamic) { - postValidateCode(code) - .then((apiReturn) => { - setLoading(false); - if (apiReturn.data) { - let importsErrors = apiReturn.data.imports.errors; - let 
funcErrors = apiReturn.data.function.errors; - if (funcErrors.length === 0 && importsErrors.length === 0) { - setSuccessData({ - title: "Code is ready to run", - }); - // setValue(code); - } else { - if (funcErrors.length !== 0) { - setErrorData({ - title: "There is an error in your function", - list: funcErrors, - }); - } - if (importsErrors.length !== 0) { - setErrorData({ - title: "There is an error in your imports", - list: importsErrors, - }); - } - } - } else { - setErrorData({ - title: "Something went wrong, please try again", - }); - } - }) - .catch((_) => { - setLoading(false); - setErrorData({ - title: "There is something wrong with this code, please review it", - }); - }); - } else { - postCustomComponent(code, nodeClass) - .then((apiReturn) => { - const { data } = apiReturn; - if (data) { - setNodeClass(data); - setModalOpen(false); - } - }) - .catch((err) => { - setError(err.response.data); - }); - } - } - - /// use effect to update ace editor on error to handle right scroll - useEffect(() => { - // Function to be executed after the state changes - const delayedFunction = setTimeout(() => { - if (error?.detail.error !== undefined) { - //trigger to update the height, does not really apply any height - setHeight("90%"); - } - //600 to happen after the transition of 500ms - }, 600); - - // Cleanup function to clear the timeout if the component unmounts or the state changes again - return () => { - clearTimeout(delayedFunction); - }; - }, [error, setHeight]); - - return ( - - - - Edit Code - - - -
-
- { - setCode(value); - }} - className="h-full w-full rounded-lg border-[1px] border-gray-300 custom-scroll dark:border-gray-600" - /> -
-
-
-

- {error?.detail?.error} -

-
-
-                  {error?.detail?.traceback}
-                
-
-
-
-
- -
-
-
-
- ); -} From 9788ca6c2ed439344efcc7ee083dab15bfdfb409 Mon Sep 17 00:00:00 2001 From: Lucas Oliveira Date: Mon, 17 Jul 2023 10:19:13 -0300 Subject: [PATCH 098/221] FIxed gap in modal and errors not displaying at CodeAreaModal --- src/frontend/src/modals/baseModal/index.tsx | 5 +++-- .../src/modals/codeAreaModal/index.tsx | 21 +++++++++++++++++-- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/src/frontend/src/modals/baseModal/index.tsx b/src/frontend/src/modals/baseModal/index.tsx index 7cb3b2536..b40f39bd1 100644 --- a/src/frontend/src/modals/baseModal/index.tsx +++ b/src/frontend/src/modals/baseModal/index.tsx @@ -105,8 +105,9 @@ function BaseModal({ {headerChild}
{ContentChild}
- -
{ContentFooter}
+ {ContentFooter && ( +
{ContentFooter}
+ )}
); diff --git a/src/frontend/src/modals/codeAreaModal/index.tsx b/src/frontend/src/modals/codeAreaModal/index.tsx index 7a09cde69..e6ef23b53 100644 --- a/src/frontend/src/modals/codeAreaModal/index.tsx +++ b/src/frontend/src/modals/codeAreaModal/index.tsx @@ -133,7 +133,7 @@ export default function CodeAreaModal({
-
+
{ setCode(value); }} - className="h-full w-full rounded-lg border-[1px] border-border custom-scroll" + className="h-full w-full rounded-lg border-[1px] border-gray-300 custom-scroll dark:border-gray-600" />
+
+
+

+ {error?.detail?.error} +

+
+
+                  {error?.detail?.traceback}
+                
+
+
+
From 0ada6b841ff99fae2d3378e09fc2fd9a739c32ec Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 25 Jul 2023 23:44:36 +0100 Subject: [PATCH 180/221] =?UTF-8?q?=F0=9F=90=9B=20fix(types.py):=20remove?= =?UTF-8?q?=20unnecessary=20blank=20line=20=E2=9C=A8=20feat(types.py):=20r?= =?UTF-8?q?efactor=20build=5Flangchain=5Ftemplate=5Fcustom=5Fcomponent=20f?= =?UTF-8?q?unction=20to=20improve=20readability=20and=20error=20handling?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/types.py | 254 +++++++++++++++++------- 1 file changed, 186 insertions(+), 68 deletions(-) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index ad012bd3d..fdd7857c0 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -49,7 +49,6 @@ def get_type_list(): def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union """Build a dictionary of all langchain types""" - all_types = {} creators = [ @@ -162,104 +161,201 @@ def extract_type_from_optional(field_type): return match[1] if match else None -def build_langchain_template_custom_component(custom_component: CustomComponent): - # Build base "CustomComponent" template - frontend_node = ( - CustomComponentFrontendNode().to_dict().get(type(custom_component).__name__) - ) +def build_frontend_node(custom_component: CustomComponent): + """Build a frontend node for a custom component""" + try: + return ( + CustomComponentFrontendNode().to_dict().get(type(custom_component).__name__) + ) - function_args = custom_component.get_function_entrypoint_args - return_type = custom_component.get_function_entrypoint_return_type - # Rewrite diplay_name and description values - if frontend_node: - template_config = custom_component.build_template_config + except Exception as exc: + logger.error(f"Error while building base frontend node: {exc}") + return None - if "display_name" 
in template_config: - frontend_node["display_name"] = template_config["display_name"] - elif "description" in template_config: - frontend_node["description"] = template_config["description"] +def update_display_name_and_description(frontend_node, template_config): + """Update the display name and description of a frontend node""" + if "display_name" in template_config: + frontend_node["display_name"] = template_config["display_name"] - # Rewrite field configurations + if "description" in template_config: + frontend_node["description"] = template_config["description"] + + +def build_field_config(custom_component): + """Build the field configuration for a custom component""" try: custom_class = get_function_custom(custom_component.code) - field_config = custom_class().build_config() + return custom_class().build_config() + except Exception as exc: - logger.error(f"Error while building custom component: {exc}") - field_config = {} + logger.error(f"Error while building field config: {exc}") + return {} - if function_args is not None: - # Add extra fields - for extra_field in function_args: - field_name = extra_field.get("name") if "name" in extra_field else "" - if field_name != "self": - field_type = extra_field.get("type") if "type" in extra_field else "" - field_value = ( - extra_field.get("default") if "default" in extra_field else "" - ) - field_required = True +def add_extra_fields(frontend_node, field_config, function_args): + """Add extra fields to the frontend node""" + if function_args is None: + return - # TODO: Validate type - if is possible to render into frontend - if "optional" in field_type.lower(): - field_type = extract_type_from_optional(field_type) - field_required = False + for extra_field in function_args: + if "name" not in extra_field or extra_field["name"] == "self": + continue - if not field_type: - field_type = "str" + field_name, field_type, field_value, field_required = get_field_properties( + extra_field + ) + config = 
field_config.get(field_name, {}) + frontend_node = add_new_custom_field( + frontend_node, + field_name, + field_type, + field_value, + field_required, + config, + ) - config = field_config.get(field_name, {}) - frontend_node = add_new_custom_field( - frontend_node, - field_name, - field_type, - field_value, - field_required, - config, - ) - frontend_node = add_code_field(frontend_node, custom_component.code) +def get_field_properties(extra_field): + """Get the properties of an extra field""" + field_name = extra_field["name"] + field_type = extra_field.get("type", "str") + field_value = extra_field.get("default", "") + field_required = "optional" not in field_type.lower() - # Get base classes from "return_type" and add to template.base_classes - try: - if return_type not in LANGCHAIN_BASE_TYPES or return_type is None: - raise HTTPException( - status_code=400, - detail={ - "error": ( - "Invalid return type should be one of: " - f"{list(LANGCHAIN_BASE_TYPES.keys())}" - ), - "traceback": traceback.format_exc(), - }, - ) + if not field_required: + field_type = extract_type_from_optional(field_type) - return_type_instance = LANGCHAIN_BASE_TYPES.get(return_type) - base_classes = get_base_classes(return_type_instance) + return field_name, field_type, field_value, field_required - except (KeyError, AttributeError) as err: + +def add_base_classes(frontend_node, return_type): + """Add base classes to the frontend node""" + if return_type not in LANGCHAIN_BASE_TYPES or return_type is None: raise HTTPException( status_code=400, - detail={"error": type(err).__name__, "traceback": traceback.format_exc()}, - ) from err + detail={ + "error": ( + "Invalid return type should be one of: " + f"{list(LANGCHAIN_BASE_TYPES.keys())}" + ), + "traceback": traceback.format_exc(), + }, + ) + + return_type_instance = LANGCHAIN_BASE_TYPES.get(return_type) + base_classes = get_base_classes(return_type_instance) for base_class in base_classes: frontend_node.get("base_classes").append(base_class) 
+ +def build_langchain_template_custom_component(custom_component: CustomComponent): + """Build a custom component template for the langchain""" + frontend_node = build_frontend_node(custom_component) + + if frontend_node is None: + return None + + template_config = custom_component.build_template_config + + update_display_name_and_description(frontend_node, template_config) + + field_config = build_field_config(custom_component) + add_extra_fields( + frontend_node, field_config, custom_component.get_function_entrypoint_args + ) + + frontend_node = add_code_field(frontend_node, custom_component.code) + + add_base_classes( + frontend_node, custom_component.get_function_entrypoint_return_type + ) + return frontend_node -def build_langchain_custom_component_list_from_path(path: str): - # Load all files from Path +# def build_langchain_custom_component_list_from_path(path: str): +# # Load all files from Path +# reader = DirectoryReader(path, False) +# file_list = reader.get_files() + +# # Build and validate all files +# data = reader.build_component_menu_list(file_list) + +# valid_components = reader.filter_loaded_components( +# data=data, with_errors=False) +# invalid_components = reader.filter_loaded_components( +# data=data, with_errors=True) + +# valid_menu = {} +# for menu_item in valid_components["menu"]: +# menu_name = menu_item["name"] +# valid_menu[menu_name] = {} + +# for component in menu_item["components"]: +# try: +# component_name = component["name"] +# component_code = component["code"] + +# component_extractor = CustomComponent(code=component_code) +# component_extractor.is_check_valid() +# component_template = build_langchain_template_custom_component( +# component_extractor +# ) + +# valid_menu[menu_name][component_name] = component_template +# except Exception as exc: +# logger.error(f"Error while building custom component: {exc}") + +# invalid_menu = {} +# for menu_item in invalid_components["menu"]: +# menu_name = menu_item["name"] +# 
invalid_menu[menu_name] = {} + +# for component in menu_item["components"]: +# try: +# component_name = component["name"] +# component_code = component["code"] + +# component_template = ( +# CustomComponentFrontendNode( +# description="ERROR - Check your Python Code", +# display_name=f"ERROR - {component_name}", +# ) +# .to_dict() +# .get(type(CustomComponent()).__name__) +# ) + +# component_template.get("template").get("code")[ +# "value"] = component_code + +# invalid_menu[menu_name][component_name] = component_template +# except Exception as exc: +# logger.error(f"Error while creating custom component: {exc}") + +# return merge_nested_dicts(valid_menu, invalid_menu) + + +def load_files_from_path(path: str): + """Load all files from a given path""" reader = DirectoryReader(path, False) - file_list = reader.get_files() - # Build and validate all files + return reader.get_files() + + +def build_and_validate_all_files(reader, file_list): + """Build and validate all files""" data = reader.build_component_menu_list(file_list) - valid_components = reader.filter_loaded_components(data=data, with_errors=False) + invalid_components = reader.filter_loaded_components(data=data, with_errors=True) + return valid_components, invalid_components + + +def build_valid_menu(valid_components): + """Build the valid menu""" valid_menu = {} for menu_item in valid_components["menu"]: menu_name = menu_item["name"] @@ -277,9 +373,15 @@ def build_langchain_custom_component_list_from_path(path: str): ) valid_menu[menu_name][component_name] = component_template + except Exception as exc: logger.error(f"Error while building custom component: {exc}") + return valid_menu + + +def build_invalid_menu(invalid_components): + """Build the invalid menu""" invalid_menu = {} for menu_item in invalid_components["menu"]: menu_name = menu_item["name"] @@ -302,7 +404,23 @@ def build_langchain_custom_component_list_from_path(path: str): component_template.get("template").get("code")["value"] = 
component_code invalid_menu[menu_name][component_name] = component_template + except Exception as exc: logger.error(f"Error while creating custom component: {exc}") + return invalid_menu + + +def build_langchain_custom_component_list_from_path(path: str): + """Build a list of custom components for the langchain from a given path""" + file_list = load_files_from_path(path) + reader = DirectoryReader(path, False) + + valid_components, invalid_components = build_and_validate_all_files( + reader, file_list + ) + + valid_menu = build_valid_menu(valid_components) + invalid_menu = build_invalid_menu(invalid_components) + return merge_nested_dicts(valid_menu, invalid_menu) From f486c27a2f5e73290a3691d4f1f4d452bd8683b6 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 25 Jul 2023 23:45:35 +0100 Subject: [PATCH 181/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(types.py):=20re?= =?UTF-8?q?move=20commented=20out=20code=20for=20building=20langchain=20cu?= =?UTF-8?q?stom=20component=20list=20from=20path?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🔨 refactor(types.py): refactor load_files_from_path function to improve readability and maintainability 🔨 refactor(types.py): refactor build_and_validate_all_files function to improve readability and maintainability 🔨 refactor(types.py): refactor build_invalid_menu function to improve readability and maintainability --- src/backend/langflow/interface/types.py | 62 ------------------------- 1 file changed, 62 deletions(-) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index fdd7857c0..4f150e6a2 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -275,68 +275,6 @@ def build_langchain_template_custom_component(custom_component: CustomComponent) return frontend_node -# def build_langchain_custom_component_list_from_path(path: str): -# # Load all files from Path -# reader = DirectoryReader(path, 
False) -# file_list = reader.get_files() - -# # Build and validate all files -# data = reader.build_component_menu_list(file_list) - -# valid_components = reader.filter_loaded_components( -# data=data, with_errors=False) -# invalid_components = reader.filter_loaded_components( -# data=data, with_errors=True) - -# valid_menu = {} -# for menu_item in valid_components["menu"]: -# menu_name = menu_item["name"] -# valid_menu[menu_name] = {} - -# for component in menu_item["components"]: -# try: -# component_name = component["name"] -# component_code = component["code"] - -# component_extractor = CustomComponent(code=component_code) -# component_extractor.is_check_valid() -# component_template = build_langchain_template_custom_component( -# component_extractor -# ) - -# valid_menu[menu_name][component_name] = component_template -# except Exception as exc: -# logger.error(f"Error while building custom component: {exc}") - -# invalid_menu = {} -# for menu_item in invalid_components["menu"]: -# menu_name = menu_item["name"] -# invalid_menu[menu_name] = {} - -# for component in menu_item["components"]: -# try: -# component_name = component["name"] -# component_code = component["code"] - -# component_template = ( -# CustomComponentFrontendNode( -# description="ERROR - Check your Python Code", -# display_name=f"ERROR - {component_name}", -# ) -# .to_dict() -# .get(type(CustomComponent()).__name__) -# ) - -# component_template.get("template").get("code")[ -# "value"] = component_code - -# invalid_menu[menu_name][component_name] = component_template -# except Exception as exc: -# logger.error(f"Error while creating custom component: {exc}") - -# return merge_nested_dicts(valid_menu, invalid_menu) - - def load_files_from_path(path: str): """Load all files from a given path""" reader = DirectoryReader(path, False) From 683d0ffcc89d588e0065db569b72df74c3a81559 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 25 Jul 2023 20:54:57 -0300 Subject: [PATCH 182/221] 
=?UTF-8?q?=F0=9F=90=9B=20fix(types.py):=20prevent?= =?UTF-8?q?=20adding=20base=20classes=20that=20are=20in=20CLASSES=5FTO=5FR?= =?UTF-8?q?EMOVE=20list=20to=20frontend=5Fnode?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/types.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 4f150e6a2..27cdaaa2f 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -18,6 +18,7 @@ from langflow.interface.custom.base import custom_component_creator from langflow.interface.custom.custom_component import CustomComponent from langflow.template.field.base import TemplateField +from langflow.template.frontend_node.constants import CLASSES_TO_REMOVE from langflow.template.frontend_node.custom_components import ( CustomComponentFrontendNode, ) @@ -247,7 +248,8 @@ def add_base_classes(frontend_node, return_type): base_classes = get_base_classes(return_type_instance) for base_class in base_classes: - frontend_node.get("base_classes").append(base_class) + if base_class not in CLASSES_TO_REMOVE: + frontend_node.get("base_classes").append(base_class) def build_langchain_template_custom_component(custom_component: CustomComponent): From 1e54c175740fe701e6fc0451a5ee91348b0172e5 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 26 Jul 2023 01:00:56 +0100 Subject: [PATCH 183/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(tools.py):=20re?= =?UTF-8?q?move=20unused=20import=20and=20typing=20import=20statement?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/template/frontend_node/tools.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/backend/langflow/template/frontend_node/tools.py b/src/backend/langflow/template/frontend_node/tools.py index 66f0eae3a..579b32da3 100644 --- 
a/src/backend/langflow/template/frontend_node/tools.py +++ b/src/backend/langflow/template/frontend_node/tools.py @@ -4,7 +4,6 @@ from langflow.template.template.base import Template from langflow.utils.constants import ( DEFAULT_PYTHON_FUNCTION, ) -from typing import Optional class ToolNode(FrontendNode): From 39755fb2171b3ce884d34255ac4fe30134eac307 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 07:12:24 -0300 Subject: [PATCH 184/221] =?UTF-8?q?=F0=9F=90=9B=20fix(code=5Fparser.py):?= =?UTF-8?q?=20refactor=20CodeParser.parse=5Ffunction=5Fnode=20to=20use=20C?= =?UTF-8?q?allableCodeDetails=20dataclass=20for=20storing=20function=20det?= =?UTF-8?q?ails?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/code_parser.py | 60 ++++++++++++++----- 1 file changed, 46 insertions(+), 14 deletions(-) diff --git a/src/backend/langflow/interface/custom/code_parser.py b/src/backend/langflow/interface/custom/code_parser.py index aa4191448..7fb3b0184 100644 --- a/src/backend/langflow/interface/custom/code_parser.py +++ b/src/backend/langflow/interface/custom/code_parser.py @@ -2,14 +2,39 @@ import ast import inspect import traceback -from typing import Dict, Any, Type, Union +from typing import Dict, Any, Optional, Type, Union from fastapi import HTTPException +from pydantic import BaseModel class CodeSyntaxError(HTTPException): pass +class CallableCodeDetails(BaseModel): + """ + A dataclass for storing details about a callable. + """ + + name: str + doc: Optional[str] + args: list + body: list + return_type: Optional[str] + + +class ClassCodeDetails(BaseModel): + """ + A dataclass for storing details about a class. + """ + + name: str + doc: str + bases: list + attributes: list + methods: list + + class CodeParser: """ A parser for Python source code, extracting code details. @@ -92,13 +117,20 @@ class CodeParser: """ Extracts details from a single function or method node. 
""" - func = { - "name": node.name, - "doc": ast.get_docstring(node), - "args": [], - "body": [], - "return_type": ast.unparse(node.returns) if node.returns else None, - } + # func = { + # "name": node.name, + # "doc": ast.get_docstring(node), + # "args": [], + # "body": [], + # "return_type": ast.unparse(node.returns) if node.returns else None, + # } + func = CallableCodeDetails( + name=node.name, + doc=ast.get_docstring(node), + args=[], + body=[], + return_type=ast.unparse(node.returns) if node.returns else None, + ) # Handle positional arguments with default values defaults = [None] * (len(node.args.args) - len(node.args.defaults)) + [ @@ -106,11 +138,11 @@ class CodeParser: ] for arg, default in zip(node.args.args, defaults): - func["args"].append(self.parse_arg(arg, default)) + func.args.append(self.parse_arg(arg, default)) # Handle *args if node.args.vararg: - func["args"].append(self.parse_arg(node.args.vararg, None)) + func.args.append(self.parse_arg(node.args.vararg, None)) # Handle keyword-only arguments with default values kw_defaults = [None] * ( @@ -121,15 +153,15 @@ class CodeParser: ] for arg, default in zip(node.args.kwonlyargs, kw_defaults): - func["args"].append(self.parse_arg(arg, default)) + func.args.append(self.parse_arg(arg, default)) # Handle **kwargs if node.args.kwarg: - func["args"].append(self.parse_arg(node.args.kwarg, None)) + func.args.append(self.parse_arg(node.args.kwarg, None)) for line in node.body: - func["body"].append(ast.unparse(line)) - return func + func.body.append(ast.unparse(line)) + return func.dict() def parse_assign(self, stmt): """ From c93febc7a337206afd52d0d483d7708a24d86861 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 07:43:16 -0300 Subject: [PATCH 185/221] =?UTF-8?q?=F0=9F=9A=80=20feat(test=5Fagents=5Ftem?= =?UTF-8?q?plate.py):=20add=20"fileTypes"=20field=20to=20the=20"path"=20ag?= =?UTF-8?q?ent=20configuration=20to=20specify=20supported=20file=20types?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/test_agents_template.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_agents_template.py b/tests/test_agents_template.py index 62c237b5a..0b5fb7c3a 100644 --- a/tests/test_agents_template.py +++ b/tests/test_agents_template.py @@ -170,6 +170,7 @@ def test_csv_agent(client: TestClient): "multiline": False, "value": "", "suffixes": [".csv"], + "fileTypes": ["csv"], "password": False, "name": "path", "type": "file", From 57dd75b3f5eb49b76c359826573c9c7944748730 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 07:43:49 -0300 Subject: [PATCH 186/221] =?UTF-8?q?=F0=9F=94=A7=20chore(component.py):=20a?= =?UTF-8?q?dd=20Optional=20type=20hint=20to=20the=20'code'=20field=20in=20?= =?UTF-8?q?the=20Component=20model=20to=20indicate=20that=20it=20is=20an?= =?UTF-8?q?=20optional=20field=20=F0=9F=94=A7=20chore(component.py):=20upd?= =?UTF-8?q?ate=20the=20error=20message=20in=20the=20validate=5Fentrypoint?= =?UTF-8?q?=5Fname=20function=20to=20provide=20more=20clarity=20on=20the?= =?UTF-8?q?=20required=20field?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/component.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/custom/component.py b/src/backend/langflow/interface/custom/component.py index 3db2a2516..a9dc0f323 100644 --- a/src/backend/langflow/interface/custom/component.py +++ b/src/backend/langflow/interface/custom/component.py @@ -1,4 +1,5 @@ import ast +from typing import Optional from pydantic import BaseModel from fastapi import HTTPException @@ -20,7 +21,7 @@ class Component(BaseModel): "The name of the entrypoint function must be provided." 
) - code: str + code: Optional[str] function_entrypoint_name = "build" field_config: dict = {} From 029f01aa35695738943f792a6a5f230ab62918dc Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 07:45:31 -0300 Subject: [PATCH 187/221] =?UTF-8?q?=F0=9F=90=9B=20fix(base.py):=20change?= =?UTF-8?q?=20variable=20name=20from=20fileTypes=20to=20file=5Ftypes=20for?= =?UTF-8?q?=20consistency=20and=20readability=20=F0=9F=90=9B=20fix(agents.?= =?UTF-8?q?py):=20change=20variable=20name=20from=20fileTypes=20to=20file?= =?UTF-8?q?=5Ftypes=20for=20consistency=20and=20readability=20=F0=9F=90=9B?= =?UTF-8?q?=20fix(documentloaders.py):=20change=20variable=20name=20from?= =?UTF-8?q?=20fileTypes=20to=20file=5Ftypes=20for=20consistency=20and=20re?= =?UTF-8?q?adability=20=F0=9F=90=9B=20fix(llms.py):=20change=20variable=20?= =?UTF-8?q?name=20from=20fileTypes=20to=20file=5Ftypes=20for=20consistency?= =?UTF-8?q?=20and=20readability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/tools/base.py | 2 +- src/backend/langflow/template/frontend_node/agents.py | 2 +- src/backend/langflow/template/frontend_node/documentloaders.py | 2 +- src/backend/langflow/template/frontend_node/llms.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py index 027224a3a..f8965134d 100644 --- a/src/backend/langflow/interface/tools/base.py +++ b/src/backend/langflow/interface/tools/base.py @@ -55,7 +55,7 @@ TOOL_INPUTS = { show=True, value="", suffixes=[".json", ".yaml", ".yml"], - fileTypes=["json", "yaml", "yml"], + file_types=["json", "yaml", "yml"], ), } diff --git a/src/backend/langflow/template/frontend_node/agents.py b/src/backend/langflow/template/frontend_node/agents.py index 02aea78b9..63c8a4d5e 100644 --- a/src/backend/langflow/template/frontend_node/agents.py +++ 
b/src/backend/langflow/template/frontend_node/agents.py @@ -145,7 +145,7 @@ class CSVAgentNode(FrontendNode): name="path", value="", suffixes=[".csv"], - fileTypes=["csv"], + file_types=["csv"], ), TemplateField( field_type="BaseLanguageModel", diff --git a/src/backend/langflow/template/frontend_node/documentloaders.py b/src/backend/langflow/template/frontend_node/documentloaders.py index d775d8736..bb78d8855 100644 --- a/src/backend/langflow/template/frontend_node/documentloaders.py +++ b/src/backend/langflow/template/frontend_node/documentloaders.py @@ -14,7 +14,7 @@ def build_file_field( name=name, value="", suffixes=suffixes, - fileTypes=fileTypes, + file_types=fileTypes, ) diff --git a/src/backend/langflow/template/frontend_node/llms.py b/src/backend/langflow/template/frontend_node/llms.py index de0fa3c0b..a6a128cfe 100644 --- a/src/backend/langflow/template/frontend_node/llms.py +++ b/src/backend/langflow/template/frontend_node/llms.py @@ -19,7 +19,7 @@ class LLMFrontendNode(FrontendNode): name="credentials", value="", suffixes=[".json"], - fileTypes=["json"], + file_types=["json"], ) ) From bf422f7f37d7f1453ac511774703005d1be7753f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 07:46:00 -0300 Subject: [PATCH 188/221] =?UTF-8?q?=F0=9F=94=A8=20refactor(code=5Fparser.p?= =?UTF-8?q?y):=20remove=20unused=20imports=20and=20classes=20to=20improve?= =?UTF-8?q?=20code=20cleanliness=20and=20maintainability=20=F0=9F=94=A8=20?= =?UTF-8?q?refactor(code=5Fparser.py):=20remove=20commented=20out=20code?= =?UTF-8?q?=20for=20function=20details=20to=20improve=20code=20readability?= =?UTF-8?q?=20=F0=9F=94=A8=20refactor(code=5Fparser.py):=20refactor=20pars?= =?UTF-8?q?e=5Fnode=20method=20to=20remove=20type=20ignore=20comments=20an?= =?UTF-8?q?d=20improve=20code=20readability=20=F0=9F=94=A8=20refactor(code?= =?UTF-8?q?=5Fparser.py):=20refactor=20parse=5Ffunction=5Fargs=20method=20?= 
=?UTF-8?q?to=20improve=20code=20readability=20and=20maintainability=20?= =?UTF-8?q?=F0=9F=94=A8=20refactor(code=5Fparser.py):=20refactor=20parse?= =?UTF-8?q?=5Fpositional=5Fargs=20method=20to=20improve=20code=20readabili?= =?UTF-8?q?ty=20and=20maintainability=20=F0=9F=94=A8=20refactor(code=5Fpar?= =?UTF-8?q?ser.py):=20refactor=20parse=5Fvarargs=20method=20to=20improve?= =?UTF-8?q?=20code=20readability=20and=20maintainability=20=F0=9F=94=A8=20?= =?UTF-8?q?refactor(code=5Fparser.py):=20refactor=20parse=5Fkeyword=5Fargs?= =?UTF-8?q?=20method=20to=20improve=20code=20readability=20and=20maintaina?= =?UTF-8?q?bility=20=F0=9F=94=A8=20refactor(code=5Fparser.py):=20refactor?= =?UTF-8?q?=20parse=5Fkwargs=20method=20to=20improve=20code=20readability?= =?UTF-8?q?=20and=20maintainability=20=F0=9F=94=A8=20refactor(code=5Fparse?= =?UTF-8?q?r.py):=20refactor=20parse=5Ffunction=5Fbody=20method=20to=20imp?= =?UTF-8?q?rove=20code=20readability=20and=20maintainability=20?= =?UTF-8?q?=F0=9F=94=A8=20refactor(code=5Fparser.py):=20refactor=20parse?= =?UTF-8?q?=5Fassign=20method=20to=20improve=20code=20readability=20and=20?= =?UTF-8?q?maintainability=20=F0=9F=94=A8=20refactor(code=5Fparser.py):=20?= =?UTF-8?q?refactor=20parse=5Fclass=20method=20to=20improve=20code=20reada?= =?UTF-8?q?bility=20and=20maintainability=20=F0=9F=94=A8=20refactor(code?= =?UTF-8?q?=5Fparser.py):=20refactor=20parse=5Fglobal=5Fvars=20method=20to?= =?UTF-8?q?=20improve=20code=20readability=20and=20maintainability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/code_parser.py | 146 ++++++++++-------- 1 file changed, 84 insertions(+), 62 deletions(-) diff --git a/src/backend/langflow/interface/custom/code_parser.py b/src/backend/langflow/interface/custom/code_parser.py index 7fb3b0184..606208624 100644 --- a/src/backend/langflow/interface/custom/code_parser.py +++ b/src/backend/langflow/interface/custom/code_parser.py @@ -2,39 +2,15 @@ import 
ast import inspect import traceback -from typing import Dict, Any, Optional, Type, Union +from typing import Dict, Any, List, Type, Union from fastapi import HTTPException -from pydantic import BaseModel +from langflow.interface.custom.schema import CallableCodeDetails, ClassCodeDetails class CodeSyntaxError(HTTPException): pass -class CallableCodeDetails(BaseModel): - """ - A dataclass for storing details about a callable. - """ - - name: str - doc: Optional[str] - args: list - body: list - return_type: Optional[str] - - -class ClassCodeDetails(BaseModel): - """ - A dataclass for storing details about a class. - """ - - name: str - doc: str - bases: list - attributes: list - methods: list - - class CodeParser: """ A parser for Python source code, extracting code details. @@ -79,13 +55,13 @@ class CodeParser: return tree - def parse_node(self, node: ast.AST) -> None: + def parse_node(self, node: Union[ast.stmt, ast.AST]) -> None: """ Parses an AST node and updates the data dictionary with the relevant information. """ - if handler := self.handlers.get(type(node)): - handler(node) + if handler := self.handlers.get(type(node)): # type: ignore + handler(node) # type: ignore def parse_imports(self, node: Union[ast.Import, ast.ImportFrom]) -> None: """ @@ -117,13 +93,6 @@ class CodeParser: """ Extracts details from a single function or method node. 
""" - # func = { - # "name": node.name, - # "doc": ast.get_docstring(node), - # "args": [], - # "body": [], - # "return_type": ast.unparse(node.returns) if node.returns else None, - # } func = CallableCodeDetails( name=node.name, doc=ast.get_docstring(node), @@ -132,19 +101,58 @@ class CodeParser: return_type=ast.unparse(node.returns) if node.returns else None, ) - # Handle positional arguments with default values - defaults = [None] * (len(node.args.args) - len(node.args.defaults)) + [ + func.args = self.parse_function_args(node) + func.body = self.parse_function_body(node) + + return func.dict() + + def parse_function_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the arguments of a function or method node. + """ + args = [] + + args += self.parse_positional_args(node) + args += self.parse_varargs(node) + args += self.parse_keyword_args(node) + args += self.parse_kwargs(node) + + return args + + def parse_positional_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the positional arguments of a function or method node. + """ + num_args = len(node.args.args) + num_defaults = len(node.args.defaults) + num_missing_defaults = num_args - num_defaults + missing_defaults = [None] * num_missing_defaults + default_values = [ ast.unparse(default) if default else None for default in node.args.defaults ] + defaults = missing_defaults + default_values - for arg, default in zip(node.args.args, defaults): - func.args.append(self.parse_arg(arg, default)) + args = [ + self.parse_arg(arg, default) + for arg, default in zip(node.args.args, defaults) + ] + return args + + def parse_varargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the *args argument of a function or method node. 
+ """ + args = [] - # Handle *args if node.args.vararg: - func.args.append(self.parse_arg(node.args.vararg, None)) + args.append(self.parse_arg(node.args.vararg, None)) - # Handle keyword-only arguments with default values + return args + + def parse_keyword_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the keyword-only arguments of a function or method node. + """ kw_defaults = [None] * ( len(node.args.kwonlyargs) - len(node.args.kw_defaults) ) + [ @@ -152,16 +160,28 @@ class CodeParser: for default in node.args.kw_defaults ] - for arg, default in zip(node.args.kwonlyargs, kw_defaults): - func.args.append(self.parse_arg(arg, default)) + args = [ + self.parse_arg(arg, default) + for arg, default in zip(node.args.kwonlyargs, kw_defaults) + ] + return args + + def parse_kwargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the **kwargs argument of a function or method node. + """ + args = [] - # Handle **kwargs if node.args.kwarg: - func.args.append(self.parse_arg(node.args.kwarg, None)) + args.append(self.parse_arg(node.args.kwarg, None)) - for line in node.body: - func.body.append(ast.unparse(line)) - return func.dict() + return args + + def parse_function_body(self, node: ast.FunctionDef) -> List[str]: + """ + Parses the body of a function or method node. + """ + return [ast.unparse(line) for line in node.body] def parse_assign(self, stmt): """ @@ -196,29 +216,31 @@ class CodeParser: """ Extracts "classes" from the code, including inheritance and init methods. 
""" - class_dict = { - "name": node.name, - "doc": ast.get_docstring(node), - "bases": [ast.unparse(base) for base in node.bases], - "attributes": [], - "methods": [], - } + + class_details = ClassCodeDetails( + name=node.name, + doc=ast.get_docstring(node), + bases=[ast.unparse(base) for base in node.bases], + attributes=[], + methods=[], + init=None, + ) for stmt in node.body: if isinstance(stmt, ast.Assign): if attr := self.parse_assign(stmt): - class_dict["attributes"].append(attr) + class_details.attributes.append(attr) elif isinstance(stmt, ast.AnnAssign): if attr := self.parse_ann_assign(stmt): - class_dict["attributes"].append(attr) + class_details.attributes.append(attr) elif isinstance(stmt, ast.FunctionDef): method, is_init = self.parse_function_def(stmt) if is_init: - class_dict["init"] = method + class_details.init = method else: - class_dict["methods"].append(method) + class_details.methods.append(method) - self.data["classes"].append(class_dict) + self.data["classes"].append(class_details.dict()) def parse_global_vars(self, node: ast.Assign) -> None: """ From c4a2c1c80a44bd0278a182dd6acda3b72712d76c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 07:46:31 -0300 Subject: [PATCH 189/221] =?UTF-8?q?=E2=9C=A8=20feat(schema.py):=20add=20Cl?= =?UTF-8?q?assCodeDetails=20and=20CallableCodeDetails=20data=20classes=20t?= =?UTF-8?q?o=20store=20details=20about=20classes=20and=20callables=20respe?= =?UTF-8?q?ctively?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/schema.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 src/backend/langflow/interface/custom/schema.py diff --git a/src/backend/langflow/interface/custom/schema.py b/src/backend/langflow/interface/custom/schema.py new file mode 100644 index 000000000..80d65405f --- /dev/null +++ b/src/backend/langflow/interface/custom/schema.py @@ -0,0 +1,29 @@ +from pydantic 
import BaseModel, Field + + +from typing import Optional + + +class ClassCodeDetails(BaseModel): + """ + A dataclass for storing details about a class. + """ + + name: str + doc: Optional[str] + bases: list + attributes: list + methods: list + init: Optional[dict] = Field(default_factory=dict) + + +class CallableCodeDetails(BaseModel): + """ + A dataclass for storing details about a callable. + """ + + name: str + doc: Optional[str] + args: list + body: list + return_type: Optional[str] From 67b2aeae67d188f23ef4bc599b28ea57a9deaf96 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 07:47:34 -0300 Subject: [PATCH 190/221] =?UTF-8?q?=F0=9F=90=9B=20fix(custom=5Fcomponent.p?= =?UTF-8?q?y):=20remove=20unused=20import=20of=20UUID=20from=20langflow.in?= =?UTF-8?q?terface.custom.custom=5Fcomponent.py=20=F0=9F=90=9B=20fix(custo?= =?UTF-8?q?m=5Fcomponent.py):=20handle=20case=20when=20code=20is=20None=20?= =?UTF-8?q?in=20is=5Fcheck=5Fvalid=20method=20of=20CustomComponent=20class?= =?UTF-8?q?=20=F0=9F=90=9B=20fix(custom=5Fcomponent.py):=20handle=20case?= =?UTF-8?q?=20when=20code=20is=20None=20in=20get=5Ffunction=5Fentrypoint?= =?UTF-8?q?=5Fargs=20property=20of=20CustomComponent=20class=20?= =?UTF-8?q?=F0=9F=90=9B=20fix(custom=5Fcomponent.py):=20handle=20case=20wh?= =?UTF-8?q?en=20code=20is=20None=20in=20get=5Ffunction=5Fentrypoint=5Fretu?= =?UTF-8?q?rn=5Ftype=20property=20of=20CustomComponent=20class=20?= =?UTF-8?q?=F0=9F=90=9B=20fix(custom=5Fcomponent.py):=20change=20flow=5Fid?= =?UTF-8?q?=20parameter=20type=20from=20UUID=20to=20str=20in=20load=5Fflow?= =?UTF-8?q?=20method=20of=20CustomComponent=20class=20=F0=9F=90=9B=20fix(u?= =?UTF-8?q?til.py):=20ignore=20type=20error=20for=20multiprocess=20import?= =?UTF-8?q?=20in=20langflow.utils.util=20module=20=F0=9F=90=9B=20fix(util.?= =?UTF-8?q?py):=20handle=20case=20when=20=5Ftype=20is=20a=20type=20object?= =?UTF-8?q?=20in=20remove=5Foptional=5Fwrapper=20function=20of=20langflow.?= 
=?UTF-8?q?utils.util=20module?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/custom_component.py | 9 ++++++--- src/backend/langflow/utils/util.py | 6 ++++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index 1cc0ca620..2f89863ae 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -5,7 +5,6 @@ from langflow.interface.custom.component import Component from langflow.utils import validate -from uuid import UUID from langflow.database.base import session_getter from langflow.database.models.flow import Flow from pydantic import Extra @@ -44,13 +43,15 @@ class CustomComponent(Component, extra=Extra.allow): return True def is_check_valid(self) -> bool: - return self._class_template_validation(self.code) + return self._class_template_validation(self.code) if self.code else False def get_code_tree(self, code: str): return super().get_code_tree(code) @property def get_function_entrypoint_args(self) -> str: + if not self.code: + return "" tree = self.get_code_tree(self.code) component_classes = [ @@ -78,6 +79,8 @@ class CustomComponent(Component, extra=Extra.allow): @property def get_function_entrypoint_return_type(self) -> str: + if not self.code: + return "" tree = self.get_code_tree(self.code) component_classes = [ @@ -138,7 +141,7 @@ class CustomComponent(Component, extra=Extra.allow): def get_function(self): return validate.create_function(self.code, self.function_entrypoint_name) - def load_flow(self, flow_id: UUID = None): + def load_flow(self, flow_id: str): from langflow.processing.process import build_sorted_vertices_with_caching with session_getter() as session: diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py index 
ce5f03bdf..f68c9dbe2 100644 --- a/src/backend/langflow/utils/util.py +++ b/src/backend/langflow/utils/util.py @@ -9,7 +9,7 @@ from docstring_parser import parse # type: ignore from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS from langflow.utils import constants from langflow.utils.logger import logger -from multiprocess import cpu_count +from multiprocess import cpu_count # type: ignore def build_template_from_function( @@ -301,13 +301,15 @@ def get_type(value: Any) -> Union[str, type]: return _type if isinstance(_type, str) else _type.__name__ -def remove_optional_wrapper(_type: str) -> str: +def remove_optional_wrapper(_type: Union[str, type]) -> str: """ Removes the 'Optional' wrapper from the type string. Returns: The type string with the 'Optional' wrapper removed. """ + if isinstance(_type, type): + _type = str(_type) if "Optional" in _type: _type = _type.replace("Optional[", "")[:-1] From 28149ddb6cbc5cf4b65e091dbcb6d701cbbb023d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 10:00:34 -0300 Subject: [PATCH 191/221] =?UTF-8?q?=F0=9F=94=A7=20fix(index.css):=20remove?= =?UTF-8?q?=20unnecessary=20whitespace=20and=20comments=20for=20better=20c?= =?UTF-8?q?ode=20readability=20=E2=9C=A8=20feat(index.css):=20add=20suppor?= =?UTF-8?q?t=20for=20beta=20background=20and=20foreground=20colors=20to=20?= =?UTF-8?q?improve=20visual=20design=20and=20user=20experience?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🔧 chore(styles.css): reorganize color variables for better readability and maintainability --- src/frontend/src/style/index.css | 228 ++++++++++++++++--------------- 1 file changed, 115 insertions(+), 113 deletions(-) diff --git a/src/frontend/src/style/index.css b/src/frontend/src/style/index.css index 7e40571b3..e57a89eae 100644 --- a/src/frontend/src/style/index.css +++ b/src/frontend/src/style/index.css @@ -2,125 +2,127 @@ @tailwind components; @tailwind 
utilities; - /* TODO: Confirm that all colors here are found in tailwind config */ @layer base { - :root { - --background: 0 0% 100%; /* hsl(0 0% 100%) */ - --foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ - --muted: 210 40% 98%; /* hsl(210 40% 98%) */ - --muted-foreground: 215.4 16.3% 46.9%; /* hsl(215 16% 46%) */ - --popover: 0 0% 100%; /* hsl(0 0% 100%) */ - --popover-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ - --card: 0 0% 100%; /* hsl(0 0% 100%) */ - --card-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ - --border: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */ - --input: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */ - --primary: 222.2 27% 11.2%; /* hsl(222 27% 18%) */ - --primary-foreground: 210 40% 98%; /* hsl(210 40% 98%) */ - --secondary: 210 40% 96.1%; /* hsl(210 40% 96%) */ - --secondary-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ - --accent: 210 30% 96.1%; /* hsl(210 30% 96%) */ - --accent-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ - --destructive: 0 100% 50%; /* hsl(0 100% 50%) */ - --destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */ - --radius: 0.5rem; - --ring: 215 20.2% 65.1%; /* hsl(215 20% 65%) */ - --round-btn-shadow: #00000063; - - --error-background: #fef2f2; - --error-foreground: #991b1b; - - --success-background: #f0fdf4; - --success-foreground: #14532d; + --background: 0 0% 100%; /* hsl(0 0% 100%) */ + --foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ + --muted: 210 40% 98%; /* hsl(210 40% 98%) */ + --muted-foreground: 215.4 16.3% 46.9%; /* hsl(215 16% 46%) */ + --popover: 0 0% 100%; /* hsl(0 0% 100%) */ + --popover-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ + --card: 0 0% 100%; /* hsl(0 0% 100%) */ + --card-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ + --border: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */ + --input: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */ + --primary: 222.2 27% 11.2%; /* hsl(222 27% 18%) */ + --primary-foreground: 210 40% 98%; /* hsl(210 40% 98%) */ 
+ --secondary: 210 40% 96.1%; /* hsl(210 40% 96%) */ + --secondary-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ + --accent: 210 30% 96.1%; /* hsl(210 30% 96%) */ + --accent-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */ + --destructive: 0 100% 50%; /* hsl(0 100% 50%) */ + --destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */ + --radius: 0.5rem; + --ring: 215 20.2% 65.1%; /* hsl(215 20% 65%) */ + --round-btn-shadow: #00000063; - --info-background: #f0f4fd; - --info-foreground: #141653; + --error-background: #fef2f2; + --error-foreground: #991b1b; - --high-indigo: #4338ca; - --medium-indigo: #6366f1; - --low-indigo: #e0e7ff; + --success-background: #f0fdf4; + --success-foreground: #14532d; - --chat-bot-icon: #afe6ef; - --chat-user-icon: #aface9; - - /* Colors that are shared in dark and light mode */ - --blur-shared: #151923de; - --build-trigger: #dc735b; - --chat-trigger: #5c8be1; - --chat-trigger-disabled: #b4c3da; - --status-red: #ef4444; - --status-yellow: #eab308; - --chat-send: #059669; - --status-green: #4ade80; - --status-blue:#2563eb; - --connection: #555; + --info-background: #f0f4fd; + --info-foreground: #141653; + --high-indigo: #4338ca; + --medium-indigo: #6366f1; + --low-indigo: #e0e7ff; + + --beta-background: rgb(219 234 254); + --beta-foreground: rgb(37 99 235); + + --chat-bot-icon: #afe6ef; + --chat-user-icon: #aface9; + + /* Colors that are shared in dark and light mode */ + --blur-shared: #151923de; + --build-trigger: #dc735b; + --chat-trigger: #5c8be1; + --chat-trigger-disabled: #b4c3da; + --status-red: #ef4444; + --status-yellow: #eab308; + --chat-send: #059669; + --status-green: #4ade80; + --status-blue: #2563eb; + --connection: #555; + } + + .dark { + --background: 224 35% 7.5%; /* hsl(224 40% 10%) */ + --foreground: 213 31% 80%; /* hsl(213 31% 91%) */ + + --muted: 223 27% 11%; /* hsl(223 27% 11%) */ + --muted-foreground: 215.4 16.3% 56.9%; /* hsl(215 16% 56%) */ + + --popover: 224 71% 4%; /* hsl(224 71% 4%) */ + 
--popover-foreground: 215 20.2% 65.1%; /* hsl(215 20% 65%) */ + + --card: 224 25% 15.5%; /* hsl(224 71% 4%) */ + --card-foreground: 213 31% 80%; /* hsl(213 31% 91%) */ + + --border: 216 24% 17%; /* hsl(216 34% 17%) */ + --input: 216 24% 17%; /* hsl(216 34% 17%) */ + + --primary: 210 20% 80%; /* hsl(210 20% 80%) */ + --primary-foreground: 222.2 27.4% 1.2%; /* hsl(222 47% 1%) */ + + --secondary: 222.2 37.4% 7.2%; /* hsl(222 47% 11%) */ + --secondary-foreground: 210 40% 80%; /* hsl(210 40% 80%) */ + + --accent: 216 24% 20%; /* hsl(216 34% 17%) */ + --accent-foreground: 210 30% 98%; /* hsl(210 40% 98%) */ + + --destructive: 0 63% 31%; /* hsl(0 63% 31%) */ + --destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */ + + --ring: 216 24% 30%; /* hsl(216 24% 30%) */ + + --radius: 0.5rem; + + --round-btn-shadow: #00000063; + + --success-background: #022c22; + --success-foreground: #ecfdf5; + + --error-foreground: #fef2f2; + --error-background: #450a0a; + + --info-foreground: #eff6ff; + --info-background: #172554; + + --high-indigo: #4338ca; + --medium-indigo: #6366f1; + --low-indigo: #e0e7ff; + + /* Colors that are shared in dark and light mode */ + --blur-shared: #151923d2; + --build-trigger: #dc735b; + --chat-trigger: #5c8be1; + --chat-trigger-disabled: #2d3b54; + --status-red: #ef4444; + --status-yellow: #eab308; + --chat-send: #059669; + --status-green: #4ade80; + --status-blue: #2563eb; + --connection: #555; + + --beta-background: rgb(37 99 235); + --beta-foreground: rgb(219 234 254); + + --chat-bot-icon: #235d70; + --chat-user-icon: #4f3d6e; + } } - -.dark { - --background: 224 35% 7.5%; /* hsl(224 40% 10%) */ - --foreground: 213 31% 80%; /* hsl(213 31% 91%) */ - - --muted: 223 27% 11%; /* hsl(223 27% 11%) */ - --muted-foreground: 215.4 16.3% 56.9%; /* hsl(215 16% 56%) */ - - --popover: 224 71% 4%; /* hsl(224 71% 4%) */ - --popover-foreground: 215 20.2% 65.1%; /* hsl(215 20% 65%) */ - - --card: 224 25% 15.5%; /* hsl(224 71% 4%) */ - --card-foreground: 213 31% 80%; 
/* hsl(213 31% 91%) */ - - --border: 216 24% 17%; /* hsl(216 34% 17%) */ - --input: 216 24% 17%; /* hsl(216 34% 17%) */ - - --primary: 210 20% 80%; /* hsl(210 20% 80%) */ - --primary-foreground: 222.2 27.4% 1.2%; /* hsl(222 47% 1%) */ - - --secondary: 222.2 37.4% 7.2%; /* hsl(222 47% 11%) */ - --secondary-foreground: 210 40% 80%; /* hsl(210 40% 80%) */ - - --accent: 216 24% 20%; /* hsl(216 34% 17%) */ - --accent-foreground: 210 30% 98%; /* hsl(210 40% 98%) */ - - --destructive: 0 63% 31%; /* hsl(0 63% 31%) */ - --destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */ - - --ring: 216 24% 30%; /* hsl(216 24% 30%) */ - - --radius: 0.5rem; - - --round-btn-shadow: #00000063; - - --success-background: #022c22; - --success-foreground: #ecfdf5; - - --error-foreground: #fef2f2; - --error-background: #450a0a; - - --info-foreground: #eff6ff; - --info-background: #172554; - - - --high-indigo: #4338ca; - --medium-indigo: #6366f1; - --low-indigo: #e0e7ff; - - /* Colors that are shared in dark and light mode */ - --blur-shared: #151923d2; - --build-trigger: #dc735b; - --chat-trigger: #5c8be1; - --chat-trigger-disabled: #2d3b54; - --status-red: #ef4444; - --status-yellow: #eab308; - --chat-send: #059669; - --status-green: #4ade80; - --status-blue: #2563eb; - --connection: #555; - - --chat-bot-icon: #235d70; - --chat-user-icon: #4f3d6e; - -}} From 63ead274c4e7d41a6f7b99fe1da61d4886abd9a5 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 10:15:22 -0300 Subject: [PATCH 192/221] =?UTF-8?q?=F0=9F=94=A7=20fix(constants.py):=20add?= =?UTF-8?q?=20support=20for=20additional=20Python=20base=20types=20in=20CU?= =?UTF-8?q?STOM=5FCOMPONENT=5FSUPPORTED=5FTYPES=20dictionary=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(custom=5Fcomponent.py):=20update=20return=5F?= =?UTF-8?q?type=5Fvalid=5Flist=20to=20use=20CUSTOM=5FCOMPONENT=5FSUPPORTED?= =?UTF-8?q?=5FTYPES=20dictionary=20=F0=9F=94=A7=20fix(types.py):=20update?= 
=?UTF-8?q?=20add=5Fbase=5Fclasses=20function=20to=20use=20CUSTOM=5FCOMPON?= =?UTF-8?q?ENT=5FSUPPORTED=5FTYPES=20dictionary=20instead=20of=20LANGCHAIN?= =?UTF-8?q?=5FBASE=5FTYPES?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/constants.py | 10 ++++++++++ .../langflow/interface/custom/custom_component.py | 4 ++-- src/backend/langflow/interface/types.py | 8 ++++---- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/interface/custom/constants.py b/src/backend/langflow/interface/custom/constants.py index 0a5809d5d..00aa5314d 100644 --- a/src/backend/langflow/interface/custom/constants.py +++ b/src/backend/langflow/interface/custom/constants.py @@ -20,7 +20,17 @@ LANGCHAIN_BASE_TYPES = { "VectorStore": VectorStore, "Embeddings": Embeddings, "BaseRetriever": BaseRetriever, +} + +# Langchain base types plus Python base types +CUSTOM_COMPONENT_SUPPORTED_TYPES = { + **LANGCHAIN_BASE_TYPES, "str": str, + "int": int, + "float": float, + "bool": bool, + "list": list, + "dict": dict, } diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index 2f89863ae..f58c9db19 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -1,6 +1,6 @@ from typing import Callable, Optional from fastapi import HTTPException -from langflow.interface.custom.constants import LANGCHAIN_BASE_TYPES +from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES from langflow.interface.custom.component import Component from langflow.utils import validate @@ -16,7 +16,7 @@ class CustomComponent(Component, extra=Extra.allow): code_class_base_inheritance = "CustomComponent" function_entrypoint_name = "build" function: Optional[Callable] = None - return_type_valid_list = list(LANGCHAIN_BASE_TYPES.keys()) + 
return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys()) repr_value: Optional[str] = "" def __init__(self, **data): diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 27cdaaa2f..35ecd558b 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -1,6 +1,6 @@ from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator -from langflow.interface.custom.constants import LANGCHAIN_BASE_TYPES +from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES from langflow.interface.document_loaders.base import documentloader_creator from langflow.interface.embeddings.base import embedding_creator from langflow.interface.importing.utils import get_function_custom @@ -232,19 +232,19 @@ def get_field_properties(extra_field): def add_base_classes(frontend_node, return_type): """Add base classes to the frontend node""" - if return_type not in LANGCHAIN_BASE_TYPES or return_type is None: + if return_type not in CUSTOM_COMPONENT_SUPPORTED_TYPES or return_type is None: raise HTTPException( status_code=400, detail={ "error": ( "Invalid return type should be one of: " - f"{list(LANGCHAIN_BASE_TYPES.keys())}" + f"{list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())}" ), "traceback": traceback.format_exc(), }, ) - return_type_instance = LANGCHAIN_BASE_TYPES.get(return_type) + return_type_instance = CUSTOM_COMPONENT_SUPPORTED_TYPES.get(return_type) base_classes = get_base_classes(return_type_instance) for base_class in base_classes: From a89a9a3267be0274800ed42a07bd537dadba8f55 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 26 Jul 2023 16:56:21 +0100 Subject: [PATCH 193/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(custom.py):=20r?= =?UTF-8?q?emove=20unused=20code=20and=20class=20'CustomComponent=5Fold'?= =?UTF-8?q?=20to=20improve=20code=20cleanliness=20and=20maintainability?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🔧 fix(test_custom_component.py): fix formatting issues in test_custom_component.py for better readability ✨ feat(test_custom_component.py): add import statements for 'patch' and 'MagicMock' to enable mocking in tests 🔬 test(test_custom_component.py): add test for the 'get_function' method of the Component class with valid code and function_entrypoint_name 🔬 test(test_custom_component.py): add test for the 'parse_assign' method of the CodeParser class 🔬 test(test_custom_component.py): add test for the 'get_code_tree' method of the Component class when given incorrect syntax 🔬 test(test_custom_component.py): add test for the '_class_template_validation' method of the CustomComponent class when the code is None 🔬 test(test_custom_component.py): add test for the 'get_function_entrypoint_args' method of the CustomComponent class 🔬 test(test_custom_component.py): add test for the 'get_function_entrypoint_return_type' method of the CustomComponent class 🔬 test(test_custom_component.py): add test for the 'get_main_class_name' method of the CustomComponent class when there is no main class 🔥 refactor(test_custom_component.py): remove commented out code and unused fixtures to improve code readability and maintainability 🔧 refactor(tests): remove commented out test cases and unused imports ✨ feat(tests): add new test case for list_flows method when there are no flows in the database ✨ feat(tests): add new test case for build_config method when code is not provided ✨ feat(tests): add new test case for list_flows method when there are multiple queries to the database --- .../langflow/interface/tools/custom.py | 26 - tests/test_custom_component.py | 475 ++---------------- 2 files changed, 28 insertions(+), 473 deletions(-) diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/langflow/interface/tools/custom.py index a0ed5d378..321298e34 100644 --- a/src/backend/langflow/interface/tools/custom.py +++ 
b/src/backend/langflow/interface/tools/custom.py @@ -48,29 +48,3 @@ class PythonFunctionTool(Function, Tool): class PythonFunction(Function): code: str - - -class CustomComponent_old(BaseModel): - code: str - function: Optional[Callable] = None - imports: Optional[str] = None - - # Eval code and store the class - def __init__(self, **data): - super().__init__(**data) - - # Validate the Class code - @validator("code") - def validate_func(cls, v): - try: - validate.eval_function(v) - except Exception as e: - raise e - - return v - - def get_function(self): - """Get the function""" - function_name = validate.extract_function_name(self.code) - - return validate.create_function(self.code, function_name) diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py index f4e57d10d..8963a7ece 100644 --- a/tests/test_custom_component.py +++ b/tests/test_custom_component.py @@ -1,6 +1,7 @@ import ast import pytest import types +from unittest.mock import patch, MagicMock from fastapi import HTTPException from langflow.interface.custom.base import CustomComponent @@ -447,462 +448,42 @@ def test_custom_component_build_not_implemented(): custom_component.build() -# ------------------------------------------------------- -# @pytest.fixture -# def custom_chain(): -# return ''' -# from __future__ import annotations -# from typing import Any, Dict, List, Optional +def test_list_flows_no_flows(): + session_getter_module = "langflow.database.base.session_getter" -# from pydantic import Extra + with patch(session_getter_module) as mock_session_getter: + mock_session = MagicMock() + mock_session.query.return_value.all.return_value = [] + mock_session_getter.return_value.__enter__.return_value = mock_session -# from langchain.schema import BaseLanguageModel, Document -# from langchain.callbacks.manager import ( -# AsyncCallbackManagerForChainRun, -# CallbackManagerForChainRun, -# ) -# from langchain.chains.base import Chain -# from langchain.prompts import 
StringPromptTemplate -# from langflow.interface.custom.base import CustomComponent + component = CustomComponent() + result = component.list_flows() -# class MyCustomChain(Chain): -# """ -# An example of a custom chain. -# """ + assert len(result) == 0 -# from typing import Any, Dict, List, Optional -# from pydantic import Extra +def test_build_config_no_code(): + component = CustomComponent(code=None) -# from langchain.schema import BaseLanguageModel, Document -# from langchain.callbacks.manager import ( -# AsyncCallbackManagerForChainRun, -# CallbackManagerForChainRun, -# ) -# from langchain.chains.base import Chain -# from langchain.prompts import StringPromptTemplate -# from langflow.interface.custom.base import CustomComponent + assert component.get_function_entrypoint_args == "" + assert component.get_function_entrypoint_return_type == "" -# class MyCustomChain(Chain): -# """ -# An example of a custom chain. -# """ -# prompt: StringPromptTemplate -# """Prompt object to use.""" -# llm: BaseLanguageModel -# output_key: str = "text" #: :meta private: +def test_list_flows_multiple_queries(): + mock_flow_1 = MagicMock() + mock_flow_2 = MagicMock() -# class Config: -# """Configuration for this pydantic object.""" + session_getter_module = "langflow.database.base.session_getter" -# extra = Extra.forbid -# arbitrary_types_allowed = True + with patch(session_getter_module) as mock_session_getter: + mock_session = MagicMock() + mock_session.query.return_value.all.side_effect = [[mock_flow_1], [mock_flow_2]] + mock_session_getter.return_value.__enter__.return_value = mock_session -# @property -# def input_keys(self) -> List[str]: -# """Will be whatever keys the prompt expects. + component = CustomComponent() + result = component.list_flows() -# :meta private: -# """ -# return self.prompt.input_variables - -# @property -# def output_keys(self) -> List[str]: -# """Will always return text key. 
- -# :meta private: -# """ -# return [self.output_key] - -# def _call( -# self, -# inputs: Dict[str, Any], -# run_manager: Optional[CallbackManagerForChainRun] = None, -# ) -> Dict[str, str]: -# # Your custom chain logic goes here -# # This is just an example that mimics LLMChain -# prompt_value = self.prompt.format_prompt(**inputs) - -# # Whenever you call a language model, or another chain, you should pass -# # a callback manager to it. This allows the inner run to be tracked by -# # any callbacks that are registered on the outer run. -# # You can always obtain a callback manager for this by calling -# # `run_manager.get_child()` as shown below. -# response = self.llm.generate_prompt( -# [prompt_value], -# callbacks=run_manager.get_child() if run_manager else None, -# ) - -# # If you want to log something about this run, you can do so by calling -# # methods on the `run_manager`, as shown below. This will trigger any -# # callbacks that are registered for that event. -# if run_manager: -# run_manager.on_text("Log something about this run") - -# return {self.output_key: response.generations[0][0].text} - -# async def _acall( -# self, -# inputs: Dict[str, Any], -# run_manager: Optional[AsyncCallbackManagerForChainRun] = None, -# ) -> Dict[str, str]: -# # Your custom chain logic goes here -# # This is just an example that mimics LLMChain -# prompt_value = self.prompt.format_prompt(**inputs) - -# # Whenever you call a language model, or another chain, you should pass -# # a callback manager to it. This allows the inner run to be tracked by -# # any callbacks that are registered on the outer run. -# # You can always obtain a callback manager for this by calling -# # `run_manager.get_child()` as shown below. -# response = await self.llm.agenerate_prompt( -# [prompt_value], -# callbacks=run_manager.get_child() if run_manager else None, -# ) - -# # If you want to log something about this run, you can do so by calling -# # methods on the `run_manager`, as shown below. 
This will trigger any -# # callbacks that are registered for that event. -# if run_manager: -# await run_manager.on_text("Log something about this run") - -# return {self.output_key: response.generations[0][0].text} - -# @property -# def _chain_type(self) -> str: -# return "my_custom_chain" - -# class CustomChain(CustomComponent): -# display_name: str = "Custom Chain" -# field_config = { -# "prompt": {"field_type": "prompt"}, -# "llm": {"field_type": "BaseLanguageModel"}, -# } - -# def build(self, prompt, llm, input: str) -> Document: -# chain = MyCustomChain(prompt=prompt, llm=llm) -# return chain(input) -# ''' - - -# @pytest.fixture -# def data_processing(): -# return """ -# import pandas as pd -# from langchain.schema import Document -# from langflow.interface.custom.base import CustomComponent - -# class CSVLoaderComponent(CustomComponent): -# display_name: str = "CSV Loader" -# field_config = { -# "filename": {"field_type": "str", "required": True}, -# "column_name": {"field_type": "str", "required": True}, -# } - -# def build(self, filename: str, column_name: str) -> Document: -# # Load the CSV file -# df = pd.read_csv(filename) - -# # Verify the column exists -# if column_name not in df.columns: -# raise ValueError(f"Column '{column_name}' not found in the CSV file") - -# # Convert each row of the specified column to a document object -# documents = [] -# for content in df[column_name]: -# metadata = {"filename": filename} -# documents.append(Document(page_content=str(content), metadata=metadata)) - -# return documents -# """ - - -# @pytest.fixture -# def filter_docs(): -# return """ -# from langchain.schema import Document -# from langflow.interface.custom.base import CustomComponent -# from typing import List - -# class DocumentFilterByLengthComponent(CustomComponent): -# display_name: str = "Document Filter By Length" -# field_config = { -# "documents": {"field_type": "Document", "required": True}, -# "max_length": {"field_type": "int", "required": True}, 
-# } - -# def build(self, documents: List[Document], max_length: int) -> List[Document]: -# # Filter the documents by length -# filtered_documents = [doc for doc in documents if len(doc.page_content) <= max_length] - -# return filtered_documents -# """ - - -# @pytest.fixture -# def get_request(): -# return """ -# import requests -# from typing import Dict, Union -# from langchain.schema import Document -# from langflow.interface.custom.base import CustomComponent - -# class GetRequestComponent(CustomComponent): -# display_name: str = "GET Request" -# field_config = { -# "url": {"field_type": "str", "required": True}, -# } - -# def build(self, url: str) -> Document: -# # Send a GET request to the URL -# response = requests.get(url) - -# # Raise an exception if the request was not successful -# if response.status_code != 200: -# raise ValueError(f"GET request failed: {response.status_code} status code") - -# # Create a document with the response text and the URL as metadata -# document = Document(page_content=response.text, metadata={"url": url}) - -# return document -# """ - - -# @pytest.fixture -# def post_request(): -# return """ -# import requests -# from typing import Dict, Union -# from langchain.schema import Document -# from langflow.interface.custom.base import CustomComponent - -# class PostRequestComponent(CustomComponent): -# display_name: str = "POST Request" -# field_config = { -# "url": {"field_type": "str", "required": True}, -# "data": {"field_type": "dict", "required": True}, -# } - -# def build(self, url: str, data: Dict[str, Union[str, int]]) -> Document: -# # Send a POST request to the URL -# response = requests.post(url, data=data) - -# # Raise an exception if the request was not successful -# if response.status_code != 200: -# raise ValueError(f"POST request failed: {response.status_code} status code") - -# # Create a document with the response text and the URL and data as metadata -# document = Document(page_content=response.text, 
metadata={"url": url, "data": data}) - -# return document -# """ - - -# @pytest.fixture -# def code_default(): -# return """ -# from langflow import Prompt -# from langflow.interface.custom.custom_component import CustomComponent - -# from langchain.llms.base import BaseLLM -# from langchain.chains import LLMChain -# from langchain import PromptTemplate -# from langchain.schema import Document - -# import requests - -# class YourComponent(CustomComponent): -# #display_name: str = "Your Component" -# #description: str = "Your description" -# #field_config = { "url": { "multiline": True, "required": True } } - -# def build(self, url: str, llm: BaseLLM, template: Prompt) -> Document: -# response = requests.get(url) -# prompt = PromptTemplate.from_template(template) -# chain = LLMChain(llm=llm, prompt=prompt) -# result = chain.run(response.text[:300]) -# return Document(page_content=str(result)) -# """ - - -# @pytest.fixture(params=[ -# 'code_default', 'custom_chain', 'data_processing', -# 'filter_docs', 'get_request', 'post_request']) -# def component_code( -# request, code_default, custom_chain, data_processing, -# filter_docs, get_request, post_request): -# return locals()[request.param] - - -# def test_empty_code_tree(component_code): -# """ -# Test the situation when the code tree is empty. -# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = {} -# assert cc.get_function_entrypoint_args == '' -# assert cc.get_function_entrypoint_return_type == '' -# assert cc.get_main_class_name == '' -# assert cc.build_template_config == {} - - -# def test_class_template_validation(component_code): -# """ -# Test the _class_template_validation method. 
-# """ -# cc = CustomComponent(code=component_code) -# assert cc._class_template_validation(component_code) == True -# with pytest.raises(HTTPException): -# cc._class_template_validation(None) - - -# def test_get_code_tree(component_code): -# """ -# Test the get_code_tree method. -# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = {'classes': []} -# assert cc.get_code_tree(component_code) == {'classes': []} - - -# def test_get_function_entrypoint_args(component_code): -# """ -# Test the get_function_entrypoint_args method. -# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = {'classes': []} -# assert cc.get_function_entrypoint_args == '' - - -# def test_get_function_entrypoint_return_type(component_code): -# """ -# Test the get_function_entrypoint_return_type method. -# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = {'classes': []} -# assert cc.get_function_entrypoint_return_type == '' - - -# def test_get_main_class_name(component_code): -# """ -# Test the get_main_class_name method. -# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = {'classes': []} -# assert cc.get_main_class_name == '' - - -# def test_build_template_config(component_code): -# """ -# Test the build_template_config method. -# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = { -# 'classes': [{'name': '', 'attributes': []}]} -# assert cc.build_template_config == {} - - -# def test_get_function(component_code): -# """ -# Test the get_function method. 
-# """ -# cc = CustomComponent(code=component_code, function_entrypoint_name='build') -# assert callable(cc.get_function) - - -# def test_build(component_code): -# """ -# Test the build method. -# """ -# cc = CustomComponent(code=component_code) -# with pytest.raises(NotImplementedError): -# cc.build() - - -# @pytest.mark.parametrize("entrypoint_name", ["build", "non_exist_method"]) -# def test_set_non_existing_function_entrypoint_name(component_code, entrypoint_name): -# """ -# Test setting a non-existing function entrypoint name. -# """ -# cc = CustomComponent( -# code=component_code, -# function_entrypoint_name=entrypoint_name -# ) -# with pytest.raises(AttributeError): -# cc.get_function - - -# @pytest.mark.parametrize("base_class", ["CustomComponent", "NonExistingClass"]) -# def test_set_non_existing_base_class(component_code, base_class): -# """ -# Test setting a non-existing base class. -# """ -# cc = CustomComponent(code=component_code) -# cc.code_class_base_inheritance = base_class -# with pytest.raises(AttributeError): -# cc.get_main_class_name - - -# def test_class_with_no_methods(component_code): -# """ -# Test a component class with no methods. -# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = { -# 'classes': [ -# { -# 'name': 'CustomComponent', -# 'methods': [], -# 'bases': ['CustomComponent'] -# } -# ] -# } -# assert cc.get_function_entrypoint_args == '' -# assert cc.get_function_entrypoint_return_type == '' - - -# def test_class_with_no_bases(component_code): -# """ -# Test a component class with no bases. 
-# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = { -# 'classes': [ -# { -# 'name': 'CustomComponent', -# 'methods': [], -# 'bases': [] -# } -# ] -# } -# assert cc.get_function_entrypoint_args == '' -# assert cc.get_function_entrypoint_return_type == '' - - -# def test_class_with_no_name(component_code): -# """ -# Test a component class with no name. -# """ -# cc = CustomComponent(code=component_code) -# with patch.object(cc, 'get_code_tree') as mocked_get_code_tree: -# mocked_get_code_tree.return_value = {'classes': [ -# {'name': '', 'methods': [], 'bases': ['CustomComponent']}]} -# assert cc.get_main_class_name == '' - - -# @pytest.mark.parametrize("input_code", ["", "not a valid python code"]) -# def test_invalid_input_code(input_code): -# """ -# Test inputting an invalid Python code. -# """ -# with pytest.raises(SyntaxError): -# cc = CustomComponent(code=input_code) + # Only the result of the second query is returned + assert len(result) == 1 + assert result[0] == mock_flow_2 + assert mock_session.query.call_count == 2 From d5ee293590a5260f053da0e82907455ec68d22be Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 26 Jul 2023 17:40:53 +0100 Subject: [PATCH 194/221] =?UTF-8?q?=F0=9F=94=A7=20fix(test=5Fcustom=5Fcomp?= =?UTF-8?q?onent.py):=20fix=20import=20statements=20and=20formatting=20iss?= =?UTF-8?q?ues=20in=20test=5Fcustom=5Fcomponent.py=20=E2=9C=A8=20feat(test?= =?UTF-8?q?=5Fcustom=5Fcomponent.py):=20add=20tests=20for=20list=5Fflows,?= =?UTF-8?q?=20build=5Fconfig=20methods=20in=20CustomComponent=20class=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(test=5Fcustom=5Fcomponent.py):=20fix=20forma?= =?UTF-8?q?tting=20issues=20in=20test=5Flist=5Fflows=5Fmultiple=5Fqueries?= =?UTF-8?q?=20test=20=E2=9C=A8=20feat(test=5Fcustom=5Fcomponent.py):=20add?= =?UTF-8?q?=20tests=20for=20list=5Fflows,=20build=5Fconfig=20methods=20in?= 
=?UTF-8?q?=20CustomComponent=20class=20=E2=9C=A8=20feat(test=5Fcustom=5Fc?= =?UTF-8?q?omponent.py):=20add=20test=20for=20return=20type=20of=20list=5F?= =?UTF-8?q?flows=20method=20in=20CustomComponent=20class=20=E2=9C=A8=20fea?= =?UTF-8?q?t(test=5Fcustom=5Fcomponent.py):=20add=20test=20for=20return=20?= =?UTF-8?q?type=20of=20build=5Fconfig=20method=20in=20CustomComponent=20cl?= =?UTF-8?q?ass=20=E2=9C=A8=20feat(test=5Fcustom=5Fcomponent.py):=20add=20t?= =?UTF-8?q?est=20for=20presence=20of=20'fields'=20key=20in=20build=5Fconfi?= =?UTF-8?q?g=20method=20in=20CustomComponent=20class=20=E2=9C=A8=20feat(te?= =?UTF-8?q?st=5Fcustom=5Fcomponent.py):=20add=20test=20for=20type=20of=20'?= =?UTF-8?q?fields'=20value=20in=20build=5Fconfig=20method=20in=20CustomCom?= =?UTF-8?q?ponent=20class=20=E2=9C=A8=20feat(test=5Fcustom=5Fcomponent.py)?= =?UTF-8?q?:=20add=20test=20for=20type=20of=20keys=20in=20'fields'=20value?= =?UTF-8?q?=20in=20build=5Fconfig=20method=20in=20CustomComponent=20class?= =?UTF-8?q?=20=E2=9C=A8=20feat(test=5Fcustom=5Fcomponent.py):=20add=20test?= =?UTF-8?q?=20for=20type=20of=20values=20in=20'fields'=20value=20in=20buil?= =?UTF-8?q?d=5Fconfig=20method=20in=20CustomComponent=20class?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/test_custom_component.py | 146 ++++++++++++++++++++++++++------- 1 file changed, 115 insertions(+), 31 deletions(-) diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py index 8963a7ece..2811f68bd 100644 --- a/tests/test_custom_component.py +++ b/tests/test_custom_component.py @@ -1,9 +1,12 @@ import ast import pytest import types +from uuid import uuid4 + from unittest.mock import patch, MagicMock from fastapi import HTTPException +from langflow.database.models.flow import Flow, FlowCreate from langflow.interface.custom.base import CustomComponent from langflow.interface.custom.component import ( Component, @@ -57,7 +60,8 @@ def test_code_parser_get_tree(): def 
test_code_parser_syntax_error(): """ - Test the __get_tree method raises the CodeSyntaxError when given incorrect syntax. + Test the __get_tree method raises the + CodeSyntaxError when given incorrect syntax. """ code_syntax_error = "zzz import os" @@ -86,7 +90,8 @@ def test_component_get_code_tree(): def test_component_code_null_error(): """ - Test the get_function method raises the ComponentCodeNullError when the code is empty. + Test the get_function method raises the + ComponentCodeNullError when the code is empty. """ component = Component(code="", function_entrypoint_name="") with pytest.raises(ComponentCodeNullError): @@ -140,7 +145,8 @@ def test_custom_component_get_function(): def test_code_parser_parse_imports_import(): """ - Test the parse_imports method of the CodeParser class with an import statement. + Test the parse_imports method of the CodeParser + class with an import statement. """ parser = CodeParser(code_default) tree = parser._CodeParser__get_tree() @@ -152,7 +158,8 @@ def test_code_parser_parse_imports_import(): def test_code_parser_parse_imports_importfrom(): """ - Test the parse_imports method of the CodeParser class with an import from statement. + Test the parse_imports method of the CodeParser + class with an import from statement. """ parser = CodeParser("from os import path") tree = parser._CodeParser__get_tree() @@ -203,16 +210,18 @@ def test_code_parser_parse_global_vars(): def test_component_get_function_valid(): """ - Test the get_function method of the Component class with valid code and function_entrypoint_name. + Test the get_function method of the Component + class with valid code and function_entrypoint_name. 
""" component = Component(code="def build(): pass", function_entrypoint_name="build") - function = component.get_function() - assert callable(function) + my_function = component.get_function() + assert callable(my_function) def test_custom_component_get_function_entrypoint_args(): """ - Test the get_function_entrypoint_args property of the CustomComponent class. + Test the get_function_entrypoint_args + property of the CustomComponent class. """ custom_component = CustomComponent( code=code_default, function_entrypoint_name="build" @@ -226,7 +235,8 @@ def test_custom_component_get_function_entrypoint_args(): def test_custom_component_get_function_entrypoint_return_type(): """ - Test the get_function_entrypoint_return_type property of the CustomComponent class. + Test the get_function_entrypoint_return_type + property of the CustomComponent class. """ custom_component = CustomComponent( code=code_default, function_entrypoint_name="build" @@ -248,7 +258,8 @@ def test_custom_component_get_main_class_name(): def test_custom_component_get_function_valid(): """ - Test the get_function property of the CustomComponent class with valid code and function_entrypoint_name. + Test the get_function property of the CustomComponent + class with valid code and function_entrypoint_name. """ custom_component = CustomComponent( code="def build(): pass", function_entrypoint_name="build" @@ -281,7 +292,8 @@ def test_code_parser_parse_arg_with_annotation(): def test_code_parser_parse_callable_details_no_args(): """ - Test the parse_callable_details method of the CodeParser class with a function with no arguments. + Test the parse_callable_details method of the + CodeParser class with a function with no arguments. """ parser = CodeParser("") node = ast.FunctionDef( @@ -328,7 +340,8 @@ def test_code_parser_parse_ann_assign(): def test_code_parser_parse_function_def_not_init(): """ - Test the parse_function_def method of the CodeParser class with a function that is not __init__. 
+ Test the parse_function_def method of the + CodeParser class with a function that is not __init__. """ parser = CodeParser("") stmt = ast.FunctionDef( @@ -347,7 +360,8 @@ def test_code_parser_parse_function_def_not_init(): def test_code_parser_parse_function_def_init(): """ - Test the parse_function_def method of the CodeParser class with an __init__ function. + Test the parse_function_def method of the + CodeParser class with an __init__ function. """ parser = CodeParser("") stmt = ast.FunctionDef( @@ -386,7 +400,8 @@ def test_custom_component_class_template_validation_no_code(): def test_custom_component_get_code_tree_syntax_error(): """ - Test the get_code_tree method of the CustomComponent class raises the CodeSyntaxError when given incorrect syntax. + Test the get_code_tree method of the CustomComponent class + raises the CodeSyntaxError when given incorrect syntax. """ custom_component = CustomComponent( code="import os as", function_entrypoint_name="build" @@ -397,7 +412,8 @@ def test_custom_component_get_code_tree_syntax_error(): def test_custom_component_get_function_entrypoint_args_no_args(): """ - Test the get_function_entrypoint_args property of the CustomComponent class with a build method with no arguments. + Test the get_function_entrypoint_args property of + the CustomComponent class with a build method with no arguments. """ my_code = """ class MyMainClass(CustomComponent): @@ -426,7 +442,8 @@ class MyClass(CustomComponent): def test_custom_component_get_main_class_name_no_main_class(): """ - Test the get_main_class_name property of the CustomComponent class when there is no main class. + Test the get_main_class_name property of the + CustomComponent class when there is no main class. """ my_code = """ def build(): @@ -439,7 +456,8 @@ def build(): def test_custom_component_build_not_implemented(): """ - Test the build method of the CustomComponent class raises the NotImplementedError. 
+ Test the build method of the CustomComponent + class raises the NotImplementedError. """ custom_component = CustomComponent( code="def build(): pass", function_entrypoint_name="build" @@ -469,21 +487,87 @@ def test_build_config_no_code(): assert component.get_function_entrypoint_return_type == "" -def test_list_flows_multiple_queries(): - mock_flow_1 = MagicMock() - mock_flow_2 = MagicMock() +@pytest.fixture +def component(): + return CustomComponent( + field_config={ + "fields": { + "llm": {"type": "str"}, + "url": {"type": "str"}, + "year": {"type": "int"}, + } + } + ) - session_getter_module = "langflow.database.base.session_getter" - with patch(session_getter_module) as mock_session_getter: - mock_session = MagicMock() - mock_session.query.return_value.all.side_effect = [[mock_flow_1], [mock_flow_2]] - mock_session_getter.return_value.__enter__.return_value = mock_session +@pytest.fixture(scope="session") +def test_flow(db): + flow_data = { + "nodes": [{"id": "1"}, {"id": "2"}], + "edges": [{"source": "1", "target": "2"}], + } - component = CustomComponent() - result = component.list_flows() + # Create flow + flow = FlowCreate( + id=uuid4(), name="Test Flow", description="Fixture flow", data=flow_data + ) - # Only the result of the second query is returned - assert len(result) == 1 - assert result[0] == mock_flow_2 - assert mock_session.query.call_count == 2 + # Add to database + db.add(flow) + db.commit() + + yield flow + + # Clean up + db.delete(flow) + db.commit() + + +@pytest.fixture(scope="session") +def db(app): + # Setup database for tests + yield app.db + + # Teardown + app.db.drop_all() + + +def test_list_flows_return_type(component): + flows = component.list_flows() + assert isinstance(flows, list) + + +def test_list_flows_flow_objects(component): + flows = component.list_flows() + assert all(isinstance(flow, Flow) for flow in flows) + + +def test_build_config_return_type(component): + config = component.build_config() + assert isinstance(config, 
dict) + + +def test_build_config_has_fields(component): + config = component.build_config() + assert "fields" in config + + +def test_build_config_fields_dict(component): + config = component.build_config() + assert isinstance(config["fields"], dict) + + +def test_build_config_field_keys(component): + config = component.build_config() + assert all(isinstance(key, str) for key in config["fields"]) + + +def test_build_config_field_values_dict(component): + config = component.build_config() + assert all(isinstance(value, dict) for value in config["fields"].values()) + + +def test_build_config_field_value_keys(component): + config = component.build_config() + field_values = config["fields"].values() + assert all("type" in value for value in field_values) From ba02e99ec5a79d5b1ccd31d570176a48887bab42 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 26 Jul 2023 18:02:12 +0100 Subject: [PATCH 195/221] =?UTF-8?q?=F0=9F=94=84=20refactor(index.tsx):=20r?= =?UTF-8?q?eorder=20import=20statements=20for=20better=20organization=20an?= =?UTF-8?q?d=20readability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/index.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/frontend/src/index.tsx b/src/frontend/src/index.tsx index 0ce5c0f5b..31d8f21f1 100644 --- a/src/frontend/src/index.tsx +++ b/src/frontend/src/index.tsx @@ -5,9 +5,9 @@ import ContextWrapper from "./contexts"; import reportWebVitals from "./reportWebVitals"; import { ApiInterceptor } from "./controllers/API/api"; -import "./style/applies.css"; -import "./style/classes.css"; import "./style/index.css"; +import "./style/classes.css"; +import "./style/applies.css"; const root = ReactDOM.createRoot( document.getElementById("root") as HTMLElement From 011dd9805062f627d34b295a58f7ac008ad0d959 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 26 Jul 2023 16:44:15 -0300 Subject: [PATCH 196/221] fixed bug on check and save --- 
src/frontend/src/controllers/API/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index 49b8455bd..e13886e5d 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -1,4 +1,4 @@ -import { AxiosResponse } from "axios"; +import axios, { AxiosResponse } from "axios"; import { ReactFlowJsonObject } from "reactflow"; import { api } from "../../controllers/API/api"; import { APIObjectType, sendAllProps } from "../../types/api/index"; From 8ac890c5536995919bc0bb1bc982962e91972bd9 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 14:42:48 -0300 Subject: [PATCH 197/221] =?UTF-8?q?=F0=9F=94=A7=20chore(chains.mdx):=20add?= =?UTF-8?q?=20import=20statement=20for=20Admonition=20component=20to=20imp?= =?UTF-8?q?rove=20code=20organization=20and=20readability=20=F0=9F=94=A7?= =?UTF-8?q?=20chore(chains.mdx):=20fix=20formatting=20and=20indentation=20?= =?UTF-8?q?for=20better=20code=20readability=20=F0=9F=94=A7=20chore(chains?= =?UTF-8?q?.mdx):=20update=20verbose=20parameter=20description=20to=20impr?= =?UTF-8?q?ove=20clarity=20=F0=9F=94=A7=20chore(chains.mdx):=20fix=20forma?= =?UTF-8?q?tting=20and=20indentation=20for=20better=20code=20readability?= =?UTF-8?q?=20=F0=9F=94=A7=20chore(chains.mdx):=20update=20verbose=20param?= =?UTF-8?q?eter=20description=20to=20improve=20clarity=20=F0=9F=94=A7=20ch?= =?UTF-8?q?ore(chains.mdx):=20fix=20formatting=20and=20indentation=20for?= =?UTF-8?q?=20better=20code=20readability=20=F0=9F=94=A7=20chore(chains.md?= =?UTF-8?q?x):=20update=20verbose=20parameter=20description=20to=20improve?= =?UTF-8?q?=20clarity=20=F0=9F=94=A7=20chore(chains.mdx):=20fix=20formatti?= =?UTF-8?q?ng=20and=20indentation=20for=20better=20code=20readability=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(chains.mdx):=20update=20verbose=20paramete?= 
=?UTF-8?q?r=20description=20to=20improve=20clarity=20=F0=9F=94=A7=20chore?= =?UTF-8?q?(chains.mdx):=20fix=20formatting=20and=20indentation=20for=20be?= =?UTF-8?q?tter=20code=20readability=20=F0=9F=94=A7=20chore(chains.mdx):?= =?UTF-8?q?=20update=20verbose=20parameter=20description=20to=20improve=20?= =?UTF-8?q?clarity=20=F0=9F=94=A7=20chore(chains.mdx):=20fix=20formatting?= =?UTF-8?q?=20and=20indentation=20for=20better=20code=20readability=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(chains.mdx):=20update=20verbose=20paramete?= =?UTF-8?q?r=20description=20to=20improve=20clarity=20=F0=9F=94=A7=20chore?= =?UTF-8?q?(chains.mdx):=20fix=20formatting=20and=20indentation=20for=20be?= =?UTF-8?q?tter=20code=20readability=20=F0=9F=94=A7=20chore(chains.mdx):?= =?UTF-8?q?=20update=20verbose=20parameter=20description=20to=20improve=20?= =?UTF-8?q?clarity=20=F0=9F=94=A7=20chore(chains.mdx):=20fix=20formatting?= =?UTF-8?q?=20and=20indentation=20for=20better=20code=20readability=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(chains.mdx):=20update=20verbose=20paramete?= =?UTF-8?q?r=20description=20to=20improve=20clarity=20=F0=9F=94=A7=20chore?= =?UTF-8?q?(chains.mdx):=20fix=20formatting=20and=20indentation=20for=20be?= =?UTF-8?q?tter=20code=20readability=20=F0=9F=94=A7=20chore(chains.mdx):?= =?UTF-8?q?=20update=20verbose=20parameter=20description=20to=20improve=20?= =?UTF-8?q?clarity=20=F0=9F=94=A7=20chore(chains.mdx):=20fix=20formatting?= =?UTF-8?q?=20and=20indentation=20for=20better=20code=20readability=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(chains.mdx):=20update=20verbose=20paramete?= =?UTF-8?q?r=20description=20to=20improve=20clarity=20=F0=9F=94=A7=20chore?= =?UTF-8?q?(chains.mdx):=20fix=20formatting=20and=20indentation=20for=20be?= =?UTF-8?q?tter=20code=20readability=20=F0=9F=94=A7=20chore(chains.mdx):?= =?UTF-8?q?=20update=20verbose=20parameter=20description=20to=20improve=20?= =?UTF-8?q?clarity=20=F0=9F=94=A7=20chore(chains.mdx):=20fix=20formatting?= 
=?UTF-8?q?=20and=20indentation=20for=20better=20code=20readability=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(chains.mdx):=20update=20verbose=20paramete?= =?UTF-8?q?r=20description=20to=20improve=20clarity=20=F0=9F=94=A7=20chore?= =?UTF-8?q?(chains.mdx):=20fix=20formatting=20and=20indentation=20for=20be?= =?UTF-8?q?tter=20code=20readability=20=F0=9F=94=A7=20chore(chains.mdx):?= =?UTF-8?q?=20update=20verbose=20parameter=20description=20to=20improve=20?= =?UTF-8?q?clarity=20=F0=9F=94=A7=20chore(chains.mdx):=20fix=20formatting?= =?UTF-8?q?=20and=20indentation=20for=20better=20code=20readability=20?= =?UTF-8?q?=F0=9F=94=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 📝 chore(docs): update import statements for Admonition component in examples 📝 chore(docs): update link in Prompts component to use Admonition component 📝 chore(docs): update import statements for Admonition component in examples 📝 chore(docs): update link in Conversation Chain component to use Admonition component 📝 chore(docs): update import statements for Admonition component in examples 📝 chore(docs): update link in CSV Loader component to use Admonition component 📝 chore(docs): update import statements for Admonition component in examples 📝 chore(docs): update link in MidJourney Prompt Chain component to use Admonition component 📝 chore(docs): update import statements for Admonition component in examples 📝 chore(docs): update link in Multiple Vector Stores component to use Admonition component 📝 docs(examples/python-function.mdx): add import statement for Admonition component 📝 docs(examples/python-function.mdx): improve readability of tip admonition by breaking lines 📝 docs(examples/python-function.mdx): improve readability of info admonition by breaking lines 📝 docs(examples/serp-api-tool.mdx): add import statement for Admonition component 📝 docs(examples/serp-api-tool.mdx): improve readability of info admonition by breaking lines 📝 
docs(guidelines/features.mdx): add import statement for Admonition component 📝 docs(guidelines/features.mdx): improve readability of caution admonition by breaking lines --- docs/docs/components/chains.mdx | 41 ++++++++++--------- docs/docs/components/prompts.mdx | 12 ++++-- docs/docs/examples/buffer-memory.mdx | 7 +++- docs/docs/examples/conversation-chain.mdx | 13 ++++-- docs/docs/examples/csv-loader.mdx | 24 +++++++---- .../docs/examples/midjourney-prompt-chain.mdx | 15 ++++--- docs/docs/examples/multiple-vectorstores.mdx | 27 +++++++----- docs/docs/examples/python-function.mdx | 23 +++++++---- docs/docs/examples/serp-api-tool.mdx | 28 ++++++++----- docs/docs/guidelines/features.mdx | 6 ++- 10 files changed, 122 insertions(+), 74 deletions(-) diff --git a/docs/docs/components/chains.mdx b/docs/docs/components/chains.mdx index 52de6c481..96dcac2d0 100644 --- a/docs/docs/components/chains.mdx +++ b/docs/docs/components/chains.mdx @@ -1,6 +1,7 @@ import ThemedImage from "@theme/ThemedImage"; import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; +import Admonition from "@theme/Admonition"; # Chains @@ -12,22 +13,23 @@ Chains, in the context of language models, refer to a series of calls made to a The `CombineDocsChain` incorporates methods to combine or aggregate loaded documents for question-answering functionality. -:::info + Works as a proxy of LangChain’s [documents](https://python.langchain.com/docs/modules/chains/document/) chains generated by the `load_qa_chain` function. -::: + **Params** - **LLM:** Language Model to use in the chain. - **chain_type:** The chain type to be used. Each one of them applies a different “combination strategy”. - - **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (“stuff" as in "to stuff" or "to fill") is the most straightforward of *the* document chains. 
It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls. - - **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary. - - **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned. - - **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer. - Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents. 
+ - **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (“stuff" as in "to stuff" or "to fill") is the most straightforward of _the_ document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls. + - **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary. + - **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned. + - **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer. + + Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. 
There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents. --- @@ -41,7 +43,7 @@ The `ConversationChain` is a straightforward chain for interactive conversations - **Memory:** Default memory store. - **input_key:** Used to specify the key under which the user input will be stored in the conversation memory. It allows you to provide the user's input to the chain for processing and generating a response. - **output_key:** Used to specify the key under which the generated response will be stored in the conversation memory. It allows you to retrieve the response using the specified key. -- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can be helpful for debugging and understanding the chain's behavior. If set to False, it will suppress the verbose output — defaults to `False`. +- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can be helpful for debugging and understanding the chain's behavior. If set to False, it will suppress the verbose output — defaults to `False`. --- @@ -49,11 +51,11 @@ The `ConversationChain` is a straightforward chain for interactive conversations The `ConversationalRetrievalChain` extracts information and provides answers by combining document search and question-answering abilities. -:::info + A retriever is a component that finds documents based on a query. It doesn't store the documents themselves, but it returns the ones that match the query. -::: + **Params** @@ -61,12 +63,13 @@ A retriever is a component that finds documents based on a query. 
It doesn't sto - **Memory:** Default memory store. - **Retriever:** The retriever used to fetch relevant documents. - **chain_type:** The chain type to be used. Each one of them applies a different “combination strategy”. - - **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (“stuff" as in "to stuff" or "to fill") is the most straightforward of *the* document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls. - - **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary. - - **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned. - - **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer. 
- Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents. + - **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (“stuff" as in "to stuff" or "to fill") is the most straightforward of _the_ document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls. + - **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary. + - **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned. 
+ - **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer. + + Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents. - **return_source_documents:** Used to specify whether or not to include the source documents that were used to answer the question in the output. When set to `True`, source documents will be included in the output along with the generated answer. This can be useful for providing additional context or references to the user — defaults to `True`. - **verbose:** Whether or not to run in verbose mode. In verbose mode, intermediate logs will be printed to the console — defaults to `False`. @@ -108,17 +111,17 @@ The `LLMMathChain` works by using the language model with an `LLMChain` to under `RetrievalQA` is a chain used to find relevant documents or information to answer a given query. The retriever is responsible for returning the relevant documents based on the query, and the QA component then extracts the answer from those documents. The retrieval QA system combines the capabilities of both the retriever and the QA component to provide accurate and relevant answers to user queries. -:::info + A retriever is a component that finds documents based on a query. 
It doesn't store the documents themselves, but it returns the ones that match the query. -::: + **Params** - **Combine Documents Chain:** Chain to use to combine the documents. - **Memory:** Default memory store. -- **Retriever:** The retriever used to fetch relevant documents. +- **Retriever:** The retriever used to fetch relevant documents. - **input_key:** This parameter is used to specify the key in the input data that contains the question. It is used to retrieve the question from the input data and pass it to the question-answering model for generating the answer — defaults to `query`. - **output_key:** This parameter is used to specify the key in the output data where the generated answer will be stored. It is used to retrieve the answer from the output data after the question-answering model has generated it — defaults to `result`. - **return_source_documents:** Used to specify whether or not to include the source documents that were used to answer the question in the output. When set to `True`, source documents will be included in the output along with the generated answer. This can be useful for providing additional context or references to the user — defaults to `True`. @@ -134,4 +137,4 @@ The `SQLDatabaseChain` finds answers to questions using a SQL database. It works - **Db:** SQL Database to connect to. - **LLM:** Language Model to use in the chain. -- **Prompt:** Prompt template to translate natural language to SQL. \ No newline at end of file +- **Prompt:** Prompt template to translate natural language to SQL. diff --git a/docs/docs/components/prompts.mdx b/docs/docs/components/prompts.mdx index f4f2c4cae..0c7257272 100644 --- a/docs/docs/components/prompts.mdx +++ b/docs/docs/components/prompts.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # Prompts A prompt refers to the input given to a language model. It is constructed from multiple components and can be parametrized using prompt templates. 
A prompt template is a reproducible way to generate prompts and allow for easy customization through input variables. @@ -8,8 +10,10 @@ A prompt refers to the input given to a language model. It is constructed from m The `PromptTemplate` component allows users to create prompts and define variables that provide control over instructing the model. The template can take in a set of variables from the end user and generates the prompt once the conversation is initiated. -:::info -Once a variable is defined in the prompt template, it becomes a component input of its own. Check out [Prompt Customization](../guidelines/prompt-customization.mdx) to learn more. -::: + + Once a variable is defined in the prompt template, it becomes a component + input of its own. Check out [Prompt + Customization](../guidelines/prompt-customization.mdx) to learn more. + -- **template:** Template used to format an individual request. \ No newline at end of file +- **template:** Template used to format an individual request. diff --git a/docs/docs/examples/buffer-memory.mdx b/docs/docs/examples/buffer-memory.mdx index c3e886cf9..d34649991 100644 --- a/docs/docs/examples/buffer-memory.mdx +++ b/docs/docs/examples/buffer-memory.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # Buffer Memory For certain applications, retaining past interactions is crucial. For that, chains and agents may accept a memory component as one of their input parameters. The `ConversationBufferMemory` component is one of them. It stores messages and extracts them into variables. 
@@ -17,9 +19,10 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components 🦜🔗 + - [`ConversationBufferMemory`](https://python.langchain.com/docs/modules/memory/how_to/buffer) - [`ConversationChain`](https://python.langchain.com/docs/modules/chains/) - [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) - ::: + + diff --git a/docs/docs/examples/conversation-chain.mdx b/docs/docs/examples/conversation-chain.mdx index b8cbb11bb..db3181881 100644 --- a/docs/docs/examples/conversation-chain.mdx +++ b/docs/docs/examples/conversation-chain.mdx @@ -1,10 +1,14 @@ +import Admonition from "@theme/Admonition"; + # Conversation Chain This example shows how to instantiate a simple `ConversationChain` component using a Language Model (LLM). Once the Node Status turns green 🟢, the chat will be ready to take in user messages. Here, we used `ChatOpenAI` to act as the required LLM input, but you can use any LLM for this purpose. -:::info + + Make sure to always get the API key from the provider. -::: + + ## ⛓️ Langflow Example @@ -21,8 +25,9 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components 🦜🔗 + - [`ConversationChain`](https://python.langchain.com/docs/modules/chains/) - [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) - ::: + + diff --git a/docs/docs/examples/csv-loader.mdx b/docs/docs/examples/csv-loader.mdx index de808ec3d..c59dfc1e7 100644 --- a/docs/docs/examples/csv-loader.mdx +++ b/docs/docs/examples/csv-loader.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # CSV Loader The `VectoStoreAgent` component retrieves information from one or more vector stores. This example shows a `VectoStoreAgent` connected to a CSV file through the `Chroma` vector store. 
Process description: @@ -7,13 +9,18 @@ The `VectoStoreAgent` component retrieves information from one or more vector st - These chunks feed the `Chroma` vector store, which converts them into vectors and stores them for fast indexing. - Finally, the agent accesses the information of the vector store through the `VectorStoreInfo` tool. -:::info -The vector store is used for efficient semantic search, while `VectorStoreInfo` carries information about it, such as its name and description. Embeddings are a way to represent words, phrases, or any entities in a vector space. Learn more about them [here](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). -::: + + The vector store is used for efficient semantic search, while + `VectorStoreInfo` carries information about it, such as its name and + description. Embeddings are a way to represent words, phrases, or any entities + in a vector space. Learn more about them + [here](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). + -:::tip -Once you build this flow, ask questions about the data in the chat interface (e.g., number of rows or columns). -::: + + Once you build this flow, ask questions about the data in the chat interface + (e.g., number of rows or columns). 
+ ## ⛓️ Langflow Example @@ -30,7 +37,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components 🦜🔗 + - [`CSVLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv) - [`CharacterTextSplitter`](https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter) @@ -39,4 +46,5 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; - [`VectorStoreInfo`](https://python.langchain.com/docs/modules/data_connection/vectorstores/) - [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) - [`VectorStoreAgent`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore) - ::: + + diff --git a/docs/docs/examples/midjourney-prompt-chain.mdx b/docs/docs/examples/midjourney-prompt-chain.mdx index d3ca57c91..c79bb0b27 100644 --- a/docs/docs/examples/midjourney-prompt-chain.mdx +++ b/docs/docs/examples/midjourney-prompt-chain.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # MidJourney Prompt Chain The `MidJourneyPromptChain` can be used to generate imaginative and detailed MidJourney prompts. @@ -14,9 +16,11 @@ And get a response such as: Imagine a mysterious forest, the trees are tall and ancient, their branches reaching up to the sky. Through the darkness, a dragon emerges from the shadows, its scales shimmering in the moonlight. Its wingspan is immense, and its eyes glow with a fierce intensity. It is a majestic and powerful creature, one that commands both respect and fear. ``` -:::tip -Notice that the `ConversationSummaryMemory` stores a summary of the conversation over time. Try using it to create better prompts as the conversation goes on. -::: + + Notice that the `ConversationSummaryMemory` stores a summary of the + conversation over time. Try using it to create better prompts as the + conversation goes on. 
+ ## ⛓️ Langflow Example @@ -33,8 +37,9 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components 🦜🔗 + - [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) - [`ConversationSummaryMemory`](https://python.langchain.com/docs/modules/memory/how_to/summary) - ::: + + diff --git a/docs/docs/examples/multiple-vectorstores.mdx b/docs/docs/examples/multiple-vectorstores.mdx index 36890c866..0bb6b9ade 100644 --- a/docs/docs/examples/multiple-vectorstores.mdx +++ b/docs/docs/examples/multiple-vectorstores.mdx @@ -1,12 +1,15 @@ +import Admonition from "@theme/Admonition"; + # Multiple Vector Stores The example below shows an agent operating with two vector stores built upon different data sources. The `TextLoader` loads a TXT file, while the `WebBaseLoader` pulls text from webpages into a document format to accessed downstream. The `Chroma` vector stores are created analogous to what we have demonstrated in our [CSV Loader](/examples/csv-loader.mdx) example. Finally, the `VectorStoreRouterAgent` constructs an agent that routes between the vector stores. -:::info -Get the TXT file used [here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt). -::: + + Get the TXT file used + [here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt). + URL used by the `WebBaseLoader`: @@ -14,13 +17,15 @@ URL used by the `WebBaseLoader`: https://pt.wikipedia.org/wiki/Harry_Potter ``` -:::tip -When you build the flow, request information about one of the sources. The agent should be able to use the correct source to generate a response. -::: + + When you build the flow, request information about one of the sources. The + agent should be able to use the correct source to generate a response. 
+ -:::info -Learn more about Multiple Vector Stores [here](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore?highlight=Multiple%20Vector%20Stores#multiple-vectorstores). -::: + + Learn more about Multiple Vector Stores + [here](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore?highlight=Multiple%20Vector%20Stores#multiple-vectorstores). + ## ⛓️ Langflow Example @@ -37,7 +42,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components 🦜🔗 + - [`WebBaseLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base) - [`TextLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/unstructured_file) @@ -49,4 +54,4 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; - [`VectorStoreRouterToolkit`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore) - [`VectorStoreRouterAgent`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore) -::: + diff --git a/docs/docs/examples/python-function.mdx b/docs/docs/examples/python-function.mdx index 12a262a3f..f537075c6 100644 --- a/docs/docs/examples/python-function.mdx +++ b/docs/docs/examples/python-function.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # Python Function Langflow allows you to create a customized tool using the `PythonFunction` connected to a `Tool` component. In this example, Regex is used in Python to validate a pattern. @@ -15,15 +17,19 @@ def is_brazilian_zipcode(zipcode: str) -> bool: return False ``` -:::tip -When a tool is called, it is often desirable to have its output returned directly to the user. You can do this by setting the **return_direct** flag for a tool to be True. -::: + + When a tool is called, it is often desirable to have its output returned + directly to the user. You can do this by setting the **return_direct** flag + for a tool to be True. 
+ The `AgentInitializer` component is a quick way to construct an agent from the model and tools. -:::info -The `PythonFunction` is a custom component that uses the LangChain 🦜🔗 tool decorator. Learn more about it [here](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools). -::: + + The `PythonFunction` is a custom component that uses the LangChain 🦜🔗 tool + decorator. Learn more about it + [here](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools). + ## ⛓️ Langflow Example @@ -40,9 +46,10 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components 🦜🔗 + - [`PythonFunctionTool`](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools) - [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) - [`AgentInitializer`](https://python.langchain.com/docs/modules/agents/) - ::: + + diff --git a/docs/docs/examples/serp-api-tool.mdx b/docs/docs/examples/serp-api-tool.mdx index a7e1d3d8e..60e55791a 100644 --- a/docs/docs/examples/serp-api-tool.mdx +++ b/docs/docs/examples/serp-api-tool.mdx @@ -1,24 +1,29 @@ +import Admonition from "@theme/Admonition"; + # Serp API Tool The [Serp API](https://serpapi.com/) (Search Engine Results Page) allows developers to scrape results from search engines such as Google, Bing and Yahoo, and can be used as in Langflow through the `Search` component. -:::info -To use the Serp API, you first need to sign up [Serp API](https://serpapi.com/) for an API key on the provider's website. -::: + + To use the Serp API, you first need to sign up [Serp + API](https://serpapi.com/) for an API key on the provider's website. + Here, the `ZeroShotPrompt` component specifies a prompt template for the `ZeroShotAgent`. Set a _Prefix_ and _Suffix_ with rules for the agent to obey. In the example, we used default templates. 
The `LLMChain` is a simple chain that takes in a prompt template, formats it with the user input, and returns the response from an LLM. -:::tip -In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the LLM, but feel free to experiment with other Language Models! -::: + + In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the + LLM, but feel free to experiment with other Language Models! + The `ZeroShotAgent` takes the `LLMChain` and the `Search` tool as inputs, using the tool to find information when necessary. -:::info -Learn more about the Serp API [here](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi). -::: + + Learn more about the Serp API + [here](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi). + ## ⛓️ Langflow Example @@ -35,11 +40,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components 🦜🔗 + - [`ZeroShotPrompt`](https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/) - [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) - [`LLMChain`](https://python.langchain.com/docs/modules/chains/foundational/llm_chain) - [`Search`](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi) - [`ZeroShotAgent`](https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent) - ::: + + diff --git a/docs/docs/guidelines/features.mdx b/docs/docs/guidelines/features.mdx index cf8b09c6e..18bd0bd75 100644 --- a/docs/docs/guidelines/features.mdx +++ b/docs/docs/guidelines/features.mdx @@ -2,6 +2,7 @@ import ThemedImage from "@theme/ThemedImage"; import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; # Features @@ -34,9 +35,10 @@ import ReactPlayer from "react-player"; Flows can be exported and imported 
as JSON files. -:::caution + Watch out for API keys being stored in local files. -::: + + --- From e78ce40cbf3ff29111a4b414240823c4052bece6 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 14:56:54 -0300 Subject: [PATCH 198/221] =?UTF-8?q?=F0=9F=94=A7=20fix(docusaurus.config.js?= =?UTF-8?q?):=20fix=20import=20statement=20for=20remarkCodeHike=20to=20res?= =?UTF-8?q?olve=20linting=20error=20=E2=9C=A8=20feat(docusaurus.config.js)?= =?UTF-8?q?:=20add=20support=20for=20code=20highlighting=20with=20remarkCo?= =?UTF-8?q?deHike=20plugin?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/docusaurus.config.js | 242 +++++++++++++++++++------------------- 1 file changed, 123 insertions(+), 119 deletions(-) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 3ac152b5b..6dffccbc8 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -1,127 +1,131 @@ const lightCodeTheme = require("prism-react-renderer/themes/github"); +const { remarkCodeHike } = require("@code-hike/mdx"); // With JSDoc @type annotations, IDEs can provide config autocompletion /** @type {import('@docusaurus/types').DocusaurusConfig} */ -( - module.exports = { - title: "Langflow Documentation", - tagline: "Langflow is a GUI for LangChain, designed with react-flow", - favicon: "img/favicon.ico", - url: "https://logspace-ai.github.io", - baseUrl: "/", - onBrokenLinks: "throw", - onBrokenMarkdownLinks: "warn", - organizationName: "logspace-ai", - projectName: "langflow", - trailingSlash: false, - customFields: { - mendableAnonKey: process.env.MENDABLE_ANON_KEY, - }, - i18n: { - defaultLocale: "en", - locales: ["en"], - }, - presets: [ - [ - "@docusaurus/preset-classic", - /** @type {import('@docusaurus/preset-classic').Options} */ - ({ - docs: { - routeBasePath: "/", - sidebarPath: require.resolve("./sidebars.js"), - path: "docs", - // sidebarPath: 'sidebars.js', - }, - theme: { - customCss: 
require.resolve("./src/css/custom.css"), - }, - }), - ], - ], - plugins: [ - ["docusaurus-node-polyfills", { excludeAliases: ["console"] }], - "docusaurus-plugin-image-zoom", - // .... - async function myPlugin(context, options) { - return { - name: "docusaurus-tailwindcss", - configurePostCss(postcssOptions) { - // Appends TailwindCSS and AutoPrefixer. - postcssOptions.plugins.push(require("tailwindcss")); - postcssOptions.plugins.push(require("autoprefixer")); - return postcssOptions; - }, - }; - }, - ], - themeConfig: - /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ +module.exports = { + title: "Langflow Documentation", + tagline: "Langflow is a GUI for LangChain, designed with react-flow", + favicon: "img/favicon.ico", + url: "https://logspace-ai.github.io", + baseUrl: "/", + onBrokenLinks: "throw", + onBrokenMarkdownLinks: "warn", + organizationName: "logspace-ai", + projectName: "langflow", + trailingSlash: false, + customFields: { + mendableAnonKey: process.env.MENDABLE_ANON_KEY, + }, + i18n: { + defaultLocale: "en", + locales: ["en"], + }, + presets: [ + [ + "@docusaurus/preset-classic", + /** @type {import('@docusaurus/preset-classic').Options} */ ({ - navbar: { - hideOnScroll: true, - title: "Langflow", - logo: { - alt: "Langflow", - src: "img/chain.png", - }, - items: [ - // right - { - position: "right", - href: "https://github.com/logspace-ai/langflow", - position: "right", - className: "header-github-link", - target: "_blank", - rel: null, - }, - { - position: "right", - href: "https://twitter.com/logspace_ai", - position: "right", - className: "header-twitter-link", - target: "_blank", - rel: null, - }, - { - position: "right", - href: "https://discord.gg/EqksyE2EX9", - position: "right", - className: "header-discord-link", - target: "_blank", - rel: null, - }, + docs: { + beforeDefaultRemarkPlugins: [[remarkCodeHike, { theme: "nord" }]], + routeBasePath: "/", + sidebarPath: require.resolve("./sidebars.js"), + path: "docs", + // 
sidebarPath: 'sidebars.js', + }, + theme: { + customCss: [ + require.resolve("@code-hike/mdx/styles.css"), + require.resolve("./src/css/custom.css"), ], }, - tableOfContents: { - minHeadingLevel: 2, - maxHeadingLevel: 5, - }, - colorMode: { - defaultMode: "light", - disableSwitch: true, - respectPrefersColorScheme: false, - }, - announcementBar: { - content: - '⭐️ If you like ⛓️Langflow, star it on GitHub! ⭐️', - backgroundColor: "#B53D38", //Mustard Yellow #D19900 #D4B20B - Salmon #E9967A - textColor: "#fff", - isCloseable: false, - }, - footer: { - links: [], - copyright: `Copyright © ${new Date().getFullYear()} Logspace.`, - }, - zoom: { - selector: ".markdown :not(a) > img:not(.no-zoom)", - background: { - light: "rgba(240, 240, 240, 0.9)", - }, - config: {}, - }, - prism: { - theme: lightCodeTheme, - }, }), - } -); + ], + ], + plugins: [ + ["docusaurus-node-polyfills", { excludeAliases: ["console"] }], + "docusaurus-plugin-image-zoom", + // .... + async function myPlugin(context, options) { + return { + name: "docusaurus-tailwindcss", + configurePostCss(postcssOptions) { + // Appends TailwindCSS and AutoPrefixer. 
+ postcssOptions.plugins.push(require("tailwindcss")); + postcssOptions.plugins.push(require("autoprefixer")); + return postcssOptions; + }, + }; + }, + ], + themes: ["mdx-v2"], + themeConfig: + /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ + ({ + navbar: { + hideOnScroll: true, + title: "Langflow", + logo: { + alt: "Langflow", + src: "img/chain.png", + }, + items: [ + // right + { + position: "right", + href: "https://github.com/logspace-ai/langflow", + position: "right", + className: "header-github-link", + target: "_blank", + rel: null, + }, + { + position: "right", + href: "https://twitter.com/logspace_ai", + position: "right", + className: "header-twitter-link", + target: "_blank", + rel: null, + }, + { + position: "right", + href: "https://discord.gg/EqksyE2EX9", + position: "right", + className: "header-discord-link", + target: "_blank", + rel: null, + }, + ], + }, + tableOfContents: { + minHeadingLevel: 2, + maxHeadingLevel: 5, + }, + colorMode: { + defaultMode: "light", + disableSwitch: true, + respectPrefersColorScheme: false, + }, + announcementBar: { + content: + '⭐️ If you like ⛓️Langflow, star it on GitHub! 
⭐️', + backgroundColor: "#B53D38", //Mustard Yellow #D19900 #D4B20B - Salmon #E9967A + textColor: "#fff", + isCloseable: false, + }, + footer: { + links: [], + copyright: `Copyright © ${new Date().getFullYear()} Logspace.`, + }, + zoom: { + selector: ".markdown :not(a) > img:not(.no-zoom)", + background: { + light: "rgba(240, 240, 240, 0.9)", + }, + config: {}, + }, + // prism: { + // theme: require("prism-react-renderer/themes/dracula"), + // }, + }), +}; From bb247af614ec7fdc50c38de47b9958e611ca678c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 15:04:01 -0300 Subject: [PATCH 199/221] =?UTF-8?q?=F0=9F=94=A7=20chore(package.json):=20u?= =?UTF-8?q?pdate=20dependencies?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 📦 Add new dependency: - "@code-hike/mdx": "^0.9.0" 🔄 Update existing dependencies: - "@mdx-js/react": "^2.3.0" - "docusaurus-theme-mdx-v2": "^0.1.2" --- docs/package-lock.json | 2033 +++++++++++++++++++++++++++++++++++++++- docs/package.json | 6 +- 2 files changed, 2032 insertions(+), 7 deletions(-) diff --git a/docs/package-lock.json b/docs/package-lock.json index 7db7f9376..ed79230c6 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -9,12 +9,13 @@ "version": "0.0.0", "dependencies": { "@babel/preset-react": "^7.22.3", + "@code-hike/mdx": "^0.9.0", "@docusaurus/core": "2.4.1", "@docusaurus/plugin-ideal-image": "^2.4.1", "@docusaurus/preset-classic": "2.4.1", "@docusaurus/theme-classic": "^2.4.1", "@docusaurus/theme-search-algolia": "^2.4.1", - "@mdx-js/react": "^1.6.22", + "@mdx-js/react": "^2.3.0", "@mendable/search": "^0.0.114", "@pbe/react-yandex-maps": "^1.2.4", "@prismicio/client": "^7.0.1", @@ -22,6 +23,7 @@ "autoprefixer": "^10.4.14", "clsx": "^1.2.1", "docusaurus-plugin-image-zoom": "^0.1.4", + "docusaurus-theme-mdx-v2": "^0.1.2", "jquery": "^3.7.0", "medium-zoom": "^1.0.8", "node-fetch": "^3.3.1", @@ -1986,6 +1988,49 @@ "node": ">=6.9.0" } }, + 
"node_modules/@code-hike/lighter": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@code-hike/lighter/-/lighter-0.7.0.tgz", + "integrity": "sha512-64O07rIORKQLB+5T/GKAmKcD9sC0N9yHFJXa0Hs+0Aee1G+I4bSXxTccuDFP6c/G/3h5Pk7yv7PoX9/SpzaeiQ==", + "funding": { + "url": "https://github.com/code-hike/lighter?sponsor=1" + } + }, + "node_modules/@code-hike/mdx": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@code-hike/mdx/-/mdx-0.9.0.tgz", + "integrity": "sha512-0wg68ZCjVWAkWT4gBUZJ8Mwktjen/XeWyqBQCrhA2IZSbZZnMYsEI6JJEFb/nZoNI3comB3JdxPLykZRq3qT2A==", + "dependencies": { + "@code-hike/lighter": "0.7.0", + "node-fetch": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/code-hike" + }, + "peerDependencies": { + "react": "^16.8.3 || ^17 || ^18" + } + }, + "node_modules/@code-hike/mdx/node_modules/node-fetch": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", + "integrity": "sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/@colors/colors": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", @@ -2683,6 +2728,18 @@ "react-dom": "^16.8.4 || ^17.0.0" } }, + "node_modules/@docusaurus/theme-classic/node_modules/@mdx-js/react": { + "version": "1.6.22", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", + "integrity": "sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "react": "^16.13.1 || ^17.0.0" + } + }, 
"node_modules/@docusaurus/theme-common": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.4.1.tgz", @@ -3168,15 +3225,19 @@ } }, "node_modules/@mdx-js/react": { - "version": "1.6.22", - "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", - "integrity": "sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-2.3.0.tgz", + "integrity": "sha512-zQH//gdOmuu7nt2oJR29vFhDv88oGPmVw6BggmrHeMI+xgEkp1B2dX9/bMBSYtK0dyLX/aOmesKS09g222K1/g==", + "dependencies": { + "@types/mdx": "^2.0.0", + "@types/react": ">=16" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" }, "peerDependencies": { - "react": "^16.13.1 || ^17.0.0" + "react": ">=16" } }, "node_modules/@mdx-js/util": { @@ -3665,6 +3726,14 @@ "node": ">=10.13.0" } }, + "node_modules/@types/acorn": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@types/acorn/-/acorn-4.0.6.tgz", + "integrity": "sha512-veQTnWP+1D/xbxVrPC3zHnCZRjSrKfhbMUlEA43iMZLu7EsnTtkJklIuwrCPbOi8YkvDQAiW05VQQFvvz9oieQ==", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/@types/body-parser": { "version": "1.19.2", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", @@ -3730,6 +3799,14 @@ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz", "integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==" }, + "node_modules/@types/estree-jsx": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.0.tgz", + "integrity": "sha512-3qvGd0z8F2ENTGr/GG1yViqfiKmRfrXVx5sJyHGFu3z7m5g5utCQtGp/g29JnjflhtQJBv1WDQukHiT58xPcYQ==", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/@types/express": { "version": "4.17.17", "resolved": 
"https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz", @@ -3817,6 +3894,11 @@ "@types/unist": "^2" } }, + "node_modules/@types/mdx": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.5.tgz", + "integrity": "sha512-76CqzuD6Q7LC+AtbPqrvD9AqsN0k8bsYo2bM2J8pmNldP1aIPAbzUQ7QbobyXL4eLr1wK5x8FZFe8eF/ubRuBg==" + }, "node_modules/@types/mime": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", @@ -4198,6 +4280,14 @@ "acorn": "^8" } }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, "node_modules/acorn-walk": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", @@ -4502,6 +4592,14 @@ "util": "^0.12.0" } }, + "node_modules/astring": { + "version": "1.8.6", + "resolved": "https://registry.npmjs.org/astring/-/astring-1.8.6.tgz", + "integrity": "sha512-ISvCdHdlTDlH5IpxQJIex7BWBywFWgjJSVdwst+/iQCoEYnyOaQ95+X1JGshuBjGp6nxKUy1jMgE3zPqN7fQdg==", + "bin": { + "astring": "bin/astring" + } + }, "node_modules/async-foreach": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/async-foreach/-/async-foreach-0.1.3.tgz", @@ -5391,6 +5489,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/character-entities-legacy": { "version": "1.1.4", "resolved": 
"https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", @@ -6884,6 +6991,296 @@ "node": ">=6" } }, + "node_modules/docusaurus-mdx-loader-v2": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/docusaurus-mdx-loader-v2/-/docusaurus-mdx-loader-v2-0.1.2.tgz", + "integrity": "sha512-Dd/XieCKKoirnJDou4h33zRZPCmbtSqvXrZm0yMmhCpLDpeScu8CBvveFVHCqs7UB+x82IpzgZX5rHkoFlz2Bw==", + "dependencies": { + "@babel/parser": "^7.17.3", + "@babel/traverse": "^7.17.3", + "@docusaurus/logger": "2.0.0-beta.18", + "@docusaurus/utils": "2.0.0-beta.18", + "@mdx-js/mdx": "^2.1.0", + "escape-html": "^1.0.3", + "estree-util-value-to-estree": "^1.3.0", + "file-loader": "^6.2.0", + "fs-extra": "^10.0.1", + "image-size": "^1.0.1", + "lz-string": "^1.4.4", + "mdast-util-to-string": "^2.0.0", + "remark-admonitions": "^1.2.1", + "remark-emoji": "^2.1.0", + "remark-gfm": "1.0.0", + "stringify-object": "^3.3.0", + "tslib": "^2.3.1", + "unist-util-visit": "^2.0.2", + "url-loader": "^4.1.1", + "webpack": "^5.69.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "react": "^16.8.4 || ^17.0.0", + "react-dom": "^16.8.4 || ^17.0.0" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/@docusaurus/logger": { + "version": "2.0.0-beta.18", + "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.0.0-beta.18.tgz", + "integrity": "sha512-frNe5vhH3mbPmH980Lvzaz45+n1PQl3TkslzWYXQeJOkFX17zUd3e3U7F9kR1+DocmAqHkgAoWuXVcvEoN29fg==", + "dependencies": { + "chalk": "^4.1.2", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/@docusaurus/utils": { + "version": "2.0.0-beta.18", + "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.0.0-beta.18.tgz", + "integrity": "sha512-v2vBmH7xSbPwx3+GB90HgLSQdj+Rh5ELtZWy7M20w907k0ROzDmPQ/8Ke2DK3o5r4pZPGnCrsB3SaYI83AEmAA==", + "dependencies": { + "@docusaurus/logger": "2.0.0-beta.18", + "@svgr/webpack": 
"^6.2.1", + "file-loader": "^6.2.0", + "fs-extra": "^10.0.1", + "github-slugger": "^1.4.0", + "globby": "^11.1.0", + "gray-matter": "^4.0.3", + "js-yaml": "^4.1.0", + "lodash": "^4.17.21", + "micromatch": "^4.0.5", + "resolve-pathname": "^3.0.0", + "shelljs": "^0.8.5", + "tslib": "^2.3.1", + "url-loader": "^4.1.1", + "webpack": "^5.70.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/@mdx-js/mdx": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-2.3.0.tgz", + "integrity": "sha512-jLuwRlz8DQfQNiUCJR50Y09CGPq3fLtmtUQfVrj79E0JWu3dvsVcxVIcfhR5h0iXu+/z++zDrYeiJqifRynJkA==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/mdx": "^2.0.0", + "estree-util-build-jsx": "^2.0.0", + "estree-util-is-identifier-name": "^2.0.0", + "estree-util-to-js": "^1.1.0", + "estree-walker": "^3.0.0", + "hast-util-to-estree": "^2.0.0", + "markdown-extensions": "^1.0.0", + "periscopic": "^3.0.0", + "remark-mdx": "^2.0.0", + "remark-parse": "^10.0.0", + "remark-rehype": "^10.0.0", + "unified": "^10.0.0", + "unist-util-position-from-estree": "^1.0.0", + "unist-util-stringify-position": "^3.0.0", + "unist-util-visit": "^4.0.0", + "vfile": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/@mdx-js/mdx/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/has-flag": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/remark-mdx": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-2.3.0.tgz", + "integrity": "sha512-g53hMkpM0I98MU266IzDFMrTD980gNF3BJnkyFcmN+dD873mQeD5rdMO3Y2X+x8umQfbSE0PcoEDl7ledSA+2g==", + "dependencies": { + "mdast-util-mdx": "^2.0.0", + "micromark-extension-mdxjs": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/trough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", + "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/unified": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + 
"integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", + "dependencies": { + "@types/unist": "^2.0.0", + "bail": "^2.0.0", + "extend": "^3.0.0", + "is-buffer": "^2.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/vfile": { + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz", + "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==", + "dependencies": { + "@types/unist": "^2.0.0", + "is-buffer": "^2.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": 
"https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/docusaurus-node-polyfills": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/docusaurus-node-polyfills/-/docusaurus-node-polyfills-1.0.0.tgz", @@ -6906,6 +7303,18 @@ "medium-zoom": "^1.0.6" } }, + "node_modules/docusaurus-theme-mdx-v2": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/docusaurus-theme-mdx-v2/-/docusaurus-theme-mdx-v2-0.1.2.tgz", + "integrity": "sha512-n5L4nx0LV5coTkZYS+owXmM0ACXWCbd4ou7aDrWIMm3YH7XPusSNelJpYsUKJxHFER/+czitbmieboFe4I7lMQ==", + "dependencies": { + "@mdx-js/react": "^2.1.0", + "docusaurus-mdx-loader-v2": "0.1.2" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/dom-converter": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", @@ -7247,6 +7656,106 @@ "node": ">=4.0" } }, + "node_modules/estree-util-attach-comments": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-2.1.1.tgz", + "integrity": "sha512-+5Ba/xGGS6mnwFbXIuQiDPTbuTxuMCooq3arVv7gPZtYpjp+VXH/NkHAP35OOefPhNG/UGqU3vt/LTABwcHX0w==", + "dependencies": { + "@types/estree": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-build-jsx": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-2.2.2.tgz", + "integrity": "sha512-m56vOXcOBuaF+Igpb9OPAy7f9w9OIkb5yhjsZuaPm7HoGi4oTOQi0h2+yZ+AtKklYFZ+rPC4n0wYCJCEU1ONqg==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + 
"estree-util-is-identifier-name": "^2.0.0", + "estree-walker": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-is-identifier-name": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-2.1.0.tgz", + "integrity": "sha512-bEN9VHRyXAUOjkKVQVvArFym08BTWB0aJPppZZr0UNyAqWsLaVfAqP7hbaTJjzHifmB5ebnR8Wm7r7yGN/HonQ==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-to-js": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/estree-util-to-js/-/estree-util-to-js-1.2.0.tgz", + "integrity": "sha512-IzU74r1PK5IMMGZXUVZbmiu4A1uhiPgW5hm1GjcOfr4ZzHaMPpLNJjR7HjXiIOzi25nZDrgFTobHTkV5Q6ITjA==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "astring": "^1.8.0", + "source-map": "^0.7.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-to-js/node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/estree-util-value-to-estree": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-1.3.0.tgz", + "integrity": "sha512-Y+ughcF9jSUJvncXwqRageavjrNPAI+1M/L3BI3PyLp1nmgYTGUXU6t5z1Y7OWuThoDdhPME07bQU+d5LxdJqw==", + "dependencies": { + "is-plain-obj": "^3.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/estree-util-value-to-estree/node_modules/is-plain-obj": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", + "integrity": 
"sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/estree-util-visit": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-1.2.1.tgz", + "integrity": "sha512-xbgqcrkIVbIG+lI/gzbvd9SGTJL4zqJKBFttUl5pP27KhAjtMKbX/mQXJ7qgyXpMgVy/zvpm0xoQQaGL8OloOw==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -8606,6 +9115,88 @@ "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" }, + "node_modules/hast-util-to-estree": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/hast-util-to-estree/-/hast-util-to-estree-2.3.3.tgz", + "integrity": "sha512-ihhPIUPxN0v0w6M5+IiAZZrn0LH2uZomeWwhn7uP7avZC6TE7lIiEh2yBMPr5+zi1aUCXq6VoYRgs2Bw9xmycQ==", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/unist": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "estree-util-attach-comments": "^2.0.0", + "estree-util-is-identifier-name": "^2.0.0", + "hast-util-whitespace": "^2.0.0", + "mdast-util-mdx-expression": "^1.0.0", + "mdast-util-mdxjs-esm": "^1.0.0", + "property-information": "^6.0.0", + 
"space-separated-tokens": "^2.0.0", + "style-to-object": "^0.4.1", + "unist-util-position": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-estree/node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-estree/node_modules/property-information": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.2.0.tgz", + "integrity": "sha512-kma4U7AFCTwpqq5twzC1YVIDXSqg6qQK6JN0smOw8fgRy1OkMi0CYSzFmsy6dnqSenamAtj0CyXMUJ1Mf6oROg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-estree/node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-estree/node_modules/style-to-object": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.1.tgz", + "integrity": "sha512-HFpbb5gr2ypci7Qw+IOhnP2zOU7e77b+rzM+wTzXzfi1PrtBCX0E7Pk4wL4iTLnhzZ+JgEGAhX81ebTg/aYjQw==", + "dependencies": { + "inline-style-parser": "0.1.1" + } + }, + "node_modules/hast-util-to-estree/node_modules/unist-util-position": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz", + "integrity": 
"sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-estree/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/hast-util-to-parse5": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", @@ -8622,6 +9213,15 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/hast-util-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz", + "integrity": "sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/hastscript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", @@ -9472,6 +10072,14 @@ "node": ">=0.10.0" } }, + "node_modules/is-reference": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.1.tgz", + "integrity": "sha512-baJJdQLiYaJdvFbJqXrcGv3WU3QCzBlUcI5QhbesIm6/xPsvmO+2CDoi/GMOFBQEQm+PXkwOPrp9KK5ozZsp2w==", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/is-regexp": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", @@ -9952,6 +10560,15 @@ "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", "integrity": 
"sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" }, + "node_modules/longest-streak": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.4.tgz", + "integrity": "sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/loose-envify": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", @@ -9987,6 +10604,14 @@ "yallist": "^3.0.2" } }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "bin": { + "lz-string": "bin/bin.js" + } + }, "node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -10058,6 +10683,26 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/markdown-extensions": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-1.1.1.tgz", + "integrity": "sha512-WWC0ZuMzCyDHYCasEGs4IPvLyTGftYwh6wIEOULOF0HXcqZlhwRzrK0w2VUlxWA98xnvb/jszw4ZSkJ6ADpM6Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/markdown-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", + "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", + "dependencies": { + "repeat-string": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/md5.js": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", @@ -10093,6 +10738,20 @@ "url": 
"https://opencollective.com/unified" } }, + "node_modules/mdast-util-find-and-replace": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-1.1.1.tgz", + "integrity": "sha512-9cKl33Y21lyckGzpSmEQnIDjEfeeWelN5s1kUW1LwdB0Fkuq2u+4GdqcGEygYxJE8GVqCl0741bYXHgamfWAZA==", + "dependencies": { + "escape-string-regexp": "^4.0.0", + "unist-util-is": "^4.0.0", + "unist-util-visit-parents": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/mdast-util-from-markdown": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz", @@ -10128,6 +10787,652 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/mdast-util-gfm": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-0.1.2.tgz", + "integrity": "sha512-NNkhDx/qYcuOWB7xHUGWZYVXvjPFFd6afg6/e2g+SV4r9q5XUcCbV4Wfa3DLYIiD+xAEZc6K4MGaE/m0KDcPwQ==", + "dependencies": { + "mdast-util-gfm-autolink-literal": "^0.1.0", + "mdast-util-gfm-strikethrough": "^0.2.0", + "mdast-util-gfm-table": "^0.1.0", + "mdast-util-gfm-task-list-item": "^0.1.0", + "mdast-util-to-markdown": "^0.6.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.3.tgz", + "integrity": "sha512-GjmLjWrXg1wqMIO9+ZsRik/s7PLwTaeCHVB7vRxUwLntZc8mzmTsLVr6HW1yLokcnhfURsn5zmSVdi3/xWWu1A==", + "dependencies": { + "ccount": "^1.0.0", + "mdast-util-find-and-replace": "^1.1.0", + "micromark": "^2.11.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark": { + 
"version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.3.tgz", + "integrity": "sha512-5OQLXpt6qdbttcDG/UxYY7Yjj3e8P7X16LzvpX8pIQPYJ/C2Z1qFGMmcw+1PZMUM3Z8wt8NRfYTvCni93mgsgA==", + "dependencies": { + "mdast-util-to-markdown": "^0.6.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.6.tgz", + "integrity": "sha512-j4yDxQ66AJSBwGkbpFEp9uG/LS1tZV3P33fN1gkyRB2LoRL+RR3f76m0HPHaby6F4Z5xr9Fv1URmATlRRUIpRQ==", + "dependencies": { + "markdown-table": "^2.0.0", + "mdast-util-to-markdown": "~0.6.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.6.tgz", + "integrity": "sha512-/d51FFIfPsSmCIRNp7E6pozM9z1GYPIkSy1urQ8s/o4TC22BZ7DqfHFWiqBD23bc7J3vV1Fc9O4QIHBlfuit8A==", + "dependencies": { + "mdast-util-to-markdown": "~0.6.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-2.0.1.tgz", + "integrity": "sha512-38w5y+r8nyKlGvNjSEqWrhG0w5PmnRA+wnBvm+ulYCct7nsGYhFVb0lljS9bQav4psDAS1eGkP2LMVcZBi/aqw==", + "dependencies": { + "mdast-util-from-markdown": "^1.0.0", + "mdast-util-mdx-expression": "^1.0.0", + "mdast-util-mdx-jsx": "^2.0.0", + "mdast-util-mdxjs-esm": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-1.3.2.tgz", + "integrity": "sha512-xIPmR5ReJDu/DHH1OoIT1HkuybIfRGYRywC+gJtI7qHjCJp/M9jrmBEJW22O8lskDWm562BX2W8TiAwRTb0rKA==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": 
"^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + 
}, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-2.1.4.tgz", + "integrity": "sha512-DtMn9CmVhVzZx3f+optVDF8yFgQVt7FghCRNdlIaS3X5Bnym3hZwPbg/XW86vdpKjlc1PVj26SpnLGeJBXD3JA==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "ccount": "^2.0.0", + "mdast-util-from-markdown": "^1.1.0", + "mdast-util-to-markdown": "^1.3.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-remove-position": "^4.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "funding": { + "type": 
"github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/parse-entities": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.1.tgz", + "integrity": "sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities": "^2.0.0", + "character-entities-legacy": 
"^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/unist-util-remove-position": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-4.0.2.tgz", + "integrity": "sha512-TkBb0HABNmxzAcfLf4qsIbFbaPDvMO6wa3b3j4VcEzFVaw1LBKwnW4/sRJ/atSLSzoIg41JWEdnE7N6DIhGDGQ==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-visit": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": 
"sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx/node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx/node_modules/mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + 
"micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-1.3.1.tgz", + "integrity": "sha512-SXqglS0HrEvSdUEfoXFtcg7DRl7S2cwOXc7jkuusG472Mmjag34DUDeOJUZtl+BVnyeO1frIgVpHlNRWc2gk/w==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { 
+ "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/mdast-util-mdxjs-esm/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz", + "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==", + "dependencies": { + "@types/mdast": "^3.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/mdast-util-to-hast": { "version": "10.0.1", "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz", @@ -10147,6 +11452,23 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/mdast-util-to-markdown": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz", + "integrity": "sha512-XeV9sDE7ZlOQvs45C9UKMtfTcctcaj/pGwH8YLbMHoMOXNNCn2LsqVQOqrF1+/NU8lKDAqozme9SCXWyo9oAcQ==", + "dependencies": { + "@types/unist": "^2.0.0", + "longest-streak": "^2.0.0", + "mdast-util-to-string": "^2.0.0", + "parse-entities": "^2.0.0", + "repeat-string": "^1.0.0", + "zwitch": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, "node_modules/mdast-util-to-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", @@ -10326,6 +11648,298 @@ "uvu": "^0.5.0" } }, + "node_modules/micromark-extension-gfm": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-0.3.3.tgz", + "integrity": "sha512-oVN4zv5/tAIA+l3GbMi7lWeYpJ14oQyJ3uEim20ktYFAcfX1x3LNlFGGlmrZHt7u9YlKExmyJdDGaTt6cMSR/A==", + "dependencies": { + "micromark": "~2.11.0", + "micromark-extension-gfm-autolink-literal": "~0.5.0", + "micromark-extension-gfm-strikethrough": "~0.6.5", + "micromark-extension-gfm-table": "~0.4.0", + "micromark-extension-gfm-tagfilter": "~0.3.0", + "micromark-extension-gfm-task-list-item": "~0.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz", + "integrity": "sha512-ePiDGH0/lhcngCe8FtH4ARFoxKTUelMp4L7Gg2pujYD5CSMb9PbblnyL+AAMud/SNMyusbS2XDSiPIRcQoNFAw==", + "dependencies": { + "micromark": "~2.11.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + } + }, + 
"node_modules/micromark-extension-gfm-strikethrough": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.5.tgz", + "integrity": "sha512-PpOKlgokpQRwUesRwWEp+fHjGGkZEejj83k9gU5iXCbDG+XBA92BqnRKYJdfqfkrRcZRgGuPuXb7DaK/DmxOhw==", + "dependencies": { + "micromark": "~2.11.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm-table": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.3.tgz", + "integrity": "sha512-hVGvESPq0fk6ALWtomcwmgLvH8ZSVpcPjzi0AjPclB9FsVRgMtGZkUcpE0zgjOCFAznKepF4z3hX8z6e3HODdA==", + "dependencies": { + "micromark": "~2.11.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-table/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": 
"^4.0.0", + "parse-entities": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz", + "integrity": "sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.3.tgz", + "integrity": "sha512-0zvM5iSLKrc/NQl84pZSjGo66aTGd57C1idmlWmE87lkMcXrTxg1uXa/nXomxJytoje9trP0NDLvw4bZ/Z/XCQ==", + "dependencies": { + "micromark": "~2.11.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + 
"debug": "^4.0.0", + "parse-entities": "^2.0.0" + } + }, + "node_modules/micromark-extension-mdx-expression": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-1.0.8.tgz", + "integrity": "sha512-zZpeQtc5wfWKdzDsHRBY003H2Smg+PUi2REhqgIhdzAa5xonhP03FcXxqFSerFiNUr5AWmHpaNPQTBVOS4lrXw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/estree": "^1.0.0", + "micromark-factory-mdx-expression": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-events-to-acorn": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-extension-mdx-jsx": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-1.0.5.tgz", + "integrity": "sha512-gPH+9ZdmDflbu19Xkb8+gheqEDqkSpdCEubQyxuz/Hn8DOXiXvrXeikOoBA71+e8Pfi0/UYmU3wW3H58kr7akA==", + "dependencies": { + "@types/acorn": "^4.0.0", + "@types/estree": "^1.0.0", + "estree-util-is-identifier-name": "^2.0.0", + "micromark-factory-mdx-expression": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdx-jsx/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + 
"unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdx-md": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-md/-/micromark-extension-mdx-md-1.0.1.tgz", + "integrity": "sha512-7MSuj2S7xjOQXAjjkbjBsHkMtb+mDGVW6uI2dBL9snOBCbZmoNgDAeZ0nSn9j3T42UE/g2xVNMn18PJxZvkBEA==", + "dependencies": { + "micromark-util-types": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdxjs": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs/-/micromark-extension-mdxjs-1.0.1.tgz", + "integrity": "sha512-7YA7hF6i5eKOfFUzZ+0z6avRG52GpWR8DL+kN47y3f2KhxbBZMhmxe7auOeaTBrW2DenbbZTf1ea9tA2hDpC2Q==", + "dependencies": { + "acorn": "^8.0.0", + "acorn-jsx": "^5.0.0", + "micromark-extension-mdx-expression": "^1.0.0", + "micromark-extension-mdx-jsx": "^1.0.0", + "micromark-extension-mdx-md": "^1.0.0", + "micromark-extension-mdxjs-esm": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-types": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdxjs-esm": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-1.0.5.tgz", + "integrity": "sha512-xNRBw4aoURcyz/S69B19WnZAkWJMxHMT5hE36GtDAyhoyn/8TuAeqjFJQlwk+MKQsUD7b3l7kFX+vlfVWgcX1w==", + "dependencies": { + "@types/estree": "^1.0.0", + "micromark-core-commonmark": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-events-to-acorn": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-position-from-estree": "^1.1.0", + "uvu": "^0.5.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdxjs-esm/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/micromark-factory-destination": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz", @@ -10367,6 +11981,44 @@ "uvu": "^0.5.0" } }, + "node_modules/micromark-factory-mdx-expression": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-1.0.9.tgz", + "integrity": "sha512-jGIWzSmNfdnkJq05c7b0+Wv0Kfz3NJ3N4cBjnbO4zjXIlxJr+f8lk+5ZmwFvqdAbUy2q6B5rCY//g0QAAaXDWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/estree": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-events-to-acorn": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-position-from-estree": "^1.0.0", + "uvu": "^0.5.0", + "vfile-message": "^3.0.0" + } + }, + "node_modules/micromark-factory-mdx-expression/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + 
}, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/micromark-factory-space": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz", @@ -10558,6 +12210,44 @@ } ] }, + "node_modules/micromark-util-events-to-acorn": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-1.2.3.tgz", + "integrity": "sha512-ij4X7Wuc4fED6UoLWkmo0xJQhsktfNh1J0m8g4PbIMPlx+ek/4YdW5mvbye8z/aZvAPUoxgXHrwVlXAPKMRp1w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/acorn": "^4.0.0", + "@types/estree": "^1.0.0", + "@types/unist": "^2.0.0", + "estree-util-visit": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0", + "vfile-message": "^3.0.0" + } + }, + "node_modules/micromark-util-events-to-acorn/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/micromark-util-html-tag-name": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz", @@ -12117,6 +13807,16 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-0.2.0.tgz", "integrity": "sha512-YHk5ez1hmMR5LOkb9iJkLKqoBlL7WD5M8ljC75ZfzXriuBIVNuecaXuU7e+hOwyqf24Wxhh7Vxgt7Hnw9288Tg==" }, + "node_modules/periscopic": { + "version": 
"3.1.0", + "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz", + "integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^3.0.0", + "is-reference": "^3.0.0" + } + }, "node_modules/picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", @@ -13957,6 +15657,56 @@ "jsesc": "bin/jsesc" } }, + "node_modules/rehype-parse": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-6.0.2.tgz", + "integrity": "sha512-0S3CpvpTAgGmnz8kiCyFLGuW5yA4OQhyNTm/nwPopZ7+PI11WnGl1TTWTGv/2hPEe/g2jRLlhVVSsoDH8waRug==", + "dependencies": { + "hast-util-from-parse5": "^5.0.0", + "parse5": "^5.0.0", + "xtend": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-parse/node_modules/hast-util-from-parse5": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-5.0.3.tgz", + "integrity": "sha512-gOc8UB99F6eWVWFtM9jUikjN7QkWxB3nY0df5Z0Zq1/Nkwl5V4hAAsl0tmwlgWl/1shlTF8DnNYLO8X6wRV9pA==", + "dependencies": { + "ccount": "^1.0.3", + "hastscript": "^5.0.0", + "property-information": "^5.0.0", + "web-namespaces": "^1.1.2", + "xtend": "^4.0.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-parse/node_modules/hastscript": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-5.1.2.tgz", + "integrity": "sha512-WlztFuK+Lrvi3EggsqOkQ52rKbxkXL3RwB6t5lwoa8QLMemoWfBuL43eDrwOamJyR7uKQKdmKYaBH1NZBiIRrQ==", + "dependencies": { + "comma-separated-tokens": "^1.0.0", + "hast-util-parse-selector": "^2.0.0", + "property-information": "^5.0.0", + "space-separated-tokens": "^1.0.0" + }, + "funding": { + "type": "opencollective", + 
"url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-parse/node_modules/parse5": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==" + }, "node_modules/relateurl": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", @@ -13965,6 +15715,40 @@ "node": ">= 0.10" } }, + "node_modules/remark-admonitions": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/remark-admonitions/-/remark-admonitions-1.2.1.tgz", + "integrity": "sha512-Ji6p68VDvD+H1oS95Fdx9Ar5WA2wcDA4kwrrhVU7fGctC6+d3uiMICu7w7/2Xld+lnU7/gi+432+rRbup5S8ow==", + "dependencies": { + "rehype-parse": "^6.0.2", + "unified": "^8.4.2", + "unist-util-visit": "^2.0.1" + } + }, + "node_modules/remark-admonitions/node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "engines": { + "node": ">=8" + } + }, + "node_modules/remark-admonitions/node_modules/unified": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-8.4.2.tgz", + "integrity": "sha512-JCrmN13jI4+h9UAyKEoGcDZV+i1E7BLFuG7OsaDvTXI5P0qhHX+vZO/kOhz9jn8HGENDKbwSeB0nVOg4gVStGA==", + "dependencies": { + "bail": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^2.0.0", + "trough": "^1.0.0", + "vfile": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/remark-emoji": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz", @@ -13984,6 +15768,19 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/remark-gfm": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", + "integrity": "sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==", + "dependencies": { + "mdast-util-gfm": "^0.1.0", + "micromark-extension-gfm": "^0.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/remark-mdx": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz", @@ -14206,6 +16003,189 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/remark-rehype": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-10.1.0.tgz", + "integrity": "sha512-EFmR5zppdBp0WQeDVZ/b66CWJipB2q2VLNFMabzDSGR66Z2fQii83G5gTBbgGEnEEA0QRussvrFHxk1HWGJskw==", + "dependencies": { + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-to-hast": "^12.1.0", + "unified": "^10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/remark-rehype/node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/remark-rehype/node_modules/mdast-util-definitions": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz", + 
"integrity": "sha512-8SVPMuHqlPME/z3gqVwWY4zVXn8lqKv/pAhC57FuJ40ImXyBpmO5ukh98zB2v7Blql2FiHjHv9LVztSIqjY+MA==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "unist-util-visit": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/mdast-util-to-hast": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz", + "integrity": "sha512-pits93r8PhnIoU4Vy9bjW39M2jJ6/tdHyja9rrot9uujkN7UTU9SDnE6WNJz/IGyQk3XHX6yNNtrBH6cQzm8Hw==", + "dependencies": { + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-definitions": "^5.0.0", + "micromark-util-sanitize-uri": "^1.1.0", + "trim-lines": "^3.0.0", + "unist-util-generated": "^2.0.0", + "unist-util-position": "^4.0.0", + "unist-util-visit": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/trough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", + "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/remark-rehype/node_modules/unified": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", + "dependencies": { + "@types/unist": "^2.0.0", + "bail": "^2.0.0", + "extend": "^3.0.0", + "is-buffer": "^2.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-generated": { + "version": "2.0.1", 
+ "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz", + "integrity": "sha512-qF72kLmPxAw0oN2fwpWIqbXAVyEqUzDHMsbtPvOudIlUzXYFIeQIuxXQCRCFh22B7cixvU0MG7m3MW8FTq/S+A==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-position": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz", + "integrity": "sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": 
"sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/vfile": { + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz", + "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==", + "dependencies": { + "@types/unist": "^2.0.0", + "is-buffer": "^2.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/remark-squeeze-paragraphs": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz", @@ -15445,6 +17425,28 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, + "node_modules/stringify-entities": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.3.tgz", + "integrity": "sha512-BP9nNHMhhfcMbiuQKCqMjhDP5yBCAxsPu4pHFFzJ6Alo9dZgY4VLDPutXqIjpRiMoKdp7Av85Gr73Q5uH9k7+g==", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + 
"node_modules/stringify-entities/node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/stringify-object": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", @@ -16033,6 +18035,15 @@ "integrity": "sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ==", "deprecated": "Use String.prototype.trim() instead" }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/trim-newlines": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", @@ -16441,6 +18452,18 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/unist-util-position-from-estree": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/unist-util-position-from-estree/-/unist-util-position-from-estree-1.1.2.tgz", + "integrity": "sha512-poZa0eXpS+/XpoQwGwl79UUdea4ol2ZuCYguVaJS4qzIOMDzbqz8a3erUCOmubSZkaOuGamb3tX790iwOIROww==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/unist-util-remove": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz", diff --git a/docs/package.json b/docs/package.json index c7732b3dc..856e66ebe 100644 --- 
a/docs/package.json +++ b/docs/package.json @@ -15,12 +15,13 @@ }, "dependencies": { "@babel/preset-react": "^7.22.3", + "@code-hike/mdx": "^0.9.0", "@docusaurus/core": "2.4.1", "@docusaurus/plugin-ideal-image": "^2.4.1", "@docusaurus/preset-classic": "2.4.1", "@docusaurus/theme-classic": "^2.4.1", "@docusaurus/theme-search-algolia": "^2.4.1", - "@mdx-js/react": "^1.6.22", + "@mdx-js/react": "^2.3.0", "@mendable/search": "^0.0.114", "@pbe/react-yandex-maps": "^1.2.4", "@prismicio/client": "^7.0.1", @@ -28,6 +29,7 @@ "autoprefixer": "^10.4.14", "clsx": "^1.2.1", "docusaurus-plugin-image-zoom": "^0.1.4", + "docusaurus-theme-mdx-v2": "^0.1.2", "jquery": "^3.7.0", "medium-zoom": "^1.0.8", "node-fetch": "^3.3.1", @@ -67,4 +69,4 @@ "engines": { "node": ">=16.14" } -} \ No newline at end of file +} From ea8865dc1835fd05b2268c782680e5313abbb6a4 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 17:26:31 -0300 Subject: [PATCH 200/221] =?UTF-8?q?=F0=9F=94=A7=20chore(docs):=20update=20?= =?UTF-8?q?docusaurus.config.js=20to=20use=20monokai=20theme=20for=20code?= =?UTF-8?q?=20highlighting,=20show=20copy=20button,=20and=20display=20line?= =?UTF-8?q?=20numbers=20=F0=9F=93=9D=20docs(sidebars.js):=20add=20"guideli?= =?UTF-8?q?nes/custom-component"=20and=20"components/custom"=20to=20the=20?= =?UTF-8?q?sidebar=20for=20better=20navigation=20and=20discoverability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/docusaurus.config.js | 7 ++++++- docs/sidebars.js | 2 ++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 6dffccbc8..270235b37 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -27,7 +27,12 @@ module.exports = { /** @type {import('@docusaurus/preset-classic').Options} */ ({ docs: { - beforeDefaultRemarkPlugins: [[remarkCodeHike, { theme: "nord" }]], + beforeDefaultRemarkPlugins: [ + [ + remarkCodeHike, 
+ { theme: "monokai", showCopyButton: true, lineNumbers: true }, + ], + ], routeBasePath: "/", sidebarPath: require.resolve("./sidebars.js"), path: "docs", diff --git a/docs/sidebars.js b/docs/sidebars.js index 01a84cf33..38592719d 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -21,6 +21,7 @@ module.exports = { "guidelines/collection", "guidelines/prompt-customization", "guidelines/chat-interface", + "guidelines/custom-component", ], }, { @@ -30,6 +31,7 @@ module.exports = { items: [ "components/agents", "components/chains", + "components/custom", "components/embeddings", "components/llms", "components/loaders", From 9a1c20d75094caabcaa86e2ef6def7f91ab5830e Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 26 Jul 2023 17:27:09 -0300 Subject: [PATCH 201/221] =?UTF-8?q?=F0=9F=93=9D=20docs(custom.mdx):=20add?= =?UTF-8?q?=20documentation=20for=20custom=20component?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/docs/components/custom.mdx | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 docs/docs/components/custom.mdx diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx new file mode 100644 index 000000000..316bd02d7 --- /dev/null +++ b/docs/docs/components/custom.mdx @@ -0,0 +1,17 @@ +import Admonition from "@theme/Admonition"; + +# Custom Component + +--- + +Used to create a custom component. The code is the class that will be converted to a Custom Component with the fields and formatting you define. + +**Params** + +- **Code:** The code of the component. 
+ + + +[Learn more about Custom Components](../guidelines/custom-component) + + From 6b7ab2adedbf31c103c654941614da328be6d519 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 06:58:10 -0300 Subject: [PATCH 202/221] =?UTF-8?q?=F0=9F=93=9D=20docs(custom-component.md?= =?UTF-8?q?x):=20add=20documentation=20for=20custom=20components=20?= =?UTF-8?q?=F0=9F=8E=A8=20style(custom.css):=20update=20styling=20for=20co?= =?UTF-8?q?de=20blocks=20and=20sidebar=20in=20documentation?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/docs/guidelines/custom-component.mdx | 317 ++++++++++++++++++++++ docs/src/css/custom.css | 32 ++- 2 files changed, 338 insertions(+), 11 deletions(-) create mode 100644 docs/docs/guidelines/custom-component.mdx diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx new file mode 100644 index 000000000..96a55ccd7 --- /dev/null +++ b/docs/docs/guidelines/custom-component.mdx @@ -0,0 +1,317 @@ +--- +description: Custom Components +hide_table_of_contents: true +--- + +# Custom Components + +A Custom Component has almost infinite possibilities. It can be a simple function that takes a string and returns a string, +or it can be a complex function that takes other components, calls APIs, and returns a custom object only you know how to use (which might not be ideal). + +Let's take a look at the basic rules, then we'll talk about the ones that are not so basic. + +## TL;DR + +This is a simple example of a Custom Component. We will go over the rules in detail later. 
+ + + +```python +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Best Component" + description = "This is the best component ever" + + def build_config(self) -> dict: + cool_tool_names = ["Cool Tool", + "Cooler Tool", + "Coolest Tool"] + return { + "description": {"multiline": True}, + "name": {"is_list": True, + "options": cool_tool_names}} + + def build(self, name: str, description: str, chain: Chain) -> Tool: + return Tool(name=name, + description=description, + func=chain.run) +``` + + + +## Rules: + + + +## Rule 1 + +The class must inherit from _`CustomComponent`_. + +```python +# focus +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +# focus +class BestComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + def build(self): + ... +``` + +--- + +## Rule 2: + +The class must have a _`build`_ method + +```python +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + # focus[5:13] + def build(self): + ... 
+``` + +--- + +## Rule 3 + +The type annotations of the _`build`_ method will be used to create the fields of the component + +The types supported are: + +- _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ +- [_`langchain.chains.base.Chain`_](focus://3) +- [_`langchain.PromptTemplate`_](focus://4) +- [_`langchain.llms.base.BaseLLM`_](focus://5) +- [_`langchain.Tool`_](focus://6) +- _`langchain.document_loaders.base.BaseLoader`_ +- _`langchain.schema.Document`_ +- _`langchain.text_splitters.TextSplitter`_ +- _`langchain.vectorstores.base.VectorStore`_ +- _`langchain.embeddings.base.Embeddings`_ +- _`langchain.schema.BaseRetriever`_ + +```python +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + # focus[14:75] + def build(self): + ... +``` + +--- + +```python +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + # focus + def build_config(self) -> dict: + ... + + def build(self): + ... 
+``` + +## Rule 4: + +The class can have a [_`build_config`_](focus://11:19) method + +- The _`build_config`_ method will be used to create the config fields of the component (if any) +- It should always return a _`dict`_ + +The _`dict`_ should have the following format: + +- The top level keys are the names of the fields +- The values are _`dict`_ with the following keys: + + - _`field_type: str`_: The type of the field (can be str, int, float, bool, file or any of the types supported by the _`build`_ method) + - _`is_list: bool`_: If the field is a list + - _`options: List[str]`_: If the field is a list, the options that will be displayed + - _`multiline: bool`_: If the field is a string, if it should be multiline + - _`input_types: List[str]`_: To be used when you want a _`str`_ field to have connectable handles. + - _`display_name: str`_: To change the name of the field + - _`advanced: bool`_: To hide the field in the default view + - _`password: bool`_: To mask the input + - _`required: bool`_: To make the field required + - _`info: str`_: To add a tooltip to the field + - _`file_types: List[str]`_: This is a requirement if the _`field_type`_ is 'file' + (must be used in conjunction with _`suffixes`_) + + Example: _`["json", "yaml", "yml"]`_ + + - _`suffixes: List[str]`_: This is a requirement if the _`field_type`_ is 'file' (must be used in conjunction with _`file_types`_, and it must be a list of strings like 'json') + + Example: _`[".json", ".yaml", ".yml"]`_ + +--- + +# Change the name + +We can change the name of the component by adding a _`display_name`_ attribute. 
+ +```python focus=9 +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Best Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + def build(self): + ... +``` + +--- + +# Change the description + +We can change the description of the component by adding a _`description`_ attribute. + +```python focus=10 +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Best Component" + description = "This is the best component ever" + + def build_config(self) -> dict: + ... + + def build(self): + ... +``` + +--- + +# Add a config + +The _`build_config`_ method will be used to create the config fields of the component (if any). + +- _`multiline`_ is a special option that will give the option to open a text editor. + +- _`is_list`_ is a special option that will give the option to add multiple values. When paired with _`options`_ it will transform it into a select field. + +```python focus=12:19 +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Best Component" + description = "This is the best component ever" + + def build_config(self) -> dict: + cool_tool_names = ["Cool Tool", + "Cooler Tool", + "Coolest Tool"] + return { + "description": {"multiline": True}, + "name": {"is_list": True, + "options": cool_tool_names}} + + def build(self): + ... 
+``` + +--- + +```python focus=21:25 +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Best Component" + description = "This is the best component ever" + + def build_config(self) -> dict: + cool_tool_names = ["Cool Tool", + "Cooler Tool", + "Coolest Tool"] + return { + "description": {"multiline": True}, + "name": {"is_list": True, + "options": cool_tool_names}} + + def build(self, name: str, description: str, chain: Chain) -> Tool: + return Tool(name=name, + description=description, + func=chain.run) +``` + +# Add the build method + +- name is a string +- description is a string +- chain is a Chain +- The return type is Tool + + diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css index 2f6f992f3..a81f8ed7d 100644 --- a/docs/src/css/custom.css +++ b/docs/src/css/custom.css @@ -3,17 +3,19 @@ * bundles Infima by default. Infima is a CSS framework designed to * work well for content-centric websites. 
*/ - :root { +:root { --ifm-background-color: var(--token-primary-bg-c); --ifm-navbar-link-hover-color: initial; --ifm-navbar-padding-vertical: 0; --ifm-navbar-item-padding-vertical: 0; - --ifm-font-family-base: -apple-system, BlinkMacSystemFont, Inter, Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI emoji'; - --ifm-font-family-monospace: 'SFMono-Regular', 'Roboto Mono', Consolas, 'Liberation Mono', Menlo, Courier, monospace; + --ifm-font-family-base: -apple-system, BlinkMacSystemFont, Inter, Helvetica, + Arial, sans-serif, "Apple Color Emoji", "Segoe UI emoji"; + --ifm-font-family-monospace: "SFMono-Regular", "Roboto Mono", Consolas, + "Liberation Mono", Menlo, Courier, monospace; } .theme-doc-sidebar-item-category.menu__list-item:not(:first-child) { - margin-top: 1.5rem!important; + margin-top: 1.5rem !important; } .docusaurus-highlight-code-line { @@ -31,7 +33,7 @@ transform: skewY(6deg); } -[class^='announcementBar'] { +[class^="announcementBar"] { z-index: 10; } @@ -112,7 +114,7 @@ body { } .header-github-link:before { - content: ''; + content: ""; width: 24px; height: 24px; display: flex; @@ -126,7 +128,7 @@ body { } .header-twitter-link::before { - content: ''; + content: ""; width: 24px; height: 24px; display: flex; @@ -140,7 +142,7 @@ body { } .header-discord-link::before { - content: ''; + content: ""; width: 24px; height: 24px; display: flex; @@ -148,7 +150,6 @@ body { background-size: contain; } - /* Images */ .image-rendering-crisp { image-rendering: crisp-edges; @@ -164,7 +165,7 @@ body { .img-center { display: flex; justify-content: center; - width: 100%, + width: 100%; } .resized-image { @@ -188,4 +189,13 @@ body { .mendable-search { width: 140px; } -} \ No newline at end of file +} + +.ch-scrollycoding-content { + max-width: 40% !important; +} + +.ch-scrollycoding-sticker { + max-width: 60% !important; + min-width: 60% !important; +} From 8d8fae78c878ee01e4c8157eca9cb9ea7cc47bcf Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas 
Almeida Date: Thu, 27 Jul 2023 07:02:14 -0300 Subject: [PATCH 203/221] =?UTF-8?q?=F0=9F=94=A7=20fix(=5F=5Finit=5F=5F.py)?= =?UTF-8?q?:=20update=20import=20statement=20for=20CustomComponent=20to=20?= =?UTF-8?q?reflect=20new=20location=20=F0=9F=94=A7=20fix(constants.py):=20?= =?UTF-8?q?update=20import=20statement=20for=20CustomComponent=20to=20refl?= =?UTF-8?q?ect=20new=20location=20=F0=9F=94=A7=20fix(constants.py):=20upda?= =?UTF-8?q?te=20field=5Fconfig=20to=20build=5Fconfig=20to=20match=20change?= =?UTF-8?q?s=20in=20CustomComponent=20class=20=F0=9F=94=A7=20fix(constants?= =?UTF-8?q?.py):=20update=20build=20method=20signature=20to=20match=20chan?= =?UTF-8?q?ges=20in=20CustomComponent=20class?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/__init__.py | 4 ++-- src/backend/langflow/interface/custom/constants.py | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/__init__.py b/src/backend/langflow/__init__.py index d6c645486..5920369e2 100644 --- a/src/backend/langflow/__init__.py +++ b/src/backend/langflow/__init__.py @@ -1,7 +1,7 @@ from importlib import metadata from langflow.cache import cache_manager from langflow.processing.process import load_flow_from_json -from langflow.utils.types import Prompt +from langflow.interface.custom.custom_component import CustomComponent try: __version__ = metadata.version(__package__) @@ -10,4 +10,4 @@ except metadata.PackageNotFoundError: __version__ = "" del metadata # optional, avoids polluting the results of dir(__package__) -__all__ = ["load_flow_from_json", "cache_manager", "Prompt"] +__all__ = ["load_flow_from_json", "cache_manager", "CustomComponent"] diff --git a/src/backend/langflow/interface/custom/constants.py b/src/backend/langflow/interface/custom/constants.py index 00aa5314d..8e5db39b8 100644 --- a/src/backend/langflow/interface/custom/constants.py +++ 
b/src/backend/langflow/interface/custom/constants.py @@ -35,8 +35,7 @@ CUSTOM_COMPONENT_SUPPORTED_TYPES = { DEFAULT_CUSTOM_COMPONENT_CODE = """ -from langflow import Prompt -from langflow.interface.custom.custom_component import CustomComponent +from langflow import CustomComponent from langchain.llms.base import BaseLLM from langchain.chains import LLMChain @@ -48,11 +47,12 @@ import requests class YourComponent(CustomComponent): display_name: str = "Your Component" description: str = "Your description" - field_config = { "url": { "multiline": True, "required": True } } - def build(self, url: str, llm: BaseLLM, template: Prompt) -> Document: + def build_config(self): + return { "url": { "multiline": True, "required": True } } + + def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document: response = requests.get(url) - prompt = PromptTemplate.from_template(template) chain = LLMChain(llm=llm, prompt=prompt) result = chain.run(response.text[:300]) return Document(page_content=str(result)) From 34008bf3d0bf6fce5aef667006668954b61a7772 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 07:02:48 -0300 Subject: [PATCH 204/221] =?UTF-8?q?=F0=9F=94=84=20refactor(index.tsx):=20r?= =?UTF-8?q?eorder=20import=20statements=20for=20better=20readability=20and?= =?UTF-8?q?=20consistency?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/index.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/frontend/src/index.tsx b/src/frontend/src/index.tsx index 31d8f21f1..0ce5c0f5b 100644 --- a/src/frontend/src/index.tsx +++ b/src/frontend/src/index.tsx @@ -5,9 +5,9 @@ import ContextWrapper from "./contexts"; import reportWebVitals from "./reportWebVitals"; import { ApiInterceptor } from "./controllers/API/api"; -import "./style/index.css"; -import "./style/classes.css"; import "./style/applies.css"; +import "./style/classes.css"; +import 
"./style/index.css"; const root = ReactDOM.createRoot( document.getElementById("root") as HTMLElement From d8980d34f204c36ae54f01c34e98544432739197 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 07:59:58 -0300 Subject: [PATCH 205/221] =?UTF-8?q?=F0=9F=94=A7=20chore(docusaurus.config.?= =?UTF-8?q?js):=20enable=20sidebar=20to=20be=20hideable=20in=20the=20docs?= =?UTF-8?q?=20section=20for=20better=20user=20experience?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/docusaurus.config.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 270235b37..798c2e44a 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -132,5 +132,10 @@ module.exports = { // prism: { // theme: require("prism-react-renderer/themes/dracula"), // }, + docs: { + sidebar: { + hideable: true, + }, + }, }), }; From 529322e56ae715c21fc271fd2e34ade69aec882f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 08:00:22 -0300 Subject: [PATCH 206/221] =?UTF-8?q?=F0=9F=8E=A8=20style(custom.css):=20adj?= =?UTF-8?q?ust=20max-width=20and=20min-width=20of=20.ch-scrollycoding-cont?= =?UTF-8?q?ent=20and=20.ch-scrollycoding-sticker=20to=20improve=20layout?= =?UTF-8?q?=20proportions?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/src/css/custom.css | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css index a81f8ed7d..0510365d4 100644 --- a/docs/src/css/custom.css +++ b/docs/src/css/custom.css @@ -192,10 +192,11 @@ body { } .ch-scrollycoding-content { - max-width: 40% !important; + max-width: 50% !important; + min-width: 40% !important; } .ch-scrollycoding-sticker { max-width: 60% !important; - min-width: 60% !important; + min-width: 50% !important; } From e43f52dd7a5b7c6fb183d12379525660a1e04058 
Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 08:04:18 -0300 Subject: [PATCH 207/221] =?UTF-8?q?=F0=9F=93=9D=20docs(custom-component.md?= =?UTF-8?q?x):=20update=20guidelines=20for=20creating=20custom=20component?= =?UTF-8?q?s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✨ feat(custom-component.mdx): add example code for creating a custom component 🔧 chore(custom-component.mdx): update code comments and fix typo in build_config method --- docs/docs/guidelines/custom-component.mdx | 42 +++++++++++++++++++++-- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx index 96a55ccd7..7fa3fd1dd 100644 --- a/docs/docs/guidelines/custom-component.mdx +++ b/docs/docs/guidelines/custom-component.mdx @@ -12,7 +12,11 @@ Let's take a look at the basic rules, then we'll talk about the ones that are no ## TL;DR -This is a simple example of a Custom Component. We will go over the rules in detail later. +You need to create a class that inherits from _`CustomComponent`_ and has a _`build`_ method. +Use the type annotations of the _`build`_ method to create the fields of the component. +Use the _`build_config`_ method to create the config fields of the component (if any). + +Here is an example: @@ -45,6 +49,8 @@ class BestComponent(CustomComponent): +## Now, let's go over the rules one by one. + ## Rules: @@ -135,7 +141,7 @@ class BestComponent(CustomComponent): def build_config(self) -> dict: ... - # focus[14:75] + # mark def build(self): ... 
``` @@ -195,6 +201,32 @@ The _`dict`_ should have the following format: --- +```python +from langflow import CustomComponent +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain import PromptTemplate +from langchain.llms.base import BaseLLM +from langchain import Tool + +class BestComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + # focus + def build_config(self) -> dict: + ... + + def build(self): + ... +``` + +# Example + +Now let's create a custom component that creates a Tool from a name, a description and a chain. + +--- + # Change the name We can change the name of the component by adding a _`display_name`_ attribute. @@ -247,7 +279,7 @@ class BestComponent(CustomComponent): # Add a config -The _`build_config`_ method will be used to create the config fields of the component (if any). +The _`build_config`_ method will be used to configure the fields of the component. - _`multiline`_ is a special option that will give the option to open a text editor. @@ -309,9 +341,13 @@ class BestComponent(CustomComponent): # Add the build method +The parameters used are: + - name is a string - description is a string - chain is a Chain - The return type is Tool +We then instantiate a Tool and return it. 
+ From 2b55b0c3b3b5939f111c08c0e60057c79fe01cfa Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 09:39:27 -0300 Subject: [PATCH 208/221] =?UTF-8?q?=F0=9F=94=80=20refactor(custom=5Fcompon?= =?UTF-8?q?ent.py):=20rename=20`load=5Fflow`=20method=20parameter=20`flow?= =?UTF-8?q?=5Fid`=20to=20`graph=5Fid`=20for=20clarity=20=F0=9F=94=80=20ref?= =?UTF-8?q?actor(custom=5Fcomponent.py):=20add=20optional=20`tweaks`=20par?= =?UTF-8?q?ameter=20to=20`load=5Fflow`=20method=20to=20allow=20for=20proce?= =?UTF-8?q?ssing=20tweaks=20on=20the=20graph=20data=20=F0=9F=94=80=20refac?= =?UTF-8?q?tor(custom=5Fcomponent.py):=20rename=20`data=5Fgraph`=20variabl?= =?UTF-8?q?e=20to=20`graph=5Fdata`=20for=20consistency=20and=20clarity=20?= =?UTF-8?q?=F0=9F=94=80=20refactor(custom=5Fcomponent.py):=20add=20logic?= =?UTF-8?q?=20to=20process=20tweaks=20on=20the=20`graph=5Fdata`=20if=20`tw?= =?UTF-8?q?eaks`=20parameter=20is=20provided?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/custom_component.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index f58c9db19..9ade781c8 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -141,14 +141,17 @@ class CustomComponent(Component, extra=Extra.allow): def get_function(self): return validate.create_function(self.code, self.function_entrypoint_name) - def load_flow(self, flow_id: str): + def load_flow(self, flow_id: str, tweaks: Optional[dict] = None): from langflow.processing.process import build_sorted_vertices_with_caching + from langflow.processing.process import process_tweaks with session_getter() as session: - data_graph = flow.data if (flow := session.get(Flow, flow_id)) else None - if not data_graph: + 
graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None + if not graph_data: raise ValueError(f"Flow {flow_id} not found") - return build_sorted_vertices_with_caching(data_graph) + if tweaks: + graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks) + return build_sorted_vertices_with_caching(graph_data) def list_flows(self): with session_getter() as session: From 4a0fe950936432bc3ccb37f274b69dc0d3fa8d8c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 09:44:01 -0300 Subject: [PATCH 209/221] =?UTF-8?q?=F0=9F=94=A7=20chore(types.py):=20add?= =?UTF-8?q?=20support=20for=20custom=20field=20value=20in=20add=5Fnew=5Fcu?= =?UTF-8?q?stom=5Ffield=20function?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🔧 chore(types.py): add support for custom field value in add_new_custom_field function to allow specifying a default value for the field --- src/backend/langflow/interface/types.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 35ecd558b..e68a07b44 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -98,6 +98,7 @@ def add_new_custom_field( display_name = field_config.pop("display_name", field_name) field_type = field_config.pop("field_type", field_type) field_type = process_type(field_type) + field_value = field_config.pop("value", field_value) if "name" in field_config: warnings.warn( From 3d103398e396d43d63523bea87c8f842bea7f1a5 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Thu, 27 Jul 2023 09:46:29 -0300 Subject: [PATCH 210/221] fix(codeAreaModal): clear error state when setting value to code and closing modal to prevent displaying previous errors --- src/frontend/src/modals/codeAreaModal/index.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/src/frontend/src/modals/codeAreaModal/index.tsx 
b/src/frontend/src/modals/codeAreaModal/index.tsx index 53dda0e81..6b36bac64 100644 --- a/src/frontend/src/modals/codeAreaModal/index.tsx +++ b/src/frontend/src/modals/codeAreaModal/index.tsx @@ -96,6 +96,7 @@ export default function CodeAreaModal({ if (data) { setNodeClass(data); setValue(code); + setError({ detail: { error: undefined, traceback: undefined } }); setOpen(false); } }) From 28bf88cf7980830ce77024af3ef81886514fb367 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 09:55:48 -0300 Subject: [PATCH 211/221] =?UTF-8?q?=F0=9F=90=9B=20fix(custom=5Fcomponent.p?= =?UTF-8?q?y):=20fix=20custom=5Frepr=20method=20to=20return=20a=20string?= =?UTF-8?q?=20representation=20of=20repr=5Fvalue?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/custom_component.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index 9ade781c8..d5f366d97 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -23,7 +23,7 @@ class CustomComponent(Component, extra=Extra.allow): super().__init__(**data) def custom_repr(self): - return self.repr_value + return str(self.repr_value) def build_config(self): return self.field_config From d9816902e4e56b1ddefea18b515e347492d29598 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 09:56:15 -0300 Subject: [PATCH 212/221] =?UTF-8?q?=F0=9F=90=9B=20fix(GenericNode/index.ts?= =?UTF-8?q?x):=20fix=20rendering=20of=20validationStatus.params=20when=20i?= =?UTF-8?q?t=20is=20a=20string?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/CustomNodes/GenericNode/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index 95fd70a7e..d49544114 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -132,7 +132,7 @@ export default function GenericNode({ ) : (
- {validationStatus.params + {typeof validationStatus.params === "string" ? validationStatus.params .split("\n") .map((line, index) =>
{line}
) From 2144e0d92d0cb2fbe5539c717f1831c11365c106 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Thu, 27 Jul 2023 15:12:18 +0100 Subject: [PATCH 213/221] =?UTF-8?q?=F0=9F=90=9B=20fix(code=5Fparser.py):?= =?UTF-8?q?=20strip=20single=20quotes=20from=20default=20values=20in=20Cod?= =?UTF-8?q?eParser=20class=20to=20improve=20consistency=20and=20readabilit?= =?UTF-8?q?y?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/code_parser.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/custom/code_parser.py b/src/backend/langflow/interface/custom/code_parser.py index 606208624..829f8d71a 100644 --- a/src/backend/langflow/interface/custom/code_parser.py +++ b/src/backend/langflow/interface/custom/code_parser.py @@ -128,7 +128,8 @@ class CodeParser: num_missing_defaults = num_args - num_defaults missing_defaults = [None] * num_missing_defaults default_values = [ - ast.unparse(default) if default else None for default in node.args.defaults + ast.unparse(default).strip("'") if default else None + for default in node.args.defaults ] defaults = missing_defaults + default_values From 707e55c880d2a7c4c29e205ef78c23a559f97d48 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 14:32:10 -0300 Subject: [PATCH 214/221] =?UTF-8?q?=F0=9F=90=9B=20fix(loading.py):=20refac?= =?UTF-8?q?tor=20instantiate=5Fprompt=20function=20to=20improve=20readabil?= =?UTF-8?q?ity=20and=20maintainability=20=E2=9C=A8=20feat(utils.py):=20add?= =?UTF-8?q?=20utility=20functions=20to=20handle=20node=20type=20and=20form?= =?UTF-8?q?at=20kwargs=20in=20instantiate=5Fprompt=20function?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/initialize/loading.py | 70 +----------- .../langflow/interface/initialize/utils.py | 107 ++++++++++++++++++ 2 files changed, 111 insertions(+), 66 
deletions(-) create mode 100644 src/backend/langflow/interface/initialize/utils.py diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index c9d12f4b9..f6da0edc7 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -1,17 +1,15 @@ -import contextlib import json -from typing import Any, Callable, Dict, List, Sequence, Type +from typing import Any, Callable, Dict, Sequence, Type -from langchain.agents import ZeroShotAgent from langchain.agents import agent as agent_module from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.agents.tools import BaseTool from langflow.interface.initialize.llm import initialize_vertexai +from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type from langflow.interface.initialize.vector_store import vecstore_initializer -from langchain.schema import Document, BaseOutputParser from pydantic import ValidationError from langflow.interface.importing.utils import ( @@ -212,68 +210,8 @@ def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params: def instantiate_prompt(node_type, class_object, params: Dict): - if node_type == "ZeroShotPrompt": - if "tools" not in params: - params["tools"] = [] - return ZeroShotAgent.create_prompt(**params) - elif "MessagePromptTemplate" in node_type: - # Then we only need the template - from_template_params = { - "template": params.pop("prompt", params.pop("template", "")) - } - - if not from_template_params.get("template"): - raise ValueError("Prompt template is required") - prompt = class_object.from_template(**from_template_params) - - elif node_type == "ChatPromptTemplate": - prompt = class_object.from_messages(**params) - else: - prompt = class_object(**params) - - format_kwargs: Dict[str, Any] = {} - for input_variable in prompt.input_variables: 
- if input_variable in params: - variable = params[input_variable] - if isinstance(variable, str): - format_kwargs[input_variable] = variable - elif isinstance(variable, BaseOutputParser) and hasattr( - variable, "get_format_instructions" - ): - format_kwargs[input_variable] = variable.get_format_instructions() - elif isinstance(variable, List) and all( - isinstance(item, Document) for item in variable - ): - # Format document to contain page_content and metadata - # as one string separated by a newline - if len(variable) > 1: - content = "\n".join( - [item.page_content for item in variable if item.page_content] - ) - else: - content = variable[0].page_content - # content could be a json list of strings - with contextlib.suppress(json.JSONDecodeError): - content = json.loads(content) - if isinstance(content, list): - content = ",".join([str(item) for item in content]) - format_kwargs[input_variable] = content - # handle_keys will be a list but it does not exist yet - # so we need to create it - - if ( - isinstance(variable, List) - and all(isinstance(item, Document) for item in variable) - ) or ( - isinstance(variable, BaseOutputParser) - and hasattr(variable, "get_format_instructions") - ): - if "handle_keys" not in format_kwargs: - format_kwargs["handle_keys"] = [] - - # Add the handle_keys to the list - format_kwargs["handle_keys"].append(input_variable) - + params, prompt = handle_node_type(node_type, class_object, params) + format_kwargs = handle_format_kwargs(prompt, params) return prompt, format_kwargs diff --git a/src/backend/langflow/interface/initialize/utils.py b/src/backend/langflow/interface/initialize/utils.py new file mode 100644 index 000000000..0e8cf7bee --- /dev/null +++ b/src/backend/langflow/interface/initialize/utils.py @@ -0,0 +1,107 @@ +import contextlib +import json +from typing import Any, Dict, List + +from langchain.agents import ZeroShotAgent + + +from langchain.schema import Document, BaseOutputParser + + +def 
handle_node_type(node_type, class_object, params: Dict): + if node_type == "ZeroShotPrompt": + params = check_tools_in_params(params) + prompt = ZeroShotAgent.create_prompt(**params) + elif "MessagePromptTemplate" in node_type: + prompt = instantiate_from_template(class_object, params) + elif node_type == "ChatPromptTemplate": + prompt = class_object.from_messages(**params) + else: + prompt = class_object(**params) + return params, prompt + + +def check_tools_in_params(params: Dict): + if "tools" not in params: + params["tools"] = [] + return params + + +def instantiate_from_template(class_object, params: Dict): + from_template_params = { + "template": params.pop("prompt", params.pop("template", "")) + } + if not from_template_params.get("template"): + raise ValueError("Prompt template is required") + return class_object.from_template(**from_template_params) + + +def handle_format_kwargs(prompt, params: Dict): + format_kwargs: Dict[str, Any] = {} + for input_variable in prompt.input_variables: + if input_variable in params: + format_kwargs = handle_variable(params, input_variable, format_kwargs) + return format_kwargs + + +def handle_variable(params: Dict, input_variable: str, format_kwargs: Dict): + variable = params[input_variable] + if isinstance(variable, str): + format_kwargs[input_variable] = variable + elif isinstance(variable, BaseOutputParser) and hasattr( + variable, "get_format_instructions" + ): + format_kwargs[input_variable] = variable.get_format_instructions() + elif is_instance_of_list_or_document(variable): + format_kwargs = format_document(variable, input_variable, format_kwargs) + if needs_handle_keys(variable): + format_kwargs = add_handle_keys(input_variable, format_kwargs) + return format_kwargs + + +def is_instance_of_list_or_document(variable): + return ( + isinstance(variable, List) + and all(isinstance(item, Document) for item in variable) + or isinstance(variable, Document) + ) + + +def format_document(variable, input_variable: str, 
format_kwargs: Dict): + variable = variable if isinstance(variable, List) else [variable] + content = format_content(variable) + format_kwargs[input_variable] = content + return format_kwargs + + +def format_content(variable): + if len(variable) > 1: + content = "\n".join( + [item.page_content for item in variable if item.page_content] + ) + else: + content = variable[0].page_content + content = try_to_load_json(content) + return content + + +def try_to_load_json(content): + with contextlib.suppress(json.JSONDecodeError): + content = json.loads(content) + if isinstance(content, list): + content = ",".join([str(item) for item in content]) + return content + + +def needs_handle_keys(variable): + return is_instance_of_list_or_document(variable) or ( + isinstance(variable, BaseOutputParser) + and hasattr(variable, "get_format_instructions") + ) + + +def add_handle_keys(input_variable: str, format_kwargs: Dict): + if "handle_keys" not in format_kwargs: + format_kwargs["handle_keys"] = [] + format_kwargs["handle_keys"].append(input_variable) + return format_kwargs From 3b93c5814bef4d6e53ab2d50bfddf922eaa038f9 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 14:33:31 -0300 Subject: [PATCH 215/221] =?UTF-8?q?=F0=9F=90=9B=20fix(utils.py):=20simplif?= =?UTF-8?q?y=20format=5Fcontent=20function=20to=20improve=20readability=20?= =?UTF-8?q?and=20maintainability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/initialize/utils.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/interface/initialize/utils.py b/src/backend/langflow/interface/initialize/utils.py index 0e8cf7bee..31fbc6d8b 100644 --- a/src/backend/langflow/interface/initialize/utils.py +++ b/src/backend/langflow/interface/initialize/utils.py @@ -76,13 +76,9 @@ def format_document(variable, input_variable: str, format_kwargs: Dict): def 
format_content(variable): if len(variable) > 1: - content = "\n".join( - [item.page_content for item in variable if item.page_content] - ) - else: - content = variable[0].page_content - content = try_to_load_json(content) - return content + return "\n".join([item.page_content for item in variable if item.page_content]) + content = variable[0].page_content + return try_to_load_json(content) def try_to_load_json(content): From fb407cf9d10c2615f00a8a821bc714e34aa71ba5 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 14:35:49 -0300 Subject: [PATCH 216/221] =?UTF-8?q?=F0=9F=94=A5=20refactor(test=5Fcustom?= =?UTF-8?q?=5Fcomponent.py):=20remove=20unused=20imports=20and=20test=5Fli?= =?UTF-8?q?st=5Fflows=5Fno=5Fflows=20test=20case=20=F0=9F=94=A5=20refactor?= =?UTF-8?q?(test=5Fcustom=5Fcomponent.py):=20remove=20test=5Flist=5Fflows?= =?UTF-8?q?=5Fno=5Fflows=20test=20case=20as=20it=20is=20no=20longer=20need?= =?UTF-8?q?ed?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/test_custom_component.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py index 2811f68bd..d57f347a1 100644 --- a/tests/test_custom_component.py +++ b/tests/test_custom_component.py @@ -3,7 +3,6 @@ import pytest import types from uuid import uuid4 -from unittest.mock import patch, MagicMock from fastapi import HTTPException from langflow.database.models.flow import Flow, FlowCreate @@ -466,20 +465,6 @@ def test_custom_component_build_not_implemented(): custom_component.build() -def test_list_flows_no_flows(): - session_getter_module = "langflow.database.base.session_getter" - - with patch(session_getter_module) as mock_session_getter: - mock_session = MagicMock() - mock_session.query.return_value.all.return_value = [] - mock_session_getter.return_value.__enter__.return_value = mock_session - - component = CustomComponent() - result = 
component.list_flows() - - assert len(result) == 0 - - def test_build_config_no_code(): component = CustomComponent(code=None) From 07b541958671b456992370cac711ea0a6fa6c1fc Mon Sep 17 00:00:00 2001 From: Cristhian Zanforlin Lousa Date: Thu, 27 Jul 2023 12:13:45 -0300 Subject: [PATCH 217/221] =?UTF-8?q?=F0=9F=94=A7=20chore(constants.ts):=20a?= =?UTF-8?q?dd=20URL=5FEXCLUDED=5FFROM=5FERROR=5FRETRIES=20constant=20to=20?= =?UTF-8?q?store=20URLs=20excluded=20from=20error=20retries=20for=20better?= =?UTF-8?q?=20error=20handling=20=F0=9F=94=A7=20chore(api.tsx):=20add=20lo?= =?UTF-8?q?gic=20to=20exclude=20certain=20URLs=20from=20error=20retries=20?= =?UTF-8?q?in=20the=20ApiInterceptor=20component=20to=20improve=20error=20?= =?UTF-8?q?handling=20=F0=9F=94=A7=20chore(index.ts):=20update=20postCusto?= =?UTF-8?q?mComponent=20function=20to=20use=20the=20api=20instance=20inste?= =?UTF-8?q?ad=20of=20axios=20for=20consistency=20and=20reusability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/constants/constants.ts | 7 +++++++ src/frontend/src/controllers/API/api.tsx | 6 +++++- src/frontend/src/controllers/API/index.ts | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/frontend/src/constants/constants.ts b/src/frontend/src/constants/constants.ts index 195570be0..d77a4f471 100644 --- a/src/frontend/src/constants/constants.ts +++ b/src/frontend/src/constants/constants.ts @@ -491,3 +491,10 @@ export const NOUNS: string[] = [ * */ export const USER_PROJECTS_HEADER = "My Collection"; + +/** + * URLs excluded from error retries. 
+ * @constant + * + */ +export const URL_EXCLUDED_FROM_ERROR_RETRIES = ["/api/v1/validate/code", "/api/v1/custom_component", "/api/v1/validate/prompt"]; \ No newline at end of file diff --git a/src/frontend/src/controllers/API/api.tsx b/src/frontend/src/controllers/API/api.tsx index 8dd9eac9f..d5af8f4b0 100644 --- a/src/frontend/src/controllers/API/api.tsx +++ b/src/frontend/src/controllers/API/api.tsx @@ -1,6 +1,7 @@ import axios, { AxiosError, AxiosInstance } from "axios"; import { useContext, useEffect, useRef } from "react"; import { alertContext } from "../../contexts/alertContext"; +import { URL_EXCLUDED_FROM_ERROR_RETRIES } from "../../constants/constants"; // Create a new Axios instance const api: AxiosInstance = axios.create({ @@ -15,6 +16,9 @@ function ApiInterceptor() { const interceptor = api.interceptors.response.use( (response) => response, async (error: AxiosError) => { + if (URL_EXCLUDED_FROM_ERROR_RETRIES.includes(error.config?.url)) { + return Promise.reject(error); + } let retryCount = 0; while (retryCount < 4) { @@ -31,7 +35,7 @@ function ApiInterceptor() { "Refresh the page", "Use a new flow tab", "Check if the backend is up", - "Endpoint: " + error.config.url, + "Endpoint: " + error.config?.url, ], }); return Promise.reject(error); diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index e13886e5d..992197146 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -344,5 +344,5 @@ export async function postCustomComponent( code: string, apiClass: APIClassType ): Promise> { - return await axios.post(`/api/v1/custom_component`, { code }); + return await api.post(`/api/v1/custom_component`, { code }); } From 35724e14737cba9bf83aae69d41a086d21624db0 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 17:17:57 -0300 Subject: [PATCH 218/221] =?UTF-8?q?=F0=9F=90=9B=20fix(code=5Fparser.py):?= 
=?UTF-8?q?=20handle=20"None"=20values=20in=20default=20arguments=20correc?= =?UTF-8?q?tly?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/code_parser.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/backend/langflow/interface/custom/code_parser.py b/src/backend/langflow/interface/custom/code_parser.py index 829f8d71a..d42f82635 100644 --- a/src/backend/langflow/interface/custom/code_parser.py +++ b/src/backend/langflow/interface/custom/code_parser.py @@ -131,6 +131,12 @@ class CodeParser: ast.unparse(default).strip("'") if default else None for default in node.args.defaults ] + # Now check all default values to see if there + # are any "None" values in the middle + default_values = [ + None if value == "None" else value for value in default_values + ] + defaults = missing_defaults + default_values args = [ From b2e784dcb741d4ccdf001519546f80d5361937fb Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 17:19:24 -0300 Subject: [PATCH 219/221] =?UTF-8?q?=F0=9F=90=9B=20fix(types.py):=20fix=20i?= =?UTF-8?q?ssue=20with=20incorrect=20assignment=20of=20field=5Fadvanced=20?= =?UTF-8?q?variable=20in=20add=5Fnew=5Fcustom=5Ffield=20function=20?= =?UTF-8?q?=E2=9C=A8=20feat(types.py):=20add=20support=20for=20advanced=20?= =?UTF-8?q?field=20configuration=20in=20add=5Fnew=5Fcustom=5Ffield=20funct?= =?UTF-8?q?ion=20=F0=9F=90=9B=20fix(types.py):=20fix=20issue=20with=20inco?= =?UTF-8?q?rrect=20assignment=20of=20field=5Fadvanced=20variable=20in=20ad?= =?UTF-8?q?d=5Fcode=5Ffield=20function=20=E2=9C=A8=20feat(types.py):=20add?= =?UTF-8?q?=20support=20for=20advanced=20field=20configuration=20in=20add?= =?UTF-8?q?=5Fcode=5Ffield=20function=20=F0=9F=90=9B=20fix(types.py):=20fi?= =?UTF-8?q?x=20issue=20with=20incorrect=20sorting=20of=20function=5Fargs?= =?UTF-8?q?=20in=20add=5Fextra=5Ffields=20function=20=E2=9C=A8=20feat(type?= 
=?UTF-8?q?s.py):=20add=20sorting=20of=20function=5Fargs=20in=20add=5Fextr?= =?UTF-8?q?a=5Ffields=20function=20=F0=9F=90=9B=20fix(types.py):=20fix=20i?= =?UTF-8?q?ssue=20with=20incorrect=20usage=20of=20field=5Fconfig=20in=20ad?= =?UTF-8?q?d=5Fcode=5Ffield=20function=20=E2=9C=A8=20feat(types.py):=20add?= =?UTF-8?q?=20support=20for=20field=5Fconfig=20in=20add=5Fcode=5Ffield=20f?= =?UTF-8?q?unction=20=F0=9F=90=9B=20fix(types.py):=20fix=20issue=20with=20?= =?UTF-8?q?incorrect=20usage=20of=20field=5Fconfig=20in=20build=5Flangchai?= =?UTF-8?q?n=5Ftemplate=5Fcustom=5Fcomponent=20function=20=E2=9C=A8=20feat?= =?UTF-8?q?(types.py):=20add=20support=20for=20field=5Fconfig=20in=20build?= =?UTF-8?q?=5Flangchain=5Ftemplate=5Fcustom=5Fcomponent=20function?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/types.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index e68a07b44..c6a901baa 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -99,6 +99,7 @@ def add_new_custom_field( field_type = field_config.pop("field_type", field_type) field_type = process_type(field_type) field_value = field_config.pop("value", field_value) + field_advanced = field_config.pop("advanced", False) if "name" in field_config: warnings.warn( @@ -115,7 +116,7 @@ def add_new_custom_field( value=field_value, show=True, required=required, - advanced=False, + advanced=field_advanced, placeholder=placeholder, display_name=display_name, **field_config, @@ -127,8 +128,9 @@ def add_new_custom_field( # TODO: Move to correct place -def add_code_field(template, raw_code): +def add_code_field(template, raw_code, field_config): # Field with the Python code to allow update + code_field = { "code": { "dynamic": True, @@ -139,7 +141,7 @@ def add_code_field(template, raw_code): 
"value": raw_code, "password": False, "name": "code", - "advanced": False, + "advanced": field_config.pop("advanced", False), "type": "code", "list": False, } @@ -199,6 +201,8 @@ def add_extra_fields(frontend_node, field_config, function_args): """Add extra fields to the frontend node""" if function_args is None: return + # sort function_args which is a list of dicts + function_args.sort(key=lambda x: x["name"]) for extra_field in function_args: if "name" not in extra_field or extra_field["name"] == "self": @@ -269,7 +273,9 @@ def build_langchain_template_custom_component(custom_component: CustomComponent) frontend_node, field_config, custom_component.get_function_entrypoint_args ) - frontend_node = add_code_field(frontend_node, custom_component.code) + frontend_node = add_code_field( + frontend_node, custom_component.code, field_config.get("code", {}) + ) add_base_classes( frontend_node, custom_component.get_function_entrypoint_return_type From 4783b4781597d6974fb3829505067cecf34bc8cc Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 17:20:29 -0300 Subject: [PATCH 220/221] =?UTF-8?q?=F0=9F=94=A7=20chore(custom.css):=20adj?= =?UTF-8?q?ust=20max-width=20and=20min-width=20of=20.ch-scrollycoding-cont?= =?UTF-8?q?ent=20and=20.ch-scrollycoding-sticker=20to=20improve=20layout?= =?UTF-8?q?=20=F0=9F=94=A7=20chore(custom.css):=20add=20min-height=20to=20?= =?UTF-8?q?.ch-scrollycoding-step-content=20for=20better=20spacing?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/src/css/custom.css | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css index 0510365d4..b79c4df59 100644 --- a/docs/src/css/custom.css +++ b/docs/src/css/custom.css @@ -190,13 +190,21 @@ body { width: 140px; } } +/* +.ch-scrollycoding { + gap: 10rem !important; +} */ .ch-scrollycoding-content { - max-width: 50% !important; + max-width: 55% 
!important; min-width: 40% !important; } .ch-scrollycoding-sticker { max-width: 60% !important; - min-width: 50% !important; + min-width: 45% !important; +} + +.ch-scrollycoding-step-content { + min-height: 70px; } From aeca627222cdac5eca831fbabfa3ba172b64069c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 27 Jul 2023 17:28:46 -0300 Subject: [PATCH 221/221] =?UTF-8?q?=F0=9F=93=9D=20docs(custom-component.md?= =?UTF-8?q?x):=20update=20guidelines=20for=20custom=20components?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✨ feat(custom-component.mdx): add rules and explanations for creating custom components 📝 docs(custom-component.mdx): update rule 1 to specify that the script must contain a single class inheriting from CustomComponent 📝 docs(custom-component.mdx): update rule 2 to specify that the class must have a build method 📝 docs(custom-component.mdx): update rule 3 to specify that the type annotations of the build method will be used to create the fields of the component 📝 docs(custom-component.mdx): update rule 4 to specify that the class can have a build_config method 📝 docs(custom-component.mdx): update rule 4 to provide details on the format of the build_config method 📝 docs(custom-component.mdx): update rule 4 to correct a typo in the display_name key 📝 docs(custom-component.mdx): update rule 4 to provide additional details on the options and multiline keys in the build_config method 📝 docs(custom-component.mdx): add example for creating a FlowRunner component 📝 docs(custom-component.mdx): update example for creating a FlowRunner component to include display_name and description attributes 📝 docs(custom-component.mdx): update example for creating a FlowRunner component to import Document from the schema module 📝 docs(custom-component.mdx): update example for creating a FlowRunner component to add parameters and return type to the build method --- docs/docs/guidelines/custom-component.mdx | 
142 +++++++++++++++++++--- 1 file changed, 127 insertions(+), 15 deletions(-) diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx index 7fa3fd1dd..e06775f2f 100644 --- a/docs/docs/guidelines/custom-component.mdx +++ b/docs/docs/guidelines/custom-component.mdx @@ -49,15 +49,13 @@ class BestComponent(CustomComponent): -## Now, let's go over the rules one by one. - -## Rules: +## Now, let's go over the rules one by one: ## Rule 1 -The class must inherit from _`CustomComponent`_. +The script must contain a **single class** that inherits from _`CustomComponent`_. ```python # focus @@ -82,9 +80,9 @@ class BestComponent(CustomComponent): --- -## Rule 2: +## Rule 2 -The class must have a _`build`_ method +The class must have a _`build`_ method which defines the fields of the component and is used to run it. ```python from langflow import CustomComponent @@ -110,7 +108,7 @@ class BestComponent(CustomComponent): ## Rule 3 -The type annotations of the _`build`_ method will be used to create the fields of the component +The type annotations of the _`build`_ method will be used to create the fields of the component. The types supported are: @@ -168,7 +166,7 @@ class BestComponent(CustomComponent): ... 
``` -## Rule 4: +## Rule 4 The class can have a [_`build_config`_](focus://11:19) method @@ -180,12 +178,12 @@ The _`dict`_ should have the following format: - The top level keys are the names of the fields - The values are _`dict`_ with the following keys: - - _`field_type: str`_: The type of the field (can be str, int, float, bool, file or any of the types supported by the _`build`_ method) - - _`is_list: bool`_: If the field is a list - - _`options: List[str]`_: If the field is a list, the options that will be displayed - - _`multiline: bool`_: If the field is a string, if it should be multiline + - _`field_type: str`_: The type of the field (can be any of the types supported by the _`build`_ method) + - _`is_list: bool`_: If the field is a list. + - _`options: List[str]`_: If the field is a list, the options that will be displayed. + - _`multiline: bool`_: If the field is a string, if it should be multiline. - _`input_types: List[str]`_: To be used when you want a _`str`_ field to have connectable handles. - - _`dispaly_name: str`_: To change the name of the field + - _`display_name: str`_: To change the name of the field - _`advanced: bool`_: To hide the field in the default view - _`password: bool`_: To mask the input - _`required: bool`_: To make the field required @@ -281,9 +279,10 @@ class BestComponent(CustomComponent): The _`build_config`_ method will be used to configure the fields of the component. -- _`multiline`_ is a special option that will give the option to open a text editor. +- _`multiline`_ will add the possibility of editing text in a spaceous text editor. -- _`is_list`_ is a special option that will give the option to add multiple values. When paired with _`options`_ it will transform it into a select field. +- _`is_list`_ is a special option that allows you to add many values. When paired with _`options`_ it will transform it into a dropdown menu with the options you provide. 
+ If you set the _`value`_ attribute to one of the options, it will be selected by default. ```python focus=12:19 from langflow import CustomComponent @@ -351,3 +350,116 @@ The parameters used are: We then instantiate a Tool and return it. + +## FlowRunner Example + +Now let's see how to create a component that runs other flows. + + + +```python +from langflow.interface.custom.custom_component import CustomComponent + +class MyComponent(CustomComponent): + display_name = "Custom Component" + + def build_config(self): + ... + + def build(self): + ... + +``` + +So, let's start by adding the _`display_name`_ and a _`description`_. + +--- + +```python +from langflow.interface.custom.custom_component import CustomComponent + +# focus +class FlowRunner(CustomComponent): + # focus + display_name = "Flow Runner" + # focus + description = "Run other flows" + + def build_config(self): + ... + + def build(self): + ... + +``` + +That's better. + +--- + +```python +from langflow.interface.custom.custom_component import CustomComponent +from langchain.schema import Document + +# focus[6:16] +class FlowRunner(CustomComponent): + # focus[19:35] + display_name = "Flow Runner" + # focus[18:35] + description = "Run other flows" + + def build_config(self): + ... + + def build(self): + ... + +``` + +Now let's import Document from the schema module which will be our return type for the _`build`_ method. + +--- + +```python +from langflow.interface.custom.custom_component import CustomComponent +from langchain.schema import Document + +class FlowRunner(CustomComponent): + display_name = "Flow Runner" + description = "Run other flows using a document as input." + + def build_config(self): + ... + + # focus + def build(self, flow_name: str, document: Document) -> Document: + ... + +``` + +Let's add the parameters and the return type to the _`build`_ method. 
+ +--- + +```python +from langflow.interface.custom.custom_component import CustomComponent +from langchain.schema import Document + +# focus +class FlowRunner(CustomComponent): + # focus + display_name = "Flow Runner" + # focus + description = "Run other flows using a document as input." + + def build_config(self): + ... + + def build(self, flow_name: str, document: Document) -> Document: + ... + +``` + +--- + +