Split Langflow into Langflow and Langflow Base (#1562)

* Initial Restructure

* Replace import langflow for import langflow_base

* Fix dependencies

* 🔧 chore(Makefile): refactor build process to separate base and frontend builds for better organization and maintainability

* 🚀 chore(Makefile): update build_frontend command to copy frontend build to the correct directory
🔖 chore(pyproject.toml): update python and httpx dependencies versions
🔧 chore(__init__.py): update import statement for load_flow_from_json function

* 🔖 chore(pyproject.toml): update package version from 0.0.6 to 0.0.8 to reflect changes in the codebase

* 🚀 feat(server.ts): change port variable case from lowercase port to uppercase PORT to improve semantics
🚀 feat(server.ts): add support for process.env.PORT environment variable to be able to run app on a configurable port

* 🐛 fix(server.ts): change port variable case from lowercase port to uppercase PORT to improve semantics
 feat(server.ts): add support for process.env.PORT environment variable to be able to run app on a configurable port
🚚 chore(pyproject.toml): update langflow-base version from 0.0.8 to 0.0.10
 feat(langflow_base): add new agent component LCAgentComponent to langflow_base
 feat(langflow_base): add new model component LCModelComponent to langflow_base
 feat(langflow_base): add new helper functions docs_to_records and records_to_text to langflow_base
 feat(langflow_base): add new flow helper functions list_flows, load_flow, run_flow, generate_function_for_flow, get_flow_inputs, build_schema_from_inputs to langflow_base
 feat(langflow_base): add new prompt component PromptComponent to langflow_base
 feat(langflow_base): add new chat components ChatInput and ChatOutput to langflow_base
 feat(langflow_base): add new model component OpenAIModelComponent to langflow_base
🚚 chore(main.py): update import path from langflow.main to langflow_base.main
🚚 chore(service.py): update import path from langflow.services.database.manager to langflow_base.services.database.manager
🚚 chore(factory.py): update import path from langflow.services to langflow_base.services
🚚 chore(service.py): update import path from langflow.services.plugins to langflow_base.services.plugins
🚚 chore(utils.py): update import path from langflow.services to langflow_base.services
🚚 chore(validate.py): update import path from langflow.field_typing to langflow_base.field_typing
🚚 chore(pyproject.toml): update langflow-base version from 0.0.8 to 0.0.10

* Update Makefile to install backend dependencies and build langflow

* Add langflow main module and update __init__.py

* Update langflow install process to use implicit namespace

* Add langflow-base as a local dependency

* Add setup_poetry target to Makefile

* Update Poetry version and add poetry-monorepo-dependency-plugin

* Refactor code to improve performance and readability

* Update imports to custom and load

* Update content-hash in poetry.lock

---------

Co-authored-by: Matheus <jacquesmats@gmail.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@logspace.ai>
This commit is contained in:
Matheus Jacques 2024-03-26 13:26:30 +01:00 committed by GitHub
commit 8172e62236
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
466 changed files with 5418 additions and 1144 deletions

View file

@ -11,7 +11,7 @@ on:
workflow_dispatch:
env:
POETRY_VERSION: "1.5.1"
POETRY_VERSION: "1.8.2"
jobs:
if_release:
@ -20,7 +20,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry==$POETRY_VERSION
run: pipx install poetry==$POETRY_VERSION && poetry self add poetry-monorepo-dependency-plugin
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:

View file

@ -10,7 +10,7 @@ on:
- "pyproject.toml"
env:
POETRY_VERSION: "1.5.1"
POETRY_VERSION: "1.8.2"
jobs:
if_release:
@ -19,7 +19,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry==$POETRY_VERSION
run: pipx install poetry==$POETRY_VERSION && poetry self add poetry-monorepo-dependency-plugin
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:

View file

@ -2,6 +2,10 @@
all: help
setup_poetry:
pipx install poetry
poetry self add poetry-monorepo-dependency-plugin
init:
@echo 'Installing backend dependencies'
make install_backend
@ -28,7 +32,7 @@ format:
lint:
make install_backend
poetry run mypy src/backend/langflow
poetry run mypy src/backend
poetry run ruff . --fix
install_frontend:
@ -69,6 +73,7 @@ frontendc:
install_backend:
poetry install --extras deploy
poetry run pip install -e src/backend/base/.
backend:
make install_backend
@ -82,24 +87,35 @@ else
endif
build_and_run:
echo 'Removing dist folder'
@echo 'Removing dist folder'
rm -rf dist
make build && poetry run pip install dist/*.tar.gz && poetry run langflow run
rm -rf src/backend/base/dist
make build
poetry run pip install dist/*.tar.gz && pip install src/backend/base/dist/*.tar.gz
poetry run langflow run
build_and_install:
echo 'Removing dist folder'
@echo 'Removing dist folder'
rm -rf dist
make build && poetry run pip install dist/*.tar.gz
rm -rf src/backend/base/dist
make build && poetry run pip install dist/*.tar.gz && pip install src/backend/base/dist/*.tar.gz
build_frontend:
cd src/frontend && CI='' npm run build
cp -r src/frontend/build src/backend/langflow/frontend
cp -r src/frontend/build src/backend/base/langflow/frontend
build:
make build_langflow_base
make build_langflow
build_langflow:
poetry build-rewrite-path-deps --version-pinning-strategy=semver
build_langflow_base:
make install_frontend
make build_frontend
poetry build --format sdist
rm -rf src/backend/langflow/frontend
cd src/backend/base && poetry build-rewrite-path-deps --version-pinning-strategy=semver
rm -rf src/backend/base/langflow/frontend
dev:
make install_frontend
@ -111,10 +127,18 @@ else
docker compose $(if $(debug),-f docker-compose.debug.yml) up
endif
publish:
make build
publish_base:
make build_langflow_base
cd src/backend/base && poetry publish
publish_langflow:
make build_langflow
poetry publish
publish:
make publish_base
make publish_langflow
help:
@echo '----'
@echo 'format - run code formatters'

View file

@ -35,7 +35,7 @@ We will cover how to:
<summary>Example Code</summary>
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class FlowRunner(CustomComponent):
@ -75,7 +75,7 @@ class FlowRunner(CustomComponent):
<CH.Scrollycoding rows={20} className={""}>
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
class MyComponent(CustomComponent):
@ -95,7 +95,7 @@ The typical structure of a Custom Component is composed of _`display_name`_ and
---
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
# focus
@ -118,7 +118,7 @@ Let's start by defining our component's _`display_name`_ and _`description`_.
---
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
# focus
from langchain.schema import Document
@ -140,7 +140,7 @@ Second, we will import _`Document`_ from the [_langchain.schema_](https://docs.l
---
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
# focus
from langchain.schema import Document
@ -167,7 +167,7 @@ Now, let's add the [parameters](focus://11[20:55]) and the [return type](focus:/
---
```python focus=13:14
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
@ -189,7 +189,7 @@ We can now start writing the _`build`_ method. Let's list available flows in "My
---
```python focus=15:18
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
@ -222,7 +222,7 @@ And retrieve a flow that matches the selected name (we'll make a dropdown input
---
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
@ -250,7 +250,7 @@ You can load this flow using _`get_flow`_ and set a _`tweaks`_ dictionary to cus
---
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
@ -287,7 +287,7 @@ The content of a document can be extracted using the _`page_content`_ attribute,
---
```python focus=9:16
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
@ -365,4 +365,6 @@ Done! This is what our script and custom component looks like:
}}
/>
</div>
</div>import ZoomableImage from "/src/theme/ZoomableImage.js";
import Admonition from "@theme/Admonition";

View file

@ -30,7 +30,7 @@ Here is an example:
<CH.Code lineNumbers={false}>
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class DocumentProcessor(CustomComponent):
@ -92,7 +92,7 @@ The Python script for every Custom Component should follow a set of rules. Let's
The script must contain a **single class** that inherits from _`CustomComponent`_.
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@ -113,7 +113,7 @@ class MyComponent(CustomComponent):
This class requires a _`build`_ method used to run the component and define its fields.
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@ -134,7 +134,7 @@ class MyComponent(CustomComponent):
The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLLM, or basic Python types). Check out all supported types in the [component reference](../components/custom).
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@ -153,7 +153,7 @@ class MyComponent(CustomComponent):
---
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@ -179,7 +179,7 @@ Check out the [component reference](../components/custom) for more details on th
---
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@ -204,7 +204,7 @@ Let's create a custom component that processes a document (_`langchain.schema.Do
To start, let's choose a name for our component by adding a _`display_name`_ attribute. This name will appear on the canvas. The name of the class is not relevant, but let's call it _`DocumentProcessor`_.
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
# focus
@ -227,7 +227,7 @@ class DocumentProcessor(CustomComponent):
We can also write a description for it using a _`description`_ attribute.
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class DocumentProcessor(CustomComponent):
@ -244,7 +244,7 @@ class DocumentProcessor(CustomComponent):
---
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class DocumentProcessor(CustomComponent):
@ -287,7 +287,7 @@ The _`build_config`_ method is here defined to customize the component fields.
- _`display_name`_ is the name of the field to be displayed.
```python
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langchain.schema import Document
class DocumentProcessor(CustomComponent):
@ -406,4 +406,6 @@ Langflow will attempt to load all of the components found in the specified direc
Once your custom components have been loaded successfully, they will appear in Langflow's sidebar. From there, you can add them to your Langflow canvas for use. However, please note that components with errors will not be available for addition to the canvas. Always ensure your code is error-free before attempting to load components.
Remember, creating custom components allows you to extend the functionality of Langflow to better suit your unique needs. Happy coding!
Remember, creating custom components allows you to extend the functionality of Langflow to better suit your unique needs. Happy coding!import ZoomableImage from "/src/theme/ZoomableImage.js";
import Admonition from "@theme/Admonition";

View file

@ -49,7 +49,7 @@ The Code button shows snippets to use your flow as a Python object or an API.
Through the Langflow package, you can load a flow from a JSON file and use it as a LangChain object.
```py
from langflow import load_flow_from_json
from langflow.load import load_flow_from_json
flow = load_flow_from_json("path/to/flow.json")
# Now you can use it like any chain
@ -66,4 +66,9 @@ The example below shows a Python script making a POST request to a local API end
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }}
>
<ReactPlayer playing controls url="/videos/langflow_api.mp4" />
</div>
</div>import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import ReactPlayer from "react-player";
import Admonition from "@theme/Admonition";

File diff suppressed because one or more lines are too long

216
poetry.lock generated
View file

@ -3868,6 +3868,51 @@ files = [
[package.dependencies]
six = "*"
[[package]]
name = "langflow-base"
version = "0.0.11"
description = "A Python package with a built-in web application"
optional = false
python-versions = ">=3.9,<3.12"
files = []
develop = true
[package.dependencies]
alembic = "^1.13.0"
bcrypt = "4.0.1"
cachetools = "^5.3.1"
docstring-parser = "^0.15"
fastapi = "^0.109.0"
gunicorn = "^21.2.0"
httpx = "^0.24.0"
langchain = "~0.1.0"
langchain-experimental = "*"
loguru = "^0.7.1"
multiprocess = "^0.70.14"
orjson = "3.9.15"
pandas = "2.2.0"
passlib = "^1.7.4"
pillow = "^10.2.0"
platformdirs = "^4.2.0"
pydantic = "^2.5.0"
pydantic-settings = "^2.1.0"
python-jose = "^3.3.0"
python-multipart = "^0.0.7"
rich = "^13.7.0"
sqlmodel = "^0.0.14"
typer = "^0.9.0"
uvicorn = "^0.27.0"
websockets = "^10.3"
[package.extras]
all = []
deploy = []
local = []
[package.source]
type = "directory"
url = "src/backend/base"
[[package]]
name = "langfuse"
version = "2.21.1"
@ -7011,24 +7056,6 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "0.23.6"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"},
{file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"},
]
[package.dependencies]
pytest = ">=7.0.0,<9"
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
name = "pytest-cov"
version = "4.1.0"
@ -7632,19 +7659,19 @@ full = ["numpy"]
[[package]]
name = "realtime"
version = "1.0.2"
version = "1.0.0"
description = ""
optional = false
python-versions = ">=3.8,<4.0"
files = [
{file = "realtime-1.0.2-py3-none-any.whl", hash = "sha256:8f8375199fd917cd0ded818702321f91b208ab72794ade0a33cee9d55ae30f11"},
{file = "realtime-1.0.2.tar.gz", hash = "sha256:776170a4329edc869b91e104c554cda02c8bf8e052cbb93c377e22482870959c"},
{file = "realtime-1.0.0-py3-none-any.whl", hash = "sha256:ceab9e292211ab08b5792ac52b3fa25398440031d5b369bd5799b8125056e2d8"},
{file = "realtime-1.0.0.tar.gz", hash = "sha256:14e540c4a0cc2736ae83e0cbd7efbbfb8b736df1681df2b9141556cb4848502d"},
]
[package.dependencies]
python-dateutil = ">=2.8.1,<3.0.0"
typing-extensions = ">=4.2.0,<5.0.0"
websockets = ">=11.0,<12.0"
websockets = ">=10.3,<11.0"
[[package]]
name = "red-black-tree-mod"
@ -9595,81 +9622,80 @@ test = ["websockets"]
[[package]]
name = "websockets"
version = "11.0.3"
version = "10.4"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
optional = false
python-versions = ">=3.7"
files = [
{file = "websockets-11.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac"},
{file = "websockets-11.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d"},
{file = "websockets-11.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f"},
{file = "websockets-11.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"},
{file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11"},
{file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca"},
{file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54"},
{file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4"},
{file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526"},
{file = "websockets-11.0.3-cp310-cp310-win32.whl", hash = "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69"},
{file = "websockets-11.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f"},
{file = "websockets-11.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb"},
{file = "websockets-11.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288"},
{file = "websockets-11.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d"},
{file = "websockets-11.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3"},
{file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b"},
{file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6"},
{file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97"},
{file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf"},
{file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd"},
{file = "websockets-11.0.3-cp311-cp311-win32.whl", hash = "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c"},
{file = "websockets-11.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8"},
{file = "websockets-11.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152"},
{file = "websockets-11.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f"},
{file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b"},
{file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb"},
{file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007"},
{file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0"},
{file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af"},
{file = "websockets-11.0.3-cp37-cp37m-win32.whl", hash = "sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f"},
{file = "websockets-11.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de"},
{file = "websockets-11.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0"},
{file = "websockets-11.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae"},
{file = "websockets-11.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99"},
{file = "websockets-11.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa"},
{file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86"},
{file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c"},
{file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0"},
{file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e"},
{file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788"},
{file = "websockets-11.0.3-cp38-cp38-win32.whl", hash = "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74"},
{file = "websockets-11.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f"},
{file = "websockets-11.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8"},
{file = "websockets-11.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd"},
{file = "websockets-11.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016"},
{file = "websockets-11.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61"},
{file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b"},
{file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd"},
{file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7"},
{file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1"},
{file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311"},
{file = "websockets-11.0.3-cp39-cp39-win32.whl", hash = "sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128"},
{file = "websockets-11.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e"},
{file = "websockets-11.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf"},
{file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5"},
{file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998"},
{file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b"},
{file = "websockets-11.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb"},
{file = "websockets-11.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20"},
{file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931"},
{file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9"},
{file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280"},
{file = "websockets-11.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b"},
{file = "websockets-11.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82"},
{file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c"},
{file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d"},
{file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4"},
{file = "websockets-11.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602"},
{file = "websockets-11.0.3-py3-none-any.whl", hash = "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6"},
{file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"},
{file = "websockets-10.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d58804e996d7d2307173d56c297cf7bc132c52df27a3efaac5e8d43e36c21c48"},
{file = "websockets-10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc0b82d728fe21a0d03e65f81980abbbcb13b5387f733a1a870672c5be26edab"},
{file = "websockets-10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba089c499e1f4155d2a3c2a05d2878a3428cf321c848f2b5a45ce55f0d7d310c"},
{file = "websockets-10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33d69ca7612f0ddff3316b0c7b33ca180d464ecac2d115805c044bf0a3b0d032"},
{file = "websockets-10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62e627f6b6d4aed919a2052efc408da7a545c606268d5ab5bfab4432734b82b4"},
{file = "websockets-10.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ea7b82bfcae927eeffc55d2ffa31665dc7fec7b8dc654506b8e5a518eb4d50"},
{file = "websockets-10.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e0cb5cc6ece6ffa75baccfd5c02cffe776f3f5c8bf486811f9d3ea3453676ce8"},
{file = "websockets-10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae5e95cfb53ab1da62185e23b3130e11d64431179debac6dc3c6acf08760e9b1"},
{file = "websockets-10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7c584f366f46ba667cfa66020344886cf47088e79c9b9d39c84ce9ea98aaa331"},
{file = "websockets-10.4-cp310-cp310-win32.whl", hash = "sha256:b029fb2032ae4724d8ae8d4f6b363f2cc39e4c7b12454df8df7f0f563ed3e61a"},
{file = "websockets-10.4-cp310-cp310-win_amd64.whl", hash = "sha256:8dc96f64ae43dde92530775e9cb169979f414dcf5cff670455d81a6823b42089"},
{file = "websockets-10.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47a2964021f2110116cc1125b3e6d87ab5ad16dea161949e7244ec583b905bb4"},
{file = "websockets-10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e789376b52c295c4946403bd0efecf27ab98f05319df4583d3c48e43c7342c2f"},
{file = "websockets-10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d3f0b61c45c3fa9a349cf484962c559a8a1d80dae6977276df8fd1fa5e3cb8c"},
{file = "websockets-10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55b5905705725af31ccef50e55391621532cd64fbf0bc6f4bac935f0fccec46"},
{file = "websockets-10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00c870522cdb69cd625b93f002961ffb0c095394f06ba8c48f17eef7c1541f96"},
{file = "websockets-10.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f38706e0b15d3c20ef6259fd4bc1700cd133b06c3c1bb108ffe3f8947be15fa"},
{file = "websockets-10.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f2c38d588887a609191d30e902df2a32711f708abfd85d318ca9b367258cfd0c"},
{file = "websockets-10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe10ddc59b304cb19a1bdf5bd0a7719cbbc9fbdd57ac80ed436b709fcf889106"},
{file = "websockets-10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:90fcf8929836d4a0e964d799a58823547df5a5e9afa83081761630553be731f9"},
{file = "websockets-10.4-cp311-cp311-win32.whl", hash = "sha256:b9968694c5f467bf67ef97ae7ad4d56d14be2751000c1207d31bf3bb8860bae8"},
{file = "websockets-10.4-cp311-cp311-win_amd64.whl", hash = "sha256:a7a240d7a74bf8d5cb3bfe6be7f21697a28ec4b1a437607bae08ac7acf5b4882"},
{file = "websockets-10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:74de2b894b47f1d21cbd0b37a5e2b2392ad95d17ae983e64727e18eb281fe7cb"},
{file = "websockets-10.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3a686ecb4aa0d64ae60c9c9f1a7d5d46cab9bfb5d91a2d303d00e2cd4c4c5cc"},
{file = "websockets-10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d15c968ea7a65211e084f523151dbf8ae44634de03c801b8bd070b74e85033"},
{file = "websockets-10.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00213676a2e46b6ebf6045bc11d0f529d9120baa6f58d122b4021ad92adabd41"},
{file = "websockets-10.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e23173580d740bf8822fd0379e4bf30aa1d5a92a4f252d34e893070c081050df"},
{file = "websockets-10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:dd500e0a5e11969cdd3320935ca2ff1e936f2358f9c2e61f100a1660933320ea"},
{file = "websockets-10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4239b6027e3d66a89446908ff3027d2737afc1a375f8fd3eea630a4842ec9a0c"},
{file = "websockets-10.4-cp37-cp37m-win32.whl", hash = "sha256:8a5cc00546e0a701da4639aa0bbcb0ae2bb678c87f46da01ac2d789e1f2d2038"},
{file = "websockets-10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a9f9a735deaf9a0cadc2d8c50d1a5bcdbae8b6e539c6e08237bc4082d7c13f28"},
{file = "websockets-10.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c1289596042fad2cdceb05e1ebf7aadf9995c928e0da2b7a4e99494953b1b94"},
{file = "websockets-10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0cff816f51fb33c26d6e2b16b5c7d48eaa31dae5488ace6aae468b361f422b63"},
{file = "websockets-10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dd9becd5fe29773d140d68d607d66a38f60e31b86df75332703757ee645b6faf"},
{file = "websockets-10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45ec8e75b7dbc9539cbfafa570742fe4f676eb8b0d3694b67dabe2f2ceed8aa6"},
{file = "websockets-10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f72e5cd0f18f262f5da20efa9e241699e0cf3a766317a17392550c9ad7b37d8"},
{file = "websockets-10.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185929b4808b36a79c65b7865783b87b6841e852ef5407a2fb0c03381092fa3b"},
{file = "websockets-10.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d27a7e34c313b3a7f91adcd05134315002aaf8540d7b4f90336beafaea6217c"},
{file = "websockets-10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:884be66c76a444c59f801ac13f40c76f176f1bfa815ef5b8ed44321e74f1600b"},
{file = "websockets-10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:931c039af54fc195fe6ad536fde4b0de04da9d5916e78e55405436348cfb0e56"},
{file = "websockets-10.4-cp38-cp38-win32.whl", hash = "sha256:db3c336f9eda2532ec0fd8ea49fef7a8df8f6c804cdf4f39e5c5c0d4a4ad9a7a"},
{file = "websockets-10.4-cp38-cp38-win_amd64.whl", hash = "sha256:48c08473563323f9c9debac781ecf66f94ad5a3680a38fe84dee5388cf5acaf6"},
{file = "websockets-10.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:40e826de3085721dabc7cf9bfd41682dadc02286d8cf149b3ad05bff89311e4f"},
{file = "websockets-10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56029457f219ade1f2fc12a6504ea61e14ee227a815531f9738e41203a429112"},
{file = "websockets-10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5fc088b7a32f244c519a048c170f14cf2251b849ef0e20cbbb0fdf0fdaf556f"},
{file = "websockets-10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc8709c00704194213d45e455adc106ff9e87658297f72d544220e32029cd3d"},
{file = "websockets-10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0154f7691e4fe6c2b2bc275b5701e8b158dae92a1ab229e2b940efe11905dff4"},
{file = "websockets-10.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c6d2264f485f0b53adf22697ac11e261ce84805c232ed5dbe6b1bcb84b00ff0"},
{file = "websockets-10.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9bc42e8402dc5e9905fb8b9649f57efcb2056693b7e88faa8fb029256ba9c68c"},
{file = "websockets-10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:edc344de4dac1d89300a053ac973299e82d3db56330f3494905643bb68801269"},
{file = "websockets-10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:84bc2a7d075f32f6ed98652db3a680a17a4edb21ca7f80fe42e38753a58ee02b"},
{file = "websockets-10.4-cp39-cp39-win32.whl", hash = "sha256:c94ae4faf2d09f7c81847c63843f84fe47bf6253c9d60b20f25edfd30fb12588"},
{file = "websockets-10.4-cp39-cp39-win_amd64.whl", hash = "sha256:bbccd847aa0c3a69b5f691a84d2341a4f8a629c6922558f2a70611305f902d74"},
{file = "websockets-10.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:82ff5e1cae4e855147fd57a2863376ed7454134c2bf49ec604dfe71e446e2193"},
{file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d210abe51b5da0ffdbf7b43eed0cfdff8a55a1ab17abbec4301c9ff077dd0342"},
{file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:942de28af58f352a6f588bc72490ae0f4ccd6dfc2bd3de5945b882a078e4e179"},
{file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b27d6c1c6cd53dc93614967e9ce00ae7f864a2d9f99fe5ed86706e1ecbf485"},
{file = "websockets-10.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3d3cac3e32b2c8414f4f87c1b2ab686fa6284a980ba283617404377cd448f631"},
{file = "websockets-10.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:da39dd03d130162deb63da51f6e66ed73032ae62e74aaccc4236e30edccddbb0"},
{file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389f8dbb5c489e305fb113ca1b6bdcdaa130923f77485db5b189de343a179393"},
{file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09a1814bb15eff7069e51fed0826df0bc0702652b5cb8f87697d469d79c23576"},
{file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff64a1d38d156d429404aaa84b27305e957fd10c30e5880d1765c9480bea490f"},
{file = "websockets-10.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b343f521b047493dc4022dd338fc6db9d9282658862756b4f6fd0e996c1380e1"},
{file = "websockets-10.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:932af322458da7e4e35df32f050389e13d3d96b09d274b22a7aa1808f292fee4"},
{file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a4162139374a49eb18ef5b2f4da1dd95c994588f5033d64e0bbfda4b6b6fcf"},
{file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c57e4c1349fbe0e446c9fa7b19ed2f8a4417233b6984277cce392819123142d3"},
{file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b627c266f295de9dea86bd1112ed3d5fafb69a348af30a2422e16590a8ecba13"},
{file = "websockets-10.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:05a7233089f8bd355e8cbe127c2e8ca0b4ea55467861906b80d2ebc7db4d6b72"},
{file = "websockets-10.4.tar.gz", hash = "sha256:eef610b23933c54d5d921c92578ae5f89813438fded840c2e9809d378dc765d3"},
]
[[package]]
@ -10149,4 +10175,8 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.12"
content-hash = "5c98ec89f0dbbf60bc4bbcb3d9ea4b84176b8af34b54dd3c77c817ba132b49a1"

View file

@ -24,30 +24,24 @@ documentation = "https://docs.langflow.org"
[tool.poetry.scripts]
langflow = "langflow.__main__:main"
[tool.poetry-monorepo-dependency-plugin]
enable = true
[tool.poetry.dependencies]
python = ">=3.10,<3.12"
langflow-base = { path = "./src/backend/base", develop = true }
duckdb = "^0.9.2"
fastapi = "^0.109.0"
uvicorn = "^0.27.0"
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.118.0"
typer = "^0.9.0"
gunicorn = "^21.2.0"
langchain = "~0.1.0"
openai = "^1.12.0"
pandas = "2.2.0"
chromadb = "^0.4.23"
huggingface-hub = { version = "^0.20.0", extras = ["inference"] }
rich = "^13.7.0"
llama-cpp-python = { version = "~0.2.0", optional = true }
networkx = "^3.1"
pypdf = "^4.0.0"
pysrt = "^1.1.2"
fake-useragent = "^1.4.0"
docstring-parser = "^0.15"
psycopg2-binary = "^2.9.6"
pyarrow = "^14.0.0"
wikipedia = "^1.4.0"
@ -56,15 +50,9 @@ weaviate-client = "*"
sentence-transformers = { version = "^2.3.1", optional = true }
ctransformers = { version = "^0.2.10", optional = true }
cohere = "^4.47.0"
python-multipart = "^0.0.7"
sqlmodel = "^0.0.14"
faiss-cpu = "^1.7.4"
anthropic = "^0.21.0"
orjson = "3.9.15"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
types-cachetools = "^5.3.0.5"
platformdirs = "^4.2.0"
pinecone-client = "^3.0.3"
pymongo = "^4.6.0"
supabase = "^2.3.0"
@ -72,22 +60,15 @@ certifi = "^2023.11.17"
psycopg = "^3.1.9"
psycopg-binary = "^3.1.9"
fastavro = "^1.8.0"
langchain-experimental = "*"
celery = { extras = ["redis"], version = "^5.3.6", optional = true }
redis = { version = "^5.0.1", optional = true }
flower = { version = "^2.0.0", optional = true }
alembic = "^1.13.0"
passlib = "^1.7.4"
bcrypt = "4.0.1"
python-jose = "^3.3.0"
metaphor-python = "^0.1.11"
pydantic = "^2.5.0"
pydantic-settings = "^2.1.0"
zep-python = "*"
pywin32 = { version = "^306", markers = "sys_platform == 'win32'" }
loguru = "^0.7.1"
langfuse = "^2.9.0"
pillow = "^10.2.0"
metal-sdk = "^2.5.0"
markupsafe = "^2.1.3"
extract-msg = "^0.47.0"
@ -118,7 +99,6 @@ python-docx = "^1.1.0"
langchain-astradb = "^0.1.0"
[tool.poetry.group.dev.dependencies]
pytest-asyncio = "^0.23.1"
types-redis = "^4.6.0.5"
ipykernel = "^6.29.0"
mypy = "^1.8.0"

View file

@ -0,0 +1,348 @@
import platform
import socket
import sys
import time
import webbrowser
from pathlib import Path
from typing import Optional
import httpx
import typer
from dotenv import load_dotenv
from multiprocess import Process, cpu_count # type: ignore
from rich import box
from rich import print as rprint
from rich.console import Console
from rich.panel import Panel
from rich.table import Table
from langflow.main import setup_app
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service, get_settings_service
from langflow.services.utils import initialize_services, initialize_settings_service
from langflow.utils.logger import configure, logger
console = Console()
app = typer.Typer(no_args_is_help=True)
def get_number_of_workers(workers=None):
    """
    Resolve the number of worker processes to spawn.

    ``None`` or ``-1`` means "auto": use (2 * CPU count) + 1, the common
    gunicorn sizing heuristic. Any other value is returned unchanged.
    """
    if workers is None or workers == -1:
        workers = 2 * cpu_count() + 1
    logger.debug(f"Number of workers: {workers}")
    return workers
def display_results(results):
    """
    Render migration results to the console, one rich table per database table.

    Each entry in ``results`` is expected to expose ``table_name`` and a
    ``results`` sequence whose items have ``name``, ``type`` and ``success``.
    """
    for table_results in results:
        result_table = Table(title=f"Migration {table_results.table_name}")
        for heading in ("Name", "Type", "Status"):
            result_table.add_column(heading)
        for result in table_results.results:
            if result.success:
                status, color = "Success", "green"
            else:
                status, color = "Failure", "red"
            result_table.add_row(result.name, result.type, f"[{color}]{status}[/{color}]")
        console.print(result_table)
        console.print()  # blank line between tables
def set_var_for_macos_issue():
    """
    Set environment variables gunicorn needs to fork safely on macOS.

    Sets OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES (avoids an Objective-C
    runtime error when gunicorn forks workers) and no_proxy="*"
    (works around a proxy-related segfault, see
    https://stackoverflow.com/questions/75747888/uwsgi-segmentation-fault-with-flask-python-app-behind-nginx-after-running-for-2).
    No-op on every platform other than Darwin.
    """
    if platform.system() != "Darwin":
        return
    import os

    os.environ["OBJC_DISABLE_INITIALIZE_FORK_SAFETY"] = "YES"
    os.environ["no_proxy"] = "*"  # to avoid error with gunicorn
    logger.debug("Set OBJC_DISABLE_INITIALIZE_FORK_SAFETY to YES to avoid error")
def update_settings(
    config: str,
    cache: Optional[str] = None,
    dev: bool = False,
    remove_api_keys: bool = False,
    components_path: Optional[Path] = None,
    store: bool = True,
):
    """Update the settings from a config file."""
    # The settings service must exist before settings can be read or updated.
    initialize_settings_service()
    settings = get_settings_service().settings
    if config:
        logger.debug(f"Loading settings from {config}")
        settings.update_from_yaml(config, dev=dev)
    if remove_api_keys:
        logger.debug(f"Setting remove_api_keys to {remove_api_keys}")
        settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
    if cache:
        logger.debug(f"Setting cache to {cache}")
        settings.update_settings(CACHE=cache)
    if components_path:
        logger.debug(f"Adding component path {components_path}")
        settings.update_settings(COMPONENTS_PATH=components_path)
    if not store:
        logger.debug("Setting store to False")
        settings.update_settings(STORE=False)
@app.command()
def run(
    host: str = typer.Option("127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"),
    workers: int = typer.Option(1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"),
    timeout: int = typer.Option(300, help="Worker timeout in seconds."),
    port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"),
    components_path: Optional[Path] = typer.Option(
        Path(__file__).parent / "components",
        help="Path to the directory containing custom components.",
        envvar="LANGFLOW_COMPONENTS_PATH",
    ),
    config: str = typer.Option(Path(__file__).parent / "config.yaml", help="Path to the configuration file."),
    # Optional .env file whose variables override the current environment.
    env_file: Path = typer.Option(None, help="Path to the .env file containing environment variables."),
    log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
    log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"),
    cache: Optional[str] = typer.Option(
        envvar="LANGFLOW_LANGCHAIN_CACHE",
        help="Type of cache to use. (InMemoryCache, SQLiteCache)",
        default=None,
    ),
    dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"),
    path: str = typer.Option(
        None,
        help="Path to the frontend directory containing build files. This is for development purposes only.",
        envvar="LANGFLOW_FRONTEND_PATH",
    ),
    open_browser: bool = typer.Option(
        True,
        help="Open the browser after starting the server.",
        envvar="LANGFLOW_OPEN_BROWSER",
    ),
    remove_api_keys: bool = typer.Option(
        False,
        help="Remove API keys from the projects saved in the database.",
        envvar="LANGFLOW_REMOVE_API_KEYS",
    ),
    backend_only: bool = typer.Option(
        False,
        help="Run only the backend server without the frontend.",
        envvar="LANGFLOW_BACKEND_ONLY",
    ),
    store: bool = typer.Option(
        True,
        help="Enables the store features.",
        envvar="LANGFLOW_STORE",
    ),
):
    """
    Run the Langflow server.

    Loads optional .env overrides, configures logging and settings, builds
    the FastAPI app, resolves a free port if the requested one is taken,
    then serves via uvicorn on Windows or gunicorn elsewhere. Returns
    early (without serving) when running under pytest.
    """
    set_var_for_macos_issue()
    # Variables from the .env file override the current environment.
    if env_file:
        load_dotenv(env_file, override=True)
    configure(log_level=log_level, log_file=log_file)
    update_settings(
        config,
        dev=dev,
        remove_api_keys=remove_api_keys,
        cache=cache,
        components_path=components_path,
        store=store,
    )
    # Create a Path object only if a frontend path was provided.
    static_files_dir: Optional[Path] = Path(path) if path else None
    app = setup_app(static_files_dir=static_files_dir, backend_only=backend_only)
    # If the requested port is taken, fall back to the next free one.
    if is_port_in_use(port, host):
        port = get_free_port(port)
    # Options in gunicorn's config format; run_on_windows ignores them.
    options = {
        "bind": f"{host}:{port}",
        "workers": get_number_of_workers(workers),
        "timeout": timeout,
    }
    # Under pytest we only build the app; we never start a server.
    if "pytest" in sys.modules:
        return
    if platform.system() in ["Windows"]:
        # Windows doesn't support gunicorn, so serve with uvicorn there.
        run_on_windows(host, port, log_level, options, app)
    else:
        # Serve with gunicorn on Linux/macOS (macOS env vars were set above
        # by set_var_for_macos_issue so gunicorn can fork safely).
        run_on_mac_or_linux(host, port, log_level, options, app, open_browser)
def run_on_mac_or_linux(host, port, log_level, options, app, open_browser=True):
    """
    Run the Langflow server in a child process and wait until it is healthy.

    Starts run_langflow in a separate process, polls ``/health`` once per
    second until it returns HTTP 200, then prints the banner and optionally
    opens the URL in the default browser.

    Bug fixed: the original only slept when the health request raised, so a
    reachable server answering non-200 (e.g. 503 while booting) caused a hot
    busy-loop. Now we sleep between every unsuccessful poll.
    """
    webapp_process = Process(target=run_langflow, args=(host, port, log_level, options, app))
    webapp_process.start()
    status_code = 0
    while status_code != 200:
        try:
            status_code = httpx.get(f"http://{host}:{port}/health").status_code
        except Exception:
            # Server not accepting connections yet; keep polling.
            pass
        if status_code != 200:
            time.sleep(1)
    print_banner(host, port)
    if open_browser:
        webbrowser.open(f"http://{host}:{port}")
def run_on_windows(host, port, log_level, options, app):
    """
    Run the Langflow server on Windows.

    Prints the startup banner first, then serves the app in the current
    process via run_langflow (which selects uvicorn on Windows).
    """
    print_banner(host, port)
    run_langflow(host, port, log_level, options, app)
def is_port_in_use(port, host="localhost"):
    """
    Check if a port is in use.

    Args:
        port (int): The port number to check.
        host (str): The host to check the port on. Defaults to 'localhost'.

    Returns:
        bool: True if the port is in use, False otherwise.
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # connect_ex returns 0 when something is listening at host:port.
        return probe.connect_ex((host, port)) == 0
    finally:
        probe.close()
def get_free_port(port):
    """
    Given a used port, find a free port.

    Args:
        port (int): The port number to start probing from.

    Returns:
        int: The first port >= ``port`` that is not in use.

    Raises:
        RuntimeError: if every port up to 65535 is in use (the original
            looped without bound past the valid TCP port range).
    """
    while is_port_in_use(port):
        port += 1
        if port > 65535:
            raise RuntimeError("No free port found in the valid TCP port range (<= 65535).")
    return port
def print_banner(host, port):
    """
    Print the Langflow welcome banner with the server URL and repo link.
    """
    word = "Langflow"
    colors = ["#3300cc"]
    # Color each character, cycling through the palette.
    styled_word = "".join(f"[{colors[i % len(colors)]}]{char}[/]" for i, char in enumerate(word))
    title = (
        f"[bold]Welcome to :chains: {styled_word} [/bold]\n\n"
        f"Access [link=http://{host}:{port}]http://{host}:{port}[/link]"
    )
    info_text = (
        "Collaborate, and contribute at our "
        "[bold][link=https://github.com/logspace-ai/langflow]GitHub Repo[/link][/bold] :rocket:"
    )
    # Everything goes inside a single rounded, blue-bordered panel.
    banner = Panel(f"{title}\n{info_text}", box=box.ROUNDED, border_style="blue", expand=False)
    rprint(banner)
def run_langflow(host, port, log_level, options, app):
    """
    Serve the Langflow app in the current process.

    Uses uvicorn on Windows (gunicorn is unsupported there) and a
    gunicorn-based LangflowApplication elsewhere. Ctrl-C exits quietly;
    any other failure is logged and terminates with exit code 1.
    """
    try:
        if platform.system() == "Windows":
            # gunicorn cannot run on Windows, so fall back to uvicorn.
            import uvicorn

            uvicorn.run(app, host=host, port=port, log_level=log_level)
        else:
            from langflow.server import LangflowApplication

            LangflowApplication(app, options).run()
    except KeyboardInterrupt:
        pass  # clean shutdown on Ctrl-C
    except Exception as e:
        logger.exception(e)
        sys.exit(1)
@app.command()
def superuser(
    username: str = typer.Option(..., prompt=True, help="Username for the superuser."),
    password: str = typer.Option(..., prompt=True, hide_input=True, help="Password for the superuser."),
    log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
):
    """
    Create a superuser.
    """
    configure(log_level=log_level)
    initialize_services()
    db_service = get_db_service()
    with session_getter(db_service) as session:
        from langflow.services.auth.utils import create_super_user

        if not create_super_user(db=session, username=username, password=password):
            typer.echo("Superuser creation failed.")
            return
        # Double-check that the user was persisted with the superuser flag.
        from langflow.services.database.models.user.model import User

        user: User = session.query(User).filter(User.username == username).first()
        if user is None or not user.is_superuser:
            typer.echo("Superuser creation failed.")
            return
        typer.echo("Superuser created successfully.")
@app.command()
def migration(test: bool = typer.Option(True, help="Run migrations in test mode.")):
    """
    Run or test migrations.

    With ``--no-test`` the migrations are applied for real first; in both
    cases a test pass is executed afterwards and its results are displayed.
    """
    initialize_services()
    database_service = get_db_service()
    if not test:
        database_service.run_migrations()
    outcome = database_service.run_migrations_test()
    display_results(outcome)
def main():
    """CLI entry point: dispatch to the typer application."""
    app()


if __name__ == "__main__":
    main()

View file

@ -66,14 +66,10 @@ def run_migrations_online() -> None:
try:
from langflow.services.database.factory import DatabaseServiceFactory
from langflow.services.deps import get_db_service
from langflow.services.manager import (initialize_settings_service,
service_manager)
from langflow.services.schema import ServiceType
from langflow.services.manager import initialize_settings_service, service_manager
initialize_settings_service()
service_manager.register_factory(
DatabaseServiceFactory()
)
service_manager.register_factory(DatabaseServiceFactory())
connectable = get_db_service().engine
except Exception as e:
logger.error(f"Error getting database engine: {e}")
@ -87,9 +83,7 @@ def run_migrations_online() -> None:
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata, render_as_batch=True
)
context.configure(connection=connection, target_metadata=target_metadata, render_as_batch=True)
with context.begin_transaction():
context.run_migrations()

View file

@ -26,20 +26,13 @@ def upgrade() -> None:
flow_constraints = inspector.get_unique_constraints("flow")
user_constraints = inspector.get_unique_constraints("user")
try:
if not any(
constraint["name"] == "uq_apikey_id" for constraint in api_key_constraints
):
if not any(constraint["name"] == "uq_apikey_id" for constraint in api_key_constraints):
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.create_unique_constraint("uq_apikey_id", ["id"])
if not any(
constraint["name"] == "uq_flow_id" for constraint in flow_constraints
):
if not any(constraint["name"] == "uq_flow_id" for constraint in flow_constraints):
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.create_unique_constraint("uq_flow_id", ["id"])
if not any(
constraint["name"] == "uq_user_id" for constraint in user_constraints
):
if not any(constraint["name"] == "uq_user_id" for constraint in user_constraints):
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.create_unique_constraint("uq_user_id", ["id"])
except Exception as e:
@ -57,16 +50,13 @@ def downgrade() -> None:
flow_constraints = inspector.get_unique_constraints("flow")
user_constraints = inspector.get_unique_constraints("user")
try:
if any(
constraint["name"] == "uq_apikey_id" for constraint in api_key_constraints
):
if any(constraint["name"] == "uq_apikey_id" for constraint in api_key_constraints):
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.drop_constraint("uq_user_id", type_="unique")
if any(constraint["name"] == "uq_flow_id" for constraint in flow_constraints):
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.drop_constraint("uq_flow_id", type_="unique")
if any(constraint["name"] == "uq_user_id" for constraint in user_constraints):
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.drop_constraint("uq_apikey_id", type_="unique")
except Exception as e:

View file

@ -24,10 +24,8 @@ def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.alter_column(
"name", existing_type=sqlmodel.sql.sqltypes.AutoString(), nullable=True
)
except Exception as e:
batch_op.alter_column("name", existing_type=sqlmodel.sql.sqltypes.AutoString(), nullable=True)
except Exception:
pass
# ### end Alembic commands ###
@ -37,6 +35,6 @@ def downgrade() -> None:
try:
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=False)
except Exception as e:
except Exception:
pass
# ### end Alembic commands ###

View file

@ -31,23 +31,16 @@ def upgrade() -> None:
# and other related indices
if "flowstyle" in existing_tables:
op.drop_table("flowstyle")
if "ix_flowstyle_flow_id" in [
index["name"] for index in inspector.get_indexes("flowstyle")
]:
op.drop_index(
"ix_flowstyle_flow_id", table_name="flowstyle", if_exists=True
)
if "ix_flowstyle_flow_id" in [index["name"] for index in inspector.get_indexes("flowstyle")]:
op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle", if_exists=True)
existing_indices_flow = []
existing_fks_flow = []
if "flow" in existing_tables:
existing_indices_flow = [
index["name"] for index in inspector.get_indexes("flow")
]
existing_indices_flow = [index["name"] for index in inspector.get_indexes("flow")]
# Existing foreign keys for the 'flow' table, if it exists
existing_fks_flow = [
fk["referred_table"] + "." + fk["referred_columns"][0]
for fk in inspector.get_foreign_keys("flow")
fk["referred_table"] + "." + fk["referred_columns"][0] for fk in inspector.get_foreign_keys("flow")
]
# Now check if the columns user_id exists in the 'flow' table
# If it does not exist, we need to create the foreign key
@ -67,9 +60,7 @@ def upgrade() -> None:
sa.UniqueConstraint("id", name="uq_user_id"),
)
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.create_index(
batch_op.f("ix_user_username"), ["username"], unique=True
)
batch_op.create_index(batch_op.f("ix_user_username"), ["username"], unique=True)
if "apikey" not in existing_tables:
op.create_table(
@ -82,20 +73,14 @@ def upgrade() -> None:
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"], ["user.id"], name="fk_apikey_user_id_user"
),
sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_apikey_user_id_user"),
sa.PrimaryKeyConstraint("id", name="pk_apikey"),
sa.UniqueConstraint("id", name="uq_apikey_id"),
)
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.create_index(
batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True
)
batch_op.create_index(batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True)
batch_op.create_index(batch_op.f("ix_apikey_name"), ["name"], unique=False)
batch_op.create_index(
batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False
)
batch_op.create_index(batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False)
if "flow" not in existing_tables:
op.create_table(
"flow",
@ -104,9 +89,7 @@ def upgrade() -> None:
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"], ["user.id"], name="fk_flow_user_id_user"
),
sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_flow_user_id_user"),
sa.PrimaryKeyConstraint("id", name="pk_flow"),
sa.UniqueConstraint("id", name="uq_flow_id"),
)
@ -129,16 +112,12 @@ def upgrade() -> None:
if "user.id" not in existing_fks_flow:
batch_op.create_foreign_key("fk_flow_user_id", "user", ["user_id"], ["id"])
if "ix_flow_description" not in existing_indices_flow:
batch_op.create_index(
batch_op.f("ix_flow_description"), ["description"], unique=False
)
batch_op.create_index(batch_op.f("ix_flow_description"), ["description"], unique=False)
if "ix_flow_name" not in existing_indices_flow:
batch_op.create_index(batch_op.f("ix_flow_name"), ["name"], unique=False)
with op.batch_alter_table("flow", schema=None) as batch_op:
if "ix_flow_user_id" not in existing_indices_flow:
batch_op.create_index(
batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
)
batch_op.create_index(batch_op.f("ix_flow_user_id"), ["user_id"], unique=False)
# ### end Alembic commands ###

View file

@ -31,9 +31,7 @@ def upgrade() -> None:
"credential",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("value", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column(
"provider", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("provider", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),

View file

@ -23,20 +23,14 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
table_names = inspector.get_table_names()
table_names = inspector.get_table_names() # noqa
column_names = [column["name"] for column in inspector.get_columns("flow")]
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("flow", schema=None) as batch_op:
if "icon" not in column_names:
batch_op.add_column(
sa.Column("icon", sqlmodel.sql.sqltypes.AutoString(), nullable=True)
)
batch_op.add_column(sa.Column("icon", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
if "icon_bg_color" not in column_names:
batch_op.add_column(
sa.Column(
"icon_bg_color", sqlmodel.sql.sqltypes.AutoString(), nullable=True
)
)
batch_op.add_column(sa.Column("icon_bg_color", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
# ### end Alembic commands ###
@ -44,7 +38,7 @@ def upgrade() -> None:
def downgrade() -> None:
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
table_names = inspector.get_table_names()
table_names = inspector.get_table_names() # noqa
column_names = [column["name"] for column in inspector.get_columns("flow")]
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("flow", schema=None) as batch_op:

View file

@ -29,18 +29,14 @@ def upgrade() -> None:
try:
if "is_component" not in flow_columns:
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.add_column(
sa.Column("is_component", sa.Boolean(), nullable=True)
)
except Exception as e:
batch_op.add_column(sa.Column("is_component", sa.Boolean(), nullable=True))
except Exception:
pass
try:
if "store_api_key" not in user_columns:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.add_column(
sa.Column("store_api_key", sqlmodel.AutoString(), nullable=True)
)
except Exception as e:
batch_op.add_column(sa.Column("store_api_key", sqlmodel.AutoString(), nullable=True))
except Exception:
pass
# ### end Alembic commands ###

View file

@ -30,9 +30,7 @@ def upgrade() -> None:
try:
if "name" in api_key_columns:
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.alter_column(
"name", existing_type=sa.VARCHAR(), nullable=False
)
batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=False)
except Exception as e:
print(e)
@ -40,15 +38,9 @@ def upgrade() -> None:
try:
with op.batch_alter_table("flow", schema=None) as batch_op:
if "updated_at" not in flow_columns:
batch_op.add_column(
sa.Column("updated_at", sa.DateTime(), nullable=True)
)
batch_op.add_column(sa.Column("updated_at", sa.DateTime(), nullable=True))
if "folder" not in flow_columns:
batch_op.add_column(
sa.Column(
"folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True
)
)
batch_op.add_column(sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
except Exception as e:
print(e)
@ -68,7 +60,6 @@ def downgrade() -> None:
pass
try:
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=True)
except Exception as e:

View file

@ -32,33 +32,19 @@ def upgrade() -> None:
with op.batch_alter_table("flow", schema=None) as batch_op:
flow_columns = [column["name"] for column in inspector.get_columns("flow")]
if "is_component" not in flow_columns:
batch_op.add_column(
sa.Column("is_component", sa.Boolean(), nullable=True)
)
batch_op.add_column(sa.Column("is_component", sa.Boolean(), nullable=True))
if "updated_at" not in flow_columns:
batch_op.add_column(
sa.Column("updated_at", sa.DateTime(), nullable=True)
)
batch_op.add_column(sa.Column("updated_at", sa.DateTime(), nullable=True))
if "folder" not in flow_columns:
batch_op.add_column(
sa.Column(
"folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True
)
)
batch_op.add_column(sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
if "user_id" not in flow_columns:
batch_op.add_column(
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)
)
batch_op.add_column(sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True))
indices = inspector.get_indexes("flow")
indices_names = [index["name"] for index in indices]
if "ix_flow_user_id" not in indices_names:
batch_op.create_index(
batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
)
batch_op.create_index(batch_op.f("ix_flow_user_id"), ["user_id"], unique=False)
if "fk_flow_user_id_user" not in indices_names:
batch_op.create_foreign_key(
"fk_flow_user_id_user", "user", ["user_id"], ["id"]
)
batch_op.create_foreign_key("fk_flow_user_id_user", "user", ["user_id"], ["id"])
except Exception:
pass

View file

@ -33,21 +33,13 @@ def upgrade() -> None:
if "updated_at" not in flow_columns:
batch_op.add_column(sa.Column("updated_at", sa.DateTime(), nullable=True))
if "folder" not in flow_columns:
batch_op.add_column(
sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True)
)
batch_op.add_column(sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
if "user_id" not in flow_columns:
batch_op.add_column(
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)
)
batch_op.add_column(sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True))
if "ix_flow_user_id" not in flow_indexes:
batch_op.create_index(
batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
)
batch_op.create_index(batch_op.f("ix_flow_user_id"), ["user_id"], unique=False)
if "flow_user_id_fkey" not in flow_fks:
batch_op.create_foreign_key(
"flow_user_id_fkey", "user", ["user_id"], ["id"]
)
batch_op.create_foreign_key("flow_user_id_fkey", "user", ["user_id"], ["id"])
def downgrade() -> None:

View file

@ -19,7 +19,7 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
connection = op.get_bind()
connection = op.get_bind() # noqa
pass
# ### end Alembic commands ###

View file

@ -22,9 +22,7 @@ def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.alter_column(
"user_id", existing_type=sa.CHAR(length=32), nullable=True
)
batch_op.alter_column("user_id", existing_type=sa.CHAR(length=32), nullable=True)
except Exception as e:
print(e)
pass
@ -36,9 +34,7 @@ def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.alter_column(
"user_id", existing_type=sa.CHAR(length=32), nullable=False
)
batch_op.alter_column("user_id", existing_type=sa.CHAR(length=32), nullable=False)
except Exception as e:
print(e)
pass

View file

@ -31,9 +31,7 @@ def upgrade() -> None:
try:
if "credential" in tables and "fk_credential_user_id" not in foreign_keys_names:
with op.batch_alter_table("credential", schema=None) as batch_op:
batch_op.create_foreign_key(
"fk_credential_user_id", "user", ["user_id"], ["id"]
)
batch_op.create_foreign_key("fk_credential_user_id", "user", ["user_id"], ["id"])
except Exception as e:
print(e)
pass

View file

@ -8,20 +8,10 @@ from langflow.api.v1.schemas import ApiKeyCreateRequest, ApiKeysResponse
from langflow.services.auth import utils as auth_utils
# Assuming you have these methods in your service layer
from langflow.services.database.models.api_key.crud import (
create_api_key,
delete_api_key,
get_api_keys,
)
from langflow.services.database.models.api_key.model import (
ApiKeyCreate,
UnmaskedApiKeyRead,
)
from langflow.services.database.models.api_key.crud import create_api_key, delete_api_key, get_api_keys
from langflow.services.database.models.api_key.model import ApiKeyCreate, UnmaskedApiKeyRead
from langflow.services.database.models.user.model import User
from langflow.services.deps import (
get_session,
get_settings_service,
)
from langflow.services.deps import get_session, get_settings_service
if TYPE_CHECKING:
pass

View file

@ -239,7 +239,7 @@ async def create_upload_file(
# get endpoint to return version of langflow
@router.get("/version")
def get_version():
from langflow import __version__
from langflow.version import __version__
return {"version": __version__}

View file

@ -4,6 +4,8 @@ from io import BytesIO
from fastapi import APIRouter, Depends, HTTPException, UploadFile
from fastapi.responses import StreamingResponse
from langflow.api.v1.schemas import UploadFileResponse
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.flow import Flow

View file

@ -12,12 +12,7 @@ from langflow.api.utils import remove_api_keys, validate_is_component
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.initial_setup.setup import STARTER_FOLDER_NAME
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.flow import (
Flow,
FlowCreate,
FlowRead,
FlowUpdate,
)
from langflow.services.database.models.flow import Flow, FlowCreate, FlowRead, FlowUpdate
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session, get_settings_service
from langflow.services.settings.service import SettingsService

View file

@ -1,6 +1,8 @@
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Query
from langflow.services.deps import get_monitor_service
from langflow.services.monitor.schema import VertexBuildMapModel
from langflow.services.monitor.service import MonitorService

View file

@ -3,12 +3,7 @@ from collections import defaultdict
from fastapi import APIRouter, HTTPException
from loguru import logger
from langflow.api.v1.base import (
Code,
CodeValidationResponse,
PromptValidationResponse,
ValidatePromptRequest,
)
from langflow.api.v1.base import Code, CodeValidationResponse, PromptValidationResponse, ValidatePromptRequest
from langflow.base.prompts.utils import (
add_new_variables_to_template,
get_old_custom_fields,

View file

@ -1,9 +1,8 @@
from typing import List, Union
from langchain.agents import AgentExecutor, BaseMultiActionAgent, BaseSingleActionAgent
from langflow import CustomComponent
from langflow.field_typing import BaseMemory, Text, Tool
from langflow.interface.custom.custom_component import CustomComponent
class LCAgentComponent(CustomComponent):

View file

@ -1,8 +1,8 @@
import warnings
from typing import Optional, Union
from langflow import CustomComponent
from langflow.field_typing import Text
from langflow.interface.custom.custom_component import CustomComponent
from langflow.memory import add_messages
from langflow.schema import Record

View file

@ -1,7 +1,7 @@
from typing import Optional
from langflow import CustomComponent
from langflow.field_typing import Text
from langflow.interface.custom.custom_component import CustomComponent
class TextComponent(CustomComponent):

View file

@ -4,7 +4,7 @@ from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.language_models.llms import LLM
from langchain_core.messages import HumanMessage, SystemMessage
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
class LCModelComponent(CustomComponent):

View file

@ -1,8 +1,9 @@
from typing import Callable, List, Optional, Union
from langchain.agents import AgentExecutor, AgentType, initialize_agent, types
from langflow import CustomComponent
from langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool
from langflow.interface.custom.custom_component import CustomComponent
class AgentInitializerComponent(CustomComponent):

View file

@ -1,6 +1,6 @@
from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langflow.field_typing import AgentExecutor, BaseLanguageModel

View file

@ -1,9 +1,10 @@
from langflow import CustomComponent
from langchain.agents import AgentExecutor, create_json_agent
from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
from langflow.field_typing import (
BaseLanguageModel,
)
from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
from langflow.interface.custom.custom_component import CustomComponent
class JsonAgentComponent(CustomComponent):

View file

@ -9,8 +9,9 @@ from langchain.prompts.chat import MessagesPlaceholder
from langchain.schema.memory import BaseMemory
from langchain.tools import Tool
from langchain_community.chat_models import ChatOpenAI
from langflow import CustomComponent
from langflow.field_typing.range_spec import RangeSpec
from langflow.interface.custom.custom_component import CustomComponent
class ConversationalAgent(CustomComponent):

View file

@ -1,10 +1,12 @@
from langflow import CustomComponent
from typing import Union, Callable
from typing import Callable, Union
from langchain.agents import AgentExecutor
from langflow.field_typing import BaseLanguageModel
from langchain_community.agent_toolkits.sql.base import create_sql_agent
from langchain.sql_database import SQLDatabase
from langchain_community.agent_toolkits import SQLDatabaseToolkit
from langchain_community.agent_toolkits.sql.base import create_sql_agent
from langflow.field_typing import BaseLanguageModel
from langflow.interface.custom.custom_component import CustomComponent
class SQLAgentComponent(CustomComponent):

View file

@ -1,8 +1,10 @@
from langflow import CustomComponent
from typing import Callable, Union
from langchain.agents import AgentExecutor, create_vectorstore_agent
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit
from typing import Union, Callable
from langflow.field_typing import BaseLanguageModel
from langflow.interface.custom.custom_component import CustomComponent
class VectorStoreAgentComponent(CustomComponent):

View file

@ -1,9 +1,11 @@
from langflow import CustomComponent
from langchain_core.language_models.base import BaseLanguageModel
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit
from langchain.agents import create_vectorstore_router_agent
from typing import Callable
from langchain.agents import create_vectorstore_router_agent
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit
from langchain_core.language_models.base import BaseLanguageModel
from langflow.interface.custom.custom_component import CustomComponent
class VectorStoreRouterAgentComponent(CustomComponent):
display_name = "VectorStoreRouterAgent"

View file

@ -2,8 +2,8 @@ from typing import Optional
from langchain.chains import ConversationChain
from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel, BaseMemory, Text
from langflow.interface.custom.custom_component import CustomComponent
class ConversationChainComponent(CustomComponent):

View file

@ -2,13 +2,8 @@ from typing import Optional
from langchain.chains import LLMChain
from langflow import CustomComponent
from langflow.field_typing import (
BaseLanguageModel,
BaseMemory,
BasePromptTemplate,
Text,
)
from langflow.field_typing import BaseLanguageModel, BaseMemory, BasePromptTemplate, Text
from langflow.interface.custom.custom_component import CustomComponent
class LLMChainComponent(CustomComponent):

View file

@ -1,7 +1,7 @@
from langchain.chains import LLMCheckerChain
from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel, Text
from langflow.interface.custom.custom_component import CustomComponent
class LLMCheckerChainComponent(CustomComponent):

View file

@ -2,8 +2,8 @@ from typing import Optional
from langchain.chains import LLMChain, LLMMathChain
from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel, BaseMemory, Text
from langflow.interface.custom.custom_component import CustomComponent
class LLMMathChainComponent(CustomComponent):

View file

@ -4,8 +4,8 @@ from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
from langchain.chains.retrieval_qa.base import RetrievalQA
from langchain_core.documents import Document
from langflow import CustomComponent
from langflow.field_typing import BaseMemory, BaseRetriever, Text
from langflow.interface.custom.custom_component import CustomComponent
class RetrievalQAComponent(CustomComponent):

View file

@ -3,8 +3,8 @@ from typing import Optional
from langchain.chains import RetrievalQAWithSourcesChain
from langchain_core.documents import Document
from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text
from langflow.interface.custom.custom_component import CustomComponent
class RetrievalQAWithSourcesChainComponent(CustomComponent):

View file

@ -5,8 +5,8 @@ from langchain_community.utilities.sql_database import SQLDatabase
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import Runnable
from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel, Text
from langflow.interface.custom.custom_component import CustomComponent
class SQLGeneratorComponent(CustomComponent):

View file

@ -4,7 +4,7 @@ from typing import List, Optional
import httpx
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record

View file

@ -1,11 +1,7 @@
from typing import Any, Dict, List, Optional
from langflow import CustomComponent
from langflow.base.data.utils import (
parallel_load_records,
parse_text_file_to_record,
retrieve_file_paths,
)
from langflow.base.data.utils import parallel_load_records, parse_text_file_to_record, retrieve_file_paths
from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record

View file

@ -1,7 +1,7 @@
from typing import Any, Dict, List
from langflow import CustomComponent
from langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record
from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record

View file

@ -1,6 +1,6 @@
from typing import List
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record
from langflow.utils.constants import LOADERS_INFO

View file

@ -2,7 +2,7 @@ from typing import Any, Dict
from langchain_community.document_loaders.web_base import WebBaseLoader
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record

View file

@ -3,8 +3,7 @@ from typing import Optional
from langchain.embeddings.base import Embeddings
from langchain_community.embeddings import BedrockEmbeddings
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
class AmazonBedrockEmeddingsComponent(CustomComponent):

View file

@ -1,7 +1,7 @@
from langchain.embeddings.base import Embeddings
from langchain_community.embeddings import AzureOpenAIEmbeddings
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
class AzureOpenAIEmbeddingsComponent(CustomComponent):

View file

@ -1,7 +1,8 @@
from typing import Optional
from langchain_community.embeddings.cohere import CohereEmbeddings
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
class CohereEmbeddingsComponent(CustomComponent):

View file

@ -1,7 +1,9 @@
from langflow import CustomComponent
from typing import Optional, Dict
from typing import Dict, Optional
from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings
from langflow.interface.custom.custom_component import CustomComponent
class HuggingFaceEmbeddingsComponent(CustomComponent):
display_name = "HuggingFaceEmbeddings"

View file

@ -1,9 +1,10 @@
from typing import Dict, Optional
from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings
from langflow import CustomComponent
from pydantic.v1.types import SecretStr
from langflow.interface.custom.custom_component import CustomComponent
class HuggingFaceInferenceAPIEmbeddingsComponent(CustomComponent):
display_name = "HuggingFaceInferenceAPIEmbeddings"

View file

@ -1,9 +1,10 @@
from typing import Optional
from langflow import CustomComponent
from langchain.embeddings.base import Embeddings
from langchain_community.embeddings import OllamaEmbeddings
from langflow.interface.custom.custom_component import CustomComponent
class OllamaEmbeddingsComponent(CustomComponent):
"""

View file

@ -1,10 +1,11 @@
from typing import Any, Callable, Dict, List, Optional, Union
from langchain_openai.embeddings.base import OpenAIEmbeddings
from langflow import CustomComponent
from langflow.field_typing import NestedDict
from pydantic.v1.types import SecretStr
from langflow.field_typing import NestedDict
from langflow.interface.custom.custom_component import CustomComponent
class OpenAIEmbeddingsComponent(CustomComponent):
display_name = "OpenAIEmbeddings"

View file

@ -1,6 +1,8 @@
from langflow import CustomComponent
from typing import List, Optional
from langchain_community.embeddings import VertexAIEmbeddings
from typing import Optional, List
from langflow.interface.custom.custom_component import CustomComponent
class VertexAIEmbeddingsComponent(CustomComponent):

View file

@ -1,4 +1,4 @@
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
from langflow.memory import delete_messages, get_messages

View file

@ -1,4 +1,4 @@
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record

View file

@ -3,7 +3,7 @@ from typing import Any, List, Optional, Text
from langchain_core.tools import StructuredTool
from loguru import logger
from langflow import CustomComponent
from langflow.custom import CustomComponent
from langflow.field_typing import Tool
from langflow.graph.graph.base import Graph
from langflow.helpers.flow import build_function_and_schema

View file

@ -1,4 +1,4 @@
from langflow import CustomComponent
from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record

Some files were not shown because too many files have changed in this diff Show more