merge dev on playground

This commit is contained in:
cristhianzl 2024-05-02 13:59:52 -03:00
commit 101a045cba
90 changed files with 1802 additions and 1114 deletions

View file

@ -27,7 +27,7 @@ jobs:
# Popular action to deploy to GitHub Pages:
# Docs: https://github.com/peaceiris/actions-gh-pages#%EF%B8%8F-docusaurus
- name: Deploy to GitHub Pages
uses: peaceiris/actions-gh-pages@v3
uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
# Build output to publish to the `gh-pages` branch:

View file

@ -74,4 +74,4 @@ jobs:
push: true
file: ./build_and_push_base.Dockerfile
tags: |
logspace/langflow:base-${{ needs.release.outputs.version }}
langflowai/langflow:base-${{ needs.release.outputs.version }}

View file

@ -53,7 +53,7 @@ jobs:
run: |
make publish main=true
- name: Upload Artifact
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v4
with:
name: dist
path: dist
@ -80,15 +80,15 @@ jobs:
push: true
file: ./build_and_push.Dockerfile
tags: |
logspace/langflow:${{ needs.release.outputs.version }}
logspace/langflow:1.0-alpha
langflowai/langflow:${{ needs.release.outputs.version }}
langflowai/langflow:1.0-alpha
create_release:
name: Create Release
runs-on: ubuntu-latest
needs: [docker_build, release]
steps:
- uses: actions/download-artifact@v2
- uses: actions/download-artifact@v4
with:
name: dist
path: dist

View file

@ -52,8 +52,8 @@ jobs:
push: true
file: ./build_and_push.Dockerfile
tags: |
logspace/langflow:${{ steps.check-version.outputs.version }}
logspace/langflow:latest
langflowai/langflow:${{ steps.check-version.outputs.version }}
langflowai/langflow:latest
- name: Create Release
uses: ncipollo/release-action@v1
with:

View file

@ -26,7 +26,7 @@ jobs:
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v3
uses: actions/setup-node@v4
id: setup-node
with:
node-version: ${{ env.NODE_VERSION }}
@ -99,7 +99,7 @@ jobs:
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}

View file

@ -69,7 +69,7 @@ services:
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.middlewares=${STACK_NAME?Variable not set}-www-redirect,${STACK_NAME?Variable not set}-https-redirect
backend: &backend
image: "logspace/langflow:latest"
image: "langflowai/langflow:latest"
depends_on:
- db
- broker

View file

@ -13,7 +13,7 @@ services:
- "7860:7860"
volumes:
- ./:/app
command: bash -c "uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --loop asyncio",
command: bash -c "uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --loop asyncio"
networks:
- langflow
frontend:

View file

@ -1,3 +1,3 @@
FROM logspace/langflow:latest
FROM langflowai/langflow:latest
CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"]

View file

@ -35,7 +35,7 @@ The Docker Compose configuration spins up two services: `langflow` and `postgres
### LangFlow Service
The `langflow` service uses the `logspace/langflow:latest` Docker image and exposes port 7860. It depends on the `postgres` service.
The `langflow` service uses the `langflowai/langflow:latest` Docker image and exposes port 7860. It depends on the `postgres` service.
Environment variables:
@ -62,4 +62,4 @@ Volumes:
## Switching to a Specific LangFlow Version
If you want to use a specific version of LangFlow, you can modify the `image` field under the `langflow` service in the Docker Compose file. For example, to use version 1.0-alpha, change `logspace/langflow:latest` to `logspace/langflow:1.0-alpha`.
If you want to use a specific version of LangFlow, you can modify the `image` field under the `langflow` service in the Docker Compose file. For example, to use version 1.0-alpha, change `langflowai/langflow:latest` to `langflowai/langflow:1.0-alpha`.

View file

@ -2,7 +2,7 @@ version: "3.8"
services:
langflow:
image: logspace/langflow:latest
image: langflowai/langflow:latest
ports:
- "7860:7860"
depends_on:

View file

@ -1,3 +1,3 @@
FROM logspace/langflow:1.0-alpha
FROM langflowai/langflow:1.0-alpha
CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"]

View file

@ -2,7 +2,7 @@ version: "3.8"
services:
langflow:
image: logspace/langflow:1.0-alpha
image: langflowai/langflow:1.0-alpha
ports:
- "7860:7860"
depends_on:

View file

@ -12,7 +12,7 @@ To use a custom component, follow these steps:
<Admonition type="info" label="Tip">
For an in-depth explanation of custom components, their rules, and applications, make sure to read [Custom Component guidelines](../guidelines/custom-component).
For an in-depth explanation of custom components, their rules, and applications, make sure to read [Custom Component guidelines](../administration/custom-component).
</Admonition>
@ -57,7 +57,7 @@ The CustomComponent class serves as the foundation for creating custom component
<Admonition type="info">
Unlike Langchain types, base Python types do not add a
[handle](../guidelines/components) to the field by default. To add handles,
[handle](../administration/components) to the field by default. To add handles,
use the _`input_types`_ key in the _`build_config`_ method.
</Admonition>

View file

@ -21,7 +21,7 @@ The `PromptTemplate` component allows users to create prompts and define variabl
<Admonition type="info">
Once a variable is defined in the prompt template, it becomes a component
input of its own. Check out [Prompt
Customization](../guidelines/prompt-customization) to learn more.
Customization](../administration/prompt-customization) to learn more.
</Admonition>
- **template:** Template used to format an individual request.

View file

@ -159,7 +159,7 @@ Now, let's add the [parameters](focus://11[20:55]) and the [return type](focus:/
- _`flow_name`_ is the name of the flow we want to run.
- _`document`_ is the input document to be passed to that flow.
- Since _`Document`_ is a Langchain type, it will add an input [handle](../guidelines/components) to the component ([see more](../components/custom)).
- Since _`Document`_ is a Langchain type, it will add an input [handle](../administration/components) to the component ([see more](../components/custom)).
---
@ -242,7 +242,7 @@ class FlowRunner(CustomComponent):
```
You can load this flow using _`get_flow`_ and set a _`tweaks`_ dictionary to customize it. Find more about tweaks in our [features guidelines](../guidelines/features#code).
You can load this flow using _`get_flow`_ and set a _`tweaks`_ dictionary to customize it. Find more about tweaks in our [features guidelines](../administration/features#code).
---

View file

@ -0,0 +1,27 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import Admonition from "@theme/Admonition";
# 🤗 HuggingFace Spaces
Hugging Face provides a great alternative for running Langflow in their Spaces environment. This means you can run Langflow without any local installation required.
The first step is to go to the [Langflow Space](https://huggingface.co/spaces/Langflow/Langflow?duplicate=true) or the [Langflow 1.0 Preview Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true).
Remember to use a Chromium-based browser for the best experience. You'll be presented with the following screen:
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: "img/duplicate-space.png",
dark: "img/duplicate-space.png",
}}
style={{ width: "100%", margin: "20px auto" }}
/>
From here, just name your Space, define the visibility (Public or Private), and click on `Duplicate Space` to start the installation process. When that is done, you'll be redirected to the Space's main page to start using Langflow right away!
Once you get Langflow running, click on New Project in the top right corner of the screen. Langflow provides a range of example flows to help you get started.
To quickly try one of them, open a starter example, set up your API keys and click ⚡ Run, on the bottom right corner of the canvas. This will open up Langflow's Interaction Panel with the chat console, text inputs, and outputs.

View file

@ -0,0 +1,77 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import Admonition from "@theme/Admonition";
# 📦 Install Langflow
<Admonition type="info">
Langflow v1.0 is also available in a [HuggingFace Preview Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) if you'd rather try it out before installing locally.
</Admonition>
## Prerequisites
Langflow requires the following programs installed on your system.
* [Python 3.10](https://www.python.org/downloads/release/python-3100/)
* [pip](https://pypi.org/project/pip/) or [pipx](https://pipx.pypa.io/stable/installation/)
## Install Langflow
To install Langflow:
pip:
```bash
python -m pip install langflow -U
```
pipx:
```bash
pipx install langflow --python python3.10 --fetch-missing-python
```
Pipx can fetch the missing Python version for you with `--fetch-missing-python`, but you can also install the Python version manually.
## Install Langflow pre-release
Use `--force-reinstall` to ensure you have the latest version of Langflow and its dependencies.
To install a pre-release version of Langflow:
pip:
```bash
python -m pip install langflow --pre --force-reinstall
```
pipx:
```bash
pipx install langflow --python python3.10 --fetch-missing-python --pip-args="--pre --force-reinstall"
```
## Having a problem?
If you encounter a problem, see [Possible Installation Issues](/migration/possible-installation-issues).
To get help in the Langflow CLI:
```bash
python -m langflow --help
```
## ⛓️ Run Langflow
1. To run Langflow, enter the following command.
```bash
python -m langflow run
```
2. Confirm that a local Langflow instance starts by visiting `http://127.0.0.1:7860` in your browser.
```bash
│ Welcome to ⛓ Langflow │
│ │
│ Access http://127.0.0.1:7860 │
│ Collaborate, and contribute at our GitHub Repo 🚀 │
```
3. Continue on to the [Quickstart](./quickstart.mdx).

View file

@ -0,0 +1,10 @@
# 📚 New to LLMs?
Large Language Models, or LLMs, are part of an exciting new world in computing.
We made Langflow for anyone to create with LLMs, and hope you'll feel comfortable installing Langflow and [getting started](./quickstart.mdx).
If you want to learn more about LLMs, prompt engineering, and AI models, Langflow recommends [promptingguide.ai](https://promptingguide.ai), an open-source repository of prompt engineering content maintained by AI experts.
PromptingGuide offers content for [beginners](https://www.promptingguide.ai/introduction/basics) and [experts](https://www.promptingguide.ai/techniques/cot), as well as the latest [research papers](https://www.promptingguide.ai/papers) and [test results](https://www.promptingguide.ai/research) fueling AI's progress.
Wherever you are on your AI journey, it's helpful to keep Prompting Guide open in a tab.

View file

@ -0,0 +1,119 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import ReactPlayer from "react-player";
import Admonition from "@theme/Admonition";
# ⚡️ Quickstart
This quickstart demonstrates how to install Langflow, run it locally, build a basic prompt flow, and modify that prompt for different outcomes.
## Prerequisites
* [Python 3.10](https://www.python.org/downloads/release/python-3100/)
* [pip](https://pypi.org/project/pip/) or [pipx](https://pipx.pypa.io/stable/installation/)
* [OpenAI API key](https://platform.openai.com)
## Install Langflow
<Admonition type="info">
Langflow v1.0 is also available in a [HuggingFace Preview Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) if you'd rather try it out before installing locally. This quickstart will run there, too.
</Admonition>
1. To install Langflow, enter the following command in pip or pipx:
pip:
```bash
python -m pip install langflow -U
```
pipx:
```bash
pipx install langflow --python python3.10 --fetch-missing-python
```
Pipx can fetch the missing Python version for you with `--fetch-missing-python`, but you can also install the Python version manually.
2. Start a local Langflow instance with the Langflow CLI:
```bash
langflow run
```
Or start Langflow with Python:
```bash
python -m langflow run
```
Result:
```
│ Welcome to ⛓ Langflow │
│ │
│ Access http://127.0.0.1:7860 │
│ Collaborate, and contribute at our GitHub Repo 🚀 │
```
3. Go to `http://127.0.0.1:7860` and confirm the Langflow UI is available.
<Admonition type="info">
If you encounter a problem, see [Possible Installation Issues](/migration/possible-installation-issues).
</Admonition>
## Create the basic prompting project
Now that you have Langflow installed and running, let us formally welcome you to Langflow!👋
You will use Langflow's prompt tools to issue prompts to the OpenAI LLM.
Prompts serve as the inputs to a large language model (LLM), acting as the interface between human instructions and computational tasks.
By submitting natural language requests in a prompt to an LLM, you can obtain answers, generate text, and solve problems.
1. From the Langflow dashboard, click **New Project**.
2. Select **Basic Prompting**.
3. The **Basic Prompting** flow is created.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: "img/quickstart.png",
dark: "img/quickstart.png",
}}
style={{ width: "80%", margin: "20px auto" }}
/>
This flow allows you to chat with the **OpenAI** component via a **Prompt** component.
Examine the **Prompt** component. The **Template** field instructs the LLM to `Answer the user as if you were a pirate.`
This should be interesting...
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
1. In the **Variable Name** field, enter `openai_api_key`.
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
3. Click **Save Variable**.
## Run the basic prompting flow
1. Click the **Run** button.
The **Interaction Panel** opens, where you can converse with your bot.
2. Type a message and press Enter.
The bot responds in a markedly piratical manner!
## Modify the prompt for a different result
1. To modify your prompt results, in the **Prompt** template, click the **Template** field.
The **Edit Prompt** window opens.
2. Change `Answer the user as if you were a pirate` to a different character, perhaps `Answer the user as if you were Harold Abelson.`
3. Run the basic prompting flow again.
The response will be markedly different.
## Next steps
Well done! You've built your first prompt in Langflow. 🎉
By adding Langflow components to this prompt, you can build all sorts of interesting flows.
* [Memory chatbot](/starter-projects/memory-chatbot.mdx)
* [Blog writer](/starter-projects/blog-writer.mdx)
* [Document QA](/starter-projects/document-qa.mdx)

View file

@ -5,7 +5,9 @@ import Admonition from "@theme/Admonition";
# 👋 Welcome to Langflow
Langflow is an easy way to build from simple to complex AI applications. It is a low-code platform that allows you to integrate AI into everything you do.
Langflow is a low-code platform that allows you to integrate AI into everything you do.
Use Langflow's simple but powerful UI to build any AI application you can dream up, from simple to complex.
{" "}
@ -20,96 +22,17 @@ Langflow is an easy way to build from simple to complex AI applications. It is a
## 🚀 First steps
## Installation
- [Install Langflow](/getting-started/install-langflow) - Install and start a local Langflow server.
Make sure you have **Python 3.10** installed on your system.
- [Quickstart](/getting-started/quickstart) - Install Langflow, create a flow, and run it.
You can install **Langflow** with [pipx](https://pipx.pypa.io/stable/installation/) or with pip.
- [HuggingFace Spaces](/getting-started/huggingface-spaces) - Duplicate the Langflow preview space and try it out before you install.
Pipx can fetch the missing Python version for you, but you can also install it manually.
- [New to LLMs?](/getting-started/new-to-llms) - Learn more about LLMs, prompting, and more at [promptingguide.ai](https://promptingguide.ai).
```bash
# Remember to check if you have Python 3.10 installed
python -m pip install langflow -U
# or
pipx install langflow --python python3.10 --fetch-missing-python
```
## Learn more about Langflow 1.0
Or you can install a pre-release version using:
Learn more about the exciting changes in Langflow 1.0, and how to migrate your existing Langflow projects.
```bash
python -m pip install langflow --pre --force-reinstall
# or
pipx install langflow --python python3.10 --fetch-missing-python --pip-args="--pre --force-reinstall"
```
<Admonition type="tip">
<p>
Please, check out our [Possible Installation Issues
section](/migration/possible-installation-issues) if you encounter any
problems.
</p>
</Admonition>
We recommend using --force-reinstall to ensure you have the latest version of Langflow and its dependencies.
### ⛓️ Running Langflow
Langflow can be run in a variety of ways, including using the command-line interface (CLI) or HuggingFace Spaces.
```bash
python -m langflow run # or langflow --help
```
#### 🤗 HuggingFace Spaces
Hugging Face provides a great alternative for running Langflow in their Spaces environment. This means you can run Langflow without any local installation required.
The first step is to go to the [Langflow Space](https://huggingface.co/spaces/Langflow/Langflow?duplicate=true) or the [Langflow 1.0 Preview Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true).
Remember to use a Chromium-based browser for the best experience. You'll be presented with the following screen:
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: "img/duplicate-space.png",
dark: "img/duplicate-space.png",
}}
style={{ width: "100%", margin: "20px auto" }}
/>
From here, just name your Space, define the visibility (Public or Private), and click on `Duplicate Space` to start the installation process. When that is done, you'll be redirected to the Space's main page to start using Langflow right away!
Once you get Langflow running, click on New Project in the top right corner of the screen. Langflow provides a range of example flows to help you get started.
To quickly try one of them, open a starter example, set up your API keys and click ⚡ Run, on the bottom right corner of the canvas. This will open up Langflow's Playground with the chat console, text inputs, and outputs.
### 🖥️ Command Line Interface (CLI)
Langflow provides a command-line interface (CLI) for easy management and configuration.
#### Usage
You can run Langflow using the following command:
```bash
langflow run [OPTIONS]
```
Find more information about the available options by running:
```bash
python -m langflow --help
```
## Find out more about 1.0
<Admonition type="caution" icon="🚧" title="ZONE UNDER CONSTRUCTION">
<p>
We are currently working on updating the documentation for Langflow 1.0.
</p>
</Admonition>
To get you learning more about what's new and why you should be excited about Langflow 1.0,
go to [A new chapter for Langflow](/whats-new/a-new-chapter-langflow) and also come back often
to check out our [migration guides](/whats-new/migrating-to-one-point-zero) as we release them.
- [A new chapter for Langflow](/whats-new/a-new-chapter-langflow)
- [Migration guides](/migration/migrating-to-one-point-zero)

View file

@ -25,3 +25,16 @@ For this error to occur, two scenarios are possible:
In this case, you might not be running the correct executable.
To solve this issue, you can run the correct executable by running _`python -m langflow run`_ instead of _`langflow run`_ and if that doesn't work, you can try uninstalling langflow and reinstalling it using _`python -m pip install langflow --pre -U`_.
2. Some version conflicts might have occurred during the installation process. Run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall langflow and its dependencies.
## _`Something went wrong running migrations. Please, run 'langflow migration --fix'`_
TL;DR:
- Clear the cache by deleting the contents of the cache folder.
This folder can be found at:
- **Linux or WSL2 on Windows**: `/home/<username>/.cache/langflow/`
- **MacOS**: `/Users/<username>/Library/Caches/langflow/`
If you wish to retain your files, ensure to back them up before clearing the folder.
This error often occurs when upgrading Langflow, because the new version can't override `langflow-pre.db` in `.cache/langflow/`. Clearing the cache removes this file, but it will also erase your settings.

View file

@ -0,0 +1,83 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import ReactPlayer from "react-player";
# Basic prompting
Prompts serve as the inputs to a large language model (LLM), acting as the interface between human instructions and computational tasks.
By submitting natural language requests in a prompt to an LLM, you can obtain answers, generate text, and solve problems.
This article demonstrates how to use Langflow's prompt tools to issue basic prompts to an LLM, and how various prompting strategies can affect your outcomes.
## Prerequisites
1. Install Langflow.
```bash
python -m pip install langflow --pre
```
2. Start a local Langflow instance with the Langflow CLI:
```bash
langflow run
```
Or start Langflow with Python:
```bash
python -m langflow run
```
Result:
```
│ Welcome to ⛓ Langflow │
│ │
│ Access http://127.0.0.1:7860 │
│ Collaborate, and contribute at our GitHub Repo 🚀 │
```
Alternatively, go to [HuggingFace Spaces](https://docs.langflow.org/getting-started/hugging-face-spaces) or [Lightning.ai Studio](https://lightning.ai/ogabrielluiz-8j6t8/studios/langflow) for a pre-built Langflow test environment.
3. Create an [OpenAI API key](https://platform.openai.com).
## Create the basic prompting project
1. From the Langflow dashboard, click **New Project**.
2. Select **Basic Prompting**.
3. The **Basic Prompting** flow is created.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: "img/basic-prompting.png",
dark: "img/basic-prompting.png",
}}
style={{ width: "80%", margin: "20px auto" }}
/>
This flow allows you to chat with the **OpenAI** component via a **Prompt** component.
Examine the **Prompt** component. The **Template** field instructs the LLM to `Answer the user as if you were a pirate.`
This should be interesting...
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
1. In the **Variable Name** field, enter `openai_api_key`.
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
3. Click **Save Variable**.
## Run the basic prompting flow
1. Click the **Run** button.
The **Interaction Panel** opens, where you can converse with your bot.
2. Type a message and press Enter.
The bot responds in a markedly piratical manner!
## Modify the prompt for a different result
1. To modify your prompt results, in the **Prompt** template, click the **Template** field.
The **Edit Prompt** window opens.
2. Change `Answer the user as if you were a pirate` to a different character, perhaps `Answer the user as if you were Harold Abelson.`
3. Run the basic prompting flow again.
The response will be markedly different.

View file

@ -0,0 +1,92 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import ReactPlayer from "react-player";
import Admonition from "@theme/Admonition";
# Blog writer
Build a blog writer with OpenAI that uses URLs for reference content.
## Prerequisites
1. Install Langflow.
```bash
python -m pip install langflow --pre
```
2. Start a local Langflow instance with the Langflow CLI:
```bash
langflow run
```
Or start Langflow with Python:
```bash
python -m langflow run
```
Result:
```bash
│ Welcome to ⛓ Langflow │
│ │
│ Access http://127.0.0.1:7860 │
│ Collaborate, and contribute at our GitHub Repo 🚀 │
```
Alternatively, go to [HuggingFace Spaces](https://docs.langflow.org/getting-started/hugging-face-spaces) or [Lightning.ai Studio](https://lightning.ai/ogabrielluiz-8j6t8/studios/langflow) for a pre-built Langflow test environment.
3. Create an [OpenAI API key](https://platform.openai.com).
## Create the Blog Writer project
1. From the Langflow dashboard, click **New Project**.
2. Select **Blog Writer**.
3. The **Blog Writer** flow is created.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: "img/blog-writer.png",
dark: "img/blog-writer.png",
}}
style={{ width: "80%", margin: "20px auto" }}
/>
This flow creates a one-shot prompt flow with **Prompt**, **OpenAI**, and **Chat Output** components, and augments the flow with reference content and instructions from the **URL** and **Instructions** components.
The **Prompt** component's default **Template** field looks like this:
```bash
Reference 1:
{reference_1}
---
Reference 2:
{reference_2}
---
{instructions}
Blog:
```
The `{instructions}` value is received from the **Value** field of the **Instructions** component.
The `reference_1` and `reference_2` values are received from the **URL** fields of the **URL** components.
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
1. In the **Variable Name** field, enter `openai_api_key`.
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
3. Click **Save Variable**.
## Run the Blog Writer flow
1. Click the **Run** button.
The **Interaction Panel** opens, where you can run your one-shot flow.
2. Click the **Lightning Bolt** icon to run your flow.
3. The **OpenAI** component constructs a blog post with the **URL** items as context.
The default **URL** values are for web pages at `promptingguide.ai`, so your blog post will be about prompting LLMs.
To write about something different, change the values in the **URL** components, and see what the LLM constructs.

View file

@ -0,0 +1,82 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import ReactPlayer from "react-player";
import Admonition from "@theme/Admonition";
# Document QA
Build a question-and-answer chatbot with a document loaded from local memory.
## Prerequisites
1. Install Langflow.
```bash
python -m pip install langflow --pre
```
2. Start a local Langflow instance with the Langflow CLI:
```bash
langflow run
```
Or start Langflow with Python:
```bash
python -m langflow run
```
Result:
```
│ Welcome to ⛓ Langflow │
│ │
│ Access http://127.0.0.1:7860 │
│ Collaborate, and contribute at our GitHub Repo 🚀 │
```
Alternatively, go to [HuggingFace Spaces](https://docs.langflow.org/getting-started/hugging-face-spaces) or [Lightning.ai Studio](https://lightning.ai/ogabrielluiz-8j6t8/studios/langflow) for a pre-built Langflow test environment.
3. Create an [OpenAI API key](https://platform.openai.com).
## Create the Document QA project
1. From the Langflow dashboard, click **New Project**.
2. Select **Document QA**.
3. The **Document QA** flow is created.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: "img/document-qa.png",
dark: "img/document-qa.png",
}}
style={{ width: "80%", margin: "20px auto" }}
/>
This flow creates a basic chatbot with the **Chat Input**, **Prompt**, **OpenAI**, and **Chat Output** components.
This chatbot is augmented with the **Files** component, which loads a file from your local machine into the **Prompt** component as `{Document}`.
The **Prompt** component is instructed to answer questions based on the contents of `{Document}`.
Including a file with the prompt gives the **OpenAI** component context it may not otherwise have access to.
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
1. In the **Variable Name** field, enter `openai_api_key`.
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
3. Click **Save Variable**.
5. To select a document to load, in the **Files** component, click within the **Path** field.
1. Select a local file, and then click **Open**.
2. The file name appears in the field.
<Admonition type="tip">
The file must be of an extension type listed [here](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/base/data/utils.py#L13).
</Admonition>
## Run the Document QA flow
1. Click the **Run** button.
The **Interaction Panel** opens, where you can converse with your bot.
2. Type a message and press Enter.
For this example, we loaded an error log `.txt` file and asked, "What went wrong?"
The bot responded:
```
The issue occurred during the execution of migrations in the application. Specifically, an error was raised by the Alembic library, indicating that new upgrade operations were detected that had not been accounted for in the existing migration scripts. The operation in question involved modifying the nullable property of a column (apikey, created_at) in the database, with details about the existing type (DATETIME()), existing server default, and other properties.
```
This result indicates that the bot received the loaded document and understood the context surrounding the vague question. It also correctly identified the issue in the error log, and followed up with appropriate troubleshooting suggestions. Nice!

View file

@ -0,0 +1,99 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import ReactPlayer from "react-player";
# Memory chatbot
This flow extends the [basic prompting flow](./basic-prompting.mdx) to include chat memory for unique SessionIDs.
## Prerequisites
1. Install Langflow.
```bash
python -m pip install langflow --pre
```
2. Start a local Langflow instance with the Langflow CLI:
```bash
langflow run
```
Or start Langflow with Python:
```bash
python -m langflow run
```
Result:
```
│ Welcome to ⛓ Langflow │
│ │
│ Access http://127.0.0.1:7860 │
│ Collaborate, and contribute at our GitHub Repo 🚀 │
```
Alternatively, go to [HuggingFace Spaces](https://docs.langflow.org/getting-started/hugging-face-spaces) or [Lightning.ai Studio](https://lightning.ai/ogabrielluiz-8j6t8/studios/langflow) for a pre-built Langflow test environment.
3. Create an [OpenAI API key](https://platform.openai.com).
## Create the memory chatbot project
1. From the Langflow dashboard, click **New Project**.
2. Select **Memory Chatbot**.
3. The **Memory Chatbot** flow is created.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: "img/memory-chatbot.png",
dark: "img/memory-chatbot.png",
}}
style={{
width: "80%",
margin: "20px auto",
display: "flex",
justifyContent: "center",
}}
/>
This flow creates a basic chatbot with the **Chat Input**, **Prompt**, and **OpenAI** components.
This chatbot is augmented with the **Chat Memory** component, which stores messages submitted via **Chat Input** and prepends them to subsequent prompts to OpenAI via `{context}`.
The **Chat History** component gives the **OpenAI** component a memory of previous questions.
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
1. In the **Variable Name** field, enter `openai_api_key`.
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
3. Click **Save Variable**.
## Run the memory chatbot flow
1. Click the **Run** button.
The **Interaction Panel** opens, where you can converse with your bot.
2. Type a message and press Enter.
The bot will respond according to the template in the **Prompt** component.
3. Type more questions. In the **Outputs** log, your queries are logged in order. Up to 5 queries are stored by default. Try asking `What is the first subject I asked you about?` to see where the LLM's memory disappears.
## Modify the Session ID field to have multiple conversations
`SessionID` is a unique identifier in Langchain for a conversation session between a chatbot and a client.
A `SessionID` is created when a conversation is initiated, and then associated with all subsequent messages during that session.
In the **Memory Chatbot** flow you created, the **Chat Memory** component references past interactions with **Chat Input** by **Session ID**.
You can demonstrate this by modifying the **Session ID** value to switch between conversation histories.
1. In the **Session ID** field of the **Chat Memory** and **Chat Input** components, change the **Session ID** value from `MySessionID` to `AnotherSessionID`.
2. Click the **Run** button to run your flow.
In the **Interaction Panel**, you will have a new conversation. (You may need to clear the cache with the **Eraser** button).
3. Type a few questions to your bot.
4. In the **Session ID** field of the **Chat Memory** and **Chat Input** components, change the **Session ID** value back to `MySessionID`.
5. Run your flow.
The **Outputs** log of the **Interaction Panel** displays the history from your initial chat with `MySessionID`.
## Store Session ID as a Langflow variable
To store **Session ID** as a Langflow variable, in the **Session ID** field, click the **Globe** button, and then click **Add New Variable**.
1. In the **Variable Name** field, enter a name like `customer_chat_emea`.
2. In the **Value** field, enter a value like `1B5EBD79-6E9C-4533-B2C8-7E4FF29E983B`.
3. Click **Save Variable**.
4. Apply this variable to **Chat Input**.

View file

@ -61,11 +61,11 @@ We wanted to create start projects that would help you learn about new features
For now, we have:
- **[Basic Prompting (Hello, World)](/guides/basic-prompting)**: A simple flow that shows you how to use the Prompt Component and how to talk like a pirate.
- **[Vector Store RAG](/guides/rag-with-astradb)**: A flow that shows you how to ingest data into a Vector Store and then use it to run a RAG application.
- **[Memory Chatbot](/guides/memory-chatbot)**: This one shows you how to create a simple chatbot that can remember things about the user.
- **[Document QA](/guides/document-qa)**: This flow shows you how to build a simple flow that helps you get answers about a document.
- **[Blog Writer](/guides/blog-writer)**: Shows you how you can expand on the Prompt variables and be creative about what inputs you add to it.
- **[Basic Prompting (Hello, World)](/starter-projects/basic-prompting)**: A simple flow that shows you how to use the Prompt Component and how to talk like a pirate.
- **[Vector Store RAG](/tutorials/rag-with-astradb)**: A flow that shows you how to ingest data into a Vector Store and then use it to run a RAG application.
- **[Memory Chatbot](/starter-projects/memory-chatbot)**: This one shows you how to create a simple chatbot that can remember things about the user.
- **[Document QA](/starter-projects/document-qa)**: This flow shows you how to build a simple flow that helps you get answers about a document.
- **[Blog Writer](/starter-projects/blog-writer)**: Shows you how you can expand on the Prompt variables and be creative about what inputs you add to it.
As always, your feedback is invaluable, so please let us know what you think of the new starter projects and what you would like to see in the future.

View file

@ -2,74 +2,49 @@ module.exports = {
docs: [
{
type: "category",
label: " Getting Started",
label: "What's New?",
collapsed: false,
items: [
"whats-new/a-new-chapter-langflow"
],
},
{
type: "category",
label: "Getting Started",
collapsed: false,
items: [
"index",
"getting-started/cli",
// "guides/basic-prompting",
// "guides/document-qa",
// "guides/blog-writer",
// "guides/memory-chatbot",
"guides/rag-with-astradb",
"getting-started/install-langflow",
"getting-started/quickstart",
"getting-started/huggingface-spaces",
"getting-started/new-to-llms"
],
},
{
type: "category",
label: " What's New",
label: "Starter Projects",
collapsed: false,
items: [
"whats-new/a-new-chapter-langflow",
"whats-new/migrating-to-one-point-zero",
"starter-projects/basic-prompting",
"starter-projects/blog-writer",
"starter-projects/document-qa",
"starter-projects/memory-chatbot"
],
},
{
type: "category",
label: " Migration Guides",
label: "Administration",
collapsed: false,
items: [
"migration/possible-installation-issues",
// "migration/flow-of-data",
"migration/inputs-and-outputs",
// "migration/supported-frameworks",
// "migration/sidebar-and-interaction-panel",
// "migration/new-categories-and-components",
"migration/text-and-record",
// "migration/custom-component",
"migration/compatibility",
// "migration/multiple-flows",
// "migration/component-status-and-data-passing",
// "migration/connecting-output-components",
// "migration/renaming-and-editing-components",
// "migration/passing-tweaks-and-inputs",
"migration/global-variables",
// "migration/experimental-components",
// "migration/state-management",
"administration/login",
"administration/api",
"administration/cli",
"administration/components",
"administration/collection",
"administration/prompt-customization",
"administration/langfuse_integration"
],
},
{
type: "category",
label: "Guidelines",
collapsed: false,
items: [
"guidelines/login",
"guidelines/api",
"guidelines/components",
// "guidelines/features",
"guidelines/collection",
"guidelines/prompt-customization",
// "guidelines/chat-interface",
// "guidelines/chat-widget",
// "guidelines/custom-component",
],
},
{
type: "category",
label: "Extended Components",
collapsed: false,
items: ["guides/langfuse_integration"],
},
{
type: "category",
label: "Core Components",
@ -82,45 +57,69 @@ module.exports = {
"components/helpers",
"components/vector-stores",
"components/embeddings",
"components/custom"
],
},
{
type: "category",
label: "Extended Components",
collapsed: false,
collapsed: true,
items: [
"components/agents",
"components/chains",
"components/loaders",
"components/experimental",
"components/utilities",
"components/memories",
"components/model_specs",
"components/retrievers",
"components/text-splitters",
"components/toolkits",
"components/tools",
"components/tools"
],
},
{
type: "category",
label: "Example Components",
collapsed: true,
items: [
"examples/flow-runner",
"examples/conversation-chain",
"examples/buffer-memory",
"examples/csv-loader",
"examples/searchapi-tool",
"examples/serp-api-tool",
"examples/python-function"
],
},
{
type: "category",
label: "Migration Guides",
collapsed: false,
items: [
"migration/possible-installation-issues",
"migration/migrating-to-one-point-zero",
"migration/inputs-and-outputs",
"migration/text-and-record",
"migration/compatibility",
"migration/global-variables"
]
},
{
type: "category",
label: "Tutorials",
collapsed: true,
items: [
"tutorials/chatprompttemplate_guide",
"tutorials/loading_document",
"tutorials/rag-with-astradb"
],
},
// {
// type: "category",
// label: "Examples",
// collapsed: false,
// items: [
// // "examples/flow-runner",
// // "examples/conversation-chain",
// // "examples/buffer-memory",
// // "examples/csv-loader",
// // "examples/searchapi-tool",
// // "examples/serp-api-tool",
// // "examples/python-function",
// ],
// },
{
type: "category",
label: "Deployment",
collapsed: false,
items: ["deployment/gcp-deployment"],
collapsed: true,
items: [
"deployment/gcp-deployment"
],
},
{
type: "category",
@ -129,7 +128,7 @@ module.exports = {
items: [
"contributing/how-contribute",
"contributing/github-issues",
"contributing/community",
"contributing/community"
],
},
],

BIN
docs/static/img/basic-prompting.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 271 KiB

BIN
docs/static/img/blog-writer.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 524 KiB

BIN
docs/static/img/document-qa.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 126 KiB

BIN
docs/static/img/memory-chatbot.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 271 KiB

BIN
docs/static/img/quickstart.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 486 KiB

1092
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "1.0.0a26"
version = "1.0.0a30"
description = "A Python package with a built-in web application"
authors = ["Langflow <contact@langflow.org>"]
maintainers = [

View file

@ -1,4 +1,4 @@
FROM logspace/backend_build as backend_build
FROM langflowai/backend_build as backend_build
FROM python:3.10-slim
WORKDIR /app

View file

@ -0,0 +1,59 @@
"""Fix nullable
Revision ID: 6e7b581b5648
Revises: 58b28437a398
Create Date: 2024-04-30 09:17:45.024688
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = "6e7b581b5648"
down_revision: Union[str, None] = "58b28437a398"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Make ``apikey.created_at`` NOT NULL if it is currently nullable.

    The column is inspected first so the migration is a no-op (and safe to
    re-run) when the column is already non-nullable or missing entirely.
    """
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)  # type: ignore
    # ### commands auto generated by Alembic - please adjust! ###
    columns = inspector.get_columns("apikey")
    column_names = {column["name"]: column for column in columns}
    # Look the column up safely: the original list-comprehension + [0] raised
    # IndexError *before* the "created_at in column_names" guard could run.
    created_at_column = column_names.get("created_at")
    with op.batch_alter_table("apikey", schema=None) as batch_op:
        if created_at_column is not None and created_at_column.get("nullable"):
            batch_op.alter_column(
                "created_at",
                existing_type=sa.DATETIME(),
                nullable=False,
                existing_server_default=sa.text("(CURRENT_TIMESTAMP)"),  # type: ignore
            )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert ``apikey.created_at`` to nullable if it is currently NOT NULL.

    Mirrors ``upgrade``: inspects the column first so the operation is
    idempotent and safe when the column is already nullable or missing.
    """
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)  # type: ignore
    columns = inspector.get_columns("apikey")
    column_names = {column["name"]: column for column in columns}
    # ### commands auto generated by Alembic - please adjust! ###
    # Safe lookup instead of [...][0], which raised IndexError when the
    # column was absent (before the membership check was ever evaluated).
    created_at_column = column_names.get("created_at")
    with op.batch_alter_table("apikey", schema=None) as batch_op:
        if created_at_column is not None and not created_at_column.get("nullable"):
            batch_op.alter_column(
                "created_at",
                existing_type=sa.DATETIME(),
                nullable=True,
                existing_server_default=sa.text("(CURRENT_TIMESTAMP)"),  # type: ignore
            )
    # ### end Alembic commands ###

View file

@ -26,7 +26,7 @@ class BuildStatus(Enum):
class TweaksRequest(BaseModel):
tweaks: Optional[Dict[str, Dict[str, str]]] = Field(default_factory=dict)
tweaks: Optional[Dict[str, Dict[str, Any]]] = Field(default_factory=dict)
class UpdateTemplateRequest(BaseModel):

View file

@ -0,0 +1,55 @@
from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record
from langflow.field_typing import Text
class TextOperatorComponent(CustomComponent):
    """Conditional gate: passes ``input_text`` through unchanged when it matches
    ``match_text`` under the selected operator, otherwise stops this branch of
    the flow via ``self.stop()``.
    """

    display_name = "Text Operator"
    description = "Compares two text inputs based on a specified condition such as equality or inequality, with optional case sensitivity."

    def build_config(self) -> dict:
        # UI field metadata; "options" constrains the operator dropdown.
        return {
            "input_text": {
                "display_name": "Input Text",
                "info": "The primary text input for the operation.",
            },
            "match_text": {
                "display_name": "Match Text",
                "info": "The text input to compare against.",
            },
            "operator": {
                "display_name": "Operator",
                "info": "The operator to apply for comparing the texts.",
                "options": ["equals", "not equals", "contains", "starts with", "ends with"],
            },
            "case_sensitive": {
                "display_name": "Case Sensitive",
                "info": "If true, the comparison will be case sensitive.",
                "field_type": "bool",
                "default": False,
            },
        }

    def build(self, input_text: Text, match_text: Text, operator: Text, case_sensitive: bool = False) -> Text:
        """Compare the texts and return ``input_text`` (unmodified) on a match.

        Raises:
            ValueError: if either ``input_text`` or ``match_text`` is empty.
        """
        if not input_text or not match_text:
            raise ValueError("Both 'input_text' and 'match_text' must be provided and non-empty.")

        # Compare on normalized copies so the pass-through payload stays intact.
        # (The previous implementation lowercased input_text in place and then
        # returned the lowercased text when case_sensitive was False.)
        left, right = input_text, match_text
        if not case_sensitive:
            left, right = left.lower(), right.lower()

        comparisons = {
            "equals": lambda: left == right,
            "not equals": lambda: left != right,
            "contains": lambda: right in left,
            "starts with": lambda: left.startswith(right),
            "ends with": lambda: left.endswith(right),
        }
        # Unknown operators fall back to False, matching the original behavior
        # of stopping the branch when no comparison succeeds.
        result = comparisons.get(operator, lambda: False)()

        if not result:
            # No match: deactivate this branch so downstream vertices do not run.
            self.stop()
        self.status = f"{result} \n\n {input_text}"
        return input_text

View file

@ -0,0 +1,25 @@
from langflow.interface.custom.custom_component import CustomComponent
from langflow.field_typing import Text
class CombineTextsUnsortedComponent(CustomComponent):
    """Joins a list of text inputs into a single string using a delimiter."""

    display_name = "Combine Texts (Unsorted)"
    description = "Concatenate text sources into a single text chunk using a specified delimiter."
    icon = "merge"

    def build_config(self):
        return {
            "texts": {
                "display_name": "Texts",
                # Fixed: the previous info described "the first text input",
                # a copy-paste leftover — this field accepts a list of texts.
                "info": "The list of text inputs to concatenate.",
            },
            "delimiter": {
                "display_name": "Delimiter",
                "info": "A string used to separate the text inputs. Defaults to a whitespace.",
            },
        }

    def build(self, texts: list[str], delimiter: str = " ") -> Text:
        """Join ``texts`` with ``delimiter``, surface it as status, and return it."""
        combined = delimiter.join(texts)
        self.status = combined
        return combined

View file

@ -0,0 +1,30 @@
from langchain_core.messages import BaseMessage
from langchain_core.prompts import PromptTemplate
from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, Text
class ShouldRunNextComponent(CustomComponent):
    """Asks an LLM a yes/no question about ``context`` and, on "no", stops the
    current branch of the flow. Returns ``context`` unchanged either way.
    """

    display_name = "Should Run Next"
    description = "Determines if a vertex is runnable."

    def build(self, llm: BaseLanguageModel, question: str, context: str, retries: int = 3) -> Text:
        """Query ``llm`` up to ``retries`` times until it answers "yes" or "no".

        On a non-conforming answer the retry prompt is annotated via the
        ``{error_message}`` slot so the model is told to answer yes/no.
        """
        template = "Given the following question and the context below, answer with a yes or no.\n\n{error_message}\n\nQuestion: {question}\n\nContext: {context}\n\nAnswer:"
        prompt = PromptTemplate.from_template(template)
        chain = prompt | llm

        error_message = ""
        # Initialize so `content` is always bound, even if the LLM returns an
        # unexpected type on every attempt (previously a possible NameError).
        content: str = ""
        for _ in range(retries):
            result = chain.invoke(dict(question=question, context=context, error_message=error_message))
            if isinstance(result, BaseMessage):
                content = result.content
            elif isinstance(result, str):
                content = result
            if isinstance(content, str) and content.lower().strip() in ["yes", "no"]:
                break
            # Feed corrective feedback into the next attempt; the template's
            # {error_message} placeholder was never populated before, so every
            # retry sent an identical prompt.
            error_message = "Your previous answer was not a plain 'yes' or 'no'. Answer with exactly 'yes' or 'no'."

        condition = str(content).lower().strip() == "yes"
        self.status = f"Should Run Next: {condition}"
        if condition is False:
            # "no" (or no valid answer): deactivate downstream vertices.
            self.stop()
        return context

View file

@ -232,3 +232,4 @@ output_parsers:
custom_components:
CustomComponent:
documentation: "https://docs.langflow.org/guidelines/custom-component"
# documentation: "https://docs.langflow.org/administration/custom-component"

View file

@ -3,7 +3,7 @@ import uuid
from collections import defaultdict, deque
from functools import partial
from itertools import chain
from typing import TYPE_CHECKING, Callable, Coroutine, Dict, Generator, List, Optional, Type, Union
from typing import TYPE_CHECKING, Callable, Coroutine, Dict, Generator, List, Optional, Tuple, Type, Union
from loguru import logger
@ -14,7 +14,7 @@ from langflow.graph.graph.state_manager import GraphStateManager
from langflow.graph.graph.utils import process_flow
from langflow.graph.schema import InterfaceComponentTypes, RunOutputs
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import ChatVertex, FileToolVertex, LLMVertex, RoutingVertex, StateVertex, ToolkitVertex
from langflow.graph.vertex.types import ChatVertex, FileToolVertex, LLMVertex, StateVertex, ToolkitVertex
from langflow.interface.tools.constants import FILE_TOOLS
from langflow.schema import Record
from langflow.schema.schema import INPUT_FIELD_NAME, InputType
@ -75,7 +75,7 @@ class Graph:
self.vertices: List[Vertex] = []
self.run_manager = RunnableVerticesManager()
self._build_graph()
self.build_graph_maps()
self.build_graph_maps(self.edges)
self.define_vertices_lists()
self.state_manager = GraphStateManager()
@ -130,6 +130,18 @@ class Graph:
):
vertices_ids.append(vertex_id)
successors = self.get_all_successors(vertex, flat=True)
# Update run_manager.run_predecessors because we are activating vertices
# The run_predecessors is the predecessor map of the vertices
# we remove the vertex_id from the predecessor map whenever we run a vertex
# So we need to get all edges of the vertex and successors
# and run self.build_adjacency_maps(edges) to get the new predecessor map
# that is not complete but we can use to update the run_predecessors
edges_set = set()
for vertex in [vertex] + successors:
edges_set.update(vertex.edges)
edges = list(edges_set)
new_predecessor_map, _ = self.build_adjacency_maps(edges)
self.run_manager.run_predecessors.update(new_predecessor_map)
self.vertices_to_run.update(list(map(lambda x: x.id, successors)))
self.activated_vertices = vertices_ids
self.vertices_to_run.update(vertices_ids)
@ -401,14 +413,20 @@ class Graph:
"inactivated_vertices": self.inactivated_vertices,
}
def build_graph_maps(self):
def build_graph_maps(self, edges: Optional[List[ContractEdge]] = None, vertices: Optional[List[Vertex]] = None):
"""
Builds the adjacency maps for the graph.
"""
self.predecessor_map, self.successor_map = self.build_adjacency_maps()
if edges is None:
edges = self.edges
self.in_degree_map = self.build_in_degree()
self.parent_child_map = self.build_parent_child_map()
if vertices is None:
vertices = self.vertices
self.predecessor_map, self.successor_map = self.build_adjacency_maps(edges)
self.in_degree_map = self.build_in_degree(edges)
self.parent_child_map = self.build_parent_child_map(vertices)
def reset_inactivated_vertices(self):
"""
@ -433,9 +451,9 @@ class Graph:
for child_id in self.parent_child_map[vertex_id]:
self.mark_branch(child_id, state)
def build_parent_child_map(self):
def build_parent_child_map(self, vertices: List[Vertex]):
parent_child_map = defaultdict(list)
for vertex in self.vertices:
for vertex in vertices:
parent_child_map[vertex.id] = [child.id for child in self.get_successors(vertex)]
return parent_child_map
@ -559,6 +577,7 @@ class Graph:
self.update_vertex_from_another(self_vertex, other_vertex)
self.build_graph_maps()
self.define_vertices_lists()
self.increment_update_count()
return self
@ -944,8 +963,6 @@ class Graph:
node_name = node_id.split("-")[0]
if node_name in ["ChatOutput", "ChatInput"]:
return ChatVertex
elif node_name in ["ShouldRunNext"]:
return RoutingVertex
elif node_name in ["SharedState", "Notify", "Listen"]:
return StateVertex
elif node_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
@ -1277,17 +1294,17 @@ class Graph:
def remove_from_predecessors(self, vertex_id: str):
self.run_manager.remove_from_predecessors(vertex_id)
def build_in_degree(self):
in_degree = defaultdict(int)
for edge in self.edges:
def build_in_degree(self, edges: List[ContractEdge]) -> Dict[str, int]:
in_degree: Dict[str, int] = defaultdict(int)
for edge in edges:
in_degree[edge.target_id] += 1
return in_degree
def build_adjacency_maps(self):
def build_adjacency_maps(self, edges: List[ContractEdge]) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]:
"""Returns the adjacency maps for the graph."""
predecessor_map = defaultdict(list)
successor_map = defaultdict(list)
for edge in self.edges:
for edge in edges:
predecessor_map[edge.target_id].append(edge.source_id)
successor_map[edge.source_id].append(edge.target_id)
return predecessor_map, successor_map

View file

@ -15,7 +15,6 @@ from langflow.interface.wrappers.base import wrapper_creator
from langflow.utils.lazy_load import LazyLoadDictBase
CHAT_COMPONENTS = ["ChatInput", "ChatOutput", "TextInput", "SessionID"]
ROUTING_COMPONENTS = ["ShouldRunNext"]
class VertexTypesDict(LazyLoadDictBase):
@ -51,7 +50,6 @@ class VertexTypesDict(LazyLoadDictBase):
**{t: types.CustomComponentVertex for t in custom_component_creator.to_list()},
**{t: types.RetrieverVertex for t in retriever_creator.to_list()},
**{t: types.ChatVertex for t in CHAT_COMPONENTS},
**{t: types.RoutingVertex for t in ROUTING_COMPONENTS},
}
def get_custom_component_vertex_type(self):

View file

@ -15,6 +15,7 @@ class RunnableVerticesManager:
def is_vertex_runnable(self, vertex_id: str) -> bool:
"""Determines if a vertex is runnable."""
return vertex_id in self.vertices_to_run and not self.run_predecessors.get(vertex_id)
def find_runnable_predecessors_for_successors(self, vertex_id: str) -> List[str]:

View file

@ -72,7 +72,6 @@ class Vertex:
self.load_from_db_fields: List[str] = []
self.parent_is_top_level = False
self.layer = None
self.should_run = True
self.result: Optional[ResultData] = None
try:
self.is_interface_component = self.vertex_type in InterfaceComponentTypes

View file

@ -1,6 +1,7 @@
import ast
import json
from typing import AsyncIterator, Callable, Dict, Iterator, List, Optional, Union
import yaml
from langchain_core.messages import AIMessage
from loguru import logger
@ -438,41 +439,6 @@ class ChatVertex(Vertex):
return self.vertex_type == InterfaceComponentTypes.ChatInput and self.is_input
class RoutingVertex(Vertex):
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="custom_components")
self.use_result = True
self.steps = [self._build]
def _built_object_repr(self):
if self.artifacts and "repr" in self.artifacts:
return self.artifacts["repr"] or super()._built_object_repr()
return super()._built_object_repr()
@property
def successors_ids(self):
if isinstance(self._built_object, bool):
ids = super().successors_ids
if self._built_object:
return ids
return []
raise ValueError("RoutingVertex should return a boolean value.")
def _run(self, *args, **kwargs):
if self._built_object:
condition = self._built_object.get("condition")
result = self._built_object.get("result")
if condition is None:
raise ValueError("Condition is required for the routing vertex.")
if result is None:
raise ValueError("Result is required for the routing vertex.")
if condition is True:
self._built_result = result
else:
self.graph.mark_branch(self.id, "INACTIVE")
self._built_result = None
class StateVertex(Vertex):
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="custom_components")

View file

@ -87,6 +87,14 @@ class CustomComponent(Component):
except Exception as e:
raise ValueError(f"Error updating state: {e}")
def stop(self):
if not self.vertex:
raise ValueError("Vertex is not set")
try:
self.graph.mark_branch(self.vertex.id, "INACTIVE")
except Exception as e:
raise ValueError(f"Error stopping {self.display_name}: {e}")
def append_state(self, name: str, value: Any):
if not self.vertex:
raise ValueError("Vertex is not set")

View file

@ -53,6 +53,7 @@ def get_lifespan(fix_migration=False, socketio_server=None):
except Exception as exc:
if "langflow migration --fix" not in str(exc):
logger.error(exc)
raise
# Shutdown message
rprint("[bold red]Shutting down Langflow...[/bold red]")
teardown_services()

View file

@ -3,6 +3,8 @@ from pathlib import Path
from typing import List, Optional, Union
from dotenv import load_dotenv
from loguru import logger
from langflow.graph import Graph
from langflow.graph.schema import RunOutputs
from langflow.processing.process import process_tweaks, run_graph
@ -101,6 +103,12 @@ def run_flow_from_json(
List[RunOutputs]: A list of RunOutputs objects representing the results of running the flow.
"""
# Set all streaming to false
try:
import nest_asyncio # type: ignore
nest_asyncio.apply()
except Exception as e:
logger.warning(f"Could not apply nest_asyncio: {e}")
if tweaks is None:
tweaks = {}
tweaks["stream"] = False

View file

@ -1,5 +1,6 @@
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from langchain.agents import AgentExecutor
from langchain.schema import AgentAction
from loguru import logger
@ -13,6 +14,7 @@ from langflow.schema.graph import InputValue, Tweaks
from langflow.schema.schema import INPUT_FIELD_NAME
from langflow.services.session.service import SessionService
if TYPE_CHECKING:
from langflow.api.v1.schemas import InputValueRequest

View file

@ -1,7 +1,8 @@
from typing import List, Optional, Union
from typing import Any, List, Optional, Union
from pydantic import BaseModel, Field, RootModel
from langflow.schema.schema import InputType
from pydantic import BaseModel, Field, RootModel
class InputValue(BaseModel):
@ -14,7 +15,7 @@ class InputValue(BaseModel):
class Tweaks(RootModel):
root: dict[str, Union[str, dict[str, str]]] = Field(
root: dict[str, Union[str, dict[str, Any]]] = Field(
description="A dictionary of tweaks to adjust the flow's execution. Allows customizing flow behavior dynamically. All tweaks are overridden by the input values.",
)
model_config = {

View file

@ -22,7 +22,7 @@ class ApiKeyBase(SQLModel):
class ApiKey(ApiKeyBase, table=True):
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
created_at: datetime = Field(
created_at: Optional[datetime] = Field(
default=None, sa_column=Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
)
api_key: str = Field(index=True, unique=True)

View file

@ -25,7 +25,7 @@ class Variable(VariableBase, table=True):
description="Unique ID for the variable",
)
# name is unique per user
created_at: datetime = Field(
created_at: Optional[datetime] = Field(
default=None,
sa_column=Column(DateTime(timezone=True), server_default=func.now(), nullable=True),
description="Creation time of the variable",

View file

@ -133,7 +133,7 @@ class DatabaseService(Service):
alembic_cfg = Config(stdout=buffer)
# alembic_cfg.attributes["connection"] = session
alembic_cfg.set_main_option("script_location", str(self.script_location))
alembic_cfg.set_main_option("sqlalchemy.url", self.database_url.replace('%', '%%'))
alembic_cfg.set_main_option("sqlalchemy.url", self.database_url.replace("%", "%%"))
should_initialize_alembic = False
with Session(self.engine) as session:
@ -170,9 +170,7 @@ class DatabaseService(Service):
except util.exc.AutogenerateDiffsDetected as exc:
logger.error(f"AutogenerateDiffsDetected: {exc}")
if not fix:
raise RuntimeError(
"Something went wrong running migrations. Please, run `langflow migration --fix`"
) from exc
raise RuntimeError(f"There's a mismatch between the models and the database.\n{exc}")
if fix:
self.try_downgrade_upgrade_until_success(alembic_cfg)

View file

@ -101,10 +101,16 @@ def add_row_to_table(
conn.execute(insert_sql, values)
except Exception as e:
# Log values types
column_error_message = ""
for key, value in validated_dict.items():
logger.error(f"{key}: {type(value)}")
if value in str(e):
column_error_message = f"Column: {key} Value: {value} Error: {e}"
logger.error(f"Error adding row to table: {e}")
if column_error_message:
logger.error(f"Error adding row to {table_name}: {column_error_message}")
else:
logger.error(f"Error adding row to {table_name}: {e}")
async def log_message(

View file

@ -517,13 +517,13 @@ test-randomorder = ["pytest-randomly"]
[[package]]
name = "dataclasses-json"
version = "0.6.4"
version = "0.6.5"
description = "Easily serialize dataclasses to and from JSON."
optional = false
python-versions = ">=3.7,<4.0"
python-versions = "<4.0,>=3.7"
files = [
{file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"},
{file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"},
{file = "dataclasses_json-0.6.5-py3-none-any.whl", hash = "sha256:f49c77aa3a85cac5bf5b7f65f4790ca0d2be8ef4d92c75e91ba0103072788a39"},
{file = "dataclasses_json-0.6.5.tar.gz", hash = "sha256:1c287594d9fcea72dc42d6d3836cf14848c2dc5ce88f65ed61b36b57f515fe26"},
]
[package.dependencies]
@ -652,13 +652,13 @@ test = ["pytest (>=6)"]
[[package]]
name = "fastapi"
version = "0.110.2"
version = "0.110.3"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
files = [
{file = "fastapi-0.110.2-py3-none-any.whl", hash = "sha256:239403f2c0a3dda07a9420f95157a7f014ddb2b770acdbc984f9bdf3ead7afdb"},
{file = "fastapi-0.110.2.tar.gz", hash = "sha256:b53d673652da3b65e8cd787ad214ec0fe303cad00d2b529b86ce7db13f17518d"},
{file = "fastapi-0.110.3-py3-none-any.whl", hash = "sha256:fd7600612f755e4050beb74001310b5a7e1796d149c2ee363124abdfa0289d32"},
{file = "fastapi-0.110.3.tar.gz", hash = "sha256:555700b0159379e94fdbfc6bb66a0f1c43f4cf7060f25239af3d84b63a656626"},
]
[package.dependencies]
@ -667,7 +667,7 @@ starlette = ">=0.37.2,<0.38.0"
typing-extensions = ">=4.8.0"
[package.extras]
all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
[[package]]
name = "frozenlist"
@ -1064,19 +1064,19 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
[[package]]
name = "langchain-community"
version = "0.0.34"
version = "0.0.35"
description = "Community contributed LangChain integrations."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_community-0.0.34-py3-none-any.whl", hash = "sha256:bc13b21a44bbfca01bff8b35c10a26d71485b57c1d284f499b577ba6e1a5d84a"},
{file = "langchain_community-0.0.34.tar.gz", hash = "sha256:96e9a807d9b4777820df5a970996f6bf3ad5632137bf0f4d863bd832bdeb2b0f"},
{file = "langchain_community-0.0.35-py3-none-any.whl", hash = "sha256:296c47dcddf8c3c565f41240dc21421620f309ae24db762a5bdaf0c19cbb01ef"},
{file = "langchain_community-0.0.35.tar.gz", hash = "sha256:0f8726d9f8e1f369ae1b0c7ec738403063009a78ecb58860d21e5388e238ff0c"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
dataclasses-json = ">=0.5.7,<0.7"
langchain-core = ">=0.1.45,<0.2.0"
langchain-core = ">=0.1.47,<0.2.0"
langsmith = ">=0.1.0,<0.2.0"
numpy = ">=1,<2"
PyYAML = ">=5.3"
@ -1086,17 +1086,17 @@ tenacity = ">=8.1.0,<9.0.0"
[package.extras]
cli = ["typer (>=0.9.0,<0.10.0)"]
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", 
"requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz 
(>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
[[package]]
name = "langchain-core"
version = "0.1.45"
version = "0.1.47"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_core-0.1.45-py3-none-any.whl", hash = "sha256:91eff20de0bcf5f025e1d8c4582cb597a9c17527965eb03b314486e7c834e7df"},
{file = "langchain_core-0.1.45.tar.gz", hash = "sha256:526532c1af279a9e2debe7a4e143ba6e980cf90b5ab2e0991c2230ee04c693e2"},
{file = "langchain_core-0.1.47-py3-none-any.whl", hash = "sha256:ebf12ca25cbdfedd8a61dbdb60f47283bb1bdfc39b5f01d3b76bb36fdbe4a1e8"},
{file = "langchain_core-0.1.47.tar.gz", hash = "sha256:d97d6927a4b22acbc2d0e731b3580890551256fa5dde775ef6beb72beb1a6015"},
]
[package.dependencies]
@ -1162,13 +1162,13 @@ types-requests = ">=2.31.0.2,<3.0.0.0"
[[package]]
name = "langsmith"
version = "0.1.50"
version = "0.1.52"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langsmith-0.1.50-py3-none-any.whl", hash = "sha256:a81e9809fcaa277bfb314d729e58116554f186d1478fcfdf553b1c2ccce54b85"},
{file = "langsmith-0.1.50.tar.gz", hash = "sha256:9fd22df8c689c044058536ea5af66f5302067e7551b60d7a335fede8d479572b"},
{file = "langsmith-0.1.52-py3-none-any.whl", hash = "sha256:4518e269b9a0e10197550f050b6518d1276fe68732f7b8579b3e1302b8471d29"},
{file = "langsmith-0.1.52.tar.gz", hash = "sha256:f767fddb13c794bea7cc827a77f050a8a1c075ab1d997eb37849b975b0eef1b0"},
]
[package.dependencies]
@ -1196,173 +1196,95 @@ dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptio
[[package]]
name = "lxml"
version = "5.2.1"
version = "4.9.2"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
optional = false
python-versions = ">=3.6"
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
files = [
{file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"},
{file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"},
{file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"},
{file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"},
{file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"},
{file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"},
{file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"},
{file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"},
{file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"},
{file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"},
{file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"},
{file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"},
{file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"},
{file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"},
{file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"},
{file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"},
{file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"},
{file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"},
{file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"},
{file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"},
{file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"},
{file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"},
{file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"},
{file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"},
{file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"},
{file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"},
{file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"},
{file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"},
{file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"},
{file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"},
{file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"},
{file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"},
{file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"},
{file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"},
{file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"},
{file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"},
{file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"},
{file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"},
{file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"},
{file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"},
{file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"},
{file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"},
{file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"},
{file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"},
{file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"},
{file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"},
{file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"},
{file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"},
{file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"},
{file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"},
{file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"},
{file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"},
{file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"},
{file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"},
{file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"},
{file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"},
{file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"},
{file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"},
{file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"},
{file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"},
{file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"},
{file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"},
{file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"},
{file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"},
{file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"},
{file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"},
{file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"},
{file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"},
{file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"},
{file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"},
{file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"},
{file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"},
{file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"},
{file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"},
{file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"},
{file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"},
{file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"},
{file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"},
{file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"},
{file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"},
{file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"},
{file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"},
{file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"},
{file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"},
{file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"},
{file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"},
{file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"},
{file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"},
{file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"},
{file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"},
{file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"},
{file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"},
{file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"},
{file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"},
{file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"},
{file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"},
{file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"},
{file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"},
{file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"},
{file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"},
{file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"},
{file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"},
{file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"},
{file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"},
{file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"},
{file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"},
{file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"},
{file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"},
{file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"},
{file = "lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"},
{file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"},
{file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"},
{file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"},
{file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"},
{file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"},
{file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"},
{file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"},
{file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"},
{file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"},
{file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"},
{file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"},
{file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"},
{file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"},
{file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"},
{file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"},
{file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"},
{file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"},
{file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"},
{file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"},
{file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"},
{file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"},
{file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"},
{file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"},
{file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"},
{file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"},
{file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"},
{file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"},
{file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"},
{file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"},
{file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"},
{file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"},
{file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"},
{file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"},
{file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"},
{file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"},
{file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"},
{file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"},
{file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"},
{file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"},
{file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"},
{file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"},
{file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"},
{file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"},
{file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"},
{file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"},
{file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"},
{file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"},
{file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"},
{file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"},
{file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"},
{file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"},
{file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"},
{file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"},
{file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"},
{file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"},
{file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"},
{file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"},
{file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"},
{file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"},
{file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"},
{file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"},
{file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"},
{file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"},
{file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"},
{file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"},
{file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"},
{file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"},
{file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"},
{file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"},
{file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"},
{file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"},
{file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"},
{file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"},
{file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"},
{file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"},
{file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"},
{file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"},
{file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"},
{file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"},
{file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"},
{file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"},
{file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"},
{file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"},
{file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"},
{file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"},
{file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"},
{file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"},
{file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"},
{file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"},
]
[package.extras]
cssselect = ["cssselect (>=0.7)"]
html-clean = ["lxml-html-clean"]
html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=0.29.7)"]
[[package]]
name = "mako"
@ -2160,18 +2082,18 @@ six = ">=1.5"
[[package]]
name = "python-docx"
version = "1.1.1"
description = "Create, read, and update Microsoft Word .docx files."
optional = false
python-versions = ">=3.7"
files = [
{file = "python_docx-1.1.1-py3-none-any.whl", hash = "sha256:fc09412cef1a9ce7756d52376158f94f2a0edd0fc722da1d0a074f01d83e5021"},
{file = "python_docx-1.1.1.tar.gz", hash = "sha256:15473bd40a7c16d9367b0a4b2cbbab0a787904fa2f7cadae1ed6f96201dcbb66"},
]
[package.dependencies]
lxml = ">=3.1.0,<=4.9.2"
typing-extensions = ">=4.9.0"
[[package]]
name = "python-dotenv"

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow-base"
version = "0.0.41"
description = "A Python package with a built-in web application"
authors = ["Langflow <contact@langflow.org>"]
maintainers = [

View file

@ -63,9 +63,10 @@ export default function App() {
}, [dark]);
useEffect(() => {
const abortController = new AbortController();
const isLoginPage = location.pathname.includes("login");
autoLogin()
autoLogin(abortController.signal)
.then(async (user) => {
if (user && user["access_token"]) {
user["refresh_token"] = "auto";
@ -76,31 +77,44 @@ export default function App() {
await Promise.all([refreshStars(), refreshVersion(), fetchData()]);
}
})
.catch(async () => {
setAutoLogin(false);
if (isAuthenticated && !isLoginPage) {
getUser();
await Promise.all([refreshStars(), refreshVersion(), fetchData()]);
} else {
setLoading(false);
useFlowsManagerStore.setState({ isLoading: false });
.catch(async (error) => {
if (error.name !== "CanceledError") {
setAutoLogin(false);
if (isAuthenticated && !isLoginPage) {
getUser();
await Promise.all([refreshStars(), refreshVersion(), fetchData()]);
} else {
setLoading(false);
useFlowsManagerStore.setState({ isLoading: false });
}
}
});
}, [isAuthenticated]);
/*
Abort the request because it is no longer needed once the component is
unmounted. This helps avoid, among other things, the well-known "can't
perform a React state update on an unmounted component" warning.
*/
return () => abortController.abort();
}, []);
const fetchData = async () => {
if (isAuthenticated) {
try {
await getTypes();
refreshFlows();
const res = await getGlobalVariables();
setGlobalVariables(res);
checkHasStore();
fetchApiData();
} catch (error) {
console.error("Failed to fetch data:", error);
return new Promise<void>(async (resolve, reject) => {
if (isAuthenticated) {
try {
await getTypes();
await refreshFlows();
const res = await getGlobalVariables();
setGlobalVariables(res);
checkHasStore();
fetchApiData();
resolve();
} catch (error) {
console.error("Failed to fetch data:", error);
reject();
}
}
}
});
};
useEffect(() => {

View file

@ -39,8 +39,8 @@ import { classNames } from "../../utils/utils";
import ShadTooltip from "../ShadTooltipComponent";
import DictComponent from "../dictComponent";
import IconComponent from "../genericIconComponent";
import InputGlobalComponent from "../inputGlobalComponent";
import KeypairListComponent from "../keypairListComponent";
import InputComponent from "../inputComponent";
export default function CodeTabsComponent({
flow,
@ -351,31 +351,38 @@ export default function CodeTabsComponent({
/>
</div>
) : (
<InputGlobalComponent
<InputComponent
editNode={true}
disabled={false}
password={
node.data.node.template[
templateField
].password ?? false
}
value={
!node.data.node.template[
templateField
].value ||
node.data.node.template[
templateField
].value === ""
? ""
: node.data.node
.template[
templateField
].value
}
onChange={(target) => {
if (node.data) {
setNode(
node.data.id,
(oldNode) => {
let newNode =
cloneDeep(
oldNode
);
newNode.data = {
...newNode.data,
};
newNode.data.node.template[
templateField
].value = target;
return newNode;
}
);
}
setData((old) => {
let newInputList =
cloneDeep(old);
newInputList![
i
].data.node.template[
templateField
].value = target;
return newInputList;
});
tweaks.buildTweakObject!(
node["data"]["id"],
target,
@ -384,25 +391,6 @@ export default function CodeTabsComponent({
]
);
}}
setDb={(value) => {
setNode(
node.data.id,
(oldNode) => {
let newNode =
cloneDeep(oldNode);
newNode.data = {
...newNode.data,
};
newNode.data.node.template[
templateField
].load_from_db =
value;
return newNode;
}
);
}}
name={templateField}
data={node.data}
/>
)}
</div>

View file

@ -5,6 +5,8 @@ import { nodeIconsLucide } from "../../utils/styleUtils";
import { cn } from "../../utils/utils";
import Loading from "../ui/loading";
import { useEffect, useState } from "react";
const ForwardedIconComponent = memo(
forwardRef(
(
@ -18,9 +20,18 @@ const ForwardedIconComponent = memo(
}: IconComponentProps,
ref
) => {
const [showFallback, setShowFallback] = useState(false);
useEffect(() => {
const timer = setTimeout(() => {
setShowFallback(true);
}, 30);
return () => clearTimeout(timer);
}, []);
let TargetIcon = nodeIconsLucide[name];
if (!TargetIcon) {
// check if name exists in dynamicIconImports
if (!dynamicIconImports[name]) {
TargetIcon = nodeIconsLucide["unknown"];
} else TargetIcon = lazy(dynamicIconImports[name]);
@ -35,11 +46,15 @@ const ForwardedIconComponent = memo(
if (!TargetIcon) {
return null; // Render nothing until the icon is loaded
}
const fallback = (
const fallback = showFallback ? (
<div className={cn(className, "flex items-center justify-center")}>
<Loading />
</div>
) : (
<div className={className}></div>
);
return (
<Suspense fallback={fallback}>
<TargetIcon

View file

@ -406,9 +406,11 @@ export async function onLogin(user: LoginType) {
}
}
export async function autoLogin() {
export async function autoLogin(abortSignal) {
try {
const response = await api.get(`${BASE_URL_API}auto_login`);
const response = await api.get(`${BASE_URL_API}auto_login`, {
signal: abortSignal,
});
if (response.status === 200) {
const data = response.data;

View file

@ -25,6 +25,7 @@ import {
import { getTagsIds } from "../../utils/storeUtils";
import ConfirmationModal from "../ConfirmationModal";
import BaseModal from "../baseModal";
import ExportModal from "../exportModal";
export default function ShareModal({
component,
@ -206,9 +207,8 @@ export default function ShareModal({
{children ? children : <></>}
</BaseModal.Trigger>
<BaseModal.Header
description={`Publish ${
is_component ? "your component" : "workflow"
} to the Langflow Store.`}
description={`Publish ${is_component ? "your component" : "workflow"
} to the Langflow Store.`}
>
<span className="pr-2">Share</span>
<IconComponent
@ -251,18 +251,34 @@ export default function ShareModal({
<BaseModal.Footer>
<div className="flex w-full justify-between gap-2">
<Button
{!is_component && <ExportModal>
<Button
type="button"
variant="outline"
className="gap-2"
onClick={() => {
// (setOpen || internalSetOpen)(false);
}}
>
<IconComponent name="Download" className="h-4 w-4" />
Export
</Button>
</ExportModal>
}
{is_component && <Button
type="button"
variant="outline"
className="gap-2"
onClick={() => {
handleExportComponent();
(setOpen || internalSetOpen)(false);
handleExportComponent();
}}
>
<IconComponent name="Download" className="h-4 w-4" />
Export
</Button>
}
<Button
disabled={loadingNames}
type="button"

View file

@ -1,4 +1,4 @@
import { useEffect, useState } from "react";
import { useState } from "react";
import { Link, useNavigate } from "react-router-dom";
import PaginatorComponent from "../../../../components/PaginatorComponent";
import CollectionCardComponent from "../../../../components/cardComponent";
@ -23,42 +23,36 @@ export default function ComponentsComponent({
const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow);
const removeFlow = useFlowsManagerStore((state) => state.removeFlow);
const isLoading = useFlowsManagerStore((state) => state.isLoading);
const setExamples = useFlowsManagerStore((state) => state.setExamples);
const flows = useFlowsManagerStore((state) => state.flows);
const setSuccessData = useAlertStore((state) => state.setSuccessData);
const setErrorData = useAlertStore((state) => state.setErrorData);
const [pageSize, setPageSize] = useState(20);
const [pageIndex, setPageIndex] = useState(1);
const [loadingScreen, setLoadingScreen] = useState(true);
const navigate = useNavigate();
useEffect(() => {
if (isLoading) return;
let all = flows
.filter((f) => (f.is_component ?? false) === is_component)
.sort((a, b) => {
if (a?.updated_at && b?.updated_at) {
return (
new Date(b?.updated_at!).getTime() -
new Date(a?.updated_at!).getTime()
);
} else if (a?.updated_at && !b?.updated_at) {
return 1;
} else if (!a?.updated_at && b?.updated_at) {
return -1;
} else {
return (
new Date(b?.date_created!).getTime() -
new Date(a?.date_created!).getTime()
);
}
});
const start = (pageIndex - 1) * pageSize;
const end = start + pageSize;
setData(all.slice(start, end));
}, [flows,isLoading, pageIndex, pageSize]);
const [data, setData] = useState<FlowType[]>([]);
const all: FlowType[] = flows
.filter((f) => (f.is_component ?? false) === is_component)
.sort((a, b) => {
if (a?.updated_at && b?.updated_at) {
return (
new Date(b?.updated_at!).getTime() -
new Date(a?.updated_at!).getTime()
);
} else if (a?.updated_at && !b?.updated_at) {
return 1;
} else if (!a?.updated_at && b?.updated_at) {
return -1;
} else {
return (
new Date(b?.date_created!).getTime() -
new Date(a?.date_created!).getTime()
);
}
});
const start = (pageIndex - 1) * pageSize;
const end = start + pageSize;
const data: FlowType[] = all.slice(start, end);
const name = is_component ? "Component" : "Flow";
@ -73,8 +67,9 @@ export default function ComponentsComponent({
})
.then(() => {
setSuccessData({
title: `${is_component ? "Component" : "Flow"
} uploaded successfully`,
title: `${
is_component ? "Component" : "Flow"
} uploaded successfully`,
});
})
.catch((error) => {
@ -139,18 +134,46 @@ export default function ComponentsComponent({
onDelete={() => {
removeFlow(item.id);
setSuccessData({
title: `${item.is_component ? "Component" : "Flow"
} deleted successfully!`,
title: `${
item.is_component ? "Component" : "Flow"
} deleted successfully!`,
});
resetFilter();
}}
key={idx}
data={item}
data-testid={
"edit-flow-button-" + item.id + "-" + idx
}
onClick={!is_component ? () => {navigate("/flow/" + item.id);} : undefined}
data={{ is_component: item.is_component ?? false, ...item }}
disabled={isLoading}
data-testid={"edit-flow-button-" + item.id + "-" + idx}
button={
!is_component ? (
<Link to={"/flow/" + item.id}>
<Button
tabIndex={-1}
variant="outline"
size="sm"
className="whitespace-nowrap "
data-testid={
"edit-flow-button-" + item.id + "-" + idx
}
>
<IconComponent
name="ExternalLink"
className="main-page-nav-button select-none"
/>
Edit Flow
</Button>
</Link>
) : (
<></>
)
}
onClick={
!is_component
? () => {
navigate("/flow/" + item.id);
}
: undefined
}
playground={!is_component}
/>
))

View file

@ -1,5 +1,4 @@
import { useEffect } from "react";
import { Route, Routes, useNavigate } from "react-router-dom";
import { Navigate, Route, Routes } from "react-router-dom";
import { ProtectedAdminRoute } from "./components/authAdminGuard";
import { ProtectedRoute } from "./components/authGuard";
import { ProtectedLoginRoute } from "./components/authLoginGuard";
@ -20,13 +19,6 @@ import SignUp from "./pages/signUpPage";
import PlaygroundPage from "./pages/Playground";
const Router = () => {
const navigate = useNavigate();
useEffect(() => {
// Redirect from root to /flows
if (window.location.pathname === "/") {
navigate("/flows");
}
}, [navigate]);
return (
<Routes>
<Route
@ -37,6 +29,7 @@ const Router = () => {
</ProtectedRoute>
}
>
<Route index element={<Navigate replace to={"flows"} />} />
<Route
path="flows"
element={<ComponentsComponent key="flows" is_component={false} />}

View file

@ -481,8 +481,13 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
// const nextVertices will be the zip of vertexBuildData.next_vertices_ids and
// vertexBuildData.top_level_vertices
// the VertexLayerElementType as {id: next_vertices_id, layer: top_level_vertex}
// next_vertices_ids should be next_vertices_ids without the inactivated vertices
const next_vertices_ids = vertexBuildData.next_vertices_ids.filter(
(id) => !vertexBuildData.inactivated_vertices?.includes(id)
);
const nextVertices: VertexLayerElementType[] = zip(
vertexBuildData.next_vertices_ids,
next_vertices_ids,
vertexBuildData.top_level_vertices
).map(([id, reference]) => ({ id: id!, reference }));
@ -492,7 +497,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
];
const newIds = [
...get().verticesBuild!.verticesIds,
...vertexBuildData.next_vertices_ids,
...next_vertices_ids,
];
get().updateVerticesBuild({
verticesIds: newIds,
@ -603,7 +608,10 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
set({
verticesBuild: {
...verticesBuild,
// remove the vertices from the list of vertices ids
// that are going to be built
verticesIds: get().verticesBuild!.verticesIds.filter(
// keep the vertices that are not in the list of vertices to remove
(vertex) => !vertices.includes(vertex)
),
},

View file

@ -323,7 +323,7 @@
muted-foreground is too strong, maybe use a lighter shade of it?
*/
@apply border-none ring ring-muted-foreground;
@apply border-none ring grayscale;
}
.built-invalid-status {
@apply border-none ring ring-[#FF9090];

View file

@ -175,14 +175,26 @@ export async function buildVertices({
!useFlowStore
.getState()
.verticesBuild?.verticesIds.includes(element.id) &&
!useFlowStore
.getState()
.verticesBuild?.verticesIds.includes(element.reference ?? "") &&
onBuildUpdate
) {
// If it is, skip building and set the state to inactive
onBuildUpdate(
getInactiveVertexData(element.id),
BuildStatus.INACTIVE,
runId
);
if (element.id) {
onBuildUpdate(
getInactiveVertexData(element.id),
BuildStatus.INACTIVE,
runId
);
}
if (element.reference) {
onBuildUpdate(
getInactiveVertexData(element.reference),
BuildStatus.INACTIVE,
runId
);
}
buildResults.push(false);
return;
}