merge fix
This commit is contained in:
commit
357029865f
72 changed files with 2282 additions and 2558 deletions
39
.github/workflows/pre-release-base.yml
vendored
39
.github/workflows/pre-release-base.yml
vendored
|
|
@ -7,15 +7,20 @@ on:
|
|||
branches:
|
||||
- dev
|
||||
paths:
|
||||
- "pyproject.toml"
|
||||
- "src/backend/base/pyproject.toml"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
force_release:
|
||||
description: "Force a release"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
env:
|
||||
POETRY_VERSION: "1.8.2"
|
||||
|
||||
jobs:
|
||||
if_release:
|
||||
if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }}
|
||||
if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }} || ${{ github.event_name == 'workflow_dispatch' && inputs.force_release == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
|
@ -26,12 +31,31 @@ jobs:
|
|||
with:
|
||||
python-version: "3.10"
|
||||
cache: "poetry"
|
||||
- name: Build project for distribution
|
||||
run: make build base=true
|
||||
- name: Check Version
|
||||
id: check-version
|
||||
# In this step, we should check the version of the package
|
||||
# and see if it is a version that is already released
|
||||
# echo version=$(cd src/backend/base && poetry version --short) >> $GITHUB_OUTPUT
|
||||
# cd src/backend/base && poetry version --short should
|
||||
# be different than the last release version in pypi
|
||||
# which we can get from curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1
|
||||
run: |
|
||||
echo version=$(poetry version --short) >> $GITHUB_OUTPUT
|
||||
version=$(cd src/backend/base && poetry version --short)
|
||||
last_released_version=$(curl -s "https://pypi.org/pypi/langflow-base/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
|
||||
if [ "$version" = "$last_released_version" ]; then
|
||||
echo "Version $version is already released. Skipping release."
|
||||
exit 1
|
||||
else
|
||||
echo version=$version >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Build project for distribution
|
||||
run: make build base=true
|
||||
|
||||
- name: Publish to PyPI
|
||||
env:
|
||||
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
|
||||
run: |
|
||||
make publish base=true
|
||||
- name: Create Release
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
|
|
@ -42,11 +66,6 @@ jobs:
|
|||
prerelease: true
|
||||
tag: v${{ steps.check-version.outputs.version }}
|
||||
commit: dev
|
||||
- name: Publish to PyPI
|
||||
env:
|
||||
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
|
||||
run: |
|
||||
poetry publish base=true
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
|
|
|
|||
33
.github/workflows/pre-release-langflow.yml
vendored
33
.github/workflows/pre-release-langflow.yml
vendored
|
|
@ -9,6 +9,11 @@ on:
|
|||
paths:
|
||||
- "pyproject.toml"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
force_release:
|
||||
description: "Force a release"
|
||||
required: false
|
||||
default: "false"
|
||||
workflow_run:
|
||||
workflows: ["pre-release-base"]
|
||||
types: [completed]
|
||||
|
|
@ -19,7 +24,7 @@ env:
|
|||
|
||||
jobs:
|
||||
if_release:
|
||||
if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }}
|
||||
if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }} || ${{ github.event_name == 'workflow_dispatch' && inputs.force_release == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
|
@ -30,12 +35,26 @@ jobs:
|
|||
with:
|
||||
python-version: "3.10"
|
||||
cache: "poetry"
|
||||
- name: Build project for distribution
|
||||
run: make build main=true
|
||||
- name: Check Version
|
||||
id: check-version
|
||||
run: |
|
||||
echo version=$(poetry version --short) >> $GITHUB_OUTPUT
|
||||
version=$(cd src/backend/base && poetry version --short)
|
||||
last_released_version=$(curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
|
||||
if [ "$version" = "$last_released_version" ]; then
|
||||
echo "Version $version is already released. Skipping release."
|
||||
exit 1
|
||||
else
|
||||
echo version=$version >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Build project for distribution
|
||||
run: make build main=true
|
||||
- name: Display pyproject.toml langflow-base Version
|
||||
run: cat pyproject.toml | grep langflow-base
|
||||
- name: Publish to PyPI
|
||||
env:
|
||||
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
|
||||
run: |
|
||||
make publish main=true
|
||||
- name: Create Release
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
|
|
@ -46,11 +65,6 @@ jobs:
|
|||
prerelease: true
|
||||
tag: v${{ steps.check-version.outputs.version }}
|
||||
commit: dev
|
||||
- name: Publish to PyPI
|
||||
env:
|
||||
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
|
||||
run: |
|
||||
poetry publish main=true
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
|
|
@ -68,3 +82,4 @@ jobs:
|
|||
file: ./build_and_push.Dockerfile
|
||||
tags: |
|
||||
logspace/langflow:${{ steps.check-version.outputs.version }}
|
||||
logspace/langflow:1.0-alpha
|
||||
|
|
|
|||
2
.vscode/launch.json
vendored
2
.vscode/launch.json
vendored
|
|
@ -32,7 +32,7 @@
|
|||
"args": [
|
||||
"run",
|
||||
"--path",
|
||||
"${workspaceFolder}/src/backend/langflow/frontend"
|
||||
"${workspaceFolder}/src/backend/base/langflow/frontend"
|
||||
],
|
||||
"jinja": true,
|
||||
"justMyCode": false,
|
||||
|
|
|
|||
2
LICENSE
2
LICENSE
|
|
@ -1,6 +1,6 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2023 Logspace
|
||||
Copyright (c) 2024 Logspace
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
|
|||
8
Makefile
8
Makefile
|
|
@ -188,8 +188,10 @@ build_langflow:
|
|||
cd ./scripts && poetry run python update_dependencies.py
|
||||
poetry lock
|
||||
poetry build
|
||||
ifdef restore
|
||||
mv pyproject.toml.bak pyproject.toml
|
||||
mv poetry.lock.bak poetry.lock
|
||||
endif
|
||||
|
||||
dev:
|
||||
make install_frontend
|
||||
|
|
@ -214,21 +216,19 @@ lock:
|
|||
poetry lock
|
||||
|
||||
publish_base:
|
||||
make build_langflow_base
|
||||
cd src/backend/base && poetry publish
|
||||
|
||||
publish_langflow:
|
||||
make build_langflow
|
||||
poetry publish
|
||||
|
||||
publish:
|
||||
@echo 'Publishing the project'
|
||||
ifdef base
|
||||
-make publish_base
|
||||
make publish_base
|
||||
endif
|
||||
|
||||
ifdef main
|
||||
-make publish_langflow
|
||||
make publish_langflow
|
||||
endif
|
||||
|
||||
help:
|
||||
|
|
|
|||
94
README.md
94
README.md
|
|
@ -1,69 +1,51 @@
|
|||
<!-- markdownlint-disable MD030 -->
|
||||
|
||||
# ⛓️ Langflow
|
||||
# [](https://www.langflow.org)
|
||||
|
||||
<h3>Discover a simpler & smarter way to build around Foundation Models</h3>
|
||||
### [Langflow](https://www.langflow.org) is a new, visual way to build, iterate and deploy AI apps.
|
||||
|
||||
[](https://github.com/logspace-ai/langflow/releases)
|
||||
[](https://github.com/logspace-ai/langflow/contributors)
|
||||
[](https://github.com/logspace-ai/langflow/last-commit)
|
||||
[](https://github.com/logspace-ai/langflow/issues)
|
||||
[](https://github.com/logspace-ai/langflow/repo-size)
|
||||
[](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/logspace-ai/langflow)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
[](https://star-history.com/#logspace-ai/langflow)
|
||||
[](https://github.com/logspace-ai/langflow/fork)
|
||||
[](https://twitter.com/langflow_ai)
|
||||
[](https://discord.com/invite/EqksyE2EX9)
|
||||
[](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
[](https://codespaces.new/logspace-ai/langflow)
|
||||
# ⚡️ Documentation and Community
|
||||
|
||||
<a href="https://github.com/logspace-ai/langflow">
|
||||
<img width="100%" src="https://github.com/logspace-ai/langflow/blob/dev/docs/static/img/new_langflow_demo.gif"></a>
|
||||
- [Documentation](https://docs.langflow.org)
|
||||
- [Discord](https://discord.com/invite/EqksyE2EX9)
|
||||
|
||||
# 📦 Installation
|
||||
|
||||
### <b>Locally</b>
|
||||
|
||||
Make sure you have Python 3.10 installed on your system.
|
||||
|
||||
You can install Langflow from pip:
|
||||
You can install Langflow with pip:
|
||||
|
||||
```shell
|
||||
# This installs the package without dependencies for local models
|
||||
pip install langflow
|
||||
# Make sure you have Python 3.10 installed on your system.
|
||||
# Install the pre-release version
|
||||
python -m pip install langflow --pre --force-reinstall
|
||||
|
||||
# or stable version
|
||||
python -m pip install langflow -U
|
||||
```
|
||||
|
||||
To use local models (e.g llama-cpp-python) run:
|
||||
|
||||
```shell
|
||||
pip install langflow[local]
|
||||
```
|
||||
|
||||
This will install the following dependencies:
|
||||
|
||||
- [CTransformers](https://github.com/marella/ctransformers)
|
||||
- [llama-cpp-python](https://github.com/abetlen/llama-cpp-python)
|
||||
- [sentence-transformers](https://github.com/UKPLab/sentence-transformers)
|
||||
|
||||
You can still use models from projects like LocalAI, Ollama, LM Studio, Jan and others.
|
||||
|
||||
Next, run:
|
||||
Then, run Langflow with:
|
||||
|
||||
```shell
|
||||
python -m langflow run
|
||||
```
|
||||
|
||||
or
|
||||
You can also preview Langflow in [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow-Preview). [Clone the space using this link](https://huggingface.co/spaces/Logspace/Langflow-Preview?duplicate=true), to create your own Langflow workspace in minutes.
|
||||
|
||||
```shell
|
||||
langflow run # or langflow --help
|
||||
# 🎨 Creating Flows
|
||||
|
||||
Creating flows with Langflow is easy. Simply drag components from the sidebar onto the canvas and connect them to start building your application.
|
||||
|
||||
Explore by editing prompt parameters, grouping components into a single high-level component, and building your own Custom Components.
|
||||
|
||||
Once you’re done, you can export your flow as a JSON file.
|
||||
|
||||
Load the flow with:
|
||||
|
||||
```python
|
||||
from langflow.load import run_flow_from_json
|
||||
|
||||
results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!")
|
||||
```
|
||||
|
||||
### HuggingFace Spaces
|
||||
|
||||
You can also check it out on HuggingFace Spaces and run it in your browser for free! [Click here to duplicate the Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
|
||||
# 🖥️ Command Line Interface (CLI)
|
||||
|
||||
Langflow provides a command-line interface (CLI) for easy management and configuration.
|
||||
|
|
@ -126,30 +108,10 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
|
|||
<img src="https://render.com/images/deploy-to-render-button.svg" alt="Deploy to Render" />
|
||||
</a>
|
||||
|
||||
# 🎨 Creating Flows
|
||||
|
||||
Creating flows with Langflow is easy. Simply drag components from the sidebar onto the canvas and connect them to start building your application.
|
||||
|
||||
Explore by editing prompt parameters, grouping components into a single high-level component, and building your own Custom Components.
|
||||
|
||||
Once you’re done, you can export your flow as a JSON file.
|
||||
|
||||
Load the flow with:
|
||||
|
||||
```python
|
||||
from langflow import load_flow_from_json
|
||||
|
||||
flow = load_flow_from_json("path/to/flow.json")
|
||||
# Now you can use it
|
||||
flow("Hey, have you heard of Langflow?")
|
||||
```
|
||||
|
||||
# 👋 Contributing
|
||||
|
||||
We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible.
|
||||
|
||||
Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions, and showcase your projects! 🦾
|
||||
|
||||
---
|
||||
|
||||
[](https://star-history.com/#logspace-ai/langflow&Date)
|
||||
|
|
|
|||
|
|
@ -1,15 +1,3 @@
|
|||
FROM python:3.10-slim
|
||||
FROM logspace/langflow:latest
|
||||
|
||||
RUN apt-get update && apt-get install gcc g++ git make -y && apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
RUN useradd -m -u 1000 user
|
||||
USER user
|
||||
ENV HOME=/home/user \
|
||||
PATH=/home/user/.local/bin:$PATH
|
||||
|
||||
WORKDIR $HOME/app
|
||||
|
||||
COPY --chown=user . $HOME/app
|
||||
|
||||
RUN pip install langflow>==0.5.0 -U --user
|
||||
CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"]
|
||||
|
|
|
|||
|
|
@ -1,10 +1,30 @@
|
|||
version: '3'
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
langflow:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
image: logspace/langflow:latest
|
||||
ports:
|
||||
- "7860:7860"
|
||||
command: langflow run --host 0.0.0.0
|
||||
depends_on:
|
||||
- postgres
|
||||
environment:
|
||||
- LANGFLOW_DATABASE_URL=postgresql://langflow:langflow@postgres:5432/langflow
|
||||
# This variable defines where the logs, file storage, monitor data and secret keys are stored.
|
||||
- LANGFLOW_CONFIG_DIR=/var/lib/langflow
|
||||
volumes:
|
||||
- langflow-data:/var/lib/langflow
|
||||
|
||||
postgres:
|
||||
image: postgres:16
|
||||
environment:
|
||||
POSTGRES_USER: langflow
|
||||
POSTGRES_PASSWORD: langflow
|
||||
POSTGRES_DB: langflow
|
||||
ports:
|
||||
- "5432:5432"
|
||||
volumes:
|
||||
- langflow-postgres:/var/lib/postgresql/data
|
||||
|
||||
volumes:
|
||||
langflow-postgres:
|
||||
langflow-data:
|
||||
|
|
|
|||
|
|
@ -2,10 +2,6 @@ import Admonition from "@theme/Admonition";
|
|||
|
||||
# Embeddings
|
||||
|
||||
Embeddings are vector representations of text that capture the semantic meaning of the text. They are created using text embedding models and allow us to think about the text in a vector space, enabling us to perform tasks like semantic search, where we look for pieces of text that are most similar in the vector space.
|
||||
|
||||
---
|
||||
|
||||
### Amazon Bedrock Embeddings
|
||||
|
||||
Used to load [Amazon Bedrocks’s](https://aws.amazon.com/bedrock/) embedding models.
|
||||
|
|
@ -127,7 +123,12 @@ Used to load [OpenAI’s](https://openai.com/) embedding models.
|
|||
Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) [Embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings).
|
||||
|
||||
<Admonition type="info">
|
||||
Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP). It provides access, management, and development of applications and services through global data centers. To use Vertex AI PaLM, you need to have the [google-cloud-aiplatform](https://pypi.org/project/google-cloud-aiplatform/) Python package installed and credentials configured for your environment.
|
||||
Vertex AI is a cloud computing platform offered by Google Cloud Platform
|
||||
(GCP). It provides access, management, and development of applications and
|
||||
services through global data centers. To use Vertex AI PaLM, you need to have
|
||||
the
|
||||
[google-cloud-aiplatform](https://pypi.org/project/google-cloud-aiplatform/)
|
||||
Python package installed and credentials configured for your environment.
|
||||
</Admonition>
|
||||
|
||||
- **credentials:** The default custom credentials (google.auth.credentials.Credentials) to use.
|
||||
|
|
|
|||
|
|
@ -1,13 +1,7 @@
|
|||
import Admonition from '@theme/Admonition';
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Models
|
||||
|
||||
<Admonition type="caution" icon="🚧" title="ZONE UNDER CONSTRUCTION">
|
||||
<p>
|
||||
We appreciate your understanding as we polish our documentation – it may contain some rough edges. Share your feedback or report issues to help us improve! 🛠️📝
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
### Amazon Bedrock
|
||||
|
||||
This component facilitates the generation of text using the LLM (Large Language Model) model from Amazon Bedrock.
|
||||
|
|
@ -19,16 +13,17 @@ This component facilitates the generation of text using the LLM (Large Language
|
|||
- **System Message (Optional):** A system message to pass to the model.
|
||||
|
||||
- **Model ID (Optional):** Specifies the model ID to be used for text generation. Defaults to _`"anthropic.claude-instant-v1"`_. Available options include:
|
||||
- _`"ai21.j2-grande-instruct"`_
|
||||
- _`"ai21.j2-jumbo-instruct"`_
|
||||
- _`"ai21.j2-mid"`_
|
||||
- _`"ai21.j2-mid-v1"`_
|
||||
- _`"ai21.j2-ultra"`_
|
||||
- _`"ai21.j2-ultra-v1"`_
|
||||
- _`"anthropic.claude-instant-v1"`_
|
||||
- _`"anthropic.claude-v1"`_
|
||||
- _`"anthropic.claude-v2"`_
|
||||
- _`"cohere.command-text-v14"`_
|
||||
|
||||
- _`"ai21.j2-grande-instruct"`_
|
||||
- _`"ai21.j2-jumbo-instruct"`_
|
||||
- _`"ai21.j2-mid"`_
|
||||
- _`"ai21.j2-mid-v1"`_
|
||||
- _`"ai21.j2-ultra"`_
|
||||
- _`"ai21.j2-ultra-v1"`_
|
||||
- _`"anthropic.claude-instant-v1"`_
|
||||
- _`"anthropic.claude-v1"`_
|
||||
- _`"anthropic.claude-v2"`_
|
||||
- _`"cohere.command-text-v14"`_
|
||||
|
||||
- **Credentials Profile Name (Optional):** Specifies the name of the credentials profile.
|
||||
|
||||
|
|
@ -45,12 +40,12 @@ This component facilitates the generation of text using the LLM (Large Language
|
|||
- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
|
||||
|
||||
<Admonition type="note" title="Note">
|
||||
<p>
|
||||
Ensure that necessary credentials are provided to connect to the Amazon Bedrock API. If connection fails, a ValueError will be raised.
|
||||
</p>
|
||||
<p>
|
||||
Ensure that necessary credentials are provided to connect to the Amazon
|
||||
Bedrock API. If connection fails, a ValueError will be raised.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
|
||||
---
|
||||
|
||||
### Anthropic
|
||||
|
|
@ -60,10 +55,11 @@ This component allows the generation of text using Anthropic Chat&Completion lar
|
|||
**Params**
|
||||
|
||||
- **Model Name:** Specifies the name of the Anthropic model to be used for text generation. Available options include:
|
||||
- _`"claude-2.1"`_
|
||||
- _`"claude-2.0"`_
|
||||
- _`"claude-instant-1.2"`_
|
||||
- _`"claude-instant-1"`_
|
||||
|
||||
- _`"claude-2.1"`_
|
||||
- _`"claude-2.0"`_
|
||||
- _`"claude-instant-1.2"`_
|
||||
- _`"claude-instant-1"`_
|
||||
|
||||
- **Anthropic API Key:** Your Anthropic API key.
|
||||
|
||||
|
|
@ -90,25 +86,27 @@ This component allows the generation of text using the LLM (Large Language Model
|
|||
**Params**
|
||||
|
||||
- **Model Name:** Specifies the name of the Azure OpenAI model to be used for text generation. Available options include:
|
||||
- _`"gpt-35-turbo"`_
|
||||
- _`"gpt-35-turbo-16k"`_
|
||||
- _`"gpt-35-turbo-instruct"`_
|
||||
- _`"gpt-4"`_
|
||||
- _`"gpt-4-32k"`_
|
||||
- _`"gpt-4-vision"`_
|
||||
|
||||
- _`"gpt-35-turbo"`_
|
||||
- _`"gpt-35-turbo-16k"`_
|
||||
- _`"gpt-35-turbo-instruct"`_
|
||||
- _`"gpt-4"`_
|
||||
- _`"gpt-4-32k"`_
|
||||
- _`"gpt-4-vision"`_
|
||||
|
||||
- **Azure Endpoint:** Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`.
|
||||
|
||||
- **Deployment Name:** Specifies the name of the deployment.
|
||||
|
||||
- **API Version:** Specifies the version of the Azure OpenAI API to be used. Available options include:
|
||||
- _`"2023-03-15-preview"`_
|
||||
- _`"2023-05-15"`_
|
||||
- _`"2023-06-01-preview"`_
|
||||
- _`"2023-07-01-preview"`_
|
||||
- _`"2023-08-01-preview"`_
|
||||
- _`"2023-09-01-preview"`_
|
||||
- _`"2023-12-01-preview"`_
|
||||
|
||||
- _`"2023-03-15-preview"`_
|
||||
- _`"2023-05-15"`_
|
||||
- _`"2023-06-01-preview"`_
|
||||
- _`"2023-07-01-preview"`_
|
||||
- _`"2023-08-01-preview"`_
|
||||
- _`"2023-09-01-preview"`_
|
||||
- _`"2023-12-01-preview"`_
|
||||
|
||||
- **API Key:** Your Azure OpenAI API key.
|
||||
|
||||
|
|
@ -124,7 +122,6 @@ This component allows the generation of text using the LLM (Large Language Model
|
|||
|
||||
For detailed documentation and integration guides, please refer to the [Azure OpenAI Component Documentation](https://python.langchain.com/docs/integrations/llms/azure_openai).
|
||||
|
||||
|
||||
---
|
||||
|
||||
### Cohere
|
||||
|
|
@ -197,6 +194,29 @@ This component facilitates text generation using LLM models from the Hugging Fac
|
|||
|
||||
---
|
||||
|
||||
### LiteLLM Model
|
||||
|
||||
Generates text using the `LiteLLM` collection of large language models.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Model name:** The name of the model to use. For example, `gpt-3.5-turbo`. (Type: str)
|
||||
- **API key:** The API key to use for accessing the provider's API. (Type: str, Optional)
|
||||
- **Provider:** The provider of the API key. (Type: str, Choices: "OpenAI", "Azure", "Anthropic", "Replicate", "Cohere", "OpenRouter")
|
||||
- **Temperature:** Controls the randomness of the text generation. (Type: float, Default: 0.7)
|
||||
- **Model kwargs:** Additional keyword arguments for the model. (Type: Dict, Optional)
|
||||
- **Top p:** Filter responses to keep the cumulative probability within the top p tokens. (Type: float, Optional)
|
||||
- **Top k:** Filter responses to only include the top k tokens. (Type: int, Optional)
|
||||
- **N:** Number of chat completions to generate for each prompt. (Type: int, Default: 1)
|
||||
- **Max tokens:** The maximum number of tokens to generate for each chat completion. (Type: int, Default: 256)
|
||||
- **Max retries:** Maximum number of retries for failed requests. (Type: int, Default: 6)
|
||||
- **Verbose:** Whether to print verbose output. (Type: bool, Default: False)
|
||||
- **Input:** The input prompt for text generation. (Type: str)
|
||||
- **Stream:** Whether to stream the output. (Type: bool, Default: False)
|
||||
- **System message:** System message to pass to the model. (Type: str, Optional)
|
||||
|
||||
---
|
||||
|
||||
### Ollama
|
||||
|
||||
Generate text using Ollama Local LLMs.
|
||||
|
|
@ -248,7 +268,7 @@ This component facilitates text generation using OpenAI's models.
|
|||
|
||||
- **OpenAI API Base (Optional):** The base URL of the OpenAI API. Defaults to _`https://api.openai.com/v1`_.
|
||||
|
||||
- **OpenAI API Key (Optional):** The API key for accessing the OpenAI API.
|
||||
- **OpenAI API Key (Optional):** The API key for accessing the OpenAI API.
|
||||
|
||||
- **Temperature:** Controls the creativity of model responses. Defaults to _`0.7`_.
|
||||
|
||||
|
|
@ -265,16 +285,17 @@ This component facilitates the generation of text using Baidu Qianfan chat model
|
|||
**Params**
|
||||
|
||||
- **Model Name:** Specifies the name of the Qianfan chat model to be used for text generation. Available options include:
|
||||
- _`"ERNIE-Bot"`_
|
||||
- _`"ERNIE-Bot-turbo"`_
|
||||
- _`"BLOOMZ-7B"`_
|
||||
- _`"Llama-2-7b-chat"`_
|
||||
- _`"Llama-2-13b-chat"`_
|
||||
- _`"Llama-2-70b-chat"`_
|
||||
- _`"Qianfan-BLOOMZ-7B-compressed"`_
|
||||
- _`"Qianfan-Chinese-Llama-2-7B"`_
|
||||
- _`"ChatGLM2-6B-32K"`_
|
||||
- _`"AquilaChat-7B"`_
|
||||
|
||||
- _`"ERNIE-Bot"`_
|
||||
- _`"ERNIE-Bot-turbo"`_
|
||||
- _`"BLOOMZ-7B"`_
|
||||
- _`"Llama-2-7b-chat"`_
|
||||
- _`"Llama-2-13b-chat"`_
|
||||
- _`"Llama-2-70b-chat"`_
|
||||
- _`"Qianfan-BLOOMZ-7B-compressed"`_
|
||||
- _`"Qianfan-Chinese-Llama-2-7B"`_
|
||||
- _`"ChatGLM2-6B-32K"`_
|
||||
- _`"AquilaChat-7B"`_
|
||||
|
||||
- **Qianfan Ak:** Your Baidu Qianfan access key, obtainable from [here](https://cloud.baidu.com/product/wenxinworkshop).
|
||||
|
||||
|
|
@ -326,4 +347,4 @@ The `ChatVertexAI` is a component for generating text using Vertex AI Chat large
|
|||
|
||||
- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
|
||||
|
||||
- **System Message (Optional):** System message to pass to the model.
|
||||
- **System Message (Optional):** System message to pass to the model.
|
||||
|
|
|
|||
|
|
@ -2,14 +2,6 @@ import Admonition from "@theme/Admonition";
|
|||
|
||||
# Vector Stores
|
||||
|
||||
<Admonition type="caution" icon="🚧" title="ZONE UNDER CONSTRUCTION">
|
||||
<p>
|
||||
We appreciate your understanding as we polish our documentation – it may
|
||||
contain some rough edges. Share your feedback or report issues to help us
|
||||
improve! 🛠️📝
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
### Astra DB
|
||||
|
||||
The `Astra DB` is a component for initializing an Astra DB Vector Store from Records. It facilitates the creation of Astra DB-based vector indexes for efficient document storage and retrieval.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
# 🖥️ Command Line Interface (CLI)
|
||||
|
||||
|
||||
## Overview
|
||||
|
||||
Langflow's Command Line Interface (CLI) is a powerful tool that allows you to interact with the Langflow server from the command line. The CLI provides a wide range of commands to help you shape Langflow to your needs.
|
||||
|
|
@ -8,9 +7,9 @@ Langflow's Command Line Interface (CLI) is a powerful tool that allows you to in
|
|||
Running the CLI without any arguments will display a list of available commands and options.
|
||||
|
||||
```bash
|
||||
langflow --help
|
||||
python -m langflow --help
|
||||
# or
|
||||
langflow
|
||||
python -m langflow
|
||||
```
|
||||
|
||||
Each option is detailed below:
|
||||
|
|
|
|||
|
|
@ -366,7 +366,7 @@ For advanced customization, Langflow offers the option to create and load custom
|
|||
|
||||
### Folder Structure
|
||||
|
||||
Create a folder that follows the same structural conventions as the [config.yaml](https://github.com/logspace-ai/langflow/blob/dev/src/backend/langflow/config.yaml) file. Inside this main directory, use a `custom_components` subdirectory for your custom components.
|
||||
Create a folder that follows the same structural conventions as the [config.yaml](https://github.com/logspace-ai/langflow/blob/dev/src/backend/base/langflow/config.yaml) file. Inside this main directory, use a `custom_components` subdirectory for your custom components.
|
||||
|
||||
Inside `custom_components`, you can create a Python file for each component. Similarly, any custom agents should be housed in an `agents` subdirectory.
|
||||
|
||||
|
|
|
|||
|
|
@ -41,14 +41,12 @@ The Code button shows snippets to use your flow as a Python object or an API.
|
|||
|
||||
**Python Code**
|
||||
|
||||
Through the Langflow package, you can load a flow from a JSON file and use it as a LangChain object.
|
||||
Through the Langflow package, you can run your flow from a JSON file. The example below shows how to run a flow from a JSON file.
|
||||
|
||||
```python
|
||||
from langflow.load import load_flow_from_json
|
||||
from langflow.load import run_flow_from_json
|
||||
|
||||
flow = load_flow_from_json("path/to/flow.json")
|
||||
# Now you can use it like any chain
|
||||
flow("Hey, have you heard of Langflow?")
|
||||
results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!")
|
||||
```
|
||||
|
||||
**API**
|
||||
|
|
|
|||
0
docs/docs/guides/basic-prompting.mdx
Normal file
0
docs/docs/guides/basic-prompting.mdx
Normal file
0
docs/docs/guides/blog-writer.mdx
Normal file
0
docs/docs/guides/blog-writer.mdx
Normal file
0
docs/docs/guides/document-qa.mdx
Normal file
0
docs/docs/guides/document-qa.mdx
Normal file
0
docs/docs/guides/memory-chatbot.mdx
Normal file
0
docs/docs/guides/memory-chatbot.mdx
Normal file
186
docs/docs/guides/rag-with-astradb.mdx
Normal file
186
docs/docs/guides/rag-with-astradb.mdx
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
import ThemedImage from "@theme/ThemedImage";
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# 🌟 RAG with Astra DB
|
||||
|
||||
This guide will walk you through how to build a RAG (Retrieval Augmented Generation) application using **Astra DB** and **Langflow**.
|
||||
|
||||
[Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=astradb) is a cloud-native database built on Apache Cassandra that is optimized for the cloud. It is a fully managed database-as-a-service that simplifies operations and reduces costs. Astra DB is built on the same technology that powers the largest Cassandra deployments in the world.
|
||||
|
||||
In this guide, we will use Astra DB as a vector store to store and retrieve the documents that will be used by the RAG application to generate responses.
|
||||
|
||||
<Admonition type="tip">
|
||||
This guide assumes that you have Langflow up and running. If you are new to
|
||||
Langflow, you can check out the [Getting Started](/) guide.
|
||||
</Admonition>
|
||||
|
||||
TLDR;
|
||||
|
||||
- [Create a free Astra DB account](https://astra.datastax.com/signup?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=create-a-free-astra-db-account)
|
||||
- Duplicate our [Langflow 1.0 Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
- Create a new database, get a **Token** and the **API Endpoint**
|
||||
- Click on the **New Project** button and look for Vector Store RAG. This will create a new project with the necessary components
|
||||
- Import the project into Langflow by dropping it on the Canvas or My Collection page
|
||||
- Update the **Token** and **API Endpoint** in the **Astra DB** components
|
||||
- Update the OpenAI API key in the **OpenAI** components
|
||||
- Run the ingestion flow which is the one that uses the **Astra DB** component
|
||||
- Click on the ⚡ _Run_ button and start interacting with your RAG application
|
||||
|
||||
# First things first
|
||||
|
||||
## Create an Astra DB Database
|
||||
|
||||
To get started, you will need to [create an Astra DB database](https://astra.datastax.com/signup?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=create-an-astradb-database).
|
||||
|
||||
Once you have created an account, you will be taken to the Astra DB dashboard. Click on the **Create Database** button.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-create-database.png",
|
||||
dark: "img/astra-create-database.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
Now you will need to configure your database. Choose the **Serverless (Vector)** deployment type, and pick a Database name, provider and region.
|
||||
|
||||
After you have configured your database, click on the **Create Database** button.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-configure-deployment.png",
|
||||
dark: "img/astra-configure-deployment.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
Once your database is initialized, to the right of the page, you will see the _Database Details_ section which contains a button for you to copy the **API Endpoint** and another to generate a **Token**.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-generate-token.png",
|
||||
dark: "img/astra-generate-token.png",
|
||||
}}
|
||||
style={{ width: "50%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
Now we are all set to start building our RAG application using Astra DB and Langflow.
|
||||
|
||||
## (Optional) Duplicate the Langflow 1.0 HuggingFace Space
|
||||
|
||||
If you haven't already, now is the time to launch Langflow. To make things easier, you can duplicate our [Langflow 1.0 Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) which sets up a Langflow instance just for you.
|
||||
|
||||
## Open the Vector Store RAG Project
|
||||
|
||||
To get started, click on the **New Project** button and look for the **Vector Store RAG** project. This will open a starter project with the necessary components to run a RAG application using Astra DB.
|
||||
|
||||
This project consists of two flows. The simpler one is the **Ingestion Flow** which is responsible for ingesting the documents into the Astra DB database.
|
||||
|
||||
Your first step should be to understand what each flow does and how they interact with each other.
|
||||
|
||||
The ingestion flow consists of:
|
||||
|
||||
- **Files** component that uploads a text file to Langflow
|
||||
- **Recursive Character Text Splitter** component that splits the text into smaller chunks
|
||||
- **OpenAIEmbeddings** component that generates embeddings for the text chunks
|
||||
- **Astra DB** component that stores the text chunks in the Astra DB database
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-ingestion-flow.png",
|
||||
dark: "img/astra-ingestion-flow.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
Now, let's update the **Astra DB** and **Astra DB Search** components with the **Token** and **API Endpoint** that we generated earlier, and the OpenAI Embeddings components with your OpenAI API key.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-ingestion-fields.png",
|
||||
dark: "img/astra-ingestion-fields.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
And run it! This will ingest the Text data from your file into the Astra DB database.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-ingestion-run.png",
|
||||
dark: "img/astra-ingestion-run.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
Now, on to the **RAG Flow**. This flow is responsible for generating responses to your queries. It will define all of the steps from getting the User's input to generating a response and displaying it in the Interaction Panel.
|
||||
|
||||
The RAG flow is a bit more complex. It consists of:
|
||||
|
||||
- **Chat Input** component that defines where to put the user input coming from the Interaction Panel
|
||||
- **OpenAI Embeddings** component that generates embeddings from the user input
|
||||
- **Astra DB Search** component that retrieves the most relevant Records from the Astra DB database
|
||||
- **Text Output** component that turns the Records into Text by concatenating them and also displays it in the Interaction Panel
|
||||
- One interesting point you'll see here is that this component is named `Extracted Chunks`, and that is how it will appear in the Interaction Panel
|
||||
- **Prompt** component that takes in the user input and the retrieved Records as text and builds a prompt for the OpenAI model
|
||||
- **OpenAI** component that generates a response to the prompt
|
||||
- **Chat Output** component that displays the response in the Interaction Panel
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-rag-flow.png",
|
||||
dark: "img/astra-rag-flow.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
To run it all we have to do is click on the ⚡ _Run_ button and start interacting with your RAG application.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-rag-flow-run.png",
|
||||
dark: "img/astra-rag-flow-run.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
This opens the Interaction Panel where you can chat with your data.
|
||||
|
||||
Because this flow has a **Chat Input** and a **Text Output** component, the Panel displays a chat input at the bottom and the Extracted Chunks section on the left.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-rag-flow-interaction-panel.png",
|
||||
dark: "img/astra-rag-flow-interaction-panel.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
Once we interact with it we get a response and the Extracted Chunks section is updated with the retrieved records.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/astra-rag-flow-interaction-panel-interaction.png",
|
||||
dark: "img/astra-rag-flow-interaction-panel-interaction.png",
|
||||
}}
|
||||
style={{ width: "80%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
And that's it! You have successfully run a RAG application using Astra DB and Langflow.
|
||||
|
||||
# Conclusion
|
||||
|
||||
In this guide, we have learned how to run a RAG application using Astra DB and Langflow.
|
||||
We have seen how to create an Astra DB database, import the Astra DB RAG Flows project into Langflow, and run the ingestion and RAG flows.
|
||||
|
|
@ -1,6 +1,13 @@
|
|||
import ThemedImage from "@theme/ThemedImage";
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# 👋 Welcome to Langflow
|
||||
|
||||
Langflow is an easy way to build from simple to complex AI applications. It is a low-code platform that allows you to integrate AI into everything you do.
|
||||
|
||||
{" "}
|
||||
|
||||
# 👋 Welcome to Langflow
|
||||
|
||||
|
|
@ -30,7 +37,8 @@ You can install **Langflow** with [pipx](https://pipx.pypa.io/stable/installatio
|
|||
Pipx can fetch the missing Python version for you, but you can also install it manually.
|
||||
|
||||
```bash
|
||||
pip install langflow -U
|
||||
# Remember to check if you have Python 3.10 installed
|
||||
python -m pip install langflow -U
|
||||
# or
|
||||
pipx install langflow --python python3.10 --fetch-missing-python
|
||||
```
|
||||
|
|
@ -38,11 +46,19 @@ pipx install langflow --python python3.10 --fetch-missing-python
|
|||
Or you can install a pre-release version using:
|
||||
|
||||
```bash
|
||||
pip install langflow --pre --force-reinstall
|
||||
python -m pip install langflow --pre --force-reinstall
|
||||
# or
|
||||
pipx install langflow --python python3.10 --fetch-missing-python --pip-args="--pre --force-reinstall"
|
||||
```
|
||||
|
||||
<Admonition type="tip">
|
||||
<p>
|
||||
Please, check out our [Possible Installation Issues
|
||||
section](/migration/possible-installation-issues) if you encounter any
|
||||
problems.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
We recommend using --force-reinstall to ensure you have the latest version of Langflow and its dependencies.
|
||||
|
||||
### ⛓️ Running Langflow
|
||||
|
|
@ -50,14 +66,14 @@ We recommend using --force-reinstall to ensure you have the latest version of La
|
|||
Langflow can be run in a variety of ways, including using the command-line interface (CLI) or HuggingFace Spaces.
|
||||
|
||||
```bash
|
||||
langflow run # or langflow --help
|
||||
python -m langflow run # or langflow --help
|
||||
```
|
||||
|
||||
#### 🤗 HuggingFace Spaces
|
||||
|
||||
Hugging Face provides a great alternative for running Langflow in their Spaces environment. This means you can run Langflow without any local installation required.
|
||||
|
||||
The first step is to go to the [Langflow Space](https://huggingface.co/spaces/Logspace/Langflow?duplicate=true).
|
||||
The first step is to go to the [Langflow Space](https://huggingface.co/spaces/Langflow/Langflow?duplicate=true) or the [Langflow 1.0 Preview Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true).
|
||||
|
||||
Remember to use a Chromium-based browser for the best experience. You'll be presented with the following screen:
|
||||
|
||||
|
|
@ -91,5 +107,17 @@ langflow run [OPTIONS]
|
|||
Find more information about the available options by running:
|
||||
|
||||
```bash
|
||||
langflow --help
|
||||
python -m langflow --help
|
||||
```
|
||||
|
||||
## Find out more about 1.0
|
||||
|
||||
<Admonition type="caution" icon="🚧" title="ZONE UNDER CONSTRUCTION">
|
||||
<p>
|
||||
We are currently working on updating the documentation for Langflow 1.0.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
To get you learning more about what's new and why you should be excited about Langflow 1.0,
|
||||
go to [A new chapter for Langflow](/whats-new/a-new-chapter-langflow) and also come back often
|
||||
to check out our [migration guides](/whats-new/migrating-to-one-point-zero) as we release them.
|
||||
|
|
|
|||
|
|
@ -1,12 +1,12 @@
|
|||
import Admonition from '@theme/Admonition';
|
||||
import Admonition from "@theme/Admonition";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
|
||||
# Compatibility with Previous Versions
|
||||
|
||||
|
||||
## TLDR;
|
||||
|
||||
- You'll need to add a few components to your flow to make it compatible with the new version of Langflow.
|
||||
- Add a Runnable Executor, connect it to the last component (a Chain or an Agent) in your flow, and connect a Chat Input and a Chat Output to the Runnable Executor. This should work *most of the time*.
|
||||
- Add a Runnable Executor, connect it to the last component (a Chain or an Agent) in your flow, and connect a Chat Input and a Chat Output to the Runnable Executor. This should work _most of the time_.
|
||||
- You might also need to update the Chain or Agent component to the latest version.
|
||||
- Most Components will work as they are, but you'll need to add an Input and an Output to your flow.
|
||||
- You can use the Runnable Executor to run a LangChain runnable (which is the output of many components before 1.0)
|
||||
|
|
@ -22,23 +22,31 @@ We've tried to make it as easy as possible for you to adapt your existing flows
|
|||
|
||||
## How to Adapt Your Existing Flows
|
||||
|
||||
|
||||
The steps to take are few but not always simple. Here's how you can adapt your existing flows to work seamlessly in the new version of Langflow:
|
||||
|
||||
<Admonition type="caution">
|
||||
<p>**Caution:**</p>
|
||||
<p>While this should work most of the time, it might not work for all flows. You might need to update the Chain or Agent component to the latest version. Please let us know if you encounter any issues.</p>
|
||||
<p>
|
||||
While this should work most of the time, it might not work for all flows.
|
||||
You might need to update the Chain or Agent component to the latest version.
|
||||
Please let us know if you encounter any issues.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
1. **Check if your flow ends with a Chain or Agent component**.
|
||||
- If it does not, it *should* work as it is because it probably was not a chat flow.
|
||||
- If it does not, it _should_ work as it is because it probably was not a chat flow.
|
||||
2. **Add a Runnable Executor**.
|
||||
- Add a Runnable Executor to the end of your flow.
|
||||
- Connect the last component (a Chain or an Agent) in your flow to the Runnable Executor.
|
||||
- Add a Runnable Executor to the end of your flow.
|
||||
- Connect the last component (a Chain or an Agent) in your flow to the Runnable Executor.
|
||||
3. **Add a Chat Input and a Chat Output**.
|
||||
- Add a Chat Input and a Chat Output to your flow.
|
||||
- Connect the Chat Input to the Runnable Executor.
|
||||
- Connect the Chat Output to the Runnable Executor.
|
||||
|
||||
{/* Add picture of the flow */}
|
||||
- Add a Chat Input and a Chat Output to your flow.
|
||||
- Connect the Chat Input to the Runnable Executor.
|
||||
- Connect the Chat Output to the Runnable Executor.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/runnable-executor.png",
|
||||
dark: "img/runnable-executor.png",
|
||||
}}
|
||||
style={{ width: "100%", margin: "20px auto" }}
|
||||
/>
|
||||
|
|
|
|||
27
docs/docs/migration/possible-installation-issues.mdx
Normal file
27
docs/docs/migration/possible-installation-issues.mdx
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
# Possible Installation Issues
|
||||
|
||||
This is a list of possible issues that you may encounter when installing Langflow 1.0 Alpha and how to solve them.
|
||||
|
||||
## _`No module named 'langflow.__main__'`_
|
||||
|
||||
TLDR;
|
||||
|
||||
- Run _`python -m langflow run`_ instead of _`langflow run`_. If that doesn't work, run _`python -m pip install langflow --pre -U`_ to reinstall langflow.
|
||||
- If the above doesn't work, run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall langflow and its dependencies.
|
||||
|
||||
When you try to run langflow using the command `langflow run`, you may encounter the following error:
|
||||
|
||||
```bash
|
||||
> langflow run
|
||||
Traceback (most recent call last):
|
||||
File ".../langflow", line 5, in <module>
|
||||
from langflow.__main__ import main
|
||||
ModuleNotFoundError: No module named 'langflow.__main__'
|
||||
```
|
||||
|
||||
For this error to occur, two scenarios are possible:
|
||||
|
||||
1. You've installed langflow using _`pip install langflow`_ but you already had a previous version of langflow installed in your system.
|
||||
In this case, you might not be running the correct executable.
|
||||
To solve this issue, you can run the correct executable by running _`python -m langflow run`_ instead of _`langflow run`_ and if that doesn't work, you can try uninstalling langflow and reinstalling it using _`python -m pip install langflow --pre -U`_.
|
||||
2. Some version conflicts might have occurred during the installation process. Run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall langflow and its dependencies.
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
# Text and Record
|
||||
|
||||
In Langflow 1.0 we added two main input and output types: Text and Record. Text is a simple string input and output type, while Record is a structure very similar to a dictionary in Python. It is a key-value pair data structure.
|
||||
|
||||
We've created a few components to help you work with these types. Let's see how a few of them work.
|
||||
|
||||
### Records To Text
|
||||
|
||||
This is a Component that takes in Records and outputs a Text. It does this using a template string and concatenating the values of the Record, one per line.
|
||||
|
||||
If we have the following Records:
|
||||
|
||||
```json
|
||||
{
|
||||
"sender_name": "Alice",
|
||||
"message": "Hello!"
|
||||
}
|
||||
{
|
||||
"sender_name": "John",
|
||||
"message": "Hi!"
|
||||
}
|
||||
```
|
||||
|
||||
And if the template string is _`{sender_name}: {message}`_, the output will be:
|
||||
|
||||
```
|
||||
Alice: Hello!
|
||||
John: Hi!
|
||||
```
|
||||
|
||||
### Create Record
|
||||
|
||||
This Component allows you to create a Record from a number of inputs. You can add as many key-value pairs as you want (as long as it is less than 15 😅). Once you've picked that number you'll need to write the name of the Key and can pass Text values from other components to it.
|
||||
|
||||
### Documents To Records
|
||||
|
||||
This Component takes in a [LangChain](https://langchain.com) Document and outputs a Record. It does this by extracting the _`page_content`_ and the _`metadata`_ from the Document and adding them to the Record as _`text`_ and _`data`_ respectively.
|
||||
|
||||
## Why is this useful?
|
||||
|
||||
The idea was to create a unified way to work with complex data in Langflow, and to make it easier to work with data that is not just a simple string. This way you can create more complex workflows and use the data in more ways.
|
||||
|
||||
## What's next?
|
||||
|
||||
We are planning to integrate an array of modalities to Langflow, such as images, audio, and video. This will allow you to create even more complex workflows and use cases. Stay tuned for more updates! 🚀
|
||||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
# First things first
|
||||
|
||||
Thank you all for being part of the Langflow community. The journey so far has been amazing and we are happy to have you with us.
|
||||
**Thank you all for being part of the Langflow community**. The journey so far has been amazing and we are happy to have you with us.
|
||||
|
||||
We have some exciting news to share with you. Langflow is changing, and we want to tell you all about it.
|
||||
|
||||
|
|
@ -61,11 +61,11 @@ We wanted to create start projects that would help you learn about new features
|
|||
|
||||
For now, we have:
|
||||
|
||||
- **[Basic Prompting (Ahoy World!)](/getting-started/basic-prompting)**: A simple flow that shows you how to use the Prompt Component and how to talk like a pirate.
|
||||
- **[Vector Store RAG](/getting-started/rag-with-astradb)**: A flow that shows you how to ingest data into a Vector Store and then use it to run a RAG application.
|
||||
- **[Memory Chatbot](/getting-started/memory-chatbot)**: This one shows you how to create a simple chatbot that can remember things about the user.
|
||||
- **[Document QA](/getting-started/document-qa)**: This flow shows you how to build a simple flow that helps you get answers about a document.
|
||||
- **[Blog Writer](/getting-started/blog-writer)**: Shows you how you can expand on the Prompt variables and be creative about what inputs you add to it.
|
||||
- **[Basic Prompting (Hello, world!)](/guides/basic-prompting)**: A simple flow that shows you how to use the Prompt Component and how to talk like a pirate.
|
||||
- **[Vector Store RAG](/guides/rag-with-astradb)**: A flow that shows you how to ingest data into a Vector Store and then use it to run a RAG application.
|
||||
- **[Memory Chatbot](/guides/memory-chatbot)**: This one shows you how to create a simple chatbot that can remember things about the user.
|
||||
- **[Document QA](/guides/document-qa)**: This flow shows you how to build a simple flow that helps you get answers about a document.
|
||||
- **[Blog Writer](/guides/blog-writer)**: Shows you how you can expand on the Prompt variables and be creative about what inputs you add to it.
|
||||
|
||||
As always, your feedback is invaluable, so please let us know what you think of the new starter projects and what you would like to see in the future.
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,16 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Migrating to Langflow 1.0: A Guide
|
||||
|
||||
<Admonition type="caution" icon="🚧" title="ZONE UNDER CONSTRUCTION">
|
||||
<p>
|
||||
We are currently working on updating this guide to provide the most accurate
|
||||
and up-to-date information on migrating to Langflow 1.0. We will be adding
|
||||
more content and examples to help you navigate the changes and improvements
|
||||
in the new version.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
Langflow 1.0 is a significant update that brings many exciting changes and improvements to the platform.
|
||||
This guide will walk you through the key improvements and help you migrate your existing projects to the new version.
|
||||
|
||||
|
|
@ -42,19 +53,19 @@ We will create guides on how to interweave LangChain components with our Core co
|
|||
|
||||
Langflow 1.0 continues to support LangChain while also introducing support for multiple frameworks. This is another important boon that adding the paradigm of data flow brings to the table. Find out how to leverage the power of different frameworks in your projects.
|
||||
|
||||
[Learn more about Supported Frameworks](../migration/supported-frameworks)
|
||||
**Guide coming soon**
|
||||
|
||||
## Sidebar Redesign and Customizable Interaction Panel
|
||||
|
||||
We've expanded on the chat experience by creating a customizable interaction panel that allows you to design a panel that fits your needs and interact with it. The sidebar has also been redesigned to provide a more intuitive and user-friendly experience. Explore the new sidebar and interaction panel features to enhance your workflow.
|
||||
|
||||
[Learn more about some of the UI updates](../migration/sidebar-and-interaction-panel)
|
||||
**Guide coming soon**
|
||||
|
||||
## New Native Categories and Components
|
||||
|
||||
Langflow 1.0 introduces many new native categories, including Inputs, Outputs, Helpers, Experimental, Models, and more. Discover the new components available, such as Chat Input, Prompt, Files, API Request, and others.
|
||||
|
||||
[Learn more about New Categories and Components](../migration/new-categories-and-components)
|
||||
**Guide coming soon**
|
||||
|
||||
## New Way of Using Langflow: Text and Record (and more to come)
|
||||
|
||||
|
|
@ -66,7 +77,7 @@ With the introduction of Text and Record types connections between Components ar
|
|||
|
||||
Almost all components in Langflow 1.0 are now CustomComponents, allowing you to check and modify the code of each component. Discover how to leverage this feature to customize your components to your specific needs.
|
||||
|
||||
[Learn more about CustomComponent](../migration/custom-component)
|
||||
**Guide coming soon**
|
||||
|
||||
## Compatibility with Previous Versions
|
||||
|
||||
|
|
@ -78,31 +89,31 @@ To use flows built in previous versions of Langflow, you can utilize the experim
|
|||
|
||||
Langflow 1.0 allows you to have more than one flow in the canvas and run them separately. Discover how to create and manage multiple flows within a single project.
|
||||
|
||||
[Learn more about Multiple Flows](../migration/multiple-flows)
|
||||
**Guide coming soon**
|
||||
|
||||
## Improved Component Status
|
||||
|
||||
Each component now displays its status more clearly, allowing you to quickly identify any issues or errors. Explore how to use the new component status feature to troubleshoot and optimize your flows.
|
||||
|
||||
[Learn more about Component Status](../migration/component-status-and-data-passing)
|
||||
**Guide coming soon**
|
||||
|
||||
## Connecting Output Components
|
||||
|
||||
You can now connect Output components to any other component (that has a Text output), providing a better understanding of the data flow. Explore the possibilities of connecting Output components and how it enhances your flow's functionality.
|
||||
|
||||
[Learn more about Connecting Output Components](../migration/connecting-output-components)
|
||||
**Guide coming soon**
|
||||
|
||||
## Renaming and Editing Component Descriptions
|
||||
|
||||
Langflow 1.0 allows you to rename and edit the description of each component, making it easier to understand and interact with the flow. Learn how to customize your component names and descriptions for improved clarity.
|
||||
|
||||
[Learn more about Renaming and Editing Components](../migration/renaming-and-editing-components)
|
||||
**Guide coming soon**
|
||||
|
||||
## Passing Tweaks and Inputs in the API
|
||||
|
||||
Things got a whole lot easier. You can now pass tweaks and inputs in the API by referencing the Display Name of the component. Discover how to leverage this feature to dynamically control your flow's behavior.
|
||||
|
||||
[Learn more about Passing Tweaks and Inputs](../migration/passing-tweaks-and-inputs)
|
||||
**Guide coming soon**
|
||||
|
||||
## Global Variables for Text Fields
|
||||
|
||||
|
|
@ -114,12 +125,12 @@ Global Variables can be used in any Text Field across your projects. Learn how t
|
|||
|
||||
Explore the experimental components available in Langflow 1.0, such as SubFlow, which allows you to load a flow as a component dynamically, and Flow as Tool, which enables you to use a flow as a tool for an Agent.
|
||||
|
||||
[Learn more about Experimental Components](../migration/experimental-components)
|
||||
**Guide coming soon**
|
||||
|
||||
## Experimental State Management System
|
||||
|
||||
We are experimenting with a State Management system for flows that allows components to trigger other components and pass messages between them using the Notify and Listen components. Discover how to leverage this system to create more dynamic and interactive flows.
|
||||
|
||||
[Learn more about State Management](../migration/state-management)
|
||||
**Guide coming soon**
|
||||
|
||||
We hope this guide helps you navigate the changes and improvements in Langflow 1.0. If you have any questions or need further assistance, please don't hesitate to reach out to us in our [Discord](https://discord.gg/wZSWQaukgJ).
|
||||
We hope this guide helps you navigate the changes and improvements in Langflow 1.0. If you have any questions or need further assistance, please don't hesitate to reach out to us in our [Discord](https://discord.gg/wZSWQaukgJ).
|
||||
|
|
|
|||
|
|
@ -7,11 +7,11 @@ module.exports = {
|
|||
items: [
|
||||
"index",
|
||||
"getting-started/cli",
|
||||
"getting-started/basic-prompting",
|
||||
"getting-started/document-qa",
|
||||
"getting-started/blog-writer",
|
||||
"getting-started/memory-chatbot",
|
||||
"getting-started/rag-with-astradb",
|
||||
// "guides/basic-prompting",
|
||||
// "guides/document-qa",
|
||||
// "guides/blog-writer",
|
||||
// "guides/memory-chatbot",
|
||||
"guides/rag-with-astradb",
|
||||
],
|
||||
},
|
||||
{
|
||||
|
|
@ -29,18 +29,19 @@ module.exports = {
|
|||
label: " Migration Guides",
|
||||
collapsed: false,
|
||||
items: [
|
||||
"migration/possible-installation-issues",
|
||||
// "migration/flow-of-data",
|
||||
"migration/inputs-and-outputs",
|
||||
// "migration/supported-frameworks",
|
||||
"migration/sidebar-and-interaction-panel",
|
||||
"migration/new-categories-and-components",
|
||||
// "migration/sidebar-and-interaction-panel",
|
||||
// "migration/new-categories-and-components",
|
||||
"migration/text-and-record",
|
||||
// "migration/custom-component",
|
||||
"migration/compatibility",
|
||||
"migration/multiple-flows",
|
||||
"migration/component-status-and-data-passing",
|
||||
// "migration/multiple-flows",
|
||||
// "migration/component-status-and-data-passing",
|
||||
// "migration/connecting-output-components",
|
||||
"migration/renaming-and-editing-components",
|
||||
// "migration/renaming-and-editing-components",
|
||||
// "migration/passing-tweaks-and-inputs",
|
||||
"migration/global-variables",
|
||||
// "migration/experimental-components",
|
||||
|
|
@ -55,17 +56,17 @@ module.exports = {
|
|||
"guidelines/login",
|
||||
"guidelines/api",
|
||||
"guidelines/components",
|
||||
"guidelines/features",
|
||||
// "guidelines/features",
|
||||
"guidelines/collection",
|
||||
"guidelines/prompt-customization",
|
||||
"guidelines/chat-interface",
|
||||
"guidelines/chat-widget",
|
||||
"guidelines/custom-component",
|
||||
// "guidelines/chat-interface",
|
||||
// "guidelines/chat-widget",
|
||||
// "guidelines/custom-component",
|
||||
],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Step-by-Step Guides",
|
||||
label: "Extended Components",
|
||||
collapsed: false,
|
||||
items: ["guides/langfuse_integration"],
|
||||
},
|
||||
|
|
@ -101,20 +102,20 @@ module.exports = {
|
|||
"components/tools",
|
||||
],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Examples",
|
||||
collapsed: false,
|
||||
items: [
|
||||
"examples/flow-runner",
|
||||
"examples/conversation-chain",
|
||||
"examples/buffer-memory",
|
||||
"examples/csv-loader",
|
||||
"examples/searchapi-tool",
|
||||
"examples/serp-api-tool",
|
||||
"examples/python-function",
|
||||
],
|
||||
},
|
||||
// {
|
||||
// type: "category",
|
||||
// label: "Examples",
|
||||
// collapsed: false,
|
||||
// items: [
|
||||
// // "examples/flow-runner",
|
||||
// // "examples/conversation-chain",
|
||||
// // "examples/buffer-memory",
|
||||
// // "examples/csv-loader",
|
||||
// // "examples/searchapi-tool",
|
||||
// // "examples/serp-api-tool",
|
||||
// // "examples/python-function",
|
||||
// ],
|
||||
// },
|
||||
{
|
||||
type: "category",
|
||||
label: "Deployment",
|
||||
|
|
|
|||
BIN
docs/static/img/hero.png
vendored
Normal file
BIN
docs/static/img/hero.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 580 KiB |
BIN
docs/static/img/runnable-executor.png
vendored
Normal file
BIN
docs/static/img/runnable-executor.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 207 KiB |
503
poetry.lock
generated
503
poetry.lock
generated
|
|
@ -167,13 +167,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "anthropic"
|
||||
version = "0.21.3"
|
||||
version = "0.23.1"
|
||||
description = "The official Python library for the anthropic API"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "anthropic-0.21.3-py3-none-any.whl", hash = "sha256:5869115453b543a46ded6515c9f29b8d610b6e94bbba3230ad80ac947d2b0862"},
|
||||
{file = "anthropic-0.21.3.tar.gz", hash = "sha256:02f1ab5694c497e2b2d42d30d51a4f2edcaca92d2ec86bb64fe78a9c7434a869"},
|
||||
{file = "anthropic-0.23.1-py3-none-any.whl", hash = "sha256:6dc5779dae83a5834864f4a4af0166c972b70f4cb8fd2765e1558282cc6d6242"},
|
||||
{file = "anthropic-0.23.1.tar.gz", hash = "sha256:9325103702cbc96bb09d1b58c36bde75c726f6a01029fb4d85f41ebba07e9066"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -455,17 +455,17 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.34.77"
|
||||
version = "1.34.79"
|
||||
description = "The AWS SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "boto3-1.34.77-py3-none-any.whl", hash = "sha256:7abd327980258ec2ae980d2ff7fc32ede7448146b14d34c56bf0be074e2a149b"},
|
||||
{file = "boto3-1.34.77.tar.gz", hash = "sha256:8ebed4fa5a3b84dd4037f28226985af00e00fb860d739fc8b1ed6381caa4b330"},
|
||||
{file = "boto3-1.34.79-py3-none-any.whl", hash = "sha256:265b0b4865e8c07e27abb32a31d2bd9129bb009b1d89ca0783776ec084886123"},
|
||||
{file = "boto3-1.34.79.tar.gz", hash = "sha256:139dd2d94eaa0e3213ff37ba7cf4cb2e3823269178fe8f3e33c965f680a9ddde"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.34.77,<1.35.0"
|
||||
botocore = ">=1.34.79,<1.35.0"
|
||||
jmespath = ">=0.7.1,<2.0.0"
|
||||
s3transfer = ">=0.10.0,<0.11.0"
|
||||
|
||||
|
|
@ -474,13 +474,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
|
|||
|
||||
[[package]]
|
||||
name = "botocore"
|
||||
version = "1.34.77"
|
||||
version = "1.34.79"
|
||||
description = "Low-level, data-driven core of boto 3."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "botocore-1.34.77-py3-none-any.whl", hash = "sha256:6d6a402032ca0b89525212356a865397f8f2839683dd53d41b8cee1aa84b2b4b"},
|
||||
{file = "botocore-1.34.77.tar.gz", hash = "sha256:6dab60261cdbfb7d0059488ea39408d5522fad419c004ba5db3484e6df854ea8"},
|
||||
{file = "botocore-1.34.79-py3-none-any.whl", hash = "sha256:a42a014d3dbaa9ef123810592af69f9e55b456c5be3ac9efc037325685519e83"},
|
||||
{file = "botocore-1.34.79.tar.gz", hash = "sha256:6b59b0f7de219d383a2a633f6718c2600642ebcb707749dc6c67a6a436474b7a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -1129,22 +1129,22 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
|
|||
|
||||
[[package]]
|
||||
name = "cohere"
|
||||
version = "5.2.1"
|
||||
version = "5.2.2"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8"
|
||||
files = [
|
||||
{file = "cohere-5.2.1-py3-none-any.whl", hash = "sha256:c694f9d2cdafd87443f54ea5238b51a0fb807f119673e00b814c2a2993368e38"},
|
||||
{file = "cohere-5.2.1.tar.gz", hash = "sha256:7cd5522bb162c05c67b2db0b7aba2a103622e17ece9e885f5ef2de66bb67a324"},
|
||||
{file = "cohere-5.2.2-py3-none-any.whl", hash = "sha256:dae0c7318fa0d15a2147a044a49c1f40164a6c8135a260cc46214058c7c06085"},
|
||||
{file = "cohere-5.2.2.tar.gz", hash = "sha256:43243b86d4abf3140f9077435580ebceaa431fb7d57a9fef6858eee880f26646"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
fastavro = ">=1.9.4,<2.0.0"
|
||||
httpx = ">=0.21.2"
|
||||
pydantic = ">=1.9.2"
|
||||
requests = ">=2.31.0,<3.0.0"
|
||||
requests = ">=2.0.0,<3.0.0"
|
||||
tokenizers = ">=0.15.2,<0.16.0"
|
||||
types-requests = ">=2.31.0.20240311,<3.0.0.0"
|
||||
types-requests = ">=2.0.0,<3.0.0"
|
||||
typing_extensions = ">=4.0.0"
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1515,20 +1515,20 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "deepdiff"
|
||||
version = "6.7.1"
|
||||
version = "7.0.0"
|
||||
description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "deepdiff-6.7.1-py3-none-any.whl", hash = "sha256:58396bb7a863cbb4ed5193f548c56f18218060362311aa1dc36397b2f25108bd"},
|
||||
{file = "deepdiff-6.7.1.tar.gz", hash = "sha256:b367e6fa6caac1c9f500adc79ada1b5b1242c50d5f716a1a4362030197847d30"},
|
||||
{file = "deepdiff-7.0.0-py3-none-any.whl", hash = "sha256:f7bbb845f83ad6b9453a4ab07c579bdc6f1df712edc515740455a9b88c2bc41a"},
|
||||
{file = "deepdiff-7.0.0.tar.gz", hash = "sha256:4e07da4f2a1ae069b7465d264715764f3b36ce181ec89f47050ead61711b1e9a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
ordered-set = ">=4.0.2,<4.2.0"
|
||||
ordered-set = ">=4.1.0,<4.2.0"
|
||||
|
||||
[package.extras]
|
||||
cli = ["click (==8.1.3)", "pyyaml (==6.0.1)"]
|
||||
cli = ["click (==8.1.7)", "pyyaml (==6.0.1)"]
|
||||
optimize = ["orjson"]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1861,13 +1861,13 @@ test = ["pytest (>=6)"]
|
|||
|
||||
[[package]]
|
||||
name = "execnet"
|
||||
version = "2.0.2"
|
||||
version = "2.1.1"
|
||||
description = "execnet: rapid multi-Python deployment"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
|
||||
{file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
|
||||
{file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"},
|
||||
{file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -1965,18 +1965,18 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.109.2"
|
||||
version = "0.110.1"
|
||||
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"},
|
||||
{file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"},
|
||||
{file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"},
|
||||
{file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
|
||||
starlette = ">=0.36.3,<0.37.0"
|
||||
starlette = ">=0.37.2,<0.38.0"
|
||||
typing-extensions = ">=4.8.0"
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -2094,13 +2094,13 @@ vw = ["scikit-learn", "vowpalwabbit (>=8.10.0,<9.0.0)"]
|
|||
|
||||
[[package]]
|
||||
name = "flask"
|
||||
version = "3.0.2"
|
||||
version = "3.0.3"
|
||||
description = "A simple framework for building complex web applications."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"},
|
||||
{file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"},
|
||||
{file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"},
|
||||
{file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -3741,19 +3741,50 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
|
|||
|
||||
[[package]]
|
||||
name = "langchain-anthropic"
|
||||
version = "0.1.4"
|
||||
version = "0.1.6"
|
||||
description = "An integration package connecting AnthropicMessages and LangChain"
|
||||
optional = false
|
||||
python-versions = ">=3.8.1,<4.0"
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_anthropic-0.1.4-py3-none-any.whl", hash = "sha256:9b3e28c1c0f7a502495b240c6c015d7fc57d04fb381fae389ecdce8847de5777"},
|
||||
{file = "langchain_anthropic-0.1.4.tar.gz", hash = "sha256:d772f7111335953d23393cac8173a0a1ee65b5fe0dc137c6b7a6db2a06fbcac4"},
|
||||
{file = "langchain_anthropic-0.1.6-py3-none-any.whl", hash = "sha256:5626f9f2f0d3cc1665a2f5817ea1856dbfa4c745bc6f95b7043c56b6ab85e0c1"},
|
||||
{file = "langchain_anthropic-0.1.6.tar.gz", hash = "sha256:544e5c8c365964c594b80eb1db994e67d90722be9efde460229e5888524545de"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
anthropic = ">=0.17.0,<1"
|
||||
anthropic = ">=0.23.0,<1"
|
||||
defusedxml = ">=0.7.1,<0.8.0"
|
||||
langchain-core = ">=0.1,<0.2"
|
||||
langchain-core = ">=0.1.33,<0.2.0"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-astradb"
|
||||
version = "0.1.0"
|
||||
description = "An integration package connecting Astra DB and LangChain"
|
||||
optional = false
|
||||
python-versions = ">=3.8.1,<4.0"
|
||||
files = [
|
||||
{file = "langchain_astradb-0.1.0-py3-none-any.whl", hash = "sha256:c6686089da343fce8c31e36c9162323e88888300b09d56b72347a19449d7361f"},
|
||||
{file = "langchain_astradb-0.1.0.tar.gz", hash = "sha256:c8a3426c9daa2beeec2dc7a718186b0b9c388082e9543e0bc07363712cc3b947"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
astrapy = ">=0.7.7,<0.8.0"
|
||||
langchain-core = ">=0.1.31,<0.2.0"
|
||||
numpy = ">=1,<2"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-cohere"
|
||||
version = "0.1.0"
|
||||
description = "An integration package connecting Cohere and LangChain"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_cohere-0.1.0-py3-none-any.whl", hash = "sha256:f60e9eb41f7d4ead9659bddb3fae7aa18ddc3fdf2b2867be4bd8a565229f488d"},
|
||||
{file = "langchain_cohere-0.1.0.tar.gz", hash = "sha256:960551293ea58d170fad37d44657d3ae4587f6b2e8f3f58922c53c59b9e9d85c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cohere = ">=5.1.4,<6.0.0"
|
||||
langchain-core = ">=0.1.32,<0.2.0"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-astradb"
|
||||
|
|
@ -3872,18 +3903,17 @@ images = ["pillow (>=10.1.0,<11.0.0)"]
|
|||
|
||||
[[package]]
|
||||
name = "langchain-openai"
|
||||
version = "0.0.5"
|
||||
version = "0.1.1"
|
||||
description = "An integration package connecting OpenAI and LangChain"
|
||||
optional = false
|
||||
python-versions = ">=3.8.1,<4.0"
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_openai-0.0.5-py3-none-any.whl", hash = "sha256:93b37dfac274adad65e46d5d6e71411e00c6984bcc5e10f1d6bb58e7944dc01b"},
|
||||
{file = "langchain_openai-0.0.5.tar.gz", hash = "sha256:f317fee5b652949ad96ad7edf8ef7a044a6a3f0cc71d1e12f9d5261789fd68c4"},
|
||||
{file = "langchain_openai-0.1.1-py3-none-any.whl", hash = "sha256:5cf4df5d2550af673337eafedaeec014ba52f9a25aeb8451206ca254bed01e5c"},
|
||||
{file = "langchain_openai-0.1.1.tar.gz", hash = "sha256:d10e9a9fc4c8ea99ca98f23808ce44c7dcdd65354ac07ad10afe874ecf3401ca"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
langchain-core = ">=0.1.16,<0.2"
|
||||
numpy = ">=1,<2"
|
||||
langchain-core = ">=0.1.33,<0.2.0"
|
||||
openai = ">=1.10.0,<2.0.0"
|
||||
tiktoken = ">=0.5.2,<0.6.0"
|
||||
|
||||
|
|
@ -3920,7 +3950,7 @@ six = "*"
|
|||
|
||||
[[package]]
|
||||
name = "langflow-base"
|
||||
version = "0.0.17"
|
||||
version = "0.0.21"
|
||||
description = "A Python package with a built-in web application"
|
||||
optional = false
|
||||
python-versions = ">=3.10,<3.12"
|
||||
|
|
@ -3931,19 +3961,19 @@ develop = true
|
|||
alembic = "^1.13.0"
|
||||
bcrypt = "4.0.1"
|
||||
cachetools = "^5.3.1"
|
||||
chromadb = "^0.4.24"
|
||||
cryptography = "^42.0.5"
|
||||
docstring-parser = "^0.15"
|
||||
duckdb = "^0.9.2"
|
||||
fastapi = "^0.109.0"
|
||||
emoji = "^2.11.0"
|
||||
fastapi = "^0.110.1"
|
||||
gunicorn = "^21.2.0"
|
||||
httpx = "*"
|
||||
jq = {version = "^1.7.0", markers = "sys_platform != \"win32\""}
|
||||
langchain = "~0.1.0"
|
||||
langchain-anthropic = "^0.1.4"
|
||||
langchain-astradb = "^0.1.0"
|
||||
langchain = "~0.1.14"
|
||||
langchain-experimental = "*"
|
||||
loguru = "^0.7.1"
|
||||
multiprocess = "^0.70.14"
|
||||
nest-asyncio = "^1.6.0"
|
||||
orjson = "3.9.15"
|
||||
pandas = "2.2.0"
|
||||
passlib = "^1.7.4"
|
||||
|
|
@ -3957,9 +3987,9 @@ python-jose = "^3.3.0"
|
|||
python-multipart = "^0.0.7"
|
||||
python-socketio = "^5.11.0"
|
||||
rich = "^13.7.0"
|
||||
sqlmodel = "^0.0.14"
|
||||
typer = "^0.9.0"
|
||||
uvicorn = "^0.27.0"
|
||||
sqlmodel = "^0.0.16"
|
||||
typer = "^0.12.0"
|
||||
uvicorn = "^0.29.0"
|
||||
websockets = "*"
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -3973,13 +4003,13 @@ url = "src/backend/base"
|
|||
|
||||
[[package]]
|
||||
name = "langfuse"
|
||||
version = "2.21.2"
|
||||
version = "2.22.0"
|
||||
description = "A client library for accessing langfuse"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langfuse-2.21.2-py3-none-any.whl", hash = "sha256:bd65858e6326776f65c9b2e414e64fdea0f14402f5c784952af93346dfd489bb"},
|
||||
{file = "langfuse-2.21.2.tar.gz", hash = "sha256:eb7911aa640f020f097cb56eaa7d67f01d39f9e2bdd6226e0c5d642a87f3663c"},
|
||||
{file = "langfuse-2.22.0-py3-none-any.whl", hash = "sha256:9e244e3f8e81c391009b41f51cf8eb8bcd29281a93e90e8b861b231f99a8abe2"},
|
||||
{file = "langfuse-2.22.0.tar.gz", hash = "sha256:e0ed2ced0f8216e88a84331476a21b6ac5e627b340c2b01676d61fb27c368bb4"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -3997,13 +4027,13 @@ openai = ["openai (>=0.27.8)"]
|
|||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.1.39"
|
||||
version = "0.1.40"
|
||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langsmith-0.1.39-py3-none-any.whl", hash = "sha256:85c19177162585728001cb7ae91ab48ca4abe39b7bc1ff783212ac426ded222b"},
|
||||
{file = "langsmith-0.1.39.tar.gz", hash = "sha256:2aec9d2f9cc664042d2121b13da569b0902aff842c86b17b440245d57da84ec5"},
|
||||
{file = "langsmith-0.1.40-py3-none-any.whl", hash = "sha256:aa47d0f5a1eabd5c05ac6ce2cd3e28ccfc554d366e856a27b7c3c17c443881cb"},
|
||||
{file = "langsmith-0.1.40.tar.gz", hash = "sha256:50fdf313741cf94e978de06025fd180b56acf1d1a4549b0fd5453ef23d5461ef"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4030,13 +4060,13 @@ regex = ["regex"]
|
|||
|
||||
[[package]]
|
||||
name = "litellm"
|
||||
version = "1.34.22"
|
||||
version = "1.34.34"
|
||||
description = "Library to easily interface with LLM API providers"
|
||||
optional = false
|
||||
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
|
||||
files = [
|
||||
{file = "litellm-1.34.22-py3-none-any.whl", hash = "sha256:0e573d56d762f4060c53493da4a08c48034b5bb5ba22e34517065739adfd9154"},
|
||||
{file = "litellm-1.34.22.tar.gz", hash = "sha256:ca50ede3ca8d3f9dc2765ca13cf2ff5c4e4b9afb4db222f9d7cb9ee838b6180f"},
|
||||
{file = "litellm-1.34.34-py3-none-any.whl", hash = "sha256:c9eefd4b5adec3c2e6d0ab765a4fcebd475a895c7e417f47f8e677410b607f51"},
|
||||
{file = "litellm-1.34.34.tar.gz", hash = "sha256:d11c9d5296d052a9e5e1187ac7b33683f3a581740abc4de6a9c327d3f3c7187c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4056,12 +4086,12 @@ proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "
|
|||
|
||||
[[package]]
|
||||
name = "llama-cpp-python"
|
||||
version = "0.2.59"
|
||||
version = "0.2.60"
|
||||
description = "Python bindings for the llama.cpp library"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "llama_cpp_python-0.2.59.tar.gz", hash = "sha256:4b19283226ab91c74c6d811d88724a6f32d9dd7d07caf9d8b897dd3372d5d4d2"},
|
||||
{file = "llama_cpp_python-0.2.60.tar.gz", hash = "sha256:0cb98955ae6a14dacb9418d9793e4fe1a5575be3b01a55c1d49d48c79c3b19c3"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4078,19 +4108,19 @@ test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"]
|
|||
|
||||
[[package]]
|
||||
name = "llama-index"
|
||||
version = "0.10.26"
|
||||
version = "0.10.27"
|
||||
description = "Interface between LLMs and your data"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "llama_index-0.10.26-py3-none-any.whl", hash = "sha256:25a3f8c9f63fbdaaf3d760ce59c5dad8afc5cf40431e9e6a28e02b92a820d450"},
|
||||
{file = "llama_index-0.10.26.tar.gz", hash = "sha256:5a6036bcb2449277ede3244cfa1b49d9fab5dba30ad8b212c2df92449cfa7d48"},
|
||||
{file = "llama_index-0.10.27-py3-none-any.whl", hash = "sha256:e4813786c8240504ac8c394bb36a1755c6de7518a2bcbfc9e54ec78724f0af0b"},
|
||||
{file = "llama_index-0.10.27.tar.gz", hash = "sha256:ef7862df50ceb1cd9f43b7e4b476121dd1a230075d2a8fdc84c92b03239602c7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
llama-index-agent-openai = ">=0.1.4,<0.3.0"
|
||||
llama-index-cli = ">=0.1.2,<0.2.0"
|
||||
llama-index-core = ">=0.10.26,<0.11.0"
|
||||
llama-index-core = ">=0.10.27,<0.11.0"
|
||||
llama-index-embeddings-openai = ">=0.1.5,<0.2.0"
|
||||
llama-index-indices-managed-llama-cloud = ">=0.1.2,<0.2.0"
|
||||
llama-index-legacy = ">=0.9.48,<0.10.0"
|
||||
|
|
@ -4135,13 +4165,13 @@ llama-index-llms-openai = ">=0.1.1,<0.2.0"
|
|||
|
||||
[[package]]
|
||||
name = "llama-index-core"
|
||||
version = "0.10.26"
|
||||
version = "0.10.27"
|
||||
description = "Interface between LLMs and your data"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "llama_index_core-0.10.26-py3-none-any.whl", hash = "sha256:e08886b9d353fb45f63d0011e78326627f1ee5c7761585fdd85a9a0dfdbf0365"},
|
||||
{file = "llama_index_core-0.10.26.tar.gz", hash = "sha256:8ea27093a053aed79b7b5d80694152066ddaa5b70116308d48db382c76367a87"},
|
||||
{file = "llama_index_core-0.10.27-py3-none-any.whl", hash = "sha256:805b20a16a417180a32a31956710637af75e22cd0849fec2729447d182197d39"},
|
||||
{file = "llama_index_core-0.10.27.tar.gz", hash = "sha256:01881a1943cb7b37f9f8147212d4a55caeef2d68ec498a0a1b864f79cf9d2be4"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4151,7 +4181,7 @@ deprecated = ">=1.2.9.3"
|
|||
dirtyjson = ">=1.0.8,<2.0.0"
|
||||
fsspec = ">=2023.5.0"
|
||||
httpx = "*"
|
||||
llamaindex-py-client = ">=0.1.15,<0.2.0"
|
||||
llamaindex-py-client = ">=0.1.16,<0.2.0"
|
||||
nest-asyncio = ">=1.5.8,<2.0.0"
|
||||
networkx = ">=3.0"
|
||||
nltk = ">=3.8.1,<4.0.0"
|
||||
|
|
@ -5181,20 +5211,20 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "networkx"
|
||||
version = "3.2.1"
|
||||
version = "3.3"
|
||||
description = "Python package for creating and manipulating graphs and networks"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"},
|
||||
{file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"},
|
||||
{file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"},
|
||||
{file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
default = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"]
|
||||
developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"]
|
||||
doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"]
|
||||
extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"]
|
||||
default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"]
|
||||
developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"]
|
||||
doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"]
|
||||
extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"]
|
||||
test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -5429,14 +5459,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "nvidia-nvjitlink-cu12"
|
||||
version = "12.4.99"
|
||||
version = "12.4.127"
|
||||
description = "Nvidia JIT LTO Library"
|
||||
optional = true
|
||||
python-versions = ">=3"
|
||||
files = [
|
||||
{file = "nvidia_nvjitlink_cu12-12.4.99-py3-none-manylinux2014_aarch64.whl", hash = "sha256:75d6498c96d9adb9435f2bbdbddb479805ddfb97b5c1b32395c694185c20ca57"},
|
||||
{file = "nvidia_nvjitlink_cu12-12.4.99-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c6428836d20fe7e327191c175791d38570e10762edc588fb46749217cd444c74"},
|
||||
{file = "nvidia_nvjitlink_cu12-12.4.99-py3-none-win_amd64.whl", hash = "sha256:991905ffa2144cb603d8ca7962d75c35334ae82bf92820b6ba78157277da1ad2"},
|
||||
{file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"},
|
||||
{file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:fd9020c501d27d135f983c6d3e244b197a7ccad769e34df53a42e276b0e25fa1"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -5546,13 +5575,13 @@ sympy = "*"
|
|||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "1.16.1"
|
||||
version = "1.16.2"
|
||||
description = "The official Python library for the openai API"
|
||||
optional = false
|
||||
python-versions = ">=3.7.1"
|
||||
files = [
|
||||
{file = "openai-1.16.1-py3-none-any.whl", hash = "sha256:77ef3db6110071f7154859e234250fb945a36554207a30a4491092eadb73fcb5"},
|
||||
{file = "openai-1.16.1.tar.gz", hash = "sha256:58922c785d167458b46e3c76e7b1bc2306f313ee9b71791e84cbf590abe160f2"},
|
||||
{file = "openai-1.16.2-py3-none-any.whl", hash = "sha256:46a435380921e42dae218d04d6dd0e89a30d7f3b9d8a778d5887f78003cf9354"},
|
||||
{file = "openai-1.16.2.tar.gz", hash = "sha256:c93d5efe5b73b6cb72c4cd31823852d2e7c84a138c0af3cbe4a8eb32b1164ab2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -5959,18 +5988,18 @@ types-pytz = ">=2022.1.1"
|
|||
|
||||
[[package]]
|
||||
name = "parso"
|
||||
version = "0.8.3"
|
||||
version = "0.8.4"
|
||||
description = "A Python Parser"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"},
|
||||
{file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"},
|
||||
{file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"},
|
||||
{file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
|
||||
testing = ["docopt", "pytest (<6.0.0)"]
|
||||
qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
|
||||
testing = ["docopt", "pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "passlib"
|
||||
|
|
@ -6666,13 +6695,13 @@ pyasn1 = ">=0.4.6,<0.7.0"
|
|||
|
||||
[[package]]
|
||||
name = "pyautogen"
|
||||
version = "0.2.21"
|
||||
version = "0.2.22"
|
||||
description = "Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework"
|
||||
optional = false
|
||||
python-versions = "<3.13,>=3.8"
|
||||
files = [
|
||||
{file = "pyautogen-0.2.21-py3-none-any.whl", hash = "sha256:64c8311d33599ad517e60fa63e125d67ac99dfc4a80790826b603df936b8e7fc"},
|
||||
{file = "pyautogen-0.2.21.tar.gz", hash = "sha256:8b4fde51511d65ceb6e320e6a1d82c9d96684e3605c00ed17805abd8d90b1049"},
|
||||
{file = "pyautogen-0.2.22-py3-none-any.whl", hash = "sha256:022d451ea286914024c5fb0e83f5c6622df925ce02a4e10f410389e9f003cbcc"},
|
||||
{file = "pyautogen-0.2.22.tar.gz", hash = "sha256:8d25ae881779d95c13f477a42c448e91f1038c080c5bf6b905ccc1559f7ac535"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -6694,7 +6723,7 @@ jupyter-executor = ["ipykernel (>=6.29.0)", "jupyter-client (>=8.6.0)", "jupyter
|
|||
lmm = ["pillow", "replicate"]
|
||||
mathchat = ["pydantic (==1.10.9)", "sympy", "wolframalpha"]
|
||||
redis = ["redis"]
|
||||
retrievechat = ["chromadb", "ipython", "pypdf", "sentence-transformers"]
|
||||
retrievechat = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pypdf", "sentence-transformers"]
|
||||
teachable = ["chromadb"]
|
||||
test = ["coverage (>=5.3)", "ipykernel", "nbconvert", "nbformat", "pandas", "pre-commit", "pytest (>=6.1.1,<8)", "pytest-asyncio"]
|
||||
types = ["ipykernel (>=6.29.0)", "jupyter-client (>=8.6.0)", "jupyter-kernel-gateway", "mypy (==1.9.0)", "pytest (>=6.1.1,<8)", "requests", "websocket-client"]
|
||||
|
|
@ -7093,15 +7122,18 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "pypdf"
|
||||
version = "4.1.0"
|
||||
version = "4.2.0"
|
||||
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "pypdf-4.1.0-py3-none-any.whl", hash = "sha256:16cac912a05200099cef3f347c4c7e0aaf0a6d027603b8f9a973c0ea500dff89"},
|
||||
{file = "pypdf-4.1.0.tar.gz", hash = "sha256:01c3257ec908676efd60a4537e525b89d48e0852bc92b4e0aa4cc646feda17cc"},
|
||||
{file = "pypdf-4.2.0-py3-none-any.whl", hash = "sha256:dc035581664e0ad717e3492acebc1a5fc23dba759e788e3d4a9fc9b1a32e72c1"},
|
||||
{file = "pypdf-4.2.0.tar.gz", hash = "sha256:fe63f3f7d1dcda1c9374421a94c1bba6c6f8c4a62173a59b64ffd52058f846b1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[package.extras]
|
||||
crypto = ["PyCryptodome", "cryptography"]
|
||||
dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"]
|
||||
|
|
@ -7316,13 +7348,13 @@ typing-extensions = "*"
|
|||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.0.0"
|
||||
version = "1.0.1"
|
||||
description = "Read key-value pairs from a .env file and set them as environment variables"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"},
|
||||
{file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"},
|
||||
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
|
||||
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -7698,101 +7730,101 @@ langchain = ["langchain (>=0.0.321)"]
|
|||
|
||||
[[package]]
|
||||
name = "rapidfuzz"
|
||||
version = "3.7.0"
|
||||
version = "3.8.1"
|
||||
description = "rapid fuzzy string matching"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:860f438238f1807532aa5c5c25e74c284232ccc115fe84697b78e25d48f364f7"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bb9285abeb0477cdb2f8ea0cf7fd4b5f72ed5a9a7d3f0c0bb4a5239db2fc1ed"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:08671280e0c04d2bb3f39511f13cae5914e6690036fd1eefc3d47a47f9fae634"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04bae4d9c16ce1bab6447d196fb8258d98139ed8f9b288a38b84887985e4227b"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1efa2268b51b68156fb84d18ca1720311698a58051c4a19c40d670057ce60519"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:600b4d4315f33ec0356c0dab3991a5d5761102420bcff29e0773706aa48936e8"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18bc2f13c73d5d34499ff6ada55b052c445d3aa64d22c2639e5ab45472568046"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e11c5e6593be41a555475c9c20320342c1f5585d635a064924956944c465ad4"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d7878025248b99ccca3285891899373f98548f2ca13835d83619ffc42241c626"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b4a7e37fe136022d944374fcd8a2f72b8a19f7b648d2cdfb946667e9ede97f9f"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b5881856f830351aaabd869151124f64a80bf61560546d9588a630a4e933a5de"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:c788b11565cc176fab8fab6dfcd469031e906927db94bf7e422afd8ef8f88a5a"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17a3092e74025d896ef1d67ac236c83494da37a78ef84c712e4e2273c115f1"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-win32.whl", hash = "sha256:e499c823206c9ffd9d89aa11f813a4babdb9219417d4efe4c8a6f8272da00e98"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:91f798cc00cd94a0def43e9befc6e867c9bd8fa8f882d1eaa40042f528b7e2c7"},
|
||||
{file = "rapidfuzz-3.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:d5a3872f35bec89f07b993fa1c5401d11b9e68bcdc1b9737494e279308a38a5f"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ef6b6ab64c4c91c57a6b58e1d690b59453bfa1f1e9757a7e52e59b4079e36631"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f9070b42c0ba030b045bba16a35bdb498a0d6acb0bdb3ff4e325960e685e290"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:63044c63565f50818d885bfcd40ac369947da4197de56b4d6c26408989d48edf"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b0c47860c733a3d73a4b70b97b35c8cbf24ef24f8743732f0d1c412a8c85de"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1b14489b038f007f425a06fcf28ac6313c02cb603b54e3a28d9cfae82198cc0"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be08f39e397a618aab907887465d7fabc2d1a4d15d1a67cb8b526a7fb5202a3e"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16895dc62a7b92028f9c8b6d22830f1cbc77306ee794f461afc6028e1a8d7539"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579cce49dfa57ffd8c8227b3fb53cced54b4df70cec502e63e9799b4d1f44004"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:40998c8dc35fdd221790b8b5134a8d7499adbfab9a5dd9ec626c7e92e17a43ed"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dc3fdb4738a6b83ae27f1d8923b00d3a9c2b5c50da75b9f8b81841839c6e3e1f"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:92b8146fbfb37ac358ef7e0f6b79619e4f793fbbe894b99ea87920f9c0a9d77d"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfceaa7c2914585bb8a043265c39ec09078f13fbf53b5525722fc074306b6fa"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f332d61f51b0b9c8b55a0fb052b4764b6ad599ea8ce948ac47a4388e9083c35e"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-win32.whl", hash = "sha256:dfd1e4819f1f3c47141f86159b44b7360ecb19bf675080b3b40437bf97273ab9"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:594b9c33fc1a86784962043ee3fbaaed875fbaadff72e467c2f7a83cd6c5d69d"},
|
||||
{file = "rapidfuzz-3.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:0b13a6823a1b83ae43f8bf35955df35032bee7bec0daf9b5ab836e0286067434"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:075a419a0ec29be44b3d7f4bcfa5cb7e91e419379a85fc05eb33de68315bd96f"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:51a5b96d2081c3afbef1842a61d63e55d0a5a201473e6975a80190ff2d6f22ca"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9460d8fddac7ea46dff9298eee9aa950dbfe79f2eb509a9f18fbaefcd10894c"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39eb1513ee139ba6b5c01fe47ddf2d87e9560dd7fdee1068f7f6efbae70de34"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eace9fdde58a425d4c9a93021b24a0cac830df167a5b2fc73299e2acf9f41493"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cc77237242303733de47829028a0a8b6ab9188b23ec9d9ff0a674fdcd3c8e7f"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74e692357dd324dff691d379ef2c094c9ec526c0ce83ed43a066e4e68fe70bf6"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2075ac9ee5c15d33d24a1efc8368d095602b5fd9634c5b5f24d83e41903528"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5a8ba64d72329a940ff6c74b721268c2004eecc48558f648a38e96915b5d1c1b"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a1f268a2a37cd22573b4a06eccd481c04504b246d3cadc2d8e8dfa64b575636d"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:42c2e8a2341363c7caf276efdbe1a673fc5267a02568c47c8e980f12e9bc8727"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a9acca34b34fb895ee6a84c436bb919f3b9cd8f43e7003d43e9573a1d990ff74"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9bad6a0fe3bc1753dacaa6229a8ba7d9844eb7ae24d44d17c5f4c51c91a8a95e"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-win32.whl", hash = "sha256:c86bc4b1d2380739e6485396195e30021df509b4923f3f757914e171587bce7c"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d7361608c8e73a1dc0203a87d151cddebdade0098a047c46da43c469c07df964"},
|
||||
{file = "rapidfuzz-3.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:8fdc26e7863e0f63c2185d53bb61f5173ad4451c1c8287b535b30ea25a419a5a"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9b6167468f76779a14b9af66210f68741af94d32d086f19118de4e919f00585c"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bd394e28ff221557ea4d8152fcec3e66d9f620557feca5f2bedc4c21f8cf2f9"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8e70f876ca89a6df344f8157ac60384e8c05a0dfb442da2490c3f1c45238ccf5"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c837f89d86a5affe9ee6574dad6b195475676a6ab171a67920fc99966f2ab2c"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda4550a98658f9a8bcdc03d0498ed1565c1563880e3564603a9eaae28d51b2a"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecd70212fd9f1f8b1d3bdd8bcb05acc143defebd41148bdab43e573b043bb241"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187db4cc8fb54f8c49c67b7f38ef3a122ce23be273032fa2ff34112a2694c3d8"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4604dfc1098920c4eb6d0c6b5cc7bdd4bf95b48633e790c1d3f100a25870691d"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01581b688c5f4f6665b779135e32db0edab1d78028abf914bb91469928efa383"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0828b55ec8ad084febdf4ab0c942eb1f81c97c0935f1cb0be0b4ea84ce755988"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:150c98b65faff17b917b9d36bff8a4d37b6173579c6bc2e38ff2044e209d37a4"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7e4eea225d2bff1aff4c85fcc44716596d3699374d99eb5906b7a7560297460e"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7bc944d7e830cfce0f8b4813875f05904207017b66e25ab7ee757507001310a9"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-win32.whl", hash = "sha256:3e55f02105c451ab6ff0edaaba57cab1b6c0a0241cfb2b306d4e8e1503adba50"},
|
||||
{file = "rapidfuzz-3.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:41851620d2900791d66d9b6092fc163441d7dd91a460c73b07957ff1c517bc30"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e8041c6b2d339766efe6298fa272f79d6dd799965df364ef4e50f488c101c899"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e09d81008e212fc824ea23603ff5270d75886e72372fa6c7c41c1880bcb57ed"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419c8961e861fb5fc5590056c66a279623d1ea27809baea17e00cdc313f1217a"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1522eaab91b9400b3ef16eebe445940a19e70035b5bc5d98aef23d66e9ac1df0"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:611278ce3136f4544d596af18ab8849827d64372e1d8888d9a8d071bf4a3f44d"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4efa9bfc5b955b6474ee077eee154e240441842fa304f280b06e6b6aa58a1d1e"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cc9d3c8261457af3f8756b1f71a9fdc4892978a9e8b967976d2803e08bf972"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce728e2b582fd396bc2559160ee2e391e6a4b5d2e455624044699d96abe8a396"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a6a36c9299e059e0bee3409218bc5235a46570c20fc980cdee5ed21ea6110ad"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9ea720db8def684c1eb71dadad1f61c9b52f4d979263eb5d443f2b22b0d5430a"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:358692f1df3f8aebcd48e69c77c948c9283b44c0efbaf1eeea01739efe3cd9a6"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:faded69ffe79adcefa8da08f414a0fd52375e2b47f57be79471691dad9656b5a"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f9f3dc14fadbd553975f824ac48c381f42192cec9d7e5711b528357662a8d8e"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-win32.whl", hash = "sha256:7be5f460ff42d7d27729115bfe8a02e83fa0284536d8630ee900d17b75c29e65"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd5ad2c12dab2b98340c4b7b9592c8f349730bda9a2e49675ea592bbcbc1360b"},
|
||||
{file = "rapidfuzz-3.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:aa163257a0ac4e70f9009d25e5030bdd83a8541dfa3ba78dc86b35c9e16a80b4"},
|
||||
{file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e50840a8a8e0229563eeaf22e21a203359859557db8829f4d0285c17126c5fb"},
|
||||
{file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632f09e19365ace5ff2670008adc8bf23d03d668b03a30230e5b60ff9317ee93"},
|
||||
{file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:209dda6ae66b702f74a78cef555397cdc2a83d7f48771774a20d2fc30808b28c"},
|
||||
{file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bc0b78572626af6ab134895e4dbfe4f4d615d18dcc43b8d902d8e45471aabba"},
|
||||
{file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ba14850cc8258b3764ea16b8a4409ac2ba16d229bde7a5f495dd479cd9ccd56"},
|
||||
{file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b917764fd2b267addc9d03a96d26f751f6117a95f617428c44a069057653b528"},
|
||||
{file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1252ca156e1b053e84e5ae1c8e9e062ee80468faf23aa5c543708212a42795fd"},
|
||||
{file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86c7676a32d7524e40bc73546e511a408bc831ae5b163029d325ea3a2027d089"},
|
||||
{file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e7d729af2e5abb29caa070ec048aba042f134091923d9ca2ac662b5604577e"},
|
||||
{file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86eea3e6c314a9238de568254a9c591ec73c2985f125675ed5f171d869c47773"},
|
||||
{file = "rapidfuzz-3.7.0.tar.gz", hash = "sha256:620df112c39c6d27316dc1e22046dc0382d6d91fd60d7c51bd41ca0333d867e9"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1b176f01490b48337183da5b4223005bc0c2354a4faee5118917d2fba0bedc1c"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0798e32304b8009d215026bf7e1c448f1831da0a03987b7de30059a41bee92f3"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad4dbd06c1f579eb043b2dcfc635bc6c9fb858240a70f0abd3bed84d8ac79994"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6ec696a268e8d730b42711537e500f7397afc06125c0e8fa9c8211386d315a5"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8a007fdc5cf646e48e361a39eabe725b93af7673c5ab90294e551cae72ff58"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68b185a0397aebe78bcc5d0e1efd96509d4e2f3c4a05996e5c843732f547e9ef"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:267ff42370e031195e3020fff075420c136b69dc918ecb5542ec75c1e36af81f"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:987cd277d27d14301019fdf61c17524f6127f5d364be5482228726049d8e0d10"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bc5a1ec3bd05b55d3070d557c0cdd4412272d51b4966c79aa3e9da207bd33d65"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa223c73c59cc45c12eaa9c439318084003beced0447ff92b578a890288e19eb"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d4276c7ee061db0bac54846933b40339f60085523675f917f37de24a4b3ce0ee"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2ba0e43e9a94d256a704a674c7010e6f8ef9225edf7287cf3e7f66c9894b06cd"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c22b32a57ab47afb207e8fe4bd7bb58c90f9291a63723cafd4e704742166e368"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-win32.whl", hash = "sha256:50db3867864422bf6a6435ea65b9ac9de71ef52ed1e05d62f498cd430189eece"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:bca5acf77508d1822023a85118c2dd8d3c16abdd56d2762359a46deb14daa5e0"},
|
||||
{file = "rapidfuzz-3.8.1-cp310-cp310-win_arm64.whl", hash = "sha256:c763d99cf087e7b2c5be0cf34ae9a0e1b031f5057d2341a0a0ed782458645b7e"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:30c282612b7ebf2d7646ebebfd98dd308c582246a94d576734e4b0162f57baf4"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c6a43446f0cd8ff347b1fbb918dc0d657bebf484ddfa960ee069e422a477428"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4969fe0eb179aedacee53ca8f8f1be3c655964a6d62db30f247fee444b9c52b4"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799f5f221d639d1c2ed8a2348d1edf5e22aa489b58b2cc99f5bf0c1917e2d0f2"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e62bde7d5df3312acc528786ee801c472cae5078b1f1e42761c853ba7fe1072a"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ea3d2e41d8fac71cb63ee72f75bee0ed1e9c50709d4c58587f15437761c1858"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f34a541895627c2bc9ef7757f16f02428a08d960d33208adfb96b33338d0945"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0643a25937fafe8d117f2907606e9940cd1cc905c66f16ece9ab93128299994"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:63044a7b6791a2e945dce9d812a6886e93159deb0464984eb403617ded257f08"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bbc15985c5658691f637a6b97651771147744edfad2a4be56b8a06755e3932fa"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:48b6e5a337a814aec7c6dda5d6460f947c9330860615301f35b519e16dde3c77"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:8c40da44ca20235cda05751d6e828b6b348e7a7c5de2922fa0f9c63f564fd675"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c21d5c7cfa6078c79897e5e482a7e84ff927143d2f3fb020dd6edd27f5469574"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-win32.whl", hash = "sha256:209bb712c448cdec4def6260b9f059bd4681ec61a01568f5e70e37bfe9efe830"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f7641992de44ec2ca54102422be44a8e3fb75b9690ccd74fff72b9ac7fc00ee"},
|
||||
{file = "rapidfuzz-3.8.1-cp311-cp311-win_arm64.whl", hash = "sha256:c458085e067c766112f089f78ce39eab2b69ba027d7bbb11d067a0b085774367"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1905d9319a97bed29f21584ca641190dbc9218a556202b77876f1e37618d2e03"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f176867f438ff2a43e6a837930153ca78fddb3ca94e378603a1e7b860d7869bf"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25498650e30122f4a5ad6b27c7614b4af8628c1d32b19d406410d33f77a86c80"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16153a97efacadbd693ccc612a3285df2f072fd07c121f30c2c135a709537075"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c0264d03dcee1bb975975b77c2fe041820fb4d4a25a99e3cb74ddd083d671ca"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17d79398849c1244f646425cf31d856eab9ebd67b7d6571273e53df724ca817e"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e08b01dc9369941a24d7e512b0d81bf514e7d6add1b93d8aeec3c8fa08a824e"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97c13f156f14f10667e1cfc4257069b775440ce005e896c09ce3aff21c9ae665"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8b76abfec195bf1ee6f9ec56c33ba5e9615ff2d0a9530a54001ed87e5a6ced3b"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b0ba20be465566264fa5580d874ccf5eabba6975dba45857e2c76e2df3359c6d"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:4d5cd86aca3f12e73bfc70015db7e8fc44122da03aa3761138b95112e83f66e4"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:9a16ef3702cecf16056c5fd66398b7ea8622ff4e3afeb00a8db3e74427e850af"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:392582aa784737d95255ca122ebe7dca3c774da900d100c07b53d32cd221a60e"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-win32.whl", hash = "sha256:ceb10039e7346927cec47eaa490b34abb602b537e738ee9914bb41b8de029fbc"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:cc4af7090a626c902c48db9b5d786c1faa0d8e141571e8a63a5350419ea575bd"},
|
||||
{file = "rapidfuzz-3.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:3aff3b829b0b04bdf78bd780ec9faf5f26eac3591df98c35a0ae216c925ae436"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78a0d2a11bb3936463609777c6d6d4984a27ebb2360b58339c699899d85db036"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f8af980695b866255447703bf634551e67e1a4e1c2d2d26501858d9233d886d7"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d1a15fef1938b43468002f2d81012dbc9e7b50eb8533af202b0559c2dc7865d9"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4dbb1ebc9a811f38da33f32ed2bb5f58b149289b89eb11e384519e9ba7ca881"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41219536634bd6f85419f38450ef080cfb519638125d805cf8626443e677dc61"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3f882110f2f4894942e314451773c47e8b1b4920b5ea2b6dd2e2d4079dd3135"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c754ce1fab41b731259f100d5d46529a38aa2c9b683c92aeb7e96ef5b2898cd8"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:718ea99f84b16c4bdbf6a93e53552cdccefa18e12ff9a02c5041e621460e2e61"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9441aca94b21f7349cdb231cd0ce9ca251b2355836e8a02bf6ccbea5b442d7a9"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90167a48de3ed7f062058826608a80242b8561d0fb0cce2c610d741624811a61"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8e02425bfc7ebed617323a674974b70eaecd8f07b64a7d16e0bf3e766b93e3c9"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d48657a404fab82b2754faa813a10c5ad6aa594cb1829dca168a49438b61b4ec"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f8b62fdccc429e6643cefffd5df9c7bca65588d06e8925b78014ad9ad983bf5"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-win32.whl", hash = "sha256:63db612bb6da1bb9f6aa7412739f0e714b1910ec07bc675943044fe683ef192c"},
|
||||
{file = "rapidfuzz-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:bb571dbd4cc93342be0ba632f0b8d7de4cbd9d959d76371d33716d2216090d41"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b27cea618601ca5032ea98ee116ca6e0fe67be7b286bcb0b9f956d64db697472"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d5592b08e3cadc9e06ef3af6a9d66b6ef1bf871ed5acd7f9b1e162d78806a65"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:58999b21d01dd353f49511a61937eac20c7a5b22eab87612063947081855d85f"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ee3909f611cc5860cc8d9f92d039fd84241ce7360b49ea88e657181d2b45f6"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00b5ee47b387fa3805f4038362a085ec58149135dc5bc640ca315a9893a16f9e"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4c647795c5b901091a68e210c76b769af70a33a8624ac496ac3e34d33366c0d"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77ea62879932b32aba77ab23a9296390a67d024bf2f048dee99143be80a4ce26"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fee62ae76e3b8b9fff8aa2ca4061575ee358927ffbdb2919a8c84a98da59f78"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:231dc1cb63b1c8dd78c0597aa3ad3749a86a2b7e76af295dd81609522699a558"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:827ddf2d5d157ac3d1001b52e84c9e20366237a742946599ffc435af7fdd26d0"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c04ef83c9ca3162d200df36e933b3ea0327a2626cee2e01bbe55acbc004ce261"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:747265f39978bbaad356f5c6b6c808f0e8f5e8994875af0119b82b4700c55387"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:14791324f0c753f5a0918df1249b91515f5ddc16281fbaa5ec48bff8fa659229"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-win32.whl", hash = "sha256:b7b9cbc60e3eb08da6d18636c62c6eb6206cd9d0c7ad73996f7a1df3fc415b27"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:2084193fd8fd346db496a2220363437eb9370a06d1d5a7a9dba00a64390c6a28"},
|
||||
{file = "rapidfuzz-3.8.1-cp39-cp39-win_arm64.whl", hash = "sha256:c9597a05d08e8103ad59ebdf29e3fbffb0d0dbf3b641f102cfbeadc3a77bde51"},
|
||||
{file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f4174079dfe8ed1f13ece9bde7660f19f98ab17e0c0d002d90cc845c3a7e238"},
|
||||
{file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07d7d4a3c49a15146d65f06e44d7545628ca0437c929684e32ef122852f44d95"},
|
||||
{file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ef119fc127c982053fb9ec638dcc3277f83b034b5972eb05941984b9ec4a290"},
|
||||
{file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e57f9c2367706a320b78e91f8bf9a3b03bf9069464eb7b54455fa340d03e4c"},
|
||||
{file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6d4f1956fe1fc618e34ac79a6ed84fff5a6f23e41a8a476dd3e8570f0b12f02b"},
|
||||
{file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:313bdcd16e9cd5e5568b4a31d18a631f0b04cc10a3fd916e4ef75b713e6f177e"},
|
||||
{file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a02def2eb526cc934d2125533cf2f15aa71c72ed4397afca38427ab047901e88"},
|
||||
{file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9d5d924970b07128c61c08eebee718686f4bd9838ef712a50468169520c953f"},
|
||||
{file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1edafc0a2737df277d3ddf401f3a73f76e246b7502762c94a3916453ae67e9b1"},
|
||||
{file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:81fd28389bedab28251f0535b3c034b0e63a618efc3ff1d338c81a3da723adb3"},
|
||||
{file = "rapidfuzz-3.8.1.tar.gz", hash = "sha256:a357aae6791118011ad3ab4f2a4aa7bd7a487e5f9981b390e9f3c2c5137ecadf"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -7985,13 +8017,13 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
|
|||
|
||||
[[package]]
|
||||
name = "respx"
|
||||
version = "0.20.2"
|
||||
version = "0.21.1"
|
||||
description = "A utility for mocking out the Python HTTPX and HTTP Core libraries."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "respx-0.20.2-py2.py3-none-any.whl", hash = "sha256:ab8e1cf6da28a5b2dd883ea617f8130f77f676736e6e9e4a25817ad116a172c9"},
|
||||
{file = "respx-0.20.2.tar.gz", hash = "sha256:07cf4108b1c88b82010f67d3c831dae33a375c7b436e54d87737c7f9f99be643"},
|
||||
{file = "respx-0.21.1-py2.py3-none-any.whl", hash = "sha256:05f45de23f0c785862a2c92a3e173916e8ca88e4caad715dd5f68584d6053c20"},
|
||||
{file = "respx-0.21.1.tar.gz", hash = "sha256:0bd7fe21bfaa52106caa1223ce61224cf30786985f17c63c5d71eff0307ee8af"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -8354,6 +8386,17 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments
|
|||
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
|
||||
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
|
||||
|
||||
[[package]]
|
||||
name = "shellingham"
|
||||
version = "1.5.4"
|
||||
description = "Tool to Detect Surrounding Shell"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"},
|
||||
{file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "simple-websocket"
|
||||
version = "1.0.0"
|
||||
|
|
@ -8493,13 +8536,13 @@ sqlcipher = ["sqlcipher3_binary"]
|
|||
|
||||
[[package]]
|
||||
name = "sqlmodel"
|
||||
version = "0.0.14"
|
||||
version = "0.0.16"
|
||||
description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness."
|
||||
optional = false
|
||||
python-versions = ">=3.7,<4.0"
|
||||
files = [
|
||||
{file = "sqlmodel-0.0.14-py3-none-any.whl", hash = "sha256:accea3ff5d878e41ac439b11e78613ed61ce300cfcb860e87a2d73d4884cbee4"},
|
||||
{file = "sqlmodel-0.0.14.tar.gz", hash = "sha256:0bff8fc94af86b44925aa813f56cf6aabdd7f156b73259f2f60692c6a64ac90e"},
|
||||
{file = "sqlmodel-0.0.16-py3-none-any.whl", hash = "sha256:b972f5d319580d6c37ecc417881f6ec4d1ad3ed3583d0ac0ed43234a28bf605a"},
|
||||
{file = "sqlmodel-0.0.16.tar.gz", hash = "sha256:966656f18a8e9a2d159eb215b07fb0cf5222acfae3362707ca611848a8a06bd1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -8527,13 +8570,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
|
|||
|
||||
[[package]]
|
||||
name = "starlette"
|
||||
version = "0.36.3"
|
||||
version = "0.37.2"
|
||||
description = "The little ASGI library that shines."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"},
|
||||
{file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"},
|
||||
{file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"},
|
||||
{file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -9092,25 +9135,21 @@ tutorials = ["matplotlib", "pandas", "tabulate", "torch"]
|
|||
|
||||
[[package]]
|
||||
name = "typer"
|
||||
version = "0.9.4"
|
||||
version = "0.12.1"
|
||||
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"},
|
||||
{file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"},
|
||||
{file = "typer-0.12.1-py3-none-any.whl", hash = "sha256:43ebb23c8a358c3d623e31064359a65f50229d0bf73ae8dfd203f49d9126ae06"},
|
||||
{file = "typer-0.12.1.tar.gz", hash = "sha256:72d218ef3c686aed9c6ff3ca25b238aee0474a1628b29c559b18b634cfdeca88"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=7.1.1,<9.0.0"
|
||||
click = ">=8.0.0"
|
||||
rich = ">=10.11.0"
|
||||
shellingham = ">=1.3.0"
|
||||
typing-extensions = ">=3.7.4.3"
|
||||
|
||||
[package.extras]
|
||||
all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
|
||||
dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
|
||||
doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
|
||||
test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "types-cachetools"
|
||||
version = "5.3.0.7"
|
||||
|
|
@ -9146,13 +9185,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "types-pillow"
|
||||
version = "10.2.0.20240331"
|
||||
version = "10.2.0.20240406"
|
||||
description = "Typing stubs for Pillow"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "types-Pillow-10.2.0.20240331.tar.gz", hash = "sha256:c604e39aeb70719e463d359157a2238bfc0e81bf9c658375703f7b98cfd607da"},
|
||||
{file = "types_Pillow-10.2.0.20240331-py3-none-any.whl", hash = "sha256:ceec177ab78d9d7c110b8efdb592298ae9ae3bb4da90f7de28b99818dfbe5179"},
|
||||
{file = "types-Pillow-10.2.0.20240406.tar.gz", hash = "sha256:62e0cc1f17caba40e72e7154a483f4c7f3bea0e1c34c0ebba9de3c7745bc306d"},
|
||||
{file = "types_Pillow-10.2.0.20240406-py3-none-any.whl", hash = "sha256:5ac182e8afce53de30abca2fdf9cbec7b2500e549d0be84da035a729a84c7c47"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -9207,13 +9246,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "types-pywin32"
|
||||
version = "306.0.0.20240331"
|
||||
version = "306.0.0.20240408"
|
||||
description = "Typing stubs for pywin32"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "types-pywin32-306.0.0.20240331.tar.gz", hash = "sha256:3427e60ffbbc47b31e6bc416c3e4c37d579e6774e5152dab953589c9b64c4d58"},
|
||||
{file = "types_pywin32-306.0.0.20240331-py3-none-any.whl", hash = "sha256:844b90253f3da38e254c9ce199ba41f63da52c511003daedcda414e88f1cf319"},
|
||||
{file = "types-pywin32-306.0.0.20240408.tar.gz", hash = "sha256:706d8d4f1e796cd611e97d4772aaab36bddb01a829783ec11bd64f629df5fe3b"},
|
||||
{file = "types_pywin32-306.0.0.20240408-py3-none-any.whl", hash = "sha256:147466069d4c51a4a25e9fe380bf3ad9511ffb3c877d33a311e13fa38a6340bf"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -9244,13 +9283,13 @@ types-pyOpenSSL = "*"
|
|||
|
||||
[[package]]
|
||||
name = "types-requests"
|
||||
version = "2.31.0.20240403"
|
||||
version = "2.31.0.20240406"
|
||||
description = "Typing stubs for requests"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "types-requests-2.31.0.20240403.tar.gz", hash = "sha256:e1e0cd0b655334f39d9f872b68a1310f0e343647688bf2cee932ec4c2b04de59"},
|
||||
{file = "types_requests-2.31.0.20240403-py3-none-any.whl", hash = "sha256:06abf6a68f5c4f2a62f6bb006672dfb26ed50ccbfddb281e1ee6f09a65707d5d"},
|
||||
{file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"},
|
||||
{file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -9258,13 +9297,13 @@ urllib3 = ">=2"
|
|||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.10.0"
|
||||
version = "4.11.0"
|
||||
description = "Backported and Experimental Type Hints for Python 3.8+"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
|
||||
{file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
|
||||
{file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
|
||||
{file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -9538,13 +9577,13 @@ zstd = ["zstandard (>=0.18.0)"]
|
|||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.27.1"
|
||||
version = "0.29.0"
|
||||
description = "The lightning-fast ASGI server."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"},
|
||||
{file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"},
|
||||
{file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
|
||||
{file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -9739,13 +9778,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "weaviate-client"
|
||||
version = "4.5.4"
|
||||
version = "4.5.5"
|
||||
description = "A python native Weaviate client"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "weaviate-client-4.5.4.tar.gz", hash = "sha256:fc53dc73cd53df453c5e6dc758e49a6a1549212d6670ddd013392107120692f8"},
|
||||
{file = "weaviate_client-4.5.4-py3-none-any.whl", hash = "sha256:f6d3a6b759e5aa0d3350067490526ea38b9274ae4043b4a3ae0064c28d56883f"},
|
||||
{file = "weaviate-client-4.5.5.tar.gz", hash = "sha256:69906588e8eda0a307ad2c5b3c7c7e0ae4b9d80202a5cc97bdd2af15293977e3"},
|
||||
{file = "weaviate_client-4.5.5-py3-none-any.whl", hash = "sha256:70cbd139f8a230723eb2400b8a3fb495055ae8c0897bd837ab58994924de0413"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -10317,4 +10356,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.10,<3.12"
|
||||
content-hash = "ed8605b2934fceb591d03d5be7461ed05a8f427512b693ce1baefeaa4fa21500"
|
||||
content-hash = "0d658da1ee75640f74d7335685760091df2a452e84eef81dcda2590dd4044e92"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "langflow"
|
||||
version = "1.0.0a4"
|
||||
version = "1.0.0a9"
|
||||
description = "A Python package with a built-in web application"
|
||||
authors = ["Logspace <contact@logspace.ai>"]
|
||||
maintainers = [
|
||||
|
|
@ -72,13 +72,15 @@ langchain-cohere = "^0.1.0rc1"
|
|||
elasticsearch = "^8.12.0"
|
||||
pytube = "^15.0.0"
|
||||
llama-index = "^0.10.13"
|
||||
langchain-openai = "^0.0.5"
|
||||
unstructured = { extras = ["md"], version = "^0.12.4" }
|
||||
dspy-ai = "^2.4.0"
|
||||
crewai = "^0.22.5"
|
||||
html2text = "^2024.2.26"
|
||||
assemblyai = "^0.23.1"
|
||||
litellm = "^1.34.22"
|
||||
chromadb = "^0.4.24"
|
||||
langchain-anthropic = "^0.1.6"
|
||||
langchain-astradb = "^0.1.0"
|
||||
langchain-openai = "^0.1.1"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
types-redis = "^4.6.0.5"
|
||||
|
|
@ -101,10 +103,9 @@ pytest-xdist = "^3.5.0"
|
|||
types-pywin32 = "^306.0.0.4"
|
||||
types-google-cloud-ndb = "^2.2.0.0"
|
||||
pytest-sugar = "^1.0.0"
|
||||
respx = "^0.21.1"
|
||||
pytest-instafail = "^0.5.0"
|
||||
pytest-asyncio = "^0.23.0"
|
||||
respx = "^0.20.2"
|
||||
|
||||
|
||||
[tool.poetry.extras]
|
||||
deploy = ["celery", "redis", "flower"]
|
||||
|
|
|
|||
|
|
@ -11,12 +11,23 @@ def read_version_from_pyproject(file_path):
|
|||
return None
|
||||
|
||||
|
||||
def get_version_from_pypi(package_name):
|
||||
import requests
|
||||
# def get_version_from_pypi(package_name):
|
||||
# import requests
|
||||
|
||||
response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
|
||||
if response.ok:
|
||||
return response.json()["info"]["version"]
|
||||
# response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
|
||||
# if response.ok:
|
||||
# return response.json()["info"]["version"]
|
||||
# return None
|
||||
|
||||
|
||||
def get_version_from_pypi(package_name):
|
||||
# Use default python lib to make the GET for this because it runs in github actions
|
||||
import json
|
||||
import urllib.request
|
||||
|
||||
response = urllib.request.urlopen(f"https://pypi.org/pypi/{package_name}/json")
|
||||
if response.getcode() == 200:
|
||||
return json.loads(response.read())["info"]["version"]
|
||||
return None
|
||||
|
||||
|
||||
|
|
@ -44,7 +55,7 @@ if __name__ == "__main__":
|
|||
|
||||
# Reading version and updating pyproject.toml
|
||||
langflow_base_path = Path(__file__).resolve().parent / "../src/backend/base/pyproject.toml"
|
||||
version = get_version_from_pypi("langflow-base")
|
||||
version = read_version_from_pyproject(langflow_base_path)
|
||||
if version:
|
||||
update_pyproject_dependency(pyproject_path, version)
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -2,25 +2,25 @@ import platform
|
|||
import socket
|
||||
import sys
|
||||
import time
|
||||
import webbrowser
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import click
|
||||
import httpx
|
||||
import typer
|
||||
from dotenv import load_dotenv
|
||||
from multiprocess import Process, cpu_count # type: ignore
|
||||
from rich import box
|
||||
from rich import print as rprint
|
||||
from rich.console import Console
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
from langflow.main import setup_app
|
||||
from langflow.services.database.utils import session_getter
|
||||
from langflow.services.deps import get_db_service, get_settings_service
|
||||
from langflow.services.utils import initialize_services, initialize_settings_service
|
||||
from langflow.utils.logger import configure, logger
|
||||
from multiprocess import Process, cpu_count # type: ignore
|
||||
from packaging import version as pkg_version
|
||||
from rich import box
|
||||
from rich import print as rprint
|
||||
from rich.console import Console
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
console = Console()
|
||||
|
||||
|
|
@ -99,8 +99,12 @@ def update_settings(
|
|||
|
||||
@app.command()
|
||||
def run(
|
||||
host: str = typer.Option("127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"),
|
||||
workers: int = typer.Option(1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"),
|
||||
host: str = typer.Option(
|
||||
"127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
|
||||
),
|
||||
workers: int = typer.Option(
|
||||
1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
|
||||
),
|
||||
timeout: int = typer.Option(300, help="Worker timeout in seconds."),
|
||||
port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"),
|
||||
components_path: Optional[Path] = typer.Option(
|
||||
|
|
@ -108,11 +112,19 @@ def run(
|
|||
help="Path to the directory containing custom components.",
|
||||
envvar="LANGFLOW_COMPONENTS_PATH",
|
||||
),
|
||||
config: str = typer.Option(Path(__file__).parent / "config.yaml", help="Path to the configuration file."),
|
||||
config: str = typer.Option(
|
||||
Path(__file__).parent / "config.yaml", help="Path to the configuration file."
|
||||
),
|
||||
# .env file param
|
||||
env_file: Path = typer.Option(None, help="Path to the .env file containing environment variables."),
|
||||
log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
|
||||
log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"),
|
||||
env_file: Path = typer.Option(
|
||||
None, help="Path to the .env file containing environment variables."
|
||||
),
|
||||
log_level: str = typer.Option(
|
||||
"critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
|
||||
),
|
||||
log_file: Path = typer.Option(
|
||||
"logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"
|
||||
),
|
||||
cache: Optional[str] = typer.Option(
|
||||
envvar="LANGFLOW_LANGCHAIN_CACHE",
|
||||
help="Type of cache to use. (InMemoryCache, SQLiteCache)",
|
||||
|
|
@ -189,22 +201,30 @@ def run(
|
|||
else:
|
||||
# Run using gunicorn on Linux
|
||||
run_on_mac_or_linux(host, port, log_level, options, app, open_browser)
|
||||
if open_browser:
|
||||
click.launch(f"http://{host}:{port}")
|
||||
|
||||
|
||||
def run_on_mac_or_linux(host, port, log_level, options, app, open_browser=True):
|
||||
webapp_process = Process(target=run_langflow, args=(host, port, log_level, options, app))
|
||||
webapp_process.start()
|
||||
def wait_for_server_ready(host, port):
|
||||
"""
|
||||
Wait for the server to become ready by polling the health endpoint.
|
||||
"""
|
||||
status_code = 0
|
||||
while status_code != 200:
|
||||
try:
|
||||
status_code = httpx.get(f"http://{host}:{port}/health").status_code
|
||||
|
||||
except Exception:
|
||||
time.sleep(1)
|
||||
|
||||
|
||||
def run_on_mac_or_linux(host, port, log_level, options, app):
|
||||
webapp_process = Process(
|
||||
target=run_langflow, args=(host, port, log_level, options, app)
|
||||
)
|
||||
webapp_process.start()
|
||||
wait_for_server_ready(host, port)
|
||||
|
||||
print_banner(host, port)
|
||||
if open_browser:
|
||||
webbrowser.open(f"http://{host}:{port}")
|
||||
|
||||
|
||||
def run_on_windows(host, port, log_level, options, app):
|
||||
|
|
@ -245,40 +265,165 @@ def get_free_port(port):
|
|||
return port
|
||||
|
||||
|
||||
def print_banner(host, port):
|
||||
def version_is_prerelease(version: str):
|
||||
"""
|
||||
Check if a version is a pre-release version.
|
||||
"""
|
||||
return "a" in version or "b" in version or "rc" in version
|
||||
|
||||
|
||||
def get_letter_from_version(version: str):
|
||||
"""
|
||||
Get the letter from a pre-release version.
|
||||
"""
|
||||
if "a" in version:
|
||||
return "a"
|
||||
if "b" in version:
|
||||
return "b"
|
||||
if "rc" in version:
|
||||
return "rc"
|
||||
return None
|
||||
|
||||
|
||||
def build_new_version_notice(current_version: str, package_name: str):
|
||||
"""
|
||||
Build a new version notice.
|
||||
"""
|
||||
# The idea here is that we want to show a notice to the user
|
||||
# when a new version of Langflow is available.
|
||||
# The key is that if the version the user has is a pre-release
|
||||
# e.g 0.0.0a1, then we find the latest version that is pre-release
|
||||
# otherwise we find the latest stable version.
|
||||
# we will show the notice either way, but only if the version
|
||||
# the user has is not the latest version.
|
||||
if version_is_prerelease(current_version):
|
||||
# curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1
|
||||
# this command will give us the latest pre-release version
|
||||
package_info = httpx.get(f"https://pypi.org/pypi/{package_name}/json").json()
|
||||
# 4.0.0a1 or 4.0.0b1 or 4.0.0rc1
|
||||
# find which type of pre-release version we have
|
||||
# could be a1, b1, rc1
|
||||
# we want the a, b, or rc and the number
|
||||
suffix_letter = get_letter_from_version(current_version)
|
||||
number_version = current_version.split(suffix_letter)[0]
|
||||
latest_version = sorted(
|
||||
package_info["releases"].keys(),
|
||||
key=lambda x: x.split(suffix_letter)[-1] and number_version in x,
|
||||
)[-1]
|
||||
if version_is_prerelease(latest_version) and latest_version != current_version:
|
||||
return (
|
||||
True,
|
||||
f"A new pre-release version of {package_name} is available: {latest_version}",
|
||||
)
|
||||
else:
|
||||
latest_version = httpx.get(f"https://pypi.org/pypi/{package_name}/json").json()[
|
||||
"info"
|
||||
]["version"]
|
||||
if not version_is_prerelease(latest_version):
|
||||
return (
|
||||
False,
|
||||
f"A new version of {package_name} is available: {latest_version}",
|
||||
)
|
||||
return False, ""
|
||||
|
||||
|
||||
def is_prerelease(version: str) -> bool:
|
||||
return "a" in version or "b" in version or "rc" in version
|
||||
|
||||
|
||||
def fetch_latest_version(package_name: str, include_prerelease: bool) -> str:
|
||||
response = httpx.get(f"https://pypi.org/pypi/{package_name}/json")
|
||||
versions = response.json()["releases"].keys()
|
||||
valid_versions = [v for v in versions if include_prerelease or not is_prerelease(v)]
|
||||
if not valid_versions:
|
||||
return None # Handle case where no valid versions are found
|
||||
return max(valid_versions, key=lambda v: pkg_version.parse(v))
|
||||
|
||||
|
||||
def build_version_notice(current_version: str, package_name: str) -> str:
|
||||
latest_version = fetch_latest_version(package_name, is_prerelease(current_version))
|
||||
if latest_version and pkg_version.parse(current_version) < pkg_version.parse(
|
||||
latest_version
|
||||
):
|
||||
release_type = "pre-release" if is_prerelease(latest_version) else "version"
|
||||
return f"A new {release_type} of {package_name} is available: {latest_version}"
|
||||
return ""
|
||||
|
||||
|
||||
def generate_pip_command(package_names, is_pre_release):
|
||||
"""
|
||||
Generate the pip install command based on the packages and whether it's a pre-release.
|
||||
"""
|
||||
base_command = "pip install"
|
||||
if is_pre_release:
|
||||
return f"{base_command} {' '.join(package_names)} -U --pre"
|
||||
else:
|
||||
return f"{base_command} {' '.join(package_names)} -U"
|
||||
|
||||
|
||||
def stylize_text(text: str, to_style: str, is_prerelease: bool) -> str:
|
||||
color = "#42a7f5" if is_prerelease else "#6e42f5"
|
||||
# return "".join(f"[{color}]{char}[/]" for char in text)
|
||||
styled_text = f"[{color}]{to_style}[/]"
|
||||
return text.replace(to_style, styled_text)
|
||||
|
||||
|
||||
def print_banner(host: str, port: int):
|
||||
notices = []
|
||||
package_names = [] # Track package names for pip install instructions
|
||||
is_pre_release = False # Track if any package is a pre-release
|
||||
package_name = ""
|
||||
|
||||
try:
|
||||
from langflow.version import __version__
|
||||
from langflow.version import __version__ as langflow_version
|
||||
|
||||
version = __version__
|
||||
word = "Langflow"
|
||||
is_pre_release |= is_prerelease(langflow_version) # Update pre-release status
|
||||
notice = build_version_notice(langflow_version, "langflow")
|
||||
notice = stylize_text(notice, "langflow", is_pre_release)
|
||||
if notice:
|
||||
notices.append(notice)
|
||||
package_names.append("langflow")
|
||||
package_name = "Langflow"
|
||||
except ImportError:
|
||||
from importlib import metadata
|
||||
langflow_version = None
|
||||
|
||||
version = metadata.version("langflow-base")
|
||||
word = "Langflow Base"
|
||||
# Attempt to handle langflow-base similarly
|
||||
if langflow_version is None: # This means langflow.version was not imported
|
||||
try:
|
||||
from importlib import metadata
|
||||
|
||||
colors = ["#6e42f5"]
|
||||
langflow_base_version = metadata.version("langflow-base")
|
||||
is_pre_release |= is_prerelease(
|
||||
langflow_base_version
|
||||
) # Update pre-release status
|
||||
notice = build_version_notice(langflow_base_version, "langflow-base")
|
||||
notice = stylize_text(notice, "langflow-base", is_pre_release)
|
||||
if notice:
|
||||
notices.append(notice)
|
||||
package_names.append("langflow-base")
|
||||
package_name = "Langflow Base"
|
||||
except ImportError as e:
|
||||
logger.exception(e)
|
||||
raise e
|
||||
|
||||
styled_word = ""
|
||||
# Generate pip command based on the collected data
|
||||
pip_command = generate_pip_command(package_names, is_pre_release)
|
||||
|
||||
for i, char in enumerate(word):
|
||||
color = colors[i % len(colors)]
|
||||
styled_word += f"[{color}]{char}[/]"
|
||||
# Add pip install command to notices if any package needs an update
|
||||
if notices:
|
||||
notices.append(f"Run '{pip_command}' to update.")
|
||||
|
||||
# Title with emojis and gradient text
|
||||
title = (
|
||||
f"[bold]Welcome to :chains: {styled_word} v{version}[/bold]\n"
|
||||
f"Access [link=http://{host}:{port}]http://{host}:{port}[/link]"
|
||||
)
|
||||
info_text = (
|
||||
"Collaborate, and contribute at our "
|
||||
"[bold][link=https://github.com/logspace-ai/langflow]GitHub Repo[/link][/bold] :rocket:"
|
||||
styled_notices = [f"[bold]{notice}[/bold]" for notice in notices if notice]
|
||||
styled_package_name = stylize_text(
|
||||
package_name, package_name, any("pre-release" in notice for notice in notices)
|
||||
)
|
||||
|
||||
# Create a panel with the title and the info text, and a border around it
|
||||
panel = Panel(f"{title}\n{info_text}", box=box.ROUNDED, border_style="blue", expand=False)
|
||||
title = f"[bold]Welcome to :chains: {styled_package_name}[/bold]\n"
|
||||
info_text = "Collaborate, and contribute at our [bold][link=https://github.com/logspace-ai/langflow]GitHub Repo[/link][/bold] :rocket:"
|
||||
access_link = f"Access [link=http://{host}:{port}]http://{host}:{port}[/link]"
|
||||
|
||||
# Print the banner with a separator line before and after
|
||||
panel_content = "\n\n".join([title, *styled_notices, info_text, access_link])
|
||||
panel = Panel(panel_content, box=box.ROUNDED, border_style="blue", expand=False)
|
||||
rprint(panel)
|
||||
|
||||
|
||||
|
|
@ -314,8 +459,12 @@ def run_langflow(host, port, log_level, options, app):
|
|||
@app.command()
|
||||
def superuser(
|
||||
username: str = typer.Option(..., prompt=True, help="Username for the superuser."),
|
||||
password: str = typer.Option(..., prompt=True, hide_input=True, help="Password for the superuser."),
|
||||
log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
|
||||
password: str = typer.Option(
|
||||
..., prompt=True, hide_input=True, help="Password for the superuser."
|
||||
),
|
||||
log_level: str = typer.Option(
|
||||
"error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
|
||||
),
|
||||
):
|
||||
"""
|
||||
Create a superuser.
|
||||
|
|
|
|||
|
|
@ -86,7 +86,7 @@ class ChatComponent(CustomComponent):
|
|||
input_value: Optional[Union[str, Record]] = None,
|
||||
session_id: Optional[str] = None,
|
||||
return_record: Optional[bool] = False,
|
||||
record_template: Optional[str] = "Text: {text}\nData: {data}",
|
||||
record_template: str = "Text: {text}\nData: {data}",
|
||||
) -> Union[Text, Record]:
|
||||
input_value_record: Optional[Record] = None
|
||||
if return_record:
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from typing import Optional, Union
|
||||
from typing import Optional
|
||||
|
||||
from langflow.field_typing import Text
|
||||
from langflow.helpers.record import records_to_text
|
||||
|
|
@ -27,7 +27,7 @@ class TextComponent(CustomComponent):
|
|||
|
||||
def build(
|
||||
self,
|
||||
input_value: Optional[Union[Text, Record]] = "",
|
||||
input_value: Optional[Text] = "",
|
||||
record_template: Optional[str] = "{text}",
|
||||
) -> Text:
|
||||
if isinstance(input_value, Record):
|
||||
|
|
|
|||
|
|
@ -94,14 +94,14 @@ class APIRequest(CustomComponent):
|
|||
self,
|
||||
method: str,
|
||||
urls: List[str],
|
||||
headers: Optional[Record] = None,
|
||||
_headers: Optional[Record] = None,
|
||||
body: Optional[Record] = None,
|
||||
timeout: int = 5,
|
||||
) -> List[Record]:
|
||||
if headers is None:
|
||||
if _headers is None:
|
||||
headers = {}
|
||||
else:
|
||||
headers = headers.data
|
||||
headers = _headers.data
|
||||
|
||||
bodies = []
|
||||
if body:
|
||||
|
|
|
|||
|
|
@ -44,6 +44,7 @@ class CreateRecordComponent(CustomComponent):
|
|||
)
|
||||
build_config[field.name] = field.to_dict()
|
||||
|
||||
build_config["number_of_fields"]["value"] = field_value_int
|
||||
return build_config
|
||||
|
||||
def build_config(self):
|
||||
|
|
|
|||
|
|
@ -890,9 +890,9 @@ class Graph:
|
|||
raise ValueError(f"Source vertex {edge['source']} not found")
|
||||
if target is None:
|
||||
raise ValueError(f"Target vertex {edge['target']} not found")
|
||||
edge = ContractEdge(source, target, edge)
|
||||
new_edge = ContractEdge(source, target, edge)
|
||||
|
||||
edges.add(edge)
|
||||
edges.add(new_edge)
|
||||
|
||||
return list(edges)
|
||||
|
||||
|
|
|
|||
|
|
@ -85,7 +85,7 @@ class RunnableVerticesManager:
|
|||
for v_id in set(next_runnable_vertices): # Use set to avoid duplicates
|
||||
self.update_vertex_run_state(v_id, is_runnable=False)
|
||||
self.remove_from_predecessors(v_id)
|
||||
await set_cache_coro(data=graph, lock=lock)
|
||||
await set_cache_coro(data=graph, lock=lock) # type: ignore
|
||||
return next_runnable_vertices
|
||||
|
||||
@staticmethod
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ import inspect
|
|||
import types
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING, Any, AsyncIterator, Callable, Dict, Iterator, List, Optional
|
||||
import os
|
||||
|
||||
from loguru import logger
|
||||
|
||||
|
|
@ -305,7 +306,7 @@ class Vertex:
|
|||
if file_path := field.get("file_path"):
|
||||
storage_service = get_storage_service()
|
||||
try:
|
||||
flow_id, file_name = file_path.split("/")
|
||||
flow_id, file_name = os.path.split(file_path)
|
||||
full_path = storage_service.build_full_path(flow_id, file_name)
|
||||
except ValueError as e:
|
||||
if "too many values to unpack" in str(e):
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from typing import TYPE_CHECKING, Any, Callable, Coroutine, List, Optional, Tuple, Union
|
||||
from typing import TYPE_CHECKING, Any, Callable, Coroutine, List, Optional, Tuple, Type, Union, cast
|
||||
|
||||
from pydantic.v1 import BaseModel, Field, create_model
|
||||
from sqlmodel import select
|
||||
|
|
@ -63,12 +63,14 @@ def find_flow(flow_name: str, user_id: str) -> Optional[str]:
|
|||
|
||||
|
||||
async def run_flow(
|
||||
inputs: Union[dict, List[dict]] = None,
|
||||
inputs: Optional[Union[dict, List[dict]]] = None,
|
||||
tweaks: Optional[dict] = None,
|
||||
flow_id: Optional[str] = None,
|
||||
flow_name: Optional[str] = None,
|
||||
user_id: Optional[str] = None,
|
||||
) -> Any:
|
||||
if user_id is None:
|
||||
raise ValueError("Session is invalid")
|
||||
graph = await load_flow(user_id, flow_id, flow_name, tweaks)
|
||||
|
||||
if inputs is None:
|
||||
|
|
@ -77,7 +79,7 @@ async def run_flow(
|
|||
inputs_components = []
|
||||
types = []
|
||||
for input_dict in inputs:
|
||||
inputs_list.append({INPUT_FIELD_NAME: input_dict.get("input_value")})
|
||||
inputs_list.append({INPUT_FIELD_NAME: cast(str, input_dict.get("input_value"))})
|
||||
inputs_components.append(input_dict.get("components", []))
|
||||
types.append(input_dict.get("type", []))
|
||||
|
||||
|
|
@ -138,12 +140,12 @@ async def flow_function({func_args}):
|
|||
"""
|
||||
|
||||
compiled_func = compile(func_body, "<string>", "exec")
|
||||
local_scope = {}
|
||||
local_scope: dict = {}
|
||||
exec(compiled_func, globals(), local_scope)
|
||||
return local_scope["flow_function"]
|
||||
|
||||
|
||||
def build_function_and_schema(flow_record: Record, graph: "Graph") -> Tuple[Callable, BaseModel]:
|
||||
def build_function_and_schema(flow_record: Record, graph: "Graph") -> Tuple[Callable, Type[BaseModel]]:
|
||||
"""
|
||||
Builds a dynamic function and schema for a given flow.
|
||||
|
||||
|
|
@ -178,7 +180,7 @@ def get_flow_inputs(graph: "Graph") -> List["Vertex"]:
|
|||
return inputs
|
||||
|
||||
|
||||
def build_schema_from_inputs(name: str, inputs: List[tuple[str, str, str]]) -> BaseModel:
|
||||
def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> Type[BaseModel]:
|
||||
"""
|
||||
Builds a schema from the given inputs.
|
||||
|
||||
|
|
@ -196,4 +198,4 @@ def build_schema_from_inputs(name: str, inputs: List[tuple[str, str, str]]) -> B
|
|||
field_name = input_.display_name.lower().replace(" ", "_")
|
||||
description = input_.description
|
||||
fields[field_name] = (str, Field(default="", description=description))
|
||||
return create_model(name, **fields)
|
||||
return create_model(name, **fields) # type: ignore
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
from typing import Union
|
||||
from langchain_core.documents import Document
|
||||
|
||||
from langflow.schema import Record
|
||||
|
|
@ -16,7 +17,7 @@ def docs_to_records(documents: list[Document]) -> list[Record]:
|
|||
return [Record.from_document(document) for document in documents]
|
||||
|
||||
|
||||
def records_to_text(template: str, records: list[Record]) -> str:
|
||||
def records_to_text(template: str, records: Union[Record, list[Record]]) -> str:
|
||||
"""
|
||||
Converts a list of Records to a list of texts.
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,888 @@
|
|||
{
|
||||
"id": "c091a57f-43a7-4a5e-b352-035ae8d8379c",
|
||||
"data": {
|
||||
"nodes": [
|
||||
{
|
||||
"id": "Prompt-uxBqP",
|
||||
"type": "genericNode",
|
||||
"position": {
|
||||
"x": 53.588791333410654,
|
||||
"y": -107.07318910019967
|
||||
},
|
||||
"data": {
|
||||
"type": "Prompt",
|
||||
"node": {
|
||||
"template": {
|
||||
"code": {
|
||||
"type": "code",
|
||||
"required": true,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
"value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "code",
|
||||
"advanced": true,
|
||||
"dynamic": true,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"template": {
|
||||
"type": "prompt",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": "Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: ",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "template",
|
||||
"display_name": "Template",
|
||||
"advanced": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"_type": "CustomComponent",
|
||||
"user_input": {
|
||||
"field_type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
"value": "",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "user_input",
|
||||
"display_name": "user_input",
|
||||
"advanced": false,
|
||||
"input_types": [
|
||||
"Document",
|
||||
"BaseOutputParser",
|
||||
"Record",
|
||||
"Text"
|
||||
],
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"type": "str"
|
||||
}
|
||||
},
|
||||
"description": "Create a prompt template with dynamic variables.",
|
||||
"icon": "prompts",
|
||||
"is_input": null,
|
||||
"is_output": null,
|
||||
"is_composition": null,
|
||||
"base_classes": [
|
||||
"object",
|
||||
"str",
|
||||
"Text"
|
||||
],
|
||||
"name": "",
|
||||
"display_name": "Prompt",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"template": [
|
||||
"user_input"
|
||||
]
|
||||
},
|
||||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
"full_path": null,
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false,
|
||||
"error": null
|
||||
},
|
||||
"id": "Prompt-uxBqP",
|
||||
"description": "Create a prompt template with dynamic variables.",
|
||||
"display_name": "Prompt"
|
||||
},
|
||||
"selected": true,
|
||||
"width": 384,
|
||||
"height": 383,
|
||||
"dragging": false,
|
||||
"positionAbsolute": {
|
||||
"x": 53.588791333410654,
|
||||
"y": -107.07318910019967
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "OpenAIModel-k39HS",
|
||||
"type": "genericNode",
|
||||
"position": {
|
||||
"x": 634.8148772766217,
|
||||
"y": 27.035057029045305
|
||||
},
|
||||
"data": {
|
||||
"type": "OpenAIModel",
|
||||
"node": {
|
||||
"template": {
|
||||
"input_value": {
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "input_value",
|
||||
"display_name": "Input",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"code": {
|
||||
"type": "code",
|
||||
"required": true,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
"value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "code",
|
||||
"advanced": true,
|
||||
"dynamic": true,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"max_tokens": {
|
||||
"type": "int",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": 256,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "max_tokens",
|
||||
"display_name": "Max Tokens",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"model_kwargs": {
|
||||
"type": "NestedDict",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": {},
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "model_kwargs",
|
||||
"display_name": "Model Kwargs",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"model_name": {
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"placeholder": "",
|
||||
"list": true,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": "gpt-3.5-turbo",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"options": [
|
||||
"gpt-4-turbo-preview",
|
||||
"gpt-3.5-turbo",
|
||||
"gpt-4-0125-preview",
|
||||
"gpt-4-1106-preview",
|
||||
"gpt-4-vision-preview",
|
||||
"gpt-3.5-turbo-0125",
|
||||
"gpt-3.5-turbo-1106"
|
||||
],
|
||||
"name": "model_name",
|
||||
"display_name": "Model Name",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"openai_api_base": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "openai_api_base",
|
||||
"display_name": "OpenAI API Base",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"openai_api_key": {
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": true,
|
||||
"name": "openai_api_key",
|
||||
"display_name": "OpenAI API Key",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
"info": "The OpenAI API Key to use for the OpenAI model.",
|
||||
"load_from_db": true,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
"value": ""
|
||||
},
|
||||
"stream": {
|
||||
"type": "bool",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": true,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "stream",
|
||||
"display_name": "Stream",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Stream the response from the model. Streaming works only in Chat.",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"system_message": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "system_message",
|
||||
"display_name": "System Message",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "System message to pass to the model.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"temperature": {
|
||||
"type": "float",
|
||||
"required": true,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": 0.1,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "temperature",
|
||||
"display_name": "Temperature",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"rangeSpec": {
|
||||
"step_type": "float",
|
||||
"min": -1,
|
||||
"max": 1,
|
||||
"step": 0.1
|
||||
},
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Generates text using OpenAI LLMs.",
|
||||
"icon": "OpenAI",
|
||||
"base_classes": [
|
||||
"object",
|
||||
"Text",
|
||||
"str"
|
||||
],
|
||||
"display_name": "OpenAI",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"input_value": null,
|
||||
"openai_api_key": null,
|
||||
"temperature": null,
|
||||
"model_name": null,
|
||||
"max_tokens": null,
|
||||
"model_kwargs": null,
|
||||
"openai_api_base": null,
|
||||
"stream": null,
|
||||
"system_message": null
|
||||
},
|
||||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [
|
||||
"max_tokens",
|
||||
"model_kwargs",
|
||||
"model_name",
|
||||
"openai_api_base",
|
||||
"openai_api_key",
|
||||
"temperature",
|
||||
"input_value",
|
||||
"system_message",
|
||||
"stream"
|
||||
],
|
||||
"beta": false
|
||||
},
|
||||
"id": "OpenAIModel-k39HS",
|
||||
"description": "Generates text using OpenAI LLMs.",
|
||||
"display_name": "OpenAI"
|
||||
},
|
||||
"selected": false,
|
||||
"width": 384,
|
||||
"height": 563,
|
||||
"positionAbsolute": {
|
||||
"x": 634.8148772766217,
|
||||
"y": 27.035057029045305
|
||||
},
|
||||
"dragging": false
|
||||
},
|
||||
{
|
||||
"id": "ChatOutput-njtka",
|
||||
"type": "genericNode",
|
||||
"position": {
|
||||
"x": 1193.250417197867,
|
||||
"y": 71.88476890163852
|
||||
},
|
||||
"data": {
|
||||
"type": "ChatOutput",
|
||||
"node": {
|
||||
"template": {
|
||||
"code": {
|
||||
"type": "code",
|
||||
"required": true,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "code",
|
||||
"advanced": true,
|
||||
"dynamic": true,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"input_value": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "input_value",
|
||||
"display_name": "Message",
|
||||
"advanced": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"record_template": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
"value": "{text}",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "record_template",
|
||||
"display_name": "Record Template",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "In case of Message being a Record, this template will be used to convert it to text.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"return_record": {
|
||||
"type": "bool",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "return_record",
|
||||
"display_name": "Return Record",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"sender": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": true,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": "Machine",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"options": [
|
||||
"Machine",
|
||||
"User"
|
||||
],
|
||||
"name": "sender",
|
||||
"display_name": "Sender Type",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"sender_name": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": "AI",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "sender_name",
|
||||
"display_name": "Sender Name",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"session_id": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "session_id",
|
||||
"display_name": "Session ID",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "If provided, the message will be stored in the memory.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Display a chat message in the Interaction Panel.",
|
||||
"icon": "ChatOutput",
|
||||
"base_classes": [
|
||||
"Record",
|
||||
"Text",
|
||||
"str",
|
||||
"object"
|
||||
],
|
||||
"display_name": "Chat Output",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"sender": null,
|
||||
"sender_name": null,
|
||||
"input_value": null,
|
||||
"session_id": null,
|
||||
"return_record": null,
|
||||
"record_template": null
|
||||
},
|
||||
"output_types": [
|
||||
"Text",
|
||||
"Record"
|
||||
],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
},
|
||||
"id": "ChatOutput-njtka"
|
||||
},
|
||||
"selected": false,
|
||||
"width": 384,
|
||||
"height": 383,
|
||||
"positionAbsolute": {
|
||||
"x": 1193.250417197867,
|
||||
"y": 71.88476890163852
|
||||
},
|
||||
"dragging": false
|
||||
},
|
||||
{
|
||||
"id": "ChatInput-P3fgL",
|
||||
"type": "genericNode",
|
||||
"position": {
|
||||
"x": -495.2223093083827,
|
||||
"y": -232.56998443685862
|
||||
},
|
||||
"data": {
|
||||
"type": "ChatInput",
|
||||
"node": {
|
||||
"template": {
|
||||
"code": {
|
||||
"type": "code",
|
||||
"required": true,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "code",
|
||||
"advanced": true,
|
||||
"dynamic": true,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"input_value": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "input_value",
|
||||
"display_name": "Message",
|
||||
"advanced": false,
|
||||
"input_types": [],
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"value": "hi"
|
||||
},
|
||||
"return_record": {
|
||||
"type": "bool",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "return_record",
|
||||
"display_name": "Return Record",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"sender": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": true,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": "User",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"options": [
|
||||
"Machine",
|
||||
"User"
|
||||
],
|
||||
"name": "sender",
|
||||
"display_name": "Sender Type",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"sender_name": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": "User",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "sender_name",
|
||||
"display_name": "Sender Name",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"session_id": {
|
||||
"type": "str",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "session_id",
|
||||
"display_name": "Session ID",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "If provided, the message will be stored in the memory.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Get chat inputs from the Interaction Panel.",
|
||||
"icon": "ChatInput",
|
||||
"base_classes": [
|
||||
"object",
|
||||
"Record",
|
||||
"str",
|
||||
"Text"
|
||||
],
|
||||
"display_name": "Chat Input",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"sender": null,
|
||||
"sender_name": null,
|
||||
"input_value": null,
|
||||
"session_id": null,
|
||||
"return_record": null
|
||||
},
|
||||
"output_types": [
|
||||
"Text",
|
||||
"Record"
|
||||
],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
},
|
||||
"id": "ChatInput-P3fgL"
|
||||
},
|
||||
"selected": false,
|
||||
"width": 384,
|
||||
"height": 375,
|
||||
"positionAbsolute": {
|
||||
"x": -495.2223093083827,
|
||||
"y": -232.56998443685862
|
||||
},
|
||||
"dragging": false
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"source": "OpenAIModel-k39HS",
|
||||
"sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-k39HSœ}",
|
||||
"target": "ChatOutput-njtka",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-njtkaœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "ChatOutput-njtka",
|
||||
"inputTypes": [
|
||||
"Text"
|
||||
],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": [
|
||||
"object",
|
||||
"Text",
|
||||
"str"
|
||||
],
|
||||
"dataType": "OpenAIModel",
|
||||
"id": "OpenAIModel-k39HS"
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
"stroke": "#555"
|
||||
},
|
||||
"className": "stroke-gray-900 stroke-connection",
|
||||
"id": "reactflow__edge-OpenAIModel-k39HS{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-k39HSœ}-ChatOutput-njtka{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-njtkaœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}"
|
||||
},
|
||||
{
|
||||
"source": "Prompt-uxBqP",
|
||||
"sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-uxBqPœ}",
|
||||
"target": "OpenAIModel-k39HS",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-k39HSœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "OpenAIModel-k39HS",
|
||||
"inputTypes": [
|
||||
"Text"
|
||||
],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": [
|
||||
"object",
|
||||
"str",
|
||||
"Text"
|
||||
],
|
||||
"dataType": "Prompt",
|
||||
"id": "Prompt-uxBqP"
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
"stroke": "#555"
|
||||
},
|
||||
"className": "stroke-gray-900 stroke-connection",
|
||||
"id": "reactflow__edge-Prompt-uxBqP{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-uxBqPœ}-OpenAIModel-k39HS{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-k39HSœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}"
|
||||
},
|
||||
{
|
||||
"source": "ChatInput-P3fgL",
|
||||
"sourceHandle": "{œbaseClassesœ:[œobjectœ,œRecordœ,œstrœ,œTextœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-P3fgLœ}",
|
||||
"target": "Prompt-uxBqP",
|
||||
"targetHandle": "{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-uxBqPœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "user_input",
|
||||
"id": "Prompt-uxBqP",
|
||||
"inputTypes": [
|
||||
"Document",
|
||||
"BaseOutputParser",
|
||||
"Record",
|
||||
"Text"
|
||||
],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": [
|
||||
"object",
|
||||
"Record",
|
||||
"str",
|
||||
"Text"
|
||||
],
|
||||
"dataType": "ChatInput",
|
||||
"id": "ChatInput-P3fgL"
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
"stroke": "#555"
|
||||
},
|
||||
"className": "stroke-gray-900 stroke-connection",
|
||||
"id": "reactflow__edge-ChatInput-P3fgL{œbaseClassesœ:[œobjectœ,œRecordœ,œstrœ,œTextœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-P3fgLœ}-Prompt-uxBqP{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-uxBqPœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}"
|
||||
}
|
||||
],
|
||||
"viewport": {
|
||||
"x": 260.58251815500563,
|
||||
"y": 318.2261172111936,
|
||||
"zoom": 0.43514115784696294
|
||||
}
|
||||
},
|
||||
"description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. \nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ",
|
||||
"name": "Basic Prompting (Hello, world!)",
|
||||
"last_tested_version": "1.0.0a4",
|
||||
"is_component": false
|
||||
}
|
||||
|
|
@ -1 +1 @@
|
|||
from langflow.processing.load import load_flow_from_json # noqa: F401
|
||||
from langflow.processing.load import load_flow_from_json, run_flow_from_json # noqa: F401
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ from pathlib import Path
|
|||
from typing import Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import nest_asyncio
|
||||
import nest_asyncio # type: ignore
|
||||
import socketio # type: ignore
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ class Record(BaseModel):
|
|||
data (dict, optional): Additional data associated with the record.
|
||||
"""
|
||||
|
||||
text_key: Optional[str] = "text"
|
||||
text_key: str = "text"
|
||||
data: dict = {}
|
||||
default_value: Optional[str] = ""
|
||||
|
||||
|
|
|
|||
|
|
@ -1,24 +1,45 @@
|
|||
import os
|
||||
import logging
|
||||
|
||||
from gunicorn import glogging
|
||||
from gunicorn.app.base import BaseApplication # type: ignore
|
||||
from uvicorn.workers import UvicornWorker
|
||||
|
||||
from langflow.utils.logger import InterceptHandler # type: ignore
|
||||
|
||||
|
||||
class LangflowUvicornWorker(UvicornWorker):
|
||||
CONFIG_KWARGS = {"loop": "asyncio"}
|
||||
|
||||
|
||||
class Logger(glogging.Logger):
|
||||
"""Implements and overrides the gunicorn logging interface.
|
||||
|
||||
This class inherits from the standard gunicorn logger and overrides it by
|
||||
replacing the handlers with `InterceptHandler` in order to route the
|
||||
gunicorn logs to loguru.
|
||||
"""
|
||||
|
||||
def __init__(self, cfg):
|
||||
super().__init__(cfg)
|
||||
logging.getLogger("gunicorn.error").handlers = [InterceptHandler()]
|
||||
logging.getLogger("gunicorn.access").handlers = [InterceptHandler()]
|
||||
|
||||
|
||||
class LangflowApplication(BaseApplication):
|
||||
def __init__(self, app, options=None):
|
||||
self.options = options or {}
|
||||
|
||||
self.options["worker_class"] = "langflow.server.LangflowUvicornWorker"
|
||||
self.options["loglevel"] = os.getenv("LANGFLOW_LOG_LEVEL", "error").lower()
|
||||
self.options["logger_class"] = Logger
|
||||
self.application = app
|
||||
super().__init__()
|
||||
|
||||
def load_config(self):
|
||||
config = {key: value for key, value in self.options.items() if key in self.cfg.settings and value is not None}
|
||||
config = {
|
||||
key: value
|
||||
for key, value in self.options.items()
|
||||
if key in self.cfg.settings and value is not None
|
||||
}
|
||||
for key, value in config.items():
|
||||
self.cfg.set(key.lower(), value)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,22 +1,22 @@
|
|||
from datetime import datetime
|
||||
import time
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import command, util
|
||||
from alembic.config import Config
|
||||
from loguru import logger
|
||||
from sqlalchemy import inspect
|
||||
from sqlalchemy.exc import OperationalError
|
||||
from sqlmodel import Session, SQLModel, create_engine, select, text
|
||||
|
||||
from langflow.services.base import Service
|
||||
from langflow.services.database import models # noqa
|
||||
from langflow.services.database.models.user.crud import get_user_by_username
|
||||
from langflow.services.database.utils import Result, TableResults
|
||||
from langflow.services.deps import get_settings_service
|
||||
from langflow.services.utils import teardown_superuser
|
||||
from loguru import logger
|
||||
from sqlalchemy import inspect
|
||||
from sqlalchemy.exc import OperationalError
|
||||
from sqlmodel import Session, SQLModel, create_engine, select, text
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from sqlalchemy.engine import Engine
|
||||
|
|
@ -37,7 +37,10 @@ class DatabaseService(Service):
|
|||
def _create_engine(self) -> "Engine":
|
||||
"""Create the engine for the database."""
|
||||
settings_service = get_settings_service()
|
||||
if settings_service.settings.DATABASE_URL and settings_service.settings.DATABASE_URL.startswith("sqlite"):
|
||||
if (
|
||||
settings_service.settings.DATABASE_URL
|
||||
and settings_service.settings.DATABASE_URL.startswith("sqlite")
|
||||
):
|
||||
connect_args = {"check_same_thread": False}
|
||||
else:
|
||||
connect_args = {}
|
||||
|
|
@ -49,7 +52,9 @@ class DatabaseService(Service):
|
|||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
if exc_type is not None: # If an exception has been raised
|
||||
logger.error(f"Session rollback because of exception: {exc_type.__name__} {exc_value}")
|
||||
logger.error(
|
||||
f"Session rollback because of exception: {exc_type.__name__} {exc_value}"
|
||||
)
|
||||
self._session.rollback()
|
||||
else:
|
||||
self._session.commit()
|
||||
|
|
@ -66,7 +71,9 @@ class DatabaseService(Service):
|
|||
settings_service = get_settings_service()
|
||||
if settings_service.auth_settings.AUTO_LOGIN:
|
||||
with Session(self.engine) as session:
|
||||
flows = session.exec(select(models.Flow).where(models.Flow.user_id is None)).all()
|
||||
flows = session.exec(
|
||||
select(models.Flow).where(models.Flow.user_id is None)
|
||||
).all()
|
||||
if flows:
|
||||
logger.debug("Migrating flows to default superuser")
|
||||
username = settings_service.auth_settings.SUPERUSER
|
||||
|
|
@ -96,14 +103,16 @@ class DatabaseService(Service):
|
|||
expected_columns = list(model.model_fields.keys())
|
||||
|
||||
try:
|
||||
available_columns = [col["name"] for col in inspector.get_columns(table)]
|
||||
available_columns = [
|
||||
col["name"] for col in inspector.get_columns(table)
|
||||
]
|
||||
except sa.exc.NoSuchTableError:
|
||||
logger.error(f"Missing table: {table}")
|
||||
logger.debug(f"Missing table: {table}")
|
||||
return False
|
||||
|
||||
for column in expected_columns:
|
||||
if column not in available_columns:
|
||||
logger.error(f"Missing column: {column} in table {table}")
|
||||
logger.debug(f"Missing column: {column} in table {table}")
|
||||
return False
|
||||
|
||||
for table in legacy_tables:
|
||||
|
|
@ -160,7 +169,9 @@ class DatabaseService(Service):
|
|||
buffer.write(f"{datetime.now().isoformat()}: Checking migrations\n")
|
||||
command.check(alembic_cfg)
|
||||
except Exception as exc:
|
||||
if isinstance(exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)):
|
||||
if isinstance(
|
||||
exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)
|
||||
):
|
||||
command.upgrade(alembic_cfg, "head")
|
||||
time.sleep(3)
|
||||
|
||||
|
|
@ -197,7 +208,10 @@ class DatabaseService(Service):
|
|||
# We will check that all models are in the database
|
||||
# and that the database is up to date with all columns
|
||||
sql_models = [models.Flow, models.User, models.ApiKey]
|
||||
return [TableResults(sql_model.__tablename__, self.check_table(sql_model)) for sql_model in sql_models]
|
||||
return [
|
||||
TableResults(sql_model.__tablename__, self.check_table(sql_model))
|
||||
for sql_model in sql_models
|
||||
]
|
||||
|
||||
def check_table(self, model):
|
||||
results = []
|
||||
|
|
@ -206,7 +220,9 @@ class DatabaseService(Service):
|
|||
expected_columns = list(model.__fields__.keys())
|
||||
available_columns = []
|
||||
try:
|
||||
available_columns = [col["name"] for col in inspector.get_columns(table_name)]
|
||||
available_columns = [
|
||||
col["name"] for col in inspector.get_columns(table_name)
|
||||
]
|
||||
results.append(Result(name=table_name, type="table", success=True))
|
||||
except sa.exc.NoSuchTableError:
|
||||
logger.error(f"Missing table: {table_name}")
|
||||
|
|
@ -237,7 +253,9 @@ class DatabaseService(Service):
|
|||
try:
|
||||
table.create(self.engine, checkfirst=True)
|
||||
except OperationalError as oe:
|
||||
logger.warning(f"Table {table} already exists, skipping. Exception: {oe}")
|
||||
logger.warning(
|
||||
f"Table {table} already exists, skipping. Exception: {oe}"
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.error(f"Error creating table {table}: {exc}")
|
||||
raise RuntimeError(f"Error creating table {table}") from exc
|
||||
|
|
@ -249,7 +267,9 @@ class DatabaseService(Service):
|
|||
if table not in table_names:
|
||||
logger.error("Something went wrong creating the database and tables.")
|
||||
logger.error("Please check your database settings.")
|
||||
raise RuntimeError("Something went wrong creating the database and tables.")
|
||||
raise RuntimeError(
|
||||
"Something went wrong creating the database and tables."
|
||||
)
|
||||
|
||||
logger.debug("Database and tables created successfully")
|
||||
|
||||
|
|
|
|||
|
|
@ -128,7 +128,7 @@ class MonitorService(Service):
|
|||
if conditions:
|
||||
query += " WHERE " + " AND ".join(conditions)
|
||||
|
||||
if order_by:
|
||||
if order_by and order:
|
||||
# Make sure the order is from newest to oldest
|
||||
query += f" ORDER BY {order_by} {order.upper()}"
|
||||
|
||||
|
|
|
|||
|
|
@ -30,8 +30,8 @@ class StateService(Service):
|
|||
class InMemoryStateService(StateService):
|
||||
def __init__(self, settings_service: SettingsService):
|
||||
self.settings_service = settings_service
|
||||
self.states = {}
|
||||
self.observers = defaultdict(list)
|
||||
self.states: dict = {}
|
||||
self.observers: dict = defaultdict(list)
|
||||
self.lock = Lock()
|
||||
|
||||
def append_state(self, key, new_state, run_id: str):
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
|
@ -25,7 +26,10 @@ def patching(record):
|
|||
|
||||
|
||||
def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
|
||||
if os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS and log_level is None:
|
||||
if (
|
||||
os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS
|
||||
and log_level is None
|
||||
):
|
||||
log_level = os.getenv("LANGFLOW_LOG_LEVEL")
|
||||
if log_level is None:
|
||||
log_level = "ERROR"
|
||||
|
|
@ -67,3 +71,46 @@ def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
|
|||
logger.debug(f"Logger set up with log level: {log_level}")
|
||||
if log_file:
|
||||
logger.debug(f"Log file: {log_file}")
|
||||
|
||||
setup_uvicorn_logger()
|
||||
setup_gunicorn_logger()
|
||||
|
||||
|
||||
def setup_uvicorn_logger():
|
||||
loggers = (
|
||||
logging.getLogger(name)
|
||||
for name in logging.root.manager.loggerDict
|
||||
if name.startswith("uvicorn.")
|
||||
)
|
||||
for uvicorn_logger in loggers:
|
||||
uvicorn_logger.handlers = []
|
||||
logging.getLogger("uvicorn").handlers = [InterceptHandler()]
|
||||
|
||||
|
||||
def setup_gunicorn_logger():
|
||||
logging.getLogger("gunicorn.error").handlers = [InterceptHandler()]
|
||||
logging.getLogger("gunicorn.access").handlers = [InterceptHandler()]
|
||||
|
||||
|
||||
class InterceptHandler(logging.Handler):
|
||||
"""
|
||||
Default handler from examples in loguru documentaion.
|
||||
See https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging
|
||||
"""
|
||||
|
||||
def emit(self, record):
|
||||
# Get corresponding Loguru level if it exists
|
||||
try:
|
||||
level = logger.level(record.levelname).name
|
||||
except ValueError:
|
||||
level = record.levelno
|
||||
|
||||
# Find caller from where originated the logged message
|
||||
frame, depth = logging.currentframe(), 2
|
||||
while frame.f_code.co_filename == logging.__file__:
|
||||
frame = frame.f_back
|
||||
depth += 1
|
||||
|
||||
logger.opt(depth=depth, exception=record.exc_info).log(
|
||||
level, record.getMessage()
|
||||
)
|
||||
|
|
|
|||
2035
src/backend/base/poetry.lock
generated
2035
src/backend/base/poetry.lock
generated
File diff suppressed because it is too large
Load diff
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "langflow-base"
|
||||
version = "0.0.18"
|
||||
version = "0.0.21"
|
||||
description = "A Python package with a built-in web application"
|
||||
authors = ["Logspace <contact@logspace.ai>"]
|
||||
maintainers = [
|
||||
|
|
@ -27,19 +27,19 @@ langflow-base = "langflow.__main__:main"
|
|||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.10,<3.12"
|
||||
fastapi = "^0.109.0"
|
||||
fastapi = "^0.110.1"
|
||||
httpx = "*"
|
||||
uvicorn = "^0.27.0"
|
||||
uvicorn = "^0.29.0"
|
||||
gunicorn = "^21.2.0"
|
||||
langchain = "~0.1.0"
|
||||
sqlmodel = "^0.0.14"
|
||||
langchain = "~0.1.14"
|
||||
sqlmodel = "^0.0.16"
|
||||
loguru = "^0.7.1"
|
||||
rich = "^13.7.0"
|
||||
langchain-experimental = "*"
|
||||
pydantic = "^2.5.0"
|
||||
pydantic-settings = "^2.1.0"
|
||||
websockets = "*"
|
||||
typer = "^0.9.0"
|
||||
typer = "^0.12.0"
|
||||
cachetools = "^5.3.1"
|
||||
platformdirs = "^4.2.0"
|
||||
python-multipart = "^0.0.7"
|
||||
|
|
@ -57,37 +57,33 @@ python-socketio = "^5.11.0"
|
|||
python-docx = "^1.1.0"
|
||||
jq = { version = "^1.7.0", markers = "sys_platform != 'win32'" }
|
||||
pypdf = "^4.1.0"
|
||||
chromadb = "^0.4.24"
|
||||
langchain-anthropic = "^0.1.4"
|
||||
langchain-astradb = "^0.1.0"
|
||||
nest-asyncio = "^1.6.0"
|
||||
emoji = "^2.11.0"
|
||||
cryptography = "^42.0.5"
|
||||
langchain-openai = "^0.1.1"
|
||||
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
pytest-asyncio = "^0.21.1"
|
||||
pytest-asyncio = "^0.23.1"
|
||||
types-redis = "^4.6.0.5"
|
||||
ipykernel = "^6.26.0"
|
||||
mypy = "^1.7.1"
|
||||
ruff = "^0.1.5"
|
||||
ruff = "^0.3.5"
|
||||
httpx = "*"
|
||||
pytest = "^7.4.2"
|
||||
pytest = "^8.1.1"
|
||||
types-requests = "^2.31.0"
|
||||
requests = "^2.31.0"
|
||||
pytest-cov = "^4.1.0"
|
||||
pandas-stubs = "^2.0.0.230412"
|
||||
types-pillow = "^9.5.0.2"
|
||||
pytest-cov = "^5.0.0"
|
||||
pandas-stubs = "^2.2.1.230412"
|
||||
types-pillow = "^10.2.0.0"
|
||||
types-pyyaml = "^6.0.12.8"
|
||||
types-python-jose = "^3.3.4.8"
|
||||
types-passlib = "^1.7.7.13"
|
||||
locust = "^2.16.1"
|
||||
pytest-mock = "^3.11.1"
|
||||
pytest-xdist = "^3.3.1"
|
||||
locust = "^2.24.1"
|
||||
pytest-mock = "^3.14.0"
|
||||
pytest-xdist = "^3.5.0"
|
||||
types-pywin32 = "^306.0.0.4"
|
||||
types-google-cloud-ndb = "^2.2.0.0"
|
||||
pytest-sugar = "^0.9.7"
|
||||
types-google-cloud-ndb = "^2.3.0.0"
|
||||
pytest-sugar = "^1.0.0"
|
||||
|
||||
|
||||
[tool.poetry.extras]
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ export default function UndrawCardComponent({
|
|||
}}
|
||||
/>
|
||||
);
|
||||
case "Basic Prompting (Ahoy World!)":
|
||||
case "Basic Prompting (Hello, world!)":
|
||||
return (
|
||||
<BasicPrompt
|
||||
style={{
|
||||
|
|
|
|||
|
|
@ -34,11 +34,15 @@ export default function NewFlowModal({
|
|||
{/* {examples.map((example, idx) => {
|
||||
return <UndrawCardComponent key={idx} flow={example} />;
|
||||
})} */}
|
||||
{examples.find((e) => e.name == "Basic Prompting (Ahoy World!)") && (
|
||||
{examples.find(
|
||||
(e) => e.name == "Basic Prompting (Hello, world!)"
|
||||
) && (
|
||||
<UndrawCardComponent
|
||||
key={1}
|
||||
flow={
|
||||
examples.find((e) => e.name == "Basic Prompting (Ahoy World!)")!
|
||||
examples.find(
|
||||
(e) => e.name == "Basic Prompting (Hello, world!)"
|
||||
)!
|
||||
}
|
||||
/>
|
||||
)}
|
||||
|
|
|
|||
|
|
@ -37,11 +37,11 @@ export default function FlowPage({ view }: { view?: boolean }): JSX.Element {
|
|||
)}
|
||||
<a
|
||||
target={"_blank"}
|
||||
href="https://logspace.ai/"
|
||||
href="https://medium.com/logspace/langflow-datastax-better-together-1b7462cebc4d"
|
||||
className="logspace-page-icon"
|
||||
>
|
||||
{version && <div className="mt-1">⛓️ Langflow v{version}</div>}
|
||||
<div className={version ? "mt-2" : "mt-1"}>Created by Logspace</div>
|
||||
{version && <div className="mt-1">Langflow 🤝 DataStax</div>}
|
||||
<div className={version ? "mt-2" : "mt-1"}>⛓️ v{version}</div>
|
||||
</a>
|
||||
</div>
|
||||
</>
|
||||
|
|
|
|||
|
|
@ -80,6 +80,15 @@ export type InputGlobalComponentType = {
|
|||
editNode?: boolean;
|
||||
};
|
||||
|
||||
export type InputGlobalComponentType = {
|
||||
disabled: boolean;
|
||||
onChange: (value: string) => void;
|
||||
setDb: (value: boolean) => void;
|
||||
name: string;
|
||||
data: NodeDataType;
|
||||
editNode?: boolean;
|
||||
};
|
||||
|
||||
export type KeyPairListComponentType = {
|
||||
value: any;
|
||||
onChange: (value: Object[]) => void;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { debounce } from "lodash";
|
||||
import { SAVE_DEBOUNCE_TIME } from "../constants/constants";
|
||||
import { postCustomComponentUpdate } from "../controllers/API";
|
||||
import { ResponseErrorTypeAPI } from "../types/api";
|
||||
import { NodeDataType } from "../types/flow";
|
||||
|
|
@ -38,4 +39,7 @@ export const handleUpdateValues = async (name: string, data: NodeDataType) => {
|
|||
}
|
||||
};
|
||||
|
||||
export const debouncedHandleUpdateValues = debounce(handleUpdateValues, 200);
|
||||
export const debouncedHandleUpdateValues = debounce(
|
||||
handleUpdateValues,
|
||||
SAVE_DEBOUNCE_TIME
|
||||
);
|
||||
|
|
|
|||
|
|
@ -1,5 +1,8 @@
|
|||
import { test } from "@playwright/test";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(16000);
|
||||
// test.setTimeout(140000);
|
||||
});
|
||||
test.describe("Auto_login tests", () => {
|
||||
test("auto_login sign in", async ({ page }) => {
|
||||
await page.goto("http:localhost:3000/");
|
||||
|
|
|
|||
|
|
@ -1,5 +1,8 @@
|
|||
import { expect, test } from "@playwright/test";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
await page.waitForTimeout(2000);
|
||||
test.setTimeout(120000);
|
||||
});
|
||||
test("CodeAreaModalComponent", async ({ page }) => {
|
||||
await page.goto("http:localhost:3000/");
|
||||
await page.waitForTimeout(2000);
|
||||
|
|
|
|||
|
|
@ -1,6 +1,9 @@
|
|||
import { expect, test } from "@playwright/test";
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(3000);
|
||||
// test.setTimeout(120000);
|
||||
});
|
||||
test.describe("drag and drop test", () => {
|
||||
/// <reference lib="dom"/>
|
||||
test("drop collection", async ({ page }) => {
|
||||
|
|
|
|||
|
|
@ -1,4 +1,8 @@
|
|||
import { Page, test } from "@playwright/test";
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(6000);
|
||||
// test.setTimeout(120000);
|
||||
});
|
||||
|
||||
test.describe("Flow Page tests", () => {
|
||||
async function goToFlowPage(page: Page) {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,8 @@
|
|||
import { expect, test } from "@playwright/test";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(8000);
|
||||
// test.setTimeout(120000);
|
||||
});
|
||||
test("InputComponent", async ({ page }) => {
|
||||
await page.goto("http:localhost:3000/");
|
||||
await page.waitForTimeout(2000);
|
||||
|
|
|
|||
|
|
@ -1,5 +1,8 @@
|
|||
import { expect, test } from "@playwright/test";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(20000);
|
||||
// test.setTimeout(120000);
|
||||
});
|
||||
test("KeypairListComponent", async ({ page }) => {
|
||||
await page.goto("http:localhost:3000/");
|
||||
await page.waitForTimeout(2000);
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import { expect, test } from "@playwright/test";
|
||||
import uaParser from "ua-parser-js";
|
||||
|
||||
test("LangflowShortcuts", async ({ page }) => {
|
||||
const getUA = await page.evaluate(() => navigator.userAgent);
|
||||
const userAgentInfo = uaParser(getUA);
|
||||
|
|
|
|||
|
|
@ -1,5 +1,8 @@
|
|||
import { expect, test } from "@playwright/test";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(12000);
|
||||
// test.setTimeout(120000);
|
||||
});
|
||||
test("NestedComponent", async ({ page }) => {
|
||||
await page.goto("http:localhost:3000/");
|
||||
await page.waitForTimeout(2000);
|
||||
|
|
|
|||
|
|
@ -1,5 +1,8 @@
|
|||
import { expect, test } from "@playwright/test";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(13000);
|
||||
// test.setTimeout(120000);
|
||||
});
|
||||
test("PromptTemplateComponent", async ({ page }) => {
|
||||
await page.goto("http:localhost:3000/");
|
||||
await page.waitForTimeout(2000);
|
||||
|
|
|
|||
|
|
@ -1,6 +1,9 @@
|
|||
import { Page, expect, test } from "@playwright/test";
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(14000);
|
||||
// test.setTimeout(120000);
|
||||
});
|
||||
test.describe("save component tests", () => {
|
||||
async function saveComponent(page: Page, pattern: RegExp, n: number) {
|
||||
for (let i = 0; i < n; i++) {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,8 @@
|
|||
import { expect, test } from "@playwright/test";
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// await page.waitForTimeout(15000);
|
||||
// test.setTimeout(120000);
|
||||
});
|
||||
test("ToggleComponent", async ({ page }) => {
|
||||
await page.goto("http:localhost:3000/");
|
||||
await page.waitForTimeout(2000);
|
||||
|
|
|
|||
|
|
@ -10,6 +10,10 @@ import orjson
|
|||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from httpx import AsyncClient
|
||||
from sqlmodel import Session, SQLModel, create_engine, select
|
||||
from sqlmodel.pool import StaticPool
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from langflow.graph.graph.base import Graph
|
||||
from langflow.initial_setup.setup import STARTER_FOLDER_NAME
|
||||
from langflow.services.auth.utils import get_password_hash
|
||||
|
|
@ -380,7 +384,7 @@ def get_starter_project(active_user):
|
|||
# once the client is created, we can get the starter project
|
||||
with session_getter(get_db_service()) as session:
|
||||
flow = session.exec(
|
||||
select(Flow).where(Flow.folder == STARTER_FOLDER_NAME).where(Flow.name == "Basic Prompting (Ahoy World!)")
|
||||
select(Flow).where(Flow.folder == STARTER_FOLDER_NAME).where(Flow.name == "Basic Prompting (Hello, world!)")
|
||||
).first()
|
||||
if not flow:
|
||||
raise ValueError("No starter project found")
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ from uuid import uuid4
|
|||
import pytest
|
||||
from fastapi import status
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from langflow.interface.custom.directory_reader.directory_reader import DirectoryReader
|
||||
from langflow.services.deps import get_settings_service
|
||||
from langflow.template.frontend_node.chains import TimeTravelGuideChainNode
|
||||
|
|
@ -447,8 +446,8 @@ def test_successful_run_no_payload(client, starter_project, created_api_key):
|
|||
display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")]
|
||||
assert all([name in display_names for name in ["Chat Output"]])
|
||||
inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")]
|
||||
expected_result = "Langflow"
|
||||
assert all([expected_result in result for result in inner_results]), inner_results
|
||||
|
||||
assert all([len(result) > 0 for result in inner_results]), inner_results
|
||||
|
||||
|
||||
def test_successful_run_with_output_type_text(client, starter_project, created_api_key):
|
||||
|
|
|
|||
|
|
@ -72,8 +72,8 @@ def test_text_to_record_component():
|
|||
|
||||
# Act
|
||||
# Replace with your actual test data
|
||||
dict_with_text = {"key": "value"}
|
||||
result = text_to_record_component.build(dict_with_text)
|
||||
dict_with_text = {"field_1": {"key": "value"}}
|
||||
result = text_to_record_component.build(number_of_fields=1, **dict_with_text)
|
||||
|
||||
# Assert
|
||||
# Replace with your actual expected result
|
||||
|
|
|
|||
|
|
@ -1,9 +1,6 @@
|
|||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import func
|
||||
from sqlmodel import select
|
||||
|
||||
from langflow.graph.graph.base import Graph
|
||||
from langflow.graph.schema import RunOutputs
|
||||
from langflow.initial_setup.setup import (
|
||||
|
|
@ -15,6 +12,8 @@ from langflow.initial_setup.setup import (
|
|||
from langflow.memory import delete_messages
|
||||
from langflow.services.database.models.flow.model import Flow
|
||||
from langflow.services.deps import session_scope
|
||||
from sqlalchemy import func
|
||||
from sqlmodel import select
|
||||
|
||||
|
||||
def test_load_starter_projects():
|
||||
|
|
@ -78,7 +77,9 @@ async def test_starter_project_can_run_successfully(client):
|
|||
projects = session.exec(select(Flow).where(Flow.folder == STARTER_FOLDER_NAME)).all()
|
||||
|
||||
graphs: list[tuple[str, Graph]] = [
|
||||
(project.name, Graph.from_payload(project.data, flow_id=project.id)) for project in projects
|
||||
(project.name, Graph.from_payload(project.data, flow_id=project.id))
|
||||
for project in projects
|
||||
if "Document" not in project.name or "RAG" not in project.name
|
||||
]
|
||||
assert len(graphs) == len(projects)
|
||||
for name, graph in graphs:
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue