diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
deleted file mode 100644
index d4a67f8f4..000000000
--- a/.github/workflows/ci.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-name: "Async API tests"
-
-on:
- push:
- branches:
- - dev
- pull_request:
- branches:
- - dev
- - main
-
-jobs:
- build-and-test:
- runs-on: ubuntu-latest
- env:
- OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache Docker layers
- uses: actions/cache@v4
- with:
- path: /tmp/.buildx-cache
- key: ${{ runner.os }}-buildx-${{ github.sha }}
- restore-keys: |
- ${{ runner.os }}-buildx-
-
- - name: Set up Docker
- run: docker --version && docker-compose --version
-
- - name: "Create env file"
- working-directory: ./deploy
- run: |
- echo "${{ secrets.ENV_FILE }}" > .env
-
- - name: Build and start services
-
- working-directory: ./deploy
- run: docker compose up --exit-code-from tests tests result_backend broker celeryworker db --build
- continue-on-error: true
-
- # - name: Stop services
- # run: docker compose down
diff --git a/.github/workflows/pre-release-base.yml b/.github/workflows/pre-release-base.yml
index ac2a29a94..f61c1843e 100644
--- a/.github/workflows/pre-release-base.yml
+++ b/.github/workflows/pre-release-base.yml
@@ -1,26 +1,20 @@
name: Langflow Base Pre-release
-
+run-name: Langflow Base Pre-release by @${{ github.actor }}
on:
- pull_request:
- types:
- - closed
- branches:
- - dev
- paths:
- - "src/backend/base/pyproject.toml"
workflow_dispatch:
inputs:
- force_release:
- description: "Force a release"
- required: false
- default: "false"
+ release_package:
+ description: "Release package"
+ required: true
+ type: boolean
+ default: false
env:
POETRY_VERSION: "1.8.2"
jobs:
if_release:
- if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }} || ${{ github.event_name == 'workflow_dispatch' && inputs.force_release == 'true' }}
+ if: inputs.release_package == true
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -50,22 +44,11 @@ jobs:
fi
- name: Build project for distribution
run: make build base=true
-
- name: Publish to PyPI
env:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
run: |
make publish base=true
- - name: Create Release
- uses: ncipollo/release-action@v1
- with:
- artifacts: "dist/*"
- token: ${{ secrets.GITHUB_TOKEN }}
- draft: false
- generateReleaseNotes: true
- prerelease: true
- tag: v${{ steps.check-version.outputs.version }}
- commit: dev
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
@@ -83,3 +66,13 @@ jobs:
file: ./build_and_push_base.Dockerfile
tags: |
logspace/langflow:base-${{ steps.check-version.outputs.version }}
+ - name: Create Release
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "dist/*"
+ token: ${{ secrets.GITHUB_TOKEN }}
+ draft: false
+ generateReleaseNotes: true
+ prerelease: true
+ tag: v${{ steps.check-version.outputs.version }}
+ commit: dev
diff --git a/.github/workflows/pre-release-langflow.yml b/.github/workflows/pre-release-langflow.yml
index 11f894e1a..4c0be75c9 100644
--- a/.github/workflows/pre-release-langflow.yml
+++ b/.github/workflows/pre-release-langflow.yml
@@ -1,19 +1,13 @@
name: Langflow Pre-release
-
+run-name: Langflow Pre-release by @${{ github.actor }}
on:
- pull_request:
- types:
- - closed
- branches:
- - dev
- paths:
- - "pyproject.toml"
workflow_dispatch:
inputs:
- force_release:
- description: "Force a release"
- required: false
- default: "false"
+ release_package:
+ description: "Release package"
+ required: true
+ type: boolean
+ default: false
workflow_run:
workflows: ["pre-release-base"]
types: [completed]
@@ -24,7 +18,7 @@ env:
jobs:
if_release:
- if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }} || ${{ github.event_name == 'workflow_dispatch' && inputs.force_release == 'true' }}
+ if: inputs.release_package == true
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -38,7 +32,7 @@ jobs:
- name: Check Version
id: check-version
run: |
- version=$(cd src/backend/base && poetry version --short)
+ version=$(poetry version --short)
last_released_version=$(curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
if [ "$version" = "$last_released_version" ]; then
echo "Version $version is already released. Skipping release."
@@ -55,16 +49,6 @@ jobs:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
run: |
make publish main=true
- - name: Create Release
- uses: ncipollo/release-action@v1
- with:
- artifacts: "dist/*"
- token: ${{ secrets.GITHUB_TOKEN }}
- draft: false
- generateReleaseNotes: true
- prerelease: true
- tag: v${{ steps.check-version.outputs.version }}
- commit: dev
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
@@ -83,3 +67,14 @@ jobs:
tags: |
logspace/langflow:${{ steps.check-version.outputs.version }}
logspace/langflow:1.0-alpha
+
+ - name: Create Release
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "dist/*"
+ token: ${{ secrets.GITHUB_TOKEN }}
+ draft: false
+ generateReleaseNotes: true
+ prerelease: true
+ tag: v${{ steps.check-version.outputs.version }}
+ commit: dev
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 8004618f6..5d522c230 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -31,15 +31,6 @@ jobs:
id: check-version
run: |
echo version=$(poetry version --short) >> $GITHUB_OUTPUT
- - name: Create Release
- uses: ncipollo/release-action@v1
- with:
- artifacts: "dist/*"
- token: ${{ secrets.GITHUB_TOKEN }}
- draft: false
- generateReleaseNotes: true
- tag: v${{ steps.check-version.outputs.version }}
- commit: main
- name: Publish to PyPI
env:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
@@ -63,3 +54,12 @@ jobs:
tags: |
logspace/langflow:${{ steps.check-version.outputs.version }}
logspace/langflow:latest
+ - name: Create Release
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "dist/*"
+ token: ${{ secrets.GITHUB_TOKEN }}
+ draft: false
+ generateReleaseNotes: true
+ tag: v${{ steps.check-version.outputs.version }}
+ commit: main
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c58bb92f1..383d21344 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -16,12 +16,12 @@ The branch structure is as follows:
## 🚩GitHub Issues
-Our [issues](https://github.com/logspace-ai/langflow/issues) page is kept up to date
+Our [issues](https://github.com/langflow-ai/langflow/issues) page is kept up to date
with bugs, improvements, and feature requests. There is a taxonomy of labels to help
with sorting and discovery of issues of interest.
If you're looking for help with your code, consider posting a question on the
-[GitHub Discussions board](https://github.com/logspace-ai/langflow/discussions). Please
+[GitHub Discussions board](https://github.com/langflow-ai/langflow/discussions). Please
understand that we won't be able to provide individual support via email. We
also believe that help is much more valuable if it's **shared publicly**,
so that more people can benefit from it.
@@ -40,7 +40,7 @@ so that more people can benefit from it.
## Issue labels
-[See this page](https://github.com/logspace-ai/langflow/labels) for an overview of
+[See this page](https://github.com/langflow-ai/langflow/labels) for an overview of
the system we use to tag our issues and pull requests.
## Local development
diff --git a/GCP_DEPLOYMENT.md b/GCP_DEPLOYMENT.md
index e00e9b1f8..12b6b0239 100644
--- a/GCP_DEPLOYMENT.md
+++ b/GCP_DEPLOYMENT.md
@@ -4,25 +4,27 @@ This guide will help you set up a Langflow development VM in a Google Cloud Plat
> **Note**: When Cloud Shell opens, be sure to select **Trust repo**. Some `gcloud` commands might not run in an ephemeral Cloud Shell environment.
+## Standard VM
-## Standard VM
-[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment.
+
## Spot/Preemptible Instance
-[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts.
## Pricing (approximate)
-> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator)
-
-| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
-| -------------- | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | ----- |
-| 100 GB Disk | - | $10/month | - | $10/month | Disk cost remains the same for both regular and Spot/Preemptible VMs |
-| VM (n1-standard-4) | $0.15/hr | ~$108/month | ~$0.04/hr | ~$29/month | The VM cost can be significantly reduced using a Spot/Preemptible instance |
-| **Total** | **$0.15/hr** | **~$118/month** | **~$0.04/hr** | **~$39/month** | Total costs for running the VM and disk 24/7 for an entire month |
+> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator)
+>
+
+| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
+| ------------------ | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | -------------------------------------------------------------------------- |
+| 100 GB Disk | - | $10/month | - | $10/month | Disk cost remains the same for both regular and Spot/Preemptible VMs |
+| VM (n1-standard-4) | $0.15/hr | ~$108/month | ~$0.04/hr | ~$29/month | The VM cost can be significantly reduced using a Spot/Preemptible instance |
+| **Total** | **$0.15/hr** | **~$118/month** | **~$0.04/hr** | **~$39/month** | Total costs for running the VM and disk 24/7 for an entire month |
diff --git a/README.md b/README.md
index 4c4c2f1da..8668788b4 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
-# [](https://www.langflow.org)
+# [](https://www.langflow.org)
### [Langflow](https://www.langflow.org) is a new, visual way to build, iterate and deploy AI apps.
@@ -28,7 +28,7 @@ Then, run Langflow with:
python -m langflow run
```
-You can also preview Langflow in [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow-Preview). [Clone the space using this link](https://huggingface.co/spaces/Logspace/Langflow-Preview?duplicate=true), to create your own Langflow workspace in minutes.
+You can also preview Langflow in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview). [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true), to create your own Langflow workspace in minutes.
# 🎨 Creating Flows
@@ -96,7 +96,7 @@ Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP)
Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project.
-[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md)
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md)
## Deploy on Railway
@@ -104,7 +104,7 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
## Deploy on Render
-
+
@@ -114,11 +114,11 @@ We welcome contributions from developers of all levels to our open-source projec
---
-[](https://star-history.com/#logspace-ai/langflow&Date)
+[](https://star-history.com/#langflow-ai/langflow&Date)
# 🌟 Contributors
-[](https://github.com/logspace-ai/langflow/graphs/contributors)
+[](https://github.com/langflow-ai/langflow/graphs/contributors)
# 📄 License
diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx
index b07f953fa..bdf631314 100644
--- a/docs/docs/components/custom.mdx
+++ b/docs/docs/components/custom.mdx
@@ -37,7 +37,7 @@ The CustomComponent class serves as the foundation for creating custom component
| _`langflow.field_typing.Prompt`_ |
| _`langchain.chains.base.Chain`_ |
| _`langchain.PromptTemplate`_ |
- | _`langchain.llms.base.BaseLLM`_ |
+ | _`langchain.schema.language_model.BaseLanguageModel`_ |
| _`langchain.Tool`_ |
| _`langchain.document_loaders.base.BaseLoader`_ |
| _`langchain.schema.Document`_ |
diff --git a/docs/docs/contributing/community.md b/docs/docs/contributing/community.md
index 6bb62641d..604487133 100644
--- a/docs/docs/contributing/community.md
+++ b/docs/docs/contributing/community.md
@@ -2,11 +2,11 @@
## 🤖 Join **Langflow** Discord server
- Join us to ask questions and showcase your projects.
+Join us to ask questions and showcase your projects.
- Let's bring together the building blocks of AI integration!
+Let's bring together the building blocks of AI integration!
- Langflow [Discord](https://discord.gg/EqksyE2EX9) server.
+Langflow [Discord](https://discord.gg/EqksyE2EX9) server.
---
@@ -15,9 +15,10 @@
Follow [@langflow_ai](https://twitter.com/langflow_ai) on **Twitter** to get the latest news about **Langflow**.
---
+
## ⭐️ Star **Langflow** on GitHub
-You can "star" **Langflow** in [GitHub](https://github.com/logspace-ai/langflow).
+You can "star" **Langflow** in [GitHub](https://github.com/langflow-ai/langflow).
By adding a star, other users will be able to find it more easily and see that it has been already useful for others.
@@ -25,14 +26,12 @@ By adding a star, other users will be able to find it more easily and see that i
## 👀 Watch the GitHub repository for releases
-You can "watch" **Langflow** in [GitHub](https://github.com/logspace-ai/langflow).
-
+You can "watch" **Langflow** in [GitHub](https://github.com/langflow-ai/langflow).
If you select "Watching" instead of "Releases only" you will receive notifications when someone creates a new issue or question. You can also specify that you only want to be notified about new issues, discussions, PRs, etc.
-
Then you can try and help them solve those questions.
---
-Thanks! 🚀
\ No newline at end of file
+Thanks! 🚀
diff --git a/docs/docs/contributing/github-issues.md b/docs/docs/contributing/github-issues.md
index 41cc674e1..269c976cd 100644
--- a/docs/docs/contributing/github-issues.md
+++ b/docs/docs/contributing/github-issues.md
@@ -1,11 +1,11 @@
# GitHub Issues
-Our [issues](https://github.com/logspace-ai/langflow/issues) page is kept up to date
+Our [issues](https://github.com/langflow-ai/langflow/issues) page is kept up to date
with bugs, improvements, and feature requests. There is a taxonomy of labels to help
with sorting and discovery of issues of interest.
If you're looking for help with your code, consider posting a question on the
-[GitHub Discussions board](https://github.com/logspace-ai/langflow/discussions). Please
+[GitHub Discussions board](https://github.com/langflow-ai/langflow/discussions). Please
understand that we won't be able to provide individual support via email. We
also believe that help is much more valuable if it's **shared publicly**,
so that more people can benefit from it.
@@ -21,7 +21,6 @@ so that more people can benefit from it.
logs or tracebacks, you can wrap them in `` and ` `. This
[collapses the content](https://developer.mozilla.org/en/docs/Web/HTML/Element/details) so it only becomes visible on click, making the issue easier to read and follow.
-
## Issue labels
-[See this page](https://github.com/logspace-ai/langflow/labels) for an overview of the system we use to tag our issues and pull requests.
\ No newline at end of file
+[See this page](https://github.com/langflow-ai/langflow/labels) for an overview of the system we use to tag our issues and pull requests.
diff --git a/docs/docs/contributing/how-contribute.md b/docs/docs/contributing/how-contribute.md
index 53b430496..4939edaee 100644
--- a/docs/docs/contributing/how-contribute.md
+++ b/docs/docs/contributing/how-contribute.md
@@ -1,6 +1,6 @@
# How to contribute?
-👋 Hello there! We welcome contributions from developers of all levels to our open-source project on [GitHub](https://github.com/logspace-ai/langflow). If you'd like to contribute, please check our contributing guidelines and help make Langflow more accessible.
+👋 Hello there! We welcome contributions from developers of all levels to our open-source project on [GitHub](https://github.com/langflow-ai/langflow). If you'd like to contribute, please check our contributing guidelines and help make Langflow more accessible.
As an open-source project in a rapidly developing field, we are extremely open
to contributions, whether in the form of a new feature, improved infra, or better documentation.
@@ -10,6 +10,7 @@ To contribute to this project, please follow a ["fork and pull request"](https:/
Please do not try to push directly to this repo unless you are a maintainer.
---
+
## Local development
You can develop Langflow using docker compose, or locally.
@@ -17,6 +18,7 @@ You can develop Langflow using docker compose, or locally.
We provide a .vscode/launch.json file for debugging the backend in VSCode, which is a lot faster than using docker compose.
Setting up hooks:
+
```bash
make init
```
@@ -48,7 +50,6 @@ And the frontend:
make frontend
```
-
---
## Docker compose
diff --git a/docs/docs/deployment/gcp-deployment.md b/docs/docs/deployment/gcp-deployment.md
index 032426d94..bd2b48968 100644
--- a/docs/docs/deployment/gcp-deployment.md
+++ b/docs/docs/deployment/gcp-deployment.md
@@ -6,10 +6,9 @@ This guide will help you set up a Langflow development VM in a Google Cloud Plat
> Note: When Cloud Shell opens, be sure to select **Trust repo**. Some `gcloud` commands might not run in an ephemeral Cloud Shell environment.
+## Standard VM
-
-## Standard VM
-[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment.
@@ -24,11 +23,11 @@ When running as a [spot (preemptible) instance](https://cloud.google.com/compute
---
## Pricing (approximate)
+
> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator)
-
-| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
-| -------------- | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | ----- |
-| 100 GB Disk | - | $10/month | - | $10/month | Disk cost remains the same for both regular and Spot/Preemptible VMs |
-| VM (n1-standard-4) | $0.15/hr | ~$108/month | ~$0.04/hr | ~$29/month | The VM cost can be significantly reduced using a Spot/Preemptible instance |
-| **Total** | **$0.15/hr** | **~$118/month** | **~$0.04/hr** | **~$39/month** | Total costs for running the VM and disk 24/7 for an entire month |
+| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
+| ------------------ | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | -------------------------------------------------------------------------- |
+| 100 GB Disk | - | $10/month | - | $10/month | Disk cost remains the same for both regular and Spot/Preemptible VMs |
+| VM (n1-standard-4) | $0.15/hr | ~$108/month | ~$0.04/hr | ~$29/month | The VM cost can be significantly reduced using a Spot/Preemptible instance |
+| **Total** | **$0.15/hr** | **~$118/month** | **~$0.04/hr** | **~$39/month** | Total costs for running the VM and disk 24/7 for an entire month |
diff --git a/docs/docs/guidelines/chat-widget.mdx b/docs/docs/guidelines/chat-widget.mdx
index 46ed974a8..6d47e123b 100644
--- a/docs/docs/guidelines/chat-widget.mdx
+++ b/docs/docs/guidelines/chat-widget.mdx
@@ -78,7 +78,7 @@ The Chat Widget can be embedded into any HTML page, inside a _``_ tag, as
To embed the Chat Widget using React, you'll need to insert this _`
+
```
Then, declare your Web Component and encapsulate it in a React component.
@@ -115,7 +115,7 @@ Finally, you can place the component anywhere in your code to display the Chat W
To use it in Angular, first add this _`
+
```
When you use a custom web component in an Angular template, the Angular compiler might show a warning when it doesn't recognize the custom elements by default. To suppress this warning, add _`CUSTOM_ELEMENTS_SCHEMA`_ to the module's _`@NgModule.schemas`_.
@@ -185,7 +185,7 @@ Use the widget API to customize your Chat Widget:
| Prop | Type | Required | Description |
-| --------------------- | ------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| --------------------- | ------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| bot_message_style | JSON | No | Applies custom formatting to bot messages. |
| chat_input_field | String | Yes | Defines the type of the input field for chat messages. |
| chat_inputs | JSON | Yes | Determines the chat input elements and their respective values. |
@@ -208,4 +208,4 @@ Use the widget API to customize your Chat Widget:
| tweaks | JSON | No | Applies additional custom adjustments for the associated flow. |
| user_message_style | JSON | No | Determines the formatting for user messages in the chat window. |
| width | Number | No | Sets the width of the chat window in pixels. |
-| window_title | String | No | Sets the title displayed in the chat window's header or title bar. |
+| window_title          | String  | No       | Sets the title displayed in the chat window's header or title bar.                                                                                 |
diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx
index a509214b8..6decb3833 100644
--- a/docs/docs/guidelines/custom-component.mdx
+++ b/docs/docs/guidelines/custom-component.mdx
@@ -131,7 +131,7 @@ class MyComponent(CustomComponent):
---
-The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLLM, or basic Python types). Check out all supported types in the [component reference](../components/custom).
+The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLanguageModel, or basic Python types). Check out all supported types in the [component reference](../components/custom).
```python
from langflow.custom import CustomComponent
@@ -366,7 +366,7 @@ For advanced customization, Langflow offers the option to create and load custom
### Folder Structure
-Create a folder that follows the same structural conventions as the [config.yaml](https://github.com/logspace-ai/langflow/blob/dev/src/backend/base/langflow/config.yaml) file. Inside this main directory, use a `custom_components` subdirectory for your custom components.
+Create a folder that follows the same structural conventions as the [config.yaml](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/config.yaml) file. Inside this main directory, use a `custom_components` subdirectory for your custom components.
Inside `custom_components`, you can create a Python file for each component. Similarly, any custom agents should be housed in an `agents` subdirectory.
@@ -406,4 +406,5 @@ Langflow will attempt to load all of the components found in the specified direc
Once your custom components have been loaded successfully, they will appear in Langflow's sidebar. From there, you can add them to your Langflow canvas for use. However, please note that components with errors will not be available for addition to the canvas. Always ensure your code is error-free before attempting to load components.
-Remember, creating custom components allows you to extend the functionality of Langflow to better suit your unique needs. Happy coding!
+Remember, creating custom components allows you to extend the functionality of Langflow to better suit your unique needs. Happy coding!
+
diff --git a/docs/docs/whats-new/migrating-to-one-point-zero.mdx b/docs/docs/whats-new/migrating-to-one-point-zero.mdx
index 987f00277..c140bdf94 100644
--- a/docs/docs/whats-new/migrating-to-one-point-zero.mdx
+++ b/docs/docs/whats-new/migrating-to-one-point-zero.mdx
@@ -14,7 +14,7 @@ import Admonition from "@theme/Admonition";
Langflow 1.0 is a significant update that brings many exciting changes and improvements to the platform.
This guide will walk you through the key improvements and help you migrate your existing projects to the new version.
-If you have any questions or need assistance during the migration process, please don't hesitate to reach out to in our [Discord](https://discord.gg/wZSWQaukgJ) or [GitHub](https://github.com/logspace-ai/langflow/issues) community.
+If you have any questions or need assistance during the migration process, please don't hesitate to reach out to in our [Discord](https://discord.gg/wZSWQaukgJ) or [GitHub](https://github.com/langflow-ai/langflow/issues) community.
We have a special channel in our Discord server dedicated to Langflow 1.0 migration, where you can ask questions, share your experiences, and get help from the community.
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js
index 979953918..f64b180c7 100644
--- a/docs/docusaurus.config.js
+++ b/docs/docusaurus.config.js
@@ -44,7 +44,7 @@ module.exports = {
// sidebarPath: 'sidebars.js',
},
gtag: {
- trackingID: 'G-XHC7G628ZP',
+ trackingID: "G-XHC7G628ZP",
anonymizeIP: true,
},
theme: {
@@ -87,7 +87,7 @@ module.exports = {
// right
{
position: "right",
- href: "https://github.com/logspace-ai/langflow",
+ href: "https://github.com/langflow-ai/langflow",
position: "right",
className: "header-github-link",
target: "_blank",
@@ -124,7 +124,7 @@ module.exports = {
},
announcementBar: {
content:
- '⭐️ If you like ⛓️Langflow, star it on GitHub! ⭐️',
+ '⭐️ If you like ⛓️Langflow, star it on GitHub! ⭐️',
backgroundColor: "#E8EBF1", //Mustard Yellow #D19900 #D4B20B - Salmon #E9967A
textColor: "#1C1E21",
isCloseable: false,
diff --git a/poetry.lock b/poetry.lock
index 6f9ca269b..8826908c2 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -455,17 +455,17 @@ files = [
[[package]]
name = "boto3"
-version = "1.34.79"
+version = "1.34.80"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "boto3-1.34.79-py3-none-any.whl", hash = "sha256:265b0b4865e8c07e27abb32a31d2bd9129bb009b1d89ca0783776ec084886123"},
- {file = "boto3-1.34.79.tar.gz", hash = "sha256:139dd2d94eaa0e3213ff37ba7cf4cb2e3823269178fe8f3e33c965f680a9ddde"},
+ {file = "boto3-1.34.80-py3-none-any.whl", hash = "sha256:bb8f433c04dcdffbd4a802df56c1c30f2be23b1161fd8fb45e4b76c1487ec122"},
+ {file = "boto3-1.34.80.tar.gz", hash = "sha256:5627f6ecadb46fc7c9f8c368baf948f1b00a3fd2f8eb1275c254469853ad8fdb"},
]
[package.dependencies]
-botocore = ">=1.34.79,<1.35.0"
+botocore = ">=1.34.80,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -474,13 +474,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.34.79"
+version = "1.34.80"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
- {file = "botocore-1.34.79-py3-none-any.whl", hash = "sha256:a42a014d3dbaa9ef123810592af69f9e55b456c5be3ac9efc037325685519e83"},
- {file = "botocore-1.34.79.tar.gz", hash = "sha256:6b59b0f7de219d383a2a633f6718c2600642ebcb707749dc6c67a6a436474b7a"},
+ {file = "botocore-1.34.80-py3-none-any.whl", hash = "sha256:354a00f03faba52acc6f1a84fa4f035d48541633be98ccc24b59dc544f679f8b"},
+ {file = "botocore-1.34.80.tar.gz", hash = "sha256:8402262e819f3d46df504bbd781e770858c0130b90f660699f75ef3a63abca5a"},
]
[package.dependencies]
@@ -1129,13 +1129,13 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
[[package]]
name = "cohere"
-version = "5.2.2"
+version = "5.2.4"
description = ""
optional = false
python-versions = "<4.0,>=3.8"
files = [
- {file = "cohere-5.2.2-py3-none-any.whl", hash = "sha256:dae0c7318fa0d15a2147a044a49c1f40164a6c8135a260cc46214058c7c06085"},
- {file = "cohere-5.2.2.tar.gz", hash = "sha256:43243b86d4abf3140f9077435580ebceaa431fb7d57a9fef6858eee880f26646"},
+ {file = "cohere-5.2.4-py3-none-any.whl", hash = "sha256:50e8cbd009a6d6f6ce7127a0b62c50d3dfcfdb853f681f0ac315cfa70599fee4"},
+ {file = "cohere-5.2.4.tar.gz", hash = "sha256:2bf6e905773116ad3fff348e054e4ce1a1830092a63cb48fa8180beda4cbb96a"},
]
[package.dependencies]
@@ -1312,33 +1312,6 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
[package.extras]
toml = ["tomli"]
-[[package]]
-name = "crewai"
-version = "0.22.5"
-description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks."
-optional = false
-python-versions = ">=3.10,<=3.13"
-files = [
- {file = "crewai-0.22.5-py3-none-any.whl", hash = "sha256:9f254d8b2ebd7fae9a43d0ebab116a3f536c6620ca3bc955cb796fc8bb9beb7d"},
- {file = "crewai-0.22.5.tar.gz", hash = "sha256:03c76a04f46a432a1c3d9e5e0c8e039f983b25019194b0ab2ec594762ac380cf"},
-]
-
-[package.dependencies]
-click = ">=8.1.7,<9.0.0"
-instructor = ">=0.5.2,<0.6.0"
-langchain = ">=0.1.10,<0.2.0"
-langchain-openai = ">=0.0.5,<0.0.6"
-openai = ">=1.13.3,<2.0.0"
-opentelemetry-api = ">=1.22.0,<2.0.0"
-opentelemetry-exporter-otlp-proto-http = ">=1.22.0,<2.0.0"
-opentelemetry-sdk = ">=1.22.0,<2.0.0"
-pydantic = ">=2.4.2,<3.0.0"
-python-dotenv = "1.0.0"
-regex = ">=2023.12.25,<2024.0.0"
-
-[package.extras]
-tools = ["crewai-tools (>=0.0.15,<0.0.16)"]
-
[[package]]
name = "cryptography"
version = "42.0.5"
@@ -1515,13 +1488,13 @@ files = [
[[package]]
name = "deepdiff"
-version = "7.0.0"
+version = "7.0.1"
description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other."
optional = false
python-versions = ">=3.8"
files = [
- {file = "deepdiff-7.0.0-py3-none-any.whl", hash = "sha256:f7bbb845f83ad6b9453a4ab07c579bdc6f1df712edc515740455a9b88c2bc41a"},
- {file = "deepdiff-7.0.0.tar.gz", hash = "sha256:4e07da4f2a1ae069b7465d264715764f3b36ce181ec89f47050ead61711b1e9a"},
+ {file = "deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3"},
+ {file = "deepdiff-7.0.1.tar.gz", hash = "sha256:260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf"},
]
[package.dependencies]
@@ -1674,13 +1647,13 @@ files = [
[[package]]
name = "dspy-ai"
-version = "2.4.0"
+version = "2.4.5"
description = "DSPy"
optional = false
python-versions = ">=3.9"
files = [
- {file = "dspy-ai-2.4.0.tar.gz", hash = "sha256:d646ec015270b70bd51916346c01680923ccfa386c9e5474caac6d4366148678"},
- {file = "dspy_ai-2.4.0-py3-none-any.whl", hash = "sha256:edea508dc315702f61ae39568eaf2d8789ba4f8682102e730e016801b9102703"},
+ {file = "dspy-ai-2.4.5.tar.gz", hash = "sha256:6d9f166f9c214a86cfa0ef0e1d1489b4034dbed1cdab61ce152de8c09f64d799"},
+ {file = "dspy_ai-2.4.5-py3-none-any.whl", hash = "sha256:341e239374fba86ad7f9cd1eb3810ccd70bbbdfe8e9b2fa79038da0e0e720d4e"},
]
[package.dependencies]
@@ -1697,6 +1670,7 @@ tqdm = "*"
ujson = "*"
[package.extras]
+anthropic = ["anthropic (>=0.18.0,<0.19.0)"]
chromadb = ["chromadb (>=0.4.14,<0.5.0)"]
dev = ["pytest (>=6.2.5)"]
docs = ["autodoc-pydantic", "docutils (<0.17)", "furo (>=2023.3.27)", "m2r2", "myst-nb", "myst-parser", "sphinx (>=4.3.0)", "sphinx-autobuild", "sphinx-automodapi (==0.16.0)", "sphinx-reredirects (>=0.1.2)", "sphinx-rtd-theme"]
@@ -1778,13 +1752,13 @@ files = [
[[package]]
name = "ecdsa"
-version = "0.18.0"
+version = "0.19.0"
description = "ECDSA cryptographic signature library (pure python)"
optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6"
files = [
- {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"},
- {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"},
+ {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"},
+ {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"},
]
[package.dependencies]
@@ -3320,26 +3294,6 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
-[[package]]
-name = "instructor"
-version = "0.5.2"
-description = "structured outputs for llm"
-optional = false
-python-versions = ">=3.10,<4.0"
-files = [
- {file = "instructor-0.5.2-py3-none-any.whl", hash = "sha256:8c7c927f3cbf6cd863eeebceae3f021e27eaca2ceaf9e9f3c8204540a1126160"},
- {file = "instructor-0.5.2.tar.gz", hash = "sha256:d8d679eb4624254db615794aaab59840e506fa696bc0181d998ae4f9ded2706d"},
-]
-
-[package.dependencies]
-aiohttp = ">=3.9.1,<4.0.0"
-docstring-parser = ">=0.15,<0.16"
-openai = ">=1.1.0,<2.0.0"
-pydantic = ">=2.0.2,<3.0.0"
-rich = ">=13.7.0,<14.0.0"
-tenacity = ">=8.2.3,<9.0.0"
-typer = ">=0.9.0,<0.10.0"
-
[[package]]
name = "ipykernel"
version = "6.29.4"
@@ -3786,37 +3740,6 @@ files = [
cohere = ">=5.1.4,<6.0.0"
langchain-core = ">=0.1.32,<0.2.0"
-[[package]]
-name = "langchain-astradb"
-version = "0.1.0"
-description = "An integration package connecting Astra DB and LangChain"
-optional = false
-python-versions = ">=3.8.1,<4.0"
-files = [
- {file = "langchain_astradb-0.1.0-py3-none-any.whl", hash = "sha256:c6686089da343fce8c31e36c9162323e88888300b09d56b72347a19449d7361f"},
- {file = "langchain_astradb-0.1.0.tar.gz", hash = "sha256:c8a3426c9daa2beeec2dc7a718186b0b9c388082e9543e0bc07363712cc3b947"},
-]
-
-[package.dependencies]
-astrapy = ">=0.7.7,<0.8.0"
-langchain-core = ">=0.1.31,<0.2.0"
-numpy = ">=1,<2"
-
-[[package]]
-name = "langchain-cohere"
-version = "0.1.0"
-description = "An integration package connecting Cohere and LangChain"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "langchain_cohere-0.1.0-py3-none-any.whl", hash = "sha256:f60e9eb41f7d4ead9659bddb3fae7aa18ddc3fdf2b2867be4bd8a565229f488d"},
- {file = "langchain_cohere-0.1.0.tar.gz", hash = "sha256:960551293ea58d170fad37d44657d3ae4587f6b2e8f3f58922c53c59b9e9d85c"},
-]
-
-[package.dependencies]
-cohere = ">=5.1.4,<6.0.0"
-langchain-core = ">=0.1.32,<0.2.0"
-
[[package]]
name = "langchain-community"
version = "0.0.31"
@@ -3915,7 +3838,7 @@ files = [
[package.dependencies]
langchain-core = ">=0.1.33,<0.2.0"
openai = ">=1.10.0,<2.0.0"
-tiktoken = ">=0.5.2,<0.6.0"
+tiktoken = ">=0.5.2,<1"
[[package]]
name = "langchain-text-splitters"
@@ -3950,7 +3873,7 @@ six = "*"
[[package]]
name = "langflow-base"
-version = "0.0.21"
+version = "0.0.24"
description = "A Python package with a built-in web application"
optional = false
python-versions = ">=3.10,<3.12"
@@ -3974,7 +3897,7 @@ langchain-experimental = "*"
loguru = "^0.7.1"
multiprocess = "^0.70.14"
nest-asyncio = "^1.6.0"
-orjson = "3.9.15"
+orjson = "3.10.0"
pandas = "2.2.0"
passlib = "^1.7.4"
pillow = "^10.2.0"
@@ -4060,13 +3983,13 @@ regex = ["regex"]
[[package]]
name = "litellm"
-version = "1.34.34"
+version = "1.34.36"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
files = [
- {file = "litellm-1.34.34-py3-none-any.whl", hash = "sha256:c9eefd4b5adec3c2e6d0ab765a4fcebd475a895c7e417f47f8e677410b607f51"},
- {file = "litellm-1.34.34.tar.gz", hash = "sha256:d11c9d5296d052a9e5e1187ac7b33683f3a581740abc4de6a9c327d3f3c7187c"},
+ {file = "litellm-1.34.36-py3-none-any.whl", hash = "sha256:e5baa7f4540dd2923472f60d3ffb4f76bb48f5288a7fb26efed2010b57b0d8ad"},
+ {file = "litellm-1.34.36.tar.gz", hash = "sha256:03e59c30ae21b9c1df4064dc20b015233c3d86a07bb4832c40dd73021778f3c8"},
]
[package.dependencies]
@@ -4338,13 +4261,13 @@ llama-index-program-openai = ">=0.1.1,<0.2.0"
[[package]]
name = "llama-index-readers-file"
-version = "0.1.13"
+version = "0.1.15"
description = "llama-index readers file integration"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "llama_index_readers_file-0.1.13-py3-none-any.whl", hash = "sha256:692988b8c3ca2807d21a171351078b634ada2ff1682ebe4a484f82da384dfc55"},
- {file = "llama_index_readers_file-0.1.13.tar.gz", hash = "sha256:830f06ec7b34437fc3bb5f268d235c5c7640296adb148d8f92277dceb7f0846d"},
+ {file = "llama_index_readers_file-0.1.15-py3-none-any.whl", hash = "sha256:f0d26a46d4c40334729d4506d9fbdbed8f5e187e36e22159fb48b28d67c7f240"},
+ {file = "llama_index_readers_file-0.1.15.tar.gz", hash = "sha256:98087d983a1f2d26961805217f13d24b0f23cbbe8e32d2327ae86b1ee668d3b2"},
]
[package.dependencies]
@@ -5648,26 +5571,6 @@ opentelemetry-sdk = ">=1.24.0,<1.25.0"
[package.extras]
test = ["pytest-grpc"]
-[[package]]
-name = "opentelemetry-exporter-otlp-proto-http"
-version = "1.24.0"
-description = "OpenTelemetry Collector Protobuf over HTTP Exporter"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "opentelemetry_exporter_otlp_proto_http-1.24.0-py3-none-any.whl", hash = "sha256:25af10e46fdf4cd3833175e42f4879a1255fc01655fe14c876183a2903949836"},
- {file = "opentelemetry_exporter_otlp_proto_http-1.24.0.tar.gz", hash = "sha256:704c066cc96f5131881b75c0eac286cd73fc735c490b054838b4513254bd7850"},
-]
-
-[package.dependencies]
-deprecated = ">=1.2.6"
-googleapis-common-protos = ">=1.52,<2.0"
-opentelemetry-api = ">=1.15,<2.0"
-opentelemetry-exporter-otlp-proto-common = "1.24.0"
-opentelemetry-proto = "1.24.0"
-opentelemetry-sdk = ">=1.24.0,<1.25.0"
-requests = ">=2.7,<3.0"
-
[[package]]
name = "opentelemetry-instrumentation"
version = "0.45b0"
@@ -5821,61 +5724,62 @@ dev = ["black", "mypy", "pytest"]
[[package]]
name = "orjson"
-version = "3.9.15"
+version = "3.10.0"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
files = [
- {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"},
- {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"},
- {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"},
- {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"},
- {file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"},
- {file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"},
- {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"},
- {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"},
- {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"},
- {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"},
- {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"},
- {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"},
- {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"},
- {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"},
- {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"},
- {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"},
- {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"},
- {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"},
- {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"},
- {file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"},
- {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"},
- {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"},
- {file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"},
- {file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"},
- {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"},
+ {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"},
+ {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"},
+ {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"},
+ {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"},
+ {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"},
+ {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"},
+ {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"},
+ {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"},
+ {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"},
+ {file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"},
+ {file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"},
+ {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"},
+ {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"},
+ {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"},
+ {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"},
+ {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"},
+ {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"},
+ {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"},
+ {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"},
+ {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"},
+ {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"},
+ {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"},
+ {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"},
+ {file = "orjson-3.10.0-cp39-none-win32.whl", hash = "sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"},
+ {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"},
+ {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"},
]
[[package]]
@@ -8391,7 +8295,7 @@ name = "shellingham"
version = "1.5.4"
description = "Tool to Detect Surrounding Shell"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
{file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"},
{file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
@@ -8730,47 +8634,47 @@ files = [
[[package]]
name = "tiktoken"
-version = "0.5.2"
+version = "0.6.0"
description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models"
optional = false
python-versions = ">=3.8"
files = [
- {file = "tiktoken-0.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c4e654282ef05ec1bd06ead22141a9a1687991cef2c6a81bdd1284301abc71d"},
- {file = "tiktoken-0.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7b3134aa24319f42c27718c6967f3c1916a38a715a0fa73d33717ba121231307"},
- {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6092e6e77730929c8c6a51bb0d7cfdf1b72b63c4d033d6258d1f2ee81052e9e5"},
- {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ad8ae2a747622efae75837abba59be6c15a8f31b4ac3c6156bc56ec7a8e631"},
- {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51cba7c8711afa0b885445f0637f0fcc366740798c40b981f08c5f984e02c9d1"},
- {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3d8c7d2c9313f8e92e987d585ee2ba0f7c40a0de84f4805b093b634f792124f5"},
- {file = "tiktoken-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:692eca18c5fd8d1e0dde767f895c17686faaa102f37640e884eecb6854e7cca7"},
- {file = "tiktoken-0.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:138d173abbf1ec75863ad68ca289d4da30caa3245f3c8d4bfb274c4d629a2f77"},
- {file = "tiktoken-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7388fdd684690973fdc450b47dfd24d7f0cbe658f58a576169baef5ae4658607"},
- {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a114391790113bcff670c70c24e166a841f7ea8f47ee2fe0e71e08b49d0bf2d4"},
- {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca96f001e69f6859dd52926d950cfcc610480e920e576183497ab954e645e6ac"},
- {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:15fed1dd88e30dfadcdd8e53a8927f04e1f6f81ad08a5ca824858a593ab476c7"},
- {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f8e692db5756f7ea8cb0cfca34638316dcf0841fb8469de8ed7f6a015ba0b0"},
- {file = "tiktoken-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:bcae1c4c92df2ffc4fe9f475bf8148dbb0ee2404743168bbeb9dcc4b79dc1fdd"},
- {file = "tiktoken-0.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b76a1e17d4eb4357d00f0622d9a48ffbb23401dcf36f9716d9bd9c8e79d421aa"},
- {file = "tiktoken-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:01d8b171bb5df4035580bc26d4f5339a6fd58d06f069091899d4a798ea279d3e"},
- {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42adf7d4fb1ed8de6e0ff2e794a6a15005f056a0d83d22d1d6755a39bffd9e7f"},
- {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3f894dbe0adb44609f3d532b8ea10820d61fdcb288b325a458dfc60fefb7db"},
- {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58ccfddb4e62f0df974e8f7e34a667981d9bb553a811256e617731bf1d007d19"},
- {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58902a8bad2de4268c2a701f1c844d22bfa3cbcc485b10e8e3e28a050179330b"},
- {file = "tiktoken-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:5e39257826d0647fcac403d8fa0a474b30d02ec8ffc012cfaf13083e9b5e82c5"},
- {file = "tiktoken-0.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bde3b0fbf09a23072d39c1ede0e0821f759b4fa254a5f00078909158e90ae1f"},
- {file = "tiktoken-0.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ddee082dcf1231ccf3a591d234935e6acf3e82ee28521fe99af9630bc8d2a60"},
- {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35c057a6a4e777b5966a7540481a75a31429fc1cb4c9da87b71c8b75b5143037"},
- {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c4a049b87e28f1dc60509f8eb7790bc8d11f9a70d99b9dd18dfdd81a084ffe6"},
- {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5bf5ce759089f4f6521ea6ed89d8f988f7b396e9f4afb503b945f5c949c6bec2"},
- {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0c964f554af1a96884e01188f480dad3fc224c4bbcf7af75d4b74c4b74ae0125"},
- {file = "tiktoken-0.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:368dd5726d2e8788e47ea04f32e20f72a2012a8a67af5b0b003d1e059f1d30a3"},
- {file = "tiktoken-0.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2deef9115b8cd55536c0a02c0203512f8deb2447f41585e6d929a0b878a0dd2"},
- {file = "tiktoken-0.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ed7d380195affbf886e2f8b92b14edfe13f4768ff5fc8de315adba5b773815e"},
- {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76fce01309c8140ffe15eb34ded2bb94789614b7d1d09e206838fc173776a18"},
- {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60a5654d6a2e2d152637dd9a880b4482267dfc8a86ccf3ab1cec31a8c76bfae8"},
- {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41d4d3228e051b779245a8ddd21d4336f8975563e92375662f42d05a19bdff41"},
- {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c1cdec2c92fcde8c17a50814b525ae6a88e8e5b02030dc120b76e11db93f13"},
- {file = "tiktoken-0.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:84ddb36faedb448a50b246e13d1b6ee3437f60b7169b723a4b2abad75e914f3e"},
- {file = "tiktoken-0.5.2.tar.gz", hash = "sha256:f54c581f134a8ea96ce2023ab221d4d4d81ab614efa0b2fbce926387deb56c80"},
+ {file = "tiktoken-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:277de84ccd8fa12730a6b4067456e5cf72fef6300bea61d506c09e45658d41ac"},
+ {file = "tiktoken-0.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c44433f658064463650d61387623735641dcc4b6c999ca30bc0f8ba3fccaf5c"},
+ {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afb9a2a866ae6eef1995ab656744287a5ac95acc7e0491c33fad54d053288ad3"},
+ {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62c05b3109fefca26fedb2820452a050074ad8e5ad9803f4652977778177d9f"},
+ {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ef917fad0bccda07bfbad835525bbed5f3ab97a8a3e66526e48cdc3e7beacf7"},
+ {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e095131ab6092d0769a2fda85aa260c7c383072daec599ba9d8b149d2a3f4d8b"},
+ {file = "tiktoken-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:05b344c61779f815038292a19a0c6eb7098b63c8f865ff205abb9ea1b656030e"},
+ {file = "tiktoken-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cefb9870fb55dca9e450e54dbf61f904aab9180ff6fe568b61f4db9564e78871"},
+ {file = "tiktoken-0.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:702950d33d8cabc039845674107d2e6dcabbbb0990ef350f640661368df481bb"},
+ {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d49d076058f23254f2aff9af603863c5c5f9ab095bc896bceed04f8f0b013a"},
+ {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:430bc4e650a2d23a789dc2cdca3b9e5e7eb3cd3935168d97d43518cbb1f9a911"},
+ {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:293cb8669757301a3019a12d6770bd55bec38a4d3ee9978ddbe599d68976aca7"},
+ {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bd1a288b7903aadc054b0e16ea78e3171f70b670e7372432298c686ebf9dd47"},
+ {file = "tiktoken-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac76e000183e3b749634968a45c7169b351e99936ef46f0d2353cd0d46c3118d"},
+ {file = "tiktoken-0.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17cc8a4a3245ab7d935c83a2db6bb71619099d7284b884f4b2aea4c74f2f83e3"},
+ {file = "tiktoken-0.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:284aebcccffe1bba0d6571651317df6a5b376ff6cfed5aeb800c55df44c78177"},
+ {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c1a3a5d33846f8cd9dd3b7897c1d45722f48625a587f8e6f3d3e85080559be8"},
+ {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6318b2bb2337f38ee954fd5efa82632c6e5ced1d52a671370fa4b2eff1355e91"},
+ {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f5f0f2ed67ba16373f9a6013b68da298096b27cd4e1cf276d2d3868b5c7efd1"},
+ {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:75af4c0b16609c2ad02581f3cdcd1fb698c7565091370bf6c0cf8624ffaba6dc"},
+ {file = "tiktoken-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:45577faf9a9d383b8fd683e313cf6df88b6076c034f0a16da243bb1c139340c3"},
+ {file = "tiktoken-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c1492ab90c21ca4d11cef3a236ee31a3e279bb21b3fc5b0e2210588c4209e68"},
+ {file = "tiktoken-0.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e2b380c5b7751272015400b26144a2bab4066ebb8daae9c3cd2a92c3b508fe5a"},
+ {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f497598b9f58c99cbc0eb764b4a92272c14d5203fc713dd650b896a03a50ad"},
+ {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e65e8bd6f3f279d80f1e1fbd5f588f036b9a5fa27690b7f0cc07021f1dfa0839"},
+ {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1495450a54e564d236769d25bfefbf77727e232d7a8a378f97acddee08c1ae"},
+ {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6c4e4857d99f6fb4670e928250835b21b68c59250520a1941618b5b4194e20c3"},
+ {file = "tiktoken-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:168d718f07a39b013032741867e789971346df8e89983fe3c0ef3fbd5a0b1cb9"},
+ {file = "tiktoken-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47fdcfe11bd55376785a6aea8ad1db967db7f66ea81aed5c43fad497521819a4"},
+ {file = "tiktoken-0.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb7d2ccbf1a7784810aff6b80b4012fb42c6fc37eaa68cb3b553801a5cc2d1fc"},
+ {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ccb7a111ee76af5d876a729a347f8747d5ad548e1487eeea90eaf58894b3138"},
+ {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2048e1086b48e3c8c6e2ceeac866561374cd57a84622fa49a6b245ffecb7744"},
+ {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07f229a5eb250b6403a61200199cecf0aac4aa23c3ecc1c11c1ca002cbb8f159"},
+ {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:432aa3be8436177b0db5a2b3e7cc28fd6c693f783b2f8722539ba16a867d0c6a"},
+ {file = "tiktoken-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8bfe8a19c8b5c40d121ee7938cd9c6a278e5b97dc035fd61714b4f0399d2f7a1"},
+ {file = "tiktoken-0.6.0.tar.gz", hash = "sha256:ace62a4ede83c75b0374a2ddfa4b76903cf483e9cb06247f566be3bf14e6beed"},
]
[package.dependencies]
@@ -9135,13 +9039,13 @@ tutorials = ["matplotlib", "pandas", "tabulate", "torch"]
[[package]]
name = "typer"
-version = "0.12.1"
+version = "0.12.2"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = false
python-versions = ">=3.7"
files = [
- {file = "typer-0.12.1-py3-none-any.whl", hash = "sha256:43ebb23c8a358c3d623e31064359a65f50229d0bf73ae8dfd203f49d9126ae06"},
- {file = "typer-0.12.1.tar.gz", hash = "sha256:72d218ef3c686aed9c6ff3ca25b238aee0474a1628b29c559b18b634cfdeca88"},
+ {file = "typer-0.12.2-py3-none-any.whl", hash = "sha256:e1accbaa7e2b2350753acec896ac30493ac573211a8d4603e88f8356217e01f7"},
+ {file = "typer-0.12.2.tar.gz", hash = "sha256:977929604fde12aeada011852ad9c64370501be6ac2eac248f3161cdc9eeb7c9"},
]
[package.dependencies]
@@ -9268,13 +9172,13 @@ files = [
[[package]]
name = "types-redis"
-version = "4.6.0.20240311"
+version = "4.6.0.20240409"
description = "Typing stubs for redis"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-redis-4.6.0.20240311.tar.gz", hash = "sha256:e049bbdff0e0a1f8e701b64636811291d21bff79bf1e7850850a44055224a85f"},
- {file = "types_redis-4.6.0.20240311-py3-none-any.whl", hash = "sha256:6b9d68a29aba1ee400c823d8e5fe88675282eb69d7211e72fe65dbe54b33daca"},
+ {file = "types-redis-4.6.0.20240409.tar.gz", hash = "sha256:ce217c279581d769df992c5b76d61c65425b0a679626048e633e643868eb881b"},
+ {file = "types_redis-4.6.0.20240409-py3-none-any.whl", hash = "sha256:a3b92760c49a034827a0c3825206728df4e61e981c1324099d4414335af4f52f"},
]
[package.dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index cc7ac04ae..269f3d964 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
-version = "1.0.0a9"
+version = "1.0.0a12"
description = "A Python package with a built-in web application"
authors = ["Logspace "]
maintainers = [
@@ -12,7 +12,7 @@ maintainers = [
"Otávio Anovazzi ",
"Rodrigo Nader ",
]
-repository = "https://github.com/logspace-ai/langflow"
+repository = "https://github.com/langflow-ai/langflow"
license = "MIT"
readme = "README.md"
keywords = ["nlp", "langchain", "openai", "gpt", "gui"]
diff --git a/render.yaml b/render.yaml
index 4c17923c6..583a3c324 100644
--- a/render.yaml
+++ b/render.yaml
@@ -4,7 +4,7 @@ services:
name: langflow
runtime: docker
dockerfilePath: ./Dockerfile
- repo: https://github.com/logspace-ai/langflow
+ repo: https://github.com/langflow-ai/langflow
branch: main
healthCheckPath: /health
autoDeploy: false
diff --git a/scripts/aws/README.ja.md b/scripts/aws/README.ja.md
index b7ad49f21..86e8699bb 100644
--- a/scripts/aws/README.ja.md
+++ b/scripts/aws/README.ja.md
@@ -3,6 +3,7 @@
**想定時間**: 30 分
## 説明
+
Langflow on AWS では、 [AWS Cloud Development Kit](https://aws.amazon.com/cdk/?nc2=type_a) (CDK) を用いて Langflow を AWS 上にデプロイする方法を学べます。
このチュートリアルは、AWS アカウントと AWS に関する基本的な知識を有していることを前提としています。
@@ -10,44 +11,47 @@ Langflow on AWS では、 [AWS Cloud Development Kit](https://aws.amazon.com/cdk

AWS CDK によって Langflow のアプリケーションをデプロイします。アプリケーションは [Amazon CloudFront](https://aws.amazon.com/cloudfront/?nc1=h_ls) を介して配信されます。CloudFront は 2 つのオリジンを有しています。1 つ目は静的な Web サイトを配信するための [Amazon Simple Storage Service](https://aws.amazon.com/s3/?nc1=h_ls) (S3)、2 つ目は バックエンドと通信するための [Application Load Balancer](https://aws.amazon.com/elasticloadbalancing/application-load-balancer/?nc1=h_ls) (ALB) です。ALB の背後には FastAPI が動作する [AWS Fargate](https://aws.amazon.com/fargate/?nc2=type_a) 、データベースの [Amazon Aurora](https://aws.amazon.com/rds/aurora/?nc2=type_a) が作成されます。
Fargate は [Amazon Elastic Container Registry](https://aws.amazon.com/ecr/?nc1=h_ls) (ECR) に保存された Docker イメージを使用します。
-Auroraのシークレットは [AWS Secrets Manager](https://aws.amazon.com/secrets-manager/?nc2=type_a) によって管理されます。
+Aurora のシークレットは [AWS Secrets Manager](https://aws.amazon.com/secrets-manager/?nc2=type_a) によって管理されます。
# 環境構築とデプロイ方法
+
1. [AWS CloudShell](https://us-east-1.console.aws.amazon.com/cloudshell/home?region=us-east-1)を開きます。
1. 以下のコマンドを実行します。
- ```shell
- git clone https://github.com/aws-samples/cloud9-setup-for-prototyping
- cd cloud9-setup-for-prototyping
- ./bin/bootstrap
- ```
+
+ ```shell
+ git clone https://github.com/aws-samples/cloud9-setup-for-prototyping
+ cd cloud9-setup-for-prototyping
+ ./bin/bootstrap
+ ```
1. `Done!` と表示されたら [AWS Cloud9](https://us-east-1.console.aws.amazon.com/cloud9control/home?region=us-east-1#/) から `cloud9-for-prototyping` を開きます。
- 
+ 
1. 以下のコマンドを実行します。
- ```shell
- git clone https://github.com/logspace-ai/langflow.git
- cd langflow/scripts/aws
- cp .env.example .env # 環境設定を変える場合はこのファイル(.env)を編集してください。
- npm ci
- cdk bootstrap
- cdk deploy
- ```
+ ```shell
+ git clone https://github.com/langflow-ai/langflow.git
+ cd langflow/scripts/aws
+ cp .env.example .env # 環境設定を変える場合はこのファイル(.env)を編集してください。
+ npm ci
+ cdk bootstrap
+ cdk deploy
+ ```
1. 表示される URL にアクセスします。
- ```shell
- Outputs:
- LangflowAppStack.frontendURLXXXXXX = https://XXXXXXXXXXX.cloudfront.net
- ```
+ ```shell
+ Outputs:
+ LangflowAppStack.frontendURLXXXXXX = https://XXXXXXXXXXX.cloudfront.net
+ ```
1. サインイン画面でユーザー名とパスワードを入力します。`.env`ファイルでユーザー名とパスワードを設定していない場合、ユーザー名は`admin`、パスワードは`123456`で設定されます。
- 
+ 
# 環境の削除
-1. `Cloud9` で以下のコマンドを実行します。
- ```shell
- bash delete-resources.sh
- ```
+1. `Cloud9` で以下のコマンドを実行します。
+
+ ```shell
+ bash delete-resources.sh
+ ```
1. [AWS CloudFormation](https://us-east-1.console.aws.amazon.com/cloudformation/home?region=us-east-1#/getting-started)を開き、`aws-cloud9-cloud9-for-prototyping-XXXX` を選択して削除します。
- 
\ No newline at end of file
+ 
diff --git a/scripts/aws/README.md b/scripts/aws/README.md
index 031718f75..da561747f 100644
--- a/scripts/aws/README.md
+++ b/scripts/aws/README.md
@@ -10,7 +10,7 @@ This tutorial assumes you have an AWS account and basic knowledge of AWS.
The architecture of the application to be created:

Langflow is deployed using AWS CDK. The application is distributed via [Amazon CloudFront](https://aws.amazon.com/cloudfront/?nc1=h_ls), which has two origins: the first is [Amazon Simple Storage Service](https://aws.amazon.com/s3/?nc1=h_ls) (S3) for serving a static website, and the second is an [Application Load Balancer](https://aws.amazon.com/elasticloadbalancing/application-load-balancer/?nc1=h_ls) (ALB) for communicating with the backend. [AWS Fargate](https://aws.amazon.com/fargate/?nc2=type_a), where FastAPI runs and [Amazon Aurora](https://aws.amazon.com/rds/aurora/?nc2=type_a), the database, are created behind the ALB.
-Fargate uses a Docker image stored in [Amazon Elastic Container Registry](https://aws.amazon.com/ecr/?nc1=h_ls) (ECR).
+Fargate uses a Docker image stored in [Amazon Elastic Container Registry](https://aws.amazon.com/ecr/?nc1=h_ls) (ECR).
Aurora's secret is managed by [AWS Secrets Manager](https://aws.amazon.com/secrets-manager/?nc2=type_a).
# How to set up your environment and deploy langflow
@@ -26,14 +26,14 @@ Aurora's secret is managed by [AWS Secrets Manager](https://aws.amazon.com/secre
1. When you see `Done!` in Cloudshell, open `c9-for-langflow` from [AWS Cloud9](https://us-east-1.console.aws.amazon.com/cloud9control/home?region=us-east-1#/).

1. Run the following command in the Cloud9 terminal.
- ```shell
- git clone https://github.com/logspace-ai/langflow.git
- cd langflow/scripts/aws
- cp .env.example .env # Edit this file if you need environment settings
- npm ci
- cdk bootstrap
- cdk deploy
- ```
+ ```shell
+ git clone https://github.com/langflow-ai/langflow.git
+ cd langflow/scripts/aws
+ cp .env.example .env # Edit this file if you need environment settings
+ npm ci
+ cdk bootstrap
+ cdk deploy
+ ```
1. Access the URL displayed.
```shell
Outputs:
@@ -50,4 +50,4 @@ Aurora's secret is managed by [AWS Secrets Manager](https://aws.amazon.com/secre
```
1. Open [AWS CloudFormation](https://us-east-1.console.aws.amazon.com/cloudformation/home?region=us-east-1#/getting-started), select `aws-cloud9-c9-for-langflow-XXXX` and delete it.

-s
\ No newline at end of file
+ s
diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py
index bfb2a5311..184618bdf 100644
--- a/src/backend/base/langflow/__main__.py
+++ b/src/backend/base/langflow/__main__.py
@@ -9,11 +9,6 @@ import click
import httpx
import typer
from dotenv import load_dotenv
-from langflow.main import setup_app
-from langflow.services.database.utils import session_getter
-from langflow.services.deps import get_db_service, get_settings_service
-from langflow.services.utils import initialize_services, initialize_settings_service
-from langflow.utils.logger import configure, logger
from multiprocess import Process, cpu_count # type: ignore
from packaging import version as pkg_version
from rich import box
@@ -22,6 +17,12 @@ from rich.console import Console
from rich.panel import Panel
from rich.table import Table
+from langflow.main import setup_app
+from langflow.services.database.utils import session_getter
+from langflow.services.deps import get_db_service, get_settings_service
+from langflow.services.utils import initialize_services, initialize_settings_service
+from langflow.utils.logger import configure, logger
+
console = Console()
app = typer.Typer(no_args_is_help=True)
@@ -99,12 +100,8 @@ def update_settings(
@app.command()
def run(
- host: str = typer.Option(
- "127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
- ),
- workers: int = typer.Option(
- 1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
- ),
+ host: str = typer.Option("127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"),
+ workers: int = typer.Option(1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"),
timeout: int = typer.Option(300, help="Worker timeout in seconds."),
port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"),
components_path: Optional[Path] = typer.Option(
@@ -112,19 +109,11 @@ def run(
help="Path to the directory containing custom components.",
envvar="LANGFLOW_COMPONENTS_PATH",
),
- config: str = typer.Option(
- Path(__file__).parent / "config.yaml", help="Path to the configuration file."
- ),
+ config: str = typer.Option(Path(__file__).parent / "config.yaml", help="Path to the configuration file."),
# .env file param
- env_file: Path = typer.Option(
- None, help="Path to the .env file containing environment variables."
- ),
- log_level: str = typer.Option(
- "critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
- ),
- log_file: Path = typer.Option(
- "logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"
- ),
+ env_file: Path = typer.Option(None, help="Path to the .env file containing environment variables."),
+ log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
+ log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"),
cache: Optional[str] = typer.Option(
envvar="LANGFLOW_LANGCHAIN_CACHE",
help="Type of cache to use. (InMemoryCache, SQLiteCache)",
@@ -200,7 +189,7 @@ def run(
run_on_windows(host, port, log_level, options, app)
else:
# Run using gunicorn on Linux
- run_on_mac_or_linux(host, port, log_level, options, app, open_browser)
+ run_on_mac_or_linux(host, port, log_level, options, app)
if open_browser:
click.launch(f"http://{host}:{port}")
@@ -218,9 +207,7 @@ def wait_for_server_ready(host, port):
def run_on_mac_or_linux(host, port, log_level, options, app):
- webapp_process = Process(
- target=run_langflow, args=(host, port, log_level, options, app)
- )
+ webapp_process = Process(target=run_langflow, args=(host, port, log_level, options, app))
webapp_process.start()
wait_for_server_ready(host, port)
@@ -316,9 +303,7 @@ def build_new_version_notice(current_version: str, package_name: str):
f"A new pre-release version of {package_name} is available: {latest_version}",
)
else:
- latest_version = httpx.get(f"https://pypi.org/pypi/{package_name}/json").json()[
- "info"
- ]["version"]
+ latest_version = httpx.get(f"https://pypi.org/pypi/{package_name}/json").json()["info"]["version"]
if not version_is_prerelease(latest_version):
return (
False,
@@ -342,9 +327,7 @@ def fetch_latest_version(package_name: str, include_prerelease: bool) -> str:
def build_version_notice(current_version: str, package_name: str) -> str:
latest_version = fetch_latest_version(package_name, is_prerelease(current_version))
- if latest_version and pkg_version.parse(current_version) < pkg_version.parse(
- latest_version
- ):
+ if latest_version and pkg_version.parse(current_version) < pkg_version.parse(latest_version):
release_type = "pre-release" if is_prerelease(latest_version) else "version"
return f"A new {release_type} of {package_name} is available: {latest_version}"
return ""
@@ -393,9 +376,7 @@ def print_banner(host: str, port: int):
from importlib import metadata
langflow_base_version = metadata.version("langflow-base")
- is_pre_release |= is_prerelease(
- langflow_base_version
- ) # Update pre-release status
+ is_pre_release |= is_prerelease(langflow_base_version) # Update pre-release status
notice = build_version_notice(langflow_base_version, "langflow-base")
notice = stylize_text(notice, "langflow-base", is_pre_release)
if notice:
@@ -414,12 +395,10 @@ def print_banner(host: str, port: int):
notices.append(f"Run '{pip_command}' to update.")
styled_notices = [f"[bold]{notice}[/bold]" for notice in notices if notice]
- styled_package_name = stylize_text(
- package_name, package_name, any("pre-release" in notice for notice in notices)
- )
+ styled_package_name = stylize_text(package_name, package_name, any("pre-release" in notice for notice in notices))
title = f"[bold]Welcome to :chains: {styled_package_name}[/bold]\n"
- info_text = "Collaborate, and contribute at our [bold][link=https://github.com/logspace-ai/langflow]GitHub Repo[/link][/bold] :rocket:"
+ info_text = "Collaborate, and contribute at our [bold][link=https://github.com/langflow-ai/langflow]GitHub Repo[/link][/bold] :rocket:"
access_link = f"Access [link=http://{host}:{port}]http://{host}:{port}[/link]"
panel_content = "\n\n".join([title, *styled_notices, info_text, access_link])
@@ -459,12 +438,8 @@ def run_langflow(host, port, log_level, options, app):
@app.command()
def superuser(
username: str = typer.Option(..., prompt=True, help="Username for the superuser."),
- password: str = typer.Option(
- ..., prompt=True, hide_input=True, help="Password for the superuser."
- ),
- log_level: str = typer.Option(
- "error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
- ),
+ password: str = typer.Option(..., prompt=True, hide_input=True, help="Password for the superuser."),
+ log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
):
"""
Create a superuser.
@@ -491,11 +466,23 @@ def superuser(
@app.command()
-def migration(test: bool = typer.Option(True, help="Run migrations in test mode.")):
+def migration(
+ test: bool = typer.Option(True, help="Run migrations in test mode."),
+ fix: bool = typer.Option(
+ False,
+ help="Fix migrations. This is a destructive operation, and should only be used if you know what you are doing.",
+ ),
+):
"""
Run or test migrations.
"""
- initialize_services()
+ if fix:
+ if not typer.confirm(
+ "This will delete all data necessary to fix migrations. Are you sure you want to continue?"
+ ):
+ raise typer.Abort()
+
+ initialize_services(fix_migration=fix)
db_service = get_db_service()
if not test:
db_service.run_migrations()
diff --git a/src/backend/base/langflow/base/constants.py b/src/backend/base/langflow/base/constants.py
index a54d6eff5..c9a41a100 100644
--- a/src/backend/base/langflow/base/constants.py
+++ b/src/backend/base/langflow/base/constants.py
@@ -24,4 +24,5 @@ FIELD_FORMAT_ATTRIBUTES = [
"real_time_refresh",
"refresh_button",
"refresh_button_text",
+ "options",
]
diff --git a/src/backend/base/langflow/base/data/utils.py b/src/backend/base/langflow/base/data/utils.py
index c3fda5e34..09169393b 100644
--- a/src/backend/base/langflow/base/data/utils.py
+++ b/src/backend/base/langflow/base/data/utils.py
@@ -10,7 +10,26 @@ from langflow.schema.schema import Record
# Types of files that can be read simply by file.read()
# and have 100% to be completely readable
-TEXT_FILE_TYPES = ["txt", "md", "mdx", "csv", "json", "yaml", "yml", "xml", "html", "htm", "pdf", "docx"]
+TEXT_FILE_TYPES = [
+ "txt",
+ "md",
+ "mdx",
+ "csv",
+ "json",
+ "yaml",
+ "yml",
+ "xml",
+ "html",
+ "htm",
+ "pdf",
+ "docx",
+ "py",
+ "sh",
+ "sql",
+ "js",
+ "ts",
+ "tsx",
+]
def is_hidden(path: Path) -> bool:
diff --git a/src/backend/base/langflow/base/tools/__init__.py b/src/backend/base/langflow/base/tools/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/base/langflow/base/tools/base.py b/src/backend/base/langflow/base/tools/base.py
new file mode 100644
index 000000000..e1c4d5fdc
--- /dev/null
+++ b/src/backend/base/langflow/base/tools/base.py
@@ -0,0 +1,23 @@
+from langflow.field_typing import Tool
+
+
+def build_status_from_tool(tool: Tool):
+ """
+ Builds a status string representation of a tool.
+
+ Args:
+ tool (Tool): The tool object to build the status for.
+
+ Returns:
+ str: The status string representation of the tool, including its name, description, and arguments (if any).
+ """
+ description_repr = repr(tool.description).strip("'")
+ args_str = "\n".join(
+ [
+ f"- {arg_name}: {arg_data['description']}"
+ for arg_name, arg_data in tool.args.items()
+ if "description" in arg_data
+ ]
+ )
+ status = f"Name: {tool.name}\nDescription: {description_repr}"
+ return status + (f"\nArguments:\n{args_str}" if args_str else "")
diff --git a/src/backend/base/langflow/components/agents/XMLAgent.py b/src/backend/base/langflow/components/agents/XMLAgent.py
index 687bfff6f..117399af9 100644
--- a/src/backend/base/langflow/components/agents/XMLAgent.py
+++ b/src/backend/base/langflow/components/agents/XMLAgent.py
@@ -4,7 +4,7 @@ from langchain.agents import create_xml_agent
from langchain_core.prompts import PromptTemplate
from langflow.base.agents.agent import LCAgentComponent
-from langflow.field_typing import BaseLLM, BaseMemory, Text, Tool
+from langflow.field_typing import BaseLanguageModel, BaseMemory, Text, Tool
class XMLAgentComponent(LCAgentComponent):
@@ -66,7 +66,7 @@ class XMLAgentComponent(LCAgentComponent):
async def build(
self,
input_value: str,
- llm: BaseLLM,
+ llm: BaseLanguageModel,
tools: List[Tool],
prompt: str,
memory: Optional[BaseMemory] = None,
diff --git a/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py
index 1a18f5c08..ff36820f5 100644
--- a/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py
@@ -1,6 +1,5 @@
from typing import Optional
-
-from langchain.llms.base import BaseLLM
+from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.bedrock import Bedrock
from langflow.interface.custom.custom_component import CustomComponent
@@ -46,7 +45,7 @@ class AmazonBedrockComponent(CustomComponent):
endpoint_url: Optional[str] = None,
streaming: bool = False,
cache: Optional[bool] = None,
- ) -> BaseLLM:
+ ) -> BaseLanguageModel:
try:
output = Bedrock(
credentials_profile_name=credentials_profile_name,
diff --git a/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py
index 2ea78162b..016eaeb2d 100644
--- a/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py
@@ -1,14 +1,14 @@
from typing import Optional
from langchain.llms.base import BaseLanguageModel
-from langchain_community.chat_models.anthropic import ChatAnthropic
+from langchain_anthropic import ChatAnthropic
from pydantic.v1 import SecretStr
from langflow.interface.custom.custom_component import CustomComponent
-class AnthropicLLM(CustomComponent):
- display_name: str = "AnthropicLLM"
+class ChatAntropicSpecsComponent(CustomComponent):
+ display_name: str = "Anthropic"
description: str = "Anthropic Chat&Completion large language models."
icon = "Anthropic"
diff --git a/src/backend/base/langflow/components/model_specs/AnthropicSpecs.py b/src/backend/base/langflow/components/model_specs/AnthropicSpecs.py
deleted file mode 100644
index 23c284888..000000000
--- a/src/backend/base/langflow/components/model_specs/AnthropicSpecs.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from typing import Optional
-
-from langchain_community.llms.anthropic import Anthropic
-from pydantic.v1 import SecretStr
-
-from langflow.field_typing import BaseLanguageModel, NestedDict
-from langflow.interface.custom.custom_component import CustomComponent
-
-
-class AnthropicComponent(CustomComponent):
- display_name = "Anthropic"
- description = "Anthropic large language models."
- icon = "Anthropic"
-
- def build_config(self):
- return {
- "anthropic_api_key": {
- "display_name": "Anthropic API Key",
- "type": str,
- "password": True,
- },
- "anthropic_api_url": {
- "display_name": "Anthropic API URL",
- "type": str,
- },
- "model_kwargs": {
- "display_name": "Model Kwargs",
- "field_type": "NestedDict",
- "advanced": True,
- },
- "temperature": {
- "display_name": "Temperature",
- "field_type": "float",
- },
- }
-
- def build(
- self,
- anthropic_api_key: str,
- anthropic_api_url: str,
- model_kwargs: NestedDict = {},
- temperature: Optional[float] = None,
- ) -> BaseLanguageModel:
- return Anthropic(
- anthropic_api_key=SecretStr(anthropic_api_key),
- anthropic_api_url=anthropic_api_url,
- model_kwargs=model_kwargs,
- temperature=temperature,
- )
diff --git a/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py
index 9c39acdf6..a60fb9a64 100644
--- a/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py
@@ -1,9 +1,9 @@
from typing import Optional
-from langchain.llms.base import BaseLLM
from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
from pydantic.v1 import SecretStr
+from langflow.field_typing import BaseLanguageModel
from langflow.interface.custom.custom_component import CustomComponent
@@ -79,7 +79,7 @@ class QianfanChatEndpointComponent(CustomComponent):
temperature: Optional[float] = None,
penalty_score: Optional[float] = None,
endpoint: Optional[str] = None,
- ) -> BaseLLM:
+ ) -> BaseLanguageModel:
try:
output = QianfanChatEndpoint( # type: ignore
model=model,
diff --git a/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py
index acbf8ba28..21ed13e70 100644
--- a/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py
@@ -1,9 +1,9 @@
from typing import Optional
from langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint
-from langchain.llms.base import BaseLLM
from langflow.interface.custom.custom_component import CustomComponent
+from langflow.field_typing import BaseLanguageModel
class QianfanLLMEndpointComponent(CustomComponent):
@@ -78,7 +78,7 @@ class QianfanLLMEndpointComponent(CustomComponent):
temperature: Optional[float] = None,
penalty_score: Optional[float] = None,
endpoint: Optional[str] = None,
- ) -> BaseLLM:
+ ) -> BaseLanguageModel:
try:
output = QianfanLLMEndpoint( # type: ignore
model=model,
diff --git a/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py
index a4a37b283..b03a9b737 100644
--- a/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py
@@ -1,67 +1,89 @@
-from typing import Callable, Optional, Union
+from typing import Optional
-from langchain_community.chat_models.anthropic import ChatAnthropic
+from langchain_anthropic import ChatAnthropic
from pydantic.v1.types import SecretStr
+
from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
-class ChatAnthropicComponent(CustomComponent):
- display_name = "ChatAnthropic"
- description = "`Anthropic` chat large language models."
- documentation = "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic"
+class AnthropicLLM(CustomComponent):
+ display_name: str = "Anthropic"
+ description: str = "Generate text using Anthropic Chat&Completion LLMs."
icon = "Anthropic"
+ field_order = [
+ "model",
+ "anthropic_api_key",
+ "max_tokens",
+ "temperature",
+ "anthropic_api_url",
+ ]
+
def build_config(self):
return {
+ "model": {
+ "display_name": "Model Name",
+ "options": [
+ "claude-3-opus-20240229",
+ "claude-3-sonnet-20240229",
+ "claude-3-haiku-20240307",
+ "claude-2.1",
+ "claude-2.0",
+ "claude-instant-1.2",
+ "claude-instant-1",
+ ],
+ "info": "Name of the model to use.",
+ "required": True,
+ "value": "claude-3-opus-20240229",
+ },
"anthropic_api_key": {
"display_name": "Anthropic API Key",
- "field_type": "str",
+ "required": True,
"password": True,
- },
- "model_kwargs": {
- "display_name": "Model Kwargs",
- "field_type": "dict",
- "advanced": True,
- },
- "model_name": {
- "display_name": "Model Name",
- "field_type": "str",
- "advanced": False,
- "required": False,
- "options": ["claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307"],
- },
- "temperature": {
- "display_name": "Temperature",
- "field_type": "float",
+ "info": "Your Anthropic API key.",
},
"max_tokens": {
"display_name": "Max Tokens",
"field_type": "int",
- "advanced": False,
- "required": False,
+ "advanced": True,
+ "value": 256,
},
- "top_k": {"display_name": "Top K", "field_type": "int", "advanced": True},
- "top_p": {"display_name": "Top P", "field_type": "float", "advanced": True},
+ "temperature": {
+ "display_name": "Temperature",
+ "field_type": "float",
+ "value": 0.1,
+ },
+ "anthropic_api_url": {
+ "display_name": "Anthropic API URL",
+ "advanced": True,
+ "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
+ },
+ "code": {"show": False},
}
def build(
self,
- anthropic_api_key: str,
- model_kwargs: dict = {},
- model_name: str = "claude-3-opus-20240229",
+ model: str,
+ anthropic_api_key: Optional[str] = None,
+ max_tokens: Optional[int] = None,
temperature: Optional[float] = None,
- max_tokens: Optional[int] = 1024,
- top_k: Optional[int] = None,
- top_p: Optional[float] = None,
- ) -> Union[BaseLanguageModel, Callable]:
- return ChatAnthropic(
- anthropic_api_key=SecretStr(anthropic_api_key),
- model_kwargs=model_kwargs,
- model_name=model_name,
- temperature=temperature,
- max_tokens=max_tokens, # type: ignore
- top_k=top_k,
- top_p=top_p,
- )
+ anthropic_api_url: Optional[str] = None,
+ ) -> BaseLanguageModel:
+ # Set default API endpoint if not provided
+ if not anthropic_api_url:
+ anthropic_api_url = "https://api.anthropic.com"
+
+ try:
+ output = ChatAnthropic(
+ model_name=model,
+ anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None),
+ max_tokens_to_sample=max_tokens, # type: ignore
+ temperature=temperature,
+ anthropic_api_url=anthropic_api_url,
+ )
+ except Exception as e:
+ raise ValueError("Could not connect to Anthropic API.") from e
+
+ return output
diff --git a/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py
index 439266fbb..840682f4d 100644
--- a/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py
@@ -1,4 +1,4 @@
-from typing import Any, Callable, Dict, Optional, Union
+from typing import Any, Dict, Optional
from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException
from langflow.field_typing import BaseLanguageModel
diff --git a/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py
index fcaf80965..704090620 100644
--- a/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py
@@ -1,9 +1,9 @@
-from typing import Optional, Union
+from typing import Optional
-from langchain.llms import BaseLLM
+from langflow.field_typing import BaseLanguageModel
from langchain_community.chat_models.openai import ChatOpenAI
-from langflow.field_typing import BaseLanguageModel, NestedDict
+from langflow.field_typing import NestedDict
from langflow.interface.custom.custom_component import CustomComponent
@@ -68,7 +68,7 @@ class ChatOpenAIComponent(CustomComponent):
openai_api_base: Optional[str] = None,
openai_api_key: Optional[str] = None,
temperature: float = 0.7,
- ) -> Union[BaseLanguageModel, BaseLLM]:
+ ) -> BaseLanguageModel:
if not openai_api_base:
openai_api_base = "https://api.openai.com/v1"
return ChatOpenAI(
diff --git a/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py
index fd8b5b427..f2c377546 100644
--- a/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py
@@ -1,6 +1,5 @@
-from typing import List, Optional, Union
+from typing import List, Optional
-from langchain.llms import BaseLLM
from langchain_community.chat_models.vertexai import ChatVertexAI
from langchain_core.messages.base import BaseMessage
@@ -74,7 +73,7 @@ class ChatVertexAIComponent(CustomComponent):
top_k: int = 40,
top_p: float = 0.95,
verbose: bool = False,
- ) -> Union[BaseLanguageModel, BaseLLM]:
+ ) -> BaseLanguageModel:
return ChatVertexAI(
credentials=credentials,
examples=examples,
diff --git a/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
index c3e74b1bd..c16fb2c2c 100644
--- a/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
@@ -1,6 +1,6 @@
from typing import Optional
-from langchain.llms.base import BaseLLM
+from langflow.field_typing import BaseLanguageModel
from langchain.llms.huggingface_endpoint import HuggingFaceEndpoint
from langflow.interface.custom.custom_component import CustomComponent
@@ -32,7 +32,7 @@ class HuggingFaceEndpointsComponent(CustomComponent):
task: str = "text2text-generation",
huggingfacehub_api_token: Optional[str] = None,
model_kwargs: Optional[dict] = None,
- ) -> BaseLLM:
+ ) -> BaseLanguageModel:
try:
output = HuggingFaceEndpoint( # type: ignore
endpoint_url=endpoint_url,
diff --git a/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py
index 8d94467db..4ba5502d3 100644
--- a/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py
@@ -1,6 +1,6 @@
from typing import List, Optional
-from langchain.llms.base import BaseLLM
+from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.ollama import Ollama
from langflow.interface.custom.custom_component import CustomComponent
@@ -118,7 +118,7 @@ class OllamaLLM(CustomComponent):
tfs_z: Optional[float] = None,
top_k: Optional[int] = None,
top_p: Optional[int] = None,
- ) -> BaseLLM:
+ ) -> BaseLanguageModel:
if not base_url:
base_url = "http://localhost:11434"
diff --git a/src/backend/base/langflow/components/model_specs/VertexAISpecs.py b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py
index f186f2ed0..c9664408d 100644
--- a/src/backend/base/langflow/components/model_specs/VertexAISpecs.py
+++ b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py
@@ -1,6 +1,6 @@
-from typing import Callable, Dict, Optional, Union
+from typing import Dict, Optional
-from langchain.llms import BaseLLM
+from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.vertexai import VertexAI
from langflow.interface.custom.custom_component import CustomComponent
@@ -129,7 +129,7 @@ class VertexAIComponent(CustomComponent):
top_p: float = 0.95,
tuned_model_name: Optional[str] = None,
verbose: bool = False,
- ) -> Union[BaseLLM, Callable]:
+ ) -> BaseLanguageModel:
return VertexAI(
credentials=credentials,
location=location,
diff --git a/src/backend/base/langflow/components/model_specs/__init__.py b/src/backend/base/langflow/components/model_specs/__init__.py
index 7bf6c2881..3bd847894 100644
--- a/src/backend/base/langflow/components/model_specs/__init__.py
+++ b/src/backend/base/langflow/components/model_specs/__init__.py
@@ -1,6 +1,6 @@
from .AmazonBedrockSpecs import AmazonBedrockComponent
-from .AnthropicLLMSpecs import AnthropicLLM
-from .AnthropicSpecs import AnthropicComponent
+from .AnthropicLLMSpecs import ChatAntropicSpecsComponent
+
from .AzureChatOpenAISpecs import AzureChatOpenAISpecsComponent
from .BaiduQianfanChatEndpointsSpecs import QianfanChatEndpointComponent
from .BaiduQianfanLLMEndpointsSpecs import QianfanLLMEndpointComponent
@@ -17,8 +17,7 @@ from .VertexAISpecs import VertexAIComponent
__all__ = [
"AmazonBedrockComponent",
- "AnthropicLLM",
- "AnthropicComponent",
+ "ChatAntropicSpecsComponent",
"AzureChatOpenAISpecsComponent",
"QianfanChatEndpointComponent",
"QianfanLLMEndpointComponent",
diff --git a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py
index 0dec1c028..8dd6d8579 100644
--- a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py
+++ b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py
@@ -1,8 +1,8 @@
-from typing import Callable, Optional, Union
+from typing import Optional
from langchain.retrievers import MultiQueryRetriever
-from langflow.field_typing import BaseLLM, BaseRetriever, PromptTemplate
+from langflow.field_typing import BaseRetriever, PromptTemplate, BaseLanguageModel
from langflow.interface.custom.custom_component import CustomComponent
@@ -39,11 +39,11 @@ class MultiQueryRetrieverComponent(CustomComponent):
def build(
self,
- llm: BaseLLM,
+ llm: BaseLanguageModel,
retriever: BaseRetriever,
prompt: Optional[PromptTemplate] = None,
parser_key: str = "lines",
- ) -> Union[Callable, MultiQueryRetriever]:
+ ) -> MultiQueryRetriever:
if not prompt:
return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, parser_key=parser_key)
else:
diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py
index 1775d816a..626a14fd8 100644
--- a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py
+++ b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py
@@ -1,5 +1,3 @@
-from typing import Callable, Union
-
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo
from langchain_community.vectorstores import VectorStore
@@ -22,5 +20,5 @@ class VectorStoreInfoComponent(CustomComponent):
vectorstore: VectorStore,
description: str,
name: str,
- ) -> Union[VectorStoreInfo, Callable]:
+ ) -> VectorStoreInfo:
return VectorStoreInfo(vectorstore=vectorstore, description=description, name=name)
diff --git a/src/backend/base/langflow/components/tools/PythonREPLTool.py b/src/backend/base/langflow/components/tools/PythonREPLTool.py
new file mode 100644
index 000000000..fa5cd5fc3
--- /dev/null
+++ b/src/backend/base/langflow/components/tools/PythonREPLTool.py
@@ -0,0 +1,68 @@
+import importlib
+
+from langchain.agents import Tool
+from langchain_experimental.utilities import PythonREPL
+
+from langflow.base.tools.base import build_status_from_tool
+from langflow.custom import CustomComponent
+
+
+class PythonREPLToolComponent(CustomComponent):
+ display_name = "Python REPL Tool"
+ description = "A tool for running Python code in a REPL environment."
+
+ def build_config(self):
+ return {
+ "name": {"display_name": "Name", "info": "The name of the tool."},
+ "description": {"display_name": "Description", "info": "A description of the tool."},
+ "global_imports": {
+ "display_name": "Global Imports",
+ "info": "A list of modules to import globally, e.g. ['math', 'numpy'].",
+ },
+ }
+
+ def get_globals(self, globals: list[str]) -> dict:
+ """
+ Retrieves the global variables from the specified modules.
+
+ Args:
+ globals (list[str]): A list of module names.
+
+ Returns:
+ dict: A dictionary containing the global variables from the specified modules.
+ """
+ global_dict = {}
+ for module in globals:
+ try:
+ module = importlib.import_module(module)
+ global_dict[module.__name__] = module
+ except ImportError:
+ print(f"Could not import module {module}")
+ return global_dict
+
+ def build(
+ self,
+ name: str = "python_repl",
+ description: str = "A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`.",
+ global_imports: list[str] = ["math"],
+ ) -> Tool:
+ """
+ Builds a Python REPL tool.
+
+ Args:
+ name (str, optional): The name of the tool. Defaults to "python_repl".
+ description (str, optional): The description of the tool. Defaults to "A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`. ".
+ global_imports (list[str], optional): A list of global imports to be available in the Python REPL. Defaults to ["math"].
+
+ Returns:
+ Tool: The built Python REPL tool.
+ """
+ _globals = self.get_globals(global_imports)
+ python_repl = PythonREPL(_globals=_globals)
+ tool = Tool(
+ name=name,
+ description=description,
+ func=python_repl.run,
+ )
+ self.status = build_status_from_tool(tool)
+ return tool
diff --git a/src/backend/base/langflow/components/tools/__init__.py b/src/backend/base/langflow/components/tools/__init__.py
index 27072c109..3d64a723c 100644
--- a/src/backend/base/langflow/components/tools/__init__.py
+++ b/src/backend/base/langflow/components/tools/__init__.py
@@ -1,5 +1,7 @@
+from .PythonREPLTool import PythonREPLToolComponent
from .RetrieverTool import RetrieverToolComponent
-from .SearchAPITool import SearchApiToolComponent
from .SearchApi import SearchApi
+from .SearchAPITool import SearchApiToolComponent
-__all__ = ["RetrieverToolComponent", "SearchApiToolComponent", "SearchApi"]
+
+__all__ = ["RetrieverToolComponent", "SearchApiToolComponent", "SearchApi", "PythonREPLToolComponent"]
diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py
index 8261d0b8c..bca21bcdd 100644
--- a/src/backend/base/langflow/graph/graph/base.py
+++ b/src/backend/base/langflow/graph/graph/base.py
@@ -249,7 +249,10 @@ class Graph:
vertex.update_raw_params({"session_id": session_id})
# Process the graph
try:
- await self.process()
+ start_component_id = next(
+ (vertex_id for vertex_id in self._is_input_vertices if "chat" in vertex_id.lower()), None
+ )
+ await self.process(start_component_id=start_component_id)
self.increment_run_count()
except Exception as exc:
logger.exception(exc)
@@ -345,7 +348,7 @@ class Graph:
if types is None:
types = []
for _ in range(len(inputs) - len(types)):
- types.append("any")
+ types.append("chat") # default to chat
for run_inputs, components, input_type in zip(inputs, inputs_components, types):
run_outputs = await self._run(
inputs=run_inputs,
@@ -733,8 +736,10 @@ class Graph:
vertices.append(vertex)
return vertices
- async def process(self) -> "Graph":
+ async def process(self, start_component_id: Optional[str] = None) -> "Graph":
"""Processes the graph with vertices in each layer run in parallel."""
+
+ self.sort_vertices(start_component_id=start_component_id)
vertices_layers = self.sorted_vertices_layers
vertex_task_run_count: Dict[str, int] = {}
for layer_index, layer in enumerate(vertices_layers):
diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py
index 7bcdc1b8c..f300b2dda 100644
--- a/src/backend/base/langflow/graph/vertex/base.py
+++ b/src/backend/base/langflow/graph/vertex/base.py
@@ -1,10 +1,10 @@
import ast
import asyncio
import inspect
+import os
import types
from enum import Enum
from typing import TYPE_CHECKING, Any, AsyncIterator, Callable, Dict, Iterator, List, Optional
-import os
from loguru import logger
@@ -315,7 +315,8 @@ class Vertex:
raise e
params[field_name] = full_path
elif field.get("required"):
- raise ValueError(f"File path not found for {self.display_name}")
+ field_display_name = field.get("display_name")
+ raise ValueError(f"File path not found for {field_display_name} in component {self.display_name}")
elif field.get("type") in DIRECT_TYPES and params.get(field_name) is None:
val = field.get("value")
if field.get("type") == "code":
diff --git a/src/backend/base/langflow/initial_setup/setup.py b/src/backend/base/langflow/initial_setup/setup.py
index f2181f85e..ca2bc230a 100644
--- a/src/backend/base/langflow/initial_setup/setup.py
+++ b/src/backend/base/langflow/initial_setup/setup.py
@@ -84,7 +84,7 @@ def log_node_changes(node_changes_log):
logger.debug("\n".join(formatted_messages))
-def load_starter_projects():
+def load_starter_projects() -> list[tuple[Path, dict]]:
starter_projects = []
folder = Path(__file__).parent / "starter_projects"
for file in folder.glob("*.json"):
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json
index ca465fc23..09f849152 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json
@@ -882,7 +882,7 @@
}
},
"description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. \nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ",
- "name": "Basic Prompting (Hello, world!)",
+ "name": "Basic Prompting (Hello, World)",
"last_tested_version": "1.0.0a4",
"is_component": false
}
\ No newline at end of file
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json
index 5706a0fbf..0d17d2ab5 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json
@@ -1,3403 +1,3409 @@
{
- "id": "51e2b78a-199b-4054-9f32-e288eef6924c",
- "data": {
- "nodes": [
- {
- "id": "ChatInput-yxMKE",
- "type": "genericNode",
- "position": {
- "x": 1195.5276981160775,
- "y": 209.421875
- },
- "data": {
- "type": "ChatInput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "value": "what is a line"
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Get chat inputs from the Interaction Panel.",
- "icon": "ChatInput",
- "base_classes": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "display_name": "Chat Input",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatInput-yxMKE"
- },
- "selected": false,
- "width": 384,
- "height": 383
+ "id": "51e2b78a-199b-4054-9f32-e288eef6924c",
+ "data": {
+ "nodes": [
+ {
+ "id": "ChatInput-yxMKE",
+ "type": "genericNode",
+ "position": {
+ "x": 1195.5276981160775,
+ "y": 209.421875
+ },
+ "data": {
+ "type": "ChatInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "value": "what is a line"
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Machine",
+ "User"
+ ],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "TextOutput-BDknO",
- "type": "genericNode",
- "position": {
- "x": 2322.600672827879,
- "y": 604.9467307442569
- },
- "data": {
- "type": "TextOutput",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Value",
- "advanced": false,
- "input_types": [
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "Text or Record to be passed as output.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a text output in the Interaction Panel.",
- "icon": "type",
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "display_name": "Extracted Chunks",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "TextOutput-BDknO"
- },
- "selected": false,
- "width": 384,
- "height": 289,
- "positionAbsolute": {
- "x": 2322.600672827879,
- "y": 604.9467307442569
- },
- "dragging": false
+ "description": "Get chat inputs from the Interaction Panel.",
+ "icon": "ChatInput",
+ "base_classes": [
+ "Text",
+ "str",
+ "object",
+ "Record"
+ ],
+ "display_name": "Chat Input",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
},
- {
- "id": "OpenAIEmbeddings-ZlOk1",
- "type": "genericNode",
- "position": {
- "x": 1183.667250865064,
- "y": 687.3171828430261
- },
- "data": {
- "type": "OpenAIEmbeddings",
- "node": {
- "template": {
- "allowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "allowed_special",
- "display_name": "Allowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "client": {
- "type": "Any",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "client",
- "display_name": "Client",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_headers": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_headers",
- "display_name": "Default Headers",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_query": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_query",
- "display_name": "Default Query",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "deployment": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "deployment",
- "display_name": "Deployment",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "disallowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [
- "all"
- ],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "disallowed_special",
- "display_name": "Disallowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "embedding_ctx_length": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 8191,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding_ctx_length",
- "display_name": "Embedding Context Length",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_retries": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 6,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_retries",
- "display_name": "Max Retries",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "text-embedding-3-small",
- "text-embedding-3-large",
- "text-embedding-ada-002"
- ],
- "name": "model",
- "display_name": "Model",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "openai_api_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_type",
- "display_name": "OpenAI API Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_version": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_version",
- "display_name": "OpenAI API Version",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_organization": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_organization",
- "display_name": "OpenAI Organization",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_proxy": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_proxy",
- "display_name": "OpenAI Proxy",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "request_timeout": {
- "type": "float",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "request_timeout",
- "display_name": "Request Timeout",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "show_progress_bar": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "show_progress_bar",
- "display_name": "Show Progress Bar",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "skip_empty": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "skip_empty",
- "display_name": "Skip Empty",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_enable": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_enable",
- "display_name": "TikToken Enable",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_model_name",
- "display_name": "TikToken Model Name",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Generate embeddings using OpenAI models.",
- "base_classes": [
- "Embeddings"
- ],
- "display_name": "OpenAI Embeddings",
- "documentation": "",
- "custom_fields": {
- "openai_api_key": null,
- "default_headers": null,
- "default_query": null,
- "allowed_special": null,
- "disallowed_special": null,
- "chunk_size": null,
- "client": null,
- "deployment": null,
- "embedding_ctx_length": null,
- "max_retries": null,
- "model": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "openai_api_type": null,
- "openai_api_version": null,
- "openai_organization": null,
- "openai_proxy": null,
- "request_timeout": null,
- "show_progress_bar": null,
- "skip_empty": null,
- "tiktoken_enable": null,
- "tiktoken_model_name": null
- },
- "output_types": [
- "Embeddings"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "OpenAIEmbeddings-ZlOk1"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "dragging": false
+ "output_types": [
+ "Text",
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatInput-yxMKE"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383
+ },
+ {
+ "id": "TextOutput-BDknO",
+ "type": "genericNode",
+ "position": {
+ "x": 2322.600672827879,
+ "y": 604.9467307442569
+ },
+ "data": {
+ "type": "TextOutput",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": [
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "Text or Record to be passed as output.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "OpenAIModel-EjXlN",
- "type": "genericNode",
- "position": {
- "x": 3410.117202077183,
- "y": 431.2038048137648
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 256,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-3.5-turbo",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-4-0125-preview",
- "gpt-4-1106-preview",
- "gpt-4-vision-preview",
- "gpt-3.5-turbo-0125",
- "gpt-3.5-turbo-1106"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": true,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 0.1,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-EjXlN"
- },
- "selected": true,
- "width": 384,
- "height": 563,
- "positionAbsolute": {
- "x": 3410.117202077183,
- "y": 431.2038048137648
- },
- "dragging": false
+ "description": "Display a text output in the Interaction Panel.",
+ "icon": "type",
+ "base_classes": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "display_name": "Extracted Chunks",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
},
- {
- "id": "Prompt-xeI6K",
- "type": "genericNode",
- "position": {
- "x": 2969.0261961391298,
- "y": 442.1613649809069
+ "output_types": [
+ "Text"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextOutput-BDknO"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 289,
+ "positionAbsolute": {
+ "x": 2322.600672827879,
+ "y": 604.9467307442569
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIEmbeddings-ZlOk1",
+ "type": "genericNode",
+ "position": {
+ "x": 1183.667250865064,
+ "y": 687.3171828430261
+ },
+ "data": {
+ "type": "OpenAIEmbeddings",
+ "node": {
+ "template": {
+ "allowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "allowed_special",
+ "display_name": "Allowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "client": {
+ "type": "Any",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "client",
+ "display_name": "Client",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_headers": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_headers",
+ "display_name": "Default Headers",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_query": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_query",
+ "display_name": "Default Query",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "deployment": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "deployment",
+ "display_name": "Deployment",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "disallowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [
+ "all"
+ ],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "disallowed_special",
+ "display_name": "Disallowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "embedding_ctx_length": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 8191,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding_ctx_length",
+ "display_name": "Embedding Context Length",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_retries": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 6,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_retries",
+ "display_name": "Max Retries",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "text-embedding-3-small",
+ "text-embedding-3-large",
+ "text-embedding-ada-002"
+ ],
+ "name": "model",
+ "display_name": "Model",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "openai_api_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_type",
+ "display_name": "OpenAI API Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_version": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_version",
+ "display_name": "OpenAI API Version",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_organization": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_organization",
+ "display_name": "OpenAI Organization",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_proxy": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_proxy",
+ "display_name": "OpenAI Proxy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "request_timeout": {
+ "type": "float",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "request_timeout",
+ "display_name": "Request Timeout",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
},
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "context": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "context",
- "display_name": "context",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- },
- "question": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "question",
- "display_name": "question",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "context",
- "question"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-xeI6K",
- "description": "Create a prompt template with dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": false,
- "width": 384,
- "height": 477,
- "positionAbsolute": {
- "x": 2969.0261961391298,
- "y": 442.1613649809069
- },
- "dragging": false
+ "load_from_db": false,
+ "title_case": false
+ },
+ "show_progress_bar": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "show_progress_bar",
+ "display_name": "Show Progress Bar",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "skip_empty": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "skip_empty",
+ "display_name": "Skip Empty",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_enable": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_enable",
+ "display_name": "TikToken Enable",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_model_name",
+ "display_name": "TikToken Model Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "ChatOutput-Q39I8",
- "type": "genericNode",
- "position": {
- "x": 3887.2073667611485,
- "y": 588.4801225794856
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "In case of Message being a Record, this template will be used to convert it to text.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "AI",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Interaction Panel.",
- "icon": "ChatOutput",
- "base_classes": [
- "object",
- "Text",
- "Record",
- "str"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null,
- "record_template": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-Q39I8"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "positionAbsolute": {
- "x": 3887.2073667611485,
- "y": 588.4801225794856
- },
- "dragging": false
+ "description": "Generate embeddings using OpenAI models.",
+ "base_classes": [
+ "Embeddings"
+ ],
+ "display_name": "OpenAI Embeddings",
+ "documentation": "",
+ "custom_fields": {
+ "openai_api_key": null,
+ "default_headers": null,
+ "default_query": null,
+ "allowed_special": null,
+ "disallowed_special": null,
+ "chunk_size": null,
+ "client": null,
+ "deployment": null,
+ "embedding_ctx_length": null,
+ "max_retries": null,
+ "model": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "openai_api_type": null,
+ "openai_api_version": null,
+ "openai_organization": null,
+ "openai_proxy": null,
+ "request_timeout": null,
+ "show_progress_bar": null,
+ "skip_empty": null,
+ "tiktoken_enable": null,
+ "tiktoken_model_name": null
},
- {
- "id": "File-t0a6a",
- "type": "genericNode",
- "position": {
- "x": 2257.233450682836,
- "y": 1747.5389618367233
+ "output_types": [
+ "Embeddings"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "OpenAIEmbeddings-ZlOk1"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "dragging": false
+ },
+ {
+ "id": "OpenAIModel-EjXlN",
+ "type": "genericNode",
+ "position": {
+ "x": 3410.117202077183,
+ "y": 431.2038048137648
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-3.5-turbo",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-4-0125-preview",
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
+ "gpt-3.5-turbo-0125",
+ "gpt-3.5-turbo-1106"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 0.1,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
},
- "data": {
- "type": "File",
- "node": {
- "template": {
- "path": {
- "type": "file",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [
- ".txt",
- ".md",
- ".mdx",
- ".csv",
- ".json",
- ".yaml",
- ".yml",
- ".xml",
- ".html",
- ".htm",
- ".pdf",
- ".docx"
- ],
- "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf",
- "password": false,
- "name": "path",
- "display_name": "Path",
- "advanced": false,
- "dynamic": false,
- "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx",
- "load_from_db": false,
- "title_case": false,
- "value": ""
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "silent_errors": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "silent_errors",
- "display_name": "Silent Errors",
- "advanced": true,
- "dynamic": false,
- "info": "If true, errors will not raise an exception.",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "A generic file loader.",
- "icon": "file-text",
- "base_classes": [
- "Record"
- ],
- "display_name": "File",
- "documentation": "",
- "custom_fields": {
- "path": null,
- "silent_errors": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "File-t0a6a"
- },
- "selected": false,
- "width": 384,
- "height": 281,
- "positionAbsolute": {
- "x": 2257.233450682836,
- "y": 1747.5389618367233
- },
- "dragging": false
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "RecursiveCharacterTextSplitter-tR9QM",
- "type": "genericNode",
- "position": {
- "x": 2791.013514133929,
- "y": 1462.9588953494142
- },
- "data": {
- "type": "RecursiveCharacterTextSplitter",
- "node": {
- "template": {
- "inputs": {
- "type": "Document",
- "required": true,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "inputs",
- "display_name": "Input",
- "advanced": false,
- "input_types": [
- "Document",
- "Record"
- ],
- "dynamic": false,
- "info": "The texts to split.",
- "load_from_db": false,
- "title_case": false
- },
- "chunk_overlap": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 200,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_overlap",
- "display_name": "Chunk Overlap",
- "advanced": false,
- "dynamic": false,
- "info": "The amount of overlap between chunks.",
- "load_from_db": false,
- "title_case": false
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": false,
- "dynamic": false,
- "info": "The maximum length of each chunk.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = 
None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "separators": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "separators",
- "display_name": "Separators",
- "advanced": false,
- "dynamic": false,
- "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": [
- ""
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Split text into chunks of a specified length.",
- "base_classes": [
- "Record"
- ],
- "display_name": "Recursive Character Text Splitter",
- "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter",
- "custom_fields": {
- "inputs": null,
- "separators": null,
- "chunk_size": null,
- "chunk_overlap": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "RecursiveCharacterTextSplitter-tR9QM"
- },
- "selected": false,
- "width": 384,
- "height": 501,
- "positionAbsolute": {
- "x": 2791.013514133929,
- "y": 1462.9588953494142
- },
- "dragging": false
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
},
- {
- "id": "AstraDBSearch-41nRz",
- "type": "genericNode",
- "position": {
- "x": 1723.976434815103,
- "y": 277.03317407245913
- },
- "data": {
- "type": "AstraDBSearch",
- "node": {
- "template": {
- "embedding": {
- "type": "Embeddings",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding",
- "display_name": "Embedding",
- "advanced": false,
- "dynamic": false,
- "info": "Embedding to use",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input Value",
- "advanced": false,
- "dynamic": false,
- "info": "Input value to search",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "api_endpoint": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "api_endpoint",
- "display_name": "API Endpoint",
- "advanced": false,
- "dynamic": false,
- "info": "API endpoint URL for the Astra DB service.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "batch_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "batch_size",
- "display_name": "Batch Size",
- "advanced": true,
- "dynamic": false,
- "info": "Optional number of records to process in a single batch.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_delete_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_delete_concurrency",
- "display_name": "Bulk Delete Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk delete operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_batch_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_batch_concurrency",
- "display_name": "Bulk Insert Batch Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_overwrite_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_overwrite_concurrency",
- "display_name": "Bulk Insert Overwrite Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": 
True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: 
Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "collection_indexing_policy": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_indexing_policy",
- "display_name": "Collection Indexing Policy",
- "advanced": true,
- "dynamic": false,
- "info": "Optional dictionary defining the indexing policy for the collection.",
- "load_from_db": false,
- "title_case": false
- },
- "collection_name": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_name",
- "display_name": "Collection Name",
- "advanced": false,
- "dynamic": false,
- "info": "The name of the collection within Astra DB where the vectors will be stored.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": "langflow"
- },
- "metadata_indexing_exclude": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_exclude",
- "display_name": "Metadata Indexing Exclude",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to exclude from the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metadata_indexing_include": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_include",
- "display_name": "Metadata Indexing Include",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to include in the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metric": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metric",
- "display_name": "Metric",
- "advanced": true,
- "dynamic": false,
- "info": "Optional distance metric for vector comparisons in the vector store.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "namespace": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "namespace",
- "display_name": "Namespace",
- "advanced": true,
- "dynamic": false,
- "info": "Optional namespace within Astra DB to use for the collection.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "number_of_results": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 4,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "number_of_results",
- "display_name": "Number of Results",
- "advanced": true,
- "dynamic": false,
- "info": "Number of results to return.",
- "load_from_db": false,
- "title_case": false
- },
- "pre_delete_collection": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "pre_delete_collection",
- "display_name": "Pre Delete Collection",
- "advanced": true,
- "dynamic": false,
- "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
- "load_from_db": false,
- "title_case": false
- },
- "search_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Similarity",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Similarity",
- "MMR"
- ],
- "name": "search_type",
- "display_name": "Search Type",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "setup_mode": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Sync",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Sync",
- "Async",
- "Off"
- ],
- "name": "setup_mode",
- "display_name": "Setup Mode",
- "advanced": true,
- "dynamic": false,
- "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "token": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "token",
- "display_name": "Token",
- "advanced": false,
- "dynamic": false,
- "info": "Authentication token for accessing Astra DB.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "_type": "CustomComponent"
- },
- "description": "Searches an existing Astra DB Vector Store.",
- "icon": "AstraDB",
- "base_classes": [
- "Record"
- ],
- "display_name": "Astra DB Search",
- "documentation": "",
- "custom_fields": {
- "embedding": null,
- "collection_name": null,
- "input_value": null,
- "token": null,
- "api_endpoint": null,
- "search_type": null,
- "number_of_results": null,
- "namespace": null,
- "metric": null,
- "batch_size": null,
- "bulk_insert_batch_concurrency": null,
- "bulk_insert_overwrite_concurrency": null,
- "bulk_delete_concurrency": null,
- "setup_mode": null,
- "pre_delete_collection": null,
- "metadata_indexing_include": null,
- "metadata_indexing_exclude": null,
- "collection_indexing_policy": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "token",
- "api_endpoint",
- "collection_name",
- "input_value",
- "embedding"
- ],
- "beta": false
- },
- "id": "AstraDBSearch-41nRz"
- },
- "selected": false,
- "width": 384,
- "height": 713,
- "dragging": false,
- "positionAbsolute": {
- "x": 1723.976434815103,
- "y": 277.03317407245913
- }
+ "output_types": [
+ "Text"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-EjXlN"
+ },
+ "selected": true,
+ "width": 384,
+ "height": 563,
+ "positionAbsolute": {
+ "x": 3410.117202077183,
+ "y": 431.2038048137648
+ },
+ "dragging": false
+ },
+ {
+ "id": "Prompt-xeI6K",
+ "type": "genericNode",
+ "position": {
+ "x": 2969.0261961391298,
+ "y": 442.1613649809069
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": [
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "context": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "context",
+ "display_name": "context",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ },
+ "question": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "question",
+ "display_name": "question",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
},
- {
- "id": "AstraDB-eUCSS",
- "type": "genericNode",
- "position": {
- "x": 3372.04958055989,
- "y": 1611.0742035495277
- },
- "data": {
- "type": "AstraDB",
- "node": {
- "template": {
- "embedding": {
- "type": "Embeddings",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding",
- "display_name": "Embedding",
- "advanced": false,
- "dynamic": false,
- "info": "Embedding to use",
- "load_from_db": false,
- "title_case": false
- },
- "inputs": {
- "type": "Record",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "inputs",
- "display_name": "Inputs",
- "advanced": false,
- "dynamic": false,
- "info": "Optional list of records to be processed and stored in the vector store.",
- "load_from_db": false,
- "title_case": false
- },
- "api_endpoint": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "api_endpoint",
- "display_name": "API Endpoint",
- "advanced": false,
- "dynamic": false,
- "info": "API endpoint URL for the Astra DB service.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "batch_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "batch_size",
- "display_name": "Batch Size",
- "advanced": true,
- "dynamic": false,
- "info": "Optional number of records to process in a single batch.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_delete_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_delete_concurrency",
- "display_name": "Bulk Delete Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk delete operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_batch_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_batch_concurrency",
- "display_name": "Bulk Insert Batch Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_overwrite_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_overwrite_concurrency",
- "display_name": "Bulk Insert Overwrite Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert 
Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n 
collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "collection_indexing_policy": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_indexing_policy",
- "display_name": "Collection Indexing Policy",
- "advanced": true,
- "dynamic": false,
- "info": "Optional dictionary defining the indexing policy for the collection.",
- "load_from_db": false,
- "title_case": false
- },
- "collection_name": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_name",
- "display_name": "Collection Name",
- "advanced": false,
- "dynamic": false,
- "info": "The name of the collection within Astra DB where the vectors will be stored.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": "langflow"
- },
- "metadata_indexing_exclude": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_exclude",
- "display_name": "Metadata Indexing Exclude",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to exclude from the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metadata_indexing_include": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_include",
- "display_name": "Metadata Indexing Include",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to include in the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metric": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metric",
- "display_name": "Metric",
- "advanced": true,
- "dynamic": false,
- "info": "Optional distance metric for vector comparisons in the vector store.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "namespace": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "namespace",
- "display_name": "Namespace",
- "advanced": true,
- "dynamic": false,
- "info": "Optional namespace within Astra DB to use for the collection.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "pre_delete_collection": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "pre_delete_collection",
- "display_name": "Pre Delete Collection",
- "advanced": true,
- "dynamic": false,
- "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
- "load_from_db": false,
- "title_case": false
- },
- "setup_mode": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Async",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Sync",
- "Async",
- "Off"
- ],
- "name": "setup_mode",
- "display_name": "Setup Mode",
- "advanced": true,
- "dynamic": false,
- "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "token": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "token",
- "display_name": "Token",
- "advanced": false,
- "dynamic": false,
- "info": "Authentication token for accessing Astra DB.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "_type": "CustomComponent"
- },
- "description": "Builds or loads an Astra DB Vector Store.",
- "icon": "AstraDB",
- "base_classes": [
- "VectorStore"
- ],
- "display_name": "Astra DB",
- "documentation": "",
- "custom_fields": {
- "embedding": null,
- "token": null,
- "api_endpoint": null,
- "collection_name": null,
- "inputs": null,
- "namespace": null,
- "metric": null,
- "batch_size": null,
- "bulk_insert_batch_concurrency": null,
- "bulk_insert_overwrite_concurrency": null,
- "bulk_delete_concurrency": null,
- "setup_mode": null,
- "pre_delete_collection": null,
- "metadata_indexing_include": null,
- "metadata_indexing_exclude": null,
- "collection_indexing_policy": null
- },
- "output_types": [
- "VectorStore"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "token",
- "api_endpoint",
- "collection_name",
- "inputs",
- "embedding"
- ],
- "beta": false
- },
- "id": "AstraDB-eUCSS"
- },
- "selected": false,
- "width": 384,
- "height": 573,
- "positionAbsolute": {
- "x": 3372.04958055989,
- "y": 1611.0742035495277
- },
- "dragging": false
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": [
+ "context",
+ "question"
+ ]
},
- {
- "id": "OpenAIEmbeddings-9TPjc",
- "type": "genericNode",
- "position": {
- "x": 2814.0402191223047,
- "y": 1955.9268168273086
- },
- "data": {
- "type": "OpenAIEmbeddings",
- "node": {
- "template": {
- "allowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "allowed_special",
- "display_name": "Allowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "client": {
- "type": "Any",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "client",
- "display_name": "Client",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_headers": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_headers",
- "display_name": "Default Headers",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_query": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_query",
- "display_name": "Default Query",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "deployment": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "deployment",
- "display_name": "Deployment",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "disallowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [
- "all"
- ],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "disallowed_special",
- "display_name": "Disallowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "embedding_ctx_length": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 8191,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding_ctx_length",
- "display_name": "Embedding Context Length",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_retries": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 6,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_retries",
- "display_name": "Max Retries",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "text-embedding-3-small",
- "text-embedding-3-large",
- "text-embedding-ada-002"
- ],
- "name": "model",
- "display_name": "Model",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "openai_api_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_type",
- "display_name": "OpenAI API Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_version": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_version",
- "display_name": "OpenAI API Version",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_organization": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_organization",
- "display_name": "OpenAI Organization",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_proxy": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_proxy",
- "display_name": "OpenAI Proxy",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "request_timeout": {
- "type": "float",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "request_timeout",
- "display_name": "Request Timeout",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "show_progress_bar": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "show_progress_bar",
- "display_name": "Show Progress Bar",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "skip_empty": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "skip_empty",
- "display_name": "Skip Empty",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_enable": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_enable",
- "display_name": "TikToken Enable",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_model_name",
- "display_name": "TikToken Model Name",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Generate embeddings using OpenAI models.",
- "base_classes": [
- "Embeddings"
- ],
- "display_name": "OpenAI Embeddings",
- "documentation": "",
- "custom_fields": {
- "openai_api_key": null,
- "default_headers": null,
- "default_query": null,
- "allowed_special": null,
- "disallowed_special": null,
- "chunk_size": null,
- "client": null,
- "deployment": null,
- "embedding_ctx_length": null,
- "max_retries": null,
- "model": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "openai_api_type": null,
- "openai_api_version": null,
- "openai_organization": null,
- "openai_proxy": null,
- "request_timeout": null,
- "show_progress_bar": null,
- "skip_empty": null,
- "tiktoken_enable": null,
- "tiktoken_model_name": null
- },
- "output_types": [
- "Embeddings"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "OpenAIEmbeddings-9TPjc"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "positionAbsolute": {
- "x": 2814.0402191223047,
- "y": 1955.9268168273086
- },
- "dragging": false
- }
- ],
- "edges": [
- {
- "source": "TextOutput-BDknO",
- "target": "Prompt-xeI6K",
- "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}",
- "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "id": "reactflow__edge-TextOutput-BDknO{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}-Prompt-xeI6K{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "context",
- "id": "Prompt-xeI6K",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "TextOutput",
- "id": "TextOutput-BDknO"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": [
+ "Text"
+ ],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-xeI6K",
+ "description": "Create a prompt template with dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 477,
+ "positionAbsolute": {
+ "x": 2969.0261961391298,
+ "y": 442.1613649809069
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-Q39I8",
+ "type": "genericNode",
+ "position": {
+ "x": 3887.2073667611485,
+ "y": 588.4801225794856
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "In case of Message being a Record, this template will be used to convert it to text.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Machine",
+ "User"
+ ],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "AI",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "ChatInput-yxMKE",
- "target": "Prompt-xeI6K",
- "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}",
- "targetHandle": "{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-Prompt-xeI6K{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "question",
- "id": "Prompt-xeI6K",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-yxMKE"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "description": "Display a chat message in the Interaction Panel.",
+ "icon": "ChatOutput",
+ "base_classes": [
+ "object",
+ "Text",
+ "Record",
+ "str"
+ ],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null,
+ "record_template": null
},
- {
- "source": "Prompt-xeI6K",
- "target": "OpenAIModel-EjXlN",
- "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}",
- "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "id": "reactflow__edge-Prompt-xeI6K{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}-OpenAIModel-EjXlN{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-EjXlN",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "Prompt",
- "id": "Prompt-xeI6K"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": [
+ "Text",
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-Q39I8"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 3887.2073667611485,
+ "y": 588.4801225794856
+ },
+ "dragging": false
+ },
+ {
+ "id": "File-t0a6a",
+ "type": "genericNode",
+ "position": {
+ "x": 2257.233450682836,
+ "y": 1747.5389618367233
+ },
+ "data": {
+ "type": "File",
+ "node": {
+ "template": {
+ "path": {
+ "type": "file",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [
+ ".txt",
+ ".md",
+ ".mdx",
+ ".csv",
+ ".json",
+ ".yaml",
+ ".yml",
+ ".xml",
+ ".html",
+ ".htm",
+ ".pdf",
+ ".docx",
+ ".py",
+ ".sh",
+ ".sql",
+ ".js",
+ ".ts",
+ ".tsx"
+ ],
+ "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf",
+ "password": false,
+ "name": "path",
+ "display_name": "Path",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx",
+ "load_from_db": false,
+ "title_case": false,
+ "value": ""
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "silent_errors": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "silent_errors",
+ "display_name": "Silent Errors",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If true, errors will not raise an exception.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIModel-EjXlN",
- "target": "ChatOutput-Q39I8",
- "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}",
- "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "id": "reactflow__edge-OpenAIModel-EjXlN{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}-ChatOutput-Q39I8{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-Q39I8",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-EjXlN"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "description": "A generic file loader.",
+ "icon": "file-text",
+ "base_classes": [
+ "Record"
+ ],
+ "display_name": "File",
+ "documentation": "",
+ "custom_fields": {
+ "path": null,
+ "silent_errors": null
},
- {
- "source": "File-t0a6a",
- "target": "RecursiveCharacterTextSplitter-tR9QM",
- "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}",
- "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}",
- "id": "reactflow__edge-File-t0a6a{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}-RecursiveCharacterTextSplitter-tR9QM{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}",
- "data": {
- "targetHandle": {
- "fieldName": "inputs",
- "id": "RecursiveCharacterTextSplitter-tR9QM",
- "inputTypes": [
- "Document",
- "Record"
- ],
- "type": "Document"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "File",
- "id": "File-t0a6a"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": [
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "File-t0a6a"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 281,
+ "positionAbsolute": {
+ "x": 2257.233450682836,
+ "y": 1747.5389618367233
+ },
+ "dragging": false
+ },
+ {
+ "id": "RecursiveCharacterTextSplitter-tR9QM",
+ "type": "genericNode",
+ "position": {
+ "x": 2791.013514133929,
+ "y": 1462.9588953494142
+ },
+ "data": {
+ "type": "RecursiveCharacterTextSplitter",
+ "node": {
+ "template": {
+ "inputs": {
+ "type": "Document",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "inputs",
+ "display_name": "Input",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "Record"
+ ],
+ "dynamic": false,
+ "info": "The texts to split.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "chunk_overlap": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 200,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_overlap",
+ "display_name": "Chunk Overlap",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The amount of overlap between chunks.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The maximum length of each chunk.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = 
None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "separators": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "separators",
+ "display_name": "Separators",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": [
+ ""
+ ]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIEmbeddings-ZlOk1",
- "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}",
- "target": "AstraDBSearch-41nRz",
- "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
- "data": {
- "targetHandle": {
- "fieldName": "embedding",
- "id": "AstraDBSearch-41nRz",
- "inputTypes": null,
- "type": "Embeddings"
- },
- "sourceHandle": {
- "baseClasses": [
- "Embeddings"
- ],
- "dataType": "OpenAIEmbeddings",
- "id": "OpenAIEmbeddings-ZlOk1"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}-AstraDBSearch-41nRz{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}"
+ "description": "Split text into chunks of a specified length.",
+ "base_classes": [
+ "Record"
+ ],
+ "display_name": "Recursive Character Text Splitter",
+ "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter",
+ "custom_fields": {
+ "inputs": null,
+ "separators": null,
+ "chunk_size": null,
+ "chunk_overlap": null
},
- {
- "source": "ChatInput-yxMKE",
- "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}",
- "target": "AstraDBSearch-41nRz",
- "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "AstraDBSearch-41nRz",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-yxMKE"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-AstraDBSearch-41nRz{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}"
+ "output_types": [
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "RecursiveCharacterTextSplitter-tR9QM"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 501,
+ "positionAbsolute": {
+ "x": 2791.013514133929,
+ "y": 1462.9588953494142
+ },
+ "dragging": false
+ },
+ {
+ "id": "AstraDBSearch-41nRz",
+ "type": "genericNode",
+ "position": {
+ "x": 1723.976434815103,
+ "y": 277.03317407245913
+ },
+ "data": {
+ "type": "AstraDBSearch",
+ "node": {
+ "template": {
+ "embedding": {
+ "type": "Embeddings",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding",
+ "display_name": "Embedding",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Embedding to use",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input Value",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Input value to search",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "api_endpoint": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "api_endpoint",
+ "display_name": "API Endpoint",
+ "advanced": false,
+ "dynamic": false,
+ "info": "API endpoint URL for the Astra DB service.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "batch_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "batch_size",
+ "display_name": "Batch Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional number of records to process in a single batch.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_delete_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_delete_concurrency",
+ "display_name": "Bulk Delete Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk delete operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_batch_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_batch_concurrency",
+ "display_name": "Bulk Insert Batch Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_overwrite_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_overwrite_concurrency",
+ "display_name": "Bulk Insert Overwrite Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": 
True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: 
Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_indexing_policy": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_indexing_policy",
+ "display_name": "Collection Indexing Policy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional dictionary defining the indexing policy for the collection.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_name",
+ "display_name": "Collection Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The name of the collection within Astra DB where the vectors will be stored.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": "langflow"
+ },
+ "metadata_indexing_exclude": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_exclude",
+ "display_name": "Metadata Indexing Exclude",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to exclude from the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "metadata_indexing_include": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_include",
+ "display_name": "Metadata Indexing Include",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to include in the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "metric": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metric",
+ "display_name": "Metric",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional distance metric for vector comparisons in the vector store.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "namespace": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "namespace",
+ "display_name": "Namespace",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional namespace within Astra DB to use for the collection.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "number_of_results": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 4,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "number_of_results",
+ "display_name": "Number of Results",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Number of results to return.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "pre_delete_collection": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "pre_delete_collection",
+ "display_name": "Pre Delete Collection",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "search_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Similarity",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Similarity",
+ "MMR"
+ ],
+ "name": "search_type",
+ "display_name": "Search Type",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "setup_mode": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Sync",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Sync",
+ "Async",
+ "Off"
+ ],
+ "name": "setup_mode",
+ "display_name": "Setup Mode",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "token": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "token",
+ "display_name": "Token",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Authentication token for accessing Astra DB.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "RecursiveCharacterTextSplitter-tR9QM",
- "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}",
- "target": "AstraDB-eUCSS",
- "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}",
- "data": {
- "targetHandle": {
- "fieldName": "inputs",
- "id": "AstraDB-eUCSS",
- "inputTypes": null,
- "type": "Record"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "RecursiveCharacterTextSplitter",
- "id": "RecursiveCharacterTextSplitter-tR9QM"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}-AstraDB-eUCSS{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}",
- "selected": false
+ "description": "Searches an existing Astra DB Vector Store.",
+ "icon": "AstraDB",
+ "base_classes": [
+ "Record"
+ ],
+ "display_name": "Astra DB Search",
+ "documentation": "",
+ "custom_fields": {
+ "embedding": null,
+ "collection_name": null,
+ "input_value": null,
+ "token": null,
+ "api_endpoint": null,
+ "search_type": null,
+ "number_of_results": null,
+ "namespace": null,
+ "metric": null,
+ "batch_size": null,
+ "bulk_insert_batch_concurrency": null,
+ "bulk_insert_overwrite_concurrency": null,
+ "bulk_delete_concurrency": null,
+ "setup_mode": null,
+ "pre_delete_collection": null,
+ "metadata_indexing_include": null,
+ "metadata_indexing_exclude": null,
+ "collection_indexing_policy": null
},
- {
- "source": "OpenAIEmbeddings-9TPjc",
- "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}",
- "target": "AstraDB-eUCSS",
- "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
- "data": {
- "targetHandle": {
- "fieldName": "embedding",
- "id": "AstraDB-eUCSS",
- "inputTypes": null,
- "type": "Embeddings"
- },
- "sourceHandle": {
- "baseClasses": [
- "Embeddings"
- ],
- "dataType": "OpenAIEmbeddings",
- "id": "OpenAIEmbeddings-9TPjc"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}-AstraDB-eUCSS{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
- "selected": false
- },
- {
- "source": "AstraDBSearch-41nRz",
- "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}",
- "target": "TextOutput-BDknO",
- "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "TextOutput-BDknO",
- "inputTypes": [
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "AstraDBSearch",
- "id": "AstraDBSearch-41nRz"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-AstraDBSearch-41nRz{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}-TextOutput-BDknO{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}"
- }
- ],
- "viewport": {
- "x": -259.6782520315529,
- "y": 90.3428735006047,
- "zoom": 0.2687057134854984
+ "output_types": [
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "token",
+ "api_endpoint",
+ "collection_name",
+ "input_value",
+ "embedding"
+ ],
+ "beta": false
+ },
+ "id": "AstraDBSearch-41nRz"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 713,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 1723.976434815103,
+ "y": 277.03317407245913
}
- },
- "description": "Visit https://pre-release.langflow.org/guides/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.",
- "name": "Vector Store RAG",
- "last_tested_version": "1.0.0a0",
- "is_component": false
+ },
+ {
+ "id": "AstraDB-eUCSS",
+ "type": "genericNode",
+ "position": {
+ "x": 3372.04958055989,
+ "y": 1611.0742035495277
+ },
+ "data": {
+ "type": "AstraDB",
+ "node": {
+ "template": {
+ "embedding": {
+ "type": "Embeddings",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding",
+ "display_name": "Embedding",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Embedding to use",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "inputs": {
+ "type": "Record",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "inputs",
+ "display_name": "Inputs",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Optional list of records to be processed and stored in the vector store.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "api_endpoint": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "api_endpoint",
+ "display_name": "API Endpoint",
+ "advanced": false,
+ "dynamic": false,
+ "info": "API endpoint URL for the Astra DB service.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "batch_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "batch_size",
+ "display_name": "Batch Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional number of records to process in a single batch.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_delete_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_delete_concurrency",
+ "display_name": "Bulk Delete Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk delete operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_batch_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_batch_concurrency",
+ "display_name": "Bulk Insert Batch Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_overwrite_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_overwrite_concurrency",
+ "display_name": "Bulk Insert Overwrite Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert 
Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n 
collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_indexing_policy": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_indexing_policy",
+ "display_name": "Collection Indexing Policy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional dictionary defining the indexing policy for the collection.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_name",
+ "display_name": "Collection Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The name of the collection within Astra DB where the vectors will be stored.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": "langflow"
+ },
+ "metadata_indexing_exclude": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_exclude",
+ "display_name": "Metadata Indexing Exclude",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to exclude from the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "metadata_indexing_include": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_include",
+ "display_name": "Metadata Indexing Include",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to include in the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "metric": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metric",
+ "display_name": "Metric",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional distance metric for vector comparisons in the vector store.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "namespace": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "namespace",
+ "display_name": "Namespace",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional namespace within Astra DB to use for the collection.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "pre_delete_collection": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "pre_delete_collection",
+ "display_name": "Pre Delete Collection",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "setup_mode": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Async",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Sync",
+ "Async",
+ "Off"
+ ],
+ "name": "setup_mode",
+ "display_name": "Setup Mode",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "token": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "token",
+ "display_name": "Token",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Authentication token for accessing Astra DB.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Builds or loads an Astra DB Vector Store.",
+ "icon": "AstraDB",
+ "base_classes": [
+ "VectorStore"
+ ],
+ "display_name": "Astra DB",
+ "documentation": "",
+ "custom_fields": {
+ "embedding": null,
+ "token": null,
+ "api_endpoint": null,
+ "collection_name": null,
+ "inputs": null,
+ "namespace": null,
+ "metric": null,
+ "batch_size": null,
+ "bulk_insert_batch_concurrency": null,
+ "bulk_insert_overwrite_concurrency": null,
+ "bulk_delete_concurrency": null,
+ "setup_mode": null,
+ "pre_delete_collection": null,
+ "metadata_indexing_include": null,
+ "metadata_indexing_exclude": null,
+ "collection_indexing_policy": null
+ },
+ "output_types": [
+ "VectorStore"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "token",
+ "api_endpoint",
+ "collection_name",
+ "inputs",
+ "embedding"
+ ],
+ "beta": false
+ },
+ "id": "AstraDB-eUCSS"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 573,
+ "positionAbsolute": {
+ "x": 3372.04958055989,
+ "y": 1611.0742035495277
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIEmbeddings-9TPjc",
+ "type": "genericNode",
+ "position": {
+ "x": 2814.0402191223047,
+ "y": 1955.9268168273086
+ },
+ "data": {
+ "type": "OpenAIEmbeddings",
+ "node": {
+ "template": {
+ "allowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "allowed_special",
+ "display_name": "Allowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "client": {
+ "type": "Any",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "client",
+ "display_name": "Client",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_headers": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_headers",
+ "display_name": "Default Headers",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_query": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_query",
+ "display_name": "Default Query",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "deployment": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "deployment",
+ "display_name": "Deployment",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "disallowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [
+ "all"
+ ],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "disallowed_special",
+ "display_name": "Disallowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "embedding_ctx_length": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 8191,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding_ctx_length",
+ "display_name": "Embedding Context Length",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_retries": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 6,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_retries",
+ "display_name": "Max Retries",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "text-embedding-3-small",
+ "text-embedding-3-large",
+ "text-embedding-ada-002"
+ ],
+ "name": "model",
+ "display_name": "Model",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "openai_api_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_type",
+ "display_name": "OpenAI API Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_version": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_version",
+ "display_name": "OpenAI API Version",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_organization": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_organization",
+ "display_name": "OpenAI Organization",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_proxy": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_proxy",
+ "display_name": "OpenAI Proxy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "request_timeout": {
+ "type": "float",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "request_timeout",
+ "display_name": "Request Timeout",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "show_progress_bar": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "show_progress_bar",
+ "display_name": "Show Progress Bar",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "skip_empty": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "skip_empty",
+ "display_name": "Skip Empty",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_enable": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_enable",
+ "display_name": "TikToken Enable",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_model_name",
+ "display_name": "TikToken Model Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generate embeddings using OpenAI models.",
+ "base_classes": [
+ "Embeddings"
+ ],
+ "display_name": "OpenAI Embeddings",
+ "documentation": "",
+ "custom_fields": {
+ "openai_api_key": null,
+ "default_headers": null,
+ "default_query": null,
+ "allowed_special": null,
+ "disallowed_special": null,
+ "chunk_size": null,
+ "client": null,
+ "deployment": null,
+ "embedding_ctx_length": null,
+ "max_retries": null,
+ "model": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "openai_api_type": null,
+ "openai_api_version": null,
+ "openai_organization": null,
+ "openai_proxy": null,
+ "request_timeout": null,
+ "show_progress_bar": null,
+ "skip_empty": null,
+ "tiktoken_enable": null,
+ "tiktoken_model_name": null
+ },
+ "output_types": [
+ "Embeddings"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "OpenAIEmbeddings-9TPjc"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 2814.0402191223047,
+ "y": 1955.9268168273086
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "TextOutput-BDknO",
+ "target": "Prompt-xeI6K",
+ "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}",
+ "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
+ "id": "reactflow__edge-TextOutput-BDknO{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}-Prompt-xeI6K{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "context",
+ "id": "Prompt-xeI6K",
+ "inputTypes": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "dataType": "TextOutput",
+ "id": "TextOutput-BDknO"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "ChatInput-yxMKE",
+ "target": "Prompt-xeI6K",
+ "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}",
+ "targetHandle": "{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
+ "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-Prompt-xeI6K{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "question",
+ "id": "Prompt-xeI6K",
+ "inputTypes": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Text",
+ "str",
+ "object",
+ "Record"
+ ],
+ "dataType": "ChatInput",
+ "id": "ChatInput-yxMKE"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "Prompt-xeI6K",
+ "target": "OpenAIModel-EjXlN",
+ "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}",
+ "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
+ "id": "reactflow__edge-Prompt-xeI6K{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}-OpenAIModel-EjXlN{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-EjXlN",
+ "inputTypes": [
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "dataType": "Prompt",
+ "id": "Prompt-xeI6K"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIModel-EjXlN",
+ "target": "ChatOutput-Q39I8",
+ "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}",
+ "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
+ "id": "reactflow__edge-OpenAIModel-EjXlN{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}-ChatOutput-Q39I8{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-Q39I8",
+ "inputTypes": [
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-EjXlN"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "File-t0a6a",
+ "target": "RecursiveCharacterTextSplitter-tR9QM",
+ "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}",
+ "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}",
+ "id": "reactflow__edge-File-t0a6a{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}-RecursiveCharacterTextSplitter-tR9QM{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "inputs",
+ "id": "RecursiveCharacterTextSplitter-tR9QM",
+ "inputTypes": [
+ "Document",
+ "Record"
+ ],
+ "type": "Document"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Record"
+ ],
+ "dataType": "File",
+ "id": "File-t0a6a"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIEmbeddings-ZlOk1",
+ "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}",
+ "target": "AstraDBSearch-41nRz",
+ "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "embedding",
+ "id": "AstraDBSearch-41nRz",
+ "inputTypes": null,
+ "type": "Embeddings"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Embeddings"
+ ],
+ "dataType": "OpenAIEmbeddings",
+ "id": "OpenAIEmbeddings-ZlOk1"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}-AstraDBSearch-41nRz{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}"
+ },
+ {
+ "source": "ChatInput-yxMKE",
+ "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}",
+ "target": "AstraDBSearch-41nRz",
+ "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "AstraDBSearch-41nRz",
+ "inputTypes": [
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Text",
+ "str",
+ "object",
+ "Record"
+ ],
+ "dataType": "ChatInput",
+ "id": "ChatInput-yxMKE"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-AstraDBSearch-41nRz{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}"
+ },
+ {
+ "source": "RecursiveCharacterTextSplitter-tR9QM",
+ "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}",
+ "target": "AstraDB-eUCSS",
+ "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "inputs",
+ "id": "AstraDB-eUCSS",
+ "inputTypes": null,
+ "type": "Record"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Record"
+ ],
+ "dataType": "RecursiveCharacterTextSplitter",
+ "id": "RecursiveCharacterTextSplitter-tR9QM"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}-AstraDB-eUCSS{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}",
+ "selected": false
+ },
+ {
+ "source": "OpenAIEmbeddings-9TPjc",
+ "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}",
+ "target": "AstraDB-eUCSS",
+ "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "embedding",
+ "id": "AstraDB-eUCSS",
+ "inputTypes": null,
+ "type": "Embeddings"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Embeddings"
+ ],
+ "dataType": "OpenAIEmbeddings",
+ "id": "OpenAIEmbeddings-9TPjc"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}-AstraDB-eUCSS{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
+ "selected": false
+ },
+ {
+ "source": "AstraDBSearch-41nRz",
+ "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}",
+ "target": "TextOutput-BDknO",
+ "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "TextOutput-BDknO",
+ "inputTypes": [
+ "Record",
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Record"
+ ],
+ "dataType": "AstraDBSearch",
+ "id": "AstraDBSearch-41nRz"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-AstraDBSearch-41nRz{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}-TextOutput-BDknO{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}"
+ }
+ ],
+ "viewport": {
+ "x": -259.6782520315529,
+ "y": 90.3428735006047,
+ "zoom": 0.2687057134854984
+ }
+ },
+ "description": "Visit https://pre-release.langflow.org/guides/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.",
+ "name": "Vector Store RAG",
+ "last_tested_version": "1.0.0a0",
+ "is_component": false
}
\ No newline at end of file
diff --git a/src/backend/base/langflow/processing/process.py b/src/backend/base/langflow/processing/process.py
index c05163933..770f7ed76 100644
--- a/src/backend/base/langflow/processing/process.py
+++ b/src/backend/base/langflow/processing/process.py
@@ -245,7 +245,7 @@ def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None:
logger.warning(f"Node {node.get('id')} does not have a tweak named {tweak_name}")
continue
if tweak_name in template_data:
- key = tweak_name if tweak_name == "file_path" else "value"
+ key = "file_path" if template_data[tweak_name]["type"] == "file" else "value"
template_data[tweak_name][key] = tweak_value
diff --git a/src/backend/base/langflow/server.py b/src/backend/base/langflow/server.py
index 6a1509dc0..05e19ecb3 100644
--- a/src/backend/base/langflow/server.py
+++ b/src/backend/base/langflow/server.py
@@ -35,11 +35,7 @@ class LangflowApplication(BaseApplication):
super().__init__()
def load_config(self):
- config = {
- key: value
- for key, value in self.options.items()
- if key in self.cfg.settings and value is not None
- }
+ config = {key: value for key, value in self.options.items() if key in self.cfg.settings and value is not None}
for key, value in config.items():
self.cfg.set(key.lower(), value)
diff --git a/src/backend/base/langflow/services/database/service.py b/src/backend/base/langflow/services/database/service.py
index 2b1112fc8..ac14f4d4d 100644
--- a/src/backend/base/langflow/services/database/service.py
+++ b/src/backend/base/langflow/services/database/service.py
@@ -1,6 +1,5 @@
from datetime import datetime
import time
-from datetime import datetime
from pathlib import Path
from typing import TYPE_CHECKING
@@ -37,10 +36,7 @@ class DatabaseService(Service):
def _create_engine(self) -> "Engine":
"""Create the engine for the database."""
settings_service = get_settings_service()
- if (
- settings_service.settings.DATABASE_URL
- and settings_service.settings.DATABASE_URL.startswith("sqlite")
- ):
+ if settings_service.settings.DATABASE_URL and settings_service.settings.DATABASE_URL.startswith("sqlite"):
connect_args = {"check_same_thread": False}
else:
connect_args = {}
@@ -52,9 +48,7 @@ class DatabaseService(Service):
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is not None: # If an exception has been raised
- logger.error(
- f"Session rollback because of exception: {exc_type.__name__} {exc_value}"
- )
+ logger.error(f"Session rollback because of exception: {exc_type.__name__} {exc_value}")
self._session.rollback()
else:
self._session.commit()
@@ -71,9 +65,7 @@ class DatabaseService(Service):
settings_service = get_settings_service()
if settings_service.auth_settings.AUTO_LOGIN:
with Session(self.engine) as session:
- flows = session.exec(
- select(models.Flow).where(models.Flow.user_id is None)
- ).all()
+ flows = session.exec(select(models.Flow).where(models.Flow.user_id is None)).all()
if flows:
logger.debug("Migrating flows to default superuser")
username = settings_service.auth_settings.SUPERUSER
@@ -103,9 +95,7 @@ class DatabaseService(Service):
expected_columns = list(model.model_fields.keys())
try:
- available_columns = [
- col["name"] for col in inspector.get_columns(table)
- ]
+ available_columns = [col["name"] for col in inspector.get_columns(table)]
except sa.exc.NoSuchTableError:
logger.debug(f"Missing table: {table}")
return False
@@ -169,9 +159,7 @@ class DatabaseService(Service):
buffer.write(f"{datetime.now().isoformat()}: Checking migrations\n")
command.check(alembic_cfg)
except Exception as exc:
- if isinstance(
- exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)
- ):
+ if isinstance(exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)):
command.upgrade(alembic_cfg, "head")
time.sleep(3)
@@ -208,10 +196,7 @@ class DatabaseService(Service):
# We will check that all models are in the database
# and that the database is up to date with all columns
sql_models = [models.Flow, models.User, models.ApiKey]
- return [
- TableResults(sql_model.__tablename__, self.check_table(sql_model))
- for sql_model in sql_models
- ]
+ return [TableResults(sql_model.__tablename__, self.check_table(sql_model)) for sql_model in sql_models]
def check_table(self, model):
results = []
@@ -220,9 +205,7 @@ class DatabaseService(Service):
expected_columns = list(model.__fields__.keys())
available_columns = []
try:
- available_columns = [
- col["name"] for col in inspector.get_columns(table_name)
- ]
+ available_columns = [col["name"] for col in inspector.get_columns(table_name)]
results.append(Result(name=table_name, type="table", success=True))
except sa.exc.NoSuchTableError:
logger.error(f"Missing table: {table_name}")
@@ -253,9 +236,7 @@ class DatabaseService(Service):
try:
table.create(self.engine, checkfirst=True)
except OperationalError as oe:
- logger.warning(
- f"Table {table} already exists, skipping. Exception: {oe}"
- )
+ logger.warning(f"Table {table} already exists, skipping. Exception: {oe}")
except Exception as exc:
logger.error(f"Error creating table {table}: {exc}")
raise RuntimeError(f"Error creating table {table}") from exc
@@ -267,9 +248,7 @@ class DatabaseService(Service):
if table not in table_names:
logger.error("Something went wrong creating the database and tables.")
logger.error("Please check your database settings.")
- raise RuntimeError(
- "Something went wrong creating the database and tables."
- )
+ raise RuntimeError("Something went wrong creating the database and tables.")
logger.debug("Database and tables created successfully")
diff --git a/src/backend/base/langflow/template/frontend_node/llms.py b/src/backend/base/langflow/template/frontend_node/llms.py
index e33c4a60a..7bf5a8cb6 100644
--- a/src/backend/base/langflow/template/frontend_node/llms.py
+++ b/src/backend/base/langflow/template/frontend_node/llms.py
@@ -63,8 +63,8 @@ class LLMFrontendNode(FrontendNode):
field.info = OPENAI_API_BASE_INFO
def add_extra_base_classes(self) -> None:
- if "BaseLLM" not in self.base_classes:
- self.base_classes.append("BaseLLM")
+ if "BaseLanguageModel" not in self.base_classes:
+ self.base_classes.append("BaseLanguageModel")
@staticmethod
def format_azure_field(field: TemplateField):
diff --git a/src/backend/base/langflow/utils/logger.py b/src/backend/base/langflow/utils/logger.py
index a656a8462..f63120443 100644
--- a/src/backend/base/langflow/utils/logger.py
+++ b/src/backend/base/langflow/utils/logger.py
@@ -26,10 +26,7 @@ def patching(record):
def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
- if (
- os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS
- and log_level is None
- ):
+ if os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS and log_level is None:
log_level = os.getenv("LANGFLOW_LOG_LEVEL")
if log_level is None:
log_level = "ERROR"
@@ -77,11 +74,7 @@ def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
def setup_uvicorn_logger():
- loggers = (
- logging.getLogger(name)
- for name in logging.root.manager.loggerDict
- if name.startswith("uvicorn.")
- )
+ loggers = (logging.getLogger(name) for name in logging.root.manager.loggerDict if name.startswith("uvicorn."))
for uvicorn_logger in loggers:
uvicorn_logger.handlers = []
logging.getLogger("uvicorn").handlers = [InterceptHandler()]
@@ -111,6 +104,4 @@ class InterceptHandler(logging.Handler):
frame = frame.f_back
depth += 1
- logger.opt(depth=depth, exception=record.exc_info).log(
- level, record.getMessage()
- )
+ logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
diff --git a/src/backend/base/poetry.lock b/src/backend/base/poetry.lock
index 32be8f70b..96c6f71ca 100644
--- a/src/backend/base/poetry.lock
+++ b/src/backend/base/poetry.lock
@@ -865,13 +865,13 @@ files = [
[[package]]
name = "ecdsa"
-version = "0.18.0"
+version = "0.19.0"
description = "ECDSA cryptographic signature library (pure python)"
optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6"
files = [
- {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"},
- {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"},
+ {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"},
+ {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"},
]
[package.dependencies]
@@ -2533,61 +2533,62 @@ files = [
[[package]]
name = "orjson"
-version = "3.9.15"
+version = "3.10.0"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
files = [
- {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"},
- {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"},
- {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"},
- {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"},
- {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"},
- {file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"},
- {file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"},
- {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"},
- {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"},
- {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"},
- {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"},
- {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"},
- {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"},
- {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"},
- {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"},
- {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"},
- {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"},
- {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"},
- {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"},
- {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"},
- {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"},
- {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"},
- {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"},
- {file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"},
- {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"},
- {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"},
- {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"},
- {file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"},
- {file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"},
- {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"},
+ {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"},
+ {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"},
+ {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"},
+ {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"},
+ {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"},
+ {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"},
+ {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"},
+ {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"},
+ {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"},
+ {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"},
+ {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"},
+ {file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"},
+ {file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"},
+ {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"},
+ {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"},
+ {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"},
+ {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"},
+ {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"},
+ {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"},
+ {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"},
+ {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"},
+ {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"},
+ {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"},
+ {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"},
+ {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"},
+ {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"},
+ {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"},
+ {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"},
+ {file = "orjson-3.10.0-cp39-none-win32.whl", hash = "sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"},
+ {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"},
+ {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"},
]
[[package]]
@@ -3902,13 +3903,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,
[[package]]
name = "typer"
-version = "0.12.1"
+version = "0.12.2"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = false
python-versions = ">=3.7"
files = [
- {file = "typer-0.12.1-py3-none-any.whl", hash = "sha256:43ebb23c8a358c3d623e31064359a65f50229d0bf73ae8dfd203f49d9126ae06"},
- {file = "typer-0.12.1.tar.gz", hash = "sha256:72d218ef3c686aed9c6ff3ca25b238aee0474a1628b29c559b18b634cfdeca88"},
+ {file = "typer-0.12.2-py3-none-any.whl", hash = "sha256:e1accbaa7e2b2350753acec896ac30493ac573211a8d4603e88f8356217e01f7"},
+ {file = "typer-0.12.2.tar.gz", hash = "sha256:977929604fde12aeada011852ad9c64370501be6ac2eac248f3161cdc9eeb7c9"},
]
[package.dependencies]
@@ -4024,13 +4025,13 @@ files = [
[[package]]
name = "types-redis"
-version = "4.6.0.20240311"
+version = "4.6.0.20240409"
description = "Typing stubs for redis"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-redis-4.6.0.20240311.tar.gz", hash = "sha256:e049bbdff0e0a1f8e701b64636811291d21bff79bf1e7850850a44055224a85f"},
- {file = "types_redis-4.6.0.20240311-py3-none-any.whl", hash = "sha256:6b9d68a29aba1ee400c823d8e5fe88675282eb69d7211e72fe65dbe54b33daca"},
+ {file = "types-redis-4.6.0.20240409.tar.gz", hash = "sha256:ce217c279581d769df992c5b76d61c65425b0a679626048e633e643868eb881b"},
+ {file = "types_redis-4.6.0.20240409-py3-none-any.whl", hash = "sha256:a3b92760c49a034827a0c3825206728df4e61e981c1324099d4414335af4f52f"},
]
[package.dependencies]
@@ -4443,4 +4444,4 @@ local = []
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.12"
-content-hash = "a3de9e754754b1899c6f1755e6e29e170a8286c43e241e8b55be5b060aa464e5"
+content-hash = "ab99a3fe4d80dff80dfa69347dcb22b85d48d33487a14e01c709b51f7b62699e"
diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml
index 5565d6d8d..9441815e2 100644
--- a/src/backend/base/pyproject.toml
+++ b/src/backend/base/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow-base"
-version = "0.0.21"
+version = "0.0.24"
description = "A Python package with a built-in web application"
authors = ["Logspace "]
maintainers = [
@@ -13,7 +13,7 @@ maintainers = [
"Otávio Anovazzi ",
"Rodrigo Nader ",
]
-repository = "https://github.com/logspace-ai/langflow"
+repository = "https://github.com/langflow-ai/langflow"
license = "MIT"
readme = "README.md"
keywords = ["nlp", "langchain", "openai", "gpt", "gui"]
@@ -43,7 +43,7 @@ typer = "^0.12.0"
cachetools = "^5.3.1"
platformdirs = "^4.2.0"
python-multipart = "^0.0.7"
-orjson = "3.9.15"
+orjson = "3.10.0"
alembic = "^1.13.0"
passlib = "^1.7.4"
bcrypt = "4.0.1"
diff --git a/src/frontend/src/components/CrashErrorComponent/index.tsx b/src/frontend/src/components/CrashErrorComponent/index.tsx
index 57dc83d83..978c9b340 100644
--- a/src/frontend/src/components/CrashErrorComponent/index.tsx
+++ b/src/frontend/src/components/CrashErrorComponent/index.tsx
@@ -28,7 +28,7 @@ export default function CrashErrorComponent({
Please report errors with detailed tracebacks on the{" "}
Restart Langflow
diff --git a/src/frontend/src/components/headerComponent/index.tsx b/src/frontend/src/components/headerComponent/index.tsx
index 68e95cf2c..c302800c0 100644
--- a/src/frontend/src/components/headerComponent/index.tsx
+++ b/src/frontend/src/components/headerComponent/index.tsx
@@ -114,7 +114,7 @@ export default function Header(): JSX.Element {
{
const authorizedDomains = [
- "https://raw.githubusercontent.com/logspace-ai/langflow_examples/main/examples",
- "https://api.github.com/repos/logspace-ai/langflow_examples/contents/examples",
- "https://api.github.com/repos/logspace-ai/langflow",
+ "https://raw.githubusercontent.com/langflow-ai/langflow_examples/main/examples",
+ "https://api.github.com/repos/langflow-ai/langflow_examples/contents/examples",
+ "https://api.github.com/repos/langflow-ai/langflow",
"auto_login",
];
diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts
index 52082abd9..920335a97 100644
--- a/src/frontend/src/controllers/API/index.ts
+++ b/src/frontend/src/controllers/API/index.ts
@@ -90,7 +90,7 @@ export async function postValidatePrompt(
*/
export async function getExamples(): Promise {
const url =
- "https://api.github.com/repos/logspace-ai/langflow_examples/contents/examples?ref=main";
+ "https://api.github.com/repos/langflow-ai/langflow_examples/contents/examples?ref=main";
const response = await api.get(url);
const jsonFiles = response.data.filter((file: any) => {
diff --git a/src/frontend/src/stores/typesStore.ts b/src/frontend/src/stores/typesStore.ts
index 808a5d646..142118f44 100644
--- a/src/frontend/src/stores/typesStore.ts
+++ b/src/frontend/src/stores/typesStore.ts
@@ -29,6 +29,7 @@ export const useTypesStore = create((set, get) => ({
.catch((error) => {
console.error("An error has occurred while fetching types.");
console.log(error);
+ setLoading(false);
reject();
});
});
diff --git a/src/frontend/src/utils/utils.ts b/src/frontend/src/utils/utils.ts
index 91d7a72f9..afcc264f6 100644
--- a/src/frontend/src/utils/utils.ts
+++ b/src/frontend/src/utils/utils.ts
@@ -456,7 +456,7 @@ export function getWidgetCode(
const inputs = buildInputs();
let chat_input_field = getChatInputField(flowState);
- return `
+ return `
Document:
+ def build(self, url: str,file:str,integer:int,nested:NestedDict,flt:float,boolean:bool,lisst:list[str],dictionary:dict, llm: BaseLanguageModel, prompt: PromptTemplate) -> Document:
return "test"`;
diff --git a/src/frontend/tests/end-to-end/dropdownComponent.spec.ts b/src/frontend/tests/end-to-end/dropdownComponent.spec.ts
index a31ceae44..0623e7460 100644
--- a/src/frontend/tests/end-to-end/dropdownComponent.spec.ts
+++ b/src/frontend/tests/end-to-end/dropdownComponent.spec.ts
@@ -177,7 +177,7 @@ test("dropDownComponent", async ({ page }) => {
.click();
await page.locator("textarea").press("Control+a");
const emptyOptionsCode = `from typing import Optional
-from langchain.llms.base import BaseLLM
+from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.bedrock import Bedrock
from langflow.interface.custom.custom_component import CustomComponent
@@ -212,7 +212,7 @@ class AmazonBedrockComponent(CustomComponent):
endpoint_url: Optional[str] = None,
streaming: bool = False,
cache: Optional[bool] = None,
- ) -> BaseLLM:
+ ) -> BaseLanguageModel:
try:
output = Bedrock(
credentials_profile_name=credentials_profile_name,
diff --git a/tests/conftest.py b/tests/conftest.py
index 9b6d7c0d6..cfec4f7ef 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -22,9 +22,6 @@ from langflow.services.database.models.flow.model import Flow, FlowCreate
from langflow.services.database.models.user.model import User, UserCreate
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service
-from sqlmodel import Session, SQLModel, create_engine, select
-from sqlmodel.pool import StaticPool
-from typer.testing import CliRunner
if TYPE_CHECKING:
from langflow.services.database.service import DatabaseService
diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py
index 725d35564..07796c540 100644
--- a/tests/test_custom_component.py
+++ b/tests/test_custom_component.py
@@ -6,21 +6,15 @@ import pytest
from langchain_core.documents import Document
from langflow.interface.custom.base import CustomComponent
-from langflow.interface.custom.code_parser.code_parser import (
- CodeParser,
- CodeSyntaxError,
-)
-from langflow.interface.custom.custom_component.component import (
- Component,
- ComponentCodeNullError,
-)
+from langflow.interface.custom.code_parser.code_parser import CodeParser, CodeSyntaxError
+from langflow.interface.custom.custom_component.component import Component, ComponentCodeNullError
from langflow.services.database.models.flow import Flow, FlowCreate
code_default = """
from langflow.field_typing import Prompt
from langflow.interface.custom.custom_component import CustomComponent
-from langchain.llms.base import BaseLLM
+from langflow.field_typing import BaseLanguageModel
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_core.documents import Document
@@ -32,7 +26,7 @@ class YourComponent(CustomComponent):
description: str = "Your description"
field_config = { "url": { "multiline": True, "required": True } }
- def build(self, url: str, llm: BaseLLM, template: Prompt) -> Document:
+ def build(self, url: str, llm: BaseLanguageModel, template: Prompt) -> Document:
response = requests.get(url)
prompt = PromptTemplate.from_template(template)
chain = LLMChain(llm=llm, prompt=prompt)
diff --git a/tests/test_data_components.py b/tests/test_data_components.py
index ca92ee190..1887d6af3 100644
--- a/tests/test_data_components.py
+++ b/tests/test_data_components.py
@@ -6,10 +6,9 @@ import httpx
import pytest
import respx
from httpx import Response
-
from langflow.components import (
data,
-) # Adjust the import according to your project structure
+)
@pytest.fixture
@@ -135,7 +134,7 @@ def test_directory_without_mocks():
from langflow.initial_setup import setup
from langflow.initial_setup.setup import load_starter_projects
- projects = load_starter_projects()
+ _, projects = zip(*load_starter_projects())
# the setup module has a folder where the projects are stored
# the contents of that folder are in the projects variable
# the directory component can be used to load the projects
diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py
index 1430682ae..f5da2c0fc 100644
--- a/tests/test_endpoints.py
+++ b/tests/test_endpoints.py
@@ -472,11 +472,11 @@ def test_successful_run_with_output_type_text(client, starter_project, created_a
assert isinstance(outputs_dict.get("outputs"), list)
assert len(outputs_dict.get("outputs")) == 1
ids = [output.get("component_id") for output in outputs_dict.get("outputs")]
- assert all(["TextOutput" in _id for _id in ids]), ids
+ assert all(["ChatOutput" in _id for _id in ids]), ids
display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")]
- assert all([name in display_names for name in ["Prompt Output"]]), display_names
+ assert all([name in display_names for name in ["Chat Output"]]), display_names
inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")]
- expected_result = "Langflow"
+ expected_result = ""
assert all([expected_result in result for result in inner_results]), inner_results
@@ -501,13 +501,13 @@ def test_successful_run_with_output_type_any(client, starter_project, created_ap
assert "outputs" in outputs_dict
assert outputs_dict.get("inputs") == {"input_value": ""}
assert isinstance(outputs_dict.get("outputs"), list)
- assert len(outputs_dict.get("outputs")) == 2
+ assert len(outputs_dict.get("outputs")) == 1
ids = [output.get("component_id") for output in outputs_dict.get("outputs")]
assert all(["ChatOutput" in _id or "TextOutput" in _id for _id in ids]), ids
display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")]
- assert all([name in display_names for name in ["Chat Output", "Prompt Output"]]), display_names
+ assert all([name in display_names for name in ["Chat Output"]]), display_names
inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")]
- expected_result = "Langflow"
+ expected_result = ""
assert all([expected_result in result for result in inner_results]), inner_results
@@ -533,7 +533,7 @@ def test_successful_run_with_output_type_debug(client, starter_project, created_
assert "outputs" in outputs_dict
assert outputs_dict.get("inputs") == {"input_value": ""}
assert isinstance(outputs_dict.get("outputs"), list)
- assert len(outputs_dict.get("outputs")) == 7
+ assert len(outputs_dict.get("outputs")) == 4
# To test input_type wel'l just set it with output_type debug and check if the value is correct
@@ -559,10 +559,10 @@ def test_successful_run_with_input_type_text(client, starter_project, created_ap
assert "outputs" in outputs_dict
assert outputs_dict.get("inputs") == {"input_value": "value1"}
assert isinstance(outputs_dict.get("outputs"), list)
- assert len(outputs_dict.get("outputs")) == 7
+ assert len(outputs_dict.get("outputs")) == 4
# Now we get all components that contain TextInput in the component_id
text_input_outputs = [output for output in outputs_dict.get("outputs") if "TextInput" in output.get("component_id")]
- assert len(text_input_outputs) == 2
+ assert len(text_input_outputs) == 0
# Now we check if the input_value is correct
assert all([output.get("results").get("result") == "value1" for output in text_input_outputs]), text_input_outputs
@@ -590,7 +590,7 @@ def test_successful_run_with_input_type_chat(client, starter_project, created_ap
assert "outputs" in outputs_dict
assert outputs_dict.get("inputs") == {"input_value": "value1"}
assert isinstance(outputs_dict.get("outputs"), list)
- assert len(outputs_dict.get("outputs")) == 7
+ assert len(outputs_dict.get("outputs")) == 4
# Now we get all components that contain TextInput in the component_id
chat_input_outputs = [output for output in outputs_dict.get("outputs") if "ChatInput" in output.get("component_id")]
assert len(chat_input_outputs) == 1
@@ -620,14 +620,14 @@ def test_successful_run_with_input_type_any(client, starter_project, created_api
assert "outputs" in outputs_dict
assert outputs_dict.get("inputs") == {"input_value": "value1"}
assert isinstance(outputs_dict.get("outputs"), list)
- assert len(outputs_dict.get("outputs")) == 7
+ assert len(outputs_dict.get("outputs")) == 4
# Now we get all components that contain TextInput or ChatInput in the component_id
any_input_outputs = [
output
for output in outputs_dict.get("outputs")
if "TextInput" in output.get("component_id") or "ChatInput" in output.get("component_id")
]
- assert len(any_input_outputs) == 3
+ assert len(any_input_outputs) == 1
# Now we check if the input_value is correct
assert all([output.get("results").get("result") == "value1" for output in any_input_outputs]), any_input_outputs
diff --git a/tests/test_graph.py b/tests/test_graph.py
index 8395c304f..f67c12846 100644
--- a/tests/test_graph.py
+++ b/tests/test_graph.py
@@ -409,7 +409,7 @@ def test_update_source_handle():
@pytest.mark.asyncio
async def test_pickle_graph(json_vector_store):
starter_projects = load_starter_projects()
- data = starter_projects[0]["data"]
+ data = starter_projects[0][1]["data"]
graph = Graph.from_payload(data)
assert isinstance(graph, Graph)
pickled = pickle.dumps(graph)
@@ -421,7 +421,7 @@ async def test_pickle_graph(json_vector_store):
@pytest.mark.asyncio
async def test_pickle_each_vertex(json_vector_store):
starter_projects = load_starter_projects()
- data = starter_projects[0]["data"]
+ data = starter_projects[0][1]["data"]
graph = Graph.from_payload(data)
assert isinstance(graph, Graph)
for vertex in graph.vertices:
@@ -430,15 +430,3 @@ async def test_pickle_each_vertex(json_vector_store):
assert pickled is not None
unpickled = pickle.loads(pickled)
assert unpickled is not None
-
-
-@pytest.mark.asyncio
-async def test_build_ordering(complex_graph_with_groups):
- sorted_vertices = complex_graph_with_groups.sort_vertices(stop_component_id="ChatInput-Ay8QQ")
- assert sorted_vertices == [
- "ChatInput-Ay8QQ",
- "RecordsAsText-vkx2A",
- "FileLoader-Vo1Cq",
- ]
-
- sorted_vertices = complex_graph_with_groups.sort_vertices()
diff --git a/tests/test_initial_setup.py b/tests/test_initial_setup.py
index 33aaf8df3..c506c3275 100644
--- a/tests/test_initial_setup.py
+++ b/tests/test_initial_setup.py
@@ -1,4 +1,5 @@
from datetime import datetime
+from pathlib import Path
import pytest
from langflow.graph.graph.base import Graph
@@ -10,6 +11,7 @@ from langflow.initial_setup.setup import (
load_starter_projects,
)
from langflow.memory import delete_messages
+from langflow.processing.process import process_tweaks
from langflow.services.database.models.flow.model import Flow
from langflow.services.deps import session_scope
from sqlalchemy import func
@@ -19,7 +21,8 @@ from sqlmodel import select
def test_load_starter_projects():
projects = load_starter_projects()
assert isinstance(projects, list)
- assert all(isinstance(project, dict) for project in projects)
+ assert all(isinstance(project[1], dict) for project in projects)
+ assert all(isinstance(project[0], Path) for project in projects)
def test_get_project_data():
@@ -59,7 +62,7 @@ def test_create_or_update_starter_projects(client):
@pytest.mark.asyncio
-async def test_starter_project_can_run_successfully(client):
+async def test_starter_projects_can_run_successfully(client):
with session_scope() as session:
# Run the function to create or update projects
create_or_update_starter_projects()
@@ -75,12 +78,13 @@ async def test_starter_project_can_run_successfully(client):
# Get all the starter projects
projects = session.exec(select(Flow).where(Flow.folder == STARTER_FOLDER_NAME)).all()
-
- graphs: list[tuple[str, Graph]] = [
- (project.name, Graph.from_payload(project.data, flow_id=project.id))
- for project in projects
- if "Document" not in project.name or "RAG" not in project.name
- ]
+ graphs: list[tuple[str, Graph]] = []
+ for project in projects:
+ # Add tweaks to make file_path work
+ tweaks = {"path": __file__}
+ graph_data = process_tweaks(project.data, tweaks)
+ graph_object = Graph.from_payload(graph_data, flow_id=project.id)
+ graphs.append((project.name, graph_object))
assert len(graphs) == len(projects)
for name, graph in graphs:
outputs = await graph.arun(
diff --git a/tests/test_process.py b/tests/test_process.py
index 2548e9215..64404d9dd 100644
--- a/tests/test_process.py
+++ b/tests/test_process.py
@@ -1,5 +1,4 @@
import pytest
-
from langflow.processing.process import process_tweaks
from langflow.services.deps import get_session_service
@@ -284,12 +283,12 @@ async def test_load_langchain_object_with_no_cached_session(client, basic_graph_
session_id = session_service.build_key(session_id1, basic_graph_data)
graph1, artifacts1 = await session_service.load_session(session_id, data_graph=basic_graph_data, flow_id="flow_id")
# Clear the cache
- session_service.clear_session(session_id)
- # Use the new session_id to get the langchain_object again
+ await session_service.clear_session(session_id)
+ # Use the new session_id to get the graph again
graph2, artifacts2 = await session_service.load_session(session_id, data_graph=basic_graph_data, flow_id="flow_id")
- assert id(graph1) != id(graph2)
# Since the cache was cleared, objects should be different
+ assert id(graph1) != id(graph2)
@pytest.mark.asyncio