diff --git a/.github/actions/poetry_caching/action.yml b/.github/actions/poetry_caching/action.yml index e185e7094..4bb6415ac 100644 --- a/.github/actions/poetry_caching/action.yml +++ b/.github/actions/poetry_caching/action.yml @@ -74,7 +74,7 @@ runs: if: steps.cache-bin-poetry.outputs.cache-hit != 'true' shell: bash env: - POETRY_VERSION: ${{ inputs.poetry-version }} + POETRY_VERSION: ${{ inputs.poetry-version || env.POETRY_VERSION }} PYTHON_VERSION: ${{ inputs.python-version }} # Install poetry using the python version installed by setup-python step. run: | diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index ef3c8f698..e1b806ccf 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -25,7 +25,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install poetry - run: pipx install poetry==$POETRY_VERSION + run: pipx install poetry==${{ env.POETRY_VERSION }} - name: Set up Python 3.12 uses: actions/setup-python@v5 with: diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 8cb6d0a8d..f480d3214 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -19,6 +19,8 @@ on: options: - base - main +env: + POETRY_VERSION: "1.8.2" jobs: docker_build: @@ -54,6 +56,28 @@ jobs: tags: ${{ env.TAGS }} - name: Wait for Docker Hub to propagate run: sleep 120 + - name: Build and push (backend) + if: ${{ inputs.release_type == 'main' }} + uses: docker/build-push-action@v5 + with: + context: . + push: true + file: ./docker/build_and_push_backend.Dockerfile + build-args: | + LANGFLOW_IMAGE=langflowai/langflow:${{ inputs.version }} + tags: | + langflowai/langflow-backend:${{ inputs.version }} + langflowai/langflow-backend:1.0-alpha + - name: Build and push (frontend) + if: ${{ inputs.release_type == 'main' }} + uses: docker/build-push-action@v5 + with: + context: . 
+ push: true + file: ./docker/frontend/build_and_push_frontend.Dockerfile + tags: | + langflowai/langflow-frontend:${{ inputs.version }} + langflowai/langflow-frontend:1.0-alpha restart-space: runs-on: ubuntu-latest @@ -76,6 +100,4 @@ jobs: - name: Restart HuggingFace Spaces Build run: | - poetry run python ./scripts/factory_restart_space.py - env: - HUGGINGFACE_API_TOKEN: ${{ secrets.HUGGINGFACE_API_TOKEN }} + poetry run python ./scripts/factory_restart_space.py --space "Langflow/Langflow-Preview" --token ${{ secrets.HUGGINGFACE_API_TOKEN }} diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml new file mode 100644 index 000000000..f46010358 --- /dev/null +++ b/.github/workflows/docker_test.yml @@ -0,0 +1,61 @@ +name: Test Docker images + +on: + push: + branches: [main] + paths: + - "docker/**" + - "poetry.lock" + - "pyproject.toml" + - "src/backend/**" + pull_request: + branches: [dev] + paths: + - "docker/**" + - "poetry.lock" + - "pyproject.toml" + - "src/**" + +env: + POETRY_VERSION: "1.8.2" + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Build image + run: | + docker build -t langflowai/langflow:latest-dev \ + -f docker/build_and_push.Dockerfile \ + . + - name: Test image + run: | + expected_version=$(cat pyproject.toml | grep version | head -n 1 | cut -d '"' -f 2) + version=$(docker run --rm --entrypoint bash langflowai/langflow:latest-dev -c 'python -c "from langflow.version import __version__ as langflow_version; print(langflow_version)"') + if [ "$expected_version" != "$version" ]; then + echo "Expected version: $expected_version" + echo "Actual version: $version" + exit 1 + fi + + - name: Build backend image + run: | + docker build -t langflowai/langflow-backend:latest-dev \ + --build-arg LANGFLOW_IMAGE=langflowai/langflow:latest-dev \ + -f docker/build_and_push_backend.Dockerfile \ + . 
+ - name: Test backend image + run: | + expected_version=$(cat pyproject.toml | grep version | head -n 1 | cut -d '"' -f 2) + version=$(docker run --rm --entrypoint bash langflowai/langflow-backend:latest-dev -c 'python -c "from langflow.version import __version__ as langflow_version; print(langflow_version)"') + if [ "$expected_version" != "$version" ]; then + echo "Expected version: $expected_version" + echo "Actual version: $version" + exit 1 + fi + - name: Build frontend image + run: | + docker build -t langflowai/langflow-frontend:latest-dev \ + -f docker/frontend/build_and_push_frontend.Dockerfile \ + . diff --git a/.github/workflows/pre-release-base.yml b/.github/workflows/pre-release-base.yml index d087fc183..6045038be 100644 --- a/.github/workflows/pre-release-base.yml +++ b/.github/workflows/pre-release-base.yml @@ -22,7 +22,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install poetry - run: pipx install poetry==$POETRY_VERSION + run: pipx install poetry==${{ env.POETRY_VERSION }} - name: Set up Python 3.10 uses: actions/setup-python@v5 with: diff --git a/.github/workflows/pre-release-langflow.yml b/.github/workflows/pre-release-langflow.yml index 82cb580f3..f3909f7b1 100644 --- a/.github/workflows/pre-release-langflow.yml +++ b/.github/workflows/pre-release-langflow.yml @@ -26,7 +26,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install poetry - run: pipx install poetry==$POETRY_VERSION + run: pipx install poetry==${{ env.POETRY_VERSION }} - name: Set up Python 3.10 uses: actions/setup-python@v5 with: @@ -82,6 +82,28 @@ jobs: tags: | langflowai/langflow:${{ needs.release.outputs.version }} langflowai/langflow:1.0-alpha + - name: Build and push (frontend) + uses: docker/build-push-action@v5 + with: + context: . 
+ push: true + file: ./docker/frontend/build_and_push_frontend.Dockerfile + tags: | + langflowai/langflow-frontend:${{ needs.release.outputs.version }} + langflowai/langflow-frontend:1.0-alpha + - name: Wait for Docker Hub to propagate + run: sleep 120 + - name: Build and push (backend) + uses: docker/build-push-action@v5 + with: + context: . + push: true + file: ./docker/build_and_push_backend.Dockerfile + build-args: | + LANGFLOW_IMAGE=langflowai/langflow:${{ needs.release.outputs.version }} + tags: | + langflowai/langflow-backend:${{ needs.release.outputs.version }} + langflowai/langflow-backend:1.0-alpha create_release: name: Create Release diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index b72def8b3..8cb0bc90e 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -29,7 +29,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install poetry - run: pipx install poetry==$POETRY_VERSION + run: pipx install poetry==${{ env.POETRY_VERSION }} - name: Set up Python 3.10 uses: actions/setup-python@v5 with: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 06df72e9f..851f06424 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install poetry - run: pipx install poetry==$POETRY_VERSION + run: pipx install poetry==${{ env.POETRY_VERSION }} - name: Set up Python 3.10 uses: actions/setup-python@v5 with: @@ -54,6 +54,28 @@ jobs: tags: | langflowai/langflow:${{ steps.check-version.outputs.version }} langflowai/langflow:latest + - name: Wait for Docker Hub to propagate + run: sleep 120 + - name: Build and push (backend) + uses: docker/build-push-action@v5 + with: + context: . 
+ push: true + file: ./docker/build_and_push_backend.Dockerfile + build-args: | + LANGFLOW_IMAGE=langflowai/langflow:${{ steps.check-version.outputs.version }} + tags: | + langflowai/langflow-backend:${{ steps.check-version.outputs.version }} + langflowai/langflow-backend:latest + - name: Build and push (frontend) + uses: docker/build-push-action@v5 + with: + context: . + push: true + file: ./docker/frontend/build_and_push_frontend.Dockerfile + tags: | + langflowai/langflow-frontend:${{ steps.check-version.outputs.version }} + langflowai/langflow-frontend:latest - name: Create Release uses: ncipollo/release-action@v1 with: diff --git a/Makefile b/Makefile index f62ecb411..878eeca80 100644 --- a/Makefile +++ b/Makefile @@ -48,8 +48,8 @@ coverage: # allow passing arguments to pytest tests: - poetry run pytest tests --instafail $(args) -# Use like: + poetry run pytest tests --instafail -ra -n auto -m "not api_key_required" $(args) + format: poetry run ruff check . --fix diff --git a/README.PT.md b/README.PT.md new file mode 100644 index 000000000..6a12e8eef --- /dev/null +++ b/README.PT.md @@ -0,0 +1,172 @@ + + +# [![Langflow](./docs/static/img/hero.png)](https://www.langflow.org) + +

+ Um framework visual para criar aplicações multiagente e RAG

+

+ Open-source, construído em Python, totalmente personalizável, agnóstico em relação a modelos e bancos de dados

+ +

+ Docs - + Junte-se ao nosso Discord - + Siga-nos no X - + Demonstração +

+ +

+ + + + + + +

+ +
+ README em Inglês + README em Chinês Simplificado +
+ +

+ Seu GIF +

+ +# 📝 Conteúdo + +- [](#) +- [📝 Conteúdo](#-conteúdo) +- [📦 Introdução](#-introdução) +- [🎨 Criar Fluxos](#-criar-fluxos) +- [Deploy](#deploy) + - [Deploy usando Google Cloud Platform](#deploy-usando-google-cloud-platform) + - [Deploy on Railway](#deploy-on-railway) + - [Deploy on Render](#deploy-on-render) +- [🖥️ Interface de Linha de Comando (CLI)](#️-interface-de-linha-de-comando-cli) + - [Uso](#uso) + - [Variáveis de Ambiente](#variáveis-de-ambiente) +- [👋 Contribuir](#-contribuir) +- [🌟 Contribuidores](#-contribuidores) +- [📄 Licença](#-licença) + +# 📦 Introdução + +Você pode instalar o Langflow com pip: + +```shell +# Certifique-se de ter >=Python 3.10 instalado no seu sistema. +# Instale a versão pré-lançamento (recomendada para as atualizações mais recentes) +python -m pip install langflow --pre --force-reinstall + +# ou versão estável +python -m pip install langflow -U +``` + +Então, execute o Langflow com: + +```shell +python -m langflow run +``` + +Você também pode visualizar o Langflow no [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview). [Clone o Space usando este link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) para criar seu próprio workspace do Langflow em minutos. + +# 🎨 Criar Fluxos + +Criar fluxos com Langflow é fácil. Basta arrastar componentes da barra lateral para o canvas e conectá-los para começar a construir sua aplicação. + +Explore editando os parâmetros do prompt, agrupando componentes e construindo seus próprios componentes personalizados (Custom Components). + +Quando terminar, você pode exportar seu fluxo como um arquivo JSON. + +Carregue o fluxo com: + +```python +from langflow.load import run_flow_from_json + +results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!") +``` + +# Deploy + +## Deploy usando Google Cloud Platform + +Siga nosso passo a passo para fazer deploy do Langflow no Google Cloud Platform (GCP) usando o Google Cloud Shell. 
O guia está disponível no documento [**Langflow on Google Cloud Platform**](https://github.com/langflow-ai/langflow/blob/dev/docs/docs/deployment/gcp-deployment.md). + +Alternativamente, clique no botão **"Open in Cloud Shell"** abaixo para iniciar o Google Cloud Shell, clonar o repositório do Langflow e começar um **tutorial interativo** que o guiará pelo processo de configuração dos recursos necessários e deploy do Langflow no seu projeto GCP. + +[![Open on Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md) + +## Deploy on Railway + +Use este template para implantar o Langflow 1.0 Preview no Railway: + +[![Deploy 1.0 Preview on Railway](https://railway.app/button.svg)](https://railway.app/template/UsJ1uB?referralCode=MnPSdg) + +Ou este para implantar o Langflow 0.6.x: + +[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/JMXEWp?referralCode=MnPSdg) + +## Deploy on Render + + +Deploy to Render + + +# 🖥️ Interface de Linha de Comando (CLI) + +O Langflow fornece uma interface de linha de comando (CLI) para fácil gerenciamento e configuração. + +## Uso + +Você pode executar o Langflow usando o seguinte comando: + +```shell +langflow run [OPTIONS] +``` + +Cada opção é detalhada abaixo: + +- `--help`: Exibe todas as opções disponíveis. +- `--host`: Define o host para vincular o servidor. Pode ser configurado usando a variável de ambiente `LANGFLOW_HOST`. O padrão é `127.0.0.1`. +- `--workers`: Define o número de processos. Pode ser configurado usando a variável de ambiente `LANGFLOW_WORKERS`. O padrão é `1`. +- `--timeout`: Define o tempo limite do worker em segundos. O padrão é `60`. +- `--port`: Define a porta para escutar. Pode ser configurado usando a variável de ambiente `LANGFLOW_PORT`. O padrão é `7860`. 
+- `--env-file`: Especifica o caminho para o arquivo .env contendo variáveis de ambiente. O padrão é `.env`. +- `--log-level`: Define o nível de log. Pode ser configurado usando a variável de ambiente `LANGFLOW_LOG_LEVEL`. O padrão é `critical`. +- `--components-path`: Especifica o caminho para o diretório contendo componentes personalizados. Pode ser configurado usando a variável de ambiente `LANGFLOW_COMPONENTS_PATH`. O padrão é `langflow/components`. +- `--log-file`: Especifica o caminho para o arquivo de log. Pode ser configurado usando a variável de ambiente `LANGFLOW_LOG_FILE`. O padrão é `logs/langflow.log`. +- `--cache`: Seleciona o tipo de cache a ser usado. As opções são `InMemoryCache` e `SQLiteCache`. Pode ser configurado usando a variável de ambiente `LANGFLOW_LANGCHAIN_CACHE`. O padrão é `SQLiteCache`. +- `--dev/--no-dev`: Alterna o modo de desenvolvimento. O padrão é `no-dev`. +- `--path`: Especifica o caminho para o diretório frontend contendo os arquivos de build. Esta opção é apenas para fins de desenvolvimento. Pode ser configurado usando a variável de ambiente `LANGFLOW_FRONTEND_PATH`. +- `--open-browser/--no-open-browser`: Alterna a opção de abrir o navegador após iniciar o servidor. Pode ser configurado usando a variável de ambiente `LANGFLOW_OPEN_BROWSER`. O padrão é `open-browser`. +- `--remove-api-keys/--no-remove-api-keys`: Alterna a opção de remover as chaves de API dos projetos salvos no banco de dados. Pode ser configurado usando a variável de ambiente `LANGFLOW_REMOVE_API_KEYS`. O padrão é `no-remove-api-keys`. +- `--install-completion [bash|zsh|fish|powershell|pwsh]`: Instala a conclusão para o shell especificado. +- `--show-completion [bash|zsh|fish|powershell|pwsh]`: Exibe a conclusão para o shell especificado, permitindo que você copie ou personalize a instalação. +- `--backend-only`: Este parâmetro, com valor padrão `False`, permite executar apenas o servidor backend sem o frontend. 
Também pode ser configurado usando a variável de ambiente `LANGFLOW_BACKEND_ONLY`. +- `--store`: Este parâmetro, com valor padrão `True`, ativa os recursos da loja, use `--no-store` para desativá-los. Pode ser configurado usando a variável de ambiente `LANGFLOW_STORE`. + +Esses parâmetros são importantes para usuários que precisam personalizar o comportamento do Langflow, especialmente em cenários de desenvolvimento ou deploy especializado. + +### Variáveis de Ambiente + +Você pode configurar muitas das opções de CLI usando variáveis de ambiente. Estas podem ser exportadas no seu sistema operacional ou adicionadas a um arquivo `.env` e carregadas usando a opção `--env-file`. + +Um arquivo de exemplo `.env` chamado `.env.example` está incluído no projeto. Copie este arquivo para um novo arquivo chamado `.env` e substitua os valores de exemplo pelas suas configurações reais. Se você estiver definindo valores tanto no seu sistema operacional quanto no arquivo `.env`, as configurações do `.env` terão precedência. + +# 👋 Contribuir + +Aceitamos contribuições de desenvolvedores de todos os níveis para nosso projeto open-source no GitHub. Se você deseja contribuir, por favor, confira nossas [diretrizes de contribuição](./CONTRIBUTING.md) e ajude a tornar o Langflow mais acessível. + +--- + +[![Star History Chart](https://api.star-history.com/svg?repos=langflow-ai/langflow&type=Timeline)](https://star-history.com/#langflow-ai/langflow&Date) + +# 🌟 Contribuidores + +[![langflow contributors](https://contrib.rocks/image?repo=langflow-ai/langflow)](https://github.com/langflow-ai/langflow/graphs/contributors) + +# 📄 Licença + +O Langflow é lançado sob a licença MIT. Veja o arquivo [LICENSE](LICENSE) para detalhes. diff --git a/README.md b/README.md index 3c29e83e2..940855bb1 100644 --- a/README.md +++ b/README.md @@ -25,13 +25,17 @@

+
+ README in English + README in Simplified Chinese +
+

Your GIF

# 📝 Content -- [](#) - [📝 Content](#-content) - [📦 Get Started](#-get-started) - [🎨 Create Flows](#-create-flows) diff --git a/README.zh_CN.md b/README.zh_CN.md new file mode 100644 index 000000000..fee764902 --- /dev/null +++ b/README.zh_CN.md @@ -0,0 +1,172 @@ + + +# [![Langflow](./docs/static/img/hero.png)](https://www.langflow.org) + +

+ 一种用于构建多智能体和RAG应用的可视化框架 +

+

+ 开源、Python驱动、完全可定制,不依赖于特定的大模型和向量数据库

+ +

+ 文档 - + 加入我们的Discord社区 - + 在X上关注我们 - + 在线体验 +

+ +

+ + + + + + +

+ +
+ README in English + README in Simplified Chinese +
+ +

+ Your GIF +

+ +# 📝 目录 + +- [📝 目录](#-目录) +- [📦 快速开始](#-快速开始) +- [🎨 创建工作流](#-创建工作流) +- [部署](#部署) + - [在Google Cloud Platform上部署Langflow](#在google-cloud-platform上部署langflow) + - [在Railway上部署](#在railway上部署) + - [在Render上部署](#在render上部署) +- [🖥️ 命令行界面 (CLI)](#️-命令行界面-cli) + - [用法](#用法) + - [环境变量](#环境变量) +- [👋 贡献](#-贡献) +- [🌟 贡献者](#-贡献者) +- [📄 许可证](#-许可证) + +# 📦 快速开始 + +使用 pip 安装 Langflow: + +```shell +# 确保您的系统已经安装上>=Python 3.10 +# 安装Langflow预发布版本 +python -m pip install langflow --pre --force-reinstall + +# 安装Langflow稳定版本 +python -m pip install langflow -U +``` + +然后运行Langflow: + +```shell +python -m langflow run +``` + +您可以在[HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview)中在线体验 Langflow,也可以使用该链接[克隆空间](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true),在几分钟内创建您自己的 Langflow 运行工作空间。 + +# 🎨 创建工作流 + +使用 Langflow 来创建工作流非常简单。只需从侧边栏拖动组件到画布上,然后连接组件即可开始构建应用程序。 + +您可以通过编辑提示参数、将组件分组到单个高级组件中以及构建您自己的自定义组件来展开探索。 + +完成后,可以将工作流导出为 JSON 文件。 + +然后使用以下脚本加载工作流: + +```python +from langflow.load import run_flow_from_json + +results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!") +``` + +# 部署 + +## 在Google Cloud Platform上部署Langflow + +请按照我们的分步指南使用 Google Cloud Shell 在 Google Cloud Platform (GCP) 上部署 Langflow。该指南在 [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) 文档中提供。 + +或者,点击下面的 "Open in Cloud Shell" 按钮,启动 Google Cloud Shell,克隆 Langflow 仓库,并开始一个互动教程,该教程将指导您设置必要的资源并在 GCP 项目中部署 Langflow。 + +[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md) + +## 在Railway上部署 + +使用此模板在 Railway 上部署 Langflow 1.0 预览版: + +[![Deploy 1.0 Preview on Railway](https://railway.app/button.svg)](https://railway.app/template/UsJ1uB?referralCode=MnPSdg) + +或者使用此模板部署 Langflow 0.6.x: + +[![Deploy on 
Railway](https://railway.app/button.svg)](https://railway.app/template/JMXEWp?referralCode=MnPSdg) + +## 在Render上部署 + + +Deploy to Render + + +# 🖥️ 命令行界面 (CLI) + +Langflow提供了一个命令行界面以便于平台的管理和配置。 + +## 用法 + +您可以使用以下命令运行Langflow: + +```shell +langflow run [OPTIONS] +``` + +命令行参数的详细说明: + +- `--help`: 显示所有可用参数。 +- `--host`: 定义绑定服务器的主机host参数,可以使用 LANGFLOW_HOST 环境变量设置,默认值为 127.0.0.1。 +- `--workers`: 设置工作进程的数量,可以使用 LANGFLOW_WORKERS 环境变量设置,默认值为 1。 +- `--timeout`: 设置工作进程的超时时间(秒),默认值为 60。 +- `--port`: 设置服务监听的端口,可以使用 LANGFLOW_PORT 环境变量设置,默认值为 7860。 +- `--config`: 定义配置文件的路径,默认值为 config.yaml。 +- `--env-file`: 指定包含环境变量的 .env 文件路径,默认值为 .env。 +- `--log-level`: 定义日志记录级别,可以使用 LANGFLOW_LOG_LEVEL 环境变量设置,默认值为 critical。 +- `--components-path`: 指定包含自定义组件的目录路径,可以使用 LANGFLOW_COMPONENTS_PATH 环境变量设置,默认值为 langflow/components。 +- `--log-file`: 指定日志文件的路径,可以使用 LANGFLOW_LOG_FILE 环境变量设置,默认值为 logs/langflow.log。 +- `--cache`: 选择要使用的缓存类型,可选项为 InMemoryCache 和 SQLiteCache,可以使用 LANGFLOW_LANGCHAIN_CACHE 环境变量设置,默认值为 SQLiteCache。 +- `--dev/--no-dev`: 切换开发/非开发模式,默认值为 no-dev即非开发模式。 +- `--path`: 指定包含前端构建文件的目录路径,此参数仅用于开发目的,可以使用 LANGFLOW_FRONTEND_PATH 环境变量设置。 +- `--open-browser/--no-open-browser`: 切换启动服务器后是否打开浏览器,可以使用 LANGFLOW_OPEN_BROWSER 环境变量设置,默认值为 open-browser即启动后打开浏览器。 +- `--remove-api-keys/--no-remove-api-keys`: 切换是否从数据库中保存的项目中移除 API 密钥,可以使用 LANGFLOW_REMOVE_API_KEYS 环境变量设置,默认值为 no-remove-api-keys。 +- `--install-completion [bash|zsh|fish|powershell|pwsh]`: 为指定的 shell 安装自动补全。 +- `--show-completion [bash|zsh|fish|powershell|pwsh]`: 显示指定 shell 的自动补全,使您可以复制或自定义安装。 +- `--backend-only`: 此参数默认为 False,允许仅运行后端服务器而不运行前端,也可以使用 LANGFLOW_BACKEND_ONLY 环境变量设置。 +- `--store`: 此参数默认为 True,启用存储功能,使用 --no-store 可禁用它,可以使用 LANGFLOW_STORE 环境变量配置。 + +这些参数对于需要定制 Langflow 行为的用户尤其重要,特别是在开发或者特殊部署场景中。 + +### 环境变量 + +您可以使用环境变量配置许多 CLI 参数选项。这些变量可以在操作系统中导出,或添加到 .env 文件中,并使用 --env-file 参数加载。 + +项目中包含一个名为 .env.example 的示例 .env 文件。将此文件复制为新文件 .env,并用实际设置值替换示例值。如果同时在操作系统和 .env 文件中设置值,则 .env 设置优先。 + +# 👋 贡献 + +我们欢迎各级开发者为我们的 GitHub 开源项目做出贡献,并帮助 
Langflow 更加易用,如果您想参与贡献,请查看我们的贡献指南 [contributing guidelines](./CONTRIBUTING.md) 。 + +--- + +[![Star History Chart](https://api.star-history.com/svg?repos=langflow-ai/langflow&type=Timeline)](https://star-history.com/#langflow-ai/langflow&Date) + +# 🌟 贡献者 + +[![langflow contributors](https://contrib.rocks/image?repo=langflow-ai/langflow)](https://github.com/langflow-ai/langflow/graphs/contributors) + +# 📄 许可证 + +Langflow 以 MIT 许可证发布。有关详细信息,请参阅 [LICENSE](LICENSE) 文件。 diff --git a/docker/build_and_push.Dockerfile b/docker/build_and_push.Dockerfile index 3a34db188..cabc1a753 100644 --- a/docker/build_and_push.Dockerfile +++ b/docker/build_and_push.Dockerfile @@ -1,21 +1,13 @@ - - # syntax=docker/dockerfile:1 # Keep this syntax directive! It's used to enable Docker BuildKit -# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865 -# but I try to keep it updated (see history) - ################################ -# PYTHON-BASE -# Sets up all our shared environment variables +# BUILDER-BASE +# Used to build deps + create our virtual environment ################################ -FROM python:3.12-slim as python-base +FROM python:3.12-slim as builder-base -# python -ENV PYTHONUNBUFFERED=1 \ - # prevents python creating .pyc files - PYTHONDONTWRITEBYTECODE=1 \ +ENV PYTHONDONTWRITEBYTECODE=1 \ \ # pip PIP_DISABLE_PIP_VERSION_CHECK=on \ @@ -37,56 +29,49 @@ ENV PYTHONUNBUFFERED=1 \ PYSETUP_PATH="/opt/pysetup" \ VENV_PATH="/opt/pysetup/.venv" - -# prepend poetry and venv to path -ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" - - -################################ -# BUILDER-BASE -# Used to build deps + create our virtual environment -################################ -FROM python-base as builder-base - RUN apt-get update \ && apt-get install --no-install-recommends -y \ # deps for installing poetry curl \ # deps for building python deps - build-essential \ - # npm - npm \ + build-essential npm \ # gcc gcc \ && apt-get clean \ && rm -rf 
/var/lib/apt/lists/* - - -# Now we need to copy the entire project into the image -WORKDIR /app -COPY pyproject.toml poetry.lock ./ -COPY src ./src -COPY scripts ./scripts -COPY Makefile ./ -COPY README.md ./ RUN --mount=type=cache,target=/root/.cache \ curl -sSL https://install.python-poetry.org | python3 - -RUN useradd -m -u 1000 user && \ - mkdir -p /app/langflow && \ - chown -R user:user /app && \ - chmod -R u+w /app/langflow -# Update PATH with home/user/.local/bin -ENV PATH="/home/user/.local/bin:${PATH}" -RUN python -m pip install requests && cd ./scripts && python update_dependencies.py -RUN $POETRY_HOME/bin/poetry lock -RUN $POETRY_HOME/bin/poetry build +WORKDIR /app +COPY pyproject.toml poetry.lock README.md ./ +COPY src/ ./src +COPY scripts/ ./scripts + +RUN python -m pip install requests --user && cd ./scripts && python update_dependencies.py +RUN $POETRY_HOME/bin/poetry lock --no-update \ + && $POETRY_HOME/bin/poetry install --no-interaction --no-ansi -E deploy \ + && $POETRY_HOME/bin/poetry build -f wheel \ + && $POETRY_HOME/bin/poetry run pip install dist/*.whl + +################################ +# RUNTIME +# Setup user, utilities and copy the virtual environment only +################################ +FROM python:3.12-slim as runtime + +LABEL org.opencontainers.image.title=langflow +LABEL org.opencontainers.image.authors=['Langflow'] +LABEL org.opencontainers.image.licenses=MIT +LABEL org.opencontainers.image.url=https://github.com/langflow-ai/langflow +LABEL org.opencontainers.image.source=https://github.com/langflow-ai/langflow + +RUN useradd user -u 1000 -g 0 --no-create-home --home-dir /app/data +COPY --from=builder-base --chown=1000 /app/.venv /app/.venv +ENV PATH="/app/.venv/bin:${PATH}" -# Copy virtual environment and built .tar.gz from builder base USER user -# Install the package from the .tar.gz -RUN python -m pip install /app/dist/*.tar.gz --user +WORKDIR /app ENTRYPOINT ["python", "-m", "langflow", "run"] -CMD ["--host", "0.0.0.0", 
"--port", "7860"] +CMD ["--host", "0.0.0.0", "--port", "7860"] \ No newline at end of file diff --git a/docker/build_and_push_backend.Dockerfile b/docker/build_and_push_backend.Dockerfile new file mode 100644 index 000000000..8b82da524 --- /dev/null +++ b/docker/build_and_push_backend.Dockerfile @@ -0,0 +1,8 @@ +# syntax=docker/dockerfile:1 +# Keep this syntax directive! It's used to enable Docker BuildKit + +ARG LANGFLOW_IMAGE +FROM $LANGFLOW_IMAGE + +RUN rm -rf /app/.venv/langflow/frontend +CMD ["--host", "0.0.0.0", "--port", "7860", "--backend-only"] diff --git a/docker/frontend/build_and_push_frontend.Dockerfile b/docker/frontend/build_and_push_frontend.Dockerfile new file mode 100644 index 000000000..e954a801e --- /dev/null +++ b/docker/frontend/build_and_push_frontend.Dockerfile @@ -0,0 +1,27 @@ +# syntax=docker/dockerfile:1 +# Keep this syntax directive! It's used to enable Docker BuildKit + +################################ +# BUILDER-BASE +################################ +FROM node:lts-bookworm-slim as builder-base +COPY src/frontend /frontend + +RUN cd /frontend && npm install && npm run build + +################################ +# RUNTIME +################################ +FROM nginxinc/nginx-unprivileged:stable-bookworm-perl as runtime + +LABEL org.opencontainers.image.title=langflow-frontend +LABEL org.opencontainers.image.authors=['Langflow'] +LABEL org.opencontainers.image.licenses=MIT +LABEL org.opencontainers.image.url=https://github.com/langflow-ai/langflow +LABEL org.opencontainers.image.source=https://github.com/langflow-ai/langflow + +COPY --from=builder-base --chown=nginx /frontend/build /usr/share/nginx/html +COPY --chown=nginx ./docker/frontend/nginx.conf /etc/nginx/conf.d/default.conf +COPY --chown=nginx ./docker/frontend/start-nginx.sh /start-nginx.sh +RUN chmod +x /start-nginx.sh +ENTRYPOINT ["/start-nginx.sh"] \ No newline at end of file diff --git a/docker/frontend/nginx.conf b/docker/frontend/nginx.conf new file mode 100644 index 
000000000..d5ecfce43 --- /dev/null +++ b/docker/frontend/nginx.conf @@ -0,0 +1,22 @@ +server { + gzip on; + gzip_comp_level 2; + gzip_min_length 1000; + gzip_types text/xml text/css; + gzip_http_version 1.1; + gzip_vary on; + gzip_disable "MSIE [4-6] \."; + + listen 80; + + location / { + root /usr/share/nginx/html; + index index.html index.htm; + try_files $uri $uri/ /index.html =404; + } + location /api { + proxy_pass __BACKEND_URL__; + } + + include /etc/nginx/extra-conf.d/*.conf; +} diff --git a/docker/frontend/start-nginx.sh b/docker/frontend/start-nginx.sh new file mode 100644 index 000000000..3607adf7d --- /dev/null +++ b/docker/frontend/start-nginx.sh @@ -0,0 +1,16 @@ +#!/bin/sh +set -e +trap 'kill -TERM $PID' TERM INT +if [ -z "$BACKEND_URL" ]; then + BACKEND_URL="$1" +fi +if [ -z "$BACKEND_URL" ]; then + echo "BACKEND_URL must be set as an environment variable or as first parameter. (e.g. http://localhost:7860)" + exit 1 +fi +sed -i "s|__BACKEND_URL__|$BACKEND_URL|g" /etc/nginx/conf.d/default.conf +cat /etc/nginx/conf.d/default.conf + + +# Start nginx +exec nginx -g 'daemon off;' diff --git a/docs/docs/administration/global-env.mdx b/docs/docs/administration/global-env.mdx index c23ca8dd1..51e5d633e 100644 --- a/docs/docs/administration/global-env.mdx +++ b/docs/docs/administration/global-env.mdx @@ -1,31 +1,39 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; -import Admonition from "@theme/Admonition"; import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; -# Global Environment Variables +# Global Variables -Langflow 1.0 alpha includes the option to add **Global Environment Variables** for your application. +Global Variables are a useful feature of Langflow, allowing you to define reusable variables accessed from any Text field in your project. 
-## Add a global variable to a project +## TL;DR -In this example, you'll add the `openai_api_key` credential as a global environment variable to the **Basic Prompting** starter project. +- Global Variables are reusable variables accessible from any Text field in your project. +- To create one, click the 🌐 button in a Text field and then **+ Add New Variable**. +- Define the **Name**, **Type**, and **Value** of the variable. +- Click **Save Variable** to create it. +- All Credential Global Variables are encrypted and accessible only by you. +- Set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`true`_ in your `.env` file to add all variables in _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ to your user's Global Variables. -For more information on the starter flow, see [Basic prompting](../starter-projects/basic-prompting.mdx). +## Creating and Adding a Global Variable -1. From the Langflow dashboard, click **New Project**. -2. Select **Basic Prompting**. +To create and add a global variable, click the 🌐 button in a Text field, and then click **+ Add New Variable**. -The **Basic Prompting** flow is created. +Text fields are where you write text without opening a Text area, and are identified with the 🌐 icon. -3. To create an environment variable for the **OpenAI** component: - 1. In the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**. - 2. In the **Variable Name** field, enter `openai_api_key`. - 3. In the **Value** field, paste your OpenAI API Key (`sk-...`). - 4. For the variable **Type**, select **Credential**. - 5. In the **Apply to Fields** field, select **OpenAI API Key** to apply this variable to all fields named **OpenAI API Key**. - 6. Click **Save Variable**. +For example, to create an environment variable for the **OpenAI** component: + +1. In the **OpenAI API Key** text field, click the 🌐 button, then **Add New Variable**. +2. Enter `openai_api_key` in the **Variable Name** field. +3. 
Paste your OpenAI API Key (`sk-...`) in the **Value** field. +4. Select **Credential** for the **Type**. +5. Choose **OpenAI API Key** in the **Apply to Fields** field to apply this variable to all fields named **OpenAI API Key**. +6. Click **Save Variable**. You now have a `openai_api_key` global environment variable for your Langflow project. +Subsequently, clicking the 🌐 button in a Text field will display the new variable in the dropdown. You can also create global variables in **Settings** > **Variables and @@ -41,10 +49,55 @@ You now have a `openai_api_key` global environment variable for your Langflow pr style={{ width: "40%", margin: "20px auto" }} /> -4. To view and manage your project's global environment variables, visit **Settings** > **Variables and Secrets**. +To view and manage your project's global environment variables, visit **Settings** > **Variables and Secrets**. For more on variables in HuggingFace Spaces, see [Managing Secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets). +{/* All variables are encrypted */} + + + All Credential Global Variables are encrypted and accessible only by you. + + +## Configuring Environment Variables in your .env file + +Setting `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` to `true` in your `.env` file (default) adds all variables in `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` to your user's Global Variables. + +These variables are accessible like any other Global Variable. + + + To prevent this behavior, set `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` to + `false` in your `.env` file. + + +You can specify variables to get from the environment by listing them in `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`. + +Specify variables as a comma-separated list (e.g., _`"VARIABLE1, VARIABLE2"`_) or a JSON-encoded string (e.g., _`'["VARIABLE1", "VARIABLE2"]'`_). 
+ +The default list of variables includes: + +- ANTHROPIC_API_KEY +- ASTRA_DB_API_ENDPOINT +- ASTRA_DB_APPLICATION_TOKEN +- AZURE_OPENAI_API_KEY +- AZURE_OPENAI_API_DEPLOYMENT_NAME +- AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME +- AZURE_OPENAI_API_INSTANCE_NAME +- AZURE_OPENAI_API_VERSION +- COHERE_API_KEY +- GOOGLE_API_KEY +- GROQ_API_KEY +- HUGGINGFACEHUB_API_TOKEN +- OPENAI_API_KEY +- PINECONE_API_KEY +- SEARCHAPI_API_KEY +- SERPAPI_API_KEY +- UPSTASH_VECTOR_REST_URL +- UPSTASH_VECTOR_REST_TOKEN +- VECTARA_CUSTOMER_ID +- VECTARA_CORPUS_ID +- VECTARA_API_KEY + ## Video
- Read the [Custom Component Guidelines](../administration/custom-component) for detailed information on custom components. + Read the [Custom Component Guidelines](../administration/custom-component) for + detailed information on custom components. Custom components let you extend Langflow by creating reusable and configurable components from a Python script. @@ -31,57 +32,60 @@ This class is the foundation for creating custom components. It allows users to The following types are supported in the build method: -| Supported Types | -| --------------------------------------------------------- | -| _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ | -| _`langflow.field_typing.NestedDict`_ | -| _`langflow.field_typing.Prompt`_ | -| _`langchain.chains.base.Chain`_ | -| _`langchain.PromptTemplate`_ | +| Supported Types | +| ----------------------------------------------------------------- | +| _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ | +| _`langflow.field_typing.NestedDict`_ | +| _`langflow.field_typing.Prompt`_ | +| _`langchain.chains.base.Chain`_ | +| _`langchain.PromptTemplate`_ | | _`from langchain.schema.language_model import BaseLanguageModel`_ | -| _`langchain.Tool`_ | -| _`langchain.document_loaders.base.BaseLoader`_ | -| _`langchain.schema.Document`_ | -| _`langchain.text_splitters.TextSplitter`_ | -| _`langchain.vectorstores.base.VectorStore`_ | -| _`langchain.embeddings.base.Embeddings`_ | -| _`langchain.schema.BaseRetriever`_ | +| _`langchain.Tool`_ | +| _`langchain.document_loaders.base.BaseLoader`_ | +| _`langchain.schema.Document`_ | +| _`langchain.text_splitters.TextSplitter`_ | +| _`langchain.vectorstores.base.VectorStore`_ | +| _`langchain.embeddings.base.Embeddings`_ | +| _`langchain.schema.BaseRetriever`_ | The difference between _`dict`_ and _`langflow.field_typing.NestedDict`_ is that one adds a simple key-value pair field, while the other opens a more robust dictionary editor. 
- Use the `Prompt` type by adding **kwargs to the build method. - If you want to add the values of the variables to the template you defined, format the `PromptTemplate` inside the `CustomComponent` class. + Use the `Prompt` type by adding **kwargs to the build method. If you want to + add the values of the variables to the template you defined, format the + `PromptTemplate` inside the `CustomComponent` class. - Use base Python types without a handle by default. To add handles, use the `input_types` key in the `build_config` method. + Use base Python types without a handle by default. To add handles, use the + `input_types` key in the `build_config` method. **build_config:** Defines the configuration fields of the component. This method returns a dictionary where each key represents a field name and each value defines the field's behavior. Supported keys for configuring fields: -| Key | Description | -| --------------------- | --------------------------------------------------- | -| `is_list` | Boolean indicating if the field can hold multiple values. | -| `options` | Dropdown menu options. | -| `multiline` | Boolean indicating if a field allows multiline input. | -| `input_types` | Allows connection handles for string fields. | -| `display_name` | Field name displayed in the UI. | -| `advanced` | Hides the field in the default UI view. | -| `password` | Masks input, useful for sensitive data. | -| `required` | Overrides the default behavior to make a field mandatory. | -| `info` | Tooltip for the field. | -| `file_types` | Accepted file types, useful for file fields. | -| `range_spec` | Defines valid ranges for float fields. | -| `title_case` | Boolean that controls field name capitalization. | -| `refresh_button` | Adds a refresh button that updates field values. | -| `real_time_refresh` | Updates the configuration as field values change. | -| `field_type` | Automatically set based on the build method's type hint. 
| +| Key | Description | +| ------------------- | --------------------------------------------------------- | +| `is_list` | Boolean indicating if the field can hold multiple values. | +| `options` | Dropdown menu options. | +| `multiline` | Boolean indicating if a field allows multiline input. | +| `input_types` | Allows connection handles for string fields. | +| `display_name` | Field name displayed in the UI. | +| `advanced` | Hides the field in the default UI view. | +| `password` | Masks input, useful for sensitive data. | +| `required` | Overrides the default behavior to make a field mandatory. | +| `info` | Tooltip for the field. | +| `file_types` | Accepted file types, useful for file fields. | +| `range_spec` | Defines valid ranges for float fields. | +| `title_case` | Boolean that controls field name capitalization. | +| `refresh_button` | Adds a refresh button that updates field values. | +| `real_time_refresh` | Updates the configuration as field values change. | +| `field_type` | Automatically set based on the build method's type hint. | - Use the `update_build_config` method to dynamically update configurations based on field values. + Use the `update_build_config` method to dynamically update configurations + based on field values. ## Additional methods and attributes @@ -99,4 +103,3 @@ The `CustomComponent` class also provides helpful methods for specific tasks (e. - `status`: Shows values from the `build` method, useful for debugging. - `field_order`: Controls the display order of fields. - `icon`: Sets the canvas display icon. 
- diff --git a/docs/docs/components/inputs-and-outputs.mdx b/docs/docs/components/inputs-and-outputs.mdx new file mode 100644 index 000000000..2a624221a --- /dev/null +++ b/docs/docs/components/inputs-and-outputs.mdx @@ -0,0 +1,161 @@ +import Admonition from "@theme/Admonition"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Inputs and Outputs + +TL;DR: Inputs and Outputs are a category of components that are used to define where data comes in and out of your flow. +They also dynamically change the Playground and can be renamed to facilitate building and maintaining your flows. + +## Inputs + +Inputs are components used to define where data enters your flow. They can receive data from the user, a database, or any other source that can be converted to Text or Record. + +The difference between Chat Input and other Input components is the output format, the number of configurable fields, and the way they are displayed in the Playground. + +Chat Input components can output `Text` or `Record`. When you want to pass the sender name or sender to the next component, use the `Record` output. To pass only the message, use the `Text` output, useful when saving the message to a database or memory system like Zep. + +You can find out more about Chat Input and other Inputs [here](#chat-input). + +### Chat Input + +This component collects user input from the chat. + +**Parameters** + +- **Sender Type:** Specifies the sender type. Defaults to `User`. Options are `Machine` and `User`. +- **Sender Name:** Specifies the name of the sender. Defaults to `User`. +- **Message:** Specifies the message text. It is a multiline text input. +- **Session ID:** Specifies the session ID of the chat history. If provided, the message will be saved in the Message History. + + +

+ If `As Record` is `true` and the `Message` is a `Record`, the data of the + `Record` will be updated with the `Sender`, `Sender Name`, and `Session ID`. +

+
+ + + +One significant capability of the Chat Input component is its ability to transform the Playground into a chat window. This feature is particularly valuable for scenarios requiring user input to initiate or influence the flow. + + + +### Text Input + +The **Text Input** component adds an **Input** field on the Playground. This enables you to define parameters while running and testing your flow. + +**Parameters** + +- **Value:** Specifies the text input value. This is where the user inputs text data that will be passed to the next component in the sequence. If no value is provided, it defaults to an empty string. +- **Record Template:** Specifies how a `Record` should be converted into `Text`. + +The **Record Template** field is used to specify how a `Record` should be converted into `Text`. This is particularly useful when you want to extract specific information from a `Record` and pass it as text to the next component in the sequence. + +For example, if you have a `Record` with the following structure: + +```json +{ + "name": "John Doe", + "age": 30, + "email": "johndoe@email.com" +} +``` + +A template with `Name: {name}, Age: {age}` will convert the `Record` into a text string of `Name: John Doe, Age: 30`. + +If you pass more than one `Record`, the text will be concatenated with a new line separator. + + + +## Outputs + +Outputs are components that are used to define where data comes out of your flow. They can be used to send data to the user, to the Playground, or to define how the data will be displayed in the Playground. + +The Chat Output works similarly to the Chat Input but does not have a field that allows for written input. It is used as an Output definition and can be used to send data to the user. + +You can find out more about it and the other Outputs [here](#chat-output). + +### Chat Output + +This component sends a message to the chat. + +**Parameters** + +- **Sender Type:** Specifies the sender type. Default is `"Machine"`. 
Options are `"Machine"` and `"User"`. + +- **Sender Name:** Specifies the sender's name. Default is `"AI"`. + +- **Session ID:** Specifies the session ID of the chat history. If provided, messages are saved in the Message History. + +- **Message:** Specifies the text of the message. + + +

+ If `As Record` is `true` and the `Message` is a `Record`, the data in the + `Record` is updated with the `Sender`, `Sender Name`, and `Session ID`. +

+
+ +### Text Output + +This component displays text data to the user. It is useful when you want to show text without sending it to the chat. + +**Parameters** + +- **Value:** Specifies the text data to be displayed. Defaults to an empty string. + +The `TextOutput` component provides a simple way to display text data. It allows textual data to be visible in the chat window during your interaction flow. + +## Prompts + +A prompt is the input provided to a language model, consisting of multiple components and can be parameterized using prompt templates. A prompt template offers a reproducible method for generating prompts, enabling easy customization through input variables. + +### Prompt + +This component creates a prompt template with dynamic variables. This is useful for structuring prompts and passing dynamic data to a language model. + +**Parameters** + +- **Template:** The template for the prompt. This field allows you to create other fields dynamically by using curly brackets `{}`. For example, if you have a template like `Hello {name}, how are you?`, a new field called `name` will be created. Prompt variables can be created with any name inside curly brackets, e.g. `{variable_name}`. + + + +### PromptTemplate + +The `PromptTemplate` component enables users to create prompts and define variables that control how the model is instructed. Users can input a set of variables which the template uses to generate the prompt when a conversation starts. + + + After defining a variable in the prompt template, it acts as its own component + input. See [Prompt Customization](../administration/prompt-customization) for + more details. + + +- **template:** The template used to format an individual request. 
diff --git a/docs/docs/components/inputs.mdx b/docs/docs/components/inputs.mdx deleted file mode 100644 index 854f7fee3..000000000 --- a/docs/docs/components/inputs.mdx +++ /dev/null @@ -1,99 +0,0 @@ -import Admonition from '@theme/Admonition'; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - -# Inputs - -## Chat Input - -This component obtains user input from the chat. - -**Parameters** - -- **Sender Type:** Specifies the sender type. Defaults to `User`. Options are `Machine` and `User`. -- **Sender Name:** Specifies the name of the sender. Defaults to `User`. -- **Message:** Specifies the message text. It is a multiline text input. -- **Session ID:** Specifies the session ID of the chat history. If provided, the message will be saved in the Message History. - - -

- If `As Record` is `true` and the `Message` is a `Record`, the data - of the `Record` will be updated with the `Sender`, `Sender Name`, and - `Session ID`. -

-
- - - -One significant capability of the Chat Input component is its ability to transform the Playground into a chat window. This feature is particularly valuable for scenarios requiring user input to initiate or influence the flow. - - - ---- - -## Prompt - -This component creates a prompt template with dynamic variables. This is useful for structuring prompts and passing dynamic data to a language model. - -**Parameters** - -- **Template:** The template for the prompt. This field allows you to create other fields dynamically by using curly brackets `{}`. For example, if you have a template like `Hello {name}, how are you?`, a new field called `name` will be created. Prompt variables can be created with any name inside curly brackets, e.g. `{variable_name}`. - - - ---- - -## Text Input - -The **Text Input** component adds an **Input** field on the Playground. This enables you to define parameters while running and testing your flow. - -**Parameters** - -- **Value:** Specifies the text input value. This is where the user inputs text data that will be passed to the next component in the sequence. If no value is provided, it defaults to an empty string. -- **Record Template:** Specifies how a `Record` should be converted into `Text`. - -The **Record Template** field is used to specify how a `Record` should be converted into `Text`. This is particularly useful when you want to extract specific information from a `Record` and pass it as text to the next component in the sequence. - -For example, if you have a `Record` with the following structure: - -```json -{ - "name": "John Doe", - "age": 30, - "email": "johndoe@email.com" -} -``` - -A template with `Name: {name}, Age: {age}` will convert the `Record` into a text string of `Name: John Doe, Age: 30`. - -If you pass more than one `Record`, the text will be concatenated with a new line separator. 
- - - diff --git a/docs/docs/components/outputs.mdx b/docs/docs/components/outputs.mdx deleted file mode 100644 index a8947e60e..000000000 --- a/docs/docs/components/outputs.mdx +++ /dev/null @@ -1,34 +0,0 @@ -import Admonition from '@theme/Admonition'; - -# Outputs - -## Chat Output - -This component sends a message to the chat. - -**Parameters** - -- **Sender Type:** Specifies the sender type. Default is `"Machine"`. Options are `"Machine"` and `"User"`. - -- **Sender Name:** Specifies the sender's name. Default is `"AI"`. - -- **Session ID:** Specifies the session ID of the chat history. If provided, messages are saved in the Message History. - -- **Message:** Specifies the text of the message. - - -

- If `As Record` is `true` and the `Message` is a `Record`, the data in the `Record` is updated with the `Sender`, `Sender Name`, and `Session ID`. -

-
- -## Text Output - -This component displays text data to the user. It is useful when you want to show text without sending it to the chat. - -**Parameters** - -- **Value:** Specifies the text data to be displayed. Defaults to an empty string. - - -The `TextOutput` component provides a simple way to display text data. It allows textual data to be visible in the chat window during your interaction flow. diff --git a/docs/docs/components/prompts.mdx b/docs/docs/components/prompts.mdx deleted file mode 100644 index 19fdedf11..000000000 --- a/docs/docs/components/prompts.mdx +++ /dev/null @@ -1,25 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# Prompts - - -

- Thank you for your patience as we refine our documentation. It may - still have some areas under development. Please share your feedback or report any issues to help us improve! -

-
- -A prompt is the input provided to a language model, consisting of multiple components and can be parameterized using prompt templates. A prompt template offers a reproducible method for generating prompts, enabling easy customization through input variables. - ---- - -### PromptTemplate - -The `PromptTemplate` component enables users to create prompts and define variables that control how the model is instructed. Users can input a set of variables which the template uses to generate the prompt when a conversation starts. - - - After defining a variable in the prompt template, it acts as its own component - input. See [Prompt Customization](../administration/prompt-customization) for more details. - - -- **template:** The template used to format an individual request. diff --git a/docs/docs/components/text-and-record.mdx b/docs/docs/components/text-and-record.mdx new file mode 100644 index 000000000..24c16e4aa --- /dev/null +++ b/docs/docs/components/text-and-record.mdx @@ -0,0 +1,49 @@ +# Text and Record + +In Langflow 1.0, we added two main input and output types: `Text` and `Record`. + +`Text` is a simple string input and output type, while `Record` is a structure very similar to a dictionary in Python. It is a key-value pair data structure. + +We've created a few components to help you work with these types. Let's see how a few of them work. + +## Records To Text + +This is a component that takes in Records and outputs a `Text`. It does this using a template string and concatenating the values of the `Record`, one per line. + +If we have the following Records: + +```json +{ + "sender_name": "Alice", + "message": "Hello!" +} +{ + "sender_name": "John", + "message": "Hi!" +} +``` + +And the template string is: _`{sender_name}: {message}`_ + +The output is: + +``` +Alice: Hello! +John: Hi! +``` + +## Create Record + +This component allows you to create a `Record` from a number of inputs. 
You can add as many key-value pairs as you want (as long as there are fewer than 15). Once you've picked that number, you'll need to write the name of the Key and can pass `Text` values from other components to it. + +## Documents To Records + +This component takes in a LangChain `Document` and outputs a `Record`. It does this by extracting the `page_content` and the `metadata` from the `Document` and adding them to the `Record` as text and data, respectively. + +## Why is this useful? + +The idea was to create a unified way to work with complex data in Langflow and to make it easier to work with data that is not just a simple string. This way you can create more complex workflows and use the data in more ways. + +## What's next? + +We are planning to integrate an array of modalities to Langflow, such as images, audio, and video. This will allow you to create even more complex workflows and use cases. Stay tuned for more updates! 🚀 diff --git a/docs/docs/components/vector-stores.mdx b/docs/docs/components/vector-stores.mdx index 7e21f1021..6072abe29 100644 --- a/docs/docs/components/vector-stores.mdx +++ b/docs/docs/components/vector-stores.mdx @@ -1,6 +1,6 @@ import Admonition from "@theme/Admonition"; -# Vector Stores Documentation +# Vector Stores ### Astra DB diff --git a/docs/docs/examples/chat-memory.mdx b/docs/docs/examples/chat-memory.mdx index d9b7d2e20..88dbbca2b 100644 --- a/docs/docs/examples/chat-memory.mdx +++ b/docs/docs/examples/chat-memory.mdx @@ -14,4 +14,4 @@ This component is available under the **Helpers** tab of the Langflow preview. style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }} > -
\ No newline at end of file + diff --git a/docs/docs/examples/combine-text.mdx b/docs/docs/examples/combine-text.mdx index 0d7524a5b..5a4e86cf0 100644 --- a/docs/docs/examples/combine-text.mdx +++ b/docs/docs/examples/combine-text.mdx @@ -18,4 +18,4 @@ This component is available under the **Helpers** tab of the Langflow preview. style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }} > - \ No newline at end of file + diff --git a/docs/docs/examples/create-record.mdx b/docs/docs/examples/create-record.mdx index f94ba84bd..aa7a886f4 100644 --- a/docs/docs/examples/create-record.mdx +++ b/docs/docs/examples/create-record.mdx @@ -14,4 +14,4 @@ The **Create Record** component allows you to dynamically create a `Record` from style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }} > - \ No newline at end of file + diff --git a/docs/docs/examples/pass.mdx b/docs/docs/examples/pass.mdx index cdf1858d5..ddfe35cca 100644 --- a/docs/docs/examples/pass.mdx +++ b/docs/docs/examples/pass.mdx @@ -14,4 +14,4 @@ The **Pass** component enables you to ignore one input and move forward with ano style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }} > - \ No newline at end of file + diff --git a/docs/docs/examples/store-message.mdx b/docs/docs/examples/store-message.mdx index 610bf645c..75ff0bd46 100644 --- a/docs/docs/examples/store-message.mdx +++ b/docs/docs/examples/store-message.mdx @@ -14,4 +14,4 @@ The **Message History** component can then be used to retrieve stored messages. 
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }} > - \ No newline at end of file + diff --git a/docs/docs/examples/sub-flow.mdx b/docs/docs/examples/sub-flow.mdx index ae7e5c9da..d2b9674ad 100644 --- a/docs/docs/examples/sub-flow.mdx +++ b/docs/docs/examples/sub-flow.mdx @@ -12,4 +12,4 @@ The **Sub Flow** component enables a user to select a previously built flow and style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }} > - \ No newline at end of file + diff --git a/docs/docs/examples/text-operator.mdx b/docs/docs/examples/text-operator.mdx index 5637dbc79..50d52fdbf 100644 --- a/docs/docs/examples/text-operator.mdx +++ b/docs/docs/examples/text-operator.mdx @@ -12,4 +12,4 @@ The **Text Operator** component simplifies logic. It evaluates the results from style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }} > - \ No newline at end of file + diff --git a/docs/docs/getting-started/flows-components-collections.mdx b/docs/docs/getting-started/flows-components-collections.mdx index d7a1fd1a6..335fb5c12 100644 --- a/docs/docs/getting-started/flows-components-collections.mdx +++ b/docs/docs/getting-started/flows-components-collections.mdx @@ -18,75 +18,3 @@ A [project](#project) can be a component or a flow. Projects are saved as part o For example, the **OpenAI LLM** is a **component** of the **Basic prompting** flow, and the **flow** is stored in a **collection**. ## Component - -Components are the building blocks of flows. They consist of inputs, outputs, and parameters that define their functionality. These elements provide a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work in the LangChain [documentation](https://python.langchain.com/docs/integrations/components). - -
- During the flow creation process, you will notice handles (colored circles) - attached to one or both sides of a component. These handles represent the - availability to connect to other components. Hover over a handle to see - connection details. -
- -
- For example, if you select a ConversationChain component, you - will see orange o and purple{" "} - o input handles. They indicate that - this component accepts an LLM and a Memory component as inputs. The red - asterisk * means that at least one input - of that type is required. -
- -{" "} - - - -
-In the top right corner of the component, you'll find the component status icon (![Status icon](/logos/playbutton.svg)). -Build the flow by clicking the **![Playground icon](/logos/botmessage.svg)Playground** at the bottom right of the canvas. - -Once the validation is complete, the status of each validated component should turn green (![Status icon](/logos/greencheck.svg)). -To debug, hover over the component status to see the outputs. - -
- ---- - -### Component Parameters - -Langflow components can be edited by clicking the component settings button. Hide parameters to reduce complexity and keep the canvas clean and intuitive for experimentation. - -
- -
- -## Collection - -A collection is a snapshot of flows available in a database. - -Collections can be downloaded to local storage and uploaded for future use. - -
- -
- -## Project - -A **Project** can be a flow or a component. To view your saved projects, select **My Collection**. - -Your **Projects** are displayed. - -Click the **![Playground icon](/logos/botmessage.svg) Playground** button to run a flow from the **My Collection** screen. - -In the top left corner of the screen are options for **Download Collection**, **Upload Collection**, and **New Project**. diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx index 5ccb8d7a0..e762142f0 100644 --- a/docs/docs/index.mdx +++ b/docs/docs/index.mdx @@ -14,8 +14,8 @@ Its intuitive interface allows for easy manipulation of AI building blocks, enab diff --git a/docs/docs/integrations/notion/add-content-to-page.md b/docs/docs/integrations/notion/add-content-to-page.md index 243c09d81..ace43e103 100644 --- a/docs/docs/integrations/notion/add-content-to-page.md +++ b/docs/docs/integrations/notion/add-content-to-page.md @@ -9,14 +9,11 @@ The `AddContentToPage` component converts markdown text to Notion blocks and app [Notion Reference](https://developers.notion.com/reference/patch-block-children) - - The `AddContentToPage` component enables you to: - Convert markdown text to Notion blocks. - Append the converted blocks to a specified Notion page. - Seamlessly integrate Notion content creation into Langflow workflows. - ## Component Usage @@ -100,8 +97,6 @@ class NotionPageCreator(CustomComponent): ## Example Usage - - Example of using the `AddContentToPage` component in a Langflow flow using Markdown as input: - ## Best Practices When using the `AddContentToPage` component: diff --git a/docs/docs/integrations/notion/list-users.md b/docs/docs/integrations/notion/list-users.md index c22c20ca8..0eb8236f5 100644 --- a/docs/docs/integrations/notion/list-users.md +++ b/docs/docs/integrations/notion/list-users.md @@ -9,13 +9,11 @@ The `NotionUserList` component retrieves users from Notion. 
It provides a conven [Notion Reference](https://developers.notion.com/reference/get-users) - - The `NotionUserList` component enables you to: +The `NotionUserList` component enables you to: - Retrieve user data from Notion - Access user information such as ID, type, name, and avatar URL - Integrate Notion user data seamlessly into your Langflow workflows - ## Component Usage @@ -95,7 +93,6 @@ class NotionUserList(CustomComponent): ## Example Usage - Here's an example of how you can use the `NotionUserList` component in a Langflow flow and passing the outputs to the Prompt component: - - ## Best Practices When using the `NotionUserList` component, consider the following best practices: diff --git a/docs/docs/migration/api.mdx b/docs/docs/migration/api.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/component-status-and-data-passing.mdx b/docs/docs/migration/component-status-and-data-passing.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/connecting-output-components.mdx b/docs/docs/migration/connecting-output-components.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/custom-component.mdx b/docs/docs/migration/custom-component.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/experimental-components.mdx b/docs/docs/migration/experimental-components.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/flow-of-data.mdx b/docs/docs/migration/flow-of-data.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/global-variables.mdx b/docs/docs/migration/global-variables.mdx deleted file mode 100644 index 616fa3621..000000000 --- a/docs/docs/migration/global-variables.mdx +++ /dev/null @@ -1,118 +0,0 @@ -import ZoomableImage from "/src/theme/ZoomableImage.js"; -import Admonition from "@theme/Admonition"; - -# Global Variables - -## TLDR; - -- 
Global Variables are reusable variables that can be accessed from any Text field in your project. -- To create a Global Variable, click on the 🌐 button in a Text field and then **+ Add New Variable**. -- Define the **Name**, **Type**, and **Value** of the variable. -- Click on **Save Variable** to create the variable. -- All Credential Global Variables are encrypted and cannot be accessed by anyone but you. -- Set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`true`_ in your `.env` file to add all variables in _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ to your user's Global Variables. - -Global Variables are a really useful feature of Langflow. -They allow you to define reusable variables that can be accessed from any Text field in your project. - -The first thing you need to do is find a **Text field** in a Component, so let's talk about what a Text field is. - -## Text Fields - -Text fields are the fields in a Component where you can write text but that does not allow you to open a Text Area. - -The easiest way to find fields that are Text fields, though, is to look for fields that have a 🌐 button. - - - -## Creating a Global Variable - -To create a Global Variable, you need to click on the 🌐 button in a Text field and that will open a dropdown showing your currently available variables and at the end of it **+ Add New Variable**. - - - -Click on **+ Add New Variable** and a window will open where you can define your new Global Variable. - -In it, you can define the **Name** of the variable, the optional **Type** of the variable, and the **Value** of the variable. - -The **Name** is the name that you will use to refer to the variable in your Text fields. - -The **Type** is optional for now but will be used in the future to allow for more advanced features. - -The **Value** is the value that the variable will have. -{/* say that all variables are encrypted */} - - - All Credential Global Variables are encrypted and cannot be accessed by anyone - but you. 
- - - - -After you have defined your variable, click on **Save Variable** and your variable will be created. - -After that, once you click on the 🌐 button in a Text field, you will see your new variable in the dropdown. - -## Environment Variables - -If you set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`true`_ (which is the default value) in your `.env` file, all variables in _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ will be added to your user's Global Variables. - -All of these variables can be used in your project as any other Global Variable. - - - You can set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`false`_ in your - `.env` file to prevent this behavior. - - -You can also set _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ to a list of variables that you want to get from the environment. - -The default list at the moment is: - -- ANTHROPIC_API_KEY -- ASTRA_DB_API_ENDPOINT -- ASTRA_DB_APPLICATION_TOKEN -- AZURE_OPENAI_API_KEY -- AZURE_OPENAI_API_DEPLOYMENT_NAME -- AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME -- AZURE_OPENAI_API_INSTANCE_NAME -- AZURE_OPENAI_API_VERSION -- COHERE_API_KEY -- GOOGLE_API_KEY -- GROQ_API_KEY -- HUGGINGFACEHUB_API_TOKEN -- OPENAI_API_KEY -- PINECONE_API_KEY -- SEARCHAPI_API_KEY -- SERPAPI_API_KEY -- UPSTASH_VECTOR_REST_URL -- UPSTASH_VECTOR_REST_TOKEN -- VECTARA_CUSTOMER_ID -- VECTARA_CORPUS_ID -- VECTARA_API_KEY - - - Set _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ as a comma-separated list - of variables (e.g. _`"VARIABLE1, VARIABLE2"`_) or as a JSON-encoded string - (e.g. _`'["VARIABLE1", "VARIABLE2"]'`_). - diff --git a/docs/docs/migration/inputs-and-outputs.mdx b/docs/docs/migration/inputs-and-outputs.mdx deleted file mode 100644 index 1e1745347..000000000 --- a/docs/docs/migration/inputs-and-outputs.mdx +++ /dev/null @@ -1,36 +0,0 @@ -# Inputs and Outputs - -TL;DR: Inputs and Outputs are a category of components that are used to define where data comes in and out of your flow. 
They also -dynamically change the Playground and can be renamed to make it easier to build and maintain your flows. - -## Introduction - -Langflow 1.0 introduces new categories of components called Inputs and Outputs. They are used to make it easier to understand and interact with your flows. - -Let's start with what they have in common: - -- Components in these categories connect to components that have Text or Record inputs or outputs. Some can connect to both but you have to pick what type of data you want to output or input. -- They can be renamed to help you identify them more easily in the Playground and while using the API. -- They dynamically change the Playground to make it easier to understand and interact with your flows. - -Native Langflow Components were created to be powerful tools that work around Langflow's features. They are designed to be easy to use and understand, and to help you build your flows faster. - -Let's dive into Inputs and Outputs. - -## Inputs - -Inputs are components that are used to define where data comes into your flow. They can be used to receive data from the user, from a database, or from any other source that can be converted to Text or Record. - -The difference between Chat Input and other Input components is the format of the output, the number of configurable fields, and the way they are displayed in the Playground. - -Chat Input components can output Text or Record. When you want to pass the sender name, or sender to the next component, you can use the Record output, and when you want to pass the message only you can use the Text output. This is useful when saving the message to a database or a memory system like Zep. - -You can find out more about it and the other Inputs [here](../components/inputs). - -## Outputs - -Outputs are components that are used to define where data comes out of your flow. They can be used to send data to the user, to the Playground, or to define how the data will be displayed in the Playground. 
- -The Chat Output works similarly to the Chat Input but does not have a field that allows for written input. It is used as an Output definition and can be used to send data to the user. - -You can find out more about it and the other Outputs [here](../components/outputs). diff --git a/docs/docs/migration/migrating-to-one-point-zero.mdx b/docs/docs/migration/migrating-to-one-point-zero.mdx index 827f0e118..973393606 100644 --- a/docs/docs/migration/migrating-to-one-point-zero.mdx +++ b/docs/docs/migration/migrating-to-one-point-zero.mdx @@ -41,7 +41,7 @@ We have a special channel in our Discord server dedicated to Langflow 1.0 migrat Langflow 1.0 introduces adds the concept of Inputs and Outputs to flows, allowing a clear definition of the data flow between components. Discover how to use Inputs and Outputs to pass data between components and create more dynamic flows. -[Learn more about Inputs and Outputs of Components](../migration/inputs-and-outputs) +[Learn more about Inputs and Outputs of Components](../components/inputs-and-outputs) ## To Compose or Not to Compose: the choice is yours @@ -71,7 +71,7 @@ Langflow 1.0 introduces many new native categories, including Inputs, Outputs, H With the introduction of Text and Record types connections between Components are more intuitive and easier to understand. This is the first step in a series of improvements to the way you interact with Langflow. Learn how to use Text, and Record and how they help you build better flows. -[Learn more about Text and Record](../migration/text-and-record) +[Learn more about Text and Record](../components/text-and-record) ## CustomComponent for All Components @@ -119,7 +119,7 @@ Things got a whole lot easier. You can now pass tweaks and inputs in the API by Global Variables can be used in any Text Field across your projects. Learn how to define and utilize Global Variables to streamline your workflow. 
-[Learn more about Global Variables](../migration/global-variables) +[Learn more about Global Variables](../administration/global-env.mdx) ## Experimental Components diff --git a/docs/docs/migration/multiple-flows.mdx b/docs/docs/migration/multiple-flows.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/new-categories-and-components.mdx b/docs/docs/migration/new-categories-and-components.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/passing-tweaks-and-inputs.mdx b/docs/docs/migration/passing-tweaks-and-inputs.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/renaming-and-editing-components.mdx b/docs/docs/migration/renaming-and-editing-components.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/sidebar-and-interaction-panel.mdx b/docs/docs/migration/sidebar-and-interaction-panel.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/state-management.mdx b/docs/docs/migration/state-management.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/supported-frameworks.mdx b/docs/docs/migration/supported-frameworks.mdx deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/docs/migration/text-and-record.mdx b/docs/docs/migration/text-and-record.mdx deleted file mode 100644 index cdfb26b6c..000000000 --- a/docs/docs/migration/text-and-record.mdx +++ /dev/null @@ -1,45 +0,0 @@ -# Text and Record - -In Langflow 1.0 we added two main input and output types: Text and Record. Text is a simple string input and output type, while Record is a structure very similar to a dictionary in Python. It is a key-value pair data structure. - -We've created a few components to help you work with these types. Let's see how a few of them work. - -### Records To Text - -This is a Component that takes in Records and outputs a Text. 
It does this using a template string and concatenating the values of the Record, one per line. - -If we have the following Records: - -```json -{ - "sender_name": "Alice", - "message": "Hello!" -} -{ - "sender_name": "John", - "message": "Hi!" -} -``` - -And the template string is: _`{sender_name}: {message}`_ - -``` -Alice: Hello! -John: Hi! -``` - -### Create Record - -This Component allows you to create a Record from a number of inputs. You can add as many key-value pairs as you want (as long as it is less than 15 😅). Once you've picked that number you'll need to write the name of the Key and can pass Text values from other components to it. - -### Documents To Records - -This Component takes in a [LangChain](https://langchain.com) Document and outputs a Record. It does this by extracting the _`page_content`_ and the _`metadata`_ from the Document and adding them to the Record as _`text`_ and _`data`_ respectively. - -## Why is this useful? - -The idea was to create a unified way to work with complex data in Langflow, and to make it easier to work with data that is not just a simple string. This way you can create more complex workflows and use the data in more ways. - -## What's next? - -We are planning to integrate an array of modalities to Langflow, such as images, audio, and video. This will allow you to create even more complex workflows and use cases. Stay tuned for more updates! 🚀 diff --git a/docs/docs/whats-new/a-new-chapter-langflow.mdx b/docs/docs/whats-new/a-new-chapter-langflow.mdx index 3ff74ffb2..bdc0f178b 100644 --- a/docs/docs/whats-new/a-new-chapter-langflow.mdx +++ b/docs/docs/whats-new/a-new-chapter-langflow.mdx @@ -41,7 +41,7 @@ By having a clear definition of Inputs and Outputs, we could build the experienc When building a project testing and debugging is crucial. The Playground is a tool that changes dynamically based on the Inputs and Outputs you defined in your project. For example, let's say you are building a simple RAG application. 
Generally, you have an Input, some references that come from a Vector Store Search, a Prompt and the answer. -Now, you could plug the output of your Prompt into a [Text Output](../components/outputs#Text-Output), rename that to "Prompt Result" and see the output of your Prompt in the Playground. +Now, you could plug the output of your Prompt into a [Text Output](../components/inputs-and-outputs), rename that to "Prompt Result" and see the output of your Prompt in the Playground. {/* Add image here of the described above */} diff --git a/docs/sidebars.js b/docs/sidebars.js index d3f4f2671..b12111797 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -49,8 +49,8 @@ module.exports = { label: "Core Components", collapsed: false, items: [ - "components/inputs", - "components/outputs", + "components/inputs-and-outputs", + "components/text-and-record", "components/data", "components/models", "components/helpers", @@ -91,15 +91,12 @@ module.exports = { }, { type: "category", - label: "Migration Guides", + label: "Migration", collapsed: false, items: [ "migration/possible-installation-issues", "migration/migrating-to-one-point-zero", - "migration/inputs-and-outputs", - "migration/text-and-record", "migration/compatibility", - "migration/global-variables", ], }, { diff --git a/docs/static/data/AstraDB-RAG-Flows.json b/docs/static/data/AstraDB-RAG-Flows.json index 10dafa85f..d8bd23eb2 100644 --- a/docs/static/data/AstraDB-RAG-Flows.json +++ b/docs/static/data/AstraDB-RAG-Flows.json @@ -1,3403 +1,3147 @@ { - "id": "51e2b78a-199b-4054-9f32-e288eef6924c", - "data": { - "nodes": [ - { - "id": "ChatInput-yxMKE", - "type": "genericNode", - "position": { - "x": 1195.5276981160775, - "y": 209.421875 - }, - "data": { - "type": "ChatInput", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import 
ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "input_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Message", - "advanced": false, - "input_types": [], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "value": "what is a line" - }, - "return_record": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "return_record", - "display_name": "Return Record", - "advanced": true, - "dynamic": false, - "info": "Return the message as a record containing the sender, sender_name, and session_id.", - "load_from_db": false, - "title_case": false - }, - "sender": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": 
true, - "multiline": false, - "value": "User", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Machine", - "User" - ], - "name": "sender", - "display_name": "Sender Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "sender_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "User", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "sender_name", - "display_name": "Sender Name", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "session_id": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "session_id", - "display_name": "Session ID", - "advanced": true, - "dynamic": false, - "info": "If provided, the message will be stored in the memory.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Get chat inputs from the Playground.", - "icon": "ChatInput", - "base_classes": [ - "Text", - "str", - "object", - "Record" - ], - "display_name": "Chat Input", - "documentation": "", - "custom_fields": { - "sender": null, - "sender_name": null, - "input_value": null, - "session_id": null, - "return_record": null - }, - "output_types": [ - "Text", - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "ChatInput-yxMKE" - }, - "selected": false, - "width": 384, - "height": 383 + "id": "51e2b78a-199b-4054-9f32-e288eef6924c", + "data": { + "nodes": [ + { + "id": "ChatInput-yxMKE", + "type": "genericNode", + "position": { + "x": 1195.5276981160775, + "y": 209.421875 + }, + "data": { + "type": 
"ChatInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "value": "what is a line" + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, 
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Machine", "User"], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" }, - { - "id": "TextOutput-BDknO", - "type": "genericNode", - "position": { - "x": 2322.600672827879, - "y": 604.9467307442569 - }, - "data": { - "type": "TextOutput", - "node": { - "template": { - "input_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Value", - "advanced": false, - "input_types": [ - "Record", - "Text" - ], - "dynamic": false, - "info": "Text or 
Record to be passed as output.", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "record_template": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "{text}", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "record_template", - "display_name": "Record Template", - "advanced": true, - "dynamic": false, - "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Display a text output in the Playground.", - "icon": "type", - "base_classes": [ - "object", - "Text", - "str" - ], - "display_name": "Extracted Chunks", - "documentation": "", - "custom_fields": { - "input_value": null, - "record_template": null - }, - "output_types": [ - "Text" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "TextOutput-BDknO" - }, - "selected": false, - "width": 384, - "height": 289, - "positionAbsolute": { - "x": 2322.600672827879, - "y": 604.9467307442569 - }, - "dragging": false + "description": "Get chat inputs from the Playground.", + "icon": "ChatInput", + "base_classes": ["Text", "str", "object", "Record"], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null }, - { - "id": "OpenAIEmbeddings-ZlOk1", - "type": "genericNode", - "position": { - "x": 1183.667250865064, - "y": 687.3171828430261 - }, - "data": { - "type": "OpenAIEmbeddings", - "node": { - "template": { - "allowed_special": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": [], - "fileTypes": [], - "file_path": "", - "password": false, - "name": "allowed_special", - "display_name": "Allowed Special", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "chunk_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 1000, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "chunk_size", - "display_name": "Chunk Size", - "advanced": true, - 
"dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "client": { - "type": "Any", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "client", - "display_name": "Client", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n 
\"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = 
False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "default_headers": { - "type": "dict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "default_headers", - "display_name": "Default Headers", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "default_query": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "default_query", - "display_name": "Default Query", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "deployment": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": 
true, - "multiline": false, - "value": "text-embedding-ada-002", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "deployment", - "display_name": "Deployment", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "disallowed_special": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": [ - "all" - ], - "fileTypes": [], - "file_path": "", - "password": false, - "name": "disallowed_special", - "display_name": "Disallowed Special", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "embedding_ctx_length": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 8191, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "embedding_ctx_length", - "display_name": "Embedding Context Length", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "max_retries": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 6, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "max_retries", - "display_name": "Max Retries", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "model": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "text-embedding-ada-002", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "text-embedding-3-small", - "text-embedding-3-large", - "text-embedding-ada-002" - ], - "name": "model", - "display_name": "Model", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": 
false, - "input_types": [ - "Text" - ] - }, - "model_kwargs": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "model_kwargs", - "display_name": "Model Kwargs", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "openai_api_base": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_base", - "display_name": "OpenAI API Base", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_key": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_key", - "display_name": "OpenAI API Key", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "OPENAI_API_KEY" - }, - "openai_api_type": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_type", - "display_name": "OpenAI API Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_version": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_api_version", - "display_name": "OpenAI API Version", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, 
- "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_organization": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_organization", - "display_name": "OpenAI Organization", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_proxy": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_proxy", - "display_name": "OpenAI Proxy", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "request_timeout": { - "type": "float", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "request_timeout", - "display_name": "Request Timeout", - "advanced": true, - "dynamic": false, - "info": "", - "rangeSpec": { - "step_type": "float", - "min": -1, - "max": 1, - "step": 0.1 - }, - "load_from_db": false, - "title_case": false - }, - "show_progress_bar": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "show_progress_bar", - "display_name": "Show Progress Bar", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "skip_empty": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "skip_empty", - "display_name": "Skip Empty", - "advanced": true, 
- "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "tiktoken_enable": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "tiktoken_enable", - "display_name": "TikToken Enable", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "tiktoken_model_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "tiktoken_model_name", - "display_name": "TikToken Model Name", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Generate embeddings using OpenAI models.", - "base_classes": [ - "Embeddings" - ], - "display_name": "OpenAI Embeddings", - "documentation": "", - "custom_fields": { - "openai_api_key": null, - "default_headers": null, - "default_query": null, - "allowed_special": null, - "disallowed_special": null, - "chunk_size": null, - "client": null, - "deployment": null, - "embedding_ctx_length": null, - "max_retries": null, - "model": null, - "model_kwargs": null, - "openai_api_base": null, - "openai_api_type": null, - "openai_api_version": null, - "openai_organization": null, - "openai_proxy": null, - "request_timeout": null, - "show_progress_bar": null, - "skip_empty": null, - "tiktoken_enable": null, - "tiktoken_model_name": null - }, - "output_types": [ - "Embeddings" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "OpenAIEmbeddings-ZlOk1" - }, - "selected": false, - "width": 384, - "height": 383, - "dragging": false + "output_types": ["Text", "Record"], + "field_formatters": {}, + "frozen": false, + 
"field_order": [], + "beta": false + }, + "id": "ChatInput-yxMKE" + }, + "selected": false, + "width": 384, + "height": 383 + }, + { + "id": "TextOutput-BDknO", + "type": "genericNode", + "position": { + "x": 2322.600672827879, + "y": 604.9467307442569 + }, + "data": { + "type": "TextOutput", + "node": { + "template": { + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": ["Record", "Text"], + "dynamic": false, + "info": "Text or Record to be passed as output.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" }, - { - "id": "OpenAIModel-EjXlN", - "type": "genericNode", - "position": { - "x": 3410.117202077183, - "y": 431.2038048137648 - }, - "data": { - "type": "OpenAIModel", - "node": { - "template": { - "input_value": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Input", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass 
OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "max_tokens": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 256, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "max_tokens", - "display_name": "Max Tokens", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "model_kwargs": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - 
"value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "model_kwargs", - "display_name": "Model Kwargs", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "model_name": { - "type": "str", - "required": true, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "gpt-3.5-turbo", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "gpt-4-turbo-preview", - "gpt-3.5-turbo", - "gpt-4-0125-preview", - "gpt-4-1106-preview", - "gpt-4-vision-preview", - "gpt-3.5-turbo-0125", - "gpt-3.5-turbo-1106" - ], - "name": "model_name", - "display_name": "Model Name", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_base": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_api_base", - "display_name": "OpenAI API Base", - "advanced": true, - "dynamic": false, - "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_key": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_key", - "display_name": "OpenAI API Key", - "advanced": false, - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "OPENAI_API_KEY" - }, - "stream": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "stream", - "display_name": "Stream", - "advanced": true, - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "load_from_db": false, - "title_case": false - }, - "system_message": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "system_message", - "display_name": "System Message", - "advanced": true, - "dynamic": false, - "info": "System message to pass to the model.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "temperature": { - "type": "float", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 0.1, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "temperature", - "display_name": "Temperature", - "advanced": false, - "dynamic": false, - "info": "", - "rangeSpec": { - "step_type": "float", - "min": -1, - "max": 1, - "step": 0.1 - }, - "load_from_db": false, - "title_case": false - }, - "_type": "CustomComponent" - }, - "description": "Generates text using OpenAI LLMs.", - "icon": "OpenAI", - "base_classes": [ - "object", - "Text", - "str" - ], - "display_name": "OpenAI", - "documentation": "", - "custom_fields": { - "input_value": null, - "openai_api_key": null, - "temperature": null, - "model_name": null, - "max_tokens": null, - "model_kwargs": null, - "openai_api_base": null, - "stream": null, - "system_message": null - }, - "output_types": [ - "Text" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [ - "max_tokens", - "model_kwargs", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "input_value", - "system_message", - "stream" - ], - "beta": false - }, - "id": "OpenAIModel-EjXlN" - }, - "selected": true, - "width": 384, - "height": 563, - "positionAbsolute": { - "x": 3410.117202077183, - "y": 431.2038048137648 - }, - "dragging": false + "description": "Display a text output in the Playground.", + "icon": "type", + "base_classes": 
["object", "Text", "str"], + "display_name": "Extracted Chunks", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null }, - { - "id": "Prompt-xeI6K", - "type": "genericNode", - "position": { - "x": 2969.0261961391298, - "y": 442.1613649809069 + "output_types": ["Text"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextOutput-BDknO" + }, + "selected": false, + "width": 384, + "height": 289, + "positionAbsolute": { + "x": 2322.600672827879, + "y": 604.9467307442569 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-ZlOk1", + "type": "genericNode", + "position": { + "x": 1183.667250865064, + "y": 687.3171828430261 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n 
\"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n 
deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": ["all"], + "fileTypes": [], 
+ "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "OPENAI_API_KEY" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_proxy": { + "type": "str", + "required": false, + 
"placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 }, - "data": { - "type": "Prompt", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", - 
"fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "template": { - "type": "prompt", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "template", - "display_name": "Template", - "advanced": false, - "input_types": [ - "Text" - ], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "_type": "CustomComponent", - "context": { - "field_type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "context", - "display_name": "context", - "advanced": false, - "input_types": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "type": "str" - }, - "question": { - "field_type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "question", - "display_name": "question", - "advanced": false, - "input_types": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "type": "str" - } - }, - "description": "Create a prompt template with dynamic variables.", - "icon": "prompts", - "is_input": null, - "is_output": null, - "is_composition": null, - "base_classes": [ - "object", - "Text", - "str" - ], - "name": "", - "display_name": "Prompt", - "documentation": "", - "custom_fields": { - "template": [ - "context", - 
"question" - ] - }, - "output_types": [ - "Text" - ], - "full_path": null, - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false, - "error": null - }, - "id": "Prompt-xeI6K", - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt" - }, - "selected": false, - "width": 384, - "height": 477, - "positionAbsolute": { - "x": 2969.0261961391298, - "y": 442.1613649809069 - }, - "dragging": false + "load_from_db": false, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "skip_empty": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_enable", + "display_name": "TikToken Enable", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": true, + "dynamic": false, + "info": "", + 
"load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" }, - { - "id": "ChatOutput-Q39I8", - "type": "genericNode", - "position": { - "x": 3887.2073667611485, - "y": 588.4801225794856 - }, - "data": { - "type": "ChatOutput", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "input_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Message", - "advanced": false, - "input_types": [ - "Text" - ], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "record_template": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "{text}", - "fileTypes": [], - 
"file_path": "", - "password": false, - "name": "record_template", - "display_name": "Record Template", - "advanced": true, - "dynamic": false, - "info": "In case of Message being a Record, this template will be used to convert it to text.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "return_record": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "return_record", - "display_name": "Return Record", - "advanced": true, - "dynamic": false, - "info": "Return the message as a record containing the sender, sender_name, and session_id.", - "load_from_db": false, - "title_case": false - }, - "sender": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Machine", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Machine", - "User" - ], - "name": "sender", - "display_name": "Sender Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "sender_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "AI", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "sender_name", - "display_name": "Sender Name", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "session_id": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "session_id", - "display_name": "Session ID", - "advanced": true, - "dynamic": false, - "info": "If provided, the message will be stored in the memory.", - 
"load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Display a chat message in the Playground.", - "icon": "ChatOutput", - "base_classes": [ - "object", - "Text", - "Record", - "str" - ], - "display_name": "Chat Output", - "documentation": "", - "custom_fields": { - "sender": null, - "sender_name": null, - "input_value": null, - "session_id": null, - "return_record": null, - "record_template": null - }, - "output_types": [ - "Text", - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "ChatOutput-Q39I8" - }, - "selected": false, - "width": 384, - "height": 383, - "positionAbsolute": { - "x": 3887.2073667611485, - "y": 588.4801225794856 - }, - "dragging": false + "description": "Generate embeddings using OpenAI models.", + "base_classes": ["Embeddings"], + "display_name": "OpenAI Embeddings", + "documentation": "", + "custom_fields": { + "openai_api_key": null, + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + "openai_api_base": null, + "openai_api_type": null, + "openai_api_version": null, + "openai_organization": null, + "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null }, - { - "id": "File-t0a6a", - "type": "genericNode", - "position": { - "x": 2257.233450682836, - "y": 1747.5389618367233 + "output_types": ["Embeddings"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "OpenAIEmbeddings-ZlOk1" + }, + "selected": false, + "width": 384, + "height": 383, + "dragging": false + }, + { + "id": "OpenAIModel-EjXlN", + "type": "genericNode", + "position": { + "x": 
3410.117202077183, + "y": 431.2038048137648 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + 
"value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "OPENAI_API_KEY" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.1, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 }, - "data": { - "type": "File", - "node": { - "template": { - "path": { - "type": 
"file", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [ - ".txt", - ".md", - ".mdx", - ".csv", - ".json", - ".yaml", - ".yml", - ".xml", - ".html", - ".htm", - ".pdf", - ".docx" - ], - "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf", - "password": false, - "name": "path", - "display_name": "Path", - "advanced": false, - "dynamic": false, - "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx", - "load_from_db": false, - "title_case": false, - "value": "" - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. 
Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "silent_errors": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "silent_errors", - "display_name": "Silent Errors", - "advanced": true, - "dynamic": false, - "info": "If true, errors will not raise an exception.", - "load_from_db": false, - "title_case": false - }, - "_type": "CustomComponent" - }, - "description": "A generic file loader.", - "icon": "file-text", - "base_classes": [ - "Record" - ], - "display_name": "File", - "documentation": "", - "custom_fields": { - "path": null, - "silent_errors": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "File-t0a6a" - }, - "selected": false, - "width": 384, - "height": 281, - "positionAbsolute": { - "x": 2257.233450682836, - "y": 1747.5389618367233 - }, - "dragging": false + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" }, - { - "id": "RecursiveCharacterTextSplitter-tR9QM", - "type": "genericNode", - "position": { - "x": 2791.013514133929, - "y": 1462.9588953494142 - }, - "data": { - "type": "RecursiveCharacterTextSplitter", - "node": { - "template": { - "inputs": { - "type": "Document", - "required": true, - "placeholder": "", - 
"list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "inputs", - "display_name": "Input", - "advanced": false, - "input_types": [ - "Document", - "Record" - ], - "dynamic": false, - "info": "The texts to split.", - "load_from_db": false, - "title_case": false - }, - "chunk_overlap": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 200, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "chunk_overlap", - "display_name": "Chunk Overlap", - "advanced": false, - "dynamic": false, - "info": "The amount of overlap between chunks.", - "load_from_db": false, - "title_case": false - }, - "chunk_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 1000, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "chunk_size", - "display_name": "Chunk Size", - "advanced": false, - "dynamic": false, - "info": "The maximum length of each chunk.", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n 
\"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = 
self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "separators": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "separators", - "display_name": "Separators", - "advanced": false, - "dynamic": false, - "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": [ - "" - ] - }, - "_type": "CustomComponent" - }, - "description": "Split text into chunks of a specified length.", - "base_classes": [ - "Record" - ], - "display_name": "Recursive Character Text Splitter", - "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter", - "custom_fields": { - "inputs": null, - "separators": null, - "chunk_size": null, - "chunk_overlap": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "RecursiveCharacterTextSplitter-tR9QM" - }, - "selected": false, - "width": 384, - "height": 501, - "positionAbsolute": { - "x": 2791.013514133929, - "y": 1462.9588953494142 - }, - "dragging": false + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": ["object", "Text", "str"], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null }, - { - "id": "AstraDBSearch-41nRz", - "type": "genericNode", - "position": 
{ - "x": 1723.976434815103, - "y": 277.03317407245913 - }, - "data": { - "type": "AstraDBSearch", - "node": { - "template": { - "embedding": { - "type": "Embeddings", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "embedding", - "display_name": "Embedding", - "advanced": false, - "dynamic": false, - "info": "Embedding to use", - "load_from_db": false, - "title_case": false - }, - "input_value": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Input Value", - "advanced": false, - "dynamic": false, - "info": "Input value to search", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "api_endpoint": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "api_endpoint", - "display_name": "API Endpoint", - "advanced": false, - "dynamic": false, - "info": "API endpoint URL for the Astra DB service.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "ASTRA_DB_API_ENDPOINT" - }, - "batch_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "batch_size", - "display_name": "Batch Size", - "advanced": true, - "dynamic": false, - "info": "Optional number of records to process in a single batch.", - "load_from_db": false, - "title_case": false - }, - "bulk_delete_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - 
"name": "bulk_delete_concurrency", - "display_name": "Bulk Delete Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk delete operations.", - "load_from_db": false, - "title_case": false - }, - "bulk_insert_batch_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_insert_batch_concurrency", - "display_name": "Bulk Insert Batch Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk insert operations.", - "load_from_db": false, - "title_case": false - }, - "bulk_insert_overwrite_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_insert_overwrite_concurrency", - "display_name": "Bulk Insert Overwrite Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": 
\"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", 
\"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n 
bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "collection_indexing_policy": { - "type": "dict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "collection_indexing_policy", - "display_name": "Collection Indexing Policy", - "advanced": true, - "dynamic": false, - "info": "Optional dictionary defining the indexing policy for the collection.", - "load_from_db": false, - "title_case": false - }, - "collection_name": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "collection_name", - "display_name": "Collection Name", - "advanced": false, - "dynamic": false, - "info": "The name of the collection within Astra DB where the vectors will be stored.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "langflow" - }, - "metadata_indexing_exclude": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - 
"multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metadata_indexing_exclude", - "display_name": "Metadata Indexing Exclude", - "advanced": true, - "dynamic": false, - "info": "Optional list of metadata fields to exclude from the indexing.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "metadata_indexing_include": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metadata_indexing_include", - "display_name": "Metadata Indexing Include", - "advanced": true, - "dynamic": false, - "info": "Optional list of metadata fields to include in the indexing.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "metric": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metric", - "display_name": "Metric", - "advanced": true, - "dynamic": false, - "info": "Optional distance metric for vector comparisons in the vector store.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "namespace": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "namespace", - "display_name": "Namespace", - "advanced": true, - "dynamic": false, - "info": "Optional namespace within Astra DB to use for the collection.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "number_of_results": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 4, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "number_of_results", - 
"display_name": "Number of Results", - "advanced": true, - "dynamic": false, - "info": "Number of results to return.", - "load_from_db": false, - "title_case": false - }, - "pre_delete_collection": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "pre_delete_collection", - "display_name": "Pre Delete Collection", - "advanced": true, - "dynamic": false, - "info": "Boolean flag to determine whether to delete the collection before creating a new one.", - "load_from_db": false, - "title_case": false - }, - "search_type": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Similarity", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Similarity", - "MMR" - ], - "name": "search_type", - "display_name": "Search Type", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "setup_mode": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Sync", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Sync", - "Async", - "Off" - ], - "name": "setup_mode", - "display_name": "Setup Mode", - "advanced": true, - "dynamic": false, - "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "token": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "token", - "display_name": "Token", - "advanced": false, - "dynamic": false, - "info": "Authentication 
token for accessing Astra DB.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "ASTRA_DB_APPLICATION_TOKEN" - }, - "_type": "CustomComponent" - }, - "description": "Searches an existing Astra DB Vector Store.", - "icon": "AstraDB", - "base_classes": [ - "Record" - ], - "display_name": "Astra DB Search", - "documentation": "", - "custom_fields": { - "embedding": null, - "collection_name": null, - "input_value": null, - "token": null, - "api_endpoint": null, - "search_type": null, - "number_of_results": null, - "namespace": null, - "metric": null, - "batch_size": null, - "bulk_insert_batch_concurrency": null, - "bulk_insert_overwrite_concurrency": null, - "bulk_delete_concurrency": null, - "setup_mode": null, - "pre_delete_collection": null, - "metadata_indexing_include": null, - "metadata_indexing_exclude": null, - "collection_indexing_policy": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [ - "token", - "api_endpoint", - "collection_name", - "input_value", - "embedding" - ], - "beta": false - }, - "id": "AstraDBSearch-41nRz" - }, - "selected": false, - "width": 384, - "height": 713, - "dragging": false, - "positionAbsolute": { - "x": 1723.976434815103, - "y": 277.03317407245913 - } + "output_types": ["Text"], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-EjXlN" + }, + "selected": true, + "width": 384, + "height": 563, + "positionAbsolute": { + "x": 3410.117202077183, + "y": 431.2038048137648 + }, + "dragging": false + }, + { + "id": "Prompt-xeI6K", + "type": "genericNode", + "position": { + "x": 2969.0261961391298, + "y": 442.1613649809069 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + 
"required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": ["Text"], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "context": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + 
"show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "context", + "display_name": "context", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + }, + "question": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "question", + "display_name": "question", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } }, - { - "id": "AstraDB-eUCSS", - "type": "genericNode", - "position": { - "x": 3372.04958055989, - "y": 1611.0742035495277 - }, - "data": { - "type": "AstraDB", - "node": { - "template": { - "embedding": { - "type": "Embeddings", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "embedding", - "display_name": "Embedding", - "advanced": false, - "dynamic": false, - "info": "Embedding to use", - "load_from_db": false, - "title_case": false - }, - "inputs": { - "type": "Record", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "inputs", - "display_name": "Inputs", - "advanced": false, - "dynamic": false, - "info": "Optional list of records to be processed and stored in the vector store.", - "load_from_db": false, - "title_case": false - }, - "api_endpoint": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, 
- "name": "api_endpoint", - "display_name": "API Endpoint", - "advanced": false, - "dynamic": false, - "info": "API endpoint URL for the Astra DB service.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "ASTRA_DB_API_ENDPOINT" - }, - "batch_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "batch_size", - "display_name": "Batch Size", - "advanced": true, - "dynamic": false, - "info": "Optional number of records to process in a single batch.", - "load_from_db": false, - "title_case": false - }, - "bulk_delete_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_delete_concurrency", - "display_name": "Bulk Delete Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk delete operations.", - "load_from_db": false, - "title_case": false - }, - "bulk_insert_batch_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_insert_batch_concurrency", - "display_name": "Bulk Insert Batch Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk insert operations.", - "load_from_db": false, - "title_case": false - }, - "bulk_insert_overwrite_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_insert_overwrite_concurrency", - "display_name": "Bulk Insert Overwrite Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk 
insert operations that overwrite existing records.", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n 
\"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = 
None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": 
"", - "load_from_db": false, - "title_case": false - }, - "collection_indexing_policy": { - "type": "dict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "collection_indexing_policy", - "display_name": "Collection Indexing Policy", - "advanced": true, - "dynamic": false, - "info": "Optional dictionary defining the indexing policy for the collection.", - "load_from_db": false, - "title_case": false - }, - "collection_name": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "collection_name", - "display_name": "Collection Name", - "advanced": false, - "dynamic": false, - "info": "The name of the collection within Astra DB where the vectors will be stored.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "langflow" - }, - "metadata_indexing_exclude": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metadata_indexing_exclude", - "display_name": "Metadata Indexing Exclude", - "advanced": true, - "dynamic": false, - "info": "Optional list of metadata fields to exclude from the indexing.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "metadata_indexing_include": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metadata_indexing_include", - "display_name": "Metadata Indexing Include", - "advanced": true, - "dynamic": false, - "info": "Optional list of metadata fields to include in the indexing.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" 
- ] - }, - "metric": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metric", - "display_name": "Metric", - "advanced": true, - "dynamic": false, - "info": "Optional distance metric for vector comparisons in the vector store.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "namespace": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "namespace", - "display_name": "Namespace", - "advanced": true, - "dynamic": false, - "info": "Optional namespace within Astra DB to use for the collection.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "pre_delete_collection": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "pre_delete_collection", - "display_name": "Pre Delete Collection", - "advanced": true, - "dynamic": false, - "info": "Boolean flag to determine whether to delete the collection before creating a new one.", - "load_from_db": false, - "title_case": false - }, - "setup_mode": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Async", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Sync", - "Async", - "Off" - ], - "name": "setup_mode", - "display_name": "Setup Mode", - "advanced": true, - "dynamic": false, - "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "token": { - "type": "str", - 
"required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "token", - "display_name": "Token", - "advanced": false, - "dynamic": false, - "info": "Authentication token for accessing Astra DB.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "ASTRA_DB_APPLICATION_TOKEN" - }, - "_type": "CustomComponent" - }, - "description": "Builds or loads an Astra DB Vector Store.", - "icon": "AstraDB", - "base_classes": [ - "VectorStore" - ], - "display_name": "Astra DB", - "documentation": "", - "custom_fields": { - "embedding": null, - "token": null, - "api_endpoint": null, - "collection_name": null, - "inputs": null, - "namespace": null, - "metric": null, - "batch_size": null, - "bulk_insert_batch_concurrency": null, - "bulk_insert_overwrite_concurrency": null, - "bulk_delete_concurrency": null, - "setup_mode": null, - "pre_delete_collection": null, - "metadata_indexing_include": null, - "metadata_indexing_exclude": null, - "collection_indexing_policy": null - }, - "output_types": [ - "VectorStore" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [ - "token", - "api_endpoint", - "collection_name", - "inputs", - "embedding" - ], - "beta": false - }, - "id": "AstraDB-eUCSS" - }, - "selected": false, - "width": 384, - "height": 573, - "positionAbsolute": { - "x": 3372.04958055989, - "y": 1611.0742035495277 - }, - "dragging": false + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": ["object", "Text", "str"], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": ["context", "question"] }, - { - "id": "OpenAIEmbeddings-9TPjc", - "type": "genericNode", - "position": { - "x": 2814.0402191223047, - "y": 1955.9268168273086 - }, - "data": { - "type": 
"OpenAIEmbeddings", - "node": { - "template": { - "allowed_special": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": [], - "fileTypes": [], - "file_path": "", - "password": false, - "name": "allowed_special", - "display_name": "Allowed Special", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "chunk_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 1000, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "chunk_size", - "display_name": "Chunk Size", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "client": { - "type": "Any", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "client", - "display_name": "Client", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": 
\"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n 
self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "default_headers": { - "type": "dict", - "required": false, - "placeholder": "", - "list": false, - 
"show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "default_headers", - "display_name": "Default Headers", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "default_query": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "default_query", - "display_name": "Default Query", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "deployment": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "text-embedding-ada-002", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "deployment", - "display_name": "Deployment", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "disallowed_special": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": [ - "all" - ], - "fileTypes": [], - "file_path": "", - "password": false, - "name": "disallowed_special", - "display_name": "Disallowed Special", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "embedding_ctx_length": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 8191, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "embedding_ctx_length", - "display_name": "Embedding Context Length", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "max_retries": { - "type": "int", - "required": false, - "placeholder": "", - 
"list": false, - "show": true, - "multiline": false, - "value": 6, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "max_retries", - "display_name": "Max Retries", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "model": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "text-embedding-ada-002", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "text-embedding-3-small", - "text-embedding-3-large", - "text-embedding-ada-002" - ], - "name": "model", - "display_name": "Model", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "model_kwargs": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "model_kwargs", - "display_name": "Model Kwargs", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "openai_api_base": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_base", - "display_name": "OpenAI API Base", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_key": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_key", - "display_name": "OpenAI API Key", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - 
"openai_api_type": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_type", - "display_name": "OpenAI API Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_version": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_api_version", - "display_name": "OpenAI API Version", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_organization": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_organization", - "display_name": "OpenAI Organization", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_proxy": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_proxy", - "display_name": "OpenAI Proxy", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "request_timeout": { - "type": "float", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "request_timeout", - "display_name": "Request Timeout", - "advanced": true, - "dynamic": false, - "info": "", - "rangeSpec": { - "step_type": "float", - "min": -1, - "max": 1, - 
"step": 0.1 - }, - "load_from_db": false, - "title_case": false - }, - "show_progress_bar": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "show_progress_bar", - "display_name": "Show Progress Bar", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "skip_empty": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "skip_empty", - "display_name": "Skip Empty", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "tiktoken_enable": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "tiktoken_enable", - "display_name": "TikToken Enable", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "tiktoken_model_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "tiktoken_model_name", - "display_name": "TikToken Model Name", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Generate embeddings using OpenAI models.", - "base_classes": [ - "Embeddings" - ], - "display_name": "OpenAI Embeddings", - "documentation": "", - "custom_fields": { - "openai_api_key": null, - "default_headers": null, - "default_query": null, - "allowed_special": null, - "disallowed_special": null, - 
"chunk_size": null, - "client": null, - "deployment": null, - "embedding_ctx_length": null, - "max_retries": null, - "model": null, - "model_kwargs": null, - "openai_api_base": null, - "openai_api_type": null, - "openai_api_version": null, - "openai_organization": null, - "openai_proxy": null, - "request_timeout": null, - "show_progress_bar": null, - "skip_empty": null, - "tiktoken_enable": null, - "tiktoken_model_name": null - }, - "output_types": [ - "Embeddings" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "OpenAIEmbeddings-9TPjc" - }, - "selected": false, - "width": 384, - "height": 383, - "positionAbsolute": { - "x": 2814.0402191223047, - "y": 1955.9268168273086 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "TextOutput-BDknO", - "target": "Prompt-xeI6K", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "id": "reactflow__edge-TextOutput-BDknO{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "context", - "id": "Prompt-xeI6K", - "inputTypes": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], - 
"dataType": "TextOutput", - "id": "TextOutput-BDknO" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "output_types": ["Text"], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-xeI6K", + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 477, + "positionAbsolute": { + "x": 2969.0261961391298, + "y": 442.1613649809069 + }, + "dragging": false + }, + { + "id": "ChatOutput-Q39I8", + "type": "genericNode", + "position": { + "x": 3887.2073667611485, + "y": 588.4801225794856 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": 
"", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": ["Text"], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Machine", "User"], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": 
false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" }, - { - "source": "ChatInput-yxMKE", - "target": "Prompt-xeI6K", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "question", - "id": "Prompt-xeI6K", - "inputTypes": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "Text", - "str", - "object", - "Record" - ], - "dataType": "ChatInput", - "id": "ChatInput-yxMKE" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + 
"description": "Display a chat message in the Playground.", + "icon": "ChatOutput", + "base_classes": ["object", "Text", "Record", "str"], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null, + "record_template": null }, - { - "source": "Prompt-xeI6K", - "target": "OpenAIModel-EjXlN", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "id": "reactflow__edge-Prompt-xeI6K{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}-OpenAIModel-EjXlN{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": "OpenAIModel-EjXlN", - "inputTypes": [ - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], - "dataType": "Prompt", - "id": "Prompt-xeI6K" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "output_types": ["Text", "Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-Q39I8" + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": 3887.2073667611485, + "y": 588.4801225794856 + }, + "dragging": false + }, + { + "id": "File-t0a6a", + "type": "genericNode", + "position": { + "x": 2257.233450682836, + "y": 1747.5389618367233 + }, 
+ "data": { + "type": "File", + "node": { + "template": { + "path": { + "type": "file", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [ + ".txt", + ".md", + ".mdx", + ".csv", + ".json", + ".yaml", + ".yml", + ".xml", + ".html", + ".htm", + ".pdf", + ".docx" + ], + "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf", + "password": false, + "name": "path", + "display_name": "Path", + "advanced": false, + "dynamic": false, + "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx", + "load_from_db": false, + "title_case": false, + "value": "" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. 
Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "silent_errors": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "silent_errors", + "display_name": "Silent Errors", + "advanced": true, + "dynamic": false, + "info": "If true, errors will not raise an exception.", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" }, - { - "source": "OpenAIModel-EjXlN", - "target": "ChatOutput-Q39I8", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "id": "reactflow__edge-OpenAIModel-EjXlN{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}-ChatOutput-Q39I8{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": 
"ChatOutput-Q39I8", - "inputTypes": [ - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], - "dataType": "OpenAIModel", - "id": "OpenAIModel-EjXlN" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "description": "A generic file loader.", + "icon": "file-text", + "base_classes": ["Record"], + "display_name": "File", + "documentation": "", + "custom_fields": { + "path": null, + "silent_errors": null }, - { - "source": "File-t0a6a", - "target": "RecursiveCharacterTextSplitter-tR9QM", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}", - "id": "reactflow__edge-File-t0a6a{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}-RecursiveCharacterTextSplitter-tR9QM{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}", - "data": { - "targetHandle": { - "fieldName": "inputs", - "id": "RecursiveCharacterTextSplitter-tR9QM", - "inputTypes": [ - "Document", - "Record" - ], - "type": "Document" - }, - "sourceHandle": { - "baseClasses": [ - "Record" - ], - "dataType": "File", - "id": "File-t0a6a" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "File-t0a6a" + }, + "selected": false, + "width": 384, + "height": 281, + 
"positionAbsolute": { + "x": 2257.233450682836, + "y": 1747.5389618367233 + }, + "dragging": false + }, + { + "id": "RecursiveCharacterTextSplitter-tR9QM", + "type": "genericNode", + "position": { + "x": 2791.013514133929, + "y": 1462.9588953494142 + }, + "data": { + "type": "RecursiveCharacterTextSplitter", + "node": { + "template": { + "inputs": { + "type": "Document", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "inputs", + "display_name": "Input", + "advanced": false, + "input_types": ["Document", "Record"], + "dynamic": false, + "info": "The texts to split.", + "load_from_db": false, + "title_case": false + }, + "chunk_overlap": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 200, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_overlap", + "display_name": "Chunk Overlap", + "advanced": false, + "dynamic": false, + "info": "The amount of overlap between chunks.", + "load_from_db": false, + "title_case": false + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": false, + "dynamic": false, + "info": "The maximum length of each chunk.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import 
build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = 
int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "separators": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "separators", + "display_name": "Separators", + "advanced": false, + "dynamic": false, + "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": [""] + }, + "_type": "CustomComponent" }, - { - "source": "OpenAIEmbeddings-ZlOk1", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}", - "target": "AstraDBSearch-41nRz", - "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", - "data": { - "targetHandle": { - "fieldName": "embedding", - "id": "AstraDBSearch-41nRz", - "inputTypes": null, - "type": "Embeddings" - }, - "sourceHandle": { - "baseClasses": [ - "Embeddings" - ], - "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-ZlOk1" - } - }, - "style": { - "stroke": "#555" - }, - "className": 
"stroke-gray-900 stroke-connection", - "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}" + "description": "Split text into chunks of a specified length.", + "base_classes": ["Record"], + "display_name": "Recursive Character Text Splitter", + "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter", + "custom_fields": { + "inputs": null, + "separators": null, + "chunk_size": null, + "chunk_overlap": null }, - { - "source": "ChatInput-yxMKE", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}", - "target": "AstraDBSearch-41nRz", - "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": "AstraDBSearch-41nRz", - "inputTypes": [ - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "Text", - "str", - "object", - "Record" - ], - "dataType": "ChatInput", - "id": "ChatInput-yxMKE" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": 
"reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}" + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "RecursiveCharacterTextSplitter-tR9QM" + }, + "selected": false, + "width": 384, + "height": 501, + "positionAbsolute": { + "x": 2791.013514133929, + "y": 1462.9588953494142 + }, + "dragging": false + }, + { + "id": "AstraDBSearch-41nRz", + "type": "genericNode", + "position": { + "x": 1723.976434815103, + "y": 277.03317407245913 + }, + "data": { + "type": "AstraDBSearch", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "Embedding to use", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input Value", + "advanced": false, + "dynamic": false, + "info": "Input value to search", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "api_endpoint": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "api_endpoint", + "display_name": "API Endpoint", + "advanced": false, + "dynamic": 
false, + "info": "API endpoint URL for the Astra DB service.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "ASTRA_DB_API_ENDPOINT" + }, + "batch_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "batch_size", + "display_name": "Batch Size", + "advanced": true, + "dynamic": false, + "info": "Optional number of records to process in a single batch.", + "load_from_db": false, + "title_case": false + }, + "bulk_delete_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_delete_concurrency", + "display_name": "Bulk Delete Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk delete operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_batch_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_batch_concurrency", + "display_name": "Bulk Insert Batch Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_overwrite_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_overwrite_concurrency", + "display_name": "Bulk Insert Overwrite Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "load_from_db": false, + "title_case": false 
+ }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": 
\"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n 
metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. 
Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "collection_indexing_policy": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_indexing_policy", + "display_name": "Collection Indexing Policy", + "advanced": true, + "dynamic": false, + "info": "Optional dictionary defining the indexing policy for the collection.", + "load_from_db": false, + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "langflow" + }, + "metadata_indexing_exclude": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_exclude", + "display_name": "Metadata Indexing Exclude", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "metadata_indexing_include": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_include", + "display_name": "Metadata Indexing 
Include", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to include in the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "metric": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metric", + "display_name": "Metric", + "advanced": true, + "dynamic": false, + "info": "Optional distance metric for vector comparisons in the vector store.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "namespace": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "namespace", + "display_name": "Namespace", + "advanced": true, + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "number_of_results": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 4, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "number_of_results", + "display_name": "Number of Results", + "advanced": true, + "dynamic": false, + "info": "Number of results to return.", + "load_from_db": false, + "title_case": false + }, + "pre_delete_collection": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "pre_delete_collection", + "display_name": "Pre Delete Collection", + "advanced": true, + "dynamic": false, + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "load_from_db": false, + "title_case": false + }, + "search_type": 
{ + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Similarity", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Similarity", "MMR"], + "name": "search_type", + "display_name": "Search Type", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "setup_mode": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Sync", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Sync", "Async", "Off"], + "name": "setup_mode", + "display_name": "Setup Mode", + "advanced": true, + "dynamic": false, + "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "token": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "token", + "display_name": "Token", + "advanced": false, + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "ASTRA_DB_APPLICATION_TOKEN" + }, + "_type": "CustomComponent" }, - { - "source": "RecursiveCharacterTextSplitter-tR9QM", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}", - "target": "AstraDB-eUCSS", - "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}", - "data": { - "targetHandle": { - "fieldName": "inputs", - "id": 
"AstraDB-eUCSS", - "inputTypes": null, - "type": "Record" - }, - "sourceHandle": { - "baseClasses": [ - "Record" - ], - "dataType": "RecursiveCharacterTextSplitter", - "id": "RecursiveCharacterTextSplitter-tR9QM" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}", - "selected": false + "description": "Searches an existing Astra DB Vector Store.", + "icon": "AstraDB", + "base_classes": ["Record"], + "display_name": "Astra DB Search", + "documentation": "", + "custom_fields": { + "embedding": null, + "collection_name": null, + "input_value": null, + "token": null, + "api_endpoint": null, + "search_type": null, + "number_of_results": null, + "namespace": null, + "metric": null, + "batch_size": null, + "bulk_insert_batch_concurrency": null, + "bulk_insert_overwrite_concurrency": null, + "bulk_delete_concurrency": null, + "setup_mode": null, + "pre_delete_collection": null, + "metadata_indexing_include": null, + "metadata_indexing_exclude": null, + "collection_indexing_policy": null }, - { - "source": "OpenAIEmbeddings-9TPjc", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}", - "target": "AstraDB-eUCSS", - "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", - "data": { - "targetHandle": { - "fieldName": "embedding", - "id": "AstraDB-eUCSS", - "inputTypes": null, - "type": "Embeddings" - }, - 
"sourceHandle": { - "baseClasses": [ - "Embeddings" - ], - "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-9TPjc" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", - "selected": false - }, - { - "source": "AstraDBSearch-41nRz", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}", - "target": "TextOutput-BDknO", - "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": "TextOutput-BDknO", - "inputTypes": [ - "Record", - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "Record" - ], - "dataType": "AstraDBSearch", - "id": "AstraDBSearch-41nRz" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": "reactflow__edge-AstraDBSearch-41nRz{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}-TextOutput-BDknO{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}" - } - ], - "viewport": { - "x": -259.6782520315529, - "y": 90.3428735006047, - "zoom": 0.2687057134854984 + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + 
"field_order": [ + "token", + "api_endpoint", + "collection_name", + "input_value", + "embedding" + ], + "beta": false + }, + "id": "AstraDBSearch-41nRz" + }, + "selected": false, + "width": 384, + "height": 713, + "dragging": false, + "positionAbsolute": { + "x": 1723.976434815103, + "y": 277.03317407245913 } - }, - "description": "Visit https://pre-release.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.", - "name": "Vector Store RAG", - "last_tested_version": "1.0.0a0", - "is_component": false + }, + { + "id": "AstraDB-eUCSS", + "type": "genericNode", + "position": { + "x": 3372.04958055989, + "y": 1611.0742035495277 + }, + "data": { + "type": "AstraDB", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "Embedding to use", + "load_from_db": false, + "title_case": false + }, + "inputs": { + "type": "Record", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "inputs", + "display_name": "Inputs", + "advanced": false, + "dynamic": false, + "info": "Optional list of records to be processed and stored in the vector store.", + "load_from_db": false, + "title_case": false + }, + 
"api_endpoint": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "api_endpoint", + "display_name": "API Endpoint", + "advanced": false, + "dynamic": false, + "info": "API endpoint URL for the Astra DB service.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "ASTRA_DB_API_ENDPOINT" + }, + "batch_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "batch_size", + "display_name": "Batch Size", + "advanced": true, + "dynamic": false, + "info": "Optional number of records to process in a single batch.", + "load_from_db": false, + "title_case": false + }, + "bulk_delete_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_delete_concurrency", + "display_name": "Bulk Delete Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk delete operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_batch_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_batch_concurrency", + "display_name": "Bulk Insert Batch Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_overwrite_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + 
"name": "bulk_insert_overwrite_concurrency", + "display_name": "Bulk Insert Overwrite Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n 
\"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: 
Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n 
collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "collection_indexing_policy": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_indexing_policy", + "display_name": "Collection Indexing Policy", + "advanced": true, + "dynamic": false, + "info": "Optional dictionary defining the indexing policy for the collection.", + "load_from_db": false, + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "langflow" + }, + "metadata_indexing_exclude": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_exclude", + "display_name": "Metadata Indexing Exclude", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "metadata_indexing_include": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_include", + "display_name": "Metadata Indexing 
Include", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to include in the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "metric": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metric", + "display_name": "Metric", + "advanced": true, + "dynamic": false, + "info": "Optional distance metric for vector comparisons in the vector store.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "namespace": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "namespace", + "display_name": "Namespace", + "advanced": true, + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "pre_delete_collection": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "pre_delete_collection", + "display_name": "Pre Delete Collection", + "advanced": true, + "dynamic": false, + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "load_from_db": false, + "title_case": false + }, + "setup_mode": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Async", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Sync", "Async", "Off"], + "name": "setup_mode", + "display_name": "Setup Mode", + "advanced": true, + "dynamic": false, + "info": "Configuration mode for setting up the vector store, with options like 
\u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "token": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "token", + "display_name": "Token", + "advanced": false, + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "ASTRA_DB_APPLICATION_TOKEN" + }, + "_type": "CustomComponent" + }, + "description": "Builds or loads an Astra DB Vector Store.", + "icon": "AstraDB", + "base_classes": ["VectorStore"], + "display_name": "Astra DB", + "documentation": "", + "custom_fields": { + "embedding": null, + "token": null, + "api_endpoint": null, + "collection_name": null, + "inputs": null, + "namespace": null, + "metric": null, + "batch_size": null, + "bulk_insert_batch_concurrency": null, + "bulk_insert_overwrite_concurrency": null, + "bulk_delete_concurrency": null, + "setup_mode": null, + "pre_delete_collection": null, + "metadata_indexing_include": null, + "metadata_indexing_exclude": null, + "collection_indexing_policy": null + }, + "output_types": ["VectorStore"], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "token", + "api_endpoint", + "collection_name", + "inputs", + "embedding" + ], + "beta": false + }, + "id": "AstraDB-eUCSS" + }, + "selected": false, + "width": 384, + "height": 573, + "positionAbsolute": { + "x": 3372.04958055989, + "y": 1611.0742035495277 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-9TPjc", + "type": "genericNode", + "position": { + "x": 2814.0402191223047, + "y": 1955.9268168273086 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": 
false, + "value": [], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": 
\"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: 
List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, 
+ "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": ["all"], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, 
+ "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_type", + 
"display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + 
"file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "skip_empty": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_enable", + "display_name": "TikToken Enable", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Generate embeddings using OpenAI models.", + "base_classes": ["Embeddings"], + "display_name": "OpenAI Embeddings", + "documentation": "", + "custom_fields": { + "openai_api_key": null, + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + "openai_api_base": null, + "openai_api_type": null, + "openai_api_version": null, + "openai_organization": null, 
+ "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null + }, + "output_types": ["Embeddings"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "OpenAIEmbeddings-9TPjc" + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": 2814.0402191223047, + "y": 1955.9268168273086 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "TextOutput-BDknO", + "target": "Prompt-xeI6K", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "id": "reactflow__edge-TextOutput-BDknO{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "context", + "id": "Prompt-xeI6K", + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["object", "Text", "str"], + "dataType": "TextOutput", + "id": "TextOutput-BDknO" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "ChatInput-yxMKE", + "target": "Prompt-xeI6K", + "sourceHandle": 
"{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "question", + "id": "Prompt-xeI6K", + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Text", "str", "object", "Record"], + "dataType": "ChatInput", + "id": "ChatInput-yxMKE" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "Prompt-xeI6K", + "target": "OpenAIModel-EjXlN", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "id": 
"reactflow__edge-Prompt-xeI6K{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}-OpenAIModel-EjXlN{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-EjXlN", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["object", "Text", "str"], + "dataType": "Prompt", + "id": "Prompt-xeI6K" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIModel-EjXlN", + "target": "ChatOutput-Q39I8", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "id": "reactflow__edge-OpenAIModel-EjXlN{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}-ChatOutput-Q39I8{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-Q39I8", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["object", "Text", "str"], + "dataType": "OpenAIModel", + "id": "OpenAIModel-EjXlN" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": 
"File-t0a6a", + "target": "RecursiveCharacterTextSplitter-tR9QM", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}", + "id": "reactflow__edge-File-t0a6a{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}-RecursiveCharacterTextSplitter-tR9QM{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "RecursiveCharacterTextSplitter-tR9QM", + "inputTypes": ["Document", "Record"], + "type": "Document" + }, + "sourceHandle": { + "baseClasses": ["Record"], + "dataType": "File", + "id": "File-t0a6a" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIEmbeddings-ZlOk1", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}", + "target": "AstraDBSearch-41nRz", + "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDBSearch-41nRz", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": ["Embeddings"], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-ZlOk1" + } + }, + "style": { + "stroke": "#555" + }, + "className": 
"stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}" + }, + { + "source": "ChatInput-yxMKE", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}", + "target": "AstraDBSearch-41nRz", + "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "AstraDBSearch-41nRz", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Text", "str", "object", "Record"], + "dataType": "ChatInput", + "id": "ChatInput-yxMKE" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}" + }, + { + "source": "RecursiveCharacterTextSplitter-tR9QM", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}", + "target": "AstraDB-eUCSS", + "targetHandle": 
"{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "AstraDB-eUCSS", + "inputTypes": null, + "type": "Record" + }, + "sourceHandle": { + "baseClasses": ["Record"], + "dataType": "RecursiveCharacterTextSplitter", + "id": "RecursiveCharacterTextSplitter-tR9QM" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}", + "selected": false + }, + { + "source": "OpenAIEmbeddings-9TPjc", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}", + "target": "AstraDB-eUCSS", + "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDB-eUCSS", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": ["Embeddings"], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-9TPjc" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": 
"reactflow__edge-OpenAIEmbeddings-9TPjc{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", + "selected": false + }, + { + "source": "AstraDBSearch-41nRz", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}", + "target": "TextOutput-BDknO", + "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "TextOutput-BDknO", + "inputTypes": ["Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Record"], + "dataType": "AstraDBSearch", + "id": "AstraDBSearch-41nRz" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-AstraDBSearch-41nRz{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}-TextOutput-BDknO{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}" + } + ], + "viewport": { + "x": -259.6782520315529, + "y": 90.3428735006047, + "zoom": 0.2687057134854984 + } + }, + "description": "Visit https://pre-release.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. 
You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.", + "name": "Vector Store RAG", + "last_tested_version": "1.0.0a0", + "is_component": false } diff --git a/docs/static/json_files/Notion_Components_bundle.json b/docs/static/json_files/Notion_Components_bundle.json index 2fe1ab378..5e632ad9c 100644 --- a/docs/static/json_files/Notion_Components_bundle.json +++ b/docs/static/json_files/Notion_Components_bundle.json @@ -1,1002 +1,881 @@ { - "id": "7cd51434-9767-450f-8742-27857367f8c2", - "data": { - "nodes": [ - { - "id": "RecordsToText-Q69g5", - "type": "genericNode", - "position": { - "x": -2671.5528488127866, - "y": -963.4266471378126 - }, - "data": { - "type": "RecordsToText", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "import requests\r\nfrom typing import List\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionUserList(CustomComponent):\r\n display_name = \"List Users [Notion]\"\r\n description = \"Retrieve users from Notion.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-users\"\r\n icon = \"NotionDirectoryLoader\"\r\n \r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n ) -> List[Record]:\r\n url = \"https://api.notion.com/v1/users\"\r\n headers = {\r\n \"Authorization\": 
f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n response = requests.get(url, headers=headers)\r\n response.raise_for_status()\r\n\r\n data = response.json()\r\n results = data['results']\r\n\r\n records = []\r\n for user in results:\r\n id = user['id']\r\n type = user['type']\r\n name = user.get('name', '')\r\n avatar_url = user.get('avatar_url', '')\r\n\r\n record_data = {\r\n \"id\": id,\r\n \"type\": type,\r\n \"name\": name,\r\n \"avatar_url\": avatar_url,\r\n }\r\n\r\n output = \"User:\\n\"\r\n for key, value in record_data.items():\r\n output += f\"{key.replace('_', ' ').title()}: {value}\\n\"\r\n output += \"________________________\\n\"\r\n\r\n record = Record(text=output, data=record_data)\r\n records.append(record)\r\n\r\n self.status = \"\\n\".join(record.text for record in records)\r\n return records", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "notion_secret": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "notion_secret", - "display_name": "Notion Secret", - "advanced": false, - "dynamic": false, - "info": "The Notion integration token.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - "_type": "CustomComponent" - }, - "description": "Retrieve users from Notion.", - "icon": "NotionDirectoryLoader", - "base_classes": [ - "Record" - ], - "display_name": "List Users [Notion] ", - "documentation": "https://docs.langflow.org/integrations/notion/list-users", - "custom_fields": { - "notion_secret": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "RecordsToText-Q69g5", - "description": "Retrieve users from 
Notion.", - "display_name": "List Users [Notion] " - }, - "selected": false, - "width": 384, - "height": 289, - "dragging": false, - "positionAbsolute": { - "x": -2671.5528488127866, - "y": -963.4266471378126 - } + "id": "7cd51434-9767-450f-8742-27857367f8c2", + "data": { + "nodes": [ + { + "id": "RecordsToText-Q69g5", + "type": "genericNode", + "position": { "x": -2671.5528488127866, "y": -963.4266471378126 }, + "data": { + "type": "RecordsToText", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nfrom typing import List\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionUserList(CustomComponent):\r\n display_name = \"List Users [Notion]\"\r\n description = \"Retrieve users from Notion.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-users\"\r\n icon = \"NotionDirectoryLoader\"\r\n \r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n ) -> List[Record]:\r\n url = \"https://api.notion.com/v1/users\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n response = requests.get(url, headers=headers)\r\n response.raise_for_status()\r\n\r\n data = response.json()\r\n results = data['results']\r\n\r\n records = []\r\n for user in results:\r\n id = user['id']\r\n type = user['type']\r\n name = user.get('name', '')\r\n avatar_url = user.get('avatar_url', '')\r\n\r\n record_data = {\r\n \"id\": id,\r\n \"type\": type,\r\n \"name\": name,\r\n \"avatar_url\": avatar_url,\r\n }\r\n\r\n output = \"User:\\n\"\r\n for key, value in record_data.items():\r\n output += 
f\"{key.replace('_', ' ').title()}: {value}\\n\"\r\n output += \"________________________\\n\"\r\n\r\n record = Record(text=output, data=record_data)\r\n records.append(record)\r\n\r\n self.status = \"\\n\".join(record.text for record in records)\r\n return records", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "_type": "CustomComponent" }, - { - "id": "CustomComponent-PU0K5", - "type": "genericNode", - "position": { - "x": -3077.2269116193215, - "y": -960.9450220159636 - }, - "data": { - "type": "CustomComponent", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "import json\r\nfrom typing import Optional\r\n\r\nimport requests\r\nfrom langflow.custom import CustomComponent\r\n\r\n\r\nclass NotionPageCreator(CustomComponent):\r\n display_name = \"Create Page [Notion]\"\r\n description = \"A component for creating Notion pages.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-create\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion database.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n 
\"password\": True,\r\n },\r\n \"properties\": {\r\n \"display_name\": \"Properties\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The properties of the new page. Depending on your database setup, this can change. E.G: {'Task name': {'id': 'title', 'type': 'title', 'title': [{'type': 'text', 'text': {'content': 'Send Notion Components to LF', 'link': null}}]}}\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n database_id: str,\r\n notion_secret: str,\r\n properties: str = '{\"Task name\": {\"id\": \"title\", \"type\": \"title\", \"title\": [{\"type\": \"text\", \"text\": {\"content\": \"Send Notion Components to LF\", \"link\": null}}]}}',\r\n ) -> str:\r\n if not database_id or not properties:\r\n raise ValueError(\"Invalid input. Please provide 'database_id' and 'properties'.\")\r\n\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"parent\": {\"database_id\": database_id},\r\n \"properties\": json.loads(properties),\r\n }\r\n\r\n response = requests.post(\"https://api.notion.com/v1/pages\", headers=headers, json=data)\r\n\r\n if response.status_code == 200:\r\n page_id = response.json()[\"id\"]\r\n self.status = f\"Successfully created Notion page with ID: {page_id}\\n {str(response.json())}\"\r\n return response.json()\r\n else:\r\n error_message = f\"Failed to create Notion page. 
Status code: {response.status_code}, Error: {response.text}\"\r\n self.status = error_message\r\n raise Exception(error_message)", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "database_id": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "database_id", - "display_name": "Database ID", - "advanced": false, - "dynamic": false, - "info": "The ID of the Notion database.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "notion_secret": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "notion_secret", - "display_name": "Notion Secret", - "advanced": false, - "dynamic": false, - "info": "The Notion integration token.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - "properties": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "{\"Task name\": {\"id\": \"title\", \"type\": \"title\", \"title\": [{\"type\": \"text\", \"text\": {\"content\": \"Send Notion Components to LF\", \"link\": null}}]}}", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "properties", - "display_name": "Properties", - "advanced": false, - "dynamic": false, - "info": "The properties of the new page. Depending on your database setup, this can change. 
E.G: {'Task name': {'id': 'title', 'type': 'title', 'title': [{'type': 'text', 'text': {'content': 'Send Notion Components to LF', 'link': null}}]}}", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "A component for creating Notion pages.", - "icon": "NotionDirectoryLoader", - "base_classes": [ - "object", - "str", - "Text" - ], - "display_name": "Create Page [Notion] ", - "documentation": "https://docs.langflow.org/integrations/notion/page-create", - "custom_fields": { - "database_id": null, - "notion_secret": null, - "properties": null - }, - "output_types": [ - "Text" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "CustomComponent-PU0K5", - "description": "A component for creating Notion pages.", - "display_name": "Create Page [Notion] " - }, - "selected": false, - "width": 384, - "height": 477, - "positionAbsolute": { - "x": -3077.2269116193215, - "y": -960.9450220159636 - }, - "dragging": false - }, - { - "id": "CustomComponent-YODla", - "type": "genericNode", - "position": { - "x": -3485.297183150799, - "y": -362.8525892356713 - }, - "data": { - "type": "CustomComponent", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "import requests\r\nfrom typing import Dict\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionDatabaseProperties(CustomComponent):\r\n display_name = \"List Database Properties [Notion]\"\r\n description = \"Retrieve properties of a Notion database.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-database-properties\"\r\n icon = \"NotionDirectoryLoader\"\r\n \r\n def build_config(self):\r\n return {\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": 
\"The ID of the Notion database.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n database_id: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n url = f\"https://api.notion.com/v1/databases/{database_id}\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n response = requests.get(url, headers=headers)\r\n response.raise_for_status()\r\n\r\n data = response.json()\r\n properties = data.get(\"properties\", {})\r\n\r\n record = Record(text=str(response.json()), data=properties)\r\n self.status = f\"Retrieved {len(properties)} properties from the Notion database.\\n {record.text}\"\r\n return record", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "database_id": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "database_id", - "display_name": "Database ID", - "advanced": false, - "dynamic": false, - "info": "The ID of the Notion database.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "NOTION_NMSTX_DB_ID" - }, - "notion_secret": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "notion_secret", - "display_name": "Notion Secret", - "advanced": false, - "dynamic": false, - "info": "The Notion integration token.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - "_type": "CustomComponent" - }, - 
"description": "Retrieve properties of a Notion database.", - "icon": "NotionDirectoryLoader", - "base_classes": [ - "Record" - ], - "display_name": "List Database Properties [Notion] ", - "documentation": "https://docs.langflow.org/integrations/notion/list-database-properties", - "custom_fields": { - "database_id": null, - "notion_secret": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "CustomComponent-YODla", - "description": "Retrieve properties of a Notion database.", - "display_name": "List Database Properties [Notion] " - }, - "selected": true, - "width": 384, - "height": 383, - "dragging": false, - "positionAbsolute": { - "x": -3485.297183150799, - "y": -362.8525892356713 - } - }, - { - "id": "CustomComponent-wHlSz", - "type": "genericNode", - "position": { - "x": -2668.7714642455403, - "y": -657.2376228212606 - }, - "data": { - "type": "CustomComponent", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "import json\r\nimport requests\r\nfrom typing import Dict, Any\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionPageUpdate(CustomComponent):\r\n display_name = \"Update Page Property [Notion]\"\r\n description = \"Update the properties of a Notion page.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-update\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"page_id\": {\r\n \"display_name\": \"Page ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion page to update.\",\r\n },\r\n \"properties\": {\r\n \"display_name\": \"Properties\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The properties to update on the page (as a JSON string).\",\r\n \"multiline\": True,\r\n },\r\n \"notion_secret\": {\r\n 
\"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n page_id: str,\r\n properties: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n url = f\"https://api.notion.com/v1/pages/{page_id}\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n try:\r\n parsed_properties = json.loads(properties)\r\n except json.JSONDecodeError as e:\r\n raise ValueError(\"Invalid JSON format for properties\") from e\r\n\r\n data = {\r\n \"properties\": parsed_properties\r\n }\r\n\r\n response = requests.patch(url, headers=headers, json=data)\r\n response.raise_for_status()\r\n\r\n updated_page = response.json()\r\n\r\n output = \"Updated page properties:\\n\"\r\n for prop_name, prop_value in updated_page[\"properties\"].items():\r\n output += f\"{prop_name}: {prop_value}\\n\"\r\n\r\n self.status = output\r\n return Record(data=updated_page)", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "notion_secret": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "notion_secret", - "display_name": "Notion Secret", - "advanced": false, - "dynamic": false, - "info": "The Notion integration token.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - "page_id": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "page_id", - "display_name": "Page ID", - "advanced": false, - 
"dynamic": false, - "info": "The ID of the Notion page to update.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "properties": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "properties", - "display_name": "Properties", - "advanced": false, - "dynamic": false, - "info": "The properties to update on the page (as a JSON string).", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "{ \"title\": [ { \"text\": { \"content\": \"Test Page\" } } ] }" - }, - "_type": "CustomComponent" - }, - "description": "Update the properties of a Notion page.", - "icon": "NotionDirectoryLoader", - "base_classes": [ - "Record" - ], - "display_name": "Update Page Property [Notion]", - "documentation": "https://docs.langflow.org/integrations/notion/page-update", - "custom_fields": { - "page_id": null, - "properties": null, - "notion_secret": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "CustomComponent-wHlSz", - "description": "Update the properties of a Notion page.", - "display_name": "Update Page Property [Notion]" - }, - "selected": false, - "width": 384, - "height": 477, - "dragging": false, - "positionAbsolute": { - "x": -2668.7714642455403, - "y": -657.2376228212606 - } - }, - { - "id": "CustomComponent-oelYw", - "type": "genericNode", - "position": { - "x": -2253.1007124701327, - "y": -448.47240118604134 - }, - "data": { - "type": "CustomComponent", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "import requests\r\nfrom typing import Dict, Any\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass 
NotionPageContent(CustomComponent):\r\n display_name = \"Page Content Viewer [Notion]\"\r\n description = \"Retrieve the content of a Notion page as plain text.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-content-viewer\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"page_id\": {\r\n \"display_name\": \"Page ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion page to retrieve.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n page_id: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n blocks_url = f\"https://api.notion.com/v1/blocks/{page_id}/children?page_size=100\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n # Retrieve the child blocks\r\n blocks_response = requests.get(blocks_url, headers=headers)\r\n blocks_response.raise_for_status()\r\n blocks_data = blocks_response.json()\r\n\r\n # Parse the blocks and extract the content as plain text\r\n content = self.parse_blocks(blocks_data[\"results\"])\r\n\r\n self.status = content\r\n return Record(data={\"content\": content}, text=content)\r\n\r\n def parse_blocks(self, blocks: list) -> str:\r\n content = \"\"\r\n for block in blocks:\r\n block_type = block[\"type\"]\r\n if block_type in [\"paragraph\", \"heading_1\", \"heading_2\", \"heading_3\", \"quote\"]:\r\n content += self.parse_rich_text(block[block_type][\"rich_text\"]) + \"\\n\\n\"\r\n elif block_type in [\"bulleted_list_item\", \"numbered_list_item\"]:\r\n content += self.parse_rich_text(block[block_type][\"rich_text\"]) + \"\\n\"\r\n elif block_type == \"to_do\":\r\n content += self.parse_rich_text(block[\"to_do\"][\"rich_text\"]) + \"\\n\"\r\n elif block_type == \"code\":\r\n 
content += self.parse_rich_text(block[\"code\"][\"rich_text\"]) + \"\\n\\n\"\r\n elif block_type == \"image\":\r\n content += f\"[Image: {block['image']['external']['url']}]\\n\\n\"\r\n elif block_type == \"divider\":\r\n content += \"---\\n\\n\"\r\n return content.strip()\r\n\r\n def parse_rich_text(self, rich_text: list) -> str:\r\n text = \"\"\r\n for segment in rich_text:\r\n text += segment[\"plain_text\"]\r\n return text", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "notion_secret": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "notion_secret", - "display_name": "Notion Secret", - "advanced": false, - "dynamic": false, - "info": "The Notion integration token.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - "page_id": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "page_id", - "display_name": "Page ID", - "advanced": false, - "dynamic": false, - "info": "The ID of the Notion page to retrieve.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Retrieve the content of a Notion page as plain text.", - "icon": "NotionDirectoryLoader", - "base_classes": [ - "Record" - ], - "display_name": "Page Content Viewer [Notion] ", - "documentation": "https://docs.langflow.org/integrations/notion/page-content-viewer", - "custom_fields": { - "page_id": null, - "notion_secret": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": 
"CustomComponent-oelYw", - "description": "Retrieve the content of a Notion page as plain text.", - "display_name": "Page Content Viewer [Notion] " - }, - "selected": false, - "width": 384, - "height": 383, - "positionAbsolute": { - "x": -2253.1007124701327, - "y": -448.47240118604134 - }, - "dragging": false - }, - { - "id": "CustomComponent-Pn52w", - "type": "genericNode", - "position": { - "x": -3070.9222948695096, - "y": -472.4537855763852 - }, - "data": { - "type": "CustomComponent", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "import requests\r\nimport json\r\nfrom typing import Dict, Any, List\r\nfrom langflow.custom import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass NotionListPages(CustomComponent):\r\n display_name = \"List Pages [Notion]\"\r\n description = (\r\n \"Query a Notion database with filtering and sorting. \"\r\n \"The input should be a JSON string containing the 'filter' and 'sorts' objects. 
\"\r\n \"Example input:\\n\"\r\n '{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}'\r\n )\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-pages\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n field_order = [\r\n \"notion_secret\",\r\n \"database_id\",\r\n \"query_payload\",\r\n ]\r\n\r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion database to query.\",\r\n },\r\n \"query_payload\": {\r\n \"display_name\": \"Database query\",\r\n \"field_type\": \"str\",\r\n \"info\": \"A JSON string containing the filters that will be used for querying the database. EG: {'filter': {'property': 'Status', 'status': {'equals': 'In progress'}}}\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n database_id: str,\r\n query_payload: str = \"{}\",\r\n ) -> List[Record]:\r\n try:\r\n query_data = json.loads(query_payload)\r\n filter_obj = query_data.get(\"filter\")\r\n sorts = query_data.get(\"sorts\", [])\r\n\r\n url = f\"https://api.notion.com/v1/databases/{database_id}/query\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"sorts\": sorts,\r\n }\r\n\r\n if filter_obj:\r\n data[\"filter\"] = filter_obj\r\n\r\n response = requests.post(url, headers=headers, json=data)\r\n response.raise_for_status()\r\n\r\n results = response.json()\r\n records = []\r\n combined_text = f\"Pages found: {len(results['results'])}\\n\\n\"\r\n for page in results['results']:\r\n page_data = {\r\n 'id': page['id'],\r\n 'url': 
page['url'],\r\n 'created_time': page['created_time'],\r\n 'last_edited_time': page['last_edited_time'],\r\n 'properties': page['properties'],\r\n }\r\n\r\n text = (\r\n f\"id: {page['id']}\\n\"\r\n f\"url: {page['url']}\\n\"\r\n f\"created_time: {page['created_time']}\\n\"\r\n f\"last_edited_time: {page['last_edited_time']}\\n\"\r\n f\"properties: {json.dumps(page['properties'], indent=2)}\\n\\n\"\r\n )\r\n\r\n combined_text += text\r\n records.append(Record(text=text, data=page_data))\r\n \r\n self.status = combined_text.strip()\r\n return records\r\n\r\n except Exception as e:\r\n self.status = f\"An error occurred: {str(e)}\"\r\n return [Record(text=self.status, data=[])]", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "database_id": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "database_id", - "display_name": "Database ID", - "advanced": false, - "dynamic": false, - "info": "The ID of the Notion database to query.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "NOTION_NMSTX_DB_ID" - }, - "notion_secret": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "notion_secret", - "display_name": "Notion Secret", - "advanced": false, - "dynamic": false, - "info": "The Notion integration token.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - "query_payload": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": 
"query_payload", - "display_name": "Database query", - "advanced": false, - "dynamic": false, - "info": "A JSON string containing the filters that will be used for querying the database. EG: {'filter': {'property': 'Status', 'status': {'equals': 'In progress'}}}", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Query a Notion database with filtering and sorting. The input should be a JSON string containing the 'filter' and 'sorts' objects. Example input:\n{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}", - "icon": "NotionDirectoryLoader", - "base_classes": [ - "Record" - ], - "display_name": "List Pages [Notion] ", - "documentation": "https://docs.langflow.org/integrations/notion/list-pages", - "custom_fields": { - "notion_secret": null, - "database_id": null, - "query_payload": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [ - "notion_secret", - "database_id", - "query_payload" - ], - "beta": false - }, - "id": "CustomComponent-Pn52w", - "description": "Query a Notion database with filtering and sorting. The input should be a JSON string containing the 'filter' and 'sorts' objects. 
Example input:\n{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}", - "display_name": "List Pages [Notion] " - }, - "selected": false, - "width": 384, - "height": 517, - "positionAbsolute": { - "x": -3070.9222948695096, - "y": -472.4537855763852 - }, - "dragging": false - }, - { - "id": "CustomComponent-I8Dec", - "type": "genericNode", - "position": { - "x": -2256.686402636563, - "y": -963.4541117792749 - }, - "data": { - "type": "CustomComponent", - "node": { - "template": { - "block_id": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "block_id", - "display_name": "Page/Block ID", - "advanced": false, - "dynamic": false, - "info": "The ID of the page/block to add the content.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "import json\r\nfrom typing import List, Dict, Any\r\nfrom markdown import markdown\r\nfrom bs4 import BeautifulSoup\r\nimport requests\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass AddContentToPage(CustomComponent):\r\n display_name = \"Add Content to Page [Notion]\"\r\n description = \"Convert markdown text to Notion blocks and append them to a Notion page.\"\r\n documentation: str = \"https://developers.notion.com/reference/patch-block-children\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"markdown_text\": {\r\n \"display_name\": \"Markdown Text\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The markdown text to convert to Notion blocks.\",\r\n \"multiline\": True,\r\n },\r\n \"block_id\": {\r\n \"display_name\": \"Page/Block ID\",\r\n 
\"field_type\": \"str\",\r\n \"info\": \"The ID of the page/block to add the content.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(self, markdown_text: str, block_id: str, notion_secret: str) -> Record:\r\n html_text = markdown(markdown_text)\r\n soup = BeautifulSoup(html_text, 'html.parser')\r\n blocks = self.process_node(soup)\r\n\r\n url = f\"https://api.notion.com/v1/blocks/{block_id}/children\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"children\": blocks,\r\n }\r\n\r\n response = requests.patch(url, headers=headers, json=data)\r\n self.status = str(response.json())\r\n response.raise_for_status()\r\n\r\n result = response.json()\r\n self.status = f\"Appended {len(blocks)} blocks to page with ID: {block_id}\"\r\n return Record(data=result, text=json.dumps(result))\r\n\r\n def process_node(self, node):\r\n blocks = []\r\n if isinstance(node, str):\r\n text = node.strip()\r\n if text:\r\n if text.startswith('#'):\r\n heading_level = text.count('#', 0, 6)\r\n heading_text = text[heading_level:].strip()\r\n if heading_level == 1:\r\n blocks.append(self.create_block('heading_1', heading_text))\r\n elif heading_level == 2:\r\n blocks.append(self.create_block('heading_2', heading_text))\r\n elif heading_level == 3:\r\n blocks.append(self.create_block('heading_3', heading_text))\r\n else:\r\n blocks.append(self.create_block('paragraph', text))\r\n elif node.name == 'h1':\r\n blocks.append(self.create_block('heading_1', node.get_text(strip=True)))\r\n elif node.name == 'h2':\r\n blocks.append(self.create_block('heading_2', node.get_text(strip=True)))\r\n elif node.name == 'h3':\r\n blocks.append(self.create_block('heading_3', node.get_text(strip=True)))\r\n elif node.name 
== 'p':\r\n code_node = node.find('code')\r\n if code_node:\r\n code_text = code_node.get_text()\r\n language, code = self.extract_language_and_code(code_text)\r\n blocks.append(self.create_block('code', code, language=language))\r\n elif self.is_table(str(node)):\r\n blocks.extend(self.process_table(node))\r\n else:\r\n blocks.append(self.create_block('paragraph', node.get_text(strip=True)))\r\n elif node.name == 'ul':\r\n blocks.extend(self.process_list(node, 'bulleted_list_item'))\r\n elif node.name == 'ol':\r\n blocks.extend(self.process_list(node, 'numbered_list_item'))\r\n elif node.name == 'blockquote':\r\n blocks.append(self.create_block('quote', node.get_text(strip=True)))\r\n elif node.name == 'hr':\r\n blocks.append(self.create_block('divider', ''))\r\n elif node.name == 'img':\r\n blocks.append(self.create_block('image', '', image_url=node.get('src')))\r\n elif node.name == 'a':\r\n blocks.append(self.create_block('bookmark', node.get_text(strip=True), link_url=node.get('href')))\r\n elif node.name == 'table':\r\n blocks.extend(self.process_table(node))\r\n\r\n for child in node.children:\r\n if isinstance(child, str):\r\n continue\r\n blocks.extend(self.process_node(child))\r\n\r\n return blocks\r\n\r\n def extract_language_and_code(self, code_text):\r\n lines = code_text.split('\\n')\r\n language = lines[0].strip()\r\n code = '\\n'.join(lines[1:]).strip()\r\n return language, code\r\n\r\n def is_code_block(self, text):\r\n return text.startswith('```')\r\n\r\n def extract_code_block(self, text):\r\n lines = text.split('\\n')\r\n language = lines[0].strip('`').strip()\r\n code = '\\n'.join(lines[1:]).strip('`').strip()\r\n return language, code\r\n \r\n def is_table(self, text):\r\n rows = text.split('\\n')\r\n if len(rows) < 2:\r\n return False\r\n\r\n has_separator = False\r\n for i, row in enumerate(rows):\r\n if '|' in row:\r\n cells = [cell.strip() for cell in row.split('|')]\r\n cells = [cell for cell in cells if cell] # Remove empty cells\r\n if 
i == 1 and all(set(cell) <= set('-|') for cell in cells):\r\n has_separator = True\r\n elif not cells:\r\n return False\r\n\r\n return has_separator and len(rows) >= 3\r\n\r\n def process_list(self, node, list_type):\r\n blocks = []\r\n for item in node.find_all('li'):\r\n item_text = item.get_text(strip=True)\r\n checked = item_text.startswith('[x]')\r\n is_checklist = item_text.startswith('[ ]') or checked\r\n\r\n if is_checklist:\r\n item_text = item_text.replace('[x]', '').replace('[ ]', '').strip()\r\n blocks.append(self.create_block('to_do', item_text, checked=checked))\r\n else:\r\n blocks.append(self.create_block(list_type, item_text))\r\n return blocks\r\n\r\n def process_table(self, node):\r\n blocks = []\r\n header_row = node.find('thead').find('tr') if node.find('thead') else None\r\n body_rows = node.find('tbody').find_all('tr') if node.find('tbody') else []\r\n\r\n if header_row or body_rows:\r\n table_width = max(len(header_row.find_all(['th', 'td'])) if header_row else 0,\r\n max(len(row.find_all(['th', 'td'])) for row in body_rows))\r\n\r\n table_block = self.create_block('table', '', table_width=table_width, has_column_header=bool(header_row))\r\n blocks.append(table_block)\r\n\r\n if header_row:\r\n header_cells = [cell.get_text(strip=True) for cell in header_row.find_all(['th', 'td'])]\r\n header_row_block = self.create_block('table_row', header_cells)\r\n blocks.append(header_row_block)\r\n\r\n for row in body_rows:\r\n cells = [cell.get_text(strip=True) for cell in row.find_all(['th', 'td'])]\r\n row_block = self.create_block('table_row', cells)\r\n blocks.append(row_block)\r\n\r\n return blocks\r\n \r\n def create_block(self, block_type: str, content: str, **kwargs) -> Dict[str, Any]:\r\n block = {\r\n \"object\": \"block\",\r\n \"type\": block_type,\r\n block_type: {},\r\n }\r\n\r\n if block_type in [\"paragraph\", \"heading_1\", \"heading_2\", \"heading_3\", \"bulleted_list_item\", \"numbered_list_item\", \"quote\"]:\r\n 
block[block_type][\"rich_text\"] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n elif block_type == 'to_do':\r\n block[block_type][\"rich_text\"] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n block[block_type]['checked'] = kwargs.get('checked', False)\r\n elif block_type == 'code':\r\n block[block_type]['rich_text'] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n block[block_type]['language'] = kwargs.get('language', 'plain text')\r\n elif block_type == 'image':\r\n block[block_type] = {\r\n \"type\": \"external\",\r\n \"external\": {\r\n \"url\": kwargs.get('image_url', '')\r\n }\r\n }\r\n elif block_type == 'divider':\r\n pass\r\n elif block_type == 'bookmark':\r\n block[block_type]['url'] = kwargs.get('link_url', '')\r\n elif block_type == 'table':\r\n block[block_type]['table_width'] = kwargs.get('table_width', 0)\r\n block[block_type]['has_column_header'] = kwargs.get('has_column_header', False)\r\n block[block_type]['has_row_header'] = kwargs.get('has_row_header', False)\r\n elif block_type == 'table_row':\r\n block[block_type]['cells'] = [[{'type': 'text', 'text': {'content': cell}} for cell in content]]\r\n\r\n return block", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "markdown_text": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "markdown_text", - "display_name": "Markdown Text", - "advanced": false, - "dynamic": false, - "info": "The markdown text to convert to Notion blocks.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "# Heading 1\n\n## Heading 2\n\n### Heading 3\n\nThis is a 
regular paragraph.\n\nHere's another paragraph with an image:\n![Image](https://example.com/image.jpg)\n\n## Checklist\n- [x] Completed task\n- [ ] Incomplete task\n- [x] Another completed task\n\n## Numbered List\n1. First item\n2. Second item\n3. Third item\n\n## Bulleted List\n- Item 1\n- Item 2\n- Item 3\n\n## Code Block\n```python\ndef hello_world():\n print(\"Hello, World!\")\n```\n\n## Quote\n> This is a blockquote.\n> It can span multiple lines.\n\n## Horizontal Rule\n---\n\n\n## Link\n[Notion API Documentation](https://developers.notion.com)\n\n" - }, - "notion_secret": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "notion_secret", - "display_name": "Notion Secret", - "advanced": false, - "dynamic": false, - "info": "The Notion integration token.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - "_type": "CustomComponent" - }, - "description": "Convert markdown text to Notion blocks and append them to a Notion page.", - "icon": "NotionDirectoryLoader", - "base_classes": [ - "Record" - ], - "display_name": "Add Content to Page [Notion] ", - "documentation": "https://developers.notion.com/reference/patch-block-children", - "custom_fields": { - "markdown_text": null, - "block_id": null, - "notion_secret": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false, - "official": false - }, - "id": "CustomComponent-I8Dec" - }, - "selected": false, - "width": 384, - "height": 497, - "positionAbsolute": { - "x": -2256.686402636563, - "y": -963.4541117792749 - }, - "dragging": false - }, - { - "id": "CustomComponent-ZcsA9", - "type": "genericNode", - "position": { - "x": -3488.029350341937, - "y": -965.3756250644985 - }, - "data": { - "type": "CustomComponent", - "node": { - "template": { - "code": { - "type": 
"code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "import requests\r\nfrom typing import Dict, Any, List\r\nfrom langflow.custom import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass NotionSearch(CustomComponent):\r\n display_name = \"Search Notion\"\r\n description = (\r\n \"Searches all pages and databases that have been shared with an integration.\"\r\n )\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/search\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n field_order = [\r\n \"notion_secret\",\r\n \"query\",\r\n \"filter_value\",\r\n \"sort_direction\",\r\n ]\r\n\r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n \"query\": {\r\n \"display_name\": \"Search Query\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The text that the API compares page and database titles against.\",\r\n },\r\n \"filter_value\": {\r\n \"display_name\": \"Filter Type\",\r\n \"field_type\": \"str\",\r\n \"info\": \"Limits the results to either only pages or only databases.\",\r\n \"options\": [\"page\", \"database\"],\r\n \"default_value\": \"page\",\r\n },\r\n \"sort_direction\": {\r\n \"display_name\": \"Sort Direction\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The direction to sort the results.\",\r\n \"options\": [\"ascending\", \"descending\"],\r\n \"default_value\": \"descending\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n query: str = \"\",\r\n filter_value: str = \"page\",\r\n sort_direction: str = \"descending\",\r\n ) -> List[Record]:\r\n try:\r\n url = \"https://api.notion.com/v1/search\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n 
\"query\": query,\r\n \"filter\": {\r\n \"value\": filter_value,\r\n \"property\": \"object\"\r\n },\r\n \"sort\":{\r\n \"direction\": sort_direction,\r\n \"timestamp\": \"last_edited_time\"\r\n }\r\n }\r\n\r\n response = requests.post(url, headers=headers, json=data)\r\n response.raise_for_status()\r\n\r\n results = response.json()\r\n records = []\r\n combined_text = f\"Results found: {len(results['results'])}\\n\\n\"\r\n for result in results['results']:\r\n result_data = {\r\n 'id': result['id'],\r\n 'type': result['object'],\r\n 'last_edited_time': result['last_edited_time'],\r\n }\r\n \r\n if result['object'] == 'page':\r\n result_data['title_or_url'] = result['url']\r\n text = f\"id: {result['id']}\\ntitle_or_url: {result['url']}\\n\"\r\n elif result['object'] == 'database':\r\n if 'title' in result and isinstance(result['title'], list) and len(result['title']) > 0:\r\n result_data['title_or_url'] = result['title'][0]['plain_text']\r\n text = f\"id: {result['id']}\\ntitle_or_url: {result['title'][0]['plain_text']}\\n\"\r\n else:\r\n result_data['title_or_url'] = \"N/A\"\r\n text = f\"id: {result['id']}\\ntitle_or_url: N/A\\n\"\r\n\r\n text += f\"type: {result['object']}\\nlast_edited_time: {result['last_edited_time']}\\n\\n\"\r\n combined_text += text\r\n records.append(Record(text=text, data=result_data))\r\n \r\n self.status = combined_text\r\n return records\r\n\r\n except Exception as e:\r\n self.status = f\"An error occurred: {str(e)}\"\r\n return [Record(text=self.status, data=[])]", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "filter_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "database", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "page", - "database" - ], - "name": "filter_value", - 
"display_name": "Filter Type", - "advanced": false, - "dynamic": false, - "info": "Limits the results to either only pages or only databases.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "notion_secret": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "notion_secret", - "display_name": "Notion Secret", - "advanced": false, - "dynamic": false, - "info": "The Notion integration token.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "" - }, - "query": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "query", - "display_name": "Search Query", - "advanced": false, - "dynamic": false, - "info": "The text that the API compares page and database titles against.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "sort_direction": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "descending", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "ascending", - "descending" - ], - "name": "sort_direction", - "display_name": "Sort Direction", - "advanced": false, - "dynamic": false, - "info": "The direction to sort the results.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Searches all pages and databases that have been shared with an integration.", - "icon": "NotionDirectoryLoader", - "base_classes": [ - "Record" - ], - "display_name": "Search [Notion]", - "documentation": "https://docs.langflow.org/integrations/notion/search", - "custom_fields": { - "notion_secret": null, - 
"query": null, - "filter_value": null, - "sort_direction": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [ - "notion_secret", - "query", - "filter_value", - "sort_direction" - ], - "beta": false - }, - "id": "CustomComponent-ZcsA9", - "description": "Searches all pages and databases that have been shared with an integration.", - "display_name": "Search [Notion]" - }, - "selected": false, - "width": 384, - "height": 591, - "positionAbsolute": { - "x": -3488.029350341937, - "y": -965.3756250644985 - }, - "dragging": false - } - ], - "edges": [], - "viewport": { - "x": 2623.378922967084, - "y": 696.8541079344027, - "zoom": 0.5981384177708997 + "description": "Retrieve users from Notion.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "List Users [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/list-users", + "custom_fields": { "notion_secret": null }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "RecordsToText-Q69g5", + "description": "Retrieve users from Notion.", + "display_name": "List Users [Notion] " + }, + "selected": false, + "width": 384, + "height": 289, + "dragging": false, + "positionAbsolute": { + "x": -2671.5528488127866, + "y": -963.4266471378126 } - }, - "description": "A Bundle containing Notion components for Page and Database manipulation. 
You can list pages, users databases, update properties, create new pages and add content to Notion Pages.", - "name": "Notion - Components", - "last_tested_version": "1.0.0a36", - "is_component": false + }, + { + "id": "CustomComponent-PU0K5", + "type": "genericNode", + "position": { "x": -3077.2269116193215, "y": -960.9450220159636 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import json\r\nfrom typing import Optional\r\n\r\nimport requests\r\nfrom langflow.custom import CustomComponent\r\n\r\n\r\nclass NotionPageCreator(CustomComponent):\r\n display_name = \"Create Page [Notion]\"\r\n description = \"A component for creating Notion pages.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-create\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion database.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n \"properties\": {\r\n \"display_name\": \"Properties\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The properties of the new page. Depending on your database setup, this can change. 
E.G: {'Task name': {'id': 'title', 'type': 'title', 'title': [{'type': 'text', 'text': {'content': 'Send Notion Components to LF', 'link': null}}]}}\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n database_id: str,\r\n notion_secret: str,\r\n properties: str = '{\"Task name\": {\"id\": \"title\", \"type\": \"title\", \"title\": [{\"type\": \"text\", \"text\": {\"content\": \"Send Notion Components to LF\", \"link\": null}}]}}',\r\n ) -> str:\r\n if not database_id or not properties:\r\n raise ValueError(\"Invalid input. Please provide 'database_id' and 'properties'.\")\r\n\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"parent\": {\"database_id\": database_id},\r\n \"properties\": json.loads(properties),\r\n }\r\n\r\n response = requests.post(\"https://api.notion.com/v1/pages\", headers=headers, json=data)\r\n\r\n if response.status_code == 200:\r\n page_id = response.json()[\"id\"]\r\n self.status = f\"Successfully created Notion page with ID: {page_id}\\n {str(response.json())}\"\r\n return response.json()\r\n else:\r\n error_message = f\"Failed to create Notion page. 
Status code: {response.status_code}, Error: {response.text}\"\r\n self.status = error_message\r\n raise Exception(error_message)", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "database_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "database_id", + "display_name": "Database ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion database.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "properties": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "{\"Task name\": {\"id\": \"title\", \"type\": \"title\", \"title\": [{\"type\": \"text\", \"text\": {\"content\": \"Send Notion Components to LF\", \"link\": null}}]}}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "properties", + "display_name": "Properties", + "advanced": false, + "dynamic": false, + "info": "The properties of the new page. Depending on your database setup, this can change. 
E.G: {'Task name': {'id': 'title', 'type': 'title', 'title': [{'type': 'text', 'text': {'content': 'Send Notion Components to LF', 'link': null}}]}}", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "A component for creating Notion pages.", + "icon": "NotionDirectoryLoader", + "base_classes": ["object", "str", "Text"], + "display_name": "Create Page [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/page-create", + "custom_fields": { + "database_id": null, + "notion_secret": null, + "properties": null + }, + "output_types": ["Text"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "CustomComponent-PU0K5", + "description": "A component for creating Notion pages.", + "display_name": "Create Page [Notion] " + }, + "selected": false, + "width": 384, + "height": 477, + "positionAbsolute": { + "x": -3077.2269116193215, + "y": -960.9450220159636 + }, + "dragging": false + }, + { + "id": "CustomComponent-YODla", + "type": "genericNode", + "position": { "x": -3485.297183150799, "y": -362.8525892356713 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nfrom typing import Dict\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionDatabaseProperties(CustomComponent):\r\n display_name = \"List Database Properties [Notion]\"\r\n description = \"Retrieve properties of a Notion database.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-database-properties\"\r\n icon = \"NotionDirectoryLoader\"\r\n \r\n def build_config(self):\r\n return {\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion 
database.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n database_id: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n url = f\"https://api.notion.com/v1/databases/{database_id}\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n response = requests.get(url, headers=headers)\r\n response.raise_for_status()\r\n\r\n data = response.json()\r\n properties = data.get(\"properties\", {})\r\n\r\n record = Record(text=str(response.json()), data=properties)\r\n self.status = f\"Retrieved {len(properties)} properties from the Notion database.\\n {record.text}\"\r\n return record", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "database_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "database_id", + "display_name": "Database ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion database.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "NOTION_NMSTX_DB_ID" + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Retrieve properties of a 
Notion database.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "List Database Properties [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/list-database-properties", + "custom_fields": { "database_id": null, "notion_secret": null }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "CustomComponent-YODla", + "description": "Retrieve properties of a Notion database.", + "display_name": "List Database Properties [Notion] " + }, + "selected": true, + "width": 384, + "height": 383, + "dragging": false, + "positionAbsolute": { "x": -3485.297183150799, "y": -362.8525892356713 } + }, + { + "id": "CustomComponent-wHlSz", + "type": "genericNode", + "position": { "x": -2668.7714642455403, "y": -657.2376228212606 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import json\r\nimport requests\r\nfrom typing import Dict, Any\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionPageUpdate(CustomComponent):\r\n display_name = \"Update Page Property [Notion]\"\r\n description = \"Update the properties of a Notion page.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-update\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"page_id\": {\r\n \"display_name\": \"Page ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion page to update.\",\r\n },\r\n \"properties\": {\r\n \"display_name\": \"Properties\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The properties to update on the page (as a JSON string).\",\r\n \"multiline\": True,\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": 
\"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n page_id: str,\r\n properties: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n url = f\"https://api.notion.com/v1/pages/{page_id}\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n try:\r\n parsed_properties = json.loads(properties)\r\n except json.JSONDecodeError as e:\r\n raise ValueError(\"Invalid JSON format for properties\") from e\r\n\r\n data = {\r\n \"properties\": parsed_properties\r\n }\r\n\r\n response = requests.patch(url, headers=headers, json=data)\r\n response.raise_for_status()\r\n\r\n updated_page = response.json()\r\n\r\n output = \"Updated page properties:\\n\"\r\n for prop_name, prop_value in updated_page[\"properties\"].items():\r\n output += f\"{prop_name}: {prop_value}\\n\"\r\n\r\n self.status = output\r\n return Record(data=updated_page)", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "page_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "page_id", + "display_name": "Page ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion page to update.", + "load_from_db": false, + 
"title_case": false, + "input_types": ["Text"] + }, + "properties": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "properties", + "display_name": "Properties", + "advanced": false, + "dynamic": false, + "info": "The properties to update on the page (as a JSON string).", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "{ \"title\": [ { \"text\": { \"content\": \"Test Page\" } } ] }" + }, + "_type": "CustomComponent" + }, + "description": "Update the properties of a Notion page.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "Update Page Property [Notion]", + "documentation": "https://docs.langflow.org/integrations/notion/page-update", + "custom_fields": { + "page_id": null, + "properties": null, + "notion_secret": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "CustomComponent-wHlSz", + "description": "Update the properties of a Notion page.", + "display_name": "Update Page Property [Notion]" + }, + "selected": false, + "width": 384, + "height": 477, + "dragging": false, + "positionAbsolute": { + "x": -2668.7714642455403, + "y": -657.2376228212606 + } + }, + { + "id": "CustomComponent-oelYw", + "type": "genericNode", + "position": { "x": -2253.1007124701327, "y": -448.47240118604134 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nfrom typing import Dict, Any\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionPageContent(CustomComponent):\r\n display_name = \"Page Content Viewer [Notion]\"\r\n description = \"Retrieve the content of a Notion 
page as plain text.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-content-viewer\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"page_id\": {\r\n \"display_name\": \"Page ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion page to retrieve.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n page_id: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n blocks_url = f\"https://api.notion.com/v1/blocks/{page_id}/children?page_size=100\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n # Retrieve the child blocks\r\n blocks_response = requests.get(blocks_url, headers=headers)\r\n blocks_response.raise_for_status()\r\n blocks_data = blocks_response.json()\r\n\r\n # Parse the blocks and extract the content as plain text\r\n content = self.parse_blocks(blocks_data[\"results\"])\r\n\r\n self.status = content\r\n return Record(data={\"content\": content}, text=content)\r\n\r\n def parse_blocks(self, blocks: list) -> str:\r\n content = \"\"\r\n for block in blocks:\r\n block_type = block[\"type\"]\r\n if block_type in [\"paragraph\", \"heading_1\", \"heading_2\", \"heading_3\", \"quote\"]:\r\n content += self.parse_rich_text(block[block_type][\"rich_text\"]) + \"\\n\\n\"\r\n elif block_type in [\"bulleted_list_item\", \"numbered_list_item\"]:\r\n content += self.parse_rich_text(block[block_type][\"rich_text\"]) + \"\\n\"\r\n elif block_type == \"to_do\":\r\n content += self.parse_rich_text(block[\"to_do\"][\"rich_text\"]) + \"\\n\"\r\n elif block_type == \"code\":\r\n content += self.parse_rich_text(block[\"code\"][\"rich_text\"]) + \"\\n\\n\"\r\n elif block_type == \"image\":\r\n content += f\"[Image: 
{block['image']['external']['url']}]\\n\\n\"\r\n elif block_type == \"divider\":\r\n content += \"---\\n\\n\"\r\n return content.strip()\r\n\r\n def parse_rich_text(self, rich_text: list) -> str:\r\n text = \"\"\r\n for segment in rich_text:\r\n text += segment[\"plain_text\"]\r\n return text", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "page_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "page_id", + "display_name": "Page ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion page to retrieve.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Retrieve the content of a Notion page as plain text.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "Page Content Viewer [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/page-content-viewer", + "custom_fields": { "page_id": null, "notion_secret": null }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "CustomComponent-oelYw", + "description": "Retrieve the content of a Notion page as plain text.", + "display_name": "Page Content Viewer [Notion] " + }, + "selected": false, + 
"width": 384, + "height": 383, + "positionAbsolute": { + "x": -2253.1007124701327, + "y": -448.47240118604134 + }, + "dragging": false + }, + { + "id": "CustomComponent-Pn52w", + "type": "genericNode", + "position": { "x": -3070.9222948695096, "y": -472.4537855763852 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nimport json\r\nfrom typing import Dict, Any, List\r\nfrom langflow.custom import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass NotionListPages(CustomComponent):\r\n display_name = \"List Pages [Notion]\"\r\n description = (\r\n \"Query a Notion database with filtering and sorting. \"\r\n \"The input should be a JSON string containing the 'filter' and 'sorts' objects. \"\r\n \"Example input:\\n\"\r\n '{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}'\r\n )\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-pages\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n field_order = [\r\n \"notion_secret\",\r\n \"database_id\",\r\n \"query_payload\",\r\n ]\r\n\r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion database to query.\",\r\n },\r\n \"query_payload\": {\r\n \"display_name\": \"Database query\",\r\n \"field_type\": \"str\",\r\n \"info\": \"A JSON string containing the filters that will be used for querying the database. 
EG: {'filter': {'property': 'Status', 'status': {'equals': 'In progress'}}}\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n database_id: str,\r\n query_payload: str = \"{}\",\r\n ) -> List[Record]:\r\n try:\r\n query_data = json.loads(query_payload)\r\n filter_obj = query_data.get(\"filter\")\r\n sorts = query_data.get(\"sorts\", [])\r\n\r\n url = f\"https://api.notion.com/v1/databases/{database_id}/query\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"sorts\": sorts,\r\n }\r\n\r\n if filter_obj:\r\n data[\"filter\"] = filter_obj\r\n\r\n response = requests.post(url, headers=headers, json=data)\r\n response.raise_for_status()\r\n\r\n results = response.json()\r\n records = []\r\n combined_text = f\"Pages found: {len(results['results'])}\\n\\n\"\r\n for page in results['results']:\r\n page_data = {\r\n 'id': page['id'],\r\n 'url': page['url'],\r\n 'created_time': page['created_time'],\r\n 'last_edited_time': page['last_edited_time'],\r\n 'properties': page['properties'],\r\n }\r\n\r\n text = (\r\n f\"id: {page['id']}\\n\"\r\n f\"url: {page['url']}\\n\"\r\n f\"created_time: {page['created_time']}\\n\"\r\n f\"last_edited_time: {page['last_edited_time']}\\n\"\r\n f\"properties: {json.dumps(page['properties'], indent=2)}\\n\\n\"\r\n )\r\n\r\n combined_text += text\r\n records.append(Record(text=text, data=page_data))\r\n \r\n self.status = combined_text.strip()\r\n return records\r\n\r\n except Exception as e:\r\n self.status = f\"An error occurred: {str(e)}\"\r\n return [Record(text=self.status, data=[])]", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "database_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": 
false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "database_id", + "display_name": "Database ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion database to query.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "NOTION_NMSTX_DB_ID" + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "query_payload": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "query_payload", + "display_name": "Database query", + "advanced": false, + "dynamic": false, + "info": "A JSON string containing the filters that will be used for querying the database. EG: {'filter': {'property': 'Status', 'status': {'equals': 'In progress'}}}", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Query a Notion database with filtering and sorting. The input should be a JSON string containing the 'filter' and 'sorts' objects. 
Example input:\n{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "List Pages [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/list-pages", + "custom_fields": { + "notion_secret": null, + "database_id": null, + "query_payload": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": ["notion_secret", "database_id", "query_payload"], + "beta": false + }, + "id": "CustomComponent-Pn52w", + "description": "Query a Notion database with filtering and sorting. The input should be a JSON string containing the 'filter' and 'sorts' objects. Example input:\n{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}", + "display_name": "List Pages [Notion] " + }, + "selected": false, + "width": 384, + "height": 517, + "positionAbsolute": { + "x": -3070.9222948695096, + "y": -472.4537855763852 + }, + "dragging": false + }, + { + "id": "CustomComponent-I8Dec", + "type": "genericNode", + "position": { "x": -2256.686402636563, "y": -963.4541117792749 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "block_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "block_id", + "display_name": "Page/Block ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the page/block to add the content.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import json\r\nfrom typing import List, Dict, 
Any\r\nfrom markdown import markdown\r\nfrom bs4 import BeautifulSoup\r\nimport requests\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass AddContentToPage(CustomComponent):\r\n display_name = \"Add Content to Page [Notion]\"\r\n description = \"Convert markdown text to Notion blocks and append them to a Notion page.\"\r\n documentation: str = \"https://developers.notion.com/reference/patch-block-children\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"markdown_text\": {\r\n \"display_name\": \"Markdown Text\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The markdown text to convert to Notion blocks.\",\r\n \"multiline\": True,\r\n },\r\n \"block_id\": {\r\n \"display_name\": \"Page/Block ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the page/block to add the content.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(self, markdown_text: str, block_id: str, notion_secret: str) -> Record:\r\n html_text = markdown(markdown_text)\r\n soup = BeautifulSoup(html_text, 'html.parser')\r\n blocks = self.process_node(soup)\r\n\r\n url = f\"https://api.notion.com/v1/blocks/{block_id}/children\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"children\": blocks,\r\n }\r\n\r\n response = requests.patch(url, headers=headers, json=data)\r\n self.status = str(response.json())\r\n response.raise_for_status()\r\n\r\n result = response.json()\r\n self.status = f\"Appended {len(blocks)} blocks to page with ID: {block_id}\"\r\n return Record(data=result, text=json.dumps(result))\r\n\r\n def process_node(self, node):\r\n blocks = []\r\n if isinstance(node, str):\r\n text = node.strip()\r\n if 
text:\r\n if text.startswith('#'):\r\n heading_level = text.count('#', 0, 6)\r\n heading_text = text[heading_level:].strip()\r\n if heading_level == 1:\r\n blocks.append(self.create_block('heading_1', heading_text))\r\n elif heading_level == 2:\r\n blocks.append(self.create_block('heading_2', heading_text))\r\n elif heading_level == 3:\r\n blocks.append(self.create_block('heading_3', heading_text))\r\n else:\r\n blocks.append(self.create_block('paragraph', text))\r\n elif node.name == 'h1':\r\n blocks.append(self.create_block('heading_1', node.get_text(strip=True)))\r\n elif node.name == 'h2':\r\n blocks.append(self.create_block('heading_2', node.get_text(strip=True)))\r\n elif node.name == 'h3':\r\n blocks.append(self.create_block('heading_3', node.get_text(strip=True)))\r\n elif node.name == 'p':\r\n code_node = node.find('code')\r\n if code_node:\r\n code_text = code_node.get_text()\r\n language, code = self.extract_language_and_code(code_text)\r\n blocks.append(self.create_block('code', code, language=language))\r\n elif self.is_table(str(node)):\r\n blocks.extend(self.process_table(node))\r\n else:\r\n blocks.append(self.create_block('paragraph', node.get_text(strip=True)))\r\n elif node.name == 'ul':\r\n blocks.extend(self.process_list(node, 'bulleted_list_item'))\r\n elif node.name == 'ol':\r\n blocks.extend(self.process_list(node, 'numbered_list_item'))\r\n elif node.name == 'blockquote':\r\n blocks.append(self.create_block('quote', node.get_text(strip=True)))\r\n elif node.name == 'hr':\r\n blocks.append(self.create_block('divider', ''))\r\n elif node.name == 'img':\r\n blocks.append(self.create_block('image', '', image_url=node.get('src')))\r\n elif node.name == 'a':\r\n blocks.append(self.create_block('bookmark', node.get_text(strip=True), link_url=node.get('href')))\r\n elif node.name == 'table':\r\n blocks.extend(self.process_table(node))\r\n\r\n for child in node.children:\r\n if isinstance(child, str):\r\n continue\r\n 
blocks.extend(self.process_node(child))\r\n\r\n return blocks\r\n\r\n def extract_language_and_code(self, code_text):\r\n lines = code_text.split('\\n')\r\n language = lines[0].strip()\r\n code = '\\n'.join(lines[1:]).strip()\r\n return language, code\r\n\r\n def is_code_block(self, text):\r\n return text.startswith('```')\r\n\r\n def extract_code_block(self, text):\r\n lines = text.split('\\n')\r\n language = lines[0].strip('`').strip()\r\n code = '\\n'.join(lines[1:]).strip('`').strip()\r\n return language, code\r\n \r\n def is_table(self, text):\r\n rows = text.split('\\n')\r\n if len(rows) < 2:\r\n return False\r\n\r\n has_separator = False\r\n for i, row in enumerate(rows):\r\n if '|' in row:\r\n cells = [cell.strip() for cell in row.split('|')]\r\n cells = [cell for cell in cells if cell] # Remove empty cells\r\n if i == 1 and all(set(cell) <= set('-|') for cell in cells):\r\n has_separator = True\r\n elif not cells:\r\n return False\r\n\r\n return has_separator and len(rows) >= 3\r\n\r\n def process_list(self, node, list_type):\r\n blocks = []\r\n for item in node.find_all('li'):\r\n item_text = item.get_text(strip=True)\r\n checked = item_text.startswith('[x]')\r\n is_checklist = item_text.startswith('[ ]') or checked\r\n\r\n if is_checklist:\r\n item_text = item_text.replace('[x]', '').replace('[ ]', '').strip()\r\n blocks.append(self.create_block('to_do', item_text, checked=checked))\r\n else:\r\n blocks.append(self.create_block(list_type, item_text))\r\n return blocks\r\n\r\n def process_table(self, node):\r\n blocks = []\r\n header_row = node.find('thead').find('tr') if node.find('thead') else None\r\n body_rows = node.find('tbody').find_all('tr') if node.find('tbody') else []\r\n\r\n if header_row or body_rows:\r\n table_width = max(len(header_row.find_all(['th', 'td'])) if header_row else 0,\r\n max(len(row.find_all(['th', 'td'])) for row in body_rows))\r\n\r\n table_block = self.create_block('table', '', table_width=table_width, 
has_column_header=bool(header_row))\r\n blocks.append(table_block)\r\n\r\n if header_row:\r\n header_cells = [cell.get_text(strip=True) for cell in header_row.find_all(['th', 'td'])]\r\n header_row_block = self.create_block('table_row', header_cells)\r\n blocks.append(header_row_block)\r\n\r\n for row in body_rows:\r\n cells = [cell.get_text(strip=True) for cell in row.find_all(['th', 'td'])]\r\n row_block = self.create_block('table_row', cells)\r\n blocks.append(row_block)\r\n\r\n return blocks\r\n \r\n def create_block(self, block_type: str, content: str, **kwargs) -> Dict[str, Any]:\r\n block = {\r\n \"object\": \"block\",\r\n \"type\": block_type,\r\n block_type: {},\r\n }\r\n\r\n if block_type in [\"paragraph\", \"heading_1\", \"heading_2\", \"heading_3\", \"bulleted_list_item\", \"numbered_list_item\", \"quote\"]:\r\n block[block_type][\"rich_text\"] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n elif block_type == 'to_do':\r\n block[block_type][\"rich_text\"] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n block[block_type]['checked'] = kwargs.get('checked', False)\r\n elif block_type == 'code':\r\n block[block_type]['rich_text'] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n block[block_type]['language'] = kwargs.get('language', 'plain text')\r\n elif block_type == 'image':\r\n block[block_type] = {\r\n \"type\": \"external\",\r\n \"external\": {\r\n \"url\": kwargs.get('image_url', '')\r\n }\r\n }\r\n elif block_type == 'divider':\r\n pass\r\n elif block_type == 'bookmark':\r\n block[block_type]['url'] = kwargs.get('link_url', '')\r\n elif block_type == 'table':\r\n block[block_type]['table_width'] = kwargs.get('table_width', 0)\r\n block[block_type]['has_column_header'] = kwargs.get('has_column_header', False)\r\n block[block_type]['has_row_header'] = kwargs.get('has_row_header', False)\r\n elif 
block_type == 'table_row':\r\n block[block_type]['cells'] = [[{'type': 'text', 'text': {'content': cell}} for cell in content]]\r\n\r\n return block", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "markdown_text": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "markdown_text", + "display_name": "Markdown Text", + "advanced": false, + "dynamic": false, + "info": "The markdown text to convert to Notion blocks.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "# Heading 1\n\n## Heading 2\n\n### Heading 3\n\nThis is a regular paragraph.\n\nHere's another paragraph with an image:\n![Image](https://example.com/image.jpg)\n\n## Checklist\n- [x] Completed task\n- [ ] Incomplete task\n- [x] Another completed task\n\n## Numbered List\n1. First item\n2. Second item\n3. 
Third item\n\n## Bulleted List\n- Item 1\n- Item 2\n- Item 3\n\n## Code Block\n```python\ndef hello_world():\n print(\"Hello, World!\")\n```\n\n## Quote\n> This is a blockquote.\n> It can span multiple lines.\n\n## Horizontal Rule\n---\n\n\n## Link\n[Notion API Documentation](https://developers.notion.com)\n\n" + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Convert markdown text to Notion blocks and append them to a Notion page.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "Add Content to Page [Notion] ", + "documentation": "https://developers.notion.com/reference/patch-block-children", + "custom_fields": { + "markdown_text": null, + "block_id": null, + "notion_secret": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "official": false + }, + "id": "CustomComponent-I8Dec" + }, + "selected": false, + "width": 384, + "height": 497, + "positionAbsolute": { + "x": -2256.686402636563, + "y": -963.4541117792749 + }, + "dragging": false + }, + { + "id": "CustomComponent-ZcsA9", + "type": "genericNode", + "position": { "x": -3488.029350341937, "y": -965.3756250644985 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nfrom typing import Dict, Any, List\r\nfrom langflow.custom import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass 
NotionSearch(CustomComponent):\r\n display_name = \"Search Notion\"\r\n description = (\r\n \"Searches all pages and databases that have been shared with an integration.\"\r\n )\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/search\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n field_order = [\r\n \"notion_secret\",\r\n \"query\",\r\n \"filter_value\",\r\n \"sort_direction\",\r\n ]\r\n\r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n \"query\": {\r\n \"display_name\": \"Search Query\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The text that the API compares page and database titles against.\",\r\n },\r\n \"filter_value\": {\r\n \"display_name\": \"Filter Type\",\r\n \"field_type\": \"str\",\r\n \"info\": \"Limits the results to either only pages or only databases.\",\r\n \"options\": [\"page\", \"database\"],\r\n \"default_value\": \"page\",\r\n },\r\n \"sort_direction\": {\r\n \"display_name\": \"Sort Direction\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The direction to sort the results.\",\r\n \"options\": [\"ascending\", \"descending\"],\r\n \"default_value\": \"descending\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n query: str = \"\",\r\n filter_value: str = \"page\",\r\n sort_direction: str = \"descending\",\r\n ) -> List[Record]:\r\n try:\r\n url = \"https://api.notion.com/v1/search\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"query\": query,\r\n \"filter\": {\r\n \"value\": filter_value,\r\n \"property\": \"object\"\r\n },\r\n \"sort\":{\r\n \"direction\": sort_direction,\r\n \"timestamp\": \"last_edited_time\"\r\n }\r\n }\r\n\r\n response = requests.post(url, headers=headers, json=data)\r\n 
response.raise_for_status()\r\n\r\n results = response.json()\r\n records = []\r\n combined_text = f\"Results found: {len(results['results'])}\\n\\n\"\r\n for result in results['results']:\r\n result_data = {\r\n 'id': result['id'],\r\n 'type': result['object'],\r\n 'last_edited_time': result['last_edited_time'],\r\n }\r\n \r\n if result['object'] == 'page':\r\n result_data['title_or_url'] = result['url']\r\n text = f\"id: {result['id']}\\ntitle_or_url: {result['url']}\\n\"\r\n elif result['object'] == 'database':\r\n if 'title' in result and isinstance(result['title'], list) and len(result['title']) > 0:\r\n result_data['title_or_url'] = result['title'][0]['plain_text']\r\n text = f\"id: {result['id']}\\ntitle_or_url: {result['title'][0]['plain_text']}\\n\"\r\n else:\r\n result_data['title_or_url'] = \"N/A\"\r\n text = f\"id: {result['id']}\\ntitle_or_url: N/A\\n\"\r\n\r\n text += f\"type: {result['object']}\\nlast_edited_time: {result['last_edited_time']}\\n\\n\"\r\n combined_text += text\r\n records.append(Record(text=text, data=result_data))\r\n \r\n self.status = combined_text\r\n return records\r\n\r\n except Exception as e:\r\n self.status = f\"An error occurred: {str(e)}\"\r\n return [Record(text=self.status, data=[])]", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "filter_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "database", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["page", "database"], + "name": "filter_value", + "display_name": "Filter Type", + "advanced": false, + "dynamic": false, + "info": "Limits the results to either only pages or only databases.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "notion_secret": { + "type": "str", + "required": true, + 
"placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "query": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "query", + "display_name": "Search Query", + "advanced": false, + "dynamic": false, + "info": "The text that the API compares page and database titles against.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sort_direction": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "descending", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["ascending", "descending"], + "name": "sort_direction", + "display_name": "Sort Direction", + "advanced": false, + "dynamic": false, + "info": "The direction to sort the results.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Searches all pages and databases that have been shared with an integration.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "Search [Notion]", + "documentation": "https://docs.langflow.org/integrations/notion/search", + "custom_fields": { + "notion_secret": null, + "query": null, + "filter_value": null, + "sort_direction": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "notion_secret", + "query", + "filter_value", + "sort_direction" + ], + "beta": false + }, + "id": "CustomComponent-ZcsA9", + "description": "Searches all pages 
and databases that have been shared with an integration.", + "display_name": "Search [Notion]" + }, + "selected": false, + "width": 384, + "height": 591, + "positionAbsolute": { + "x": -3488.029350341937, + "y": -965.3756250644985 + }, + "dragging": false + } + ], + "edges": [], + "viewport": { + "x": 2623.378922967084, + "y": 696.8541079344027, + "zoom": 0.5981384177708997 + } + }, + "description": "A Bundle containing Notion components for Page and Database manipulation. You can list pages, users databases, update properties, create new pages and add content to Notion Pages.", + "name": "Notion - Components", + "last_tested_version": "1.0.0a36", + "is_component": false } diff --git a/poetry.lock b/poetry.lock index e0978b558..58c4e5e1c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -471,17 +471,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.117" +version = "1.34.116" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.117-py3-none-any.whl", hash = "sha256:1506589e30566bbb2f4997b60968ff7d4ef8a998836c31eedd36437ac3b7408a"}, - {file = "boto3-1.34.117.tar.gz", hash = "sha256:c8a383b904d6faaf7eed0c06e31b423db128e4c09ce7bd2afc39d1cd07030a51"}, + {file = "boto3-1.34.116-py3-none-any.whl", hash = "sha256:e7f5ab2d1f1b90971a2b9369760c2c6bae49dae98c084a5c3f5c78e3968ace15"}, + {file = "boto3-1.34.116.tar.gz", hash = "sha256:53cb8aeb405afa1cd2b25421e27a951aeb568026675dec020587861fac96ac87"}, ] [package.dependencies] -botocore = ">=1.34.117,<1.35.0" +botocore = ">=1.34.116,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -490,13 +490,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.117" +version = "1.34.116" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.117-py3-none-any.whl", hash = "sha256:26a431997f882bcdd1e835f44c24b2a1752b1c4e5183c2ce62999ce95d518d6c"}, - {file = "botocore-1.34.117.tar.gz", hash = "sha256:4637ca42e6c51aebc4d9a2d92f97bf4bdb042e3f7985ff31a659a11e4c170e73"}, + {file = "botocore-1.34.116-py3-none-any.whl", hash = "sha256:ec4d42c816e9b2d87a2439ad277e7dda16a4a614ef6839cf66f4c1a58afa547c"}, + {file = "botocore-1.34.116.tar.gz", hash = "sha256:269cae7ba99081519a9f87d7298e238d9e68ba94eb4f8ddfa906224c34cb8b6c"}, ] [package.dependencies] @@ -4322,7 +4322,7 @@ types-requests = ">=2.31.0.2,<3.0.0.0" [[package]] name = "langflow-base" -version = "0.0.55" +version = "0.0.54" description = "A Python package with a built-in web application" optional = false python-versions = ">=3.10,<3.13" @@ -4365,7 +4365,7 @@ rich = "^13.7.0" sqlmodel = "^0.0.18" typer = "^0.12.0" uncurl = "^0.0.11" -uvicorn = "^0.30.0" +uvicorn = "^0.29.0" websockets = "*" [package.extras] @@ -4419,13 +4419,13 @@ requests = ">=2,<3" [[package]] name = "litellm" -version = "1.40.0" +version = "1.39.5" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.40.0-py3-none-any.whl", hash = "sha256:c3055767ae144585699fdb07b3ad678e66738c2eff19abd7761c8fe22d6e636f"}, - {file = "litellm-1.40.0.tar.gz", hash = "sha256:12b4c0ad850ede5aebdb2f48e3a8e898efb25df5bc915ff89929ad963cb92f54"}, + {file = "litellm-1.39.5-py3-none-any.whl", hash = "sha256:1e8dd43c5d257fa8d7a0039b20aed7aeed4463d53608d1ba4ac233f1967a5330"}, + {file = "litellm-1.39.5.tar.gz", hash = "sha256:8f4ea9fe21d67890e81a578e12c30b4172260ff35971dc7c3edf7eb69167d3be"}, ] [package.dependencies] @@ -7763,28 +7763,28 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.4.7" +version = "0.4.6" description = "An extremely fast Python linter and code 
formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.4.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e089371c67892a73b6bb1525608e89a2aca1b77b5440acf7a71dda5dac958f9e"}, - {file = "ruff-0.4.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:10f973d521d910e5f9c72ab27e409e839089f955be8a4c8826601a6323a89753"}, - {file = "ruff-0.4.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59c3d110970001dfa494bcd95478e62286c751126dfb15c3c46e7915fc49694f"}, - {file = "ruff-0.4.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa9773c6c00f4958f73b317bc0fd125295110c3776089f6ef318f4b775f0abe4"}, - {file = "ruff-0.4.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07fc80bbb61e42b3b23b10fda6a2a0f5a067f810180a3760c5ef1b456c21b9db"}, - {file = "ruff-0.4.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:fa4dafe3fe66d90e2e2b63fa1591dd6e3f090ca2128daa0be33db894e6c18648"}, - {file = "ruff-0.4.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7c0083febdec17571455903b184a10026603a1de078428ba155e7ce9358c5f6"}, - {file = "ruff-0.4.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad1b20e66a44057c326168437d680a2166c177c939346b19c0d6b08a62a37589"}, - {file = "ruff-0.4.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf5d818553add7511c38b05532d94a407f499d1a76ebb0cad0374e32bc67202"}, - {file = "ruff-0.4.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:50e9651578b629baec3d1513b2534de0ac7ed7753e1382272b8d609997e27e83"}, - {file = "ruff-0.4.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8874a9df7766cb956b218a0a239e0a5d23d9e843e4da1e113ae1d27ee420877a"}, - {file = "ruff-0.4.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b9de9a6e49f7d529decd09381c0860c3f82fa0b0ea00ea78409b785d2308a567"}, - {file = "ruff-0.4.7-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:13a1768b0691619822ae6d446132dbdfd568b700ecd3652b20d4e8bc1e498f78"}, - {file = "ruff-0.4.7-py3-none-win32.whl", hash = "sha256:769e5a51df61e07e887b81e6f039e7ed3573316ab7dd9f635c5afaa310e4030e"}, - {file = "ruff-0.4.7-py3-none-win_amd64.whl", hash = "sha256:9e3ab684ad403a9ed1226894c32c3ab9c2e0718440f6f50c7c5829932bc9e054"}, - {file = "ruff-0.4.7-py3-none-win_arm64.whl", hash = "sha256:10f2204b9a613988e3484194c2c9e96a22079206b22b787605c255f130db5ed7"}, - {file = "ruff-0.4.7.tar.gz", hash = "sha256:2331d2b051dc77a289a653fcc6a42cce357087c5975738157cd966590b18b5e1"}, + {file = "ruff-0.4.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ef995583a038cd4a7edf1422c9e19118e2511b8ba0b015861b4abd26ec5367c5"}, + {file = "ruff-0.4.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:602ebd7ad909eab6e7da65d3c091547781bb06f5f826974a53dbe563d357e53c"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f9ced5cbb7510fd7525448eeb204e0a22cabb6e99a3cb160272262817d49786"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04a80acfc862e0e1630c8b738e70dcca03f350bad9e106968a8108379e12b31f"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be47700ecb004dfa3fd4dcdddf7322d4e632de3c06cd05329d69c45c0280e618"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1ff930d6e05f444090a0139e4e13e1e2e1f02bd51bb4547734823c760c621e79"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f13410aabd3b5776f9c5699f42b37a3a348d65498c4310589bc6e5c548dc8a2f"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cf5cc02d3ae52dfb0c8a946eb7a1d6ffe4d91846ffc8ce388baa8f627e3bd50"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea3424793c29906407e3cf417f28fc33f689dacbbadfb52b7e9a809dd535dcef"}, + {file 
= "ruff-0.4.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1fa8561489fadf483ffbb091ea94b9c39a00ed63efacd426aae2f197a45e67fc"}, + {file = "ruff-0.4.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d5b914818d8047270308fe3e85d9d7f4a31ec86c6475c9f418fbd1624d198e0"}, + {file = "ruff-0.4.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4f02284335c766678778475e7698b7ab83abaf2f9ff0554a07b6f28df3b5c259"}, + {file = "ruff-0.4.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3a6a0a4f4b5f54fff7c860010ab3dd81425445e37d35701a965c0248819dde7a"}, + {file = "ruff-0.4.6-py3-none-win32.whl", hash = "sha256:9018bf59b3aa8ad4fba2b1dc0299a6e4e60a4c3bc62bbeaea222679865453062"}, + {file = "ruff-0.4.6-py3-none-win_amd64.whl", hash = "sha256:a769ae07ac74ff1a019d6bd529426427c3e30d75bdf1e08bb3d46ac8f417326a"}, + {file = "ruff-0.4.6-py3-none-win_arm64.whl", hash = "sha256:735a16407a1a8f58e4c5b913ad6102722e80b562dd17acb88887685ff6f20cf6"}, + {file = "ruff-0.4.6.tar.gz", hash = "sha256:a797a87da50603f71e6d0765282098245aca6e3b94b7c17473115167d8dfb0b7"}, ] [[package]] diff --git a/scripts/factory_restart_space.py b/scripts/factory_restart_space.py index e9972e8cb..8a52dc42a 100644 --- a/scripts/factory_restart_space.py +++ b/scripts/factory_restart_space.py @@ -1,4 +1,4 @@ -import os +import argparse from huggingface_hub import HfApi, list_models from rich import print @@ -6,11 +6,27 @@ from rich import print # Use root method models = list_models() +args = argparse.ArgumentParser(description="Restart a space in the Hugging Face Hub.") +args.add_argument("--space", type=str, help="The space to restart.") +args.add_argument("--token", type=str, help="The Hugging Face API token.") + +parsed_args = args.parse_args() + +space = parsed_args.space + +if not space: + print("Please provide a space to restart.") + exit() + +if not parsed_args.token: + print("Please provide an API token.") + exit() + # Or configure a HfApi client hf_api = HfApi( endpoint="https://huggingface.co", # Can 
be a Private Hub endpoint. - token=os.getenv("HUGGINFACE_API_TOKEN"), + token=parsed_args.token, ) -space_runtime = hf_api.restart_space("Langflow/Langflow-Preview", factory_reboot=True) +space_runtime = hf_api.restart_space(space, factory_reboot=True) print(space_runtime) diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py index 4162629dd..343188336 100644 --- a/src/backend/base/langflow/__main__.py +++ b/src/backend/base/langflow/__main__.py @@ -121,7 +121,7 @@ def run( ), ): """ - Run the Langflow. + Run Langflow. """ configure(log_level=log_level, log_file=log_file) diff --git a/src/backend/base/langflow/api/v1/flows.py b/src/backend/base/langflow/api/v1/flows.py index 36030a12d..c1ccf68db 100644 --- a/src/backend/base/langflow/api/v1/flows.py +++ b/src/backend/base/langflow/api/v1/flows.py @@ -9,7 +9,7 @@ from loguru import logger from sqlmodel import Session, col, select from langflow.api.utils import remove_api_keys, validate_is_component -from langflow.api.v1.schemas import FlowListCreate, FlowListIds, FlowListRead +from langflow.api.v1.schemas import FlowListCreate, FlowListRead from langflow.initial_setup.setup import STARTER_FOLDER_NAME from langflow.services.auth.utils import get_current_active_user from langflow.services.database.models.flow import Flow, FlowCreate, FlowRead, FlowUpdate @@ -258,9 +258,9 @@ async def download_file( return FlowListRead(flows=flows) -@router.post("/multiple_delete/") +@router.delete("/") async def delete_multiple_flows( - flow_ids: FlowListIds, user: User = Depends(get_current_active_user), db: Session = Depends(get_session) + flow_ids: List[UUID], user: User = Depends(get_current_active_user), db: Session = Depends(get_session) ): """ Delete multiple flows by their IDs. 
@@ -274,9 +274,7 @@ async def delete_multiple_flows( """ try: - deleted_flows = db.exec( - select(Flow).where(col(Flow.id).in_(flow_ids.flow_ids)).where(Flow.user_id == user.id) - ).all() + deleted_flows = db.exec(select(Flow).where(col(Flow.id).in_(flow_ids)).where(Flow.user_id == user.id)).all() for flow in deleted_flows: db.delete(flow) db.commit() diff --git a/src/backend/base/langflow/api/v1/monitor.py b/src/backend/base/langflow/api/v1/monitor.py index 05fee6f03..e419ed5bf 100644 --- a/src/backend/base/langflow/api/v1/monitor.py +++ b/src/backend/base/langflow/api/v1/monitor.py @@ -1,9 +1,10 @@ from typing import List, Optional - +from uuid import UUID from fastapi import APIRouter, Depends, HTTPException, Query from langflow.services.deps import get_monitor_service from langflow.services.monitor.schema import ( + MessageModelRequest, MessageModelResponse, TransactionModelResponse, VertexBuildMapModel, @@ -66,6 +67,44 @@ async def get_messages( raise HTTPException(status_code=500, detail=str(e)) +@router.delete("/messages", status_code=204) +async def delete_messages( + message_ids: List[int], + monitor_service: MonitorService = Depends(get_monitor_service), +): + try: + monitor_service.delete_messages(message_ids=message_ids) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/messages/{message_id}", response_model=MessageModelResponse) +async def update_message( + message_id: str, + message: MessageModelRequest, + monitor_service: MonitorService = Depends(get_monitor_service), +): + try: + message_dict = message.model_dump(exclude_none=True) + message_dict.pop("index", None) + monitor_service.update_message(message_id=message_id, **message_dict) + return MessageModelResponse(index=message_id, **message_dict) + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.delete("/messages/session/{session_id}", status_code=204) +async def delete_messages_session( + session_id: str, 
+ monitor_service: MonitorService = Depends(get_monitor_service), +): + try: + monitor_service.delete_messages_session(session_id=session_id) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + @router.get("/transactions", response_model=List[TransactionModelResponse]) async def get_transactions( source: Optional[str] = Query(None), diff --git a/src/backend/base/langflow/base/data/utils.py b/src/backend/base/langflow/base/data/utils.py index aa79dfd6f..07704f0be 100644 --- a/src/backend/base/langflow/base/data/utils.py +++ b/src/backend/base/langflow/base/data/utils.py @@ -3,7 +3,7 @@ import xml.etree.ElementTree as ET from concurrent import futures from pathlib import Path from typing import Callable, List, Optional, Text - +import chardet import yaml from langflow.schema.schema import Record @@ -96,7 +96,12 @@ def retrieve_file_paths( def read_text_file(file_path: str) -> str: - with open(file_path, "r") as f: + with open(file_path, "rb") as f: + raw_data = f.read() + result = chardet.detect(raw_data) + encoding = result['encoding'] + + with open(file_path, "r", encoding=encoding) as f: return f.read() diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py index 9639f8745..379bc2e1e 100644 --- a/src/backend/base/langflow/graph/graph/base.py +++ b/src/backend/base/langflow/graph/graph/base.py @@ -17,6 +17,7 @@ from langflow.graph.vertex.base import Vertex from langflow.graph.vertex.types import InterfaceVertex, StateVertex from langflow.schema import Record from langflow.schema.schema import INPUT_FIELD_NAME, InputType +from langflow.services.cache.utils import CacheMiss from langflow.services.chat.service import ChatService from langflow.services.deps import get_chat_service from langflow.services.monitor.utils import log_transaction @@ -732,14 +733,29 @@ class Graph: """ vertex = self.get_vertex(vertex_id) try: - if not vertex.frozen or not vertex._built: - await vertex.build( - 
user_id=user_id, inputs=inputs_dict, files=files, fallback_to_env_vars=fallback_to_env_vars - ) + params = "" + if vertex.frozen: + # Check the cache for the vertex + cached_result = await chat_service.get_cache(key=vertex.id) + if isinstance(cached_result, CacheMiss): + await vertex.build(user_id=user_id, inputs=inputs_dict, fallback_to_env_vars=fallback_to_env_vars) + await chat_service.set_cache(key=vertex.id, data=vertex) + else: + cached_vertex = cached_result["result"] + # Now set update the vertex with the cached vertex + vertex._built = cached_vertex._built + vertex.result = cached_vertex.result + vertex.artifacts = cached_vertex.artifacts + vertex._built_object = cached_vertex._built_object + vertex._custom_component = cached_vertex._custom_component + if vertex.result is not None: + vertex.result.used_frozen_result = True + + else: + await vertex.build(user_id=user_id, inputs=inputs_dict, fallback_to_env_vars=fallback_to_env_vars) if vertex.result is not None: - params = vertex.artifacts_raw - log_type = vertex.artifacts_type + params = f"{vertex._built_object_repr()}{params}" valid = True result_dict = vertex.result else: @@ -748,7 +764,8 @@ class Graph: next_runnable_vertices, top_level_vertices = await self.get_next_and_top_level_vertices( lock, set_cache_coro, vertex ) - return next_runnable_vertices, top_level_vertices, result_dict, params, valid, log_type, vertex + log_transaction(vertex, status="success") + return next_runnable_vertices, top_level_vertices, result_dict, params, valid, artifacts, vertex except Exception as exc: logger.exception(f"Error building vertex: {exc}") log_transaction(vertex, status="failure", error=str(exc)) diff --git a/src/backend/base/langflow/helpers/flow.py b/src/backend/base/langflow/helpers/flow.py index 0f2a1e170..9a8a7c3b5 100644 --- a/src/backend/base/langflow/helpers/flow.py +++ b/src/backend/base/langflow/helpers/flow.py @@ -8,7 +8,7 @@ from sqlmodel import Session, select from langflow.graph.schema import 
RunOutputs from langflow.schema.schema import INPUT_FIELD_NAME, Record from langflow.services.database.models.flow import Flow -from langflow.services.deps import get_session, session_scope +from langflow.services.deps import get_session, get_settings_service, session_scope if TYPE_CHECKING: from langflow.graph.graph.base import Graph @@ -88,7 +88,9 @@ async def run_flow( inputs_components.append(input_dict.get("components", [])) types.append(input_dict.get("type", "chat")) - return await graph.arun(inputs_list, inputs_components=inputs_components, types=types) + fallback_to_env_vars = get_settings_service().settings.fallback_to_env_var + + return await graph.arun(inputs_list, inputs_components=inputs_components, types=types, fallback_to_env_vars=fallback_to_env_vars) def generate_function_for_flow( diff --git a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json index 308bc7708..4bd5931f1 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json @@ -81,10 +81,7 @@ "fileTypes": [], "file_path": "", "password": false, - "options": [ - "Machine", - "User" - ], + "options": ["Machine", "User"], "name": "sender", "display_name": "Sender Type", "advanced": true, @@ -92,9 +89,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "sender_name": { "type": "str", @@ -114,9 +109,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "session_id": { "type": "str", @@ -135,20 +128,13 @@ "info": "If provided, the message will be stored in the memory.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "_type": "CustomComponent" }, "description": 
"Get chat inputs from the Playground.", "icon": "ChatInput", - "base_classes": [ - "Text", - "str", - "object", - "Record" - ], + "base_classes": ["Text", "str", "object", "Record"], "display_name": "Chat Input", "documentation": "", "custom_fields": { @@ -158,10 +144,7 @@ "session_id": null, "return_record": null }, - "output_types": [ - "Text", - "Record" - ], + "output_types": ["Text", "Record"], "field_formatters": {}, "frozen": false, "field_order": [], @@ -198,10 +181,7 @@ "name": "input_value", "display_name": "Value", "advanced": false, - "input_types": [ - "Record", - "Text" - ], + "input_types": ["Record", "Text"], "dynamic": false, "info": "Text or Record to be passed as output.", "load_from_db": false, @@ -243,28 +223,20 @@ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "_type": "CustomComponent" }, "description": "Display a text output in the Playground.", "icon": "type", - "base_classes": [ - "object", - "Text", - "str" - ], + "base_classes": ["object", "Text", "str"], "display_name": "Extracted Chunks", "documentation": "", "custom_fields": { "input_value": null, "record_template": null }, - "output_types": [ - "Text" - ], + "output_types": ["Text"], "field_formatters": {}, "frozen": false, "field_order": [], @@ -310,9 +282,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "chunk_size": { "type": "int", @@ -424,9 +394,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "disallowed_special": { "type": "str", @@ -435,9 +403,7 @@ "list": false, "show": true, "multiline": false, - "value": [ - "all" - ], + "value": ["all"], "fileTypes": [], "file_path": "", "password": false, @@ -448,9 +414,7 @@ "info": "", "load_from_db": false, "title_case": 
false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "embedding_ctx_length": { "type": "int", @@ -513,9 +477,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "model_kwargs": { "type": "NestedDict", @@ -553,9 +515,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_api_key": { "type": "str", @@ -572,11 +532,9 @@ "advanced": false, "dynamic": false, "info": "", - "load_from_db": false, + "load_from_db": true, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "OPENAI_API_KEY" }, "openai_api_type": { @@ -596,9 +554,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_api_version": { "type": "str", @@ -617,9 +573,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_organization": { "type": "str", @@ -638,9 +592,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_proxy": { "type": "str", @@ -659,9 +611,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "request_timeout": { "type": "float", @@ -761,16 +711,12 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "_type": "CustomComponent" }, "description": "Generate embeddings using OpenAI models.", - "base_classes": [ - "Embeddings" - ], + "base_classes": ["Embeddings"], "display_name": "OpenAI Embeddings", "documentation": "", "custom_fields": { @@ -797,9 +743,7 @@ "tiktoken_enable": null, "tiktoken_model_name": null }, - "output_types": [ - "Embeddings" - ], + "output_types": ["Embeddings"], "field_formatters": {}, "frozen": false, "field_order": [], @@ 
-840,9 +784,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "code": { "type": "code", @@ -925,9 +867,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_api_base": { "type": "str", @@ -946,9 +886,7 @@ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_api_key": { "type": "str", @@ -965,11 +903,9 @@ "advanced": false, "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "load_from_db": false, + "load_from_db": true, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "OPENAI_API_KEY" }, "stream": { @@ -1008,9 +944,7 @@ "info": "System message to pass to the model.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "temperature": { "type": "float", @@ -1041,11 +975,7 @@ }, "description": "Generates text using OpenAI LLMs.", "icon": "OpenAI", - "base_classes": [ - "object", - "Text", - "str" - ], + "base_classes": ["object", "Text", "str"], "display_name": "OpenAI", "documentation": "", "custom_fields": { @@ -1059,9 +989,7 @@ "stream": null, "system_message": null }, - "output_types": [ - "Text" - ], + "output_types": ["Text"], "field_formatters": {}, "frozen": false, "field_order": [ @@ -1131,9 +1059,7 @@ "name": "template", "display_name": "Template", "advanced": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "dynamic": false, "info": "", "load_from_db": false, @@ -1198,23 +1124,14 @@ "is_input": null, "is_output": null, "is_composition": null, - "base_classes": [ - "object", - "Text", - "str" - ], + "base_classes": ["object", "Text", "str"], "name": "", 
"display_name": "Prompt", "documentation": "", "custom_fields": { - "template": [ - "context", - "question" - ] + "template": ["context", "question"] }, - "output_types": [ - "Text" - ], + "output_types": ["Text"], "full_path": null, "field_formatters": {}, "frozen": false, @@ -1277,9 +1194,7 @@ "name": "input_value", "display_name": "Message", "advanced": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "dynamic": false, "info": "", "load_from_db": false, @@ -1303,9 +1218,7 @@ "info": "In case of Message being a Record, this template will be used to convert it to text.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "return_record": { "type": "bool", @@ -1337,10 +1250,7 @@ "fileTypes": [], "file_path": "", "password": false, - "options": [ - "Machine", - "User" - ], + "options": ["Machine", "User"], "name": "sender", "display_name": "Sender Type", "advanced": true, @@ -1348,9 +1258,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "sender_name": { "type": "str", @@ -1370,9 +1278,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "session_id": { "type": "str", @@ -1391,20 +1297,13 @@ "info": "If provided, the message will be stored in the memory.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "_type": "CustomComponent" }, "description": "Display a chat message in the Playground.", "icon": "ChatOutput", - "base_classes": [ - "object", - "Text", - "Record", - "str" - ], + "base_classes": ["object", "Text", "Record", "str"], "display_name": "Chat Output", "documentation": "", "custom_fields": { @@ -1415,10 +1314,7 @@ "return_record": null, "record_template": null }, - "output_types": [ - "Text", - "Record" - ], + "output_types": ["Text", "Record"], "field_formatters": {}, "frozen": false, 
"field_order": [], @@ -1525,18 +1421,14 @@ }, "description": "A generic file loader.", "icon": "file-text", - "base_classes": [ - "Record" - ], + "base_classes": ["Record"], "display_name": "File", "documentation": "", "custom_fields": { "path": null, "silent_errors": null }, - "output_types": [ - "Record" - ], + "output_types": ["Record"], "field_formatters": {}, "frozen": false, "field_order": [], @@ -1577,10 +1469,7 @@ "name": "inputs", "display_name": "Input", "advanced": false, - "input_types": [ - "Document", - "Record" - ], + "input_types": ["Document", "Record"], "dynamic": false, "info": "The texts to split.", "load_from_db": false, @@ -1659,19 +1548,13 @@ "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ], - "value": [ - "" - ] + "input_types": ["Text"], + "value": [""] }, "_type": "CustomComponent" }, "description": "Split text into chunks of a specified length.", - "base_classes": [ - "Record" - ], + "base_classes": ["Record"], "display_name": "Recursive Character Text Splitter", "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter", "custom_fields": { @@ -1680,9 +1563,7 @@ "chunk_size": null, "chunk_overlap": null }, - "output_types": [ - "Record" - ], + "output_types": ["Record"], "field_formatters": {}, "frozen": false, "field_order": [], @@ -1745,9 +1626,7 @@ "info": "Input value to search", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "api_endpoint": { "type": "str", @@ -1766,9 +1645,7 @@ "info": "API endpoint URL for the Astra DB service.", "load_from_db": true, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "ASTRA_DB_API_ENDPOINT" }, "batch_size": { @@ -1896,9 +1773,7 @@ "info": "The name of the collection within Astra DB where the vectors will be stored.", "load_from_db": 
false, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "langflow" }, "metadata_indexing_exclude": { @@ -1918,9 +1793,7 @@ "info": "Optional list of metadata fields to exclude from the indexing.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "metadata_indexing_include": { "type": "str", @@ -1939,9 +1812,7 @@ "info": "Optional list of metadata fields to include in the indexing.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "metric": { "type": "str", @@ -1960,9 +1831,7 @@ "info": "Optional distance metric for vector comparisons in the vector store.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "namespace": { "type": "str", @@ -1981,9 +1850,7 @@ "info": "Optional namespace within Astra DB to use for the collection.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "number_of_results": { "type": "int", @@ -2034,10 +1901,7 @@ "fileTypes": [], "file_path": "", "password": false, - "options": [ - "Similarity", - "MMR" - ], + "options": ["Similarity", "MMR"], "name": "search_type", "display_name": "Search Type", "advanced": false, @@ -2045,9 +1909,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "setup_mode": { "type": "str", @@ -2060,11 +1922,7 @@ "fileTypes": [], "file_path": "", "password": false, - "options": [ - "Sync", - "Async", - "Off" - ], + "options": ["Sync", "Async", "Off"], "name": "setup_mode", "display_name": "Setup Mode", "advanced": true, @@ -2072,9 +1930,7 @@ "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "token": { "type": "str", @@ -2093,18 
+1949,14 @@ "info": "Authentication token for accessing Astra DB.", "load_from_db": true, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "ASTRA_DB_APPLICATION_TOKEN" }, "_type": "CustomComponent" }, "description": "Searches an existing Astra DB Vector Store.", "icon": "AstraDB", - "base_classes": [ - "Record" - ], + "base_classes": ["Record"], "display_name": "Astra DB Search", "documentation": "", "custom_fields": { @@ -2127,9 +1979,7 @@ "metadata_indexing_exclude": null, "collection_indexing_policy": null }, - "output_types": [ - "Record" - ], + "output_types": ["Record"], "field_formatters": {}, "frozen": false, "field_order": [ @@ -2216,9 +2066,7 @@ "info": "API endpoint URL for the Astra DB service.", "load_from_db": true, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "ASTRA_DB_API_ENDPOINT" }, "batch_size": { @@ -2346,9 +2194,7 @@ "info": "The name of the collection within Astra DB where the vectors will be stored.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "langflow" }, "metadata_indexing_exclude": { @@ -2368,9 +2214,7 @@ "info": "Optional list of metadata fields to exclude from the indexing.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "metadata_indexing_include": { "type": "str", @@ -2389,9 +2233,7 @@ "info": "Optional list of metadata fields to include in the indexing.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "metric": { "type": "str", @@ -2410,9 +2252,7 @@ "info": "Optional distance metric for vector comparisons in the vector store.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "namespace": { "type": "str", @@ -2431,9 +2271,7 @@ "info": "Optional namespace within Astra DB to use for the collection.", 
"load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "pre_delete_collection": { "type": "bool", @@ -2465,11 +2303,7 @@ "fileTypes": [], "file_path": "", "password": false, - "options": [ - "Sync", - "Async", - "Off" - ], + "options": ["Sync", "Async", "Off"], "name": "setup_mode", "display_name": "Setup Mode", "advanced": true, @@ -2477,9 +2311,7 @@ "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "token": { "type": "str", @@ -2498,18 +2330,14 @@ "info": "Authentication token for accessing Astra DB.", "load_from_db": true, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "ASTRA_DB_APPLICATION_TOKEN" }, "_type": "CustomComponent" }, "description": "Builds or loads an Astra DB Vector Store.", "icon": "AstraDB", - "base_classes": [ - "VectorStore" - ], + "base_classes": ["VectorStore"], "display_name": "Astra DB", "documentation": "", "custom_fields": { @@ -2530,9 +2358,7 @@ "metadata_indexing_exclude": null, "collection_indexing_policy": null }, - "output_types": [ - "VectorStore" - ], + "output_types": ["VectorStore"], "field_formatters": {}, "frozen": false, "field_order": [ @@ -2584,9 +2410,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "chunk_size": { "type": "int", @@ -2698,9 +2522,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "disallowed_special": { "type": "str", @@ -2709,9 +2531,7 @@ "list": false, "show": true, "multiline": false, - "value": [ - "all" - ], + "value": ["all"], "fileTypes": [], "file_path": "", "password": false, @@ -2722,9 +2542,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": 
["Text"] }, "embedding_ctx_length": { "type": "int", @@ -2787,9 +2605,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "model_kwargs": { "type": "NestedDict", @@ -2827,9 +2643,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_api_key": { "type": "str", @@ -2846,11 +2660,9 @@ "advanced": false, "dynamic": false, "info": "", - "load_from_db": false, + "load_from_db": true, "title_case": false, - "input_types": [ - "Text" - ], + "input_types": ["Text"], "value": "OPENAI_API_KEY" }, "openai_api_type": { @@ -2870,9 +2682,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_api_version": { "type": "str", @@ -2891,9 +2701,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_organization": { "type": "str", @@ -2912,9 +2720,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "openai_proxy": { "type": "str", @@ -2933,9 +2739,7 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "request_timeout": { "type": "float", @@ -3035,16 +2839,12 @@ "info": "", "load_from_db": false, "title_case": false, - "input_types": [ - "Text" - ] + "input_types": ["Text"] }, "_type": "CustomComponent" }, "description": "Generate embeddings using OpenAI models.", - "base_classes": [ - "Embeddings" - ], + "base_classes": ["Embeddings"], "display_name": "OpenAI Embeddings", "documentation": "", "custom_fields": { @@ -3071,9 +2871,7 @@ "tiktoken_enable": null, "tiktoken_model_name": null }, - "output_types": [ - "Embeddings" - ], + "output_types": ["Embeddings"], "field_formatters": {}, "frozen": false, "field_order": [], @@ -3102,20 +2900,11 @@ "targetHandle": { 
"fieldName": "context", "id": "Prompt-xeI6K", - "inputTypes": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], "type": "str" }, "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], + "baseClasses": ["object", "Text", "str"], "dataType": "TextOutput", "id": "TextOutput-BDknO" } @@ -3136,21 +2925,11 @@ "targetHandle": { "fieldName": "question", "id": "Prompt-xeI6K", - "inputTypes": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], "type": "str" }, "sourceHandle": { - "baseClasses": [ - "Text", - "str", - "object", - "Record" - ], + "baseClasses": ["Text", "str", "object", "Record"], "dataType": "ChatInput", "id": "ChatInput-yxMKE" } @@ -3171,17 +2950,11 @@ "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-EjXlN", - "inputTypes": [ - "Text" - ], + "inputTypes": ["Text"], "type": "str" }, "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], + "baseClasses": ["object", "Text", "str"], "dataType": "Prompt", "id": "Prompt-xeI6K" } @@ -3202,17 +2975,11 @@ "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-Q39I8", - "inputTypes": [ - "Text" - ], + "inputTypes": ["Text"], "type": "str" }, "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], + "baseClasses": ["object", "Text", "str"], "dataType": "OpenAIModel", "id": "OpenAIModel-EjXlN" } @@ -3233,16 +3000,11 @@ "targetHandle": { "fieldName": "inputs", "id": "RecursiveCharacterTextSplitter-tR9QM", - "inputTypes": [ - "Document", - "Record" - ], + "inputTypes": ["Document", "Record"], "type": "Document" }, "sourceHandle": { - "baseClasses": [ - "Record" - ], + "baseClasses": ["Record"], "dataType": "File", "id": "File-t0a6a" } @@ -3266,9 +3028,7 @@ "type": "Embeddings" }, "sourceHandle": { - "baseClasses": [ - "Embeddings" - ], + "baseClasses": ["Embeddings"], "dataType": 
"OpenAIEmbeddings", "id": "OpenAIEmbeddings-ZlOk1" } @@ -3288,18 +3048,11 @@ "targetHandle": { "fieldName": "input_value", "id": "AstraDBSearch-41nRz", - "inputTypes": [ - "Text" - ], + "inputTypes": ["Text"], "type": "str" }, "sourceHandle": { - "baseClasses": [ - "Text", - "str", - "object", - "Record" - ], + "baseClasses": ["Text", "str", "object", "Record"], "dataType": "ChatInput", "id": "ChatInput-yxMKE" } @@ -3323,9 +3076,7 @@ "type": "Record" }, "sourceHandle": { - "baseClasses": [ - "Record" - ], + "baseClasses": ["Record"], "dataType": "RecursiveCharacterTextSplitter", "id": "RecursiveCharacterTextSplitter-tR9QM" } @@ -3350,9 +3101,7 @@ "type": "Embeddings" }, "sourceHandle": { - "baseClasses": [ - "Embeddings" - ], + "baseClasses": ["Embeddings"], "dataType": "OpenAIEmbeddings", "id": "OpenAIEmbeddings-9TPjc" } @@ -3373,16 +3122,11 @@ "targetHandle": { "fieldName": "input_value", "id": "TextOutput-BDknO", - "inputTypes": [ - "Record", - "Text" - ], + "inputTypes": ["Record", "Text"], "type": "str" }, "sourceHandle": { - "baseClasses": [ - "Record" - ], + "baseClasses": ["Record"], "dataType": "AstraDBSearch", "id": "AstraDBSearch-41nRz" } @@ -3404,4 +3148,4 @@ "name": "Vector Store RAG", "last_tested_version": "1.0.0a0", "is_component": false -} \ No newline at end of file +} diff --git a/src/backend/base/langflow/load/__init__.py b/src/backend/base/langflow/load/__init__.py index 2002e8bb1..59dbdf6e0 100644 --- a/src/backend/base/langflow/load/__init__.py +++ b/src/backend/base/langflow/load/__init__.py @@ -1,3 +1,4 @@ -from .load import load_flow_from_json, run_flow_from_json # noqa: F401 +from .load import load_flow_from_json, run_flow_from_json +from .utils import upload_file, get_flow -__all__ = ["load_flow_from_json", "run_flow_from_json"] +__all__ = ["load_flow_from_json", "run_flow_from_json", "upload_file", "get_flow"] diff --git a/src/backend/base/langflow/load/utils.py b/src/backend/base/langflow/load/utils.py new file mode 100644 index 
000000000..9c2918e91 --- /dev/null +++ b/src/backend/base/langflow/load/utils.py @@ -0,0 +1,89 @@ +import httpx + +from langflow.services.database.models.flow.model import FlowBase + + +def upload(file_path, host, flow_id): + """ + Upload a file to Langflow and return the file path. + + Args: + file_path (str): The path to the file to be uploaded. + host (str): The host URL of Langflow. + flow_id (UUID): The ID of the flow to which the file belongs. + + Returns: + dict: A dictionary containing the file path. + + Raises: + Exception: If an error occurs during the upload process. + """ + try: + url = f"{host}/api/v1/upload/{flow_id}" + response = httpx.post(url, files={"file": open(file_path, "rb")}) + if response.status_code == 200: + return response.json() + else: + raise Exception(f"Error uploading file: {response.status_code}") + except Exception as e: + raise Exception(f"Error uploading file: {e}") + + +def upload_file(file_path, host, flow_id, components, tweaks={}): + """ + Upload a file to Langflow and return the file path. + + Args: + file_path (str): The path to the file to be uploaded. + host (str): The host URL of Langflow. + port (int): The port number of Langflow. + flow_id (UUID): The ID of the flow to which the file belongs. + components (str): List of component IDs or names that need the file. + tweaks (dict): A dictionary of tweaks to be applied to the file. + + Returns: + dict: A dictionary containing the file path and any tweaks that were applied. + + Raises: + Exception: If an error occurs during the upload process. + """ + try: + response = upload(file_path, host, flow_id) + if response["file_path"]: + for component in components: + if isinstance(component, str): + tweaks[component] = {"file_path": response["file_path"]} + else: + raise ValueError(f"Component ID or name must be a string. 
Got {type(component)}") + return tweaks + else: + raise ValueError("Error uploading file") + except Exception as e: + raise ValueError(f"Error uploading file: {e}") + + +def get_flow(url: str, flow_id: str): + """Get the details of a flow from Langflow. + + Args: + url (str): The host URL of Langflow. + port (int): The port number of Langflow. + flow_id (UUID): The ID of the flow to retrieve. + + Returns: + dict: A dictionary containing the details of the flow. + + Raises: + Exception: If an error occurs during the retrieval process. + """ + try: + flow_url = f"{url}/api/v1/flows/{flow_id}" + response = httpx.get(flow_url) + if response.status_code == 200: + json_response = response.json() + flow = FlowBase(**json_response).model_dump() + return flow + else: + raise Exception(f"Error retrieving flow: {response.status_code}") + except Exception as e: + raise Exception(f"Error retrieving flow: {e}") diff --git a/src/backend/base/langflow/processing/process.py b/src/backend/base/langflow/processing/process.py index d53b5e25f..aeff0f1a4 100644 --- a/src/backend/base/langflow/processing/process.py +++ b/src/backend/base/langflow/processing/process.py @@ -8,6 +8,7 @@ from langflow.graph.schema import RunOutputs from langflow.graph.vertex.base import Vertex from langflow.schema.graph import InputValue, Tweaks from langflow.schema.schema import INPUT_FIELD_NAME +from langflow.services.deps import get_settings_service from langflow.services.session.service import SessionService if TYPE_CHECKING: @@ -49,6 +50,8 @@ async def run_graph_internal( inputs_list.append({INPUT_FIELD_NAME: input_value_request.input_value}) types.append(input_value_request.type) + fallback_to_env_vars = get_settings_service().settings.fallback_to_env_var + run_outputs = await graph.arun( inputs_list, components, @@ -56,6 +59,7 @@ async def run_graph_internal( outputs or [], stream=stream, session_id=session_id_str or "", + fallback_to_env_vars=fallback_to_env_vars ) if session_id_str and 
session_service: await session_service.update_session(session_id_str, (graph, artifacts)) diff --git a/src/backend/base/langflow/services/monitor/schema.py b/src/backend/base/langflow/services/monitor/schema.py index 9cbc86bda..349d5fc2a 100644 --- a/src/backend/base/langflow/services/monitor/schema.py +++ b/src/backend/base/langflow/services/monitor/schema.py @@ -117,6 +117,13 @@ class MessageModelResponse(MessageModel): return v +class MessageModelRequest(MessageModel): + message: str = Field(default="") + sender: str = Field(default="") + sender_name: str = Field(default="") + session_id: str = Field(default="") + + class VertexBuildModel(BaseModel): index: Optional[int] = Field(default=None, alias="index", exclude=True) id: Optional[str] = Field(default=None, alias="id") diff --git a/src/backend/base/langflow/services/monitor/service.py b/src/backend/base/langflow/services/monitor/service.py index 4500d081b..ab5a87f08 100644 --- a/src/backend/base/langflow/services/monitor/service.py +++ b/src/backend/base/langflow/services/monitor/service.py @@ -32,6 +32,10 @@ class MonitorService(Service): except Exception as e: logger.exception(f"Error initializing monitor service: {e}") + def exec_query(self, query: str): + with duckdb.connect(str(self.db_path)) as conn: + return conn.execute(query).df() + def to_df(self, table_name): return self.load_table_as_dataframe(table_name) @@ -69,7 +73,7 @@ class MonitorService(Service): valid: Optional[bool] = None, order_by: Optional[str] = "timestamp", ): - query = "SELECT id, flow_id, valid, logs, data, timestamp FROM vertex_builds" + query = "SELECT id, index,flow_id, valid, params, data, artifacts, timestamp FROM vertex_builds" conditions = [] if flow_id: conditions.append(f"flow_id = '{flow_id}'") @@ -88,6 +92,8 @@ class MonitorService(Service): with duckdb.connect(str(self.db_path)) as conn: df = conn.execute(query).df() + print(query) + return df.to_dict(orient="records") def delete_vertex_builds(self, flow_id: 
Optional[str] = None): @@ -98,11 +104,20 @@ class MonitorService(Service): with duckdb.connect(str(self.db_path)) as conn: conn.execute(query) - def delete_messages(self, session_id: str): + def delete_messages_session(self, session_id: str): query = f"DELETE FROM messages WHERE session_id = '{session_id}'" - with duckdb.connect(str(self.db_path)) as conn: - conn.execute(query) + return self.exec_query(query) + + def delete_messages(self, message_ids: list[int]): + query = f"DELETE FROM messages WHERE index IN ({','.join(map(str, message_ids))})" + + return self.exec_query(query) + + def update_message(self, message_id: int, **kwargs): + query = f"""UPDATE messages SET {', '.join(f"{k} = '{v}'" for k, v in kwargs.items())} WHERE index = {message_id}""" + + return self.exec_query(query) def add_message(self, message: MessageModel): self.add_row("messages", message) diff --git a/src/backend/base/poetry.lock b/src/backend/base/poetry.lock index 324dccc72..12fe93f21 100644 --- a/src/backend/base/poetry.lock +++ b/src/backend/base/poetry.lock @@ -463,43 +463,43 @@ files = [ [[package]] name = "cryptography" -version = "42.0.7" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"}, - {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"}, - {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"}, - {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"}, - {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"}, - {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"}, - {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"}, - {file = 
"cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"}, - {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = 
"sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] @@ -1159,13 +1159,13 @@ files = [ [[package]] name = "langchain" -version = "0.2.1" +version = "0.2.2" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain-0.2.1-py3-none-any.whl", hash = "sha256:3e13bf97c5717bce2c281f5117e8778823e8ccf62d949e73d3869448962b1c97"}, - {file = "langchain-0.2.1.tar.gz", hash = "sha256:5758a315e1ac92eb26dafec5ad0fafa03cafa686aba197d5bb0b1dd28cc03ebe"}, + {file = "langchain-0.2.2-py3-none-any.whl", hash = "sha256:58ca0c47bcdd156da66f50a0a4fcedc49bf6950827f4a6b06c8c4842d55805f3"}, + {file = "langchain-0.2.2.tar.gz", hash = "sha256:9d61e50e9cdc2bea659bc5e6c03650ba048fda63a307490ae368e539f61a0d3a"}, ] [package.dependencies] @@ -1197,13 +1197,13 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] [[package]] name = "langchain-community" -version = "0.2.1" +version = "0.2.3" description = "Community contributed LangChain integrations." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_community-0.2.1-py3-none-any.whl", hash = "sha256:b834e2c5ded6903b839fcaf566eee90a0ffae53405a0f7748202725e701d39cd"}, - {file = "langchain_community-0.2.1.tar.gz", hash = "sha256:079942e8f15da975769ccaae19042b7bba5481c42020bbbd7d8cad73a9393261"}, + {file = "langchain_community-0.2.3-py3-none-any.whl", hash = "sha256:aa895545be2f3f4aa2fea36f6da2e3b4ec50ce61ec986e8f146901a1e9138138"}, + {file = "langchain_community-0.2.3.tar.gz", hash = "sha256:a3c35af215e47b700e7cb4e548fa8b45c6d46d52b5a5a65af2577c5a0104fc9f"}, ] [package.dependencies] @@ -1220,22 +1220,22 @@ tenacity = ">=8.1.0,<9.0.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpathlib (>=0.18,<0.19)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", 
"markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpathlib (>=0.18,<0.19)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils 
(>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "simsimd (>=4.3.1,<5.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata 
(>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] [[package]] name = "langchain-core" -version = "0.2.3" +version = "0.2.4" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_core-0.2.3-py3-none-any.whl", hash = "sha256:22189b5a3a30bfd65eb995f95e627f7c2c3acb322feb89f5f5f2fb7df21833a7"}, - {file = "langchain_core-0.2.3.tar.gz", hash = "sha256:fbc75a64b9c0b7655d96ca57a707df1e6c09efc1539c36adbd73260612549810"}, + {file = "langchain_core-0.2.4-py3-none-any.whl", hash = "sha256:5212f7ec78a525e88a178ed3aefe2fd7134b03fb92573dfbab9914f1d92d6ec5"}, + {file = "langchain_core-0.2.4.tar.gz", hash = "sha256:82bdcc546eb0341cefcf1f4ecb3e49836fff003903afddda2d1312bb8491ef81"}, ] [package.dependencies] jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.65,<0.2.0" +langsmith = ">=0.1.66,<0.2.0" packaging = ">=23.2,<24.0" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -1246,13 +1246,13 @@ extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langchain-experimental" -version = "0.0.59" +version = "0.0.60" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_experimental-0.0.59-py3-none-any.whl", hash = "sha256:d6ceb586c15ad35fc619542e86d01f0984a94985324a78a9ed8cd87615ff265d"}, - {file = "langchain_experimental-0.0.59.tar.gz", hash = "sha256:3a93f5c328f6ee1cd4f9dd8792c535df2d5638cff0d778ee25546804b5282fda"}, + {file = "langchain_experimental-0.0.60-py3-none-any.whl", hash = "sha256:ef3b6b6b84fe2bfe19eba6d1a98005e27d96576514c6415f5afe4ace5bf477d8"}, + {file = "langchain_experimental-0.0.60.tar.gz", hash = "sha256:a16cbcd18cda6b86be8f41fed7963c13569295def0d8b4c6324b806d878d442c"}, ] [package.dependencies] @@ -1264,13 +1264,13 @@ extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "pandas (>=2. 
[[package]] name = "langchain-text-splitters" -version = "0.2.0" +version = "0.2.1" description = "LangChain text splitting utilities" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_text_splitters-0.2.0-py3-none-any.whl", hash = "sha256:7b4c6a45f8471630a882b321e138329b6897102a5bc62f4c12be1c0b05bb9199"}, - {file = "langchain_text_splitters-0.2.0.tar.gz", hash = "sha256:b32ab4f7397f7d42c1fa3283fefc2547ba356bd63a68ee9092865e5ad83c82f9"}, + {file = "langchain_text_splitters-0.2.1-py3-none-any.whl", hash = "sha256:c2774a85f17189eaca50339629d2316d13130d4a8d9f1a1a96f3a03670c4a138"}, + {file = "langchain_text_splitters-0.2.1.tar.gz", hash = "sha256:06853d17d7241ecf5c97c7b6ef01f600f9b0fb953dd997838142a527a4f32ea4"}, ] [package.dependencies] @@ -1296,13 +1296,13 @@ types-requests = ">=2.31.0.2,<3.0.0.0" [[package]] name = "langsmith" -version = "0.1.67" +version = "0.1.72" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.67-py3-none-any.whl", hash = "sha256:7eb2e1c1b375925ff47700ed8071e10c15e942e9d1d634b4a449a9060364071a"}, - {file = "langsmith-0.1.67.tar.gz", hash = "sha256:149558669a2ac4f21471cd964e61072687bba23b7c1ccb51f190a8f59b595b39"}, + {file = "langsmith-0.1.72-py3-none-any.whl", hash = "sha256:a4456707669521bd75b7431b9205a6b99579fb9ff01bd338f52d29df11a7662d"}, + {file = "langsmith-0.1.72.tar.gz", hash = "sha256:262ae9e8aceaba50f3a0f5b6eb559d6110886f0afc6b0ed5270e7d3d3f1fd8d6"}, ] [package.dependencies] @@ -2104,18 +2104,18 @@ files = [ [[package]] name = "pydantic" -version = "2.7.2" +version = "2.7.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.2-py3-none-any.whl", hash = "sha256:834ab954175f94e6e68258537dc49402c4a5e9d0409b9f1b86b7e934a8372de7"}, - {file = "pydantic-2.7.2.tar.gz", hash = "sha256:71b2945998f9c9b7919a45bde9a50397b289937d215ae141c1d0903ba7149fd7"}, + {file = "pydantic-2.7.3-py3-none-any.whl", hash = "sha256:ea91b002777bf643bb20dd717c028ec43216b24a6001a280f83877fd2655d0b4"}, + {file = "pydantic-2.7.3.tar.gz", hash = "sha256:c46c76a40bb1296728d7a8b99aa73dd70a48c3510111ff290034f860c99c419e"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.3" +pydantic-core = "2.18.4" typing-extensions = ">=4.6.1" [package.extras] @@ -2123,90 +2123,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.3" +version = "2.18.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:744697428fcdec6be5670460b578161d1ffe34743a5c15656be7ea82b008197c"}, - {file = "pydantic_core-2.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:37b40c05ced1ba4218b14986fe6f283d22e1ae2ff4c8e28881a70fb81fbfcda7"}, - {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a9a75622357076efb6b311983ff190fbfb3c12fc3a853122b34d3d358126c"}, - {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2e253af04ceaebde8eb201eb3f3e3e7e390f2d275a88300d6a1959d710539e2"}, - {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:855ec66589c68aa367d989da5c4755bb74ee92ccad4fdb6af942c3612c067e34"}, - {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d3e42bb54e7e9d72c13ce112e02eb1b3b55681ee948d748842171201a03a98a"}, - {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6ac9ffccc9d2e69d9fba841441d4259cb668ac180e51b30d3632cd7abca2b9b"}, - {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c56eca1686539fa0c9bda992e7bd6a37583f20083c37590413381acfc5f192d6"}, - {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17954d784bf8abfc0ec2a633108207ebc4fa2df1a0e4c0c3ccbaa9bb01d2c426"}, - {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:98ed737567d8f2ecd54f7c8d4f8572ca7c7921ede93a2e52939416170d357812"}, - {file = "pydantic_core-2.18.3-cp310-none-win32.whl", hash = "sha256:9f9e04afebd3ed8c15d67a564ed0a34b54e52136c6d40d14c5547b238390e779"}, - {file = "pydantic_core-2.18.3-cp310-none-win_amd64.whl", hash = "sha256:45e4ffbae34f7ae30d0047697e724e534a7ec0a82ef9994b7913a412c21462a0"}, - {file = "pydantic_core-2.18.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b9ebe8231726c49518b16b237b9fe0d7d361dd221302af511a83d4ada01183ab"}, - {file = "pydantic_core-2.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b8e20e15d18bf7dbb453be78a2d858f946f5cdf06c5072453dace00ab652e2b2"}, - {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d9ff283cd3459fa0bf9b0256a2b6f01ac1ff9ffb034e24457b9035f75587cb"}, - {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f7ef5f0ebb77ba24c9970da18b771711edc5feaf00c10b18461e0f5f5949231"}, - {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73038d66614d2e5cde30435b5afdced2b473b4c77d4ca3a8624dd3e41a9c19be"}, - {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6afd5c867a74c4d314c557b5ea9520183fadfbd1df4c2d6e09fd0d990ce412cd"}, - {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd7df92f28d351bb9f12470f4c533cf03d1b52ec5a6e5c58c65b183055a60106"}, - {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80aea0ffeb1049336043d07799eace1c9602519fb3192916ff525b0287b2b1e4"}, - {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aaee40f25bba38132e655ffa3d1998a6d576ba7cf81deff8bfa189fb43fd2bbe"}, - {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9128089da8f4fe73f7a91973895ebf2502539d627891a14034e45fb9e707e26d"}, - {file = "pydantic_core-2.18.3-cp311-none-win32.whl", hash = "sha256:fec02527e1e03257aa25b1a4dcbe697b40a22f1229f5d026503e8b7ff6d2eda7"}, - {file = "pydantic_core-2.18.3-cp311-none-win_amd64.whl", hash = "sha256:58ff8631dbab6c7c982e6425da8347108449321f61fe427c52ddfadd66642af7"}, - {file = "pydantic_core-2.18.3-cp311-none-win_arm64.whl", hash = "sha256:3fc1c7f67f34c6c2ef9c213e0f2a351797cda98249d9ca56a70ce4ebcaba45f4"}, - {file = "pydantic_core-2.18.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:f0928cde2ae416a2d1ebe6dee324709c6f73e93494d8c7aea92df99aab1fc40f"}, - {file = "pydantic_core-2.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bee9bb305a562f8b9271855afb6ce00223f545de3d68560b3c1649c7c5295e9"}, - {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e862823be114387257dacbfa7d78547165a85d7add33b446ca4f4fae92c7ff5c"}, - {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a36f78674cbddc165abab0df961b5f96b14461d05feec5e1f78da58808b97e7"}, - {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba905d184f62e7ddbb7a5a751d8a5c805463511c7b08d1aca4a3e8c11f2e5048"}, - {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fdd362f6a586e681ff86550b2379e532fee63c52def1c666887956748eaa326"}, - {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b214b7ee3bd3b865e963dbed0f8bc5375f49449d70e8d407b567af3222aae4"}, - {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:691018785779766127f531674fa82bb368df5b36b461622b12e176c18e119022"}, - {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:60e4c625e6f7155d7d0dcac151edf5858102bc61bf959d04469ca6ee4e8381bd"}, - {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4e651e47d981c1b701dcc74ab8fec5a60a5b004650416b4abbef13db23bc7be"}, - {file = "pydantic_core-2.18.3-cp312-none-win32.whl", hash = "sha256:ffecbb5edb7f5ffae13599aec33b735e9e4c7676ca1633c60f2c606beb17efc5"}, - {file = "pydantic_core-2.18.3-cp312-none-win_amd64.whl", hash = "sha256:2c8333f6e934733483c7eddffdb094c143b9463d2af7e6bd85ebcb2d4a1b82c6"}, - {file = "pydantic_core-2.18.3-cp312-none-win_arm64.whl", hash = 
"sha256:7a20dded653e516a4655f4c98e97ccafb13753987434fe7cf044aa25f5b7d417"}, - {file = "pydantic_core-2.18.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:eecf63195be644b0396f972c82598cd15693550f0ff236dcf7ab92e2eb6d3522"}, - {file = "pydantic_core-2.18.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c44efdd3b6125419c28821590d7ec891c9cb0dff33a7a78d9d5c8b6f66b9702"}, - {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e59fca51ffbdd1638b3856779342ed69bcecb8484c1d4b8bdb237d0eb5a45e2"}, - {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70cf099197d6b98953468461d753563b28e73cf1eade2ffe069675d2657ed1d5"}, - {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63081a49dddc6124754b32a3774331467bfc3d2bd5ff8f10df36a95602560361"}, - {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:370059b7883485c9edb9655355ff46d912f4b03b009d929220d9294c7fd9fd60"}, - {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a64faeedfd8254f05f5cf6fc755023a7e1606af3959cfc1a9285744cc711044"}, - {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19d2e725de0f90d8671f89e420d36c3dd97639b98145e42fcc0e1f6d492a46dc"}, - {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:67bc078025d70ec5aefe6200ef094576c9d86bd36982df1301c758a9fff7d7f4"}, - {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf952c3f4100e203cbaf8e0c907c835d3e28f9041474e52b651761dc248a3c0"}, - {file = "pydantic_core-2.18.3-cp38-none-win32.whl", hash = "sha256:9a46795b1f3beb167eaee91736d5d17ac3a994bf2215a996aed825a45f897558"}, - {file = "pydantic_core-2.18.3-cp38-none-win_amd64.whl", hash = "sha256:200ad4e3133cb99ed82342a101a5abf3d924722e71cd581cc113fe828f727fbc"}, - 
{file = "pydantic_core-2.18.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:304378b7bf92206036c8ddd83a2ba7b7d1a5b425acafff637172a3aa72ad7083"}, - {file = "pydantic_core-2.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c826870b277143e701c9ccf34ebc33ddb4d072612683a044e7cce2d52f6c3fef"}, - {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e201935d282707394f3668380e41ccf25b5794d1b131cdd96b07f615a33ca4b1"}, - {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5560dda746c44b48bf82b3d191d74fe8efc5686a9ef18e69bdabccbbb9ad9442"}, - {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b32c2a1f8032570842257e4c19288eba9a2bba4712af542327de9a1204faff8"}, - {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:929c24e9dea3990bc8bcd27c5f2d3916c0c86f5511d2caa69e0d5290115344a9"}, - {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a8376fef60790152564b0eab376b3e23dd6e54f29d84aad46f7b264ecca943"}, - {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dccf3ef1400390ddd1fb55bf0632209d39140552d068ee5ac45553b556780e06"}, - {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41dbdcb0c7252b58fa931fec47937edb422c9cb22528f41cb8963665c372caf6"}, - {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:666e45cf071669fde468886654742fa10b0e74cd0fa0430a46ba6056b24fb0af"}, - {file = "pydantic_core-2.18.3-cp39-none-win32.whl", hash = "sha256:f9c08cabff68704a1b4667d33f534d544b8a07b8e5d039c37067fceb18789e78"}, - {file = "pydantic_core-2.18.3-cp39-none-win_amd64.whl", hash = "sha256:4afa5f5973e8572b5c0dcb4e2d4fda7890e7cd63329bd5cc3263a25c92ef0026"}, - {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:77319771a026f7c7d29c6ebc623de889e9563b7087911b46fd06c044a12aa5e9"}, - {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:df11fa992e9f576473038510d66dd305bcd51d7dd508c163a8c8fe148454e059"}, - {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d531076bdfb65af593326ffd567e6ab3da145020dafb9187a1d131064a55f97c"}, - {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d33ce258e4e6e6038f2b9e8b8a631d17d017567db43483314993b3ca345dcbbb"}, - {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f9cd7f5635b719939019be9bda47ecb56e165e51dd26c9a217a433e3d0d59a9"}, - {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cd4a032bb65cc132cae1fe3e52877daecc2097965cd3914e44fbd12b00dae7c5"}, - {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f2718430098bcdf60402136c845e4126a189959d103900ebabb6774a5d9fdb"}, - {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0037a92cf0c580ed14e10953cdd26528e8796307bb8bb312dc65f71547df04d"}, - {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b95a0972fac2b1ff3c94629fc9081b16371dad870959f1408cc33b2f78ad347a"}, - {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a62e437d687cc148381bdd5f51e3e81f5b20a735c55f690c5be94e05da2b0d5c"}, - {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b367a73a414bbb08507da102dc2cde0fa7afe57d09b3240ce82a16d608a7679c"}, - {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ecce4b2360aa3f008da3327d652e74a0e743908eac306198b47e1c58b03dd2b"}, - {file = 
"pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4435b8d83f0c9561a2a9585b1de78f1abb17cb0cef5f39bf6a4b47d19bafe3"}, - {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:616221a6d473c5b9aa83fa8982745441f6a4a62a66436be9445c65f241b86c94"}, - {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7e6382ce89a92bc1d0c0c5edd51e931432202b9080dc921d8d003e616402efd1"}, - {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff58f379345603d940e461eae474b6bbb6dab66ed9a851ecd3cb3709bf4dcf6a"}, - {file = "pydantic_core-2.18.3.tar.gz", hash = "sha256:432e999088d85c8f36b9a3f769a8e2b57aabd817bbb729a90d1fe7f18f6f1f39"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, + {file = 
"pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, + {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, + {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, + {file = 
"pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, + {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, + {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, + {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, + {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, + {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, + {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, + {file = 
"pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, + {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, + {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, + {file = 
"pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, + {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, + {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, + {file = 
"pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, + {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, ] [package.dependencies] @@ -2214,13 +2214,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.3.0" +version = "2.3.1" description = "Settings management using Pydantic" optional = false python-versions 
= ">=3.8" files = [ - {file = "pydantic_settings-2.3.0-py3-none-any.whl", hash = "sha256:26eeed27370a9c5e3f64e4a7d6602573cbedf05ed940f1d5b11c3f178427af7a"}, - {file = "pydantic_settings-2.3.0.tar.gz", hash = "sha256:78db28855a71503cfe47f39500a1dece523c640afd5280edb5c5c9c9cfa534c9"}, + {file = "pydantic_settings-2.3.1-py3-none-any.whl", hash = "sha256:acb2c213140dfff9669f4fe9f8180d43914f51626db28ab2db7308a576cce51a"}, + {file = "pydantic_settings-2.3.1.tar.gz", hash = "sha256:e34bbd649803a6bb3e2f0f58fb0edff1f0c7f556849fda106cc21bcce12c30ab"}, ] [package.dependencies] diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml index 3d80fe194..14a7b87a8 100644 --- a/src/backend/base/pyproject.toml +++ b/src/backend/base/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow-base" -version = "0.0.55" +version = "0.0.57" description = "A Python package with a built-in web application" authors = ["Langflow "] maintainers = [ diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json index 3d0118f66..1d3f2e149 100644 --- a/src/frontend/package-lock.json +++ b/src/frontend/package-lock.json @@ -26,6 +26,7 @@ "@radix-ui/react-slot": "^1.0.2", "@radix-ui/react-switch": "^1.0.3", "@radix-ui/react-tabs": "^1.0.4", + "@radix-ui/react-toggle": "^1.0.3", "@radix-ui/react-tooltip": "^1.0.6", "@tabler/icons-react": "^2.32.0", "@tailwindcss/forms": "^0.5.6", @@ -40,6 +41,7 @@ "class-variance-authority": "^0.6.1", "clsx": "^1.2.1", "cmdk": "^1.0.0", + "debounce-promise": "^3.1.2", "dompurify": "^3.0.5", "dotenv": "^16.4.5", "esbuild": "^0.17.19", @@ -2761,6 +2763,31 @@ } } }, + "node_modules/@radix-ui/react-toggle": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-toggle/-/react-toggle-1.0.3.tgz", + "integrity": "sha512-Pkqg3+Bc98ftZGsl60CLANXQBBQ4W3mTFS9EJvNxKMZ7magklKV69/id1mlAlOFDDfHvlCms0fx8fA4CMKDJHg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + 
"@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-controllable-state": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-tooltip": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.0.7.tgz", @@ -5670,6 +5697,11 @@ "node": ">=12" } }, + "node_modules/debounce-promise": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/debounce-promise/-/debounce-promise-3.1.2.tgz", + "integrity": "sha512-rZHcgBkbYavBeD9ej6sP56XfG53d51CD4dnaw989YX/nZ/ZJfgRx/9ePKmTNiUiyQvh4mtrMoS3OAWW+yoYtpg==" + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", diff --git a/src/frontend/package.json b/src/frontend/package.json index 053a246ee..7a56d080e 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -21,6 +21,7 @@ "@radix-ui/react-slot": "^1.0.2", "@radix-ui/react-switch": "^1.0.3", "@radix-ui/react-tabs": "^1.0.4", + "@radix-ui/react-toggle": "^1.0.3", "@radix-ui/react-tooltip": "^1.0.6", "@tabler/icons-react": "^2.32.0", "@tailwindcss/forms": "^0.5.6", @@ -35,6 +36,7 @@ "class-variance-authority": "^0.6.1", "clsx": "^1.2.1", "cmdk": "^1.0.0", + "debounce-promise": "^3.1.2", "dompurify": "^3.0.5", "dotenv": "^16.4.5", "esbuild": "^0.17.19", diff --git a/src/frontend/src/App.tsx b/src/frontend/src/App.tsx index 5eee2bfa6..b9e02a27d 100644 --- a/src/frontend/src/App.tsx +++ b/src/frontend/src/App.tsx @@ -222,12 +222,19 @@ export default function App() { id={alert.id} removeAlert={removeAlert} /> + ) : alert.type === "notice" ? ( + ) : ( - alert.type === "notice" && ( - @@ -236,20 +243,6 @@ export default function App() { ))} -
- {tempNotificationList.map((alert) => ( -
- {alert.type === "success" && ( - - )} -
- ))} -
); diff --git a/src/frontend/src/alerts/alertDropDown/index.tsx b/src/frontend/src/alerts/alertDropDown/index.tsx index 3577a5de6..05f42922d 100644 --- a/src/frontend/src/alerts/alertDropDown/index.tsx +++ b/src/frontend/src/alerts/alertDropDown/index.tsx @@ -16,13 +16,13 @@ export default function AlertDropdown({ }: AlertDropdownType): JSX.Element { const notificationList = useAlertStore((state) => state.notificationList); const clearNotificationList = useAlertStore( - (state) => state.clearNotificationList + (state) => state.clearNotificationList, ); const removeFromNotificationList = useAlertStore( - (state) => state.removeFromNotificationList + (state) => state.removeFromNotificationList, ); const setNotificationCenter = useAlertStore( - (state) => state.setNotificationCenter + (state) => state.setNotificationCenter, ); const [open, setOpen] = useState(false); @@ -36,7 +36,7 @@ export default function AlertDropdown({ }} > {children} - +
Notifications
diff --git a/src/frontend/src/alerts/error/index.tsx b/src/frontend/src/alerts/error/index.tsx index ec23c103e..3690590b9 100644 --- a/src/frontend/src/alerts/error/index.tsx +++ b/src/frontend/src/alerts/error/index.tsx @@ -40,7 +40,7 @@ export default function ErrorAlert({ removeAlert(id); }, 500); }} - className="error-build-message nocopy nopan nodelete nodrag noundo" + className="error-build-message nocopy nowheel nopan nodelete nodrag noundo" >
diff --git a/src/frontend/src/alerts/notice/index.tsx b/src/frontend/src/alerts/notice/index.tsx index faaa4db6a..fb29954ea 100644 --- a/src/frontend/src/alerts/notice/index.tsx +++ b/src/frontend/src/alerts/notice/index.tsx @@ -36,7 +36,7 @@ export default function NoticeAlert({ setShow(false); removeAlert(id); }} - className="nocopy nopan nodelete nodrag noundo mt-6 w-96 rounded-md bg-info-background p-4 shadow-xl" + className="nocopy nowheel nopan nodelete nodrag noundo mt-6 w-96 rounded-md bg-info-background p-4 shadow-xl" >
diff --git a/src/frontend/src/alerts/success/index.tsx b/src/frontend/src/alerts/success/index.tsx index ec6abf589..db62c8432 100644 --- a/src/frontend/src/alerts/success/index.tsx +++ b/src/frontend/src/alerts/success/index.tsx @@ -34,7 +34,7 @@ export default function SuccessAlert({ setShow(false); removeAlert(id); }} - className="success-alert nocopy nopan nodelete nodrag noundo" + className="success-alert nocopy nowheel nopan nodelete nodrag noundo" >
diff --git a/src/frontend/src/components/accordionComponent/index.tsx b/src/frontend/src/components/accordionComponent/index.tsx index fdcf8b96c..7c5562e7f 100644 --- a/src/frontend/src/components/accordionComponent/index.tsx +++ b/src/frontend/src/components/accordionComponent/index.tsx @@ -6,10 +6,13 @@ import { AccordionTrigger, } from "../../components/ui/accordion"; import { AccordionComponentType } from "../../types/components"; +import { cn } from "../../utils/utils"; +import ShadTooltip from "../shadTooltipComponent"; export default function AccordionComponent({ trigger, children, + disabled, open = [], keyValue, sideBar, @@ -29,7 +32,9 @@ export default function AccordionComponent({ } function handleClick(): void { - value === "" ? setValue(keyValue!) : setValue(""); + if (!disabled) { + value === "" ? setValue(keyValue!) : setValue(""); + } } return ( @@ -38,16 +43,18 @@ export default function AccordionComponent({ type="single" className="w-full" value={value} - onValueChange={setValue} + onValueChange={!disabled ? setValue : () => {}} > { handleClick(); }} - className={ - sideBar ? "w-full bg-muted px-[0.75rem] py-[0.5rem]" : "ml-3" - } + disabled={disabled} + className={cn( + sideBar ? "w-full bg-muted px-[0.75rem] py-[0.5rem]" : "ml-3", + disabled ? 
"cursor-not-allowed" : "cursor-pointer", + )} > {trigger} diff --git a/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx b/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx index c8e5ad3b6..36b68a7e8 100644 --- a/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx +++ b/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx @@ -7,7 +7,6 @@ import { useTypesStore } from "../../stores/typesStore"; import { ResponseErrorDetailAPI } from "../../types/api"; import ForwardedIconComponent from "../genericIconComponent"; import InputComponent from "../inputComponent"; -import { Button } from "../ui/button"; import { Input } from "../ui/input"; import { Label } from "../ui/label"; import { Textarea } from "../ui/textarea"; @@ -70,7 +69,12 @@ export default function AddNewVariableButton({ children }): JSX.Element { }); } return ( - +
- - - + ); } diff --git a/src/frontend/src/components/dictComponent/index.tsx b/src/frontend/src/components/dictComponent/index.tsx index 5161135ed..39850e6e3 100644 --- a/src/frontend/src/components/dictComponent/index.tsx +++ b/src/frontend/src/components/dictComponent/index.tsx @@ -29,7 +29,7 @@ export default function DictComponent({
1 && editNode ? "my-1" : "", - "flex flex-col gap-3" + "flex flex-col gap-3", )} > { diff --git a/src/frontend/src/components/editFlowSettingsComponent/index.tsx b/src/frontend/src/components/editFlowSettingsComponent/index.tsx index d8a6fc43e..26bc138e3 100644 --- a/src/frontend/src/components/editFlowSettingsComponent/index.tsx +++ b/src/frontend/src/components/editFlowSettingsComponent/index.tsx @@ -99,7 +99,7 @@ export const EditFlowSettings: React.FC = ({ {description === "" ? "No description" : description} @@ -109,7 +109,7 @@ export const EditFlowSettings: React.FC = ({ {setEndpointName && (