Merge remote-tracking branch 'origin/dev' into two_edges
This commit is contained in:
parent
3d82417068
commit
4c87f7662c
283 changed files with 11734 additions and 9297 deletions
2
.github/actions/poetry_caching/action.yml
vendored
2
.github/actions/poetry_caching/action.yml
vendored
|
|
@ -74,7 +74,7 @@ runs:
|
|||
if: steps.cache-bin-poetry.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
env:
|
||||
POETRY_VERSION: ${{ inputs.poetry-version }}
|
||||
POETRY_VERSION: ${{ inputs.poetry-version || env.POETRY_VERSION }}
|
||||
PYTHON_VERSION: ${{ inputs.python-version }}
|
||||
# Install poetry using the python version installed by setup-python step.
|
||||
run: |
|
||||
|
|
|
|||
2
.github/workflows/create-release.yml
vendored
2
.github/workflows/create-release.yml
vendored
|
|
@ -25,7 +25,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install poetry
|
||||
run: pipx install poetry==$POETRY_VERSION
|
||||
run: pipx install poetry==${{ env.POETRY_VERSION }}
|
||||
- name: Set up Python 3.12
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
|
|
|
|||
9
.github/workflows/docker-build.yml
vendored
9
.github/workflows/docker-build.yml
vendored
|
|
@ -19,6 +19,8 @@ on:
|
|||
options:
|
||||
- base
|
||||
- main
|
||||
env:
|
||||
POETRY_VERSION: "1.8.2"
|
||||
|
||||
jobs:
|
||||
docker_build:
|
||||
|
|
@ -78,7 +80,10 @@ jobs:
|
|||
langflowai/langflow-frontend:1.0-alpha
|
||||
|
||||
restart-space:
|
||||
name: Restart HuggingFace Spaces
|
||||
if: ${{ inputs.release_type == 'main' }}
|
||||
runs-on: ubuntu-latest
|
||||
needs: docker_build
|
||||
strategy:
|
||||
matrix:
|
||||
python-version:
|
||||
|
|
@ -98,6 +103,4 @@ jobs:
|
|||
|
||||
- name: Restart HuggingFace Spaces Build
|
||||
run: |
|
||||
poetry run python ./scripts/factory_restart_space.py
|
||||
env:
|
||||
HUGGINGFACE_API_TOKEN: ${{ secrets.HUGGINGFACE_API_TOKEN }}
|
||||
poetry run python ./scripts/factory_restart_space.py --space "Langflow/Langflow-Preview" --token ${{ secrets.HUGGINGFACE_API_TOKEN }}
|
||||
|
|
|
|||
2
.github/workflows/pre-release-base.yml
vendored
2
.github/workflows/pre-release-base.yml
vendored
|
|
@ -22,7 +22,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install poetry
|
||||
run: pipx install poetry==$POETRY_VERSION
|
||||
run: pipx install poetry==${{ env.POETRY_VERSION }}
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
|
|
|
|||
2
.github/workflows/pre-release-langflow.yml
vendored
2
.github/workflows/pre-release-langflow.yml
vendored
|
|
@ -26,7 +26,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install poetry
|
||||
run: pipx install poetry==$POETRY_VERSION
|
||||
run: pipx install poetry==${{ env.POETRY_VERSION }}
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
|
|
|
|||
6
.github/workflows/pre-release.yml
vendored
6
.github/workflows/pre-release.yml
vendored
|
|
@ -29,12 +29,16 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install poetry
|
||||
run: pipx install poetry==$POETRY_VERSION
|
||||
run: pipx install poetry==${{ env.POETRY_VERSION }}
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.10"
|
||||
cache: "poetry"
|
||||
- name: Set up Nodejs 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Check Version
|
||||
id: check-version
|
||||
run: |
|
||||
|
|
|
|||
2
.github/workflows/release.yml
vendored
2
.github/workflows/release.yml
vendored
|
|
@ -19,7 +19,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install poetry
|
||||
run: pipx install poetry==$POETRY_VERSION
|
||||
run: pipx install poetry==${{ env.POETRY_VERSION }}
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
|
|
|
|||
1
Makefile
1
Makefile
|
|
@ -168,6 +168,7 @@ build_and_install:
|
|||
|
||||
build_frontend:
|
||||
cd src/frontend && CI='' npm run build
|
||||
rm -rf src/backend/base/langflow/frontend
|
||||
cp -r src/frontend/build src/backend/base/langflow/frontend
|
||||
|
||||
build:
|
||||
|
|
|
|||
171
README.PT.md
Normal file
171
README.PT.md
Normal file
|
|
@ -0,0 +1,171 @@
|
|||
<!-- markdownlint-disable MD030 -->
|
||||
|
||||
# [](https://www.langflow.org)
|
||||
|
||||
<p align="center"><strong>
|
||||
Um framework visual para criar apps de agentes autônomos e RAG
|
||||
</strong></p>
|
||||
<p align="center" style="font-size: 12px;">
|
||||
Open-source, construído em Python, totalmente personalizável, agnóstico em relação a modelos e databases
|
||||
</p>
|
||||
|
||||
<p align="center" style="font-size: 12px;">
|
||||
<a href="https://docs.langflow.org" style="text-decoration: underline;">Docs</a> -
|
||||
<a href="https://discord.com/invite/EqksyE2EX9" style="text-decoration: underline;">Junte-se ao nosso Discord</a> -
|
||||
<a href="https://twitter.com/langflow_ai" style="text-decoration: underline;">Siga-nos no X</a> -
|
||||
<a href="https://huggingface.co/spaces/Langflow/Langflow-Preview" style="text-decoration: underline;">Demonstração</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/langflow-ai/langflow">
|
||||
<img src="https://img.shields.io/github/stars/langflow-ai/langflow">
|
||||
</a>
|
||||
<a href="https://discord.com/invite/EqksyE2EX9">
|
||||
<img src="https://img.shields.io/discord/1116803230643527710?label=Discord">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<div align="center">
|
||||
<a href="./README.md"><img alt="README em Inglês" src="https://img.shields.io/badge/English-d9d9d9"></a>
|
||||
<a href="./README.zh_CN.md"><img alt="README em Chinês Simplificado" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
</div>
|
||||
|
||||
<p align="center">
|
||||
<img src="./docs/static/img/langflow_basic_howto.gif" alt="Seu GIF" style="border: 3px solid #211C43;">
|
||||
</p>
|
||||
|
||||
# 📝 Conteúdo
|
||||
|
||||
- [📝 Conteúdo](#-conteúdo)
|
||||
- [📦 Introdução](#-introdução)
|
||||
- [🎨 Criar Fluxos](#-criar-fluxos)
|
||||
- [Deploy](#deploy)
|
||||
- [Deploy usando Google Cloud Platform](#deploy-usando-google-cloud-platform)
|
||||
- [Deploy on Railway](#deploy-on-railway)
|
||||
- [Deploy on Render](#deploy-on-render)
|
||||
- [🖥️ Interface de Linha de Comando (CLI)](#️-interface-de-linha-de-comando-cli)
|
||||
- [Uso](#uso)
|
||||
- [Variáveis de Ambiente](#variáveis-de-ambiente)
|
||||
- [👋 Contribuir](#-contribuir)
|
||||
- [🌟 Contribuidores](#-contribuidores)
|
||||
- [📄 Licença](#-licença)
|
||||
|
||||
# 📦 Introdução
|
||||
|
||||
Você pode instalar o Langflow com pip:
|
||||
|
||||
```shell
|
||||
# Certifique-se de ter >=Python 3.10 instalado no seu sistema.
|
||||
# Instale a versão pré-lançamento (recomendada para as atualizações mais recentes)
|
||||
python -m pip install langflow --pre --force-reinstall
|
||||
|
||||
# ou versão estável
|
||||
python -m pip install langflow -U
|
||||
```
|
||||
|
||||
Então, execute o Langflow com:
|
||||
|
||||
```shell
|
||||
python -m langflow run
|
||||
```
|
||||
|
||||
Você também pode visualizar o Langflow no [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview). [Clone o Space usando este link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) para criar seu próprio workspace do Langflow em minutos.
|
||||
|
||||
# 🎨 Criar Fluxos
|
||||
|
||||
Criar fluxos com Langflow é fácil. Basta arrastar componentes da barra lateral para o canvas e conectá-los para começar a construir sua aplicação.
|
||||
|
||||
Explore editando os parâmetros do prompt, agrupando componentes e construindo seus próprios componentes personalizados (Custom Components).
|
||||
|
||||
Quando terminar, você pode exportar seu fluxo como um arquivo JSON.
|
||||
|
||||
Carregue o fluxo com:
|
||||
|
||||
```python
|
||||
from langflow.load import run_flow_from_json
|
||||
|
||||
results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!")
|
||||
```
|
||||
|
||||
# Deploy
|
||||
|
||||
## Deploy usando Google Cloud Platform
|
||||
|
||||
Siga nosso passo a passo para fazer deploy do Langflow no Google Cloud Platform (GCP) usando o Google Cloud Shell. O guia está disponível no documento [**Langflow on Google Cloud Platform**](https://github.com/langflow-ai/langflow/blob/dev/docs/docs/deployment/gcp-deployment.md).
|
||||
|
||||
Alternativamente, clique no botão **"Open in Cloud Shell"** abaixo para iniciar o Google Cloud Shell, clonar o repositório do Langflow e começar um **tutorial interativo** que o guiará pelo processo de configuração dos recursos necessários e deploy do Langflow no seu projeto GCP.
|
||||
|
||||
[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md)
|
||||
|
||||
## Deploy on Railway
|
||||
|
||||
Use este template para implantar o Langflow 1.0 Preview no Railway:
|
||||
|
||||
[](https://railway.app/template/UsJ1uB?referralCode=MnPSdg)
|
||||
|
||||
Ou este para implantar o Langflow 0.6.x:
|
||||
|
||||
[](https://railway.app/template/JMXEWp?referralCode=MnPSdg)
|
||||
|
||||
## Deploy on Render
|
||||
|
||||
<a href="https://render.com/deploy?repo=https://github.com/langflow-ai/langflow/tree/dev">
|
||||
<img src="https://render.com/images/deploy-to-render-button.svg" alt="Deploy to Render" />
|
||||
</a>
|
||||
|
||||
# 🖥️ Interface de Linha de Comando (CLI)
|
||||
|
||||
O Langflow fornece uma interface de linha de comando (CLI) para fácil gerenciamento e configuração.
|
||||
|
||||
## Uso
|
||||
|
||||
Você pode executar o Langflow usando o seguinte comando:
|
||||
|
||||
```shell
|
||||
langflow run [OPTIONS]
|
||||
```
|
||||
|
||||
Cada opção é detalhada abaixo:
|
||||
|
||||
- `--help`: Exibe todas as opções disponíveis.
|
||||
- `--host`: Define o host para vincular o servidor. Pode ser configurado usando a variável de ambiente `LANGFLOW_HOST`. O padrão é `127.0.0.1`.
|
||||
- `--workers`: Define o número de processos. Pode ser configurado usando a variável de ambiente `LANGFLOW_WORKERS`. O padrão é `1`.
|
||||
- `--timeout`: Define o tempo limite do worker em segundos. O padrão é `60`.
|
||||
- `--port`: Define a porta para escutar. Pode ser configurado usando a variável de ambiente `LANGFLOW_PORT`. O padrão é `7860`.
|
||||
- `--env-file`: Especifica o caminho para o arquivo .env contendo variáveis de ambiente. O padrão é `.env`.
|
||||
- `--log-level`: Define o nível de log. Pode ser configurado usando a variável de ambiente `LANGFLOW_LOG_LEVEL`. O padrão é `critical`.
|
||||
- `--components-path`: Especifica o caminho para o diretório contendo componentes personalizados. Pode ser configurado usando a variável de ambiente `LANGFLOW_COMPONENTS_PATH`. O padrão é `langflow/components`.
|
||||
- `--log-file`: Especifica o caminho para o arquivo de log. Pode ser configurado usando a variável de ambiente `LANGFLOW_LOG_FILE`. O padrão é `logs/langflow.log`.
|
||||
- `--cache`: Seleciona o tipo de cache a ser usado. As opções são `InMemoryCache` e `SQLiteCache`. Pode ser configurado usando a variável de ambiente `LANGFLOW_LANGCHAIN_CACHE`. O padrão é `SQLiteCache`.
|
||||
- `--dev/--no-dev`: Alterna o modo de desenvolvimento. O padrão é `no-dev`.
|
||||
- `--path`: Especifica o caminho para o diretório frontend contendo os arquivos de build. Esta opção é apenas para fins de desenvolvimento. Pode ser configurado usando a variável de ambiente `LANGFLOW_FRONTEND_PATH`.
|
||||
- `--open-browser/--no-open-browser`: Alterna a opção de abrir o navegador após iniciar o servidor. Pode ser configurado usando a variável de ambiente `LANGFLOW_OPEN_BROWSER`. O padrão é `open-browser`.
|
||||
- `--remove-api-keys/--no-remove-api-keys`: Alterna a opção de remover as chaves de API dos projetos salvos no banco de dados. Pode ser configurado usando a variável de ambiente `LANGFLOW_REMOVE_API_KEYS`. O padrão é `no-remove-api-keys`.
|
||||
- `--install-completion [bash|zsh|fish|powershell|pwsh]`: Instala a conclusão para o shell especificado.
|
||||
- `--show-completion [bash|zsh|fish|powershell|pwsh]`: Exibe a conclusão para o shell especificado, permitindo que você copie ou personalize a instalação.
|
||||
- `--backend-only`: Este parâmetro, com valor padrão `False`, permite executar apenas o servidor backend sem o frontend. Também pode ser configurado usando a variável de ambiente `LANGFLOW_BACKEND_ONLY`.
|
||||
- `--store`: Este parâmetro, com valor padrão `True`, ativa os recursos da loja, use `--no-store` para desativá-los. Pode ser configurado usando a variável de ambiente `LANGFLOW_STORE`.
|
||||
|
||||
Esses parâmetros são importantes para usuários que precisam personalizar o comportamento do Langflow, especialmente em cenários de desenvolvimento ou deploy especializado.
|
||||
|
||||
### Variáveis de Ambiente
|
||||
|
||||
Você pode configurar muitas das opções de CLI usando variáveis de ambiente. Estas podem ser exportadas no seu sistema operacional ou adicionadas a um arquivo `.env` e carregadas usando a opção `--env-file`.
|
||||
|
||||
Um arquivo de exemplo `.env` chamado `.env.example` está incluído no projeto. Copie este arquivo para um novo arquivo chamado `.env` e substitua os valores de exemplo pelas suas configurações reais. Se você estiver definindo valores tanto no seu sistema operacional quanto no arquivo `.env`, as configurações do `.env` terão precedência.
|
||||
|
||||
# 👋 Contribuir
|
||||
|
||||
Aceitamos contribuições de desenvolvedores de todos os níveis para nosso projeto open-source no GitHub. Se você deseja contribuir, por favor, confira nossas [diretrizes de contribuição](./CONTRIBUTING.md) e ajude a tornar o Langflow mais acessível.
|
||||
|
||||
---
|
||||
|
||||
[](https://star-history.com/#langflow-ai/langflow&Date)
|
||||
|
||||
# 🌟 Contribuidores
|
||||
|
||||
[](https://github.com/langflow-ai/langflow/graphs/contributors)
|
||||
|
||||
# 📄 Licença
|
||||
|
||||
O Langflow é lançado sob a licença MIT. Veja o arquivo [LICENSE](LICENSE) para detalhes.
|
||||
|
|
@ -25,13 +25,18 @@
|
|||
</a>
|
||||
</p>
|
||||
|
||||
<div align="center">
|
||||
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
|
||||
<a href="./README.PT.md"><img alt="README in Portuguese" src="https://img.shields.io/badge/Portuguese-d9d9d9"></a>
|
||||
<a href="./README.zh_CN.md"><img alt="README in Simplified Chinese" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
</div>
|
||||
|
||||
<p align="center">
|
||||
<img src="./docs/static/img/langflow_basic_howto.gif" alt="Your GIF" style="border: 3px solid #211C43;">
|
||||
</p>
|
||||
|
||||
# 📝 Content
|
||||
|
||||
- [](#)
|
||||
- [📝 Content](#-content)
|
||||
- [📦 Get Started](#-get-started)
|
||||
- [🎨 Create Flows](#-create-flows)
|
||||
|
|
|
|||
172
README.zh_CN.md
Normal file
172
README.zh_CN.md
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
<!-- markdownlint-disable MD030 -->
|
||||
|
||||
# [](https://www.langflow.org)
|
||||
|
||||
<p align="center"><strong>
|
||||
一种用于构建多智能体和RAG应用的可视化框架
|
||||
</strong></p>
|
||||
<p align="center" style="font-size: 12px;">
|
||||
开源、Python驱动、完全可定制、大模型且不依赖于特定的向量存储
|
||||
</p>
|
||||
|
||||
<p align="center" style="font-size: 12px;">
|
||||
<a href="https://docs.langflow.org" style="text-decoration: underline;">文档</a> -
|
||||
<a href="https://discord.com/invite/EqksyE2EX9" style="text-decoration: underline;">加入我们的Discord社区</a> -
|
||||
<a href="https://twitter.com/langflow_ai" style="text-decoration: underline;">在X上关注我们</a> -
|
||||
<a href="https://huggingface.co/spaces/Langflow/Langflow-Preview" style="text-decoration: underline;">在线体验</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/langflow-ai/langflow">
|
||||
<img src="https://img.shields.io/github/stars/langflow-ai/langflow">
|
||||
</a>
|
||||
<a href="https://discord.com/invite/EqksyE2EX9">
|
||||
<img src="https://img.shields.io/discord/1116803230643527710?label=Discord">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<div align="center">
|
||||
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/英文-d9d9d9"></a>
|
||||
<a href="./README.zh_CN.md"><img alt="README in Simplified Chinese" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
</div>
|
||||
|
||||
<p align="center">
|
||||
<img src="./docs/static/img/langflow_basic_howto.gif" alt="Your GIF" style="border: 3px solid #211C43;">
|
||||
</p>
|
||||
|
||||
# 📝 目录
|
||||
|
||||
- [📝 目录](#-目录)
|
||||
- [📦 快速开始](#-快速开始)
|
||||
- [🎨 创建工作流](#-创建工作流)
|
||||
- [部署](#部署)
|
||||
- [在Google Cloud Platform上部署Langflow](#在google-cloud-platform上部署langflow)
|
||||
- [在Railway上部署](#在railway上部署)
|
||||
- [在Render上部署](#在render上部署)
|
||||
- [🖥️ 命令行界面 (CLI)](#️-命令行界面-cli)
|
||||
- [用法](#用法)
|
||||
- [环境变量](#环境变量)
|
||||
- [👋 贡献](#-贡献)
|
||||
- [🌟 贡献者](#-贡献者)
|
||||
- [📄 许可证](#-许可证)
|
||||
|
||||
# 📦 快速开始
|
||||
|
||||
使用 pip 安装 Langflow:
|
||||
|
||||
```shell
|
||||
# 确保您的系统已经安装上>=Python 3.10
|
||||
# 安装Langflow预发布版本
|
||||
python -m pip install langflow --pre --force-reinstall
|
||||
|
||||
# 安装Langflow稳定版本
|
||||
python -m pip install langflow -U
|
||||
```
|
||||
|
||||
然后运行Langflow:
|
||||
|
||||
```shell
|
||||
python -m langflow run
|
||||
```
|
||||
|
||||
您可以在[HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview)中在线体验 Langflow,也可以使用该链接[克隆空间](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true),在几分钟内创建您自己的 Langflow 运行工作空间。
|
||||
|
||||
# 🎨 创建工作流
|
||||
|
||||
使用 Langflow 来创建工作流非常简单。只需从侧边栏拖动组件到画布上,然后连接组件即可开始构建应用程序。
|
||||
|
||||
您可以通过编辑提示参数、将组件分组到单个高级组件中以及构建您自己的自定义组件来展开探索。
|
||||
|
||||
完成后,可以将工作流导出为 JSON 文件。
|
||||
|
||||
然后使用以下脚本加载工作流:
|
||||
|
||||
```python
|
||||
from langflow.load import run_flow_from_json
|
||||
|
||||
results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!")
|
||||
```
|
||||
|
||||
# 部署
|
||||
|
||||
## 在Google Cloud Platform上部署Langflow
|
||||
|
||||
请按照我们的分步指南使用 Google Cloud Shell 在 Google Cloud Platform (GCP) 上部署 Langflow。该指南在 [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) 文档中提供。
|
||||
|
||||
或者,点击下面的 "Open in Cloud Shell" 按钮,启动 Google Cloud Shell,克隆 Langflow 仓库,并开始一个互动教程,该教程将指导您设置必要的资源并在 GCP 项目中部署 Langflow。
|
||||
|
||||
[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md)
|
||||
|
||||
## 在Railway上部署
|
||||
|
||||
使用此模板在 Railway 上部署 Langflow 1.0 预览版:
|
||||
|
||||
[](https://railway.app/template/UsJ1uB?referralCode=MnPSdg)
|
||||
|
||||
或者使用此模板部署 Langflow 0.6.x:
|
||||
|
||||
[](https://railway.app/template/JMXEWp?referralCode=MnPSdg)
|
||||
|
||||
## 在Render上部署
|
||||
|
||||
<a href="https://render.com/deploy?repo=https://github.com/langflow-ai/langflow/tree/dev">
|
||||
<img src="https://render.com/images/deploy-to-render-button.svg" alt="Deploy to Render" />
|
||||
</a>
|
||||
|
||||
# 🖥️ 命令行界面 (CLI)
|
||||
|
||||
Langflow提供了一个命令行界面以便于平台的管理和配置。
|
||||
|
||||
## 用法
|
||||
|
||||
您可以使用以下命令运行Langflow:
|
||||
|
||||
```shell
|
||||
langflow run [OPTIONS]
|
||||
```
|
||||
|
||||
命令行参数的详细说明:
|
||||
|
||||
- `--help`: 显示所有可用参数。
|
||||
- `--host`: 定义绑定服务器的主机host参数,可以使用 LANGFLOW_HOST 环境变量设置,默认值为 127.0.0.1。
|
||||
- `--workers`: 设置工作进程的数量,可以使用 LANGFLOW_WORKERS 环境变量设置,默认值为 1。
|
||||
- `--timeout`: 设置工作进程的超时时间(秒),默认值为 60。
|
||||
- `--port`: 设置服务监听的端口,可以使用 LANGFLOW_PORT 环境变量设置,默认值为 7860。
|
||||
- `--config`: 定义配置文件的路径,默认值为 config.yaml。
|
||||
- `--env-file`: 指定包含环境变量的 .env 文件路径,默认值为 .env。
|
||||
- `--log-level`: 定义日志记录级别,可以使用 LANGFLOW_LOG_LEVEL 环境变量设置,默认值为 critical。
|
||||
- `--components-path`: 指定包含自定义组件的目录路径,可以使用 LANGFLOW_COMPONENTS_PATH 环境变量设置,默认值为 langflow/components。
|
||||
- `--log-file`: 指定日志文件的路径,可以使用 LANGFLOW_LOG_FILE 环境变量设置,默认值为 logs/langflow.log。
|
||||
- `--cache`: 选择要使用的缓存类型,可选项为 InMemoryCache 和 SQLiteCache,可以使用 LANGFLOW_LANGCHAIN_CACHE 环境变量设置,默认值为 SQLiteCache。
|
||||
- `--dev/--no-dev`: 切换开发/非开发模式,默认值为 no-dev即非开发模式。
|
||||
- `--path`: 指定包含前端构建文件的目录路径,此参数仅用于开发目的,可以使用 LANGFLOW_FRONTEND_PATH 环境变量设置。
|
||||
- `--open-browser/--no-open-browser`: 切换启动服务器后是否打开浏览器,可以使用 LANGFLOW_OPEN_BROWSER 环境变量设置,默认值为 open-browser即启动后打开浏览器。
|
||||
- `--remove-api-keys/--no-remove-api-keys`: 切换是否从数据库中保存的项目中移除 API 密钥,可以使用 LANGFLOW_REMOVE_API_KEYS 环境变量设置,默认值为 no-remove-api-keys。
|
||||
- `--install-completion [bash|zsh|fish|powershell|pwsh]`: 为指定的 shell 安装自动补全。
|
||||
- `--show-completion [bash|zsh|fish|powershell|pwsh]`: 显示指定 shell 的自动补全,使您可以复制或自定义安装。
|
||||
- `--backend-only`: 此参数默认为 False,允许仅运行后端服务器而不运行前端,也可以使用 LANGFLOW_BACKEND_ONLY 环境变量设置。
|
||||
- `--store`: 此参数默认为 True,启用存储功能,使用 --no-store 可禁用它,可以使用 LANGFLOW_STORE 环境变量配置。
|
||||
|
||||
这些参数对于需要定制 Langflow 行为的用户尤其重要,特别是在开发或者特殊部署场景中。
|
||||
|
||||
### 环境变量
|
||||
|
||||
您可以使用环境变量配置许多 CLI 参数选项。这些变量可以在操作系统中导出,或添加到 .env 文件中,并使用 --env-file 参数加载。
|
||||
|
||||
项目中包含一个名为 .env.example 的示例 .env 文件。将此文件复制为新文件 .env,并用实际设置值替换示例值。如果同时在操作系统和 .env 文件中设置值,则 .env 设置优先。
|
||||
|
||||
# 👋 贡献
|
||||
|
||||
我们欢迎各级开发者为我们的 GitHub 开源项目做出贡献,并帮助 Langflow 更加易用,如果您想参与贡献,请查看我们的贡献指南 [contributing guidelines](./CONTRIBUTING.md) 。
|
||||
|
||||
---
|
||||
|
||||
[](https://star-history.com/#langflow-ai/langflow&Date)
|
||||
|
||||
# 🌟 贡献者
|
||||
|
||||
[](https://github.com/langflow-ai/langflow/graphs/contributors)
|
||||
|
||||
# 📄 许可证
|
||||
|
||||
Langflow 以 MIT 许可证发布。有关详细信息,请参阅 [LICENSE](LICENSE) 文件。
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
# syntax=docker/dockerfile:1
|
||||
# Keep this syntax directive! It's used to enable Docker BuildKit
|
||||
|
||||
|
||||
################################
|
||||
# BUILDER-BASE
|
||||
# Used to build deps + create our virtual environment
|
||||
|
|
@ -47,12 +48,10 @@ WORKDIR /app
|
|||
COPY pyproject.toml poetry.lock README.md ./
|
||||
COPY src/ ./src
|
||||
COPY scripts/ ./scripts
|
||||
|
||||
RUN python -m pip install requests --user && cd ./scripts && python update_dependencies.py
|
||||
RUN $POETRY_HOME/bin/poetry lock --no-update \
|
||||
&& $POETRY_HOME/bin/poetry install --no-interaction --no-ansi -E deploy \
|
||||
&& $POETRY_HOME/bin/poetry build -f wheel \
|
||||
&& $POETRY_HOME/bin/poetry run pip install dist/*.whl
|
||||
&& $POETRY_HOME/bin/poetry run pip install dist/*.whl --force-reinstall
|
||||
|
||||
################################
|
||||
# RUNTIME
|
||||
|
|
|
|||
|
|
@ -10,8 +10,7 @@ Langflow provides an API key functionality that allows users to access their ind
|
|||
The default user and password are set using the LANGFLOW_SUPERUSER and
|
||||
LANGFLOW_SUPERUSER_PASSWORD environment variables.
|
||||
|
||||
The default values are
|
||||
langflow and langflow, respectively.
|
||||
The default values are `langflow` and `langflow`, respectively.
|
||||
|
||||
</Admonition>
|
||||
|
||||
|
|
|
|||
|
|
@ -1,62 +1,51 @@
|
|||
# Command Line Interface (CLI)
|
||||
|
||||
## Overview
|
||||
|
||||
Langflow's Command Line Interface (CLI) is a powerful tool that allows you to interact with the Langflow server from the command line. The CLI provides a wide range of commands to help you shape Langflow to your needs.
|
||||
|
||||
Running the CLI without any arguments will display a list of available commands and options.
|
||||
The available commands are below. Navigate to their individual sections of this page to see the parameters.
|
||||
|
||||
* [langflow](#overview)
|
||||
* [langflow api-key](#langflow-api-key)
|
||||
* [langflow copy-db](#langflow-copy-db)
|
||||
* [langflow migration](#langflow-migration)
|
||||
* [langflow run](#langflow-run)
|
||||
* [langflow superuser](#langflow-superuser)
|
||||
|
||||
## Overview
|
||||
|
||||
Running the CLI without any arguments displays a list of available options and commands.
|
||||
|
||||
```bash
|
||||
python -m langflow run --help
|
||||
langflow
|
||||
# or
|
||||
python -m langflow run
|
||||
langflow --help
|
||||
# or
|
||||
python -m langflow
|
||||
```
|
||||
|
||||
Each option for `run` command are detailed below:
|
||||
| Command | Description |
|
||||
| ------- | ----------- |
|
||||
| `api-key` | Creates an API key for the default superuser if AUTO_LOGIN is enabled. |
|
||||
| `copy-db` | Copy the database files to the current directory (`which langflow`). |
|
||||
| `migration` | Run or test migrations. |
|
||||
| `run` | Run the Langflow. |
|
||||
| `superuser` | Create a superuser. |
|
||||
|
||||
- `--help`: Displays all available options.
|
||||
- `--host`: Defines the host to bind the server to. Can be set using the `LANGFLOW_HOST` environment variable. The default is `127.0.0.1`.
|
||||
- `--workers`: Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`.
|
||||
- `--timeout`: Sets the worker timeout in seconds. The default is `60`.
|
||||
- `--port`: Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`.
|
||||
- `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`.
|
||||
- `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`.
|
||||
- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`.
|
||||
- `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`.
|
||||
- `--cache`: Select the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`.
|
||||
- `--dev/--no-dev`: Toggles the development mode. The default is `no-dev`.
|
||||
- `--path`: Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable.
|
||||
- `--open-browser/--no-open-browser`: Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`.
|
||||
- `--remove-api-keys/--no-remove-api-keys`: Toggles the option to remove API keys from the projects saved in the database. Can be set using the `LANGFLOW_REMOVE_API_KEYS` environment variable. The default is `no-remove-api-keys`.
|
||||
- `--install-completion [bash|zsh|fish|powershell|pwsh]`: Installs completion for the specified shell.
|
||||
- `--show-completion [bash|zsh|fish|powershell|pwsh]`: Shows completion for the specified shell, allowing you to copy it or customize the installation.
|
||||
- `--backend-only`: This parameter, with a default value of `False`, allows running only the backend server without the frontend. It can also be set using the `LANGFLOW_BACKEND_ONLY` environment variable.
|
||||
- `--store`: This parameter, with a default value of `True`, enables the store features, use `--no-store` to deactivate it. It can be configured using the `LANGFLOW_STORE` environment variable.
|
||||
### Options
|
||||
|
||||
These parameters are important for users who need to customize the behavior of Langflow, especially in development or specialized deployment scenarios.
|
||||
| Option | Description |
|
||||
| ------ | ----------- |
|
||||
| `--install-completion` | Install completion for the current shell. |
|
||||
| `--show-completion` | Show completion for the current shell, to copy it or customize the installation. |
|
||||
| `--help` | Show this message and exit. |
|
||||
|
||||
### API Key Command
|
||||
## langflow api-key
|
||||
|
||||
The `api-key` command allows you to create an API key for accessing Langflow's API when `LANGFLOW_AUTO_LOGIN` is set to `True`.
|
||||
|
||||
```bash
|
||||
python -m langflow api-key --help
|
||||
|
||||
Usage: langflow api-key [OPTIONS]
|
||||
|
||||
Creates an API key for the default superuser if AUTO_LOGIN is enabled.
|
||||
Args: log_level (str, optional): Logging level. Defaults to "error".
|
||||
Returns: None
|
||||
|
||||
╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
|
||||
│ --log-level TEXT Logging level. [env var: LANGFLOW_LOG_LEVEL] [default: error] │
|
||||
│ --help Show this message and exit. │
|
||||
╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
|
||||
```
|
||||
|
||||
Once you run the `api-key` command, it will create an API key for the default superuser if `LANGFLOW_AUTO_LOGIN` is set to `True`.
|
||||
Run the `api-key` command to create an API key for the default superuser if `LANGFLOW_AUTO_LOGIN` is set to `True`.
|
||||
|
||||
```bash
|
||||
langflow api-key
|
||||
# or
|
||||
python -m langflow api-key
|
||||
╭─────────────────────────────────────────────────────────────────────╮
|
||||
│ API Key Created Successfully: │
|
||||
|
|
@ -67,11 +56,99 @@ python -m langflow api-key
|
|||
│ Make sure to store it in a secure location. │
|
||||
│ │
|
||||
│ The API key has been copied to your clipboard. Cmd + V to paste it. │
|
||||
╰─────────────────────────────────────────────────────────────────────╯
|
||||
╰──────────────────────────────
|
||||
```
|
||||
|
||||
### Environment Variables
|
||||
### Options
|
||||
|
||||
| Option | Type | Description |
|
||||
|------------------|------|-------------------------------------------------------------|
|
||||
| --log-level | TEXT | Logging level. [env var: LANGFLOW_LOG_LEVEL] [default: error] |
|
||||
| --help | | Show this message and exit. |
|
||||
|
||||
## langflow copy-db
|
||||
|
||||
Run the `copy-db` command to copy the cached `langflow.db` and `langflow-pre.db` database files to the current directory.
|
||||
|
||||
If the files exist in the cache directory, they will be copied to the same directory as `__main__.py`, which can be found with `which langflow`.
|
||||
|
||||
### Options
|
||||
|
||||
None.
|
||||
|
||||
## langflow migration
|
||||
|
||||
Run or test migrations with the [Alembic](https://pypi.org/project/alembic/) database tool.
|
||||
|
||||
```bash
|
||||
langflow migration
|
||||
# or
|
||||
python -m langflow migration
|
||||
```
|
||||
|
||||
### Options
|
||||
| Option | Description |
|
||||
|-----------------|-------------------------------------------------------------|
|
||||
| `--test, --no-test` | Run migrations in test mode. [default: test] |
|
||||
| `--fix, --no-fix` | Fix migrations. This is a destructive operation, and should only be used if you know what you are doing. [default: no-fix] |
|
||||
| `--help` | Show this message and exit. |
|
||||
|
||||
|
||||
## langflow run
|
||||
|
||||
Run Langflow.
|
||||
|
||||
```bash
|
||||
langflow run
|
||||
# or
|
||||
python -m langflow run
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
| Option | Description |
|
||||
|-------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `--help` | Displays all available options. |
|
||||
| `--host` | Defines the host to bind the server to. Can be set using the `LANGFLOW_HOST` environment variable. The default is `127.0.0.1`. |
|
||||
| `--workers` | Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`. |
|
||||
| `--timeout` | Sets the worker timeout in seconds. The default is `60`. |
|
||||
| `--port` | Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`. |
|
||||
| `--env-file` | Specifies the path to the .env file containing environment variables. The default is `.env`. |
|
||||
| `--log-level` | Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`. |
|
||||
| `--components-path` | Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`. |
|
||||
| `--log-file` | Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`. |
|
||||
| `--cache` | Select the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`. |
|
||||
| `--dev`/`--no-dev` | Toggles the development mode. The default is `no-dev`. |
|
||||
| `--path` | Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable. |
|
||||
| `--open-browser`/`--no-open-browser`| Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`. |
|
||||
| `--remove-api-keys`/`--no-remove-api-keys`| Toggles the option to remove API keys from the projects saved in the database. Can be set using the `LANGFLOW_REMOVE_API_KEYS` environment variable. The default is `no-remove-api-keys`. |
|
||||
| `--install-completion [bash\|zsh\|fish\|powershell\|pwsh]`| Installs completion for the specified shell. |
|
||||
| `--show-completion [bash\|zsh\|fish\|powershell\|pwsh]` | Shows completion for the specified shell, allowing you to copy it or customize the installation. |
|
||||
| `--backend-only` | This parameter, with a default value of `False`, allows running only the backend server without the frontend. It can also be set using the `LANGFLOW_BACKEND_ONLY` environment variable. For more, see [Backend-only](../deployment/backend-only.md).|
|
||||
| `--store` | This parameter, with a default value of `True`, enables the store features, use `--no-store` to deactivate it. It can be configured using the `LANGFLOW_STORE` environment variable. |
|
||||
|
||||
#### Environment Variables
|
||||
|
||||
You can configure many of the CLI options using environment variables. These can be exported in your operating system or added to a `.env` file and loaded using the `--env-file` option.
|
||||
|
||||
A sample `.env` file named `.env.example` is included with the project. Copy this file to a new file named `.env` and replace the example values with your actual settings. If you're setting values in both your OS and the `.env` file, the `.env` settings will take precedence.
|
||||
|
||||
## langflow superuser
|
||||
|
||||
Create a superuser for Langflow.
|
||||
|
||||
```bash
|
||||
langflow superuser
|
||||
# or
|
||||
python -m langflow superuser
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
| Option | Type | Description |
|
||||
|----------------|-------|-------------------------------------------------------------|
|
||||
| `--username` | TEXT | Username for the superuser. [default: None] [required] |
|
||||
| `--password` | TEXT | Password for the superuser. [default: None] [required] |
|
||||
| `--log-level` | TEXT | Logging level. [env var: LANGFLOW_LOG_LEVEL] [default: error] |
|
||||
| `--help` | | Show this message and exit. |
|
||||
|
||||
|
|
|
|||
|
|
@ -1,31 +1,39 @@
|
|||
import ThemedImage from "@theme/ThemedImage";
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import Admonition from "@theme/Admonition";
|
||||
import ReactPlayer from "react-player";
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Global Environment Variables
|
||||
# Global Variables
|
||||
|
||||
Langflow 1.0 alpha includes the option to add **Global Environment Variables** for your application.
|
||||
Global Variables are a useful feature of Langflow, allowing you to define reusable variables accessed from any Text field in your project.
|
||||
|
||||
## Add a global variable to a project
|
||||
## TL;DR
|
||||
|
||||
In this example, you'll add the `openai_api_key` credential as a global environment variable to the **Basic Prompting** starter project.
|
||||
- Global Variables are reusable variables accessible from any Text field in your project.
|
||||
- To create one, click the 🌐 button in a Text field and then **+ Add New Variable**.
|
||||
- Define the **Name**, **Type**, and **Value** of the variable.
|
||||
- Click **Save Variable** to create it.
|
||||
- All Credential Global Variables are encrypted and accessible only by you.
|
||||
- Set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`true`_ in your `.env` file to add all variables in _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ to your user's Global Variables.
|
||||
|
||||
For more information on the starter flow, see [Basic prompting](../starter-projects/basic-prompting.mdx).
|
||||
## Creating and Adding a Global Variable
|
||||
|
||||
1. From the Langflow dashboard, click **New Project**.
|
||||
2. Select **Basic Prompting**.
|
||||
To create and add a global variable, click the 🌐 button in a Text field, and then click **+ Add New Variable**.
|
||||
|
||||
The **Basic Prompting** flow is created.
|
||||
Text fields are where you write text without opening a Text area, and are identified with the 🌐 icon.
|
||||
|
||||
3. To create an environment variable for the **OpenAI** component:
|
||||
1. In the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
2. In the **Variable Name** field, enter `openai_api_key`.
|
||||
3. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
4. For the variable **Type**, select **Credential**.
|
||||
5. In the **Apply to Fields** field, select **OpenAI API Key** to apply this variable to all fields named **OpenAI API Key**.
|
||||
6. Click **Save Variable**.
|
||||
For example, to create an environment variable for the **OpenAI** component:
|
||||
|
||||
1. In the **OpenAI API Key** text field, click the 🌐 button, then **Add New Variable**.
|
||||
2. Enter `openai_api_key` in the **Variable Name** field.
|
||||
3. Paste your OpenAI API Key (`sk-...`) in the **Value** field.
|
||||
4. Select **Credential** for the **Type**.
|
||||
5. Choose **OpenAI API Key** in the **Apply to Fields** field to apply this variable to all fields named **OpenAI API Key**.
|
||||
6. Click **Save Variable**.
|
||||
|
||||
You now have an `openai_api_key` global environment variable for your Langflow project.
|
||||
Subsequently, clicking the 🌐 button in a Text field will display the new variable in the dropdown.
|
||||
|
||||
<Admonition type="tip">
|
||||
You can also create global variables in **Settings** > **Variables and
|
||||
|
|
@ -41,10 +49,55 @@ You now have a `openai_api_key` global environment variable for your Langflow pr
|
|||
style={{ width: "40%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
4. To view and manage your project's global environment variables, visit **Settings** > **Variables and Secrets**.
|
||||
To view and manage your project's global environment variables, visit **Settings** > **Variables and Secrets**.
|
||||
|
||||
For more on variables in HuggingFace Spaces, see [Managing Secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets).
|
||||
|
||||
{/* All variables are encrypted */}
|
||||
|
||||
<Admonition type="warning">
|
||||
All Credential Global Variables are encrypted and accessible only by you.
|
||||
</Admonition>
|
||||
|
||||
## Configuring Environment Variables in your .env file
|
||||
|
||||
Setting `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` to `true` in your `.env` file (default) adds all variables in `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` to your user's Global Variables.
|
||||
|
||||
These variables are accessible like any other Global Variable.
|
||||
|
||||
<Admonition type="tip">
|
||||
To prevent this behavior, set `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` to
|
||||
`false` in your `.env` file.
|
||||
</Admonition>
|
||||
|
||||
You can specify variables to get from the environment by listing them in `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`.
|
||||
|
||||
Specify variables as a comma-separated list (e.g., _`"VARIABLE1, VARIABLE2"`_) or a JSON-encoded string (e.g., _`'["VARIABLE1", "VARIABLE2"]'`_).
|
||||
|
||||
The default list of variables includes:
|
||||
|
||||
- ANTHROPIC_API_KEY
|
||||
- ASTRA_DB_API_ENDPOINT
|
||||
- ASTRA_DB_APPLICATION_TOKEN
|
||||
- AZURE_OPENAI_API_KEY
|
||||
- AZURE_OPENAI_API_DEPLOYMENT_NAME
|
||||
- AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME
|
||||
- AZURE_OPENAI_API_INSTANCE_NAME
|
||||
- AZURE_OPENAI_API_VERSION
|
||||
- COHERE_API_KEY
|
||||
- GOOGLE_API_KEY
|
||||
- GROQ_API_KEY
|
||||
- HUGGINGFACEHUB_API_TOKEN
|
||||
- OPENAI_API_KEY
|
||||
- PINECONE_API_KEY
|
||||
- SEARCHAPI_API_KEY
|
||||
- SERPAPI_API_KEY
|
||||
- UPSTASH_VECTOR_REST_URL
|
||||
- UPSTASH_VECTOR_REST_TOKEN
|
||||
- VECTARA_CUSTOMER_ID
|
||||
- VECTARA_CORPUS_ID
|
||||
- VECTARA_API_KEY
|
||||
|
||||
## Video
|
||||
|
||||
<div
|
||||
|
|
|
|||
|
|
@ -3,7 +3,8 @@ import Admonition from "@theme/Admonition";
|
|||
# Custom Components
|
||||
|
||||
<Admonition type="info" label="Tip">
|
||||
Read the [Custom Component Guidelines](../administration/custom-component) for detailed information on custom components.
|
||||
Read the [Custom Component Guidelines](../administration/custom-component) for
|
||||
detailed information on custom components.
|
||||
</Admonition>
|
||||
|
||||
Custom components let you extend Langflow by creating reusable and configurable components from a Python script.
|
||||
|
|
@ -31,57 +32,60 @@ This class is the foundation for creating custom components. It allows users to
|
|||
|
||||
The following types are supported in the build method:
|
||||
|
||||
| Supported Types |
|
||||
| --------------------------------------------------------- |
|
||||
| _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ |
|
||||
| _`langflow.field_typing.NestedDict`_ |
|
||||
| _`langflow.field_typing.Prompt`_ |
|
||||
| _`langchain.chains.base.Chain`_ |
|
||||
| _`langchain.PromptTemplate`_ |
|
||||
| Supported Types |
|
||||
| ----------------------------------------------------------------- |
|
||||
| _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ |
|
||||
| _`langflow.field_typing.NestedDict`_ |
|
||||
| _`langflow.field_typing.Prompt`_ |
|
||||
| _`langchain.chains.base.Chain`_ |
|
||||
| _`langchain.PromptTemplate`_ |
|
||||
| _`from langchain.schema.language_model import BaseLanguageModel`_ |
|
||||
| _`langchain.Tool`_ |
|
||||
| _`langchain.document_loaders.base.BaseLoader`_ |
|
||||
| _`langchain.schema.Document`_ |
|
||||
| _`langchain.text_splitters.TextSplitter`_ |
|
||||
| _`langchain.vectorstores.base.VectorStore`_ |
|
||||
| _`langchain.embeddings.base.Embeddings`_ |
|
||||
| _`langchain.schema.BaseRetriever`_ |
|
||||
| _`langchain.Tool`_ |
|
||||
| _`langchain.document_loaders.base.BaseLoader`_ |
|
||||
| _`langchain.schema.Document`_ |
|
||||
| _`langchain.text_splitters.TextSplitter`_ |
|
||||
| _`langchain.vectorstores.base.VectorStore`_ |
|
||||
| _`langchain.embeddings.base.Embeddings`_ |
|
||||
| _`langchain.schema.BaseRetriever`_ |
|
||||
|
||||
The difference between _`dict`_ and _`langflow.field_typing.NestedDict`_ is that one adds a simple key-value pair field, while the other opens a more robust dictionary editor.
|
||||
|
||||
<Admonition type="info">
|
||||
Use the `Prompt` type by adding **kwargs to the build method.
|
||||
If you want to add the values of the variables to the template you defined, format the `PromptTemplate` inside the `CustomComponent` class.
|
||||
Use the `Prompt` type by adding `**kwargs` to the build method. If you want to
|
||||
add the values of the variables to the template you defined, format the
|
||||
`PromptTemplate` inside the `CustomComponent` class.
|
||||
</Admonition>
|
||||
|
||||
<Admonition type="info">
|
||||
Use base Python types without a handle by default. To add handles, use the `input_types` key in the `build_config` method.
|
||||
Use base Python types without a handle by default. To add handles, use the
|
||||
`input_types` key in the `build_config` method.
|
||||
</Admonition>
|
||||
|
||||
**build_config:** Defines the configuration fields of the component. This method returns a dictionary where each key represents a field name and each value defines the field's behavior.
|
||||
|
||||
Supported keys for configuring fields:
|
||||
|
||||
| Key | Description |
|
||||
| --------------------- | --------------------------------------------------- |
|
||||
| `is_list` | Boolean indicating if the field can hold multiple values. |
|
||||
| `options` | Dropdown menu options. |
|
||||
| `multiline` | Boolean indicating if a field allows multiline input. |
|
||||
| `input_types` | Allows connection handles for string fields. |
|
||||
| `display_name` | Field name displayed in the UI. |
|
||||
| `advanced` | Hides the field in the default UI view. |
|
||||
| `password` | Masks input, useful for sensitive data. |
|
||||
| `required` | Overrides the default behavior to make a field mandatory. |
|
||||
| `info` | Tooltip for the field. |
|
||||
| `file_types` | Accepted file types, useful for file fields. |
|
||||
| `range_spec` | Defines valid ranges for float fields. |
|
||||
| `title_case` | Boolean that controls field name capitalization. |
|
||||
| `refresh_button` | Adds a refresh button that updates field values. |
|
||||
| `real_time_refresh` | Updates the configuration as field values change. |
|
||||
| `field_type` | Automatically set based on the build method's type hint. |
|
||||
| Key | Description |
|
||||
| ------------------- | --------------------------------------------------------- |
|
||||
| `is_list` | Boolean indicating if the field can hold multiple values. |
|
||||
| `options` | Dropdown menu options. |
|
||||
| `multiline` | Boolean indicating if a field allows multiline input. |
|
||||
| `input_types` | Allows connection handles for string fields. |
|
||||
| `display_name` | Field name displayed in the UI. |
|
||||
| `advanced` | Hides the field in the default UI view. |
|
||||
| `password` | Masks input, useful for sensitive data. |
|
||||
| `required` | Overrides the default behavior to make a field mandatory. |
|
||||
| `info` | Tooltip for the field. |
|
||||
| `file_types` | Accepted file types, useful for file fields. |
|
||||
| `range_spec` | Defines valid ranges for float fields. |
|
||||
| `title_case` | Boolean that controls field name capitalization. |
|
||||
| `refresh_button` | Adds a refresh button that updates field values. |
|
||||
| `real_time_refresh` | Updates the configuration as field values change. |
|
||||
| `field_type` | Automatically set based on the build method's type hint. |
|
||||
|
||||
<Admonition type="info" label="Tip">
|
||||
Use the `update_build_config` method to dynamically update configurations based on field values.
|
||||
Use the `update_build_config` method to dynamically update configurations
|
||||
based on field values.
|
||||
</Admonition>
|
||||
|
||||
## Additional methods and attributes
|
||||
|
|
@ -99,4 +103,3 @@ The `CustomComponent` class also provides helpful methods for specific tasks (e.
|
|||
- `status`: Shows values from the `build` method, useful for debugging.
|
||||
- `field_order`: Controls the display order of fields.
|
||||
- `icon`: Sets the canvas display icon.
|
||||
|
||||
|
|
|
|||
161
docs/docs/components/inputs-and-outputs.mdx
Normal file
161
docs/docs/components/inputs-and-outputs.mdx
Normal file
|
|
@ -0,0 +1,161 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
|
||||
# Inputs and Outputs
|
||||
|
||||
TL;DR: Inputs and Outputs are a category of components that are used to define where data comes in and out of your flow.
|
||||
They also dynamically change the Playground and can be renamed to facilitate building and maintaining your flows.
|
||||
|
||||
## Inputs
|
||||
|
||||
Inputs are components used to define where data enters your flow. They can receive data from the user, a database, or any other source that can be converted to Text or Record.
|
||||
|
||||
The difference between Chat Input and other Input components is the output format, the number of configurable fields, and the way they are displayed in the Playground.
|
||||
|
||||
Chat Input components can output `Text` or `Record`. When you want to pass the sender name or sender to the next component, use the `Record` output. To pass only the message, use the `Text` output, useful when saving the message to a database or memory system like Zep.
|
||||
|
||||
You can find out more about Chat Input and other Inputs [here](#chat-input).
|
||||
|
||||
### Chat Input
|
||||
|
||||
This component collects user input from the chat.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Sender Type:** Specifies the sender type. Defaults to `User`. Options are `Machine` and `User`.
|
||||
- **Sender Name:** Specifies the name of the sender. Defaults to `User`.
|
||||
- **Message:** Specifies the message text. It is a multiline text input.
|
||||
- **Session ID:** Specifies the session ID of the chat history. If provided, the message will be saved in the Message History.
|
||||
|
||||
<Admonition type="note" title="Note">
|
||||
<p>
|
||||
If `As Record` is `true` and the `Message` is a `Record`, the data of the
|
||||
`Record` will be updated with the `Sender`, `Sender Name`, and `Session ID`.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/chat-input-expanded.png",
|
||||
dark: "img/chat-input-expanded.png",
|
||||
}}
|
||||
style={{ width: "40%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
One significant capability of the Chat Input component is its ability to transform the Playground into a chat window. This feature is particularly valuable for scenarios requiring user input to initiate or influence the flow.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/interaction-panel-with-chat-input.png",
|
||||
dark: "img/interaction-panel-with-chat-input.png",
|
||||
}}
|
||||
style={{ width: "50%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
### Text Input
|
||||
|
||||
The **Text Input** component adds an **Input** field on the Playground. This enables you to define parameters while running and testing your flow.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Value:** Specifies the text input value. This is where the user inputs text data that will be passed to the next component in the sequence. If no value is provided, it defaults to an empty string.
|
||||
- **Record Template:** Specifies how a `Record` should be converted into `Text`.
|
||||
|
||||
The **Record Template** field is used to specify how a `Record` should be converted into `Text`. This is particularly useful when you want to extract specific information from a `Record` and pass it as text to the next component in the sequence.
|
||||
|
||||
For example, if you have a `Record` with the following structure:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "John Doe",
|
||||
"age": 30,
|
||||
"email": "johndoe@email.com"
|
||||
}
|
||||
```
|
||||
|
||||
A template with `Name: {name}, Age: {age}` will convert the `Record` into a text string of `Name: John Doe, Age: 30`.
|
||||
|
||||
If you pass more than one `Record`, the text will be concatenated with a new line separator.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/text-input-expanded.png",
|
||||
dark: "img/text-input-expanded.png",
|
||||
}}
|
||||
style={{ width: "50%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
## Outputs
|
||||
|
||||
Outputs are components that are used to define where data comes out of your flow. They can be used to send data to the user, to the Playground, or to define how the data will be displayed in the Playground.
|
||||
|
||||
The Chat Output works similarly to the Chat Input but does not have a field that allows for written input. It is used as an Output definition and can be used to send data to the user.
|
||||
|
||||
You can find out more about it and the other Outputs [here](#chat-output).
|
||||
|
||||
### Chat Output
|
||||
|
||||
This component sends a message to the chat.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Sender Type:** Specifies the sender type. Default is `"Machine"`. Options are `"Machine"` and `"User"`.
|
||||
|
||||
- **Sender Name:** Specifies the sender's name. Default is `"AI"`.
|
||||
|
||||
- **Session ID:** Specifies the session ID of the chat history. If provided, messages are saved in the Message History.
|
||||
|
||||
- **Message:** Specifies the text of the message.
|
||||
|
||||
<Admonition type="note" title="Note">
|
||||
<p>
|
||||
If `As Record` is `true` and the `Message` is a `Record`, the data in the
|
||||
`Record` is updated with the `Sender`, `Sender Name`, and `Session ID`.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
### Text Output
|
||||
|
||||
This component displays text data to the user. It is useful when you want to show text without sending it to the chat.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Value:** Specifies the text data to be displayed. Defaults to an empty string.
|
||||
|
||||
The `TextOutput` component provides a simple way to display text data. It allows textual data to be visible in the chat window during your interaction flow.
|
||||
|
||||
## Prompts
|
||||
|
||||
A prompt is the input provided to a language model; it consists of multiple components and can be parameterized using prompt templates. A prompt template offers a reproducible method for generating prompts, enabling easy customization through input variables.
|
||||
|
||||
### Prompt
|
||||
|
||||
This component creates a prompt template with dynamic variables. This is useful for structuring prompts and passing dynamic data to a language model.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Template:** The template for the prompt. This field allows you to create other fields dynamically by using curly brackets `{}`. For example, if you have a template like `Hello {name}, how are you?`, a new field called `name` will be created. Prompt variables can be created with any name inside curly brackets, e.g. `{variable_name}`.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/prompt-with-template.png",
|
||||
dark: "img/prompt-with-template.png",
|
||||
}}
|
||||
style={{ width: "50%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
### PromptTemplate
|
||||
|
||||
The `PromptTemplate` component enables users to create prompts and define variables that control how the model is instructed. Users can input a set of variables which the template uses to generate the prompt when a conversation starts.
|
||||
|
||||
<Admonition type="info">
|
||||
After defining a variable in the prompt template, it acts as its own component
|
||||
input. See [Prompt Customization](../administration/prompt-customization) for
|
||||
more details.
|
||||
</Admonition>
|
||||
|
||||
- **template:** The template used to format an individual request.
|
||||
|
|
@ -1,99 +0,0 @@
|
|||
import Admonition from '@theme/Admonition';
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
|
||||
# Inputs
|
||||
|
||||
## Chat Input
|
||||
|
||||
This component obtains user input from the chat.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Sender Type:** Specifies the sender type. Defaults to `User`. Options are `Machine` and `User`.
|
||||
- **Sender Name:** Specifies the name of the sender. Defaults to `User`.
|
||||
- **Message:** Specifies the message text. It is a multiline text input.
|
||||
- **Session ID:** Specifies the session ID of the chat history. If provided, the message will be saved in the Message History.
|
||||
|
||||
<Admonition type="note" title="Note">
|
||||
<p>
|
||||
If `As Record` is `true` and the `Message` is a `Record`, the data
|
||||
of the `Record` will be updated with the `Sender`, `Sender Name`, and
|
||||
`Session ID`.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/chat-input-expanded.png",
|
||||
dark: "img/chat-input-expanded.png",
|
||||
}}
|
||||
style={{ width: "40%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
One significant capability of the Chat Input component is its ability to transform the Playground into a chat window. This feature is particularly valuable for scenarios requiring user input to initiate or influence the flow.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/interaction-panel-with-chat-input.png",
|
||||
dark: "img/interaction-panel-with-chat-input.png",
|
||||
}}
|
||||
style={{ width: "50%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
---
|
||||
|
||||
## Prompt
|
||||
|
||||
This component creates a prompt template with dynamic variables. This is useful for structuring prompts and passing dynamic data to a language model.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Template:** The template for the prompt. This field allows you to create other fields dynamically by using curly brackets `{}`. For example, if you have a template like `Hello {name}, how are you?`, a new field called `name` will be created. Prompt variables can be created with any name inside curly brackets, e.g. `{variable_name}`.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/prompt-with-template.png",
|
||||
dark: "img/prompt-with-template.png",
|
||||
}}
|
||||
style={{ width: "50%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
---
|
||||
|
||||
## Text Input
|
||||
|
||||
The **Text Input** component adds an **Input** field on the Playground. This enables you to define parameters while running and testing your flow.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Value:** Specifies the text input value. This is where the user inputs text data that will be passed to the next component in the sequence. If no value is provided, it defaults to an empty string.
|
||||
- **Record Template:** Specifies how a `Record` should be converted into `Text`.
|
||||
|
||||
The **Record Template** field is used to specify how a `Record` should be converted into `Text`. This is particularly useful when you want to extract specific information from a `Record` and pass it as text to the next component in the sequence.
|
||||
|
||||
For example, if you have a `Record` with the following structure:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "John Doe",
|
||||
"age": 30,
|
||||
"email": "johndoe@email.com"
|
||||
}
|
||||
```
|
||||
|
||||
A template with `Name: {name}, Age: {age}` will convert the `Record` into a text string of `Name: John Doe, Age: 30`.
|
||||
|
||||
If you pass more than one `Record`, the text will be concatenated with a new line separator.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/text-input-expanded.png",
|
||||
dark: "img/text-input-expanded.png",
|
||||
}}
|
||||
style={{ width: "50%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
import Admonition from '@theme/Admonition';
|
||||
|
||||
# Outputs
|
||||
|
||||
## Chat Output
|
||||
|
||||
This component sends a message to the chat.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Sender Type:** Specifies the sender type. Default is `"Machine"`. Options are `"Machine"` and `"User"`.
|
||||
|
||||
- **Sender Name:** Specifies the sender's name. Default is `"AI"`.
|
||||
|
||||
- **Session ID:** Specifies the session ID of the chat history. If provided, messages are saved in the Message History.
|
||||
|
||||
- **Message:** Specifies the text of the message.
|
||||
|
||||
<Admonition type="note" title="Note">
|
||||
<p>
|
||||
If `As Record` is `true` and the `Message` is a `Record`, the data in the `Record` is updated with the `Sender`, `Sender Name`, and `Session ID`.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
## Text Output
|
||||
|
||||
This component displays text data to the user. It is useful when you want to show text without sending it to the chat.
|
||||
|
||||
**Parameters**
|
||||
|
||||
- **Value:** Specifies the text data to be displayed. Defaults to an empty string.
|
||||
|
||||
|
||||
The `TextOutput` component provides a simple way to display text data. It allows textual data to be visible in the chat window during your interaction flow.
|
||||
|
|
@ -1,25 +0,0 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Prompts
|
||||
|
||||
<Admonition type="caution" icon="🚧" title="Zone Under Construction">
|
||||
<p>
|
||||
Thank you for your patience as we refine our documentation. It may
|
||||
still have some areas under development. Please share your feedback or report any issues to help us improve!
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
A prompt is the input provided to a language model; it consists of multiple components and can be parameterized using prompt templates. A prompt template offers a reproducible method for generating prompts, enabling easy customization through input variables.
|
||||
|
||||
---
|
||||
|
||||
### PromptTemplate
|
||||
|
||||
The `PromptTemplate` component enables users to create prompts and define variables that control how the model is instructed. Users can input a set of variables which the template uses to generate the prompt when a conversation starts.
|
||||
|
||||
<Admonition type="info">
|
||||
After defining a variable in the prompt template, it acts as its own component
|
||||
input. See [Prompt Customization](../administration/prompt-customization) for more details.
|
||||
</Admonition>
|
||||
|
||||
- **template:** The template used to format an individual request.
|
||||
49
docs/docs/components/text-and-record.mdx
Normal file
49
docs/docs/components/text-and-record.mdx
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
# Text and Record
|
||||
|
||||
In Langflow 1.0, we added two main input and output types: `Text` and `Record`.
|
||||
|
||||
`Text` is a simple string input and output type, while `Record` is a structure very similar to a dictionary in Python. It is a key-value pair data structure.
|
||||
|
||||
We've created a few components to help you work with these types. Let's see how a few of them work.
|
||||
|
||||
## Records To Text
|
||||
|
||||
This is a component that takes in Records and outputs a `Text`. It does this using a template string and concatenating the values of the `Record`, one per line.
|
||||
|
||||
If we have the following Records:
|
||||
|
||||
```json
|
||||
{
|
||||
"sender_name": "Alice",
|
||||
"message": "Hello!"
|
||||
}
|
||||
{
|
||||
"sender_name": "John",
|
||||
"message": "Hi!"
|
||||
}
|
||||
```
|
||||
|
||||
And the template string is: _`{sender_name}: {message}`_
|
||||
|
||||
The output is:
|
||||
|
||||
```
|
||||
Alice: Hello!
|
||||
John: Hi!
|
||||
```
|
||||
|
||||
## Create Record
|
||||
|
||||
This component allows you to create a `Record` from a number of inputs. You can add as many key-value pairs as you want (as long as the total is fewer than 15). Once you've picked that number, you'll need to write the name of each Key, and you can pass `Text` values from other components to it.
|
||||
|
||||
## Documents To Records
|
||||
|
||||
This component takes in a LangChain `Document` and outputs a `Record`. It does this by extracting the `page_content` and the `metadata` from the `Document` and adding them to the `Record` as text and data respectively.
|
||||
|
||||
## Why is this useful?
|
||||
|
||||
The idea was to create a unified way to work with complex data in Langflow and to make it easier to work with data that is not just a simple string. This way you can create more complex workflows and use the data in more ways.
|
||||
|
||||
## What's next?
|
||||
|
||||
We are planning to integrate an array of modalities to Langflow, such as images, audio, and video. This will allow you to create even more complex workflows and use cases. Stay tuned for more updates! 🚀
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Vector Stores Documentation
|
||||
# Vector Stores
|
||||
|
||||
### Astra DB
|
||||
|
||||
|
|
|
|||
113
docs/docs/deployment/backend-only.md
Normal file
113
docs/docs/deployment/backend-only.md
Normal file
|
|
@ -0,0 +1,113 @@
|
|||
# Backend-only
|
||||
You can run Langflow in `--backend-only` mode to expose your Langflow app as an API, without running the frontend UI.
|
||||
|
||||
Start langflow in backend-only mode with `python3 -m langflow run --backend-only`.
|
||||
|
||||
The terminal prints ` Welcome to ⛓ Langflow `, and a blank window opens at `http://127.0.0.1:7864/all`.
|
||||
Langflow will now serve requests to its API without the frontend running.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* [Langflow installed](../getting-started/install-langflow.mdx)
|
||||
|
||||
* [OpenAI API key](https://platform.openai.com)
|
||||
|
||||
* [A Langflow flow created](../starter-projects/basic-prompting.mdx)
|
||||
|
||||
## Download your flow's curl call
|
||||
|
||||
1. Click API.
|
||||
2. Click **curl** > **Copy code** and save the code to your local machine.
|
||||
It will look something like this:
|
||||
```curl
|
||||
curl -X POST \
|
||||
"http://127.0.0.1:7864/api/v1/run/ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef?stream=false" \
|
||||
-H 'Content-Type: application/json'\
|
||||
-d '{"input_value": "message",
|
||||
"output_type": "chat",
|
||||
"input_type": "chat",
|
||||
"tweaks": {
|
||||
"Prompt-kvo86": {},
|
||||
"OpenAIModel-MilkD": {},
|
||||
"ChatOutput-ktwdw": {},
|
||||
"ChatInput-xXC4F": {}
|
||||
}}'
|
||||
```
|
||||
Note the flow ID of `ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef`. You can find this ID in the UI as well to ensure you're querying the right flow.
|
||||
|
||||
## Start Langflow in backend-only mode
|
||||
|
||||
1. Stop Langflow with Ctrl+C.
|
||||
2. Start langflow in backend-only mode with `python3 -m langflow run --backend-only`.
|
||||
The terminal prints ` Welcome to ⛓ Langflow `, and a blank window opens at `http://127.0.0.1:7864/all`.
|
||||
Langflow will now serve requests to its API.
|
||||
3. Run the curl code you copied from the UI.
|
||||
You should get a result like this:
|
||||
```bash
|
||||
{"session_id":"ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef:bf81d898868ac87e1b4edbd96c131c5dee801ea2971122cc91352d144a45b880","outputs":[{"inputs":{"input_value":"hi, are you there?"},"outputs":[{"results":{"result":"Arrr, ahoy matey! Aye, I be here. What be ye needin', me hearty?"},"artifacts":{"message":"Arrr, ahoy matey! Aye, I be here. What be ye needin', me hearty?","sender":"Machine","sender_name":"AI"},"messages":[{"message":"Arrr, ahoy matey! Aye, I be here. What be ye needin', me hearty?","sender":"Machine","sender_name":"AI","component_id":"ChatOutput-ktwdw"}],"component_display_name":"Chat Output","component_id":"ChatOutput-ktwdw","used_frozen_result":false}]}]}%
|
||||
```
|
||||
Again, note that the flow ID matches.
|
||||
Langflow is receiving your POST request, running the flow, and returning the result, all without running the frontend. Cool!
|
||||
|
||||
## Download your flow's Python API call
|
||||
|
||||
Instead of using curl, you can download your flow as a Python API call.
|
||||
|
||||
1. Click API.
|
||||
2. Click **Python API** > **Copy code** and save the code to your local machine.
|
||||
The code will look something like this:
|
||||
```python
|
||||
import requests
|
||||
from typing import Optional
|
||||
|
||||
BASE_API_URL = "http://127.0.0.1:7864/api/v1/run"
|
||||
FLOW_ID = "ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef"
|
||||
# You can tweak the flow by adding a tweaks dictionary
|
||||
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
|
||||
|
||||
def run_flow(message: str,
             flow_id: str,
             output_type: str = "chat",
             input_type: str = "chat",
             tweaks: Optional[dict] = None,
             api_key: Optional[str] = None) -> dict:
    """
    Run a flow with a given message and optional tweaks.

    :param message: The message to send to the flow
    :param flow_id: The ID of the flow to run
    :param output_type: The output type of the flow (defaults to "chat")
    :param input_type: The input type of the flow (defaults to "chat")
    :param tweaks: Optional tweaks to customize the flow
    :param api_key: Optional Langflow API key sent in the "x-api-key" header
    :return: The JSON response from the flow
    """
    api_url = f"{BASE_API_URL}/{flow_id}"

    payload = {
        "input_value": message,
        "output_type": output_type,
        "input_type": input_type,
    }
    headers = None
    if tweaks:
        payload["tweaks"] = tweaks
    if api_key:
        # Langflow expects the API key in the x-api-key header.
        headers = {"x-api-key": api_key}
    response = requests.post(api_url, json=payload, headers=headers)
    return response.json()
|
||||
|
||||
# Set up any tweaks you want to apply to the flow
|
||||
message = "message"
|
||||
|
||||
print(run_flow(message=message, flow_id=FLOW_ID))
|
||||
```
|
||||
3. Run your Python app:
|
||||
```bash
|
||||
python3 app.py
|
||||
```
|
||||
|
||||
The result is similar to the curl call:
|
||||
```bash
|
||||
{'session_id': 'ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef:bf81d898868ac87e1b4edbd96c131c5dee801ea2971122cc91352d144a45b880', 'outputs': [{'inputs': {'input_value': 'message'}, 'outputs': [{'results': {'result': "Arrr matey! What be yer message for this ol' pirate? Speak up or walk the plank!"}, 'artifacts': {'message': "Arrr matey! What be yer message for this ol' pirate? Speak up or walk the plank!", 'sender': 'Machine', 'sender_name': 'AI'}, 'messages': [{'message': "Arrr matey! What be yer message for this ol' pirate? Speak up or walk the plank!", 'sender': 'Machine', 'sender_name': 'AI', 'component_id': 'ChatOutput-ktwdw'}], 'component_display_name': 'Chat Output', 'component_id': 'ChatOutput-ktwdw', 'used_frozen_result': False}]}]}
|
||||
```
|
||||
Your Python app POSTs to your Langflow server, and the server runs the flow and returns the result.
|
||||
|
||||
See [API](../administration/api.mdx) for more ways to interact with your headless Langflow server.
|
||||
65
docs/docs/deployment/docker.md
Normal file
65
docs/docs/deployment/docker.md
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
# Docker
|
||||
|
||||
This guide will help you get LangFlow up and running using Docker and Docker Compose.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Docker
|
||||
- Docker Compose
|
||||
|
||||
## Steps
|
||||
|
||||
1. Clone the LangFlow repository:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/langflow-ai/langflow.git
|
||||
```
|
||||
|
||||
2. Navigate to the `docker_example` directory:
|
||||
|
||||
```sh
|
||||
cd langflow/docker_example
|
||||
```
|
||||
|
||||
3. Run the Docker Compose file:
|
||||
|
||||
```sh
|
||||
docker compose up
|
||||
```
|
||||
|
||||
LangFlow will now be accessible at [http://localhost:7860/](http://localhost:7860/).
|
||||
|
||||
## Docker Compose Configuration
|
||||
|
||||
The Docker Compose configuration spins up two services: `langflow` and `postgres`.
|
||||
|
||||
### LangFlow Service
|
||||
|
||||
The `langflow` service uses the `langflowai/langflow:latest` Docker image and exposes port 7860. It depends on the `postgres` service.
|
||||
|
||||
Environment variables:
|
||||
|
||||
- `LANGFLOW_DATABASE_URL`: The connection string for the PostgreSQL database.
|
||||
- `LANGFLOW_CONFIG_DIR`: The directory where LangFlow stores logs, file storage, monitor data, and secret keys.
|
||||
|
||||
Volumes:
|
||||
|
||||
- `langflow-data`: This volume is mapped to `/var/lib/langflow` in the container.
|
||||
|
||||
### PostgreSQL Service
|
||||
|
||||
The `postgres` service uses the `postgres:16` Docker image and exposes port 5432.
|
||||
|
||||
Environment variables:
|
||||
|
||||
- `POSTGRES_USER`: The username for the PostgreSQL database.
|
||||
- `POSTGRES_PASSWORD`: The password for the PostgreSQL database.
|
||||
- `POSTGRES_DB`: The name of the PostgreSQL database.
|
||||
|
||||
Volumes:
|
||||
|
||||
- `langflow-postgres`: This volume is mapped to `/var/lib/postgresql/data` in the container.
|
||||
|
||||
## Switching to a Specific LangFlow Version
|
||||
|
||||
If you want to use a specific version of LangFlow, you can modify the `image` field under the `langflow` service in the Docker Compose file. For example, to use version 1.0-alpha, change `langflowai/langflow:latest` to `langflowai/langflow:1.0-alpha`.
|
||||
|
|
@ -14,4 +14,4 @@ This component is available under the **Helpers** tab of the Langflow preview.
|
|||
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }}
|
||||
>
|
||||
<ReactPlayer playing controls url="/videos/chat_memory.mp4" />
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -18,4 +18,4 @@ This component is available under the **Helpers** tab of the Langflow preview.
|
|||
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }}
|
||||
>
|
||||
<ReactPlayer playing controls url="/videos/combine_text.mp4" />
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -14,4 +14,4 @@ The **Create Record** component allows you to dynamically create a `Record` from
|
|||
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }}
|
||||
>
|
||||
<ReactPlayer playing controls url="/videos/create_record.mp4" />
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -14,4 +14,4 @@ The **Pass** component enables you to ignore one input and move forward with ano
|
|||
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }}
|
||||
>
|
||||
<ReactPlayer playing controls url="/videos/pass.mp4" />
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -14,4 +14,4 @@ The **Message History** component can then be used to retrieve stored messages.
|
|||
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }}
|
||||
>
|
||||
<ReactPlayer playing controls url="/videos/store_message.mp4" />
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -12,4 +12,4 @@ The **Sub Flow** component enables a user to select a previously built flow and
|
|||
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }}
|
||||
>
|
||||
<ReactPlayer playing controls url="/videos/sub_flow.mp4" />
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -12,4 +12,4 @@ The **Text Operator** component simplifies logic. It evaluates the results from
|
|||
style={{ marginBottom: "20px", display: "flex", justifyContent: "center" }}
|
||||
>
|
||||
<ReactPlayer playing controls url="/videos/text_operator.mp4" />
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -280,9 +280,3 @@ To see options for your project, in the upper left corner of the canvas, select
|
|||
**Export** - Download your current project to your local machine as a `.json` file.
|
||||
|
||||
**Undo** or **Redo** - Undo or redo your last action.
|
||||
|
||||
import ThemedImage from "@theme/ThemedImage";
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import ReactPlayer from "react-player";
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import ThemedImage from '@theme/ThemedImage';
|
||||
import useBaseUrl from '@docusaurus/useBaseUrl';
|
||||
import ZoomableImage from '/src/theme/ZoomableImage.js';
|
||||
import ReactPlayer from 'react-player';
|
||||
import ThemedImage from "@theme/ThemedImage";
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import ReactPlayer from "react-player";
|
||||
|
||||
# 🖥️ Flows, components, collections, and projects
|
||||
|
||||
|
|
@ -17,10 +17,4 @@ A [project](#project) can be a component or a flow. Projects are saved as part o
|
|||
|
||||
For example, the **OpenAI LLM** is a **component** of the **Basic prompting** flow, and the **flow** is stored in a **collection**.
|
||||
|
||||
|
||||
|
||||
## Component
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -6,33 +6,40 @@ import Admonition from "@theme/Admonition";
|
|||
# 📦 Install Langflow
|
||||
|
||||
<Admonition type="info">
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true), to create your own Langflow workspace in minutes.
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space
|
||||
using this
|
||||
link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true),
|
||||
to create your own Langflow workspace in minutes.
|
||||
</Admonition>
|
||||
|
||||
Langflow requires [Python >=3.10](https://www.python.org/downloads/release/python-3100/) and [pip](https://pypi.org/project/pip/) or [pipx](https://pipx.pypa.io/stable/installation/) to be installed on your system.
|
||||
|
||||
Install Langflow with pip:
|
||||
|
||||
```bash
|
||||
python -m pip install langflow -U
|
||||
```
|
||||
|
||||
Install Langflow with pipx:
|
||||
|
||||
```bash
|
||||
pipx install langflow --python python3.10 --fetch-missing-python
|
||||
```
|
||||
Pipx can fetch the missing Python version for you with `--fetch-missing-python`, but you can also install the Python version manually.
|
||||
|
||||
Pipx can fetch the missing Python version for you with `--fetch-missing-python`, but you can also install the Python version manually.
|
||||
|
||||
## Install Langflow pre-release
|
||||
|
||||
To install a pre-release version of Langflow:
|
||||
|
||||
pip:
|
||||
|
||||
```bash
|
||||
python -m pip install langflow --pre --force-reinstall
|
||||
```
|
||||
|
||||
pipx:
|
||||
|
||||
```bash
|
||||
pipx install langflow --python python3.10 --fetch-missing-python --pip-args="--pre --force-reinstall"
|
||||
```
|
||||
|
|
@ -52,11 +59,13 @@ python -m langflow --help
|
|||
## ⛓️ Run Langflow
|
||||
|
||||
1. To run Langflow, enter the following command.
|
||||
|
||||
```bash
|
||||
python -m langflow run
|
||||
```
|
||||
|
||||
2. Confirm that a local Langflow instance starts by visiting `http://127.0.0.1:7860` in a Chromium-based browser.
|
||||
|
||||
```bash
|
||||
│ Welcome to ⛓ Langflow │
|
||||
│ │
|
||||
|
|
@ -83,4 +92,4 @@ You'll be presented with the following screen:
|
|||
style={{ width: "100%", margin: "20px auto" }}
|
||||
/>
|
||||
|
||||
Name your Space, define the visibility (Public or Private), and click on **Duplicate Space** to start the installation process. When installation is finished, you'll be redirected to the Space's main page to start using Langflow right away!
|
||||
Name your Space, define the visibility (Public or Private), and click on **Duplicate Space** to start the installation process. When installation is finished, you'll be redirected to the Space's main page to start using Langflow right away!
|
||||
|
|
|
|||
|
|
@ -10,12 +10,15 @@ This guide demonstrates how to build a basic prompt flow and modify that prompt
|
|||
|
||||
## Prerequisites
|
||||
|
||||
* [Langflow installed and running](./install-langflow.mdx)
|
||||
- [Langflow installed and running](./install-langflow.mdx)
|
||||
|
||||
* [OpenAI API key](https://platform.openai.com)
|
||||
- [OpenAI API key](https://platform.openai.com)
|
||||
|
||||
<Admonition type="info">
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes.
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space
|
||||
using this
|
||||
link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
to create your own Langflow workspace in minutes.
|
||||
</Admonition>
|
||||
|
||||
## Hello World - Basic Prompting
|
||||
|
|
@ -44,25 +47,25 @@ Examine the **Prompt** component. The **Template** field instructs the LLM to `A
|
|||
This should be interesting...
|
||||
|
||||
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
|
||||
## Run the basic prompting flow
|
||||
|
||||
1. Click the **Run** button.
|
||||
The **Interaction Panel** opens, where you can chat with your bot.
|
||||
The **Interaction Panel** opens, where you can chat with your bot.
|
||||
2. Type a message and press Enter.
|
||||
And... Ahoy! 🏴☠️
|
||||
The bot responds in a piratical manner!
|
||||
And... Ahoy! 🏴☠️
|
||||
The bot responds in a piratical manner!
|
||||
|
||||
## Modify the prompt for a different result
|
||||
|
||||
1. To modify your prompt results, in the **Prompt** template, click the **Template** field.
|
||||
The **Edit Prompt** window opens.
|
||||
The **Edit Prompt** window opens.
|
||||
2. Change `Answer the user as if you were a pirate` to a different character, perhaps `Answer the user as if you were Harold Abelson.`
|
||||
3. Run the basic prompting flow again.
|
||||
The response will be markedly different.
|
||||
The response will be markedly different.
|
||||
|
||||
## Next steps
|
||||
|
||||
|
|
@ -72,8 +75,6 @@ By adding Langflow components to your flow, you can create all sorts of interest
|
|||
|
||||
Here are a couple of examples:
|
||||
|
||||
* [Memory chatbot](/starter-projects/memory-chatbot.mdx)
|
||||
* [Blog writer](/starter-projects/blog-writer.mdx)
|
||||
* [Document QA](/starter-projects/document-qa.mdx)
|
||||
|
||||
|
||||
- [Memory chatbot](/starter-projects/memory-chatbot.mdx)
|
||||
- [Blog writer](/starter-projects/blog-writer.mdx)
|
||||
- [Document QA](/starter-projects/document-qa.mdx)
|
||||
|
|
|
|||
|
|
@ -29,7 +29,10 @@ Its intuitive interface allows for easy manipulation of AI building blocks, enab
|
|||
- [Langflow Canvas](/getting-started/canvas) - Learn more about the Langflow canvas.
|
||||
|
||||
<Admonition type="info">
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes.
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space
|
||||
using this
|
||||
link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
to create your own Langflow workspace in minutes.
|
||||
</Admonition>
|
||||
|
||||
## Learn more about Langflow 1.0
|
||||
|
|
|
|||
|
|
@ -9,14 +9,11 @@ The `AddContentToPage` component converts markdown text to Notion blocks and app
|
|||
|
||||
[Notion Reference](https://developers.notion.com/reference/patch-block-children)
|
||||
|
||||
<Admonition type="tip" title="Component Functionality">
|
||||
|
||||
The `AddContentToPage` component enables you to:
|
||||
|
||||
- Convert markdown text to Notion blocks.
|
||||
- Append the converted blocks to a specified Notion page.
|
||||
- Seamlessly integrate Notion content creation into Langflow workflows.
|
||||
</Admonition>
|
||||
|
||||
## Component Usage
|
||||
|
||||
|
|
@ -100,23 +97,19 @@ class NotionPageCreator(CustomComponent):
|
|||
|
||||
## Example Usage
|
||||
|
||||
<Admonition type="info" title="Example Usage">
|
||||
|
||||
Example of using the `AddContentToPage` component in a Langflow flow using Markdown as input:
|
||||
|
||||
<ZoomableImage
|
||||
alt="NotionDatabaseProperties Flow Example"
|
||||
sources={{
|
||||
alt="NotionDatabaseProperties Flow Example"
|
||||
sources={{
|
||||
light: "img/notion/AddContentToPage_flow_example.png",
|
||||
dark: "img/notion/AddContentToPage_flow_example.png",
|
||||
}}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
/>
|
||||
|
||||
In this example, the `AddContentToPage` component connects to a `MarkdownLoader` component to provide the markdown text input. The converted Notion blocks are appended to the specified Notion page using the provided `block_id` and `notion_secret`.
|
||||
|
||||
</Admonition>
|
||||
|
||||
## Best Practices
|
||||
|
||||
When using the `AddContentToPage` component:
|
||||
|
|
@ -131,8 +124,8 @@ The `AddContentToPage` component is a powerful tool for integrating Notion conte
|
|||
## Troubleshooting
|
||||
|
||||
If you encounter any issues while using the `AddContentToPage` component, consider the following:
|
||||
|
||||
- Verify the Notion integration token’s validity and permissions.
|
||||
- Check the Notion API documentation for updates.
|
||||
- Ensure markdown text is properly formatted.
|
||||
- Double-check the `block_id` for correctness.
|
||||
|
||||
|
|
|
|||
|
|
@ -8,12 +8,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
The Notion integration in Langflow enables seamless connectivity with Notion databases, pages, and users, facilitating automation and improving productivity.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Notion Components in Langflow"
|
||||
sources={{
|
||||
alt="Notion Components in Langflow"
|
||||
sources={{
|
||||
light: "img/notion/notion_bundle.jpg",
|
||||
dark: "img/notion/notion_bundle.jpg",
|
||||
}}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
/>
|
||||
|
||||
#### <a target="\_blank" href="json_files/Notion_Components_bundle.json" download>Download Notion Components Bundle</a>
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ class NotionDatabaseProperties(CustomComponent):
|
|||
description = "Retrieve properties of a Notion database."
|
||||
documentation: str = "https://docs.langflow.org/integrations/notion/list-database-properties"
|
||||
icon = "NotionDirectoryLoader"
|
||||
|
||||
|
||||
def build_config(self):
|
||||
return {
|
||||
"database_id": {
|
||||
|
|
@ -80,6 +80,7 @@ class NotionDatabaseProperties(CustomComponent):
|
|||
```
|
||||
|
||||
## Example Usage
|
||||
|
||||
<Admonition type="info" title="Example Usage">
|
||||
Here's an example of how you can use the `NotionDatabaseProperties` component in a Langflow flow:
|
||||
|
||||
|
|
@ -110,6 +111,7 @@ Feel free to explore the capabilities of the `NotionDatabaseProperties` componen
|
|||
## Troubleshooting
|
||||
|
||||
If you encounter any issues while using the `NotionDatabaseProperties` component, consider the following:
|
||||
|
||||
- Verify that the Notion integration token is valid and has the required permissions.
|
||||
- Check the database ID to ensure it matches the intended Notion database.
|
||||
- Inspect the response from the Notion API for any error messages or status codes that may indicate the cause of the issue.
|
||||
- Inspect the response from the Notion API for any error messages or status codes that may indicate the cause of the issue.
|
||||
|
|
|
|||
|
|
@ -140,16 +140,17 @@ class NotionListPages(CustomComponent):
|
|||
<Admonition type="info" title="Example Usage">
|
||||
|
||||
## Example Usage
|
||||
|
||||
Here's an example of how you can use the `NotionListPages` component in a Langflow flow and passing to the Prompt component:
|
||||
|
||||
<ZoomableImage
|
||||
alt="NotionListPages
|
||||
Flow Example"
|
||||
sources={{
|
||||
alt="NotionListPages
|
||||
Flow Example"
|
||||
sources={{
|
||||
light: "img/notion/NotionListPages_flow_example.png",
|
||||
dark: "img/notion/NotionListPages_flow_example_dark.png",
|
||||
}}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
/>
|
||||
|
||||
In this example, the `NotionListPages` component is used to retrieve specific pages from a Notion database based on the provided filters and sorting options. The retrieved data can then be processed further in the subsequent components of the flow.
|
||||
|
|
@ -157,7 +158,7 @@ In this example, the `NotionListPages` component is used to retrieve specific pa
|
|||
|
||||
## Best Practices
|
||||
|
||||
When using the `NotionListPages
|
||||
When using the `NotionListPages
|
||||
` component, consider the following best practices:
|
||||
|
||||
- Ensure that you have a valid Notion integration token with the necessary permissions to query the desired database.
|
||||
|
|
@ -171,7 +172,7 @@ We encourage you to explore the capabilities of the `NotionListPages
|
|||
|
||||
## Troubleshooting
|
||||
|
||||
If you encounter any issues while using the `NotionListPages` component, consider the following:
|
||||
If you encounter any issues while using the `NotionListPages` component, consider the following:
|
||||
|
||||
- Double-check that the `notion_secret` and `database_id` are correct and valid.
|
||||
- Verify that the `query_payload` JSON string is properly formatted and contains valid filtering and sorting options.
|
||||
|
|
|
|||
|
|
@ -9,13 +9,11 @@ The `NotionUserList` component retrieves users from Notion. It provides a conven
|
|||
|
||||
[Notion Reference](https://developers.notion.com/reference/get-users)
|
||||
|
||||
<Admonition type="tip" title="Component Functionality">
|
||||
The `NotionUserList` component enables you to:
|
||||
The `NotionUserList` component enables you to:
|
||||
|
||||
- Retrieve user data from Notion
|
||||
- Access user information such as ID, type, name, and avatar URL
|
||||
- Integrate Notion user data seamlessly into your Langflow workflows
|
||||
</Admonition>
|
||||
|
||||
## Component Usage
|
||||
|
||||
|
|
@ -94,34 +92,31 @@ class NotionUserList(CustomComponent):
|
|||
```
|
||||
|
||||
## Example Usage
|
||||
<Admonition type="info" title="Example Usage">
|
||||
|
||||
Here's an example of how you can use the `NotionUserList` component in a Langflow flow and passing the outputs to the Prompt component:
|
||||
|
||||
<ZoomableImage
|
||||
alt="NotionUserList Flow Example"
|
||||
sources={{
|
||||
alt="NotionUserList Flow Example"
|
||||
sources={{
|
||||
light: "img/notion/NotionUserList_flow_example.png",
|
||||
dark: "img/notion/NotionUserList_flow_example_dark.png",
|
||||
}}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
/>
|
||||
|
||||
</Admonition>
|
||||
|
||||
## Best Practices
|
||||
|
||||
When using the `NotionUserList` component, consider the following best practices:
|
||||
When using the `NotionUserList` component, consider the following best practices:
|
||||
|
||||
- Ensure that you have a valid Notion integration token with the necessary permissions to retrieve user data.
|
||||
- Handle the retrieved user data securely and in compliance with Notion's API usage guidelines.
|
||||
|
||||
The `NotionUserList` component provides a seamless way to integrate Notion user data into your Langflow workflows. By leveraging this component, you can easily retrieve and utilize user information from Notion, enhancing the capabilities of your Langflow applications. Feel free to explore and experiment with the `NotionUserList` component to unlock new possibilities in your Langflow projects!
|
||||
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If you encounter any issues while using the `NotionUserList` component, consider the following:
|
||||
If you encounter any issues while using the `NotionUserList` component, consider the following:
|
||||
|
||||
- Double-check that your Notion integration token is valid and has the required permissions.
|
||||
- Verify that you have installed the necessary dependencies (`requests`) for the component to function properly.
|
||||
- Check the Notion API documentation for any updates or changes that may affect the component's functionality.
|
||||
- Check the Notion API documentation for any updates or changes that may affect the component's functionality.
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ The `NotionPageContent` component retrieves the content of a Notion page as plai
|
|||
|
||||
<Admonition type="tip" title="Component Functionality">
|
||||
|
||||
The `NotionPageContent` component enables you to:
|
||||
The `NotionPageContent` component enables you to:
|
||||
|
||||
- Retrieve the content of a Notion page as plain text
|
||||
- Extract text from various block types, including paragraphs, headings, lists, and more
|
||||
|
|
@ -114,18 +114,18 @@ class NotionPageContent(CustomComponent):
|
|||
Here's an example of how you can use the `NotionPageContent` component in a Langflow flow:
|
||||
|
||||
<ZoomableImage
|
||||
alt="NotionPageContent Flow Example"
|
||||
sources={{
|
||||
alt="NotionPageContent Flow Example"
|
||||
sources={{
|
||||
light: "img/notion/NotionPageContent_flow_example.png",
|
||||
dark: "img/notion/NotionPageContent_flow_example_dark.png",
|
||||
}}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
/>
|
||||
</Admonition>
|
||||
|
||||
## Best Practices
|
||||
|
||||
When using the `NotionPageContent` component, consider the following best practices:
|
||||
When using the `NotionPageContent` component, consider the following best practices:
|
||||
|
||||
- Ensure that you have the necessary permissions to access the Notion page you want to retrieve.
|
||||
- Keep your Notion integration token secure and avoid sharing it publicly.
|
||||
|
|
@ -135,7 +135,7 @@ The `NotionPageContent` component provides a seamless way to integrate Notion pa
|
|||
|
||||
## Troubleshooting
|
||||
|
||||
If you encounter any issues while using the `NotionPageContent` component, consider the following:
|
||||
If you encounter any issues while using the `NotionPageContent` component, consider the following:
|
||||
|
||||
- Double-check that you have provided the correct Notion page ID.
|
||||
- Verify that your Notion integration token is valid and has the necessary permissions.
|
||||
|
|
|
|||
|
|
@ -97,16 +97,17 @@ class NotionPageCreator(CustomComponent):
|
|||
```
|
||||
|
||||
## Example Usage
|
||||
|
||||
<Admonition type="info" title="Example Usage">
|
||||
Here's an example of how to use the `NotionPageCreator` component in a Langflow flow:
|
||||
|
||||
<ZoomableImage
|
||||
alt="NotionPageCreator Flow Example"
|
||||
sources={{
|
||||
alt="NotionPageCreator Flow Example"
|
||||
sources={{
|
||||
light: "img/notion/NotionPageCreator_flow_example.png",
|
||||
dark: "img/notion/NotionPageCreator_flow_example_dark.png",
|
||||
}}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
/>
|
||||
</Admonition>
|
||||
|
||||
|
|
@ -124,6 +125,7 @@ The `NotionPageCreator` component simplifies the process of creating pages in a
|
|||
## Troubleshooting
|
||||
|
||||
If you encounter any issues while using the `NotionPageCreator` component, consider the following:
|
||||
|
||||
- Double-check that the `database_id` and `notion_secret` inputs are correct and valid.
|
||||
- Verify that the `properties` input is properly formatted as a JSON string and matches the structure of your Notion database.
|
||||
- Check the Notion API documentation for any updates or changes that may affect the component's functionality.
|
||||
- Check the Notion API documentation for any updates or changes that may affect the component's functionality.
|
||||
|
|
|
|||
|
|
@ -146,16 +146,17 @@ class NotionSearch(CustomComponent):
|
|||
```
|
||||
|
||||
## Example Usage
|
||||
|
||||
<Admonition type="info" title="Example Usage">
|
||||
Here's an example of how you can use the `NotionSearch` component in a Langflow flow:
|
||||
|
||||
<ZoomableImage
|
||||
alt="NotionSearch Flow Example"
|
||||
sources={{
|
||||
alt="NotionSearch Flow Example"
|
||||
sources={{
|
||||
light: "img/notion/NotionSearch_flow_example.png",
|
||||
dark: "img/notion/NotionSearch_flow_example_dark.png",
|
||||
}}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
style={{ width: "100%", margin: "20px 0" }}
|
||||
/>
|
||||
|
||||
In this example, the `NotionSearch` component is used to search for pages and databases in Notion based on the provided query and filter criteria. The retrieved data can then be processed further in the subsequent components of the flow.
|
||||
|
|
|
|||
|
|
@ -76,4 +76,3 @@ Refer to the individual component documentation for more details on how to use e
|
|||
- [Notion Integration Capabilities](https://developers.notion.com/reference/capabilities)
|
||||
|
||||
If you encounter any issues or have questions, please reach out to our support team or consult the Langflow community forums.
|
||||
|
||||
|
|
|
|||
|
|
@ -1,118 +0,0 @@
|
|||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Global Variables
|
||||
|
||||
## TL;DR
|
||||
|
||||
- Global Variables are reusable variables that can be accessed from any Text field in your project.
|
||||
- To create a Global Variable, click on the 🌐 button in a Text field and then **+ Add New Variable**.
|
||||
- Define the **Name**, **Type**, and **Value** of the variable.
|
||||
- Click on **Save Variable** to create the variable.
|
||||
- All Credential Global Variables are encrypted and cannot be accessed by anyone but you.
|
||||
- Set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`true`_ in your `.env` file to add all variables in _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ to your user's Global Variables.
|
||||
|
||||
Global Variables are a really useful feature of Langflow.
|
||||
They allow you to define reusable variables that can be accessed from any Text field in your project.
|
||||
|
||||
The first thing you need to do is find a **Text field** in a Component, so let's talk about what a Text field is.
|
||||
|
||||
## Text Fields
|
||||
|
||||
Text fields are the fields in a Component where you can write text but that do not allow you to open a Text Area.
|
||||
|
||||
The easiest way to find fields that are Text fields, though, is to look for fields that have a 🌐 button.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/ollama-gv.png",
|
||||
dark: "img/ollama-gv.png",
|
||||
}}
|
||||
style={{ width: "50%" }}
|
||||
/>
|
||||
|
||||
## Creating a Global Variable
|
||||
|
||||
To create a Global Variable, click on the 🌐 button in a Text field. This opens a dropdown showing your currently available variables, with **+ Add New Variable** at the end.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/add-new-variable.png",
|
||||
dark: "img/add-new-variable.png",
|
||||
}}
|
||||
style={{ width: "60%" }}
|
||||
/>
|
||||
|
||||
Click on **+ Add New Variable** and a window will open where you can define your new Global Variable.
|
||||
|
||||
In it, you can define the **Name** of the variable, the optional **Type** of the variable, and the **Value** of the variable.
|
||||
|
||||
The **Name** is the name that you will use to refer to the variable in your Text fields.
|
||||
|
||||
The **Type** is optional for now but will be used in the future to allow for more advanced features.
|
||||
|
||||
The **Value** is the value that the variable will have.
|
||||
{/* say that all variables are encrypted */}
|
||||
|
||||
<Admonition type="warning">
|
||||
All Credential Global Variables are encrypted and cannot be accessed by anyone
|
||||
but you.
|
||||
</Admonition>
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: "img/create-variable-window.png",
|
||||
dark: "img/create-variable-window.png",
|
||||
}}
|
||||
style={{ width: "60%" }}
|
||||
/>
|
||||
|
||||
After you have defined your variable, click on **Save Variable** and your variable will be created.
|
||||
|
||||
After that, once you click on the 🌐 button in a Text field, you will see your new variable in the dropdown.
|
||||
|
||||
## Environment Variables
|
||||
|
||||
If you set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`true`_ (which is the default value) in your `.env` file, all variables in _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ will be added to your user's Global Variables.
|
||||
|
||||
All of these variables can be used in your project as any other Global Variable.
|
||||
|
||||
<Admonition type="tip">
|
||||
You can set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`false`_ in your
|
||||
`.env` file to prevent this behavior.
|
||||
</Admonition>
|
||||
|
||||
You can also set _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ to a list of variables that you want to get from the environment.
|
||||
|
||||
The default list at the moment is:
|
||||
|
||||
- ANTHROPIC_API_KEY
|
||||
- ASTRA_DB_API_ENDPOINT
|
||||
- ASTRA_DB_APPLICATION_TOKEN
|
||||
- AZURE_OPENAI_API_KEY
|
||||
- AZURE_OPENAI_API_DEPLOYMENT_NAME
|
||||
- AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME
|
||||
- AZURE_OPENAI_API_INSTANCE_NAME
|
||||
- AZURE_OPENAI_API_VERSION
|
||||
- COHERE_API_KEY
|
||||
- GOOGLE_API_KEY
|
||||
- GROQ_API_KEY
|
||||
- HUGGINGFACEHUB_API_TOKEN
|
||||
- OPENAI_API_KEY
|
||||
- PINECONE_API_KEY
|
||||
- SEARCHAPI_API_KEY
|
||||
- SERPAPI_API_KEY
|
||||
- UPSTASH_VECTOR_REST_URL
|
||||
- UPSTASH_VECTOR_REST_TOKEN
|
||||
- VECTARA_CUSTOMER_ID
|
||||
- VECTARA_CORPUS_ID
|
||||
- VECTARA_API_KEY
|
||||
|
||||
<Admonition type="tip">
|
||||
Set _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ as a comma-separated list
|
||||
of variables (e.g. _`"VARIABLE1, VARIABLE2"`_) or as a JSON-encoded string
|
||||
(e.g. _`'["VARIABLE1", "VARIABLE2"]'`_).
|
||||
</Admonition>
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
# Inputs and Outputs
|
||||
|
||||
TL;DR: Inputs and Outputs are a category of components that are used to define where data comes in and out of your flow. They also
|
||||
dynamically change the Playground and can be renamed to make it easier to build and maintain your flows.
|
||||
|
||||
## Introduction
|
||||
|
||||
Langflow 1.0 introduces new categories of components called Inputs and Outputs. They are used to make it easier to understand and interact with your flows.
|
||||
|
||||
Let's start with what they have in common:
|
||||
|
||||
- Components in these categories connect to components that have Text or Record inputs or outputs. Some can connect to both but you have to pick what type of data you want to output or input.
|
||||
- They can be renamed to help you identify them more easily in the Playground and while using the API.
|
||||
- They dynamically change the Playground to make it easier to understand and interact with your flows.
|
||||
|
||||
Native Langflow Components were created to be powerful tools that work around Langflow's features. They are designed to be easy to use and understand, and to help you build your flows faster.
|
||||
|
||||
Let's dive into Inputs and Outputs.
|
||||
|
||||
## Inputs
|
||||
|
||||
Inputs are components that are used to define where data comes into your flow. They can be used to receive data from the user, from a database, or from any other source that can be converted to Text or Record.
|
||||
|
||||
The difference between Chat Input and other Input components is the format of the output, the number of configurable fields, and the way they are displayed in the Playground.
|
||||
|
||||
Chat Input components can output Text or Record. When you want to pass the sender name, or sender to the next component, you can use the Record output, and when you want to pass the message only you can use the Text output. This is useful when saving the message to a database or a memory system like Zep.
|
||||
|
||||
You can find out more about it and the other Inputs [here](../components/inputs).
|
||||
|
||||
## Outputs
|
||||
|
||||
Outputs are components that are used to define where data comes out of your flow. They can be used to send data to the user, to the Playground, or to define how the data will be displayed in the Playground.
|
||||
|
||||
The Chat Output works similarly to the Chat Input but does not have a field that allows for written input. It is used as an Output definition and can be used to send data to the user.
|
||||
|
||||
You can find out more about it and the other Outputs [here](../components/outputs).
|
||||
|
|
@ -41,7 +41,7 @@ We have a special channel in our Discord server dedicated to Langflow 1.0 migrat
|
|||
|
||||
Langflow 1.0 introduces the concept of Inputs and Outputs to flows, allowing a clear definition of the data flow between components. Discover how to use Inputs and Outputs to pass data between components and create more dynamic flows.
|
||||
|
||||
[Learn more about Inputs and Outputs of Components](../migration/inputs-and-outputs)
|
||||
[Learn more about Inputs and Outputs of Components](../components/inputs-and-outputs)
|
||||
|
||||
## To Compose or Not to Compose: the choice is yours
|
||||
|
||||
|
|
@ -71,7 +71,7 @@ Langflow 1.0 introduces many new native categories, including Inputs, Outputs, H
|
|||
|
||||
With the introduction of Text and Record types connections between Components are more intuitive and easier to understand. This is the first step in a series of improvements to the way you interact with Langflow. Learn how to use Text, and Record and how they help you build better flows.
|
||||
|
||||
[Learn more about Text and Record](../migration/text-and-record)
|
||||
[Learn more about Text and Record](../components/text-and-record)
|
||||
|
||||
## CustomComponent for All Components
|
||||
|
||||
|
|
@ -119,7 +119,7 @@ Things got a whole lot easier. You can now pass tweaks and inputs in the API by
|
|||
|
||||
Global Variables can be used in any Text Field across your projects. Learn how to define and utilize Global Variables to streamline your workflow.
|
||||
|
||||
[Learn more about Global Variables](../migration/global-variables)
|
||||
[Learn more about Global Variables](../administration/global-env.mdx)
|
||||
|
||||
## Experimental Components
|
||||
|
||||
|
|
|
|||
|
|
@ -25,11 +25,11 @@ ModuleNotFoundError: No module named 'langflow.__main__'
|
|||
There are two possible reasons for this error:
|
||||
|
||||
1. You've installed Langflow using _`pip install langflow`_ but you already had a previous version of Langflow installed in your system.
|
||||
In this case, you might be running the wrong executable.
|
||||
To solve this issue, run the correct executable by running _`python -m langflow run`_ instead of _`langflow run`_.
|
||||
If that doesn't work, try uninstalling and reinstalling Langflow with _`python -m pip install langflow --pre -U`_.
|
||||
In this case, you might be running the wrong executable.
|
||||
To solve this issue, run the correct executable by running _`python -m langflow run`_ instead of _`langflow run`_.
|
||||
If that doesn't work, try uninstalling and reinstalling Langflow with _`python -m pip install langflow --pre -U`_.
|
||||
2. Some version conflicts might have occurred during the installation process.
|
||||
Run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall Langflow and its dependencies.
|
||||
Run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall Langflow and its dependencies.
|
||||
|
||||
## _`Something went wrong running migrations. Please, run 'langflow migration --fix'`_
|
||||
|
||||
|
|
@ -45,4 +45,3 @@ There are two possible reasons for this error:
|
|||
This error can occur during Langflow upgrades when the new version can't override `langflow-pre.db` in `.cache/langflow/`. Clearing the cache removes this file but will also erase your settings.
|
||||
|
||||
If you wish to retain your files, back them up before clearing the folder.
|
||||
|
||||
|
|
|
|||
|
|
@ -1,45 +0,0 @@
|
|||
# Text and Record
|
||||
|
||||
In Langflow 1.0 we added two main input and output types: Text and Record. Text is a simple string input and output type, while Record is a structure very similar to a dictionary in Python. It is a key-value pair data structure.
|
||||
|
||||
We've created a few components to help you work with these types. Let's see how a few of them work.
|
||||
|
||||
### Records To Text
|
||||
|
||||
This is a Component that takes in Records and outputs a Text. It does this using a template string and concatenating the values of the Record, one per line.
|
||||
|
||||
If we have the following Records:
|
||||
|
||||
```json
|
||||
{
|
||||
"sender_name": "Alice",
|
||||
"message": "Hello!"
|
||||
}
|
||||
{
|
||||
"sender_name": "John",
|
||||
"message": "Hi!"
|
||||
}
|
||||
```
|
||||
|
||||
And the template string is: _`{sender_name}: {message}`_
|
||||
|
||||
```
|
||||
Alice: Hello!
|
||||
John: Hi!
|
||||
```
|
||||
|
||||
### Create Record
|
||||
|
||||
This Component allows you to create a Record from a number of inputs. You can add as many key-value pairs as you want (as long as it is less than 15 😅). Once you've picked that number you'll need to write the name of the Key and can pass Text values from other components to it.
|
||||
|
||||
### Documents To Records
|
||||
|
||||
This Component takes in a [LangChain](https://langchain.com) Document and outputs a Record. It does this by extracting the _`page_content`_ and the _`metadata`_ from the Document and adding them to the Record as _`text`_ and _`data`_ respectively.
|
||||
|
||||
## Why is this useful?
|
||||
|
||||
The idea was to create a unified way to work with complex data in Langflow, and to make it easier to work with data that is not just a simple string. This way you can create more complex workflows and use the data in more ways.
|
||||
|
||||
## What's next?
|
||||
|
||||
We are planning to integrate an array of modalities to Langflow, such as images, audio, and video. This will allow you to create even more complex workflows and use cases. Stay tuned for more updates! 🚀
|
||||
|
|
@ -14,12 +14,15 @@ This article demonstrates how to use Langflow's prompt tools to issue basic prom
|
|||
|
||||
## Prerequisites
|
||||
|
||||
* [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
- [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
|
||||
* [OpenAI API key created](https://platform.openai.com)
|
||||
- [OpenAI API key created](https://platform.openai.com)
|
||||
|
||||
<Admonition type="info">
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes.
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space
|
||||
using this
|
||||
link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
to create your own Langflow workspace in minutes.
|
||||
</Admonition>
|
||||
|
||||
## Create the basic prompting project
|
||||
|
|
@ -42,25 +45,21 @@ Examine the **Prompt** component. The **Template** field instructs the LLM to `A
|
|||
This should be interesting...
|
||||
|
||||
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
|
||||
## Run the basic prompting flow
|
||||
|
||||
1. Click the **Run** button.
|
||||
The **Interaction Panel** opens, where you can converse with your bot.
|
||||
The **Interaction Panel** opens, where you can converse with your bot.
|
||||
2. Type a message and press Enter.
|
||||
The bot responds in a markedly piratical manner!
|
||||
The bot responds in a markedly piratical manner!
|
||||
|
||||
## Modify the prompt for a different result
|
||||
|
||||
1. To modify your prompt results, in the **Prompt** template, click the **Template** field.
|
||||
The **Edit Prompt** window opens.
|
||||
The **Edit Prompt** window opens.
|
||||
2. Change `Answer the user as if you were a pirate` to a different character, perhaps `Answer the user as if you were Harold Abelson.`
|
||||
3. Run the basic prompting flow again.
|
||||
The response will be markedly different.
|
||||
|
||||
|
||||
|
||||
|
||||
The response will be markedly different.
|
||||
|
|
|
|||
|
|
@ -10,12 +10,15 @@ Build a blog writer with OpenAI that uses URLs for reference content.
|
|||
|
||||
## Prerequisites
|
||||
|
||||
* [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
- [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
|
||||
* [OpenAI API key created](https://platform.openai.com)
|
||||
- [OpenAI API key created](https://platform.openai.com)
|
||||
|
||||
<Admonition type="info">
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes.
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space
|
||||
using this
|
||||
link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
to create your own Langflow workspace in minutes.
|
||||
</Admonition>
|
||||
|
||||
## Create the Blog Writer project
|
||||
|
|
@ -36,6 +39,7 @@ Build a blog writer with OpenAI that uses URLs for reference content.
|
|||
This flow creates a one-shot prompt flow with **Prompt**, **OpenAI**, and **Chat Output** components, and augments the flow with reference content and instructions from the **URL** and **Instructions** components.
|
||||
|
||||
The **Prompt** component's default **Template** field looks like this:
|
||||
|
||||
```bash
|
||||
Reference 1:
|
||||
|
||||
|
|
@ -59,16 +63,16 @@ The `{instructions}` value is received from the **Value** field of the **Instruc
|
|||
The `reference_1` and `reference_2` values are received from the **URL** fields of the **URL** components.
|
||||
|
||||
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
|
||||
## Run the Blog Writer flow
|
||||
|
||||
1. Click the **Run** button.
|
||||
The **Interaction Panel** opens, where you can run your one-shot flow.
|
||||
The **Interaction Panel** opens, where you can run your one-shot flow.
|
||||
2. Click the **Lightning Bolt** icon to run your flow.
|
||||
3. The **OpenAI** component constructs a blog post with the **URL** items as context.
|
||||
The default **URL** values are for web pages at `promptingguide.ai`, so your blog post will be about prompting LLMs.
|
||||
The default **URL** values are for web pages at `promptingguide.ai`, so your blog post will be about prompting LLMs.
|
||||
|
||||
To write about something different, change the values in the **URL** components, and see what the LLM constructs.
|
||||
To write about something different, change the values in the **URL** components, and see what the LLM constructs.
|
||||
|
|
|
|||
|
|
@ -10,12 +10,15 @@ Build a question-and-answer chatbot with a document loaded from local memory.
|
|||
|
||||
## Prerequisites
|
||||
|
||||
* [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
- [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
|
||||
* [OpenAI API key created](https://platform.openai.com)
|
||||
- [OpenAI API key created](https://platform.openai.com)
|
||||
|
||||
<Admonition type="info">
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes.
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space
|
||||
using this
|
||||
link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
to create your own Langflow workspace in minutes.
|
||||
</Admonition>
|
||||
|
||||
## Create the Document QA project
|
||||
|
|
@ -39,24 +42,27 @@ The **Prompt** component is instructed to answer questions based on the contents
|
|||
Including a file with the prompt gives the **OpenAI** component context it may not otherwise have access to.
|
||||
|
||||
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
|
||||
5. To select a document to load, in the **Files** component, click within the **Path** field.
|
||||
1. Select a local file, and then click **Open**.
|
||||
2. The file name appears in the field.
|
||||
<Admonition type="tip">
|
||||
The file must be of an extension type listed [here](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/base/data/utils.py#L13).
|
||||
</Admonition>
|
||||
1. Select a local file, and then click **Open**.
|
||||
2. The file name appears in the field.
|
||||
<Admonition type="tip">
|
||||
The file must be of an extension type listed
|
||||
[here](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/base/data/utils.py#L13).
|
||||
</Admonition>
|
||||
|
||||
## Run the Document QA flow
|
||||
|
||||
1. Click the **Run** button.
|
||||
The **Interaction Panel** opens, where you can converse with your bot.
|
||||
The **Interaction Panel** opens, where you can converse with your bot.
|
||||
2. Type a message and press Enter.
|
||||
For this example, we loaded an error log `.txt` file and asked, "What went wrong?"
|
||||
The bot responded:
|
||||
For this example, we loaded an error log `.txt` file and asked, "What went wrong?"
|
||||
The bot responded:
|
||||
|
||||
```
|
||||
The issue occurred during the execution of migrations in the application. Specifically, an error was raised by the Alembic library, indicating that new upgrade operations were detected that had not been accounted for in the existing migration scripts. The operation in question involved modifying the nullable property of a column (apikey, created_at) in the database, with details about the existing type (DATETIME()), existing server default, and other properties.
|
||||
```
|
||||
|
|
|
|||
|
|
@ -10,12 +10,15 @@ This flow extends the [basic prompting flow](./basic-prompting.mdx) to include c
|
|||
|
||||
## Prerequisites
|
||||
|
||||
* [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
- [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
|
||||
* [OpenAI API key created](https://platform.openai.com)
|
||||
- [OpenAI API key created](https://platform.openai.com)
|
||||
|
||||
<Admonition type="info">
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes.
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space
|
||||
using this
|
||||
link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
to create your own Langflow workspace in minutes.
|
||||
</Admonition>
|
||||
|
||||
## Create the memory chatbot project
|
||||
|
|
@ -43,16 +46,16 @@ This chatbot is augmented with the **Chat Memory** component, which stores messa
|
|||
The **Chat History** component gives the **OpenAI** component a memory of previous questions.
|
||||
|
||||
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
|
||||
## Run the memory chatbot flow
|
||||
|
||||
1. Click the **Run** button.
|
||||
The **Interaction Panel** opens, where you can converse with your bot.
|
||||
The **Interaction Panel** opens, where you can converse with your bot.
|
||||
2. Type a message and press Enter.
|
||||
The bot will respond according to the template in the **Prompt** component.
|
||||
The bot will respond according to the template in the **Prompt** component.
|
||||
3. Type more questions. In the **Outputs** log, your queries are logged in order. Up to 5 queries are stored by default. Try asking `What is the first subject I asked you about?` to see where the LLM's memory disappears.
|
||||
|
||||
## Modify the Session ID field to have multiple conversations
|
||||
|
|
@ -65,11 +68,11 @@ You can demonstrate this by modifying the **Session ID** value to switch between
|
|||
|
||||
1. In the **Session ID** field of the **Chat Memory** and **Chat Input** components, change the **Session ID** value from `MySessionID` to `AnotherSessionID`.
|
||||
2. Click the **Run** button to run your flow.
|
||||
In the **Interaction Panel**, you will have a new conversation. (You may need to clear the cache with the **Eraser** button).
|
||||
In the **Interaction Panel**, you will have a new conversation. (You may need to clear the cache with the **Eraser** button).
|
||||
3. Type a few questions to your bot.
|
||||
4. In the **Session ID** field of the **Chat Memory** and **Chat Input** components, change the **Session ID** value back to `MySessionID`.
|
||||
5. Run your flow.
|
||||
The **Outputs** log of the **Interaction Panel** displays the history from your initial chat with `MySessionID`.
|
||||
The **Outputs** log of the **Interaction Panel** displays the history from your initial chat with `MySessionID`.
|
||||
|
||||
## Store Session ID as a Langflow variable
|
||||
|
||||
|
|
@ -79,4 +82,3 @@ To store **Session ID** as a Langflow variable, in the **Session ID** field, cli
|
|||
2. In the **Value** field, enter a value like `1B5EBD79-6E9C-4533-B2C8-7E4FF29E983B`.
|
||||
3. Click **Save Variable**.
|
||||
4. Apply this variable to **Chat Input**.
|
||||
|
||||
|
|
|
|||
|
|
@ -17,16 +17,19 @@ We've chosen [Astra DB](https://astra.datastax.com/signup?utm_source=langflow-pr
|
|||
## Prerequisites
|
||||
|
||||
<Admonition type="info">
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes.
|
||||
Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space
|
||||
using this
|
||||
link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
|
||||
to create your own Langflow workspace in minutes.
|
||||
</Admonition>
|
||||
|
||||
* [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
- [Langflow installed and running](../getting-started/install-langflow.mdx)
|
||||
|
||||
* [OpenAI API key](https://platform.openai.com)
|
||||
- [OpenAI API key](https://platform.openai.com)
|
||||
|
||||
* [An Astra DB vector database created](https://docs.datastax.com/en/astra-db-serverless/get-started/quickstart.html) with:
|
||||
* Application token (`AstraCS:WSnyFUhRxsrg…`)
|
||||
* API endpoint (`https://ASTRA_DB_ID-ASTRA_DB_REGION.apps.astra.datastax.com`)
|
||||
- [An Astra DB vector database created](https://docs.datastax.com/en/astra-db-serverless/get-started/quickstart.html) with:
|
||||
- Application token (`AstraCS:WSnyFUhRxsrg…`)
|
||||
- API endpoint (`https://ASTRA_DB_ID-ASTRA_DB_REGION.apps.astra.datastax.com`)
|
||||
|
||||
## Create the vector store RAG project
|
||||
|
||||
|
|
@ -49,38 +52,40 @@ The **ingestion** flow (bottom of the screen) populates the vector store with da
|
|||
It ingests data from a file (**File**), splits it into chunks (**Recursive Character Text Splitter**), indexes it in Astra DB (**Astra DB**), and computes embeddings for the chunks (**OpenAI Embeddings**).
|
||||
This forms a "brain" for the query flow.
|
||||
|
||||
The **query** flow (top of the screen) allows users to chat with the embedded vector store data. It's a little more complex:
|
||||
The **query** flow (top of the screen) allows users to chat with the embedded vector store data. It's a little more complex:
|
||||
|
||||
* **Chat Input** component defines where to put the user input coming from the Playground.
|
||||
* **OpenAI Embeddings** component generates embeddings from the user input.
|
||||
* **Astra DB Search** component retrieves the most relevant Records from the Astra DB database.
|
||||
* **Text Output** component turns the Records into Text by concatenating them and also displays it in the Playground.
|
||||
* **Prompt** component takes in the user input and the retrieved Records as text and builds a prompt for the OpenAI model.
|
||||
* **OpenAI** component generates a response to the prompt.
|
||||
* **Chat Output** component displays the response in the Playground.
|
||||
- **Chat Input** component defines where to put the user input coming from the Playground.
|
||||
- **OpenAI Embeddings** component generates embeddings from the user input.
|
||||
- **Astra DB Search** component retrieves the most relevant Records from the Astra DB database.
|
||||
- **Text Output** component turns the Records into Text by concatenating them and also displays it in the Playground.
|
||||
- **Prompt** component takes in the user input and the retrieved Records as text and builds a prompt for the OpenAI model.
|
||||
- **OpenAI** component generates a response to the prompt.
|
||||
- **Chat Output** component displays the response in the Playground.
|
||||
|
||||
4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
|
||||
4. To create environment variables for the **Astra DB** and **Astra DB Search** components:
|
||||
1. In the **Token** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
2. In the **Variable Name** field, enter `astra_token`.
|
||||
3. In the **Value** field, paste your Astra application token (`AstraCS:WSnyFUhRxsrg…`).
|
||||
4. Click **Save Variable**.
|
||||
5. Repeat the above steps for the **API Endpoint** field, pasting your Astra API Endpoint instead (`https://ASTRA_DB_ID-ASTRA_DB_REGION.apps.astra.datastax.com`).
|
||||
6. Add the global variable to both the **Astra DB** and **Astra DB Search** components.
|
||||
1. In the **Variable Name** field, enter `openai_api_key`.
|
||||
2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
|
||||
3. Click **Save Variable**.
|
||||
|
||||
5. To create environment variables for the **Astra DB** and **Astra DB Search** components:
|
||||
1. In the **Token** field, click the **Globe** button, and then click **Add New Variable**.
|
||||
2. In the **Variable Name** field, enter `astra_token`.
|
||||
3. In the **Value** field, paste your Astra application token (`AstraCS:WSnyFUhRxsrg…`).
|
||||
4. Click **Save Variable**.
|
||||
5. Repeat the above steps for the **API Endpoint** field, pasting your Astra API Endpoint instead (`https://ASTRA_DB_ID-ASTRA_DB_REGION.apps.astra.datastax.com`).
|
||||
6. Add the global variable to both the **Astra DB** and **Astra DB Search** components.
|
||||
|
||||
## Run the vector store RAG flow
|
||||
|
||||
1. Click the **Playground** button.
|
||||
The **Playground** opens, where you can chat with your data.
|
||||
The **Playground** opens, where you can chat with your data.
|
||||
2. Type a message and press Enter. (Try something like "What topics do you know about?")
|
||||
3. The bot will respond with a summary of the data you've embedded.
|
||||
|
||||
For example, we embedded a PDF of an engine maintenance manual and asked, "How do I change the oil?"
|
||||
The bot responds:
|
||||
|
||||
```
|
||||
To change the oil in the engine, follow these steps:
|
||||
|
||||
|
|
@ -102,7 +107,3 @@ You should use a 3/8 inch wrench to remove the oil drain cap.
|
|||
```
|
||||
|
||||
This is the size the engine manual lists as well. This confirms our flow works, because the query returns the unique knowledge we embedded from the Astra vector store.
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ By having a clear definition of Inputs and Outputs, we could build the experienc
|
|||
When building a project, testing and debugging are crucial. The Playground is a tool that changes dynamically based on the Inputs and Outputs you defined in your project.
|
||||
|
||||
For example, let's say you are building a simple RAG application. Generally, you have an Input, some references that come from a Vector Store Search, a Prompt and the answer.
|
||||
Now, you could plug the output of your Prompt into a [Text Output](../components/outputs#Text-Output), rename that to "Prompt Result" and see the output of your Prompt in the Playground.
|
||||
Now, you could plug the output of your Prompt into a [Text Output](../components/inputs-and-outputs), rename that to "Prompt Result" and see the output of your Prompt in the Playground.
|
||||
|
||||
{/* Add image here of the described above */}
|
||||
|
||||
|
|
|
|||
|
|
@ -49,8 +49,8 @@ module.exports = {
|
|||
label: "Core Components",
|
||||
collapsed: false,
|
||||
items: [
|
||||
"components/inputs",
|
||||
"components/outputs",
|
||||
"components/inputs-and-outputs",
|
||||
"components/text-and-record",
|
||||
"components/data",
|
||||
"components/models",
|
||||
"components/helpers",
|
||||
|
|
@ -91,15 +91,12 @@ module.exports = {
|
|||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Migration Guides",
|
||||
label: "Migration",
|
||||
collapsed: false,
|
||||
items: [
|
||||
"migration/possible-installation-issues",
|
||||
"migration/migrating-to-one-point-zero",
|
||||
"migration/inputs-and-outputs",
|
||||
"migration/text-and-record",
|
||||
"migration/compatibility",
|
||||
"migration/global-variables",
|
||||
],
|
||||
},
|
||||
{
|
||||
|
|
@ -116,7 +113,10 @@ module.exports = {
|
|||
type: "category",
|
||||
label: "Deployment",
|
||||
collapsed: true,
|
||||
items: ["deployment/gcp-deployment"],
|
||||
items: ["deployment/docker",
|
||||
"deployment/backend-only",
|
||||
"deployment/gcp-deployment",
|
||||
],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
|
|
|
|||
6368
docs/static/data/AstraDB-RAG-Flows.json
vendored
6368
docs/static/data/AstraDB-RAG-Flows.json
vendored
File diff suppressed because one or more lines are too long
882
docs/static/json_files/Notion_Components_bundle.json
vendored
882
docs/static/json_files/Notion_Components_bundle.json
vendored
File diff suppressed because one or more lines are too long
4
docs/static/logos/twitter.svg
vendored
4
docs/static/logos/twitter.svg
vendored
|
|
@ -1,3 +1,3 @@
|
|||
<svg width="18" height="18" viewBox="0 0 24 20" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M24 2.36764C23.1181 2.76923 22.1687 3.04081 21.1728 3.16215C22.1898 2.5381 22.9703 1.54857 23.338 0.369812C22.3856 0.947636 21.3334 1.368 20.2092 1.59334C19.3133 0.612492 18.0328 0 16.6156 0C13.8983 0 11.6936 2.26074 11.6936 5.04874C11.6936 5.44456 11.7359 5.82881 11.8204 6.19862C7.72812 5.98771 4.10072 3.97977 1.67071 0.921629C1.24669 1.66992 1.00439 2.5381 1.00439 3.46262C1.00439 5.21343 1.87357 6.75911 3.19493 7.66485C2.38915 7.64029 1.62845 7.41061 0.963547 7.03502V7.09713C0.963547 9.54422 2.66102 11.5854 4.91495 12.0476C4.5022 12.1661 4.06691 12.2253 3.61754 12.2253C3.30058 12.2253 2.99066 12.195 2.69062 12.1358C3.31748 14.1408 5.1347 15.6013 7.29001 15.6403C5.6052 16.9953 3.48089 17.8028 1.17485 17.8028C0.777598 17.8028 0.384575 17.7796 0 17.7334C2.17926 19.1636 4.76844 20 7.54781 20C16.6057 20 21.5573 12.3077 21.5573 5.63524C21.5573 5.41566 21.5531 5.19609 21.5447 4.98084C22.5067 4.26868 23.3422 3.38027 24 2.36764Z" fill="#00AAEC"/>
|
||||
<svg width="1200" height="1227" viewBox="0 0 1200 1227" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M714.163 519.284L1160.89 0H1055.03L667.137 450.887L357.328 0H0L468.492 681.821L0 1226.37H105.866L515.491 750.218L842.672 1226.37H1200L714.137 519.284H714.163ZM569.165 687.828L521.697 619.934L144.011 79.6944H306.615L611.412 515.685L658.88 583.579L1055.08 1150.3H892.476L569.165 687.854V687.828Z" fill="white"/>
|
||||
</svg>
|
||||
|
|
|
|||
|
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 430 B |
492
poetry.lock
generated
492
poetry.lock
generated
|
|
@ -261,13 +261,13 @@ extras = ["pyaudio (>=0.2.13)"]
|
|||
|
||||
[[package]]
|
||||
name = "astrapy"
|
||||
version = "1.2.0"
|
||||
version = "1.2.1"
|
||||
description = "AstraPy is a Pythonic SDK for DataStax Astra and its Data API"
|
||||
optional = false
|
||||
python-versions = "<4.0.0,>=3.8.0"
|
||||
files = [
|
||||
{file = "astrapy-1.2.0-py3-none-any.whl", hash = "sha256:5d65242771934c38ebe16f330e9e517968c1437846dabdbe7e48470f7b1782e8"},
|
||||
{file = "astrapy-1.2.0.tar.gz", hash = "sha256:6ce1b421d1ae21fe73373fa36048d8d56c775367886525504f01c48cbb742842"},
|
||||
{file = "astrapy-1.2.1-py3-none-any.whl", hash = "sha256:0d7ca1e6f18a6a4e9a41ffaf2aa4cc585d36de3e983b5c5ce0bbb30a1595e30b"},
|
||||
{file = "astrapy-1.2.1.tar.gz", hash = "sha256:c4ba88ef16ac1e990ccba322d376b6ea256513a3004a0894c14bfa2403f1d646"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -367,13 +367,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "bce-python-sdk"
|
||||
version = "0.9.11"
|
||||
version = "0.9.14"
|
||||
description = "BCE SDK for python"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,<4,>=2.7"
|
||||
files = [
|
||||
{file = "bce_python_sdk-0.9.11-py3-none-any.whl", hash = "sha256:3afb9717f6c0c5f5fe3104a8bea4c111bf2ab3fe87ae73b05492566bc2b5d11a"},
|
||||
{file = "bce_python_sdk-0.9.11.tar.gz", hash = "sha256:d9e977f059fef6466eebdbb34ad1e27b6f76ef90338807ab959693a78a761e7d"},
|
||||
{file = "bce_python_sdk-0.9.14-py3-none-any.whl", hash = "sha256:5704aa454151ee608b01ddda7531457433f9b4bb8afbd00706dd368f3b4339a1"},
|
||||
{file = "bce_python_sdk-0.9.14.tar.gz", hash = "sha256:7cbd182ec1e21034f10d3cdb812f3171d31908f1a783d6cf643039272942d8e8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -471,17 +471,17 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.34.119"
|
||||
version = "1.34.121"
|
||||
description = "The AWS SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "boto3-1.34.119-py3-none-any.whl", hash = "sha256:8f9c43c54b3dfaa36c4a0d7b42c417227a515bc7a2e163e62802780000a5a3e2"},
|
||||
{file = "boto3-1.34.119.tar.gz", hash = "sha256:cea2365a25b2b83a97e77f24ac6f922ef62e20636b42f9f6ee9f97188f9c1c03"},
|
||||
{file = "boto3-1.34.121-py3-none-any.whl", hash = "sha256:4e79e400d6d44b4eee5deda6ac0ecd08a3f5a30c45a0d30712795cdc4459fd79"},
|
||||
{file = "boto3-1.34.121.tar.gz", hash = "sha256:ec89f3e0b0dc959c418df29e14d3748c0b05ab7acf7c0b90c839e9f340a659fa"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.34.119,<1.35.0"
|
||||
botocore = ">=1.34.121,<1.35.0"
|
||||
jmespath = ">=0.7.1,<2.0.0"
|
||||
s3transfer = ">=0.10.0,<0.11.0"
|
||||
|
||||
|
|
@ -490,13 +490,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
|
|||
|
||||
[[package]]
|
||||
name = "botocore"
|
||||
version = "1.34.119"
|
||||
version = "1.34.121"
|
||||
description = "Low-level, data-driven core of boto 3."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "botocore-1.34.119-py3-none-any.whl", hash = "sha256:4bdf7926a1290b2650d62899ceba65073dd2693e61c35f5cdeb3a286a0aaa27b"},
|
||||
{file = "botocore-1.34.119.tar.gz", hash = "sha256:b253f15b24b87b070e176af48e8ef146516090429d30a7d8b136a4c079b28008"},
|
||||
{file = "botocore-1.34.121-py3-none-any.whl", hash = "sha256:25b05c7646a9f240cde1c8f839552a43f27e71e15c42600275dea93e219f7dd9"},
|
||||
{file = "botocore-1.34.121.tar.gz", hash = "sha256:1a8f94b917c47dfd84a0b531ab607dc53570efb0d073d8686600f2d2be985323"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -505,7 +505,7 @@ python-dateutil = ">=2.1,<3.0.0"
|
|||
urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
|
||||
|
||||
[package.extras]
|
||||
crt = ["awscrt (==0.20.9)"]
|
||||
crt = ["awscrt (==0.20.11)"]
|
||||
|
||||
[[package]]
|
||||
name = "brotli"
|
||||
|
|
@ -698,13 +698,13 @@ graph = ["gremlinpython (==3.4.6)"]
|
|||
|
||||
[[package]]
|
||||
name = "cassio"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
description = "A framework-agnostic Python library to seamlessly integrate Apache Cassandra(R) with ML/LLM/genAI workloads."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8"
|
||||
files = [
|
||||
{file = "cassio-0.1.7-py3-none-any.whl", hash = "sha256:08d1028a20d09bd207de0e17eaf7ae821b3c8e4788555e2d337aa440e0846d87"},
|
||||
{file = "cassio-0.1.7.tar.gz", hash = "sha256:44f705dff8a9a1c48527db2c9e968686358c960fa21ba940d9e66de00639ad78"},
|
||||
{file = "cassio-0.1.8-py3-none-any.whl", hash = "sha256:c09e7c884ba7227ff5277c86f3b0f31c523672ea407f56d093c7227e69c54d94"},
|
||||
{file = "cassio-0.1.8.tar.gz", hash = "sha256:4e09929506cb3dd6fad217e89846d0a1a59069afd24b82c72526ef6f2e9271af"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -1824,13 +1824,13 @@ develop = ["aiohttp", "furo", "httpx", "mock", "opentelemetry-api", "opentelemet
|
|||
|
||||
[[package]]
|
||||
name = "elasticsearch"
|
||||
version = "8.13.2"
|
||||
version = "8.14.0"
|
||||
description = "Python client for Elasticsearch"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "elasticsearch-8.13.2-py3-none-any.whl", hash = "sha256:7412ceae9c0e437a72854ab3123aa1f37110d1635cc645366988b8c0fee98598"},
|
||||
{file = "elasticsearch-8.13.2.tar.gz", hash = "sha256:d51c93431a459b2b7c6c919b6e92a2adc8ac712758de9aeeb16cd4997fc148ad"},
|
||||
{file = "elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130"},
|
||||
{file = "elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -2588,13 +2588,13 @@ uritemplate = ">=3.0.1,<5"
|
|||
|
||||
[[package]]
|
||||
name = "google-auth"
|
||||
version = "2.29.0"
|
||||
version = "2.30.0"
|
||||
description = "Google Authentication Library"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"},
|
||||
{file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"},
|
||||
{file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"},
|
||||
{file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -2626,13 +2626,13 @@ httplib2 = ">=0.19.0"
|
|||
|
||||
[[package]]
|
||||
name = "google-cloud-aiplatform"
|
||||
version = "1.53.0"
|
||||
version = "1.54.0"
|
||||
description = "Vertex AI API client library"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "google-cloud-aiplatform-1.53.0.tar.gz", hash = "sha256:574cfad8ac5fa5d57ef717f5335ce05636a5fa9b8aeea0f5c325b46b9448e6b1"},
|
||||
{file = "google_cloud_aiplatform-1.53.0-py2.py3-none-any.whl", hash = "sha256:9dfb1f110e6d4795b45afcfab79108fc5c8ed9aa4eaf899e433bc2ca1b76c778"},
|
||||
{file = "google-cloud-aiplatform-1.54.0.tar.gz", hash = "sha256:6f5187d35a32951028465804fbb42b478362bf41e2b634ddd22b150299f6e1d8"},
|
||||
{file = "google_cloud_aiplatform-1.54.0-py2.py3-none-any.whl", hash = "sha256:7b3ed849b9fb59a01bd6f44444ccbb7d18495b867a26f913542f6b2d4c3de252"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -2653,7 +2653,7 @@ autologging = ["mlflow (>=1.27.0,<=2.1.1)"]
|
|||
cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
||||
datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"]
|
||||
endpoint = ["requests (>=2.28.1)"]
|
||||
full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"]
|
||||
full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"]
|
||||
langchain = ["langchain (>=0.1.16,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)"]
|
||||
langchain-testing = ["absl-py", "cloudpickle (>=2.2.1,<4.0)", "langchain (>=0.1.16,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist"]
|
||||
lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"]
|
||||
|
|
@ -2663,11 +2663,11 @@ prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23
|
|||
preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"]
|
||||
private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"]
|
||||
rapid-evaluation = ["nest-asyncio (>=1.0.0,<1.6.0)", "pandas (>=1.0.0,<2.2.0)"]
|
||||
ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)"]
|
||||
ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "scikit-learn", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
|
||||
ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"]
|
||||
ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
|
||||
reasoningengine = ["cloudpickle (>=2.2.1,<4.0)", "pydantic (>=2.6.3,<3)"]
|
||||
tensorboard = ["tensorflow (>=2.3.0,<3.0.0dev)"]
|
||||
testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
|
||||
testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
|
||||
vizier = ["google-vizier (>=0.1.6)"]
|
||||
xai = ["tensorflow (>=2.3.0,<3.0.0dev)"]
|
||||
|
||||
|
|
@ -2929,13 +2929,13 @@ pydantic = ">=1.10,<3"
|
|||
|
||||
[[package]]
|
||||
name = "gprof2dot"
|
||||
version = "2022.7.29"
|
||||
version = "2024.6.6"
|
||||
description = "Generate a dot graph from the output of several profilers."
|
||||
optional = false
|
||||
python-versions = ">=2.7"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "gprof2dot-2022.7.29-py2.py3-none-any.whl", hash = "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"},
|
||||
{file = "gprof2dot-2022.7.29.tar.gz", hash = "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5"},
|
||||
{file = "gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696"},
|
||||
{file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3478,100 +3478,105 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "ijson"
|
||||
version = "3.2.3"
|
||||
version = "3.3.0"
|
||||
description = "Iterative JSON parser with standard Python iterator interfaces"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "ijson-3.2.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a4ae076bf97b0430e4e16c9cb635a6b773904aec45ed8dcbc9b17211b8569ba"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cfced0a6ec85916eb8c8e22415b7267ae118eaff2a860c42d2cc1261711d0d31"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b9d1141cfd1e6d6643aa0b4876730d0d28371815ce846d2e4e84a2d4f471cf3"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e0a27db6454edd6013d40a956d008361aac5bff375a9c04ab11fc8c214250b5"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0d526ccb335c3c13063c273637d8611f32970603dfb182177b232d01f14c23"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:545a30b3659df2a3481593d30d60491d1594bc8005f99600e1bba647bb44cbb5"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9680e37a10fedb3eab24a4a7e749d8a73f26f1a4c901430e7aa81b5da15f7307"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2a80c0bb1053055d1599e44dc1396f713e8b3407000e6390add72d49633ff3bb"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f05ed49f434ce396ddcf99e9fd98245328e99f991283850c309f5e3182211a79"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-win32.whl", hash = "sha256:b4eb2304573c9fdf448d3fa4a4fdcb727b93002b5c5c56c14a5ffbbc39f64ae4"},
|
||||
{file = "ijson-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:923131f5153c70936e8bd2dd9dcfcff43c67a3d1c789e9c96724747423c173eb"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:904f77dd3d87736ff668884fe5197a184748eb0c3e302ded61706501d0327465"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0974444c1f416e19de1e9f567a4560890095e71e81623c509feff642114c1e53"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1a4b8eb69b6d7b4e94170aa991efad75ba156b05f0de2a6cd84f991def12ff9"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d052417fd7ce2221114f8d3b58f05a83c1a2b6b99cafe0b86ac9ed5e2fc889df"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b8064a85ec1b0beda7dd028e887f7112670d574db606f68006c72dd0bb0e0e2"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaac293853f1342a8d2a45ac1f723c860f700860e7743fb97f7b76356df883a8"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6c32c18a934c1dc8917455b0ce478fd7a26c50c364bd52c5a4fb0fc6bb516af7"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:713a919e0220ac44dab12b5fed74f9130f3480e55e90f9d80f58de129ea24f83"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"},
|
||||
{file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = "sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"},
|
||||
{file = "ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85afdb3f3a5d0011584d4fa8e6dccc5936be51c27e84cd2882fe904ca3bd04c5"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4fc35d569eff3afa76bfecf533f818ecb9390105be257f3f83c03204661ace70"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:455d7d3b7a6aacfb8ab1ebcaf697eedf5be66e044eac32508fccdc633d995f0e"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c63f3d57dbbac56cead05b12b81e8e1e259f14ce7f233a8cbe7fa0996733b628"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-win32.whl", hash = "sha256:a4d7fe3629de3ecb088bff6dfe25f77be3e8261ed53d5e244717e266f8544305"},
|
||||
{file = "ijson-3.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:96190d59f015b5a2af388a98446e411f58ecc6a93934e036daa75f75d02386a0"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:35194e0b8a2bda12b4096e2e792efa5d4801a0abb950c48ade351d479cd22ba5"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1053fb5f0b010ee76ca515e6af36b50d26c1728ad46be12f1f147a835341083"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:211124cff9d9d139dd0dfced356f1472860352c055d2481459038b8205d7d742"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92dc4d48e9f6a271292d6079e9fcdce33c83d1acf11e6e12696fb05c5889fe74"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3dcc33ee56f92a77f48776014ddb47af67c33dda361e84371153c4f1ed4434e1"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98c6799925a5d1988da4cd68879b8eeab52c6e029acc45e03abb7921a4715c4b"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4252e48c95cd8ceefc2caade310559ab61c37d82dfa045928ed05328eb5b5f65"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-win32.whl", hash = "sha256:644f4f03349ff2731fd515afd1c91b9e439e90c9f8c28292251834154edbffca"},
|
||||
{file = "ijson-3.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:ba33c764afa9ecef62801ba7ac0319268a7526f50f7601370d9f8f04e77fc02b"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4b2ec8c2a3f1742cbd5f36b65e192028e541b5fd8c7fd97c1fc0ca6c427c704a"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7dc357da4b4ebd8903e77dbcc3ce0555ee29ebe0747c3c7f56adda423df8ec89"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bcc51c84bb220ac330122468fe526a7777faa6464e3b04c15b476761beea424f"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8d54b624629f9903005c58d9321a036c72f5c212701bbb93d1a520ecd15e370"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6ea7c7e3ec44742e867c72fd750c6a1e35b112f88a917615332c4476e718d40"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:916acdc5e504f8b66c3e287ada5d4b39a3275fc1f2013c4b05d1ab9933671a6c"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81815b4184b85ce124bfc4c446d5f5e5e643fc119771c5916f035220ada29974"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b49fd5fe1cd9c1c8caf6c59f82b08117dd6bea2ec45b641594e25948f48f4169"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:86b3c91fdcb8ffb30556c9669930f02b7642de58ca2987845b04f0d7fe46d9a8"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-win32.whl", hash = "sha256:a729b0c8fb935481afe3cf7e0dadd0da3a69cc7f145dbab8502e2f1e01d85a7c"},
|
||||
{file = "ijson-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:d34e049992d8a46922f96483e96b32ac4c9cffd01a5c33a928e70a283710cd58"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9c2a12dcdb6fa28f333bf10b3a0f80ec70bc45280d8435be7e19696fab2bc706"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1844c5b57da21466f255a0aeddf89049e730d7f3dfc4d750f0e65c36e6a61a7c"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ec3e5ff2515f1c40ef6a94983158e172f004cd643b9e4b5302017139b6c96e4"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46bafb1b9959872a1f946f8dd9c6f1a30a970fc05b7bfae8579da3f1f988e598"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab4db9fee0138b60e31b3c02fff8a4c28d7b152040553b6a91b60354aebd4b02"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4bc87e69d1997c6a55fff5ee2af878720801ff6ab1fb3b7f94adda050651e37"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9fd906f0c38e9f0bfd5365e1bed98d649f506721f76bb1a9baa5d7374f26f19"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e84d27d1acb60d9102728d06b9650e5b7e5cb0631bd6e3dfadba8fb6a80d6c2f"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2cc04fc0a22bb945cd179f614845c8b5106c0b3939ee0d84ce67c7a61ac1a936"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-win32.whl", hash = "sha256:e641814793a037175f7ec1b717ebb68f26d89d82cfd66f36e588f32d7e488d5f"},
|
||||
{file = "ijson-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:6bd3e7e91d031f1e8cea7ce53f704ab74e61e505e8072467e092172422728b22"},
|
||||
{file = "ijson-3.2.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06f9707da06a19b01013f8c65bf67db523662a9b4a4ff027e946e66c261f17f0"},
|
||||
{file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be8495f7c13fa1f622a2c6b64e79ac63965b89caf664cc4e701c335c652d15f2"},
|
||||
{file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7596b42f38c3dcf9d434dddd50f46aeb28e96f891444c2b4b1266304a19a2c09"},
|
||||
{file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbac4e9609a1086bbad075beb2ceec486a3b138604e12d2059a33ce2cba93051"},
|
||||
{file = "ijson-3.2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:db2d6341f9cb538253e7fe23311d59252f124f47165221d3c06a7ed667ecd595"},
|
||||
{file = "ijson-3.2.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fa8b98be298efbb2588f883f9953113d8a0023ab39abe77fe734b71b46b1220a"},
|
||||
{file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:674e585361c702fad050ab4c153fd168dc30f5980ef42b64400bc84d194e662d"},
|
||||
{file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd12e42b9cb9c0166559a3ffa276b4f9fc9d5b4c304e5a13668642d34b48b634"},
|
||||
{file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d31e0d771d82def80cd4663a66de277c3b44ba82cd48f630526b52f74663c639"},
|
||||
{file = "ijson-3.2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ce4c70c23521179d6da842bb9bc2e36bb9fad1e0187e35423ff0f282890c9ca"},
|
||||
{file = "ijson-3.2.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39f551a6fbeed4433c85269c7c8778e2aaea2501d7ebcb65b38f556030642c17"},
|
||||
{file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b14d322fec0de7af16f3ef920bf282f0dd747200b69e0b9628117f381b7775b"},
|
||||
{file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7851a341429b12d4527ca507097c959659baf5106c7074d15c17c387719ffbcd"},
|
||||
{file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db3bf1b42191b5cc9b6441552fdcb3b583594cb6b19e90d1578b7cbcf80d0fae"},
|
||||
{file = "ijson-3.2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6f662dc44362a53af3084d3765bb01cd7b4734d1f484a6095cad4cb0cbfe5374"},
|
||||
{file = "ijson-3.2.3.tar.gz", hash = "sha256:10294e9bf89cb713da05bc4790bdff616610432db561964827074898e174f917"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b73b493af9e947caed75d329676b1b801d673b17481962823a3e55fe529c8b8b"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5576415f3d76290b160aa093ff968f8bf6de7d681e16e463a0134106b506f49"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e9ffe358d5fdd6b878a8a364e96e15ca7ca57b92a48f588378cef315a8b019e"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8643c255a25824ddd0895c59f2319c019e13e949dc37162f876c41a283361527"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:df3ab5e078cab19f7eaeef1d5f063103e1ebf8c26d059767b26a6a0ad8b250a3"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3dc1fb02c6ed0bae1b4bf96971258bf88aea72051b6e4cebae97cff7090c0607"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e9afd97339fc5a20f0542c971f90f3ca97e73d3050cdc488d540b63fae45329a"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-win32.whl", hash = "sha256:844c0d1c04c40fd1b60f148dc829d3f69b2de789d0ba239c35136efe9a386529"},
|
||||
{file = "ijson-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:d654d045adafdcc6c100e8e911508a2eedbd2a1b5f93f930ba13ea67d7704ee9"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:501dce8eaa537e728aa35810656aa00460a2547dcb60937c8139f36ec344d7fc"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:658ba9cad0374d37b38c9893f4864f284cdcc7d32041f9808fba8c7bcaadf134"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2636cb8c0f1023ef16173f4b9a233bcdb1df11c400c603d5f299fac143ca8d70"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd174b90db68c3bcca273e9391934a25d76929d727dc75224bf244446b28b03b"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97a9aea46e2a8371c4cf5386d881de833ed782901ac9f67ebcb63bb3b7d115af"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c594c0abe69d9d6099f4ece17763d53072f65ba60b372d8ba6de8695ce6ee39e"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e0ff16c224d9bfe4e9e6bd0395826096cda4a3ef51e6c301e1b61007ee2bd24"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0015354011303175eae7e2ef5136414e91de2298e5a2e9580ed100b728c07e51"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034642558afa57351a0ffe6de89e63907c4cf6849070cc10a3b2542dccda1afe"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-win32.whl", hash = "sha256:192e4b65495978b0bce0c78e859d14772e841724d3269fc1667dc6d2f53cc0ea"},
|
||||
{file = "ijson-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:72e3488453754bdb45c878e31ce557ea87e1eb0f8b4fc610373da35e8074ce42"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:988e959f2f3d59ebd9c2962ae71b97c0df58323910d0b368cc190ad07429d1bb"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b2f73f0d0fce5300f23a1383d19b44d103bb113b57a69c36fd95b7c03099b181"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ee57a28c6bf523d7cb0513096e4eb4dac16cd935695049de7608ec110c2b751"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0155a8f079c688c2ccaea05de1ad69877995c547ba3d3612c1c336edc12a3a5"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ab00721304af1ae1afa4313ecfa1bf16b07f55ef91e4a5b93aeaa3e2bd7917c"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40ee3821ee90be0f0e95dcf9862d786a7439bd1113e370736bfdf197e9765bfb"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3b6987a0bc3e6d0f721b42c7a0198ef897ae50579547b0345f7f02486898f5"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:63afea5f2d50d931feb20dcc50954e23cef4127606cc0ecf7a27128ed9f9a9e6"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b5c3e285e0735fd8c5a26d177eca8b52512cdd8687ca86ec77a0c66e9c510182"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-win32.whl", hash = "sha256:907f3a8674e489abdcb0206723e5560a5cb1fa42470dcc637942d7b10f28b695"},
|
||||
{file = "ijson-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8f890d04ad33262d0c77ead53c85f13abfb82f2c8f078dfbf24b78f59534dfdd"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b9d85a02e77ee8ea6d9e3fd5d515bcc3d798d9c1ea54817e5feb97a9bc5d52fe"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6576cdc36d5a09b0c1a3d81e13a45d41a6763188f9eaae2da2839e8a4240bce"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5589225c2da4bb732c9c370c5961c39a6db72cf69fb2a28868a5413ed7f39e6"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad04cf38164d983e85f9cba2804566c0160b47086dcca4cf059f7e26c5ace8ca"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:a3b730ef664b2ef0e99dec01b6573b9b085c766400af363833e08ebc1e38eb2f"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:4690e3af7b134298055993fcbea161598d23b6d3ede11b12dca6815d82d101d5"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:aaa6bfc2180c31a45fac35d40e3312a3d09954638ce0b2e9424a88e24d262a13"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-win32.whl", hash = "sha256:44367090a5a876809eb24943f31e470ba372aaa0d7396b92b953dda953a95d14"},
|
||||
{file = "ijson-3.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7e2b3e9ca957153557d06c50a26abaf0d0d6c0ddf462271854c968277a6b5372"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47c144117e5c0e2babb559bc8f3f76153863b8dd90b2d550c51dab5f4b84a87f"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ce02af5fbf9ba6abb70765e66930aedf73311c7d840478f1ccecac53fefbf3"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac6c3eeed25e3e2cb9b379b48196413e40ac4e2239d910bb33e4e7f6c137745"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d92e339c69b585e7b1d857308ad3ca1636b899e4557897ccd91bb9e4a56c965b"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8c85447569041939111b8c7dbf6f8fa7a0eb5b2c4aebb3c3bec0fb50d7025121"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:542c1e8fddf082159a5d759ee1412c73e944a9a2412077ed00b303ff796907dc"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:30cfea40936afb33b57d24ceaf60d0a2e3d5c1f2335ba2623f21d560737cc730"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:6b661a959226ad0d255e49b77dba1d13782f028589a42dc3172398dd3814c797"},
|
||||
{file = "ijson-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0b003501ee0301dbf07d1597482009295e16d647bb177ce52076c2d5e64113e0"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e8d8de44effe2dbd0d8f3eb9840344b2d5b4cc284a14eb8678aec31d1b6bea8"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9cd5c03c63ae06d4f876b9844c5898d0044c7940ff7460db9f4cd984ac7862b5"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04366e7e4a4078d410845e58a2987fd9c45e63df70773d7b6e87ceef771b51ee"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de7c1ddb80fa7a3ab045266dca169004b93f284756ad198306533b792774f10a"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8851584fb931cffc0caa395f6980525fd5116eab8f73ece9d95e6f9c2c326c4c"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdcfc88347fd981e53c33d832ce4d3e981a0d696b712fbcb45dcc1a43fe65c65"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3917b2b3d0dbbe3296505da52b3cb0befbaf76119b2edaff30bd448af20b5400"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:e10c14535abc7ddf3fd024aa36563cd8ab5d2bb6234a5d22c77c30e30fa4fb2b"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3aba5c4f97f4e2ce854b5591a8b0711ca3b0c64d1b253b04ea7b004b0a197ef6"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-win32.whl", hash = "sha256:b325f42e26659df1a0de66fdb5cde8dd48613da9c99c07d04e9fb9e254b7ee1c"},
|
||||
{file = "ijson-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:ff835906f84451e143f31c4ce8ad73d83ef4476b944c2a2da91aec8b649570e1"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c556f5553368dff690c11d0a1fb435d4ff1f84382d904ccc2dc53beb27ba62e"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4396b55a364a03ff7e71a34828c3ed0c506814dd1f50e16ebed3fc447d5188e"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6850ae33529d1e43791b30575070670070d5fe007c37f5d06aebc1dd152ab3f"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36aa56d68ea8def26778eb21576ae13f27b4a47263a7a2581ab2ef58b8de4451"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7ec759c4a0fc820ad5dc6a58e9c391e7b16edcb618056baedbedbb9ea3b1524"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b51bab2c4e545dde93cb6d6bb34bf63300b7cd06716f195dd92d9255df728331"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:92355f95a0e4da96d4c404aa3cff2ff033f9180a9515f813255e1526551298c1"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8795e88adff5aa3c248c1edce932db003d37a623b5787669ccf205c422b91e4a"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8f83f553f4cde6d3d4eaf58ec11c939c94a0ec545c5b287461cafb184f4b3a14"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-win32.whl", hash = "sha256:ead50635fb56577c07eff3e557dac39533e0fe603000684eea2af3ed1ad8f941"},
|
||||
{file = "ijson-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:c8a9befb0c0369f0cf5c1b94178d0d78f66d9cebb9265b36be6e4f66236076b8"},
|
||||
{file = "ijson-3.3.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2af323a8aec8a50fa9effa6d640691a30a9f8c4925bd5364a1ca97f1ac6b9b5c"},
|
||||
{file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f64f01795119880023ba3ce43072283a393f0b90f52b66cc0ea1a89aa64a9ccb"},
|
||||
{file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a716e05547a39b788deaf22725490855337fc36613288aa8ae1601dc8c525553"},
|
||||
{file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473f5d921fadc135d1ad698e2697025045cd8ed7e5e842258295012d8a3bc702"},
|
||||
{file = "ijson-3.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd26b396bc3a1e85f4acebeadbf627fa6117b97f4c10b177d5779577c6607744"},
|
||||
{file = "ijson-3.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:25fd49031cdf5fd5f1fd21cb45259a64dad30b67e64f745cc8926af1c8c243d3"},
|
||||
{file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b72178b1e565d06ab19319965022b36ef41bcea7ea153b32ec31194bec032a2"},
|
||||
{file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d0b6b637d05dbdb29d0bfac2ed8425bb369e7af5271b0cc7cf8b801cb7360c2"},
|
||||
{file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378d0baa59ae422905c5f182ea0fd74fe7e52a23e3821067a7d58c8306b2191"},
|
||||
{file = "ijson-3.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:99f5c8ab048ee4233cc4f2b461b205cbe01194f6201018174ac269bf09995749"},
|
||||
{file = "ijson-3.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45ff05de889f3dc3d37a59d02096948ce470699f2368b32113954818b21aa74a"},
|
||||
{file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efb521090dd6cefa7aafd120581947b29af1713c902ff54336b7c7130f04c47"},
|
||||
{file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c727691858fd3a1c085d9980d12395517fcbbf02c69fbb22dede8ee03422da"},
|
||||
{file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0420c24e50389bc251b43c8ed379ab3e3ba065ac8262d98beb6735ab14844460"},
|
||||
{file = "ijson-3.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8fdf3721a2aa7d96577970f5604bd81f426969c1822d467f07b3d844fa2fecc7"},
|
||||
{file = "ijson-3.3.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:891f95c036df1bc95309951940f8eea8537f102fa65715cdc5aae20b8523813b"},
|
||||
{file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed1336a2a6e5c427f419da0154e775834abcbc8ddd703004108121c6dd9eba9d"},
|
||||
{file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0c819f83e4f7b7f7463b2dc10d626a8be0c85fbc7b3db0edc098c2b16ac968e"},
|
||||
{file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33afc25057377a6a43c892de34d229a86f89ea6c4ca3dd3db0dcd17becae0dbb"},
|
||||
{file = "ijson-3.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7914d0cf083471856e9bc2001102a20f08e82311dfc8cf1a91aa422f9414a0d6"},
|
||||
{file = "ijson-3.3.0.tar.gz", hash = "sha256:7f172e6ba1bee0d4c8f8ebd639577bfe429dee0f3f96775a067b8bae4492d8a0"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -4026,13 +4031,13 @@ zookeeper = ["kazoo (>=2.8.0)"]
|
|||
|
||||
[[package]]
|
||||
name = "kubernetes"
|
||||
version = "29.0.0"
|
||||
version = "30.1.0"
|
||||
description = "Kubernetes python client"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "kubernetes-29.0.0-py2.py3-none-any.whl", hash = "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e"},
|
||||
{file = "kubernetes-29.0.0.tar.gz", hash = "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459"},
|
||||
{file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"},
|
||||
{file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4052,13 +4057,13 @@ adal = ["adal (>=1.0.2)"]
|
|||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.2.2"
|
||||
version = "0.2.3"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain-0.2.2-py3-none-any.whl", hash = "sha256:58ca0c47bcdd156da66f50a0a4fcedc49bf6950827f4a6b06c8c4842d55805f3"},
|
||||
{file = "langchain-0.2.2.tar.gz", hash = "sha256:9d61e50e9cdc2bea659bc5e6c03650ba048fda63a307490ae368e539f61a0d3a"},
|
||||
{file = "langchain-0.2.3-py3-none-any.whl", hash = "sha256:5dc33cd9c8008693d328b7cb698df69073acecc89ad9c2a95f243b3314f8d834"},
|
||||
{file = "langchain-0.2.3.tar.gz", hash = "sha256:81962cc72cce6515f7bd71e01542727870789bf8b666c6913d85559080c1a201"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4074,20 +4079,6 @@ requests = ">=2,<3"
|
|||
SQLAlchemy = ">=1.4,<3"
|
||||
tenacity = ">=8.1.0,<9.0.0"
|
||||
|
||||
[package.extras]
|
||||
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"]
|
||||
clarifai = ["clarifai (>=9.1.0)"]
|
||||
cli = ["typer (>=0.9.0,<0.10.0)"]
|
||||
cohere = ["cohere (>=4,<6)"]
|
||||
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
|
||||
embeddings = ["sentence-transformers (>=2,<3)"]
|
||||
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.1,<0.2)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata 
(>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
|
||||
javascript = ["esprima (>=4.0.1,<5.0.0)"]
|
||||
llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
|
||||
openai = ["openai (<2)", "tiktoken (>=0.7,<1.0)"]
|
||||
qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
|
||||
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-anthropic"
|
||||
version = "0.1.15"
|
||||
|
|
@ -4154,13 +4145,13 @@ langchain-core = ">=0.1.42,<0.3"
|
|||
|
||||
[[package]]
|
||||
name = "langchain-community"
|
||||
version = "0.2.2"
|
||||
version = "0.2.4"
|
||||
description = "Community contributed LangChain integrations."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_community-0.2.2-py3-none-any.whl", hash = "sha256:470ee16e05f1acacb91a656b6d3c2cbf6fb6a8dcb00a13901cd1353cd29c2bb3"},
|
||||
{file = "langchain_community-0.2.2.tar.gz", hash = "sha256:fb09faf4640726a929932056dc55ff120e490aaf2e424fae8ddbb15605195447"},
|
||||
{file = "langchain_community-0.2.4-py3-none-any.whl", hash = "sha256:8582e9800f4837660dc297cccd2ee1ddc1d8c440d0fe8b64edb07620f0373b0e"},
|
||||
{file = "langchain_community-0.2.4.tar.gz", hash = "sha256:2bb6a1a36b8500a564d25d76469c02457b1a7c3afea6d4a609a47c06b993e3e4"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4175,19 +4166,15 @@ requests = ">=2,<3"
|
|||
SQLAlchemy = ">=1.4,<3"
|
||||
tenacity = ">=8.1.0,<9.0.0"
|
||||
|
||||
[package.extras]
|
||||
cli = ["typer (>=0.9.0,<0.10.0)"]
|
||||
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpathlib (>=0.18,<0.19)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark 
(>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "simsimd (>=4.3.1,<5.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.2.4"
|
||||
version = "0.2.5"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_core-0.2.4-py3-none-any.whl", hash = "sha256:5212f7ec78a525e88a178ed3aefe2fd7134b03fb92573dfbab9914f1d92d6ec5"},
|
||||
{file = "langchain_core-0.2.4.tar.gz", hash = "sha256:82bdcc546eb0341cefcf1f4ecb3e49836fff003903afddda2d1312bb8491ef81"},
|
||||
{file = "langchain_core-0.2.5-py3-none-any.whl", hash = "sha256:abe5138f22acff23a079ec538be5268bbf97cf023d51987a0dd474d2a16cae3e"},
|
||||
{file = "langchain_core-0.2.5.tar.gz", hash = "sha256:4a5c2f56b22396a63ef4790043660e393adbfa6832b978f023ca996a04b8e752"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4198,9 +4185,6 @@ pydantic = ">=1,<3"
|
|||
PyYAML = ">=5.3"
|
||||
tenacity = ">=8.1.0,<9.0.0"
|
||||
|
||||
[package.extras]
|
||||
extended-testing = ["jinja2 (>=3,<4)"]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-experimental"
|
||||
version = "0.0.60"
|
||||
|
|
@ -4221,37 +4205,37 @@ extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "pandas (>=2.
|
|||
|
||||
[[package]]
|
||||
name = "langchain-google-genai"
|
||||
version = "1.0.5"
|
||||
version = "1.0.6"
|
||||
description = "An integration package connecting Google's genai package and LangChain"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
files = [
|
||||
{file = "langchain_google_genai-1.0.5-py3-none-any.whl", hash = "sha256:06b1af072e14fe2d4f9257be4bf883ccd544896094f847c2b1ab09b123ba3b9e"},
|
||||
{file = "langchain_google_genai-1.0.5.tar.gz", hash = "sha256:5b515192755fd396a1b61b33d1b08c77fb9b53394cc25954f9d7e9a0f615de9b"},
|
||||
{file = "langchain_google_genai-1.0.6-py3-none-any.whl", hash = "sha256:65188b3c2867efda78e09c29371499ab0d25c6a111b175365fdae2b5be1502e6"},
|
||||
{file = "langchain_google_genai-1.0.6.tar.gz", hash = "sha256:7c964117fa385c490b323ee50ab46907229823d3678b80bfacc8fa0a237fb0b9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
google-generativeai = ">=0.5.2,<0.6.0"
|
||||
langchain-core = ">=0.2.0,<0.3"
|
||||
langchain-core = ">=0.2.2,<0.3"
|
||||
|
||||
[package.extras]
|
||||
images = ["pillow (>=10.1.0,<11.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-google-vertexai"
|
||||
version = "1.0.4"
|
||||
version = "1.0.5"
|
||||
description = "An integration package connecting Google VertexAI and LangChain"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_google_vertexai-1.0.4-py3-none-any.whl", hash = "sha256:f9d217df2d5cfafb2e551ddd5f1c43611222f542ee0df0cc3b5faed82e657ee3"},
|
||||
{file = "langchain_google_vertexai-1.0.4.tar.gz", hash = "sha256:bb2d2e93cc2896b9bdc96789c2df247f6392184dffc0c3dddc06889f2b530465"},
|
||||
{file = "langchain_google_vertexai-1.0.5-py3-none-any.whl", hash = "sha256:38f4a39bf35927d744d0883907c4d4a59eef059e9b36f28bb5c737c2aae6963b"},
|
||||
{file = "langchain_google_vertexai-1.0.5.tar.gz", hash = "sha256:50005dc12ff9d66bbbab9e1ab660574b1584eee3e7b5a647dc8a009a94f0c500"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
google-cloud-aiplatform = ">=1.47.0,<2.0.0"
|
||||
google-cloud-storage = ">=2.14.0,<3.0.0"
|
||||
langchain-core = ">=0.1.42,<0.3"
|
||||
langchain-core = ">=0.2.2,<0.3"
|
||||
|
||||
[package.extras]
|
||||
anthropic = ["anthropic[vertexai] (>=0.23.0,<1)"]
|
||||
|
|
@ -4354,7 +4338,7 @@ types-requests = ">=2.31.0.2,<3.0.0.0"
|
|||
|
||||
[[package]]
|
||||
name = "langflow-base"
|
||||
version = "0.0.56"
|
||||
version = "0.0.60"
|
||||
description = "A Python package with a built-in web application"
|
||||
optional = false
|
||||
python-versions = ">=3.10,<3.13"
|
||||
|
|
@ -4411,13 +4395,13 @@ url = "src/backend/base"
|
|||
|
||||
[[package]]
|
||||
name = "langfuse"
|
||||
version = "2.33.1"
|
||||
version = "2.35.0"
|
||||
description = "A client library for accessing langfuse"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langfuse-2.33.1-py3-none-any.whl", hash = "sha256:61ff3ff4b9c9c195028c981cba892106fdf90028e3950209a15f0ae06a378a36"},
|
||||
{file = "langfuse-2.33.1.tar.gz", hash = "sha256:444a870e8b13ad37df710931389ecd3bad9997e550edf3c3178b5a0bd7ada013"},
|
||||
{file = "langfuse-2.35.0-py3-none-any.whl", hash = "sha256:e9df2474a01f8e167b7b13674c554915415b27064e48ad207054475f7fa8f82d"},
|
||||
{file = "langfuse-2.35.0.tar.gz", hash = "sha256:b1d4b478233eefbc8a6fc63ca00ca82f6afecf2b0fdc1835ca65e751cf901577"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4435,13 +4419,13 @@ openai = ["openai (>=0.27.8)"]
|
|||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.1.71"
|
||||
version = "0.1.75"
|
||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langsmith-0.1.71-py3-none-any.whl", hash = "sha256:a9979de2780442eb24eced31314e49f5ece6f807a0d70740b2c6c39217226794"},
|
||||
{file = "langsmith-0.1.71.tar.gz", hash = "sha256:bdb1037a08acf7c19b3969c085df09c1eecb65baca8400b3b76ae871e2c8a97e"},
|
||||
{file = "langsmith-0.1.75-py3-none-any.whl", hash = "sha256:d08b08dd6b3fa4da170377f95123d77122ef4c52999d10fff4ae08ff70d07aed"},
|
||||
{file = "langsmith-0.1.75.tar.gz", hash = "sha256:61274e144ea94c297dd78ce03e6dfae18459fe9bd8ab5094d61a0c4816561279"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4451,13 +4435,13 @@ requests = ">=2,<3"
|
|||
|
||||
[[package]]
|
||||
name = "litellm"
|
||||
version = "1.40.2"
|
||||
version = "1.40.4"
|
||||
description = "Library to easily interface with LLM API providers"
|
||||
optional = false
|
||||
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
|
||||
files = [
|
||||
{file = "litellm-1.40.2-py3-none-any.whl", hash = "sha256:56ee777eed30ee9acb86e74401d090dcac4adb57b5c8a8714f791b0c97a34afc"},
|
||||
{file = "litellm-1.40.2.tar.gz", hash = "sha256:1f5dc4eab7100962c3a2985c7d8c13070ff5793b341540d19b98a2bd85955cb0"},
|
||||
{file = "litellm-1.40.4-py3-none-any.whl", hash = "sha256:b3b8e4401f717c3a18595446bfdb80fc6bb74974aac4eae537fb7b3be37fbf9e"},
|
||||
{file = "litellm-1.40.4.tar.gz", hash = "sha256:3edaa1189742afd7c7df2b122f77373d47154a8fb6df6187ff5875e188baa3e1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -4828,13 +4812,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "marshmallow"
|
||||
version = "3.21.2"
|
||||
version = "3.21.3"
|
||||
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "marshmallow-3.21.2-py3-none-any.whl", hash = "sha256:70b54a6282f4704d12c0a41599682c5c5450e843b9ec406308653b47c59648a1"},
|
||||
{file = "marshmallow-3.21.2.tar.gz", hash = "sha256:82408deadd8b33d56338d2182d455db632c6313aa2af61916672146bb32edc56"},
|
||||
{file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"},
|
||||
{file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -5615,13 +5599,13 @@ sympy = "*"
|
|||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "1.31.0"
|
||||
version = "1.32.0"
|
||||
description = "The official Python library for the openai API"
|
||||
optional = false
|
||||
python-versions = ">=3.7.1"
|
||||
files = [
|
||||
{file = "openai-1.31.0-py3-none-any.whl", hash = "sha256:82044ee3122113f2a468a1f308a8882324d09556ba5348687c535d3655ee331c"},
|
||||
{file = "openai-1.31.0.tar.gz", hash = "sha256:54ae0625b005d6a3b895db2b8438dae1059cffff0cd262a26e9015c13a29ab06"},
|
||||
{file = "openai-1.32.0-py3-none-any.whl", hash = "sha256:953d57669f309002044fd2f678aba9f07a43256d74b3b00cd04afb5b185568ea"},
|
||||
{file = "openai-1.32.0.tar.gz", hash = "sha256:a6df15a7ab9344b1bc2bc8d83639f68b7a7e2453c0f5e50c1666547eee86f0bd"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -6869,13 +6853,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
|
|||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.3.0"
|
||||
version = "2.3.1"
|
||||
description = "Settings management using Pydantic"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pydantic_settings-2.3.0-py3-none-any.whl", hash = "sha256:26eeed27370a9c5e3f64e4a7d6602573cbedf05ed940f1d5b11c3f178427af7a"},
|
||||
{file = "pydantic_settings-2.3.0.tar.gz", hash = "sha256:78db28855a71503cfe47f39500a1dece523c640afd5280edb5c5c9c9cfa534c9"},
|
||||
{file = "pydantic_settings-2.3.1-py3-none-any.whl", hash = "sha256:acb2c213140dfff9669f4fe9f8180d43914f51626db28ab2db7308a576cce51a"},
|
||||
{file = "pydantic_settings-2.3.1.tar.gz", hash = "sha256:e34bbd649803a6bb3e2f0f58fb0edff1f0c7f556849fda106cc21bcce12c30ab"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -7607,13 +7591,13 @@ websockets = ">=11,<13"
|
|||
|
||||
[[package]]
|
||||
name = "redis"
|
||||
version = "5.0.4"
|
||||
version = "5.0.5"
|
||||
description = "Python client for Redis database and key-value store"
|
||||
optional = true
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "redis-5.0.4-py3-none-any.whl", hash = "sha256:7adc2835c7a9b5033b7ad8f8918d09b7344188228809c98df07af226d39dec91"},
|
||||
{file = "redis-5.0.4.tar.gz", hash = "sha256:ec31f2ed9675cc54c21ba854cfe0462e6faf1d83c8ce5944709db8a4700b9c61"},
|
||||
{file = "redis-5.0.5-py3-none-any.whl", hash = "sha256:30b47d4ebb6b7a0b9b40c1275a19b87bb6f46b3bed82a89012cf56dea4024ada"},
|
||||
{file = "redis-5.0.5.tar.gz", hash = "sha256:3417688621acf6ee368dec4a04dd95881be24efd34c79f00d31f62bb528800ae"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -7798,28 +7782,28 @@ pyasn1 = ">=0.1.3"
|
|||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.4.7"
|
||||
version = "0.4.8"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "ruff-0.4.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e089371c67892a73b6bb1525608e89a2aca1b77b5440acf7a71dda5dac958f9e"},
|
||||
{file = "ruff-0.4.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:10f973d521d910e5f9c72ab27e409e839089f955be8a4c8826601a6323a89753"},
|
||||
{file = "ruff-0.4.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59c3d110970001dfa494bcd95478e62286c751126dfb15c3c46e7915fc49694f"},
|
||||
{file = "ruff-0.4.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa9773c6c00f4958f73b317bc0fd125295110c3776089f6ef318f4b775f0abe4"},
|
||||
{file = "ruff-0.4.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07fc80bbb61e42b3b23b10fda6a2a0f5a067f810180a3760c5ef1b456c21b9db"},
|
||||
{file = "ruff-0.4.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:fa4dafe3fe66d90e2e2b63fa1591dd6e3f090ca2128daa0be33db894e6c18648"},
|
||||
{file = "ruff-0.4.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7c0083febdec17571455903b184a10026603a1de078428ba155e7ce9358c5f6"},
|
||||
{file = "ruff-0.4.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad1b20e66a44057c326168437d680a2166c177c939346b19c0d6b08a62a37589"},
|
||||
{file = "ruff-0.4.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf5d818553add7511c38b05532d94a407f499d1a76ebb0cad0374e32bc67202"},
|
||||
{file = "ruff-0.4.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:50e9651578b629baec3d1513b2534de0ac7ed7753e1382272b8d609997e27e83"},
|
||||
{file = "ruff-0.4.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8874a9df7766cb956b218a0a239e0a5d23d9e843e4da1e113ae1d27ee420877a"},
|
||||
{file = "ruff-0.4.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b9de9a6e49f7d529decd09381c0860c3f82fa0b0ea00ea78409b785d2308a567"},
|
||||
{file = "ruff-0.4.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:13a1768b0691619822ae6d446132dbdfd568b700ecd3652b20d4e8bc1e498f78"},
|
||||
{file = "ruff-0.4.7-py3-none-win32.whl", hash = "sha256:769e5a51df61e07e887b81e6f039e7ed3573316ab7dd9f635c5afaa310e4030e"},
|
||||
{file = "ruff-0.4.7-py3-none-win_amd64.whl", hash = "sha256:9e3ab684ad403a9ed1226894c32c3ab9c2e0718440f6f50c7c5829932bc9e054"},
|
||||
{file = "ruff-0.4.7-py3-none-win_arm64.whl", hash = "sha256:10f2204b9a613988e3484194c2c9e96a22079206b22b787605c255f130db5ed7"},
|
||||
{file = "ruff-0.4.7.tar.gz", hash = "sha256:2331d2b051dc77a289a653fcc6a42cce357087c5975738157cd966590b18b5e1"},
|
||||
{file = "ruff-0.4.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7663a6d78f6adb0eab270fa9cf1ff2d28618ca3a652b60f2a234d92b9ec89066"},
|
||||
{file = "ruff-0.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eeceb78da8afb6de0ddada93112869852d04f1cd0f6b80fe464fd4e35c330913"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aad360893e92486662ef3be0a339c5ca3c1b109e0134fcd37d534d4be9fb8de3"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:284c2e3f3396fb05f5f803c9fffb53ebbe09a3ebe7dda2929ed8d73ded736deb"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7354f921e3fbe04d2a62d46707e569f9315e1a613307f7311a935743c51a764"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:72584676164e15a68a15778fd1b17c28a519e7a0622161eb2debdcdabdc71883"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9678d5c9b43315f323af2233a04d747409d1e3aa6789620083a82d1066a35199"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704977a658131651a22b5ebeb28b717ef42ac6ee3b11e91dc87b633b5d83142b"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05f8d6f0c3cce5026cecd83b7a143dcad503045857bc49662f736437380ad45"},
|
||||
{file = "ruff-0.4.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6ea874950daca5697309d976c9afba830d3bf0ed66887481d6bca1673fc5b66a"},
|
||||
{file = "ruff-0.4.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fc95aac2943ddf360376be9aa3107c8cf9640083940a8c5bd824be692d2216dc"},
|
||||
{file = "ruff-0.4.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:384154a1c3f4bf537bac69f33720957ee49ac8d484bfc91720cc94172026ceed"},
|
||||
{file = "ruff-0.4.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9d5ce97cacc99878aa0d084c626a15cd21e6b3d53fd6f9112b7fc485918e1fa"},
|
||||
{file = "ruff-0.4.8-py3-none-win32.whl", hash = "sha256:6d795d7639212c2dfd01991259460101c22aabf420d9b943f153ab9d9706e6a9"},
|
||||
{file = "ruff-0.4.8-py3-none-win_amd64.whl", hash = "sha256:e14a3a095d07560a9d6769a72f781d73259655919d9b396c650fc98a8157555d"},
|
||||
{file = "ruff-0.4.8-py3-none-win_arm64.whl", hash = "sha256:14019a06dbe29b608f6b7cbcec300e3170a8d86efaddb7b23405cb7f7dcaf780"},
|
||||
{file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -8356,13 +8340,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7
|
|||
|
||||
[[package]]
|
||||
name = "storage3"
|
||||
version = "0.7.5"
|
||||
version = "0.7.6"
|
||||
description = "Supabase Storage client for Python."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8"
|
||||
files = [
|
||||
{file = "storage3-0.7.5-py3-none-any.whl", hash = "sha256:a2d9fdacafdcbcdb6776a54987a7d84c3e3195a5e4782955c4ccfb36cb021f14"},
|
||||
{file = "storage3-0.7.5.tar.gz", hash = "sha256:ffe43f3877898b43a94024e68c2aaf4cebb3ad73dbbbd67747041d1d70bbf032"},
|
||||
{file = "storage3-0.7.6-py3-none-any.whl", hash = "sha256:d8c23bf87b3a88cafb03761b7f936e4e49daca67741d571513edf746e0f8ba72"},
|
||||
{file = "storage3-0.7.6.tar.gz", hash = "sha256:0b7781cea7fe6382e6b9349b84395808c5f4203dfcac31478304eedc2f81acf6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -8424,13 +8408,13 @@ supafunc = ">=0.3.1,<0.5.0"
|
|||
|
||||
[[package]]
|
||||
name = "supafunc"
|
||||
version = "0.4.5"
|
||||
version = "0.4.6"
|
||||
description = "Library for Supabase Functions"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8"
|
||||
files = [
|
||||
{file = "supafunc-0.4.5-py3-none-any.whl", hash = "sha256:2208045f8f5c797924666f6a332efad75ad368f8030b2e4ceb9d2bf63f329373"},
|
||||
{file = "supafunc-0.4.5.tar.gz", hash = "sha256:a6466d78bdcaa58b7f0303793643103baae8106a87acd5d01e196179a9d0d024"},
|
||||
{file = "supafunc-0.4.6-py3-none-any.whl", hash = "sha256:f7ca7b244365e171da7055a64edb462c2ec449cdaa210fc418cfccd132f4cf98"},
|
||||
{file = "supafunc-0.4.6.tar.gz", hash = "sha256:92db51f8f8568d1430285219c9c0072e44207409c416622d7387f609e31928a6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -8696,31 +8680,31 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "torch"
|
||||
version = "2.3.0"
|
||||
version = "2.3.1"
|
||||
description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
|
||||
optional = true
|
||||
python-versions = ">=3.8.0"
|
||||
files = [
|
||||
{file = "torch-2.3.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:d8ea5a465dbfd8501f33c937d1f693176c9aef9d1c1b0ca1d44ed7b0a18c52ac"},
|
||||
{file = "torch-2.3.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09c81c5859a5b819956c6925a405ef1cdda393c9d8a01ce3851453f699d3358c"},
|
||||
{file = "torch-2.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:1bf023aa20902586f614f7682fedfa463e773e26c58820b74158a72470259459"},
|
||||
{file = "torch-2.3.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:758ef938de87a2653bba74b91f703458c15569f1562bf4b6c63c62d9c5a0c1f5"},
|
||||
{file = "torch-2.3.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:493d54ee2f9df100b5ce1d18c96dbb8d14908721f76351e908c9d2622773a788"},
|
||||
{file = "torch-2.3.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:bce43af735c3da16cc14c7de2be7ad038e2fbf75654c2e274e575c6c05772ace"},
|
||||
{file = "torch-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:729804e97b7cf19ae9ab4181f91f5e612af07956f35c8b2c8e9d9f3596a8e877"},
|
||||
{file = "torch-2.3.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:d24e328226d8e2af7cf80fcb1d2f1d108e0de32777fab4aaa2b37b9765d8be73"},
|
||||
{file = "torch-2.3.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:b0de2bdc0486ea7b14fc47ff805172df44e421a7318b7c4d92ef589a75d27410"},
|
||||
{file = "torch-2.3.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a306c87a3eead1ed47457822c01dfbd459fe2920f2d38cbdf90de18f23f72542"},
|
||||
{file = "torch-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:f9b98bf1a3c8af2d4c41f0bf1433920900896c446d1ddc128290ff146d1eb4bd"},
|
||||
{file = "torch-2.3.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:dca986214267b34065a79000cee54232e62b41dff1ec2cab9abc3fc8b3dee0ad"},
|
||||
{file = "torch-2.3.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:20572f426965dd8a04e92a473d7e445fa579e09943cc0354f3e6fef6130ce061"},
|
||||
{file = "torch-2.3.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e65ba85ae292909cde0dde6369826d51165a3fc8823dc1854cd9432d7f79b932"},
|
||||
{file = "torch-2.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:5515503a193781fd1b3f5c474e89c9dfa2faaa782b2795cc4a7ab7e67de923f6"},
|
||||
{file = "torch-2.3.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:6ae9f64b09516baa4ef890af0672dc981c20b1f0d829ce115d4420a247e88fba"},
|
||||
{file = "torch-2.3.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cd0dc498b961ab19cb3f8dbf0c6c50e244f2f37dbfa05754ab44ea057c944ef9"},
|
||||
{file = "torch-2.3.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e05f836559251e4096f3786ee99f4a8cbe67bc7fbedba8ad5e799681e47c5e80"},
|
||||
{file = "torch-2.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:4fb27b35dbb32303c2927da86e27b54a92209ddfb7234afb1949ea2b3effffea"},
|
||||
{file = "torch-2.3.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:760f8bedff506ce9e6e103498f9b1e9e15809e008368594c3a66bf74a8a51380"},
|
||||
{file = "torch-2.3.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:605a25b23944be5ab7c3467e843580e1d888b8066e5aaf17ff7bf9cc30001cc3"},
|
||||
{file = "torch-2.3.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f2357eb0965583a0954d6f9ad005bba0091f956aef879822274b1bcdb11bd308"},
|
||||
{file = "torch-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:32b05fe0d1ada7f69c9f86c14ff69b0ef1957a5a54199bacba63d22d8fab720b"},
|
||||
{file = "torch-2.3.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:7c09a94362778428484bcf995f6004b04952106aee0ef45ff0b4bab484f5498d"},
|
||||
{file = "torch-2.3.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:b2ec81b61bb094ea4a9dee1cd3f7b76a44555375719ad29f05c0ca8ef596ad39"},
|
||||
{file = "torch-2.3.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:490cc3d917d1fe0bd027057dfe9941dc1d6d8e3cae76140f5dd9a7e5bc7130ab"},
|
||||
{file = "torch-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:5802530783bd465fe66c2df99123c9a54be06da118fbd785a25ab0a88123758a"},
|
||||
{file = "torch-2.3.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:a7dd4ed388ad1f3d502bf09453d5fe596c7b121de7e0cfaca1e2017782e9bbac"},
|
||||
{file = "torch-2.3.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:a486c0b1976a118805fc7c9641d02df7afbb0c21e6b555d3bb985c9f9601b61a"},
|
||||
{file = "torch-2.3.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:224259821fe3e4c6f7edf1528e4fe4ac779c77addaa74215eb0b63a5c474d66c"},
|
||||
{file = "torch-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:e5fdccbf6f1334b2203a61a0e03821d5845f1421defe311dabeae2fc8fbeac2d"},
|
||||
{file = "torch-2.3.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:3c333dc2ebc189561514eda06e81df22bf8fb64e2384746b2cb9f04f96d1d4c8"},
|
||||
{file = "torch-2.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:07e9ba746832b8d069cacb45f312cadd8ad02b81ea527ec9766c0e7404bb3feb"},
|
||||
{file = "torch-2.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:462d1c07dbf6bb5d9d2f3316fee73a24f3d12cd8dacf681ad46ef6418f7f6626"},
|
||||
{file = "torch-2.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff60bf7ce3de1d43ad3f6969983f321a31f0a45df3690921720bcad6a8596cc4"},
|
||||
{file = "torch-2.3.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:bee0bd33dc58aa8fc8a7527876e9b9a0e812ad08122054a5bff2ce5abf005b10"},
|
||||
{file = "torch-2.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:aaa872abde9a3d4f91580f6396d54888620f4a0b92e3976a6034759df4b961ad"},
|
||||
{file = "torch-2.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3d7a7f7ef21a7520510553dc3938b0c57c116a7daee20736a9e25cbc0e832bdc"},
|
||||
{file = "torch-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:4777f6cefa0c2b5fa87223c213e7b6f417cf254a45e5829be4ccd1b2a4ee1011"},
|
||||
{file = "torch-2.3.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:2bb5af780c55be68fe100feb0528d2edebace1d55cb2e351de735809ba7391eb"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -8741,7 +8725,7 @@ nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"
|
|||
nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
|
||||
nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
|
||||
sympy = "*"
|
||||
triton = {version = "2.3.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""}
|
||||
triton = {version = "2.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""}
|
||||
typing-extensions = ">=4.8.0"
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -8750,22 +8734,22 @@ optree = ["optree (>=0.9.1)"]
|
|||
|
||||
[[package]]
|
||||
name = "tornado"
|
||||
version = "6.4"
|
||||
version = "6.4.1"
|
||||
description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
|
||||
optional = false
|
||||
python-versions = ">= 3.8"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"},
|
||||
{file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"},
|
||||
{file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"},
|
||||
{file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"},
|
||||
{file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"},
|
||||
{file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"},
|
||||
{file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"},
|
||||
{file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"},
|
||||
{file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"},
|
||||
{file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"},
|
||||
{file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"},
|
||||
{file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"},
|
||||
{file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -8873,17 +8857,17 @@ vision = ["Pillow (>=10.0.1,<=15.0)"]
|
|||
|
||||
[[package]]
|
||||
name = "triton"
|
||||
version = "2.3.0"
|
||||
version = "2.3.1"
|
||||
description = "A language and compiler for custom Deep Learning operations"
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "triton-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ce4b8ff70c48e47274c66f269cce8861cf1dc347ceeb7a67414ca151b1822d8"},
|
||||
{file = "triton-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c3d9607f85103afdb279938fc1dd2a66e4f5999a58eb48a346bd42738f986dd"},
|
||||
{file = "triton-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:218d742e67480d9581bafb73ed598416cc8a56f6316152e5562ee65e33de01c0"},
|
||||
{file = "triton-2.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381ec6b3dac06922d3e4099cfc943ef032893b25415de295e82b1a82b0359d2c"},
|
||||
{file = "triton-2.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038e06a09c06a164fef9c48de3af1e13a63dc1ba3c792871e61a8e79720ea440"},
|
||||
{file = "triton-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8f636e0341ac348899a47a057c3daea99ea7db31528a225a3ba4ded28ccc65"},
|
||||
{file = "triton-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c84595cbe5e546b1b290d2a58b1494df5a2ef066dd890655e5b8a8a92205c33"},
|
||||
{file = "triton-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d64ae33bcb3a7a18081e3a746e8cf87ca8623ca13d2c362413ce7a486f893e"},
|
||||
{file = "triton-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf80e8761a9e3498aa92e7bf83a085b31959c61f5e8ac14eedd018df6fccd10"},
|
||||
{file = "triton-2.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b13bf35a2b659af7159bf78e92798dc62d877aa991de723937329e2d382f1991"},
|
||||
{file = "triton-2.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63381e35ded3304704ea867ffde3b7cfc42c16a55b3062d41e017ef510433d66"},
|
||||
{file = "triton-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d968264523c7a07911c8fb51b4e0d1b920204dae71491b1fe7b01b62a31e124"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -10097,4 +10081,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.10,<3.13"
|
||||
content-hash = "8fd6622a9bdd88dcac9ab9ca8136b6b7c7939f0abeb6cd3f4ee754cc38527648"
|
||||
content-hash = "2ba268be17a69253c9631ec721ece465a85a22949c2df7c712b7aa12d1a002fa"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "langflow"
|
||||
version = "1.0.0a45"
|
||||
version = "1.0.0a49"
|
||||
description = "A Python package with a built-in web application"
|
||||
authors = ["Langflow <contact@langflow.org>"]
|
||||
maintainers = [
|
||||
|
|
@ -66,7 +66,7 @@ qianfan = "0.3.5"
|
|||
pgvector = "^0.2.3"
|
||||
pyautogen = "^0.2.0"
|
||||
langchain-google-genai = "^1.0.1"
|
||||
langchain-cohere = "^0.1.0rc1"
|
||||
langchain-cohere = "^0.1.5"
|
||||
elasticsearch = "^8.12.0"
|
||||
pytube = "^15.0.0"
|
||||
dspy-ai = "^2.4.0"
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import os
|
||||
import argparse
|
||||
|
||||
from huggingface_hub import HfApi, list_models
|
||||
from rich import print
|
||||
|
|
@ -6,11 +6,27 @@ from rich import print
|
|||
# Use root method
|
||||
models = list_models()
|
||||
|
||||
args = argparse.ArgumentParser(description="Restart a space in the Hugging Face Hub.")
|
||||
args.add_argument("--space", type=str, help="The space to restart.")
|
||||
args.add_argument("--token", type=str, help="The Hugging Face API token.")
|
||||
|
||||
parsed_args = args.parse_args()
|
||||
|
||||
space = parsed_args.space
|
||||
|
||||
if not space:
|
||||
print("Please provide a space to restart.")
|
||||
exit()
|
||||
|
||||
if not parsed_args.token:
|
||||
print("Please provide an API token.")
|
||||
exit()
|
||||
|
||||
# Or configure a HfApi client
|
||||
hf_api = HfApi(
|
||||
endpoint="https://huggingface.co", # Can be a Private Hub endpoint.
|
||||
token=os.getenv("HUGGINFACE_API_TOKEN"),
|
||||
token=parsed_args.token,
|
||||
)
|
||||
|
||||
space_runtime = hf_api.restart_space("Langflow/Langflow-Preview", factory_reboot=True)
|
||||
space_runtime = hf_api.restart_space(space, factory_reboot=True)
|
||||
print(space_runtime)
|
||||
|
|
|
|||
|
|
@ -20,8 +20,7 @@ When running as a [spot (preemptible) instance](https://cloud.google.com/compute
|
|||
|
||||
## Pricing (approximate)
|
||||
|
||||
> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator)
|
||||
> <br>
|
||||
> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator) > <br>
|
||||
|
||||
| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
|
||||
| ------------------ | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | -------------------------------------------------------------------------- |
|
||||
|
|
|
|||
|
|
@ -121,7 +121,7 @@ def run(
|
|||
),
|
||||
):
|
||||
"""
|
||||
Run the Langflow.
|
||||
Run Langflow.
|
||||
"""
|
||||
|
||||
configure(log_level=log_level, log_file=log_file)
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ from loguru import logger
|
|||
from sqlmodel import Session, col, select
|
||||
|
||||
from langflow.api.utils import remove_api_keys, validate_is_component
|
||||
from langflow.api.v1.schemas import FlowListCreate, FlowListIds, FlowListRead
|
||||
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
|
||||
from langflow.initial_setup.setup import STARTER_FOLDER_NAME
|
||||
from langflow.services.auth.utils import get_current_active_user
|
||||
from langflow.services.database.models.flow import Flow, FlowCreate, FlowRead, FlowUpdate
|
||||
|
|
@ -258,9 +258,9 @@ async def download_file(
|
|||
return FlowListRead(flows=flows)
|
||||
|
||||
|
||||
@router.post("/multiple_delete/")
|
||||
@router.delete("/")
|
||||
async def delete_multiple_flows(
|
||||
flow_ids: FlowListIds, user: User = Depends(get_current_active_user), db: Session = Depends(get_session)
|
||||
flow_ids: List[UUID], user: User = Depends(get_current_active_user), db: Session = Depends(get_session)
|
||||
):
|
||||
"""
|
||||
Delete multiple flows by their IDs.
|
||||
|
|
@ -274,9 +274,7 @@ async def delete_multiple_flows(
|
|||
|
||||
"""
|
||||
try:
|
||||
deleted_flows = db.exec(
|
||||
select(Flow).where(col(Flow.id).in_(flow_ids.flow_ids)).where(Flow.user_id == user.id)
|
||||
).all()
|
||||
deleted_flows = db.exec(select(Flow).where(col(Flow.id).in_(flow_ids)).where(Flow.user_id == user.id)).all()
|
||||
for flow in deleted_flows:
|
||||
db.delete(flow)
|
||||
db.commit()
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
from typing import List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
|
||||
from langflow.services.deps import get_monitor_service
|
||||
from langflow.services.monitor.schema import (
|
||||
MessageModelRequest,
|
||||
MessageModelResponse,
|
||||
TransactionModelResponse,
|
||||
VertexBuildMapModel,
|
||||
|
|
@ -66,6 +66,44 @@ async def get_messages(
|
|||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.delete("/messages", status_code=204)
|
||||
async def delete_messages(
|
||||
message_ids: List[int],
|
||||
monitor_service: MonitorService = Depends(get_monitor_service),
|
||||
):
|
||||
try:
|
||||
monitor_service.delete_messages(message_ids=message_ids)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/messages/{message_id}", response_model=MessageModelResponse)
|
||||
async def update_message(
|
||||
message_id: str,
|
||||
message: MessageModelRequest,
|
||||
monitor_service: MonitorService = Depends(get_monitor_service),
|
||||
):
|
||||
try:
|
||||
message_dict = message.model_dump(exclude_none=True)
|
||||
message_dict.pop("index", None)
|
||||
monitor_service.update_message(message_id=message_id, **message_dict)
|
||||
return MessageModelResponse(index=message_id, **message_dict)
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.delete("/messages/session/{session_id}", status_code=204)
|
||||
async def delete_messages_session(
|
||||
session_id: str,
|
||||
monitor_service: MonitorService = Depends(get_monitor_service),
|
||||
):
|
||||
try:
|
||||
monitor_service.delete_messages_session(session_id=session_id)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/transactions", response_model=List[TransactionModelResponse])
|
||||
async def get_transactions(
|
||||
source: Optional[str] = Query(None),
|
||||
|
|
|
|||
|
|
@ -15,10 +15,25 @@ import shlex
|
|||
from collections import OrderedDict, namedtuple
|
||||
from http.cookies import SimpleCookie
|
||||
|
||||
from uncurl.api import parser # type: ignore
|
||||
|
||||
parser.add_argument("-x", "--proxy", default={})
|
||||
parser.add_argument("-U", "--proxy-user", default="")
|
||||
ParsedArgs = namedtuple(
|
||||
"ParsedContext",
|
||||
[
|
||||
"command",
|
||||
"url",
|
||||
"data",
|
||||
"data_binary",
|
||||
"method",
|
||||
"headers",
|
||||
"compressed",
|
||||
"insecure",
|
||||
"user",
|
||||
"include",
|
||||
"silent",
|
||||
"proxy",
|
||||
"proxy_user",
|
||||
"cookies",
|
||||
],
|
||||
)
|
||||
|
||||
ParsedContext = namedtuple("ParsedContext", ["method", "url", "data", "headers", "cookies", "verify", "auth", "proxy"])
|
||||
|
||||
|
|
@ -27,24 +42,89 @@ def normalize_newlines(multiline_text):
|
|||
return multiline_text.replace(" \\\n", " ")
|
||||
|
||||
|
||||
def parse_curl_command(curl_command):
|
||||
tokens = shlex.split(normalize_newlines(curl_command))
|
||||
tokens = [token for token in tokens if token and token != " "]
|
||||
if "curl" not in tokens[0]:
|
||||
raise ValueError("Invalid curl command")
|
||||
args_template = {
|
||||
"command": None,
|
||||
"url": None,
|
||||
"data": None,
|
||||
"data_binary": None,
|
||||
"method": "get",
|
||||
"headers": [],
|
||||
"compressed": False,
|
||||
"insecure": False,
|
||||
"user": (),
|
||||
"include": False,
|
||||
"silent": False,
|
||||
"proxy": None,
|
||||
"proxy_user": None,
|
||||
"cookies": {},
|
||||
}
|
||||
args = args_template.copy()
|
||||
|
||||
i = 0
|
||||
while i < len(tokens):
|
||||
token = tokens[i]
|
||||
if token == "-X":
|
||||
i += 1
|
||||
args["method"] = tokens[i].lower()
|
||||
elif token in ("-d", "--data"):
|
||||
i += 1
|
||||
args["data"] = tokens[i]
|
||||
args["method"] = "post"
|
||||
elif token in ("-b", "--data-binary", "--data-raw"):
|
||||
i += 1
|
||||
args["data_binary"] = tokens[i]
|
||||
args["method"] = "post"
|
||||
elif token in ("-H", "--header"):
|
||||
i += 1
|
||||
args["headers"].append(tokens[i])
|
||||
elif token == "--compressed":
|
||||
args["compressed"] = True
|
||||
elif token in ("-k", "--insecure"):
|
||||
args["insecure"] = True
|
||||
elif token in ("-u", "--user"):
|
||||
i += 1
|
||||
args["user"] = tuple(tokens[i].split(":"))
|
||||
elif token in ("-I", "--include"):
|
||||
args["include"] = True
|
||||
elif token in ("-s", "--silent"):
|
||||
args["silent"] = True
|
||||
elif token in ("-x", "--proxy"):
|
||||
i += 1
|
||||
args["proxy"] = tokens[i]
|
||||
elif token in ("-U", "--proxy-user"):
|
||||
i += 1
|
||||
args["proxy_user"] = tokens[i]
|
||||
elif not token.startswith("-"):
|
||||
if args["command"] is None:
|
||||
args["command"] = token
|
||||
else:
|
||||
args["url"] = token
|
||||
i += 1
|
||||
|
||||
return ParsedArgs(**args)
|
||||
|
||||
|
||||
def parse_context(curl_command):
|
||||
method = "get"
|
||||
|
||||
tokens = shlex.split(normalize_newlines(curl_command))
|
||||
tokens = [token for token in tokens if token and token != " "]
|
||||
parsed_args = parser.parse_args(tokens)
|
||||
parsed_args: ParsedArgs = parse_curl_command(curl_command)
|
||||
|
||||
post_data = parsed_args.data or parsed_args.data_binary
|
||||
if post_data:
|
||||
method = "post"
|
||||
|
||||
if parsed_args.X:
|
||||
method = parsed_args.X.lower()
|
||||
if parsed_args.method:
|
||||
method = parsed_args.method.lower()
|
||||
|
||||
cookie_dict = OrderedDict()
|
||||
quoted_headers = OrderedDict()
|
||||
|
||||
for curl_header in parsed_args.header:
|
||||
for curl_header in parsed_args.headers:
|
||||
if curl_header.startswith(":"):
|
||||
occurrence = [m.start() for m in re.finditer(":", curl_header)]
|
||||
header_key, header_value = curl_header[: occurrence[1]], curl_header[occurrence[1] + 1 :]
|
||||
|
|
|
|||
|
|
@ -92,7 +92,7 @@ def read_text_file(file_path: str) -> str:
|
|||
with open(file_path, "rb") as f:
|
||||
raw_data = f.read()
|
||||
result = chardet.detect(raw_data)
|
||||
encoding = result['encoding']
|
||||
encoding = result["encoding"]
|
||||
|
||||
with open(file_path, "r", encoding=encoding) as f:
|
||||
return f.read()
|
||||
|
|
|
|||
|
|
@ -1,27 +0,0 @@
|
|||
from .AstraDBSearch import AstraDBSearchComponent
|
||||
from .ChromaSearch import ChromaSearchComponent
|
||||
from .FAISSSearch import FAISSSearchComponent
|
||||
from .MongoDBAtlasVectorSearch import MongoDBAtlasSearchComponent
|
||||
from .PineconeSearch import PineconeSearchComponent
|
||||
from .QdrantSearch import QdrantSearchComponent
|
||||
from .RedisSearch import RedisSearchComponent
|
||||
from .SupabaseVectorStoreSearch import SupabaseSearchComponent
|
||||
from .VectaraSearch import VectaraSearchComponent
|
||||
from .WeaviateSearch import WeaviateSearchVectorStore
|
||||
from .pgvectorSearch import PGVectorSearchComponent
|
||||
from .Couchbase import CouchbaseSearchComponent # type: ignore
|
||||
|
||||
__all__ = [
|
||||
"AstraDBSearchComponent",
|
||||
"ChromaSearchComponent",
|
||||
"CouchbaseSearchComponent",
|
||||
"FAISSSearchComponent",
|
||||
"MongoDBAtlasSearchComponent",
|
||||
"PineconeSearchComponent",
|
||||
"QdrantSearchComponent",
|
||||
"RedisSearchComponent",
|
||||
"SupabaseSearchComponent",
|
||||
"VectaraSearchComponent",
|
||||
"WeaviateSearchVectorStore",
|
||||
"PGVectorSearchComponent",
|
||||
]
|
||||
|
|
@ -1,28 +0,0 @@
|
|||
from .AstraDB import AstraDBVectorStoreComponent
|
||||
from .Chroma import ChromaComponent
|
||||
from .FAISS import FAISSComponent
|
||||
from .MongoDBAtlasVector import MongoDBAtlasComponent
|
||||
from .Pinecone import PineconeComponent
|
||||
from .Qdrant import QdrantComponent
|
||||
from .Redis import RedisComponent
|
||||
from .SupabaseVectorStore import SupabaseComponent
|
||||
from .Vectara import VectaraComponent
|
||||
from .Weaviate import WeaviateVectorStoreComponent
|
||||
from .pgvector import PGVectorComponent
|
||||
from .Couchbase import CouchbaseComponent
|
||||
|
||||
__all__ = [
|
||||
"AstraDBVectorStoreComponent",
|
||||
"ChromaComponent",
|
||||
"CouchbaseComponent",
|
||||
"FAISSComponent",
|
||||
"MongoDBAtlasComponent",
|
||||
"PineconeComponent",
|
||||
"QdrantComponent",
|
||||
"RedisComponent",
|
||||
"SupabaseComponent",
|
||||
"VectaraComponent",
|
||||
"WeaviateVectorStoreComponent",
|
||||
"base",
|
||||
"PGVectorComponent",
|
||||
]
|
||||
|
|
@ -297,7 +297,7 @@ class CodeParser:
|
|||
bases = self.execute_and_inspect_classes(self.code)
|
||||
except Exception as e:
|
||||
# If the code cannot be executed, return an empty list
|
||||
logger.exception(e)
|
||||
logger.debug(e)
|
||||
bases = []
|
||||
raise e
|
||||
return bases
|
||||
|
|
|
|||
|
|
@ -79,7 +79,8 @@ class DirectoryReader:
|
|||
component_tuple = (*build_component(component), component)
|
||||
components.append(component_tuple)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while loading component { component['name']}: {e}")
|
||||
logger.debug(f"Error while loading component { component['name']}")
|
||||
logger.debug(e)
|
||||
continue
|
||||
items.append({"name": menu["name"], "path": menu["path"], "components": components})
|
||||
filtered = [menu for menu in items if menu["components"]]
|
||||
|
|
@ -265,8 +266,7 @@ class DirectoryReader:
|
|||
if validation_result:
|
||||
try:
|
||||
output_types = self.get_output_types_from_code(result_content)
|
||||
except Exception as exc:
|
||||
logger.exception(f"Error while getting output types from code: {str(exc)}")
|
||||
except Exception:
|
||||
output_types = [component_name_camelcase]
|
||||
else:
|
||||
output_types = [component_name_camelcase]
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -20,7 +20,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Input, Prompt, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": Input(display_name=\"Template\"),\n \"code\": Input(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
|
||||
=======
|
||||
"value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -45,9 +49,13 @@
|
|||
"name": "template",
|
||||
"display_name": "Template",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
|
|
@ -138,15 +146,20 @@
|
|||
"is_input": null,
|
||||
"is_output": null,
|
||||
"is_composition": null,
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"object",
|
||||
"Text",
|
||||
"str"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["object", "Text", "str"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "",
|
||||
"display_name": "Prompt",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
<<<<<<< HEAD
|
||||
"template": [
|
||||
"reference_1",
|
||||
"reference_2",
|
||||
|
|
@ -156,11 +169,17 @@
|
|||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"template": ["reference_1", "reference_2", "instructions"]
|
||||
},
|
||||
"output_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"full_path": null,
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false,
|
||||
<<<<<<< HEAD
|
||||
"error": null,
|
||||
"outputs": [
|
||||
{
|
||||
|
|
@ -173,6 +192,9 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"error": null
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "Prompt-Rse03",
|
||||
"description": "Create a prompt template with dynamic variables.",
|
||||
|
|
@ -233,9 +255,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"value": [
|
||||
"https://www.promptingguide.ai/techniques/prompt_chaining"
|
||||
]
|
||||
|
|
@ -244,14 +270,19 @@
|
|||
},
|
||||
"description": "Fetch content from one or more URLs.",
|
||||
"icon": "layout-template",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"Record"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["Record"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "URL",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"urls": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Record"
|
||||
],
|
||||
|
|
@ -270,6 +301,13 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Record"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "URL-HYPkR"
|
||||
},
|
||||
|
|
@ -300,7 +338,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\nfrom langflow.template import Input, Output\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n inputs = [\n Input(\n name=\"input_value\", type=str, display_name=\"Message\", multiline=True, info=\"Message to be passed as output.\"\n ),\n Input(\n name=\"sender\",\n type=str,\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n Input(name=\"sender_name\", type=str, display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\"),\n Input(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n Input(\n name=\"record_template\",\n type=str,\n display_name=\"Record Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"text_response\"),\n Output(display_name=\"Record\", name=\"record\", method=\"record_response\"),\n ]\n\n def text_response(self) -> Text:\n result = self.input_value\n if self.session_id and isinstance(result, (Record, str)):\n self.store_message(result, self.session_id, self.sender, self.sender_name)\n return result\n\n def record_response(self) -> Record:\n record = Record(\n data={\n \"message\": self.input_value,\n \"sender\": self.sender,\n \"sender_name\": self.sender_name,\n \"session_id\": self.session_id,\n \"template\": self.record_template or \"\",\n }\n )\n if self.session_id and isinstance(record, (Record, str)):\n self.store_message(record, self.session_id, self.sender, self.sender_name)\n return record\n",
|
||||
=======
|
||||
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -318,13 +360,17 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "input_value",
|
||||
"display_name": "Message",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"dynamic": false,
|
||||
"info": "Message to be passed as output.",
|
||||
"load_from_db": false,
|
||||
|
|
@ -332,6 +378,13 @@
|
|||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"record_template": {
|
||||
"type": "str",
|
||||
|
|
@ -339,8 +392,13 @@
|
|||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
<<<<<<< HEAD
|
||||
"multiline": false,
|
||||
"value": "",
|
||||
=======
|
||||
"multiline": true,
|
||||
"value": "{text}",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -348,12 +406,38 @@
|
|||
"display_name": "Record Template",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "In case of Message being a Record, this template will be used to convert it to text.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
},
|
||||
"return_record": {
|
||||
"type": "bool",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "return_record",
|
||||
"display_name": "Return Record",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender": {
|
||||
"type": "str",
|
||||
|
|
@ -362,6 +446,7 @@
|
|||
"list": true,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
|
|
@ -370,16 +455,30 @@
|
|||
"Machine",
|
||||
"User"
|
||||
],
|
||||
=======
|
||||
"value": "Machine",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"options": ["Machine", "User"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "sender",
|
||||
"display_name": "Sender Type",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Type of sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender_name": {
|
||||
"type": "str",
|
||||
|
|
@ -388,7 +487,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
"value": "AI",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -396,12 +499,19 @@
|
|||
"display_name": "Sender Name",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Name of the sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"session_id": {
|
||||
"type": "str",
|
||||
|
|
@ -410,7 +520,10 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -418,6 +531,7 @@
|
|||
"display_name": "Session ID",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Session ID for the message.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
|
|
@ -435,6 +549,18 @@
|
|||
"object",
|
||||
"str"
|
||||
],
|
||||
=======
|
||||
"info": "If provided, the message will be stored in the memory.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Display a chat message in the Playground.",
|
||||
"icon": "ChatOutput",
|
||||
"base_classes": ["Text", "Record", "object", "str"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Chat Output",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -445,6 +571,7 @@
|
|||
"return_record": null,
|
||||
"record_template": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text",
|
||||
"Record"
|
||||
|
|
@ -473,6 +600,13 @@
|
|||
"method": "record_response"
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Text", "Record"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "ChatOutput-JPlxl"
|
||||
},
|
||||
|
|
@ -508,9 +642,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"code": {
|
||||
"type": "code",
|
||||
|
|
@ -593,9 +731,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"openai_api_base": {
|
||||
"type": "str",
|
||||
|
|
@ -614,9 +756,13 @@
|
|||
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"openai_api_key": {
|
||||
"type": "str",
|
||||
|
|
@ -635,9 +781,13 @@
|
|||
"info": "The OpenAI API Key to use for the OpenAI model.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"value": "OPENAI_API_KEY"
|
||||
},
|
||||
"stream": {
|
||||
|
|
@ -676,9 +826,13 @@
|
|||
"info": "System message to pass to the model.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"temperature": {
|
||||
"type": "float",
|
||||
|
|
@ -709,11 +863,15 @@
|
|||
},
|
||||
"description": "Generates text using OpenAI LLMs.",
|
||||
"icon": "OpenAI",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"str",
|
||||
"Text",
|
||||
"object"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["str", "Text", "object"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "OpenAI",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -727,9 +885,13 @@
|
|||
"stream": null,
|
||||
"system_message": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"output_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [
|
||||
|
|
@ -743,6 +905,7 @@
|
|||
"system_message",
|
||||
"stream"
|
||||
],
|
||||
<<<<<<< HEAD
|
||||
"beta": false,
|
||||
"outputs": [
|
||||
{
|
||||
|
|
@ -755,6 +918,9 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "OpenAIModel-gi29P"
|
||||
},
|
||||
|
|
@ -813,25 +979,35 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
"value": [
|
||||
"https://www.promptingguide.ai/introduction/basics"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
"value": ["https://www.promptingguide.ai/introduction/basics"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Fetch content from one or more URLs.",
|
||||
"icon": "layout-template",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"Record"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["Record"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "URL",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"urls": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Record"
|
||||
],
|
||||
|
|
@ -850,6 +1026,13 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Record"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "URL-2cX90"
|
||||
},
|
||||
|
|
@ -905,10 +1088,14 @@
|
|||
"name": "input_value",
|
||||
"display_name": "Value",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Record",
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Record", "Text"],
|
||||
>>>>>>> origin/dev
|
||||
"dynamic": false,
|
||||
"info": "Text or Record to be passed as input.",
|
||||
"load_from_db": false,
|
||||
|
|
@ -932,25 +1119,34 @@
|
|||
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Get text inputs from the Playground.",
|
||||
"icon": "type",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"object",
|
||||
"Text",
|
||||
"str"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["object", "Text", "str"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Instructions",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"input_value": null,
|
||||
"record_template": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
|
|
@ -967,6 +1163,13 @@
|
|||
"name": "Text"
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Text"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "TextInput-og8Or"
|
||||
},
|
||||
|
|
@ -984,13 +1187,18 @@
|
|||
{
|
||||
"source": "URL-HYPkR",
|
||||
"target": "Prompt-Rse03",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"URL\", \"id\": \"URL-HYPkR\", \"output_types\": [\"Record\"], \"name\": \"Record\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-HYPkRœ}",
|
||||
>>>>>>> origin/dev
|
||||
"targetHandle": "{œfieldNameœ:œreference_2œ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"id": "reactflow__edge-URL-HYPkR{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-HYPkRœ}-Prompt-Rse03{œfieldNameœ:œreference_2œ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "reference_2",
|
||||
"id": "Prompt-Rse03",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Document",
|
||||
"BaseOutputParser",
|
||||
|
|
@ -1006,6 +1214,15 @@
|
|||
"Record"
|
||||
],
|
||||
"name": "Record"
|
||||
=======
|
||||
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["Record"],
|
||||
"dataType": "URL",
|
||||
"id": "URL-HYPkR"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1016,13 +1233,18 @@
|
|||
},
|
||||
{
|
||||
"source": "OpenAIModel-gi29P",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"OpenAIModel\", \"id\": \"OpenAIModel-gi29P\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-gi29Pœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "ChatOutput-JPlxl",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-JPlxlœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "ChatOutput-JPlxl",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Text"
|
||||
],
|
||||
|
|
@ -1035,6 +1257,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"inputTypes": ["Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["str", "Text", "object"],
|
||||
"dataType": "OpenAIModel",
|
||||
"id": "OpenAIModel-gi29P"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1045,13 +1276,18 @@
|
|||
},
|
||||
{
|
||||
"source": "URL-2cX90",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"URL\", \"id\": \"URL-2cX90\", \"output_types\": [\"Record\"], \"name\": \"Record\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-2cX90œ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "Prompt-Rse03",
|
||||
"targetHandle": "{œfieldNameœ:œreference_1œ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "reference_1",
|
||||
"id": "Prompt-Rse03",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Document",
|
||||
"BaseOutputParser",
|
||||
|
|
@ -1067,6 +1303,15 @@
|
|||
"Record"
|
||||
],
|
||||
"name": "Record"
|
||||
=======
|
||||
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["Record"],
|
||||
"dataType": "URL",
|
||||
"id": "URL-2cX90"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1077,13 +1322,18 @@
|
|||
},
|
||||
{
|
||||
"source": "TextInput-og8Or",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"TextInput\", \"id\": \"TextInput-og8Or\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-og8Orœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "Prompt-Rse03",
|
||||
"targetHandle": "{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "instructions",
|
||||
"id": "Prompt-Rse03",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Document",
|
||||
"BaseOutputParser",
|
||||
|
|
@ -1099,6 +1349,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["object", "Text", "str"],
|
||||
"dataType": "TextInput",
|
||||
"id": "TextInput-og8Or"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1109,13 +1368,18 @@
|
|||
},
|
||||
{
|
||||
"source": "Prompt-Rse03",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"Prompt\", \"id\": \"Prompt-Rse03\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-Rse03œ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "OpenAIModel-gi29P",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-gi29Pœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "OpenAIModel-gi29P",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Text"
|
||||
],
|
||||
|
|
@ -1128,6 +1392,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"inputTypes": ["Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["object", "Text", "str"],
|
||||
"dataType": "Prompt",
|
||||
"id": "Prompt-Rse03"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1148,4 +1421,8 @@
|
|||
"name": "Blog Writer",
|
||||
"last_tested_version": "1.0.0a0",
|
||||
"is_component": false
|
||||
}
|
||||
<<<<<<< HEAD
|
||||
}
|
||||
=======
|
||||
}
|
||||
>>>>>>> origin/dev
|
||||
|
|
|
|||
|
|
@ -20,7 +20,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Input, Prompt, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": Input(display_name=\"Template\"),\n \"code\": Input(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
|
||||
=======
|
||||
"value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -45,9 +49,13 @@
|
|||
"name": "template",
|
||||
"display_name": "Template",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
|
|
@ -112,15 +120,20 @@
|
|||
"is_input": null,
|
||||
"is_output": null,
|
||||
"is_composition": null,
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"object",
|
||||
"str",
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["object", "str", "Text"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "",
|
||||
"display_name": "Prompt",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
<<<<<<< HEAD
|
||||
"template": [
|
||||
"Document",
|
||||
"Question"
|
||||
|
|
@ -129,11 +142,17 @@
|
|||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"template": ["Document", "Question"]
|
||||
},
|
||||
"output_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"full_path": null,
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false,
|
||||
<<<<<<< HEAD
|
||||
"error": null,
|
||||
"outputs": [
|
||||
{
|
||||
|
|
@ -146,6 +165,9 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"error": null
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "Prompt-tHwPf",
|
||||
"description": "A component for creating prompt templates using dynamic variables.",
|
||||
|
|
@ -242,15 +264,20 @@
|
|||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "A generic file loader.",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"Record"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["Record"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Files",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"path": null,
|
||||
"silent_errors": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Record"
|
||||
],
|
||||
|
|
@ -267,6 +294,13 @@
|
|||
"name": "Record"
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Record"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "File-6TEsD"
|
||||
},
|
||||
|
|
@ -297,7 +331,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\nfrom langflow.template import Input, Output\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n Input(\n name=\"input_value\",\n type=str,\n display_name=\"Message\",\n multiline=True,\n input_types=[],\n info=\"Message to be passed as input.\",\n ),\n Input(\n name=\"sender\",\n type=str,\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n Input(name=\"sender_name\", type=str, display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"User\"),\n Input(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"text_response\"),\n Output(display_name=\"Record\", name=\"record\", method=\"record_response\"),\n ]\n\n def text_response(self) -> Text:\n result = self.input_value\n if self.session_id and isinstance(result, (Record, str)):\n self.store_message(result, self.session_id, self.sender, self.sender_name)\n return result\n\n def record_response(self) -> Record:\n record = Record(\n data={\n \"text\": self.input_value,\n \"sender\": self.sender,\n \"sender_name\": self.sender_name,\n \"session_id\": self.session_id,\n },\n )\n if self.session_id and isinstance(record, (Record, str)):\n self.store_message(record, self.session_id, self.sender, self.sender_name)\n return record\n",
|
||||
=======
|
||||
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -315,7 +353,10 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -324,7 +365,31 @@
|
|||
"advanced": false,
|
||||
"input_types": [],
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Message to be passed as input.",
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"value": ""
|
||||
},
|
||||
"return_record": {
|
||||
"type": "bool",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "return_record",
|
||||
"display_name": "Return Record",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
|
||||
>>>>>>> origin/dev
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
|
|
@ -335,6 +400,7 @@
|
|||
"list": true,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
|
|
@ -343,16 +409,30 @@
|
|||
"Machine",
|
||||
"User"
|
||||
],
|
||||
=======
|
||||
"value": "User",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"options": ["Machine", "User"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "sender",
|
||||
"display_name": "Sender Type",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Type of sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender_name": {
|
||||
"type": "str",
|
||||
|
|
@ -361,7 +441,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
"value": "User",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -369,12 +453,19 @@
|
|||
"display_name": "Sender Name",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Name of the sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"session_id": {
|
||||
"type": "str",
|
||||
|
|
@ -383,7 +474,10 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -391,6 +485,7 @@
|
|||
"display_name": "Session ID",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Session ID for the message.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
|
|
@ -408,6 +503,18 @@
|
|||
"Text",
|
||||
"object"
|
||||
],
|
||||
=======
|
||||
"info": "If provided, the message will be stored in the memory.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Get chat inputs from the Playground.",
|
||||
"icon": "ChatInput",
|
||||
"base_classes": ["str", "Record", "Text", "object"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Chat Input",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -417,6 +524,7 @@
|
|||
"session_id": null,
|
||||
"return_record": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text",
|
||||
"Record"
|
||||
|
|
@ -445,6 +553,13 @@
|
|||
"method": "record_response"
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Text", "Record"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "ChatInput-MsSJ9"
|
||||
},
|
||||
|
|
@ -475,7 +590,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\nfrom langflow.template import Input, Output\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n inputs = [\n Input(\n name=\"input_value\", type=str, display_name=\"Message\", multiline=True, info=\"Message to be passed as output.\"\n ),\n Input(\n name=\"sender\",\n type=str,\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n Input(name=\"sender_name\", type=str, display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\"),\n Input(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n Input(\n name=\"record_template\",\n type=str,\n display_name=\"Record Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"text_response\"),\n Output(display_name=\"Record\", name=\"record\", method=\"record_response\"),\n ]\n\n def text_response(self) -> Text:\n result = self.input_value\n if self.session_id and isinstance(result, (Record, str)):\n self.store_message(result, self.session_id, self.sender, self.sender_name)\n return result\n\n def record_response(self) -> Record:\n record = Record(\n data={\n \"message\": self.input_value,\n \"sender\": self.sender,\n \"sender_name\": self.sender_name,\n \"session_id\": self.session_id,\n \"template\": self.record_template or \"\",\n }\n )\n if self.session_id and isinstance(record, (Record, str)):\n self.store_message(record, self.session_id, self.sender, self.sender_name)\n return record\n",
|
||||
=======
|
||||
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -493,13 +612,17 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "input_value",
|
||||
"display_name": "Message",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"dynamic": false,
|
||||
"info": "Message to be passed as output.",
|
||||
"load_from_db": false,
|
||||
|
|
@ -510,11 +633,22 @@
|
|||
},
|
||||
"record_template": {
|
||||
"type": "str",
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"return_record": {
|
||||
"type": "bool",
|
||||
>>>>>>> origin/dev
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
|
|
@ -529,6 +663,19 @@
|
|||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"value": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "return_record",
|
||||
"display_name": "Return Record",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender": {
|
||||
"type": "str",
|
||||
|
|
@ -537,6 +684,7 @@
|
|||
"list": true,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
|
|
@ -545,16 +693,30 @@
|
|||
"Machine",
|
||||
"User"
|
||||
],
|
||||
=======
|
||||
"value": "Machine",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"options": ["Machine", "User"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "sender",
|
||||
"display_name": "Sender Type",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Type of sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender_name": {
|
||||
"type": "str",
|
||||
|
|
@ -563,7 +725,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
"value": "AI",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -571,12 +737,19 @@
|
|||
"display_name": "Sender Name",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Name of the sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"session_id": {
|
||||
"type": "str",
|
||||
|
|
@ -585,7 +758,10 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -593,6 +769,7 @@
|
|||
"display_name": "Session ID",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Session ID for the message.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
|
|
@ -610,6 +787,18 @@
|
|||
"Text",
|
||||
"object"
|
||||
],
|
||||
=======
|
||||
"info": "If provided, the message will be stored in the memory.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Display a chat message in the Playground.",
|
||||
"icon": "ChatOutput",
|
||||
"base_classes": ["str", "Record", "Text", "object"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Chat Output",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -619,6 +808,7 @@
|
|||
"session_id": null,
|
||||
"return_record": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text",
|
||||
"Record"
|
||||
|
|
@ -647,6 +837,13 @@
|
|||
"method": "record_response"
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Text", "Record"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "ChatOutput-F5Awj"
|
||||
},
|
||||
|
|
@ -687,9 +884,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"code": {
|
||||
"type": "code",
|
||||
|
|
@ -772,9 +973,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"openai_api_base": {
|
||||
"type": "str",
|
||||
|
|
@ -793,9 +998,13 @@
|
|||
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"openai_api_key": {
|
||||
"type": "str",
|
||||
|
|
@ -814,9 +1023,13 @@
|
|||
"info": "The OpenAI API Key to use for the OpenAI model.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"value": "OPENAI_API_KEY"
|
||||
},
|
||||
"stream": {
|
||||
|
|
@ -832,7 +1045,11 @@
|
|||
"password": false,
|
||||
"name": "stream",
|
||||
"display_name": "Stream",
|
||||
<<<<<<< HEAD
|
||||
"advanced": true,
|
||||
=======
|
||||
"advanced": false,
|
||||
>>>>>>> origin/dev
|
||||
"dynamic": false,
|
||||
"info": "Stream the response from the model. Streaming works only in Chat.",
|
||||
"load_from_db": false,
|
||||
|
|
@ -855,9 +1072,13 @@
|
|||
"info": "System message to pass to the model.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"temperature": {
|
||||
"type": "float",
|
||||
|
|
@ -888,11 +1109,15 @@
|
|||
},
|
||||
"description": "Generates text using OpenAI LLMs.",
|
||||
"icon": "OpenAI",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"object",
|
||||
"str",
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["object", "str", "Text"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "OpenAI",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -906,9 +1131,13 @@
|
|||
"stream": null,
|
||||
"system_message": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"output_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [
|
||||
|
|
@ -922,6 +1151,7 @@
|
|||
"system_message",
|
||||
"stream"
|
||||
],
|
||||
<<<<<<< HEAD
|
||||
"beta": false,
|
||||
"outputs": [
|
||||
{
|
||||
|
|
@ -934,6 +1164,9 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "OpenAIModel-Bt067"
|
||||
},
|
||||
|
|
@ -950,13 +1183,18 @@
|
|||
"edges": [
|
||||
{
|
||||
"source": "ChatInput-MsSJ9",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"ChatInput\", \"id\": \"ChatInput-MsSJ9\", \"output_types\": [\"Text\"], \"name\": \"message\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œstrœ,œRecordœ,œTextœ,œobjectœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-MsSJ9œ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "Prompt-tHwPf",
|
||||
"targetHandle": "{œfieldNameœ:œQuestionœ,œidœ:œPrompt-tHwPfœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "Question",
|
||||
"id": "Prompt-tHwPf",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Document",
|
||||
"BaseOutputParser",
|
||||
|
|
@ -972,6 +1210,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "message"
|
||||
=======
|
||||
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["str", "Record", "Text", "object"],
|
||||
"dataType": "ChatInput",
|
||||
"id": "ChatInput-MsSJ9"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -982,13 +1229,18 @@
|
|||
},
|
||||
{
|
||||
"source": "File-6TEsD",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"File\", \"id\": \"File-6TEsD\", \"output_types\": [\"Record\"], \"name\": \"Record\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-6TEsDœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "Prompt-tHwPf",
|
||||
"targetHandle": "{œfieldNameœ:œDocumentœ,œidœ:œPrompt-tHwPfœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "Document",
|
||||
"id": "Prompt-tHwPf",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Document",
|
||||
"BaseOutputParser",
|
||||
|
|
@ -1004,6 +1256,15 @@
|
|||
"Record"
|
||||
],
|
||||
"name": "Record"
|
||||
=======
|
||||
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["Record"],
|
||||
"dataType": "File",
|
||||
"id": "File-6TEsD"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1014,13 +1275,18 @@
|
|||
},
|
||||
{
|
||||
"source": "Prompt-tHwPf",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"Prompt\", \"id\": \"Prompt-tHwPf\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-tHwPfœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "OpenAIModel-Bt067",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-Bt067œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "OpenAIModel-Bt067",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Text"
|
||||
],
|
||||
|
|
@ -1033,6 +1299,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"inputTypes": ["Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["object", "str", "Text"],
|
||||
"dataType": "Prompt",
|
||||
"id": "Prompt-tHwPf"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1043,13 +1318,18 @@
|
|||
},
|
||||
{
|
||||
"source": "OpenAIModel-Bt067",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"OpenAIModel\", \"id\": \"OpenAIModel-Bt067\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Bt067œ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "ChatOutput-F5Awj",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-F5Awjœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "ChatOutput-F5Awj",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Text"
|
||||
],
|
||||
|
|
@ -1062,6 +1342,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"inputTypes": ["Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["object", "str", "Text"],
|
||||
"dataType": "OpenAIModel",
|
||||
"id": "OpenAIModel-Bt067"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1081,4 +1370,8 @@
|
|||
"name": "Document QA",
|
||||
"last_tested_version": "1.0.0a0",
|
||||
"is_component": false
|
||||
}
|
||||
<<<<<<< HEAD
|
||||
}
|
||||
=======
|
||||
}
|
||||
>>>>>>> origin/dev
|
||||
|
|
|
|||
|
|
@ -22,7 +22,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\nfrom langflow.template import Input, Output\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n Input(\n name=\"input_value\",\n type=str,\n display_name=\"Message\",\n multiline=True,\n input_types=[],\n info=\"Message to be passed as input.\",\n ),\n Input(\n name=\"sender\",\n type=str,\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n Input(name=\"sender_name\", type=str, display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"User\"),\n Input(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"text_response\"),\n Output(display_name=\"Record\", name=\"record\", method=\"record_response\"),\n ]\n\n def text_response(self) -> Text:\n result = self.input_value\n if self.session_id and isinstance(result, (Record, str)):\n self.store_message(result, self.session_id, self.sender, self.sender_name)\n return result\n\n def record_response(self) -> Record:\n record = Record(\n data={\n \"text\": self.input_value,\n \"sender\": self.sender,\n \"sender_name\": self.sender_name,\n \"session_id\": self.session_id,\n },\n )\n if self.session_id and isinstance(record, (Record, str)):\n self.store_message(record, self.session_id, self.sender, self.sender_name)\n return record\n",
|
||||
=======
|
||||
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -40,7 +44,10 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -49,7 +56,31 @@
|
|||
"advanced": false,
|
||||
"input_types": [],
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Message to be passed as input.",
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"value": ""
|
||||
},
|
||||
"return_record": {
|
||||
"type": "bool",
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
"value": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "return_record",
|
||||
"display_name": "Return Record",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
|
||||
>>>>>>> origin/dev
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
|
|
@ -60,6 +91,7 @@
|
|||
"list": true,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
|
|
@ -68,16 +100,30 @@
|
|||
"Machine",
|
||||
"User"
|
||||
],
|
||||
=======
|
||||
"value": "User",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"options": ["Machine", "User"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "sender",
|
||||
"display_name": "Sender Type",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Type of sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender_name": {
|
||||
"type": "str",
|
||||
|
|
@ -86,7 +132,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
"value": "User",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -94,12 +144,19 @@
|
|||
"display_name": "Sender Name",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Name of the sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"session_id": {
|
||||
"type": "str",
|
||||
|
|
@ -108,12 +165,16 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "session_id",
|
||||
"display_name": "Session ID",
|
||||
<<<<<<< HEAD
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Session ID for the message.",
|
||||
|
|
@ -133,6 +194,21 @@
|
|||
"Record",
|
||||
"str"
|
||||
],
|
||||
=======
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
"info": "If provided, the message will be stored in the memory.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"],
|
||||
"value": "MySessionID"
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Get chat inputs from the Playground.",
|
||||
"icon": "ChatInput",
|
||||
"base_classes": ["Text", "object", "Record", "str"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Chat Input",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -142,6 +218,7 @@
|
|||
"session_id": null,
|
||||
"return_record": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text",
|
||||
"Record"
|
||||
|
|
@ -170,6 +247,13 @@
|
|||
"method": "record_response"
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Text", "Record"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "ChatInput-t7F8v"
|
||||
},
|
||||
|
|
@ -200,7 +284,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\nfrom langflow.template import Input, Output\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n inputs = [\n Input(\n name=\"input_value\", type=str, display_name=\"Message\", multiline=True, info=\"Message to be passed as output.\"\n ),\n Input(\n name=\"sender\",\n type=str,\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n Input(name=\"sender_name\", type=str, display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\"),\n Input(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n Input(\n name=\"record_template\",\n type=str,\n display_name=\"Record Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"text_response\"),\n Output(display_name=\"Record\", name=\"record\", method=\"record_response\"),\n ]\n\n def text_response(self) -> Text:\n result = self.input_value\n if self.session_id and isinstance(result, (Record, str)):\n self.store_message(result, self.session_id, self.sender, self.sender_name)\n return result\n\n def record_response(self) -> Record:\n record = Record(\n data={\n \"message\": self.input_value,\n \"sender\": self.sender,\n \"sender_name\": self.sender_name,\n \"session_id\": self.session_id,\n \"template\": self.record_template or \"\",\n }\n )\n if self.session_id and isinstance(record, (Record, str)):\n self.store_message(record, self.session_id, self.sender, self.sender_name)\n return record\n",
|
||||
=======
|
||||
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -218,13 +306,17 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "input_value",
|
||||
"display_name": "Message",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"dynamic": false,
|
||||
"info": "Message to be passed as output.",
|
||||
"load_from_db": false,
|
||||
|
|
@ -235,11 +327,22 @@
|
|||
},
|
||||
"record_template": {
|
||||
"type": "str",
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
},
|
||||
"return_record": {
|
||||
"type": "bool",
|
||||
>>>>>>> origin/dev
|
||||
"required": false,
|
||||
"placeholder": "",
|
||||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
|
|
@ -254,6 +357,19 @@
|
|||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"value": false,
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "return_record",
|
||||
"display_name": "Return Record",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
|
||||
"load_from_db": false,
|
||||
"title_case": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender": {
|
||||
"type": "str",
|
||||
|
|
@ -262,6 +378,7 @@
|
|||
"list": true,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
|
|
@ -270,16 +387,30 @@
|
|||
"Machine",
|
||||
"User"
|
||||
],
|
||||
=======
|
||||
"value": "Machine",
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"options": ["Machine", "User"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "sender",
|
||||
"display_name": "Sender Type",
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Type of sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender_name": {
|
||||
"type": "str",
|
||||
|
|
@ -288,7 +419,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
"value": "AI",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -296,12 +431,19 @@
|
|||
"display_name": "Sender Name",
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
<<<<<<< HEAD
|
||||
"info": "Name of the sender.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"session_id": {
|
||||
"type": "str",
|
||||
|
|
@ -310,12 +452,16 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": false,
|
||||
<<<<<<< HEAD
|
||||
"value": "",
|
||||
=======
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
"name": "session_id",
|
||||
"display_name": "Session ID",
|
||||
<<<<<<< HEAD
|
||||
"advanced": true,
|
||||
"dynamic": false,
|
||||
"info": "Session ID for the message.",
|
||||
|
|
@ -335,6 +481,21 @@
|
|||
"Record",
|
||||
"str"
|
||||
],
|
||||
=======
|
||||
"advanced": false,
|
||||
"dynamic": false,
|
||||
"info": "If provided, the message will be stored in the memory.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
"input_types": ["Text"],
|
||||
"value": "MySessionID"
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Display a chat message in the Playground.",
|
||||
"icon": "ChatOutput",
|
||||
"base_classes": ["Text", "object", "Record", "str"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Chat Output",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -344,6 +505,7 @@
|
|||
"session_id": null,
|
||||
"return_record": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text",
|
||||
"Record"
|
||||
|
|
@ -372,6 +534,13 @@
|
|||
"method": "record_response"
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Text", "Record"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "ChatOutput-P1jEe"
|
||||
},
|
||||
|
|
@ -443,10 +612,14 @@
|
|||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
<<<<<<< HEAD
|
||||
"options": [
|
||||
"Ascending",
|
||||
"Descending"
|
||||
],
|
||||
=======
|
||||
"options": ["Ascending", "Descending"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "order",
|
||||
"display_name": "Order",
|
||||
"advanced": true,
|
||||
|
|
@ -454,9 +627,13 @@
|
|||
"info": "Order of the messages.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"record_template": {
|
||||
"type": "str",
|
||||
|
|
@ -476,9 +653,13 @@
|
|||
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender": {
|
||||
"type": "str",
|
||||
|
|
@ -491,11 +672,15 @@
|
|||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
<<<<<<< HEAD
|
||||
"options": [
|
||||
"Machine",
|
||||
"User",
|
||||
"Machine and User"
|
||||
],
|
||||
=======
|
||||
"options": ["Machine", "User", "Machine and User"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "sender",
|
||||
"display_name": "Sender Type",
|
||||
"advanced": false,
|
||||
|
|
@ -503,9 +688,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"sender_name": {
|
||||
"type": "str",
|
||||
|
|
@ -524,9 +713,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"session_id": {
|
||||
"type": "str",
|
||||
|
|
@ -541,9 +734,13 @@
|
|||
"name": "session_id",
|
||||
"display_name": "Session ID",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"dynamic": false,
|
||||
"info": "Session ID of the chat history.",
|
||||
"load_from_db": false,
|
||||
|
|
@ -554,11 +751,15 @@
|
|||
},
|
||||
"description": "Retrieves stored chat messages given a specific Session ID.",
|
||||
"icon": "history",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"str",
|
||||
"Text",
|
||||
"object"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["str", "Text", "object"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Chat Memory",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -569,6 +770,7 @@
|
|||
"order": null,
|
||||
"record_template": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
|
|
@ -587,6 +789,13 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"output_types": ["Text"],
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": true
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "MemoryComponent-cdA1J",
|
||||
"description": "Retrieves stored chat messages given a specific Session ID.",
|
||||
|
|
@ -619,7 +828,11 @@
|
|||
"list": false,
|
||||
"show": true,
|
||||
"multiline": true,
|
||||
<<<<<<< HEAD
|
||||
"value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Input, Prompt, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": Input(display_name=\"Template\"),\n \"code\": Input(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
|
||||
=======
|
||||
"value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
|
||||
>>>>>>> origin/dev
|
||||
"fileTypes": [],
|
||||
"file_path": "",
|
||||
"password": false,
|
||||
|
|
@ -644,9 +857,13 @@
|
|||
"name": "template",
|
||||
"display_name": "Template",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"dynamic": false,
|
||||
"info": "",
|
||||
"load_from_db": false,
|
||||
|
|
@ -711,15 +928,20 @@
|
|||
"is_input": null,
|
||||
"is_output": null,
|
||||
"is_composition": null,
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"Text",
|
||||
"str",
|
||||
"object"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["Text", "str", "object"],
|
||||
>>>>>>> origin/dev
|
||||
"name": "",
|
||||
"display_name": "Prompt",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
<<<<<<< HEAD
|
||||
"template": [
|
||||
"context",
|
||||
"user_message"
|
||||
|
|
@ -728,11 +950,17 @@
|
|||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"template": ["context", "user_message"]
|
||||
},
|
||||
"output_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"full_path": null,
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
"beta": false,
|
||||
<<<<<<< HEAD
|
||||
"error": null,
|
||||
"outputs": [
|
||||
{
|
||||
|
|
@ -745,6 +973,9 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"error": null
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "Prompt-ODkUx",
|
||||
"description": "A component for creating prompt templates using dynamic variables.",
|
||||
|
|
@ -787,9 +1018,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"code": {
|
||||
"type": "code",
|
||||
|
|
@ -872,9 +1107,13 @@
|
|||
"info": "",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"openai_api_base": {
|
||||
"type": "str",
|
||||
|
|
@ -893,9 +1132,13 @@
|
|||
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"openai_api_key": {
|
||||
"type": "str",
|
||||
|
|
@ -914,9 +1157,13 @@
|
|||
"info": "The OpenAI API Key to use for the OpenAI model.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"value": "OPENAI_API_KEY"
|
||||
},
|
||||
"stream": {
|
||||
|
|
@ -955,9 +1202,13 @@
|
|||
"info": "System message to pass to the model.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"temperature": {
|
||||
"type": "float",
|
||||
|
|
@ -988,11 +1239,15 @@
|
|||
},
|
||||
"description": "Generates text using OpenAI LLMs.",
|
||||
"icon": "OpenAI",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"str",
|
||||
"object",
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["str", "object", "Text"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "OpenAI",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
|
|
@ -1006,9 +1261,13 @@
|
|||
"stream": null,
|
||||
"system_message": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"output_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [
|
||||
|
|
@ -1022,6 +1281,7 @@
|
|||
"system_message",
|
||||
"stream"
|
||||
],
|
||||
<<<<<<< HEAD
|
||||
"beta": false,
|
||||
"outputs": [
|
||||
{
|
||||
|
|
@ -1034,6 +1294,9 @@
|
|||
"method": null
|
||||
}
|
||||
]
|
||||
=======
|
||||
"beta": false
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"id": "OpenAIModel-9RykF"
|
||||
},
|
||||
|
|
@ -1071,10 +1334,14 @@
|
|||
"name": "input_value",
|
||||
"display_name": "Value",
|
||||
"advanced": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Record",
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"input_types": ["Record", "Text"],
|
||||
>>>>>>> origin/dev
|
||||
"dynamic": false,
|
||||
"info": "Text or Record to be passed as output.",
|
||||
"load_from_db": false,
|
||||
|
|
@ -1116,28 +1383,40 @@
|
|||
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
|
||||
"load_from_db": false,
|
||||
"title_case": false,
|
||||
<<<<<<< HEAD
|
||||
"input_types": [
|
||||
"Text"
|
||||
]
|
||||
=======
|
||||
"input_types": ["Text"]
|
||||
>>>>>>> origin/dev
|
||||
},
|
||||
"_type": "CustomComponent"
|
||||
},
|
||||
"description": "Display a text output in the Playground.",
|
||||
"icon": "type",
|
||||
<<<<<<< HEAD
|
||||
"base_classes": [
|
||||
"str",
|
||||
"object",
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"base_classes": ["str", "object", "Text"],
|
||||
>>>>>>> origin/dev
|
||||
"display_name": "Inspect Memory",
|
||||
"documentation": "",
|
||||
"custom_fields": {
|
||||
"input_value": null,
|
||||
"record_template": null
|
||||
},
|
||||
<<<<<<< HEAD
|
||||
"output_types": [
|
||||
"Text"
|
||||
],
|
||||
=======
|
||||
"output_types": ["Text"],
|
||||
>>>>>>> origin/dev
|
||||
"field_formatters": {},
|
||||
"frozen": false,
|
||||
"field_order": [],
|
||||
|
|
@ -1158,12 +1437,17 @@
|
|||
"edges": [
|
||||
{
|
||||
"source": "MemoryComponent-cdA1J",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"MemoryComponent\", \"id\": \"MemoryComponent-cdA1J\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œMemoryComponentœ,œidœ:œMemoryComponent-cdA1Jœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "Prompt-ODkUx",
|
||||
"targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-ODkUxœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "context",
|
||||
<<<<<<< HEAD
|
||||
"id": "Prompt-ODkUx",
|
||||
"inputTypes": [
|
||||
"Document",
|
||||
|
|
@ -1180,6 +1464,16 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"type": "str",
|
||||
"id": "Prompt-ODkUx",
|
||||
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"]
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["str", "Text", "object"],
|
||||
"dataType": "MemoryComponent",
|
||||
"id": "MemoryComponent-cdA1J"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1191,12 +1485,17 @@
|
|||
},
|
||||
{
|
||||
"source": "ChatInput-t7F8v",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"ChatInput\", \"id\": \"ChatInput-t7F8v\", \"output_types\": [\"Text\"], \"name\": \"message\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œTextœ,œobjectœ,œRecordœ,œstrœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-t7F8vœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "Prompt-ODkUx",
|
||||
"targetHandle": "{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-ODkUxœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "user_message",
|
||||
<<<<<<< HEAD
|
||||
"id": "Prompt-ODkUx",
|
||||
"inputTypes": [
|
||||
"Document",
|
||||
|
|
@ -1213,6 +1512,16 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "message"
|
||||
=======
|
||||
"type": "str",
|
||||
"id": "Prompt-ODkUx",
|
||||
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"]
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["Text", "object", "Record", "str"],
|
||||
"dataType": "ChatInput",
|
||||
"id": "ChatInput-t7F8v"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1224,13 +1533,18 @@
|
|||
},
|
||||
{
|
||||
"source": "Prompt-ODkUx",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"Prompt\", \"id\": \"Prompt-ODkUx\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-ODkUxœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "OpenAIModel-9RykF",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-9RykFœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "OpenAIModel-9RykF",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Text"
|
||||
],
|
||||
|
|
@ -1243,6 +1557,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"inputTypes": ["Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["Text", "str", "object"],
|
||||
"dataType": "Prompt",
|
||||
"id": "Prompt-ODkUx"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1253,13 +1576,18 @@
|
|||
},
|
||||
{
|
||||
"source": "OpenAIModel-9RykF",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"OpenAIModel\", \"id\": \"OpenAIModel-9RykF\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œstrœ,œobjectœ,œTextœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-9RykFœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "ChatOutput-P1jEe",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-P1jEeœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "ChatOutput-P1jEe",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Text"
|
||||
],
|
||||
|
|
@ -1272,6 +1600,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"inputTypes": ["Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["str", "object", "Text"],
|
||||
"dataType": "OpenAIModel",
|
||||
"id": "OpenAIModel-9RykF"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1282,13 +1619,18 @@
|
|||
},
|
||||
{
|
||||
"source": "MemoryComponent-cdA1J",
|
||||
<<<<<<< HEAD
|
||||
"sourceHandle": "{\"dataType\": \"MemoryComponent\", \"id\": \"MemoryComponent-cdA1J\", \"output_types\": [\"Text\"], \"name\": \"Text\"}",
|
||||
=======
|
||||
"sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œMemoryComponentœ,œidœ:œMemoryComponent-cdA1Jœ}",
|
||||
>>>>>>> origin/dev
|
||||
"target": "TextOutput-vrs6T",
|
||||
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-vrs6Tœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}",
|
||||
"data": {
|
||||
"targetHandle": {
|
||||
"fieldName": "input_value",
|
||||
"id": "TextOutput-vrs6T",
|
||||
<<<<<<< HEAD
|
||||
"inputTypes": [
|
||||
"Record",
|
||||
"Text"
|
||||
|
|
@ -1302,6 +1644,15 @@
|
|||
"Text"
|
||||
],
|
||||
"name": "Text"
|
||||
=======
|
||||
"inputTypes": ["Record", "Text"],
|
||||
"type": "str"
|
||||
},
|
||||
"sourceHandle": {
|
||||
"baseClasses": ["str", "Text", "object"],
|
||||
"dataType": "MemoryComponent",
|
||||
"id": "MemoryComponent-cdA1J"
|
||||
>>>>>>> origin/dev
|
||||
}
|
||||
},
|
||||
"style": {
|
||||
|
|
@ -1321,4 +1672,8 @@
|
|||
"name": "Memory Chatbot",
|
||||
"last_tested_version": "1.0.0a0",
|
||||
"is_component": false
|
||||
}
|
||||
<<<<<<< HEAD
|
||||
}
|
||||
=======
|
||||
}
|
||||
>>>>>>> origin/dev
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,3 +1,4 @@
|
|||
from .load import load_flow_from_json, run_flow_from_json # noqa: F401
|
||||
from .load import load_flow_from_json, run_flow_from_json
|
||||
from .utils import upload_file, get_flow
|
||||
|
||||
__all__ = ["load_flow_from_json", "run_flow_from_json"]
|
||||
__all__ = ["load_flow_from_json", "run_flow_from_json", "upload_file", "get_flow"]
|
||||
|
|
|
|||
89
src/backend/base/langflow/load/utils.py
Normal file
89
src/backend/base/langflow/load/utils.py
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
import httpx
|
||||
|
||||
from langflow.services.database.models.flow.model import FlowBase
|
||||
|
||||
|
||||
def upload(file_path, host, flow_id):
|
||||
"""
|
||||
Upload a file to Langflow and return the file path.
|
||||
|
||||
Args:
|
||||
file_path (str): The path to the file to be uploaded.
|
||||
host (str): The host URL of Langflow.
|
||||
flow_id (UUID): The ID of the flow to which the file belongs.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing the file path.
|
||||
|
||||
Raises:
|
||||
Exception: If an error occurs during the upload process.
|
||||
"""
|
||||
try:
|
||||
url = f"{host}/api/v1/upload/{flow_id}"
|
||||
response = httpx.post(url, files={"file": open(file_path, "rb")})
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
else:
|
||||
raise Exception(f"Error uploading file: {response.status_code}")
|
||||
except Exception as e:
|
||||
raise Exception(f"Error uploading file: {e}")
|
||||
|
||||
|
||||
def upload_file(file_path, host, flow_id, components, tweaks={}):
|
||||
"""
|
||||
Upload a file to Langflow and return the file path.
|
||||
|
||||
Args:
|
||||
file_path (str): The path to the file to be uploaded.
|
||||
host (str): The host URL of Langflow.
|
||||
port (int): The port number of Langflow.
|
||||
flow_id (UUID): The ID of the flow to which the file belongs.
|
||||
components (str): List of component IDs or names that need the file.
|
||||
tweaks (dict): A dictionary of tweaks to be applied to the file.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing the file path and any tweaks that were applied.
|
||||
|
||||
Raises:
|
||||
Exception: If an error occurs during the upload process.
|
||||
"""
|
||||
try:
|
||||
response = upload(file_path, host, flow_id)
|
||||
if response["file_path"]:
|
||||
for component in components:
|
||||
if isinstance(component, str):
|
||||
tweaks[component] = {"file_path": response["file_path"]}
|
||||
else:
|
||||
raise ValueError(f"Component ID or name must be a string. Got {type(component)}")
|
||||
return tweaks
|
||||
else:
|
||||
raise ValueError("Error uploading file")
|
||||
except Exception as e:
|
||||
raise ValueError(f"Error uploading file: {e}")
|
||||
|
||||
|
||||
def get_flow(url: str, flow_id: str):
|
||||
"""Get the details of a flow from Langflow.
|
||||
|
||||
Args:
|
||||
url (str): The host URL of Langflow.
|
||||
port (int): The port number of Langflow.
|
||||
flow_id (UUID): The ID of the flow to retrieve.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing the details of the flow.
|
||||
|
||||
Raises:
|
||||
Exception: If an error occurs during the retrieval process.
|
||||
"""
|
||||
try:
|
||||
flow_url = f"{url}/api/v1/flows/{flow_id}"
|
||||
response = httpx.get(flow_url)
|
||||
if response.status_code == 200:
|
||||
json_response = response.json()
|
||||
flow = FlowBase(**json_response).model_dump()
|
||||
return flow
|
||||
else:
|
||||
raise Exception(f"Error retrieving flow: {response.status_code}")
|
||||
except Exception as e:
|
||||
raise Exception(f"Error retrieving flow: {e}")
|
||||
|
|
@ -59,7 +59,7 @@ async def run_graph_internal(
|
|||
outputs or [],
|
||||
stream=stream,
|
||||
session_id=session_id_str or "",
|
||||
fallback_to_env_vars=fallback_to_env_vars
|
||||
fallback_to_env_vars=fallback_to_env_vars,
|
||||
)
|
||||
if session_id_str and session_service:
|
||||
await session_service.update_session(session_id_str, (graph, artifacts))
|
||||
|
|
|
|||
|
|
@ -55,6 +55,7 @@ class ApiKeyRead(ApiKeyBase):
|
|||
id: UUID
|
||||
api_key: str = Field(schema_extra={"validate_default": True})
|
||||
user_id: UUID = Field()
|
||||
created_at: datetime = Field()
|
||||
|
||||
@field_validator("api_key")
|
||||
@classmethod
|
||||
|
|
|
|||
|
|
@ -29,7 +29,6 @@ class FlowBase(SQLModel):
|
|||
is_component: Optional[bool] = Field(default=False, nullable=True)
|
||||
updated_at: Optional[datetime] = Field(default_factory=lambda: datetime.now(timezone.utc), nullable=True)
|
||||
webhook: Optional[bool] = Field(default=False, nullable=True, description="Can be used on the webhook endpoint")
|
||||
folder_id: Optional[UUID] = Field(default=None, nullable=True)
|
||||
endpoint_name: Optional[str] = Field(default=None, nullable=True, index=True)
|
||||
|
||||
@field_validator("endpoint_name")
|
||||
|
|
|
|||
|
|
@ -122,6 +122,13 @@ class MessageModelResponse(MessageModel):
|
|||
return v
|
||||
|
||||
|
||||
class MessageModelRequest(MessageModel):
|
||||
message: str = Field(default="")
|
||||
sender: str = Field(default="")
|
||||
sender_name: str = Field(default="")
|
||||
session_id: str = Field(default="")
|
||||
|
||||
|
||||
class VertexBuildModel(BaseModel):
|
||||
index: Optional[int] = Field(default=None, alias="index", exclude=True)
|
||||
id: Optional[str] = Field(default=None, alias="id")
|
||||
|
|
|
|||
|
|
@ -32,6 +32,10 @@ class MonitorService(Service):
|
|||
except Exception as e:
|
||||
logger.exception(f"Error initializing monitor service: {e}")
|
||||
|
||||
def exec_query(self, query: str):
|
||||
with duckdb.connect(str(self.db_path)) as conn:
|
||||
return conn.execute(query).df()
|
||||
|
||||
def to_df(self, table_name):
|
||||
return self.load_table_as_dataframe(table_name)
|
||||
|
||||
|
|
@ -69,7 +73,7 @@ class MonitorService(Service):
|
|||
valid: Optional[bool] = None,
|
||||
order_by: Optional[str] = "timestamp",
|
||||
):
|
||||
query = "SELECT index,flow_id, valid, params, data, artifacts, timestamp FROM vertex_builds"
|
||||
query = "SELECT id, index,flow_id, valid, params, data, artifacts, timestamp FROM vertex_builds"
|
||||
conditions = []
|
||||
if flow_id:
|
||||
conditions.append(f"flow_id = '{flow_id}'")
|
||||
|
|
@ -88,6 +92,8 @@ class MonitorService(Service):
|
|||
with duckdb.connect(str(self.db_path)) as conn:
|
||||
df = conn.execute(query).df()
|
||||
|
||||
print(query)
|
||||
|
||||
return df.to_dict(orient="records")
|
||||
|
||||
def delete_vertex_builds(self, flow_id: Optional[str] = None):
|
||||
|
|
@ -98,11 +104,22 @@ class MonitorService(Service):
|
|||
with duckdb.connect(str(self.db_path)) as conn:
|
||||
conn.execute(query)
|
||||
|
||||
def delete_messages(self, session_id: str):
|
||||
def delete_messages_session(self, session_id: str):
|
||||
query = f"DELETE FROM messages WHERE session_id = '{session_id}'"
|
||||
|
||||
with duckdb.connect(str(self.db_path)) as conn:
|
||||
conn.execute(query)
|
||||
return self.exec_query(query)
|
||||
|
||||
def delete_messages(self, message_ids: list[int]):
|
||||
query = f"DELETE FROM messages WHERE index IN ({','.join(map(str, message_ids))})"
|
||||
|
||||
return self.exec_query(query)
|
||||
|
||||
def update_message(self, message_id: int, **kwargs):
|
||||
query = (
|
||||
f"""UPDATE messages SET {', '.join(f"{k} = '{v}'" for k, v in kwargs.items())} WHERE index = {message_id}"""
|
||||
)
|
||||
|
||||
return self.exec_query(query)
|
||||
|
||||
def add_message(self, message: MessageModel):
|
||||
self.add_row("messages", message)
|
||||
|
|
|
|||
|
|
@ -78,7 +78,6 @@ class Settings(BaseSettings):
|
|||
langchain_cache: str = "InMemoryCache"
|
||||
load_flows_path: Optional[str] = None
|
||||
|
||||
|
||||
# Redis
|
||||
redis_host: str = "localhost"
|
||||
redis_port: int = 6379
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue