Merge branch 'dev' into cz/fixTestsCI

commit 00f6bea71b

47 changed files with 1017 additions and 339 deletions
14 .github/workflows/auto-update.yml vendored Normal file
@@ -0,0 +1,14 @@
+name: Auto-update
+
+on:
+  push:
+    branches:
+      - dev
+      - main
+
+jobs:
+  Auto:
+    name: Auto-update
+    runs-on: ubuntu-latest
+    steps:
+      - uses: tibdex/auto-update@v2
50 .github/workflows/js_autofix.yml vendored Normal file
@@ -0,0 +1,50 @@
+name: autofix.ci
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened, auto_merge_enabled, auto_merge_disabled]
+    paths:
+      - "src/frontend/**"
+  push:
+    branches: [ "main" ]
+    paths:
+      - "src/frontend/**"
+
+permissions:
+  contents: read
+
+env:
+  NODE_VERSION: "21"
+jobs:
+  autofix:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        id: setup-node
+        with:
+          node-version: ${{ env.NODE_VERSION }}
+
+      - name: Cache Node.js dependencies
+        uses: actions/cache@v4
+        id: npm-cache
+        with:
+          path: ~/.npm
+          key: ${{ runner.os }}-node-${{ hashFiles('src/frontend/package-lock.json') }}
+          restore-keys: |
+            ${{ runner.os }}-node-
+
+      - name: Install Node.js dependencies
+        run: |
+          cd src/frontend
+          npm ci
+        if: ${{ steps.setup-node.outputs.cache-hit != 'true' }}
+      - name: Run Prettier
+        run: |
+          cd src/frontend
+          npm run format
+
+      - uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a
29 .github/workflows/py_autofix.yml vendored Normal file
@@ -0,0 +1,29 @@
+name: autofix.ci
+on:
+  pull_request:
+    types: [opened, synchronize, reopened, auto_merge_enabled, auto_merge_disabled]
+    paths:
+      - "poetry.lock"
+      - "pyproject.toml"
+      - "src/backend/**"
+      - "tests/**"
+env:
+  POETRY_VERSION: "1.8.2"
+
+jobs:
+  lint:
+    name: Run Mypy
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version:
+          - "3.12"
+          - "3.11"
+          - "3.10"
+    steps:
+      - uses: actions/checkout@v4
+      - uses: install-pinned/ruff@6b463d795ce39011cc004438ae507ae56235e12a
+      - run: ruff --fix-only .
+      - run: ruff format .
+
+      - uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a
8 .github/workflows/release.yml vendored
@@ -14,11 +14,7 @@ on:
       required: false
       type: boolean
       default: true
-    branch:
-      description: "Branch to release from"
-      required: true
-      type: string
-      default: "main"
+
 
 env:
   POETRY_VERSION: "1.8.2"
@@ -180,4 +176,4 @@ jobs:
       generateReleaseNotes: true
       prerelease: ${{ inputs.pre_release }}
       tag: v${{ needs.release-main.outputs.version }}
-      commit: ${{ inputs.branch }}
+      commit: ${{ github.ref }}
10 .github/workflows/style-check-py.yml vendored
@@ -35,14 +35,8 @@ jobs:
           poetry install
       - name: Register problem matcher
         run: echo "::add-matcher::.github/workflows/matchers/ruff.json"
-      - name: Run Ruff
+      - name: Run Ruff Check
         run: poetry run ruff check --output-format=github .
-      - name: Run Ruff format
-        run: poetry run ruff format .
-      - name: Commit changes
-        uses: stefanzweifel/git-auto-commit-action@v5
-        with:
-          commit_message: Apply Ruff formatting
-          branch: ${{ github.head_ref }}
@@ -1,7 +1,5 @@
-
 # Kubernetes
-
 
 This guide will help you get LangFlow up and running in Kubernetes cluster, including the following steps:
 
 - Install [LangFlow as IDE](#langflow-ide) in a Kubernetes cluster (for development)
297 poetry.lock generated
@@ -123,13 +123,13 @@ frozenlist = ">=1.1.0"
 
 [[package]]
 name = "alembic"
-version = "1.13.1"
+version = "1.13.2"
 description = "A database migration tool for SQLAlchemy."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"},
-    {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"},
+    {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"},
+    {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"},
 ]
 
 [package.dependencies]
@@ -167,13 +167,13 @@ files = [
 
 [[package]]
 name = "anthropic"
-version = "0.29.0"
+version = "0.30.0"
 description = "The official Python library for the anthropic API"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "anthropic-0.29.0-py3-none-any.whl", hash = "sha256:d16010715129c8bc3295b74fbf4da73cfb156618bf0abb2d007255983266b76a"},
-    {file = "anthropic-0.29.0.tar.gz", hash = "sha256:3eb558a232d83bdf7cdedb75663bf7ff7a8b50cc10acaa9ce6494ff295b8506a"},
+    {file = "anthropic-0.30.0-py3-none-any.whl", hash = "sha256:061bf58c9c64968361e6c21c76ff5016a6f7fdd9a5f6b7f2280ede2c3b44bfd5"},
+    {file = "anthropic-0.30.0.tar.gz", hash = "sha256:9e9ee2bfce833370eac74d7de433db97a0bf141f9118c40ac0e2f4c39bc2b76f"},
 ]
 
 [package.dependencies]
@@ -461,17 +461,17 @@ files = [
 
 [[package]]
 name = "boto3"
-version = "1.34.132"
+version = "1.34.133"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.34.132-py3-none-any.whl", hash = "sha256:b5d1681a0d8bf255787c8b37f911d706672d5722c9ace5342cd283a3cdb04820"},
-    {file = "boto3-1.34.132.tar.gz", hash = "sha256:3b2964060620f1bbe9574b5f8d3fb2a4e087faacfc6023c24154b184f1b16443"},
+    {file = "boto3-1.34.133-py3-none-any.whl", hash = "sha256:da7e78c03270be872ad78301892396ffea56647efcb2c3a8621ef46a905541ab"},
+    {file = "boto3-1.34.133.tar.gz", hash = "sha256:7071f8ce1f09113ca5630860fd590464e6325a4df55faae83c956225941016fc"},
 ]
 
 [package.dependencies]
-botocore = ">=1.34.132,<1.35.0"
+botocore = ">=1.34.133,<1.35.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.10.0,<0.11.0"
 
@@ -480,13 +480,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.34.132"
+version = "1.34.133"
 description = "Low-level, data-driven core of boto 3."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "botocore-1.34.132-py3-none-any.whl", hash = "sha256:06ef8b4bd3b3cb5a9b9a4273a543b257be3304030978ba51516b576a65156c39"},
-    {file = "botocore-1.34.132.tar.gz", hash = "sha256:372a6cfce29e5de9bcf8c95af901d0bc3e27d8aa2295fadee295424f95f43f16"},
+    {file = "botocore-1.34.133-py3-none-any.whl", hash = "sha256:f269dad8e17432d2527b97ed9f1fd30ec8dc705f8b818957170d1af484680ef2"},
+    {file = "botocore-1.34.133.tar.gz", hash = "sha256:5ea609aa4831a6589e32eef052a359ad8d7311733b4d86a9d35dab4bd3ec80ff"},
 ]
 
 [package.dependencies]
@@ -1501,33 +1501,33 @@ vision = ["Pillow (>=6.2.1)"]
 
 [[package]]
 name = "debugpy"
-version = "1.8.1"
+version = "1.8.2"
 description = "An implementation of the Debug Adapter Protocol for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"},
-    {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"},
-    {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"},
-    {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"},
-    {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"},
-    {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"},
-    {file = "debugpy-1.8.1-cp311-cp311-win32.whl", hash = "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"},
-    {file = "debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"},
-    {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"},
-    {file = "debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"},
-    {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"},
-    {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"},
-    {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"},
-    {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"},
-    {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"},
-    {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"},
-    {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"},
-    {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"},
-    {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"},
-    {file = "debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"},
-    {file = "debugpy-1.8.1-py2.py3-none-any.whl", hash = "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"},
-    {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"},
+    {file = "debugpy-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2"},
+    {file = "debugpy-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6"},
+    {file = "debugpy-1.8.2-cp310-cp310-win32.whl", hash = "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47"},
+    {file = "debugpy-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3"},
+    {file = "debugpy-1.8.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a"},
+    {file = "debugpy-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634"},
+    {file = "debugpy-1.8.2-cp311-cp311-win32.whl", hash = "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad"},
+    {file = "debugpy-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa"},
+    {file = "debugpy-1.8.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835"},
+    {file = "debugpy-1.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3"},
+    {file = "debugpy-1.8.2-cp312-cp312-win32.whl", hash = "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e"},
+    {file = "debugpy-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859"},
+    {file = "debugpy-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:5a019d4574afedc6ead1daa22736c530712465c0c4cd44f820d803d937531b2d"},
+    {file = "debugpy-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40f062d6877d2e45b112c0bbade9a17aac507445fd638922b1a5434df34aed02"},
+    {file = "debugpy-1.8.2-cp38-cp38-win32.whl", hash = "sha256:c78ba1680f1015c0ca7115671fe347b28b446081dada3fedf54138f44e4ba031"},
+    {file = "debugpy-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cf327316ae0c0e7dd81eb92d24ba8b5e88bb4d1b585b5c0d32929274a66a5210"},
+    {file = "debugpy-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1523bc551e28e15147815d1397afc150ac99dbd3a8e64641d53425dba57b0ff9"},
+    {file = "debugpy-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e24ccb0cd6f8bfaec68d577cb49e9c680621c336f347479b3fce060ba7c09ec1"},
+    {file = "debugpy-1.8.2-cp39-cp39-win32.whl", hash = "sha256:7f8d57a98c5a486c5c7824bc0b9f2f11189d08d73635c326abef268f83950326"},
+    {file = "debugpy-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:16c8dcab02617b75697a0a925a62943e26a0330da076e2a10437edd9f0bf3755"},
+    {file = "debugpy-1.8.2-py2.py3-none-any.whl", hash = "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca"},
+    {file = "debugpy-1.8.2.zip", hash = "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00"},
 ]
 
 [[package]]
@@ -2112,6 +2112,20 @@ files = [
     {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"},
 ]
 
+[[package]]
+name = "firecrawl-py"
+version = "0.0.16"
+description = "Python SDK for Firecrawl API"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "firecrawl_py-0.0.16-py3-none-any.whl", hash = "sha256:9024f483b501852a6b9c4e6cdfc9e8dde452d922afac357080bb278a0c9c2a26"},
+    {file = "firecrawl_py-0.0.16.tar.gz", hash = "sha256:6c662fa0a549bc7f5c0acb704baba6731869ca0451094034264dfc1b4eb086e4"},
+]
+
+[package.dependencies]
+requests = "*"
+
 [[package]]
 name = "flaml"
 version = "2.1.2"
@@ -4108,19 +4122,19 @@ adal = ["adal (>=1.0.2)"]
 
 [[package]]
 name = "langchain"
-version = "0.2.5"
+version = "0.2.6"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain-0.2.5-py3-none-any.whl", hash = "sha256:9aded9a65348254e1c93dcdaacffe4d1b6a5e7f74ef80c160c88ff78ad299228"},
-    {file = "langchain-0.2.5.tar.gz", hash = "sha256:ffdbf4fcea46a10d461bcbda2402220fcfd72a0c70e9f4161ae0510067b9b3bd"},
+    {file = "langchain-0.2.6-py3-none-any.whl", hash = "sha256:f86e8a7afd3e56f8eb5ba47f01dd00144fb9fc2f1db9873bd197347be2857aa4"},
+    {file = "langchain-0.2.6.tar.gz", hash = "sha256:867f6add370c1e3911b0e87d3dd0e36aec1e8f513bf06131340fe8f151d89dc5"},
 ]
 
 [package.dependencies]
 aiohttp = ">=3.8.3,<4.0.0"
 async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
-langchain-core = ">=0.2.7,<0.3.0"
+langchain-core = ">=0.2.10,<0.3.0"
 langchain-text-splitters = ">=0.2.0,<0.3.0"
 langsmith = ">=0.1.17,<0.2.0"
 numpy = [
@@ -4131,7 +4145,7 @@ pydantic = ">=1,<3"
 PyYAML = ">=5.3"
 requests = ">=2,<3"
 SQLAlchemy = ">=1.4,<3"
-tenacity = ">=8.1.0,<9.0.0"
+tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
 
 [[package]]
 name = "langchain-anthropic"
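The new tenacity marker carves exactly one release out of the allowed range — presumably working around a broken 8.4.0, though the lock file itself doesn't say. In PEP 440 terms the union collapses to a single exclusion, which can be checked with the packaging library:

import re  # not required; shown standalone below
from packaging.specifiers import SpecifierSet

# ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" is equivalent to one range minus 8.4.0:
spec = SpecifierSet(">=8.1.0,!=8.4.0,<9.0.0")
for version in ("8.3.0", "8.4.0", "8.4.1"):
    print(version, version in spec)
# 8.3.0 True / 8.4.0 False / 8.4.1 True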
@@ -4167,18 +4181,18 @@ numpy = ">=1,<2"
 
 [[package]]
 name = "langchain-aws"
-version = "0.1.7"
+version = "0.1.8"
 description = "An integration package connecting AWS and LangChain"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_aws-0.1.7-py3-none-any.whl", hash = "sha256:413f88cbb120cc1d6ca0e9f6d72b89c1d930b78ce071fef5b03e1595fc4d6029"},
-    {file = "langchain_aws-0.1.7.tar.gz", hash = "sha256:aa0bbd3e530e21fdc1d0459e97ee14fa387ce9bb2d00d721cf526e9c3ecea78f"},
+    {file = "langchain_aws-0.1.8-py3-none-any.whl", hash = "sha256:d1ade6d01af7d86f42c106bb32c08a99a38c84f54a3d669201362f42fd2684b8"},
+    {file = "langchain_aws-0.1.8.tar.gz", hash = "sha256:d1e5edbda092ddbeda45ef8245a494b5b4f6bef79ed5afd56054c7d348dfed74"},
 ]
 
 [package.dependencies]
-boto3 = ">=1.34.51,<1.35.0"
-langchain-core = ">=0.2.2,<0.3"
+boto3 = ">=1.34.127,<1.35.0"
+langchain-core = ">=0.2.6,<0.3"
 numpy = ">=1,<2"
 
 [[package]]
@@ -4215,20 +4229,20 @@ langchain-core = ">=0.2.0,<0.3"
 
 [[package]]
 name = "langchain-community"
-version = "0.2.5"
+version = "0.2.6"
 description = "Community contributed LangChain integrations."
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_community-0.2.5-py3-none-any.whl", hash = "sha256:bf37a334952e42c7676d083cf2d2c4cbfbb7de1949c4149fe19913e2b06c485f"},
-    {file = "langchain_community-0.2.5.tar.gz", hash = "sha256:476787b8c8c213b67e7b0eceb53346e787f00fbae12d8e680985bd4f93b0bf64"},
+    {file = "langchain_community-0.2.6-py3-none-any.whl", hash = "sha256:758cc800acfe5dd396bf8ba1b57c4792639ead0eab48ed0367f0732ec6ee1f68"},
+    {file = "langchain_community-0.2.6.tar.gz", hash = "sha256:40ce09a50ed798aa651ddb34c8978200fa8589b9813c7a28ce8af027bbf249f0"},
 ]
 
 [package.dependencies]
 aiohttp = ">=3.8.3,<4.0.0"
 dataclasses-json = ">=0.5.7,<0.7"
-langchain = ">=0.2.5,<0.3.0"
-langchain-core = ">=0.2.7,<0.3.0"
+langchain = ">=0.2.6,<0.3.0"
+langchain-core = ">=0.2.10,<0.3.0"
 langsmith = ">=0.1.0,<0.2.0"
 numpy = [
     {version = ">=1,<2", markers = "python_version < \"3.12\""},
@@ -4237,17 +4251,17 @@ numpy = [
 PyYAML = ">=5.3"
 requests = ">=2,<3"
 SQLAlchemy = ">=1.4,<3"
-tenacity = ">=8.1.0,<9.0.0"
+tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
 
 [[package]]
 name = "langchain-core"
-version = "0.2.9"
+version = "0.2.10"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_core-0.2.9-py3-none-any.whl", hash = "sha256:426a5a4fea95a5db995ba5ab560b76edd4998fb6fe52ccc28ac987092a4cbfcd"},
-    {file = "langchain_core-0.2.9.tar.gz", hash = "sha256:f1c59082642921727844e1cd0eb36d451edd1872c20e193aa3142aac03495986"},
+    {file = "langchain_core-0.2.10-py3-none-any.whl", hash = "sha256:6eb72086b6bc86db9812da98f79e507c2209a15c0112aefd214a04182ada8586"},
+    {file = "langchain_core-0.2.10.tar.gz", hash = "sha256:33d1fc234ab58c80476eb5bbde2107ef522a2ce8f46bdf47d9e1bd21e054208f"},
 ]
 
 [package.dependencies]
@@ -4263,18 +4277,18 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
 
 [[package]]
 name = "langchain-experimental"
-version = "0.0.61"
+version = "0.0.62"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_experimental-0.0.61-py3-none-any.whl", hash = "sha256:f9c516f528f55919743bd56fe1689a53bf74ae7f8902d64b9d8aebc61249cbe2"},
-    {file = "langchain_experimental-0.0.61.tar.gz", hash = "sha256:e9538efb994be5db3045cc582cddb9787c8299c86ffeee9d3779b7f58eef2226"},
+    {file = "langchain_experimental-0.0.62-py3-none-any.whl", hash = "sha256:9240f9e3490e819976f20a37863970036e7baacb7104b9eb6833d19ab6d518c9"},
+    {file = "langchain_experimental-0.0.62.tar.gz", hash = "sha256:9737fbc8429d24457ea4d368e3c9ba9ed1cace0564fb5f1a96a3027a588bd0ac"},
 ]
 
 [package.dependencies]
-langchain-community = ">=0.2.5,<0.3.0"
-langchain-core = ">=0.2.7,<0.3.0"
+langchain-community = ">=0.2.6,<0.3.0"
+langchain-core = ">=0.2.10,<0.3.0"
 
 [[package]]
 name = "langchain-google-genai"
@@ -4363,13 +4377,13 @@ pymongo = ">=4.6.1,<5.0"
 
 [[package]]
 name = "langchain-openai"
-version = "0.1.9"
+version = "0.1.10"
 description = "An integration package connecting OpenAI and LangChain"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_openai-0.1.9-py3-none-any.whl", hash = "sha256:afae71ef315c967685e53fe061470438000946739a9492d5f2d53bd4ae9d495a"},
-    {file = "langchain_openai-0.1.9.tar.gz", hash = "sha256:730a94d68208678b9b9f64e4959a87057e021d6600754ea8b954e7765c7a62f1"},
+    {file = "langchain_openai-0.1.10-py3-none-any.whl", hash = "sha256:62eb000980eb45e4f16c88acdbaeccf3d59266554b0dd3ce6bebea1bbe8143dd"},
+    {file = "langchain_openai-0.1.10.tar.gz", hash = "sha256:30f881f8ccaec28c054759837c41fd2a2264fcc5564728ce12e1715891a9ce3c"},
 ]
 
 [package.dependencies]
@@ -4395,20 +4409,17 @@ pinecone-client = ">=3.2.2,<4.0.0"
 
 [[package]]
 name = "langchain-text-splitters"
-version = "0.2.1"
+version = "0.2.2"
 description = "LangChain text splitting utilities"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_text_splitters-0.2.1-py3-none-any.whl", hash = "sha256:c2774a85f17189eaca50339629d2316d13130d4a8d9f1a1a96f3a03670c4a138"},
-    {file = "langchain_text_splitters-0.2.1.tar.gz", hash = "sha256:06853d17d7241ecf5c97c7b6ef01f600f9b0fb953dd997838142a527a4f32ea4"},
+    {file = "langchain_text_splitters-0.2.2-py3-none-any.whl", hash = "sha256:1c80d4b11b55e2995f02d2a326c0323ee1eeff24507329bb22924e420c782dff"},
+    {file = "langchain_text_splitters-0.2.2.tar.gz", hash = "sha256:a1e45de10919fa6fb080ef0525deab56557e9552083600455cb9fa4238076140"},
 ]
 
 [package.dependencies]
-langchain-core = ">=0.2.0,<0.3.0"
-
-[package.extras]
-extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"]
+langchain-core = ">=0.2.10,<0.3.0"
 
 [[package]]
 name = "langchainhub"
@@ -4442,7 +4453,7 @@ six = "*"
 
 [[package]]
 name = "langflow-base"
-version = "0.0.80"
+version = "0.0.81"
 description = "A Python package with a built-in web application"
 optional = false
 python-versions = ">=3.10,<3.13"
@@ -4460,6 +4471,7 @@ docstring-parser = "^0.15"
 duckdb = "^1.0.0"
 emoji = "^2.12.0"
 fastapi = "^0.111.0"
+firecrawl-py = "^0.0.16"
 gunicorn = "^22.0.0"
 httpx = "*"
 jq = {version = "^1.7.0", markers = "sys_platform != \"win32\""}
@@ -4543,13 +4555,13 @@ requests = ">=2,<3"
 
 [[package]]
 name = "litellm"
-version = "1.40.26"
+version = "1.40.27"
 description = "Library to easily interface with LLM API providers"
 optional = false
 python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
 files = [
-    {file = "litellm-1.40.26-py3-none-any.whl", hash = "sha256:5daedec00a3a8e32f55b3099190ccd9d550177f6e516823002831e6620ae771c"},
-    {file = "litellm-1.40.26.tar.gz", hash = "sha256:4dfd4ca3eb50a62600e60303c4975ba9fe7c52d07882d0d2b6bad2d474d88758"},
+    {file = "litellm-1.40.27-py3-none-any.whl", hash = "sha256:f6906e5260d784e7e31d579f5b28545e87517268cb96dd0dcaf31e4c5d34073f"},
+    {file = "litellm-1.40.27.tar.gz", hash = "sha256:a13a04168be5a8e52d43c34c2e657ca2521da61039ac39a17abc233a1875923f"},
 ]
 
 [package.dependencies]
@@ -5734,13 +5746,13 @@ sympy = "*"
 
 [[package]]
 name = "openai"
-version = "1.35.3"
+version = "1.35.5"
 description = "The official Python library for the openai API"
 optional = false
 python-versions = ">=3.7.1"
 files = [
-    {file = "openai-1.35.3-py3-none-any.whl", hash = "sha256:7b26544cef80f125431c073ffab3811d2421fbb9e30d3bd5c2436aba00b042d5"},
-    {file = "openai-1.35.3.tar.gz", hash = "sha256:d6177087f150b381d49499be782d764213fdf638d391b29ca692b84dd675a389"},
+    {file = "openai-1.35.5-py3-none-any.whl", hash = "sha256:28d92503c6e4b6a32a89277b36693023ef41f60922a4b5c8c621e8c5697ae3a6"},
+    {file = "openai-1.35.5.tar.gz", hash = "sha256:67ef289ae22d350cbf9381d83ae82c4e3596d71b7ad1cc886143554ee12fe0c9"},
 ]
 
 [package.dependencies]
@@ -7053,71 +7065,61 @@ windows-terminal = ["colorama (>=0.4.6)"]
 
 [[package]]
 name = "pymongo"
-version = "4.7.3"
+version = "4.8.0"
 description = "Python driver for MongoDB <http://www.mongodb.org>"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pymongo-4.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e9580b4537b3cc5d412070caabd1dabdf73fdce249793598792bac5782ecf2eb"},
-    {file = "pymongo-4.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:517243b2b189c98004570dd8fc0e89b1a48363d5578b3b99212fa2098b2ea4b8"},
-    {file = "pymongo-4.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23b1e9dabd61da1c7deb54d888f952f030e9e35046cebe89309b28223345b3d9"},
-    {file = "pymongo-4.7.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03e0f9901ad66c6fb7da0d303461377524d61dab93a4e4e5af44164c5bb4db76"},
-    {file = "pymongo-4.7.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a870824aa54453aee030bac08c77ebcf2fe8999400f0c2a065bebcbcd46b7f8"},
-    {file = "pymongo-4.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd7b3d3f4261bddbb74a332d87581bc523353e62bb9da4027cc7340f6fcbebc"},
-    {file = "pymongo-4.7.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d719a643ea6da46d215a3ba51dac805a773b611c641319558d8576cbe31cef8"},
-    {file = "pymongo-4.7.3-cp310-cp310-win32.whl", hash = "sha256:d8b1e06f361f3c66ee694cb44326e1a2e4f93bc9c3a4849ae8547889fca71154"},
-    {file = "pymongo-4.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:c450ab2f9397e2d5caa7fddeb4feb30bf719c47c13ae02c0bbb3b71bf4099c1c"},
-    {file = "pymongo-4.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79cc6459209e885ba097779eaa0fe7f2fa049db39ab43b1731cf8d065a4650e8"},
-    {file = "pymongo-4.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e2287f1e2cc35e73cd74a4867e398a97962c5578a3991c730ef78d276ca8e46"},
-    {file = "pymongo-4.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413506bd48d8c31ee100645192171e4773550d7cb940b594d5175ac29e329ea1"},
-    {file = "pymongo-4.7.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cc1febf17646d52b7561caa762f60bdfe2cbdf3f3e70772f62eb624269f9c05"},
-    {file = "pymongo-4.7.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dfcf18a49955d50a16c92b39230bd0668ffc9c164ccdfe9d28805182b48fa72"},
-    {file = "pymongo-4.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89872041196c008caddf905eb59d3dc2d292ae6b0282f1138418e76f3abd3ad6"},
-    {file = "pymongo-4.7.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3ed97b89de62ea927b672ad524de0d23f3a6b4a01c8d10e3d224abec973fbc3"},
-    {file = "pymongo-4.7.3-cp311-cp311-win32.whl", hash = "sha256:d2f52b38151e946011d888a8441d3d75715c663fc5b41a7ade595e924e12a90a"},
-    {file = "pymongo-4.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:4a4cc91c28e81c0ce03d3c278e399311b0af44665668a91828aec16527082676"},
-    {file = "pymongo-4.7.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cb30c8a78f5ebaca98640943447b6a0afcb146f40b415757c9047bf4a40d07b4"},
-    {file = "pymongo-4.7.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9cf2069f5d37c398186453589486ea98bb0312214c439f7d320593b61880dc05"},
-    {file = "pymongo-4.7.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3564f423958fced8a8c90940fd2f543c27adbcd6c7c6ed6715d847053f6200a0"},
-    {file = "pymongo-4.7.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a8af8a38fa6951fff73e6ff955a6188f829b29fed7c5a1b739a306b4aa56fe8"},
-    {file = "pymongo-4.7.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a0e81c8dba6d825272867d487f18764cfed3c736d71d7d4ff5b79642acbed42"},
-    {file = "pymongo-4.7.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88fc1d146feabac4385ea8ddb1323e584922922641303c8bf392fe1c36803463"},
-    {file = "pymongo-4.7.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4225100b2c5d1f7393d7c5d256ceb8b20766830eecf869f8ae232776347625a6"},
-    {file = "pymongo-4.7.3-cp312-cp312-win32.whl", hash = "sha256:5f3569ed119bf99c0f39ac9962fb5591eff02ca210fe80bb5178d7a1171c1b1e"},
-    {file = "pymongo-4.7.3-cp312-cp312-win_amd64.whl", hash = "sha256:eb383c54c0c8ba27e7712b954fcf2a0905fee82a929d277e2e94ad3a5ba3c7db"},
-    {file = "pymongo-4.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a46cffe91912570151617d866a25d07b9539433a32231ca7e7cf809b6ba1745f"},
-    {file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c3cba427dac50944c050c96d958c5e643c33a457acee03bae27c8990c5b9c16"},
-    {file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a5fd893edbeb7fa982f8d44b6dd0186b6cd86c89e23f6ef95049ff72bffe46"},
-    {file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c168a2fadc8b19071d0a9a4f85fe38f3029fe22163db04b4d5c046041c0b14bd"},
-    {file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c59c2c9e70f63a7f18a31e367898248c39c068c639b0579623776f637e8f482"},
-    {file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08165fd82c89d372e82904c3268bd8fe5de44f92a00e97bb1db1785154397d9"},
-    {file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:397fed21afec4fdaecf72f9c4344b692e489756030a9c6d864393e00c7e80491"},
-    {file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f903075f8625e2d228f1b9b9a0cf1385f1c41e93c03fd7536c91780a0fb2e98f"},
-    {file = "pymongo-4.7.3-cp37-cp37m-win32.whl", hash = "sha256:8ed1132f58c38add6b6138b771d0477a3833023c015c455d9a6e26f367f9eb5c"},
-    {file = "pymongo-4.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8d00a5d8fc1043a4f641cbb321da766699393f1b6f87c70fae8089d61c9c9c54"},
-    {file = "pymongo-4.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9377b868c38700c7557aac1bc4baae29f47f1d279cc76b60436e547fd643318c"},
-    {file = "pymongo-4.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:da4a6a7b4f45329bb135aa5096823637bd5f760b44d6224f98190ee367b6b5dd"},
-    {file = "pymongo-4.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487e2f9277f8a63ac89335ec4f1699ae0d96ebd06d239480d69ed25473a71b2c"},
-    {file = "pymongo-4.7.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db3d608d541a444c84f0bfc7bad80b0b897e0f4afa580a53f9a944065d9b633"},
-    {file = "pymongo-4.7.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e90af2ad3a8a7c295f4d09a2fbcb9a350c76d6865f787c07fe843b79c6e821d1"},
-    {file = "pymongo-4.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e28feb18dc559d50ededba27f9054c79f80c4edd70a826cecfe68f3266807b3"},
-    {file = "pymongo-4.7.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f21ecddcba2d9132d5aebd8e959de8d318c29892d0718420447baf2b9bccbb19"},
-    {file = "pymongo-4.7.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:26140fbb3f6a9a74bd73ed46d0b1f43d5702e87a6e453a31b24fad9c19df9358"},
-    {file = "pymongo-4.7.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:94baa5fc7f7d22c3ce2ac7bd92f7e03ba7a6875f2480e3b97a400163d6eaafc9"},
-    {file = "pymongo-4.7.3-cp38-cp38-win32.whl", hash = "sha256:92dd247727dd83d1903e495acc743ebd757f030177df289e3ba4ef8a8c561fad"},
-    {file = "pymongo-4.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:1c90c848a5e45475731c35097f43026b88ef14a771dfd08f20b67adc160a3f79"},
-    {file = "pymongo-4.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f598be401b416319a535c386ac84f51df38663f7a9d1071922bda4d491564422"},
-    {file = "pymongo-4.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:35ba90477fae61c65def6e7d09e8040edfdd3b7fd47c3c258b4edded60c4d625"},
-    {file = "pymongo-4.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aa8735955c70892634d7e61b0ede9b1eefffd3cd09ccabee0ffcf1bdfe62254"},
-    {file = "pymongo-4.7.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:82a97d8f7f138586d9d0a0cff804a045cdbbfcfc1cd6bba542b151e284fbbec5"},
-    {file = "pymongo-4.7.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de3b9db558930efab5eaef4db46dcad8bf61ac3ddfd5751b3e5ac6084a25e366"},
-    {file = "pymongo-4.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0e149217ef62812d3c2401cf0e2852b0c57fd155297ecc4dcd67172c4eca402"},
-    {file = "pymongo-4.7.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3a8a1ef4a824f5feb793b3231526d0045eadb5eb01080e38435dfc40a26c3e5"},
-    {file = "pymongo-4.7.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d14e5e89a4be1f10efc3d9dcb13eb7a3b2334599cb6bb5d06c6a9281b79c8e22"},
-    {file = "pymongo-4.7.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6bfa29f032fd4fd7b129520f8cdb51ab71d88c2ba0567cccd05d325f963acb5"},
-    {file = "pymongo-4.7.3-cp39-cp39-win32.whl", hash = "sha256:1421d0bd2ce629405f5157bd1aaa9b83f12d53a207cf68a43334f4e4ee312b66"},
-    {file = "pymongo-4.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:f7ee974f8b9370a998919c55b1050889f43815ab588890212023fecbc0402a6d"},
-    {file = "pymongo-4.7.3.tar.gz", hash = "sha256:6354a66b228f2cd399be7429685fb68e07f19110a3679782ecb4fdb68da03831"},
+    {file = "pymongo-4.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2b7bec27e047e84947fbd41c782f07c54c30c76d14f3b8bf0c89f7413fac67a"},
+    {file = "pymongo-4.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c68fe128a171493018ca5c8020fc08675be130d012b7ab3efe9e22698c612a1"},
+    {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920d4f8f157a71b3cb3f39bc09ce070693d6e9648fb0e30d00e2657d1dca4e49"},
+    {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52b4108ac9469febba18cea50db972605cc43978bedaa9fea413378877560ef8"},
+    {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:180d5eb1dc28b62853e2f88017775c4500b07548ed28c0bd9c005c3d7bc52526"},
+    {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aec2b9088cdbceb87e6ca9c639d0ff9b9d083594dda5ca5d3c4f6774f4c81b33"},
+    {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0cf61450feadca81deb1a1489cb1a3ae1e4266efd51adafecec0e503a8dcd84"},
+    {file = "pymongo-4.8.0-cp310-cp310-win32.whl", hash = "sha256:8b18c8324809539c79bd6544d00e0607e98ff833ca21953df001510ca25915d1"},
+    {file = "pymongo-4.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e5df28f74002e37bcbdfdc5109799f670e4dfef0fb527c391ff84f078050e7b5"},
+    {file = "pymongo-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b50040d9767197b77ed420ada29b3bf18a638f9552d80f2da817b7c4a4c9c68"},
+    {file = "pymongo-4.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:417369ce39af2b7c2a9c7152c1ed2393edfd1cbaf2a356ba31eb8bcbd5c98dd7"},
+    {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf821bd3befb993a6db17229a2c60c1550e957de02a6ff4dd0af9476637b2e4d"},
+    {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9365166aa801c63dff1a3cb96e650be270da06e3464ab106727223123405510f"},
+    {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc8b8582f4209c2459b04b049ac03c72c618e011d3caa5391ff86d1bda0cc486"},
+    {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e5019f75f6827bb5354b6fef8dfc9d6c7446894a27346e03134d290eb9e758"},
+    {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b5802151fc2b51cd45492c80ed22b441d20090fb76d1fd53cd7760b340ff554"},
+    {file = "pymongo-4.8.0-cp311-cp311-win32.whl", hash = "sha256:4bf58e6825b93da63e499d1a58de7de563c31e575908d4e24876234ccb910eba"},
+    {file = "pymongo-4.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b747c0e257b9d3e6495a018309b9e0c93b7f0d65271d1d62e572747f4ffafc88"},
+    {file = "pymongo-4.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e6a720a3d22b54183352dc65f08cd1547204d263e0651b213a0a2e577e838526"},
+    {file = "pymongo-4.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31e4d21201bdf15064cf47ce7b74722d3e1aea2597c6785882244a3bb58c7eab"},
+    {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b804bb4f2d9dc389cc9e827d579fa327272cdb0629a99bfe5b83cb3e269ebf"},
+    {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2fbdb87fe5075c8beb17a5c16348a1ea3c8b282a5cb72d173330be2fecf22f5"},
+    {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd39455b7ee70aabee46f7399b32ab38b86b236c069ae559e22be6b46b2bbfc4"},
+    {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940d456774b17814bac5ea7fc28188c7a1338d4a233efbb6ba01de957bded2e8"},
+    {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:236bbd7d0aef62e64caf4b24ca200f8c8670d1a6f5ea828c39eccdae423bc2b2"},
+    {file = "pymongo-4.8.0-cp312-cp312-win32.whl", hash = "sha256:47ec8c3f0a7b2212dbc9be08d3bf17bc89abd211901093e3ef3f2adea7de7a69"},
+    {file = "pymongo-4.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e84bc7707492f06fbc37a9f215374d2977d21b72e10a67f1b31893ec5a140ad8"},
+    {file = "pymongo-4.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:519d1bab2b5e5218c64340b57d555d89c3f6c9d717cecbf826fb9d42415e7750"},
+    {file = "pymongo-4.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87075a1feb1e602e539bdb1ef8f4324a3427eb0d64208c3182e677d2c0718b6f"},
+    {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f53429515d2b3e86dcc83dadecf7ff881e538c168d575f3688698a8707b80a"},
+    {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdc20cd1e1141b04696ffcdb7c71e8a4a665db31fe72e51ec706b3bdd2d09f36"},
+    {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:284d0717d1a7707744018b0b6ee7801b1b1ff044c42f7be7a01bb013de639470"},
+    {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bf0eb8b6ef40fa22479f09375468c33bebb7fe49d14d9c96c8fd50355188b0"},
+    {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ecd71b9226bd1d49416dc9f999772038e56f415a713be51bf18d8676a0841c8"},
+    {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e0061af6e8c5e68b13f1ec9ad5251247726653c5af3c0bbdfbca6cf931e99216"},
+    {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:658d0170f27984e0d89c09fe5c42296613b711a3ffd847eb373b0dbb5b648d5f"},
+    {file = "pymongo-4.8.0-cp38-cp38-win32.whl", hash = "sha256:3ed1c316718a2836f7efc3d75b4b0ffdd47894090bc697de8385acd13c513a70"},
+    {file = "pymongo-4.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:7148419eedfea9ecb940961cfe465efaba90595568a1fb97585fb535ea63fe2b"},
+    {file = "pymongo-4.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8400587d594761e5136a3423111f499574be5fd53cf0aefa0d0f05b180710b0"},
+    {file = "pymongo-4.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af3e98dd9702b73e4e6fd780f6925352237f5dce8d99405ff1543f3771201704"},
+    {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de3a860f037bb51f968de320baef85090ff0bbb42ec4f28ec6a5ddf88be61871"},
+    {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fc18b3a093f3db008c5fea0e980dbd3b743449eee29b5718bc2dc15ab5088bb"},
+    {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18c9d8f975dd7194c37193583fd7d1eb9aea0c21ee58955ecf35362239ff31ac"},
+    {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:408b2f8fdbeca3c19e4156f28fff1ab11c3efb0407b60687162d49f68075e63c"},
+    {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6564780cafd6abeea49759fe661792bd5a67e4f51bca62b88faab497ab5fe89"},
+    {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d18d86bc9e103f4d3d4f18b85a0471c0e13ce5b79194e4a0389a224bb70edd53"},
+    {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9097c331577cecf8034422956daaba7ec74c26f7b255d718c584faddd7fa2e3c"},
+    {file = "pymongo-4.8.0-cp39-cp39-win32.whl", hash = "sha256:d5428dbcd43d02f6306e1c3c95f692f68b284e6ee5390292242f509004c9e3a8"},
+    {file = "pymongo-4.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:ef7225755ed27bfdb18730c68f6cb023d06c28f2b734597480fb4c0e500feb6f"},
+    {file = "pymongo-4.8.0.tar.gz", hash = "sha256:454f2295875744dc70f1881e4b2eb99cdad008a33574bc8aaf120530f66c0cde"},
 ]
 
 [package.dependencies]
@@ -7125,6 +7127,7 @@ dnspython = ">=1.16.0,<3.0.0"
 
 [package.extras]
 aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"]
+docs = ["furo (==2023.9.10)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<8)", "sphinx-rtd-theme (>=2,<3)", "sphinxcontrib-shellcheck (>=1,<2)"]
 encryption = ["certifi", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.6.0,<2.0.0)"]
 gssapi = ["pykerberos", "winkerberos (>=0.5.0)"]
 ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
@@ -7866,13 +7869,13 @@ websockets = ">=11,<13"
 
 [[package]]
 name = "redis"
-version = "5.0.6"
+version = "5.0.7"
 description = "Python client for Redis database and key-value store"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"},
-    {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"},
+    {file = "redis-5.0.7-py3-none-any.whl", hash = "sha256:0e479e24da960c690be5d9b96d21f7b918a98c0cf49af3b6fafaa0753f93a0db"},
+    {file = "redis-5.0.7.tar.gz", hash = "sha256:8f611490b93c8109b50adc317b31bfd84fff31def3475b92e7e80bf39f48175b"},
 ]
 
 [package.dependencies]
@@ -8347,13 +8350,13 @@ dev = ["pre-commit", "pytest", "ruff (>=0.3.0)"]
 
 [[package]]
 name = "sentry-sdk"
-version = "2.6.0"
+version = "2.7.0"
 description = "Python client for Sentry (https://sentry.io)"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "sentry_sdk-2.6.0-py2.py3-none-any.whl", hash = "sha256:422b91cb49378b97e7e8d0e8d5a1069df23689d45262b86f54988a7db264e874"},
-    {file = "sentry_sdk-2.6.0.tar.gz", hash = "sha256:65cc07e9c6995c5e316109f138570b32da3bd7ff8d0d0ee4aaf2628c3dd8127d"},
+    {file = "sentry_sdk-2.7.0-py2.py3-none-any.whl", hash = "sha256:db9594c27a4d21c1ebad09908b1f0dc808ef65c2b89c1c8e7e455143262e37c1"},
+    {file = "sentry_sdk-2.7.0.tar.gz", hash = "sha256:d846a211d4a0378b289ced3c434480945f110d0ede00450ba631fc2852e7a0d4"},
 ]
 
 [package.dependencies]
@@ -8385,7 +8388,7 @@ langchain = ["langchain (>=0.0.210)"]
 loguru = ["loguru (>=0.5)"]
 openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
 opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
-opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"]
+opentelemetry-experimental = ["opentelemetry-instrumentation-aio-pika (==0.46b0)", "opentelemetry-instrumentation-aiohttp-client (==0.46b0)", "opentelemetry-instrumentation-aiopg (==0.46b0)", "opentelemetry-instrumentation-asgi (==0.46b0)", "opentelemetry-instrumentation-asyncio (==0.46b0)", "opentelemetry-instrumentation-asyncpg (==0.46b0)", "opentelemetry-instrumentation-aws-lambda (==0.46b0)", "opentelemetry-instrumentation-boto (==0.46b0)", "opentelemetry-instrumentation-boto3sqs (==0.46b0)", "opentelemetry-instrumentation-botocore (==0.46b0)", "opentelemetry-instrumentation-cassandra (==0.46b0)", "opentelemetry-instrumentation-celery (==0.46b0)", "opentelemetry-instrumentation-confluent-kafka (==0.46b0)", "opentelemetry-instrumentation-dbapi (==0.46b0)", "opentelemetry-instrumentation-django (==0.46b0)", "opentelemetry-instrumentation-elasticsearch (==0.46b0)", "opentelemetry-instrumentation-falcon (==0.46b0)", "opentelemetry-instrumentation-fastapi (==0.46b0)", "opentelemetry-instrumentation-flask (==0.46b0)", "opentelemetry-instrumentation-grpc (==0.46b0)", "opentelemetry-instrumentation-httpx (==0.46b0)", "opentelemetry-instrumentation-jinja2 (==0.46b0)", "opentelemetry-instrumentation-kafka-python (==0.46b0)", "opentelemetry-instrumentation-logging (==0.46b0)", "opentelemetry-instrumentation-mysql (==0.46b0)", "opentelemetry-instrumentation-mysqlclient (==0.46b0)", "opentelemetry-instrumentation-pika (==0.46b0)", "opentelemetry-instrumentation-psycopg (==0.46b0)", "opentelemetry-instrumentation-psycopg2 (==0.46b0)", "opentelemetry-instrumentation-pymemcache (==0.46b0)", "opentelemetry-instrumentation-pymongo (==0.46b0)", "opentelemetry-instrumentation-pymysql (==0.46b0)", "opentelemetry-instrumentation-pyramid (==0.46b0)", "opentelemetry-instrumentation-redis (==0.46b0)", "opentelemetry-instrumentation-remoulade (==0.46b0)", "opentelemetry-instrumentation-requests (==0.46b0)", "opentelemetry-instrumentation-sklearn (==0.46b0)", "opentelemetry-instrumentation-sqlalchemy (==0.46b0)", "opentelemetry-instrumentation-sqlite3 (==0.46b0)", "opentelemetry-instrumentation-starlette (==0.46b0)", "opentelemetry-instrumentation-system-metrics (==0.46b0)", "opentelemetry-instrumentation-threading (==0.46b0)", "opentelemetry-instrumentation-tornado (==0.46b0)", "opentelemetry-instrumentation-tortoiseorm (==0.46b0)", "opentelemetry-instrumentation-urllib (==0.46b0)", "opentelemetry-instrumentation-urllib3 (==0.46b0)", "opentelemetry-instrumentation-wsgi (==0.46b0)"]
 pure-eval = ["asttokens", "executing", "pure-eval"]
 pymongo = ["pymongo (>=3.1)"]
 pyspark = ["pyspark (>=2.4.4)"]
@@ -10552,4 +10555,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<3.13"
-content-hash = "f7377e3a997651cbcec2b9227b0bcde2507afc7d6236b708f4dc62857f150578"
+content-hash = "3e72b6faa1c674615a7e5dec3e7d962349e736bf6675c08a49080b7f336cc75b"
pyproject.toml
@@ -92,6 +92,7 @@ unstructured = {extras = ["docx", "md", "pptx"], version = "^0.14.4"}
 langchain-aws = "^0.1.6"
 langchain-mongodb = "^0.1.6"
 kubernetes = "^30.1.0"
+firecrawl-py = "^0.0.16"
 
 
 [tool.poetry.group.dev.dependencies]
@@ -146,7 +147,7 @@ ignore-regex = '.*(Stati Uniti|Tense=Pres).*'
 minversion = "6.0"
 testpaths = ["tests", "integration"]
 console_output_style = "progress"
-filterwarnings = ["ignore::DeprecationWarning"]
+filterwarnings = ["ignore::DeprecationWarning", "ignore::ResourceWarning"]
 log_cli = true
 markers = ["async_test", "api_key_required"]
 
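The added filter mutes ResourceWarning (unclosed files, sockets and the like) for the whole test session. A standalone sketch of the equivalent warnings call pytest makes from this setting:

import warnings

# pytest turns filterwarnings = ["ignore::ResourceWarning"] into roughly:
warnings.filterwarnings("ignore", category=ResourceWarning)

# A warning of that class is now dropped instead of surfacing in the run.
warnings.warn("unclosed file <_io.TextIOWrapper ...>", ResourceWarning)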
@@ -276,15 +276,15 @@ async def webhook_run_flow(
     # get all webhook components in the flow
     webhook_components = get_all_webhook_components_in_flow(flow.data)
     tweaks = {}
-    data_dict = await request.json()
 
     for component in webhook_components:
+        tweaks[component["id"]] = {"data": data.decode() if isinstance(data, bytes) else data}
     input_request = SimplifiedAPIRequest(
-        input_value=data_dict.get("input_value", ""),
-        input_type=data_dict.get("input_type", "chat"),
-        output_type=data_dict.get("output_type", "chat"),
+        input_value="",
+        input_type="chat",
+        output_type="chat",
         tweaks=tweaks,
-        session_id=data_dict.get("session_id"),
+        session_id=None,
     )
     logger.debug("Starting background task")
     background_tasks.add_task(  # type: ignore
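After this change the webhook endpoint no longer reads routing fields out of the JSON body; it forwards the raw payload to every webhook component through tweaks and always runs with chat defaults. A minimal sketch of calling it (the endpoint path and flow id below are assumptions, not taken from this diff):

import requests

# Hypothetical deployment URL and flow id.
url = "http://localhost:7860/api/v1/webhook/my-flow-id"

# The whole body is handed to the flow's webhook components verbatim;
# keys like input_value/input_type/session_id are no longer honored here.
resp = requests.post(url, json={"ticket": 42, "action": "triage"}, timeout=10)
resp.raise_for_status()
print(resp.json())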
@@ -131,8 +131,8 @@ async def list_profile_pictures(storage_service: StorageService = Depends(get_st
     people = await storage_service.list_files(flow_id=people_path)  # type: ignore
     space = await storage_service.list_files(flow_id=space_path)  # type: ignore
 
-    files = [Path("People") / i for i in people]
-    files += [Path("Space") / i for i in space]
+    files = [f"People/{i}" for i in people]
+    files += [f"Space/{i}" for i in space]
 
     return {"files": files}
 
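A plausible motivation for the string switch (an inference; the diff gives no rationale): pathlib renders with the host OS separator, so serializing Path objects could emit backslashes on Windows, while the f-strings pin URL-style forward slashes. A quick illustration:

from pathlib import PurePosixPath, PureWindowsPath

# The same join renders differently per platform flavor:
print(PureWindowsPath("People") / "042.svg")  # People\042.svg
print(PurePosixPath("People") / "042.svg")    # People/042.svg
print(f"People/{'042.svg'}")                  # People/042.svg everywhere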
@@ -1,5 +1,5 @@
-from datetime import datetime, timezone
 import re
+from datetime import datetime, timezone
 from typing import List
 from uuid import UUID
 
@@ -211,8 +211,7 @@ def update_flow(
     if settings_service.settings.remove_api_keys:
         flow_data = remove_api_keys(flow_data)
     for key, value in flow_data.items():
-        if value is not None:
-            setattr(db_flow, key, value)
+        setattr(db_flow, key, value)
     webhook_component = get_webhook_component_in_flow(db_flow.data)
     db_flow.webhook = webhook_component is not None
     db_flow.updated_at = datetime.now(timezone.utc)
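A side effect worth noting (inferred from the hunk, not stated in it): without the None guard, explicit nulls in the update payload now overwrite stored attributes. A toy sketch of the difference:

class Flow:
    name = "old name"
    description = "keep me"

payload = {"name": "new name", "description": None}
flow = Flow()

# New behavior: every key is applied, so a None value clobbers the field.
for key, value in payload.items():
    setattr(flow, key, value)

assert flow.name == "new name"
assert flow.description is None  # previously it would have stayed "keep me"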
@@ -1,6 +1,6 @@
 from typing import Any
 from langflow.custom import Component
-from langflow.inputs.inputs import DictInput, SecretStrInput, MessageTextInput
+from langflow.inputs.inputs import DictInput, SecretStrInput, MessageTextInput, DropdownInput
 from langflow.template.field.base import Output
 
 
@@ -10,32 +10,77 @@ class AstraVectorize(Component):
     documentation: str = "https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html"
     icon = "AstraDB"
 
+    VECTORIZE_PROVIDERS_MAPPING = {
+        "Azure OpenAI": ["azureOpenAI", ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]],
+        "Hugging Face - Dedicated": ["huggingfaceDedicated", ["endpoint-defined-model"]],
+        "Hugging Face - Serverless": [
+            "huggingface",
+            [
+                "sentence-transformers/all-MiniLM-L6-v2",
+                "intfloat/multilingual-e5-large",
+                "intfloat/multilingual-e5-large-instruct",
+                "BAAI/bge-small-en-v1.5",
+                "BAAI/bge-base-en-v1.5",
+                "BAAI/bge-large-en-v1.5",
+            ],
+        ],
+        "Jina AI": [
+            "jinaAI",
+            [
+                "jina-embeddings-v2-base-en",
+                "jina-embeddings-v2-base-de",
+                "jina-embeddings-v2-base-es",
+                "jina-embeddings-v2-base-code",
+                "jina-embeddings-v2-base-zh",
+            ],
+        ],
+        "Mistral AI": ["mistral", ["mistral-embed"]],
+        "NVIDIA": ["nvidia", ["NV-Embed-QA"]],
+        "OpenAI": ["openai", ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]],
+        "Upstage": ["upstageAI", ["solar-embedding-1-large"]],
+        "Voyage AI": [
+            "voyageAI",
+            ["voyage-large-2-instruct", "voyage-law-2", "voyage-code-2", "voyage-large-2", "voyage-2"],
+        ],
+    }
+    VECTORIZE_MODELS_STR = "\n\n".join(
+        [provider + ": " + (", ".join(models[1])) for provider, models in VECTORIZE_PROVIDERS_MAPPING.items()]
+    )
 
     inputs = [
-        MessageTextInput(
+        DropdownInput(
             name="provider",
             display_name="Provider name",
             info="The embedding provider to use.",
+            options=VECTORIZE_PROVIDERS_MAPPING.keys(),
+            value="",
         ),
         MessageTextInput(
             name="model_name",
             display_name="Model name",
-            info="The embedding model to use.",
+            info=f"The embedding model to use for the selected provider. Each provider has a different set of models "
+            f"available (full list at https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html):\n\n{VECTORIZE_MODELS_STR}",
+            required=True,
         ),
         MessageTextInput(
            name="api_key_name",
            display_name="API Key name",
            info="The name of the embeddings provider API key stored on Astra. If set, it will override the 'ProviderKey' in the authentication parameters.",
        ),
         DictInput(
             name="authentication",
-            display_name="Authentication",
-            info="Authentication parameters. Use the Astra Portal to add the embedding provider integration to your Astra organization.",
+            display_name="Authentication parameters",
             is_list=True,
             advanced=True,
         ),
         SecretStrInput(
             name="provider_api_key",
             display_name="Provider API Key",
             info="An alternative to the Astra Authentication that let you use directly the API key of the provider.",
             advanced=True,
         ),
+        DictInput(
+            name="model_parameters",
+            display_name="Model parameters",
+            info="Additional model parameters.",
+            advanced=True,
+            is_list=True,
+        ),
@@ -45,12 +90,17 @@ class AstraVectorize(Component):
     ]
 
     def build_options(self) -> dict[str, Any]:
+        provider_value = self.VECTORIZE_PROVIDERS_MAPPING[self.provider][0]
+        authentication = {**self.authentication}
+        api_key_name = self.api_key_name
+        if api_key_name:
+            authentication["providerKey"] = api_key_name
         return {
             # must match exactly astra CollectionVectorServiceOptions
             "collection_vector_service_options": {
-                "provider": self.provider,
+                "provider": provider_value,
                 "modelName": self.model_name,
-                "authentication": self.authentication,
+                "authentication": authentication,
                 "parameters": self.model_parameters,
             },
             "collection_embedding_api_key": self.provider_api_key,

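For orientation only — a sketch of how the dictionary returned by build_options() lines up with astrapy's CollectionVectorServiceOptions, which the inline comment above references. The import path and the pre-built `component` instance are assumptions, not part of this diff:

    from astrapy.info import CollectionVectorServiceOptions  # assumed import path (astrapy 1.x)

    options = component.build_options()  # `component`: a configured AstraVectorize instance
    svc = options["collection_vector_service_options"]
    # The dict keys are chosen to map one-to-one onto the constructor arguments:
    vectorize = CollectionVectorServiceOptions(
        provider=svc["provider"],
        model_name=svc["modelName"],
        authentication=svc["authentication"],
        parameters=svc["parameters"],
    )
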
@@ -0,0 +1,89 @@
+import uuid
+from typing import Optional
+
+from langflow.custom import CustomComponent
+from langflow.schema import Data
+
+
+class FirecrawlCrawlApi(CustomComponent):
+    display_name: str = "FirecrawlCrawlApi"
+    description: str = "Firecrawl Crawl API."
+    output_types: list[str] = ["Document"]
+    documentation: str = "https://docs.firecrawl.dev/api-reference/endpoint/crawl"
+    field_config = {
+        "api_key": {
+            "display_name": "API Key",
+            "field_type": "str",
+            "required": True,
+            "password": True,
+            "info": "The API key to use Firecrawl API.",
+        },
+        "url": {
+            "display_name": "URL",
+            "field_type": "str",
+            "required": True,
+            "info": "The base URL to start crawling from.",
+        },
+        "timeout": {
+            "display_name": "Timeout",
+            "field_type": "int",
+            "info": "The timeout in milliseconds.",
+        },
+        "crawlerOptions": {
+            "display_name": "Crawler Options",
+            "info": "Options for the crawler behavior.",
+        },
+        "pageOptions": {
+            "display_name": "Page Options",
+            "info": "The page options to send with the request.",
+        },
+        "idempotency_key": {
+            "display_name": "Idempotency Key",
+            "field_type": "str",
+            "info": "Optional idempotency key to ensure unique requests.",
+        },
+    }
+
+    def build(
+        self,
+        api_key: str,
+        url: str,
+        timeout: int = 30000,
+        crawlerOptions: Optional[Data] = None,
+        pageOptions: Optional[Data] = None,
+        idempotency_key: Optional[str] = None,
+    ) -> Data:
+        try:
+            from firecrawl.firecrawl import FirecrawlApp  # type: ignore
+        except ImportError:
+            raise ImportError(
+                "Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`."
+            )
+
+        if crawlerOptions:
+            crawler_options_dict = crawlerOptions.__dict__["data"]["text"]
+        else:
+            crawler_options_dict = {}
+
+        if pageOptions:
+            page_options_dict = pageOptions.__dict__["data"]["text"]
+        else:
+            page_options_dict = {}
+
+        if not idempotency_key:
+            idempotency_key = str(uuid.uuid4())
+
+        app = FirecrawlApp(api_key=api_key)
+        crawl_result = app.crawl_url(
+            url,
+            {
+                "crawlerOptions": crawler_options_dict,
+                "pageOptions": page_options_dict,
+            },
+            True,
+            int(timeout / 1000),
+            idempotency_key,
+        )
+
+        records = Data(data={"results": crawl_result})
+        return records

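A minimal usage sketch of the new component (placeholder key and URL; assumes firecrawl-py 0.0.16 is installed, mirroring the call shape above):

    from langflow.schema import Data

    component = FirecrawlCrawlApi()
    result: Data = component.build(
        api_key="fc-...",           # placeholder, not a real credential
        url="https://example.com",  # placeholder start URL
        timeout=30000,              # milliseconds; build() converts to seconds for crawl_url
    )
    print(result.data["results"])   # the crawl results returned by Firecrawl
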
@@ -0,0 +1,77 @@
+from typing import Optional
+
+from langflow.custom import CustomComponent
+from langflow.schema import Data
+
+
+class FirecrawlScrapeApi(CustomComponent):
+    display_name: str = "FirecrawlScrapeApi"
+    description: str = "Firecrawl Scrape API."
+    output_types: list[str] = ["Document"]
+    documentation: str = "https://docs.firecrawl.dev/api-reference/endpoint/scrape"
+    field_config = {
+        "api_key": {
+            "display_name": "API Key",
+            "field_type": "str",
+            "required": True,
+            "password": True,
+            "info": "The API key to use Firecrawl API.",
+        },
+        "url": {
+            "display_name": "URL",
+            "field_type": "str",
+            "required": True,
+            "info": "The URL to scrape.",
+        },
+        "timeout": {
+            "display_name": "Timeout",
+            "info": "Timeout in milliseconds for the request.",
+            "field_type": "int",
+            "default_value": 10000,
+        },
+        "pageOptions": {
+            "display_name": "Page Options",
+            "info": "The page options to send with the request.",
+        },
+        "extractorOptions": {
+            "display_name": "Extractor Options",
+            "info": "The extractor options to send with the request.",
+        },
+    }
+
+    def build(
+        self,
+        api_key: str,
+        url: str,
+        timeout: Optional[int] = 10000,
+        pageOptions: Optional[Data] = None,
+        extractorOptions: Optional[Data] = None,
+    ) -> Data:
+        try:
+            from firecrawl.firecrawl import FirecrawlApp  # type: ignore
+        except ImportError:
+            raise ImportError(
+                "Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`."
+            )
+        if extractorOptions:
+            extractor_options_dict = extractorOptions.__dict__["data"]["text"]
+        else:
+            extractor_options_dict = {}
+
+        if pageOptions:
+            page_options_dict = pageOptions.__dict__["data"]["text"]
+        else:
+            page_options_dict = {}
+
+        app = FirecrawlApp(api_key=api_key)
+        results = app.scrape_url(
+            url,
+            {
+                "timeout": str(timeout),
+                "extractorOptions": extractor_options_dict,
+                "pageOptions": page_options_dict,
+            },
+        )
+
+        record = Data(data=results)
+        return record

@@ -34,6 +34,12 @@ class OpenAIModelComponent(LCModelComponent):
             info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
         ),
         DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True),
+        BoolInput(
+            name="json_mode",
+            display_name="JSON Mode",
+            advanced=True,
+            info="If True, it will output JSON regardless of passing a schema.",
+        ),
         DictInput(
             name="output_schema",
             is_list=True,

@@ -84,7 +90,7 @@ class OpenAIModelComponent(LCModelComponent):
         max_tokens = self.max_tokens
         model_kwargs = self.model_kwargs or {}
         openai_api_base = self.openai_api_base or "https://api.openai.com/v1"
-        json_mode = bool(output_schema_dict)
+        json_mode = bool(output_schema_dict) or self.json_mode
         seed = self.seed
         model_kwargs["seed"] = seed

@@ -101,7 +107,10 @@ class OpenAIModelComponent(LCModelComponent):
             temperature=temperature or 0.1,
         )
         if json_mode:
-            output = output.with_structured_output(schema=output_schema_dict, method="json_mode")  # type: ignore
+            if output_schema_dict:
+                output = output.with_structured_output(schema=output_schema_dict, method="json_mode")  # type: ignore
+            else:
+                output = output.bind(response_format={"type": "json_object"})  # type: ignore
 
         return output

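The new branching can be summarized in a standalone sketch (`output` is any LangChain chat model; both calls are the standard Runnable helpers used in the hunk):

    def apply_json_mode(output, output_schema_dict: dict | None, json_mode: bool):
        # json_mode is True when the user enabled it or when a schema was provided.
        if json_mode:
            if output_schema_dict:
                # Schema present: parse the JSON output into the schema.
                output = output.with_structured_output(schema=output_schema_dict, method="json_mode")
            else:
                # No schema: only force the model to emit a JSON object.
                output = output.bind(response_format={"type": "json_object"})
        return output
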
@@ -4,6 +4,7 @@ from langchain_community.vectorstores import Cassandra
 
 from langflow.base.vectorstores.model import LCVectorStoreComponent
 from langflow.helpers.data import docs_to_data
+from langflow.inputs import DictInput
 from langflow.io import (
     DataInput,
     DropdownInput,

@@ -23,24 +24,32 @@ class CassandraVectorStoreComponent(LCVectorStoreComponent):
     icon = "Cassandra"
 
     inputs = [
-        SecretStrInput(
-            name="token",
-            display_name="Token",
-            info="Authentication token for accessing Cassandra on Astra DB.",
+        MessageTextInput(
+            name="database_ref",
+            display_name="Contact Points / Astra Database ID",
+            info="Contact points for the database (or AstraDB database ID)",
             required=True,
         ),
-        MessageTextInput(name="database_id", display_name="Database ID", info="The Astra database ID.", required=True),
         MessageTextInput(
-            name="table_name",
-            display_name="Table Name",
-            info="The name of the table where vectors will be stored.",
+            name="username", display_name="Username", info="Username for the database (leave empty for AstraDB)."
         ),
+        SecretStrInput(
+            name="token",
+            display_name="Password / AstraDB Token",
+            info="User password for the database (or AstraDB token).",
+            required=True,
+        ),
         MessageTextInput(
             name="keyspace",
             display_name="Keyspace",
-            info="Optional key space within Astra DB. The keyspace should already be created.",
-            advanced=False,
+            info="Table Keyspace (or AstraDB namespace).",
             required=True,
         ),
+        MessageTextInput(
+            name="table_name",
+            display_name="Table Name",
+            info="The name of the table (or AstraDB collection) where vectors will be stored.",
+            required=True,
+        ),
         IntInput(
             name="ttl_seconds",

@@ -69,6 +78,13 @@ class CassandraVectorStoreComponent(LCVectorStoreComponent):
             value="Sync",
             advanced=True,
         ),
+        DictInput(
+            name="cluster_kwargs",
+            display_name="Cluster arguments",
+            info="Optional dictionary of additional keyword arguments for the Cassandra cluster.",
+            advanced=True,
+            is_list=True,
+        ),
         MultilineInput(name="search_query", display_name="Search Query"),
         DataInput(
             name="ingest_data",

@@ -96,10 +112,35 @@ class CassandraVectorStoreComponent(LCVectorStoreComponent):
                 "Could not import cassio integration package. " "Please install it with `pip install cassio`."
             )
 
-        cassio.init(
-            database_id=self.database_id,
-            token=self.token,
-        )
+        from uuid import UUID
+
+        database_ref = self.database_ref
+
+        try:
+            UUID(self.database_ref)
+            is_astra = True
+        except ValueError:
+            is_astra = False
+            if "," in self.database_ref:
+                # use a copy because we can't change the type of the parameter
+                database_ref = self.database_ref.split(",")
+
+        if is_astra:
+            cassio.init(
+                database_id=database_ref,
+                token=self.token,
+                cluster_kwargs=self.cluster_kwargs,
+            )
+        else:
+            cassio.init(
+                contact_points=database_ref,
+                username=self.username,
+                password=self.token,
+                cluster_kwargs=self.cluster_kwargs,
+            )
 
         if not self.ttl_seconds:  # type: ignore
             self.ttl_seconds = None
 
         documents = []

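The Astra-versus-Cassandra detection introduced here boils down to a UUID parse; a self-contained sketch of the same logic:

    from uuid import UUID

    def classify_database_ref(database_ref: str):
        """Astra DB ids are UUIDs; anything else is one or more contact points."""
        try:
            UUID(database_ref)
            return "astra", database_ref
        except ValueError:
            # A comma-separated string becomes a list of contact points.
            return "cassandra", (database_ref.split(",") if "," in database_ref else database_ref)

    print(classify_database_ref("6f1c2a9e-8f4b-4c6d-9a2e-0b1c2d3e4f5a"))  # ('astra', '6f1c...')
    print(classify_database_ref("10.0.0.1,10.0.0.2"))  # ('cassandra', ['10.0.0.1', '10.0.0.2'])
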
@@ -343,10 +343,10 @@ class Graph:
         except Exception as exc:
             logger.exception(exc)
             tb = traceback.format_exc()
-            await self.end_all_traces(error=f"{exc.__class__.__name__}: {exc}\n\n{tb}")
+            asyncio.create_task(self.end_all_traces(error=f"{exc.__class__.__name__}: {exc}\n\n{tb}"))
             raise ValueError(f"Error running graph: {exc}") from exc
         finally:
-            await self.end_all_traces()
+            asyncio.create_task(self.end_all_traces())
         # Get the outputs
         vertex_outputs = []
         for vertex in self.vertices:

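The switch from `await` to `asyncio.create_task` makes trace teardown fire-and-forget; a toy illustration of the tradeoff (scheduling without blocking, at the cost of no completion guarantee):

    import asyncio

    async def end_all_traces():
        await asyncio.sleep(0.05)  # stand-in for flushing traces to a backend
        print("traces flushed")

    async def run_graph():
        # Scheduled but not awaited: run_graph() returns immediately.
        # If the event loop shuts down first, the task may never finish.
        asyncio.create_task(end_all_traces())
        return "vertex outputs"

    print(asyncio.run(run_graph()))
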
@@ -1444,7 +1444,7 @@ class Graph:
 
     def is_vertex_runnable(self, vertex_id: str) -> bool:
         """Returns whether a vertex is runnable."""
-        return self.run_manager.is_vertex_runnable(vertex_id)
+        return self.run_manager.is_vertex_runnable(vertex_id, self.inactivated_vertices)
 
     def build_run_map(self):
         """

@@ -1464,7 +1464,7 @@ class Graph:
         This checks the direct predecessors of each successor to identify any that are
         immediately runnable, expanding the search to ensure progress can be made.
         """
-        return self.run_manager.find_runnable_predecessors_for_successors(vertex_id)
+        return self.run_manager.find_runnable_predecessors_for_successors(vertex_id, self.inactivated_vertices)
 
     def remove_from_predecessors(self, vertex_id: str):
         self.run_manager.remove_from_predecessors(vertex_id)

@@ -1,6 +1,6 @@
 import asyncio
 from collections import defaultdict
-from typing import TYPE_CHECKING, Callable, List, Coroutine
+from typing import TYPE_CHECKING, Callable, Coroutine, List
 
 if TYPE_CHECKING:
     from langflow.graph.graph.base import Graph

@@ -40,19 +40,23 @@ class RunnableVerticesManager:
         self.run_predecessors = state["run_predecessors"]
         self.vertices_to_run = state["vertices_to_run"]
 
-    def is_vertex_runnable(self, vertex_id: str) -> bool:
+    def is_vertex_runnable(self, vertex_id: str, inactivated_vertices: set[str]) -> bool:
         """Determines if a vertex is runnable."""
 
-        return vertex_id in self.vertices_to_run and not self.run_predecessors.get(vertex_id)
+        return (
+            vertex_id in self.vertices_to_run
+            and not self.run_predecessors.get(vertex_id)
+            and vertex_id not in inactivated_vertices
+        )
 
-    def find_runnable_predecessors_for_successors(self, vertex_id: str) -> List[str]:
+    def find_runnable_predecessors_for_successors(self, vertex_id: str, inactivated_vertices: set[str]) -> List[str]:
         """Finds runnable predecessors for the successors of a given vertex."""
         runnable_vertices = []
         visited = set()
 
         for successor_id in self.run_map.get(vertex_id, []):
             for predecessor_id in self.run_predecessors.get(successor_id, []):
-                if predecessor_id not in visited and self.is_vertex_runnable(predecessor_id):
+                if predecessor_id not in visited and self.is_vertex_runnable(predecessor_id, inactivated_vertices):
                     runnable_vertices.append(predecessor_id)
                     visited.add(predecessor_id)
         return runnable_vertices

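A toy check of the extended predicate — runnable now means known, unblocked, and not inactivated:

    vertices_to_run = {"A", "B", "C"}
    run_predecessors = {"B": ["A"]}  # B is blocked until A finishes
    inactivated = {"C"}              # C was switched off by conditional routing

    def is_runnable(vertex_id: str) -> bool:
        return (
            vertex_id in vertices_to_run
            and not run_predecessors.get(vertex_id)
            and vertex_id not in inactivated
        )

    print(sorted(v for v in vertices_to_run if is_runnable(v)))  # ['A']
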
@@ -104,10 +108,14 @@ class RunnableVerticesManager:
         """
         async with lock:
             self.remove_from_predecessors(vertex.id)
-            direct_successors_ready = [v for v in vertex.successors_ids if self.is_vertex_runnable(v)]
+            direct_successors_ready = [
+                v for v in vertex.successors_ids if self.is_vertex_runnable(v, graph.inactivated_vertices)
+            ]
             if not direct_successors_ready:
                 # No direct successors ready, look for runnable predecessors of successors
-                next_runnable_vertices = self.find_runnable_predecessors_for_successors(vertex.id)
+                next_runnable_vertices = self.find_runnable_predecessors_for_successors(
+                    vertex.id, graph.inactivated_vertices
+                )
             else:
                 next_runnable_vertices = direct_successors_ready

@@ -1,5 +1,4 @@
-import warnings
 from typing import List, Optional
 from uuid import UUID
 
 from loguru import logger

@@ -8,17 +7,18 @@ from sqlmodel import Session, col, select
 
 from langflow.schema.message import Message
 from langflow.services.database.models.message.model import MessageRead, MessageTable
+from langflow.services.database.utils import migrate_messages_from_monitor_service_to_database
 from langflow.services.deps import session_scope
 
 
 def get_messages(
-    sender: Optional[str] = None,
-    sender_name: Optional[str] = None,
-    session_id: Optional[str] = None,
-    order_by: Optional[str] = "timestamp",
-    order: Optional[str] = "DESC",
-    flow_id: Optional[UUID] = None,
-    limit: Optional[int] = None,
+    sender: str | None = None,
+    sender_name: str | None = None,
+    session_id: str | None = None,
+    order_by: str | None = "timestamp",
+    order: str | None = "DESC",
+    flow_id: UUID | None = None,
+    limit: int | None = None,
 ):
     """
     Retrieves messages from the monitor service based on the provided filters.

@@ -33,6 +33,8 @@ def get_messages(
     Returns:
         List[Data]: A list of Data objects representing the retrieved messages.
     """
+    with session_scope() as session:
+        migrate_messages_from_monitor_service_to_database(session)
     messages_read: list[Message] = []
     with session_scope() as session:
         stmt = select(MessageTable)

@@ -58,7 +60,7 @@ def get_messages(
     return messages_read
 
 
-def add_messages(messages: Message | list[Message], flow_id: Optional[str] = None):
+def add_messages(messages: Message | list[Message], flow_id: str | None = None):
     """
     Add a message to the monitor service.
     """

@@ -111,8 +113,8 @@ def delete_messages(session_id: str):
 
 def store_message(
     message: Message,
-    flow_id: Optional[str] = None,
-) -> List[Message]:
+    flow_id: str | None = None,
+) -> list[Message]:
     """
     Stores a message in the memory.
 

@@ -41,6 +41,12 @@ class Message(Data):
             value = str(value)
         return value
 
+    @field_serializer("flow_id")
+    def serialize_flow_id(value):
+        if isinstance(value, str):
+            return UUID(value)
+        return value
+
     @field_validator("files", mode="before")
     @classmethod
     def validate_files(cls, value):

@@ -26,7 +26,7 @@ class MessageBase(SQLModel):
         return value
 
     @classmethod
-    def from_message(cls, message: "Message", flow_id: str | None = None):
+    def from_message(cls, message: "Message", flow_id: str | UUID | None = None):
         # first check if the record has all the required fields
         if message.text is None or not message.sender or not message.sender_name:
             raise ValueError("The message does not have the required fields (text, sender, sender_name).")

@@ -34,6 +34,8 @@ class MessageBase(SQLModel):
             timestamp = datetime.fromisoformat(message.timestamp)
         else:
             timestamp = message.timestamp
+        if not flow_id and message.flow_id:
+            flow_id = message.flow_id
         return cls(
             sender=message.sender,
             sender_name=message.sender_name,

@@ -52,6 +54,15 @@ class MessageTable(MessageBase, table=True):
     flow: "Flow" = Relationship(back_populates="messages")
     files: List[str] = Field(sa_column=Column(JSON))
 
+    @field_validator("flow_id", mode="before")
+    @classmethod
+    def validate_flow_id(cls, value):
+        if value is None:
+            return value
+        if isinstance(value, str):
+            value = UUID(value)
+        return value
+
     # Needed for Column(JSON)
     class Config:
         arbitrary_types_allowed = True

@@ -6,22 +6,24 @@ from typing import TYPE_CHECKING
 import sqlalchemy as sa
 from alembic import command, util
 from alembic.config import Config
-from langflow.services.base import Service
-from langflow.services.database import models  # noqa
-from langflow.services.database.models.user.crud import get_user_by_username
-from langflow.services.database.utils import Result, TableResults
-from langflow.services.deps import get_settings_service
-from langflow.services.utils import teardown_superuser
 from loguru import logger
 from sqlalchemy import event, inspect
-from sqlalchemy.engine import Engine
 from sqlalchemy.exc import OperationalError
 from sqlmodel import Session, SQLModel, create_engine, select, text
 
+from langflow.services.base import Service
+from langflow.services.database import models  # noqa
+from langflow.services.database.models.user.crud import get_user_by_username
+from langflow.services.database.utils import Result, TableResults, migrate_messages_from_monitor_service_to_database
+from langflow.services.deps import get_settings_service
+from langflow.services.utils import teardown_superuser
+
 if TYPE_CHECKING:
-    from langflow.services.settings.service import SettingsService
+    from sqlalchemy.engine import Engine
+
+    from langflow.services.settings.service import SettingsService
 
 
 class DatabaseService(Service):
     name = "database_service"

@@ -205,6 +207,10 @@ class DatabaseService(Service):
                 logger.error(f"AutogenerateDiffsDetected: {exc}")
                 if not fix:
                     raise RuntimeError(f"There's a mismatch between the models and the database.\n{exc}")
+            try:
+                migrate_messages_from_monitor_service_to_database(session)
+            except Exception as exc:
+                logger.error(f"Error migrating messages from monitor service to database: {exc}")
 
         if fix:
             self.try_downgrade_upgrade_until_success(alembic_cfg)

@@ -4,11 +4,78 @@ from typing import TYPE_CHECKING
 
 from alembic.util.exc import CommandError
 from loguru import logger
-from sqlmodel import Session, text
+from sqlmodel import Session, select, text
+
+from langflow.services.deps import get_monitor_service
 
 if TYPE_CHECKING:
     from langflow.services.database.service import DatabaseService
 
+from typing import Dict, List
+
+
+def migrate_messages_from_monitor_service_to_database(session: Session) -> bool:
+    from langflow.schema.message import Message
+    from langflow.services.database.models.message import MessageTable
+
+    monitor_service = get_monitor_service()
+    messages_df = monitor_service.get_messages()
+
+    if messages_df.empty:
+        logger.info("No messages to migrate.")
+        return True
+
+    original_messages: List[Dict] = messages_df.to_dict(orient="records")
+
+    db_messages = session.exec(select(MessageTable)).all()
+    db_messages = [msg[0] for msg in db_messages]  # type: ignore
+    db_msg_dict = {(msg.text, msg.timestamp.isoformat(), str(msg.flow_id), msg.session_id): msg for msg in db_messages}
+    # Filter out messages that already exist in the database
+    original_messages_filtered = []
+    for message in original_messages:
+        key = (message["text"], message["timestamp"].isoformat(), str(message["flow_id"]), message["session_id"])
+        if key not in db_msg_dict:
+            original_messages_filtered.append(message)
+    if not original_messages_filtered:
+        logger.info("No messages to migrate.")
+        return True
+    try:
+        # Bulk insert messages
+        session.bulk_insert_mappings(
+            MessageTable,  # type: ignore
+            [MessageTable.from_message(Message(**msg)).model_dump() for msg in original_messages_filtered],
+        )
+        session.commit()
+    except Exception as e:
+        logger.error(f"Error during message insertion: {str(e)}")
+        session.rollback()
+        return False
+
+    # Create a dictionary for faster lookup
+
+    all_ok = True
+    for orig_msg in original_messages_filtered:
+        key = (orig_msg["text"], orig_msg["timestamp"].isoformat(), str(orig_msg["flow_id"]), orig_msg["session_id"])
+        matching_db_msg = db_msg_dict.get(key)
+
+        if matching_db_msg is None:
+            logger.warning(f"Message not found in database: {orig_msg}")
+            all_ok = False
+        else:
+            # Validate other fields
+            if any(getattr(matching_db_msg, k) != v for k, v in orig_msg.items() if k != "index"):
+                logger.warning(f"Message mismatch in database: {orig_msg}")
+                all_ok = False
+
+    if all_ok:
+        messages_ids = [message["index"] for message in original_messages]
+        monitor_service.delete_messages(messages_ids)
+        logger.info("Migration completed successfully. Original messages deleted.")
+    else:
+        logger.warning("Migration completed with errors. Original messages not deleted.")
+
+    return all_ok
+
+
 def initialize_database(fix_migration: bool = False):
     logger.debug("Initializing database")

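The migration's duplicate detection hinges on a four-part key; a small sketch of the comparison it performs:

    from datetime import datetime

    def message_key(msg: dict) -> tuple:
        # Same key shape as the migration: text, ISO timestamp, flow id, session id.
        return (msg["text"], msg["timestamp"].isoformat(), str(msg["flow_id"]), msg["session_id"])

    already_in_db = {("hi", "2024-06-25T00:00:00", "flow-1", "sess-1")}
    candidate = {
        "text": "hi",
        "timestamp": datetime(2024, 6, 25),
        "flow_id": "flow-1",
        "session_id": "sess-1",
    }
    print(message_key(candidate) in already_in_db)  # True -> the row is skipped
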
@@ -1,6 +1,6 @@
 import json
 from datetime import datetime, timezone
-from typing import Any, Optional
+from typing import Any
 from uuid import UUID
 
 from pydantic import BaseModel, Field, field_serializer, field_validator

@@ -28,15 +28,15 @@ class DefaultModel(BaseModel):
 
 
 class TransactionModel(DefaultModel):
-    index: Optional[int] = Field(default=None)
-    timestamp: Optional[datetime] = Field(default_factory=datetime.now, alias="timestamp")
+    index: int | None = Field(default=None)
+    timestamp: datetime | None = Field(default_factory=datetime.now, alias="timestamp")
     vertex_id: str
     target_id: str | None = None
     inputs: dict
-    outputs: Optional[dict] = None
+    outputs: dict | None = None
     status: str
-    error: Optional[str] = None
-    flow_id: Optional[str] = Field(default=None, alias="flow_id")
+    error: str | None = None
+    flow_id: str | None = Field(default=None, alias="flow_id")
 
     # validate target_args in case it is a JSON
     @field_validator("outputs", "inputs", mode="before")

@@ -53,16 +53,16 @@ class TransactionModel(DefaultModel):
 
 
 class TransactionModelResponse(DefaultModel):
-    index: Optional[int] = Field(default=None)
-    timestamp: Optional[datetime] = Field(default_factory=datetime.now, alias="timestamp")
+    index: int | None = Field(default=None)
+    timestamp: datetime | None = Field(default_factory=datetime.now, alias="timestamp")
     vertex_id: str
     inputs: dict
-    outputs: Optional[dict] = None
+    outputs: dict | None = None
     status: str
-    error: Optional[str] = None
-    flow_id: Optional[str] = Field(default=None, alias="flow_id")
-    source: Optional[str] = None
-    target: Optional[str] = None
+    error: str | None = None
+    flow_id: str | None = Field(default=None, alias="flow_id")
+    source: str | None = None
+    target: str | None = None
 
     # validate target_args in case it is a JSON
     @field_validator("outputs", "inputs", mode="before")

@@ -81,9 +81,9 @@ class TransactionModelResponse(DefaultModel):
         return v
 
 
-class MessageModel(DefaultModel):
-    id: Optional[str | UUID] = Field(default=None)
-    flow_id: Optional[UUID] = Field(default=None)
+class DuckDbMessageModel(DefaultModel):
+    index: int | None = Field(default=None, alias="index")
+    flow_id: str | None = Field(default=None, alias="flow_id")
     timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
     sender: str
     sender_name: str

@@ -112,7 +112,53 @@ class MessageModel(DefaultModel):
         return v
 
     @classmethod
-    def from_message(cls, message: Message, flow_id: Optional[str] = None):
+    def from_message(cls, message: Message, flow_id: str | None = None):
         # first check if the record has all the required fields
         if message.text is None or not message.sender or not message.sender_name:
             raise ValueError("The message does not have the required fields (text, sender, sender_name).")
         return cls(
             sender=message.sender,
             sender_name=message.sender_name,
             text=message.text,
             session_id=message.session_id,
             files=message.files or [],
             timestamp=message.timestamp,
             flow_id=flow_id,
         )
+
+
+class MessageModel(DefaultModel):
+    id: str | UUID | None = Field(default=None)
+    flow_id: UUID | None = Field(default=None)
+    timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
+    sender: str
+    sender_name: str
+    session_id: str
+    text: str
+    files: list[str] = []
+
+    @field_validator("files", mode="before")
+    @classmethod
+    def validate_files(cls, v):
+        if isinstance(v, str):
+            v = json.loads(v)
+        return v
+
+    @field_serializer("timestamp")
+    @classmethod
+    def serialize_timestamp(cls, v):
+        v = v.replace(microsecond=0)
+        return v.strftime("%Y-%m-%d %H:%M:%S")
+
+    @field_serializer("files")
+    @classmethod
+    def serialize_files(cls, v):
+        if isinstance(v, list):
+            return json.dumps(v)
+        return v
+
+    @classmethod
+    def from_message(cls, message: Message, flow_id: str | None = None):
+        # first check if the record has all the required fields
+        if message.text is None or not message.sender or not message.sender_name:
+            raise ValueError("The message does not have the required fields (text, sender, sender_name).")

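The two new serializers normalize what gets written to storage; a quick round-trip of their effects:

    import json
    from datetime import datetime, timezone

    ts = datetime(2024, 6, 25, 12, 30, 45, 999999, tzinfo=timezone.utc)
    # serialize_timestamp: drop microseconds, then a fixed human-readable format.
    print(ts.replace(microsecond=0).strftime("%Y-%m-%d %H:%M:%S"))  # 2024-06-25 12:30:45
    # serialize_files: lists are stored as a JSON string.
    print(json.dumps(["a.txt", "b.png"]))  # ["a.txt", "b.png"]
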
@@ -139,8 +185,8 @@ class MessageModelRequest(MessageModel):
 
 
 class VertexBuildModel(DefaultModel):
-    index: Optional[int] = Field(default=None, alias="index", exclude=True)
-    id: Optional[str] = Field(default=None, alias="id")
+    index: int | None = Field(default=None, alias="index", exclude=True)
+    id: str | None = Field(default=None, alias="id")
     flow_id: str
     valid: bool
     params: Any

@@ -1,6 +1,6 @@
 from datetime import datetime
 from pathlib import Path
-from typing import TYPE_CHECKING, List, Optional, Union
+from typing import TYPE_CHECKING, Union
 
 import duckdb
 from loguru import logger
@@ -10,7 +10,7 @@ from langflow.services.base import Service
 from langflow.services.monitor.utils import add_row_to_table, drop_and_create_table_if_schema_mismatch
 
 if TYPE_CHECKING:
-    from langflow.services.monitor.schema import MessageModel, TransactionModel, VertexBuildModel
+    from langflow.services.monitor.schema import DuckDbMessageModel, TransactionModel, VertexBuildModel
     from langflow.services.settings.service import SettingsService

@@ -18,14 +18,14 @@ class MonitorService(Service):
     name = "monitor_service"
 
     def __init__(self, settings_service: "SettingsService"):
-        from langflow.services.monitor.schema import MessageModel, TransactionModel, VertexBuildModel
+        from langflow.services.monitor.schema import DuckDbMessageModel, TransactionModel, VertexBuildModel
 
         self.settings_service = settings_service
         self.base_cache_dir = Path(user_cache_dir("langflow"))
         self.db_path = self.base_cache_dir / "monitor.duckdb"
-        self.table_map: dict[str, type[TransactionModel | MessageModel | VertexBuildModel]] = {
+        self.table_map: dict[str, type[TransactionModel | DuckDbMessageModel | VertexBuildModel]] = {
             "transactions": TransactionModel,
-            "messages": MessageModel,
+            "messages": DuckDbMessageModel,
             "vertex_builds": VertexBuildModel,
         }

@@ -48,7 +48,7 @@ class MonitorService(Service):
     def add_row(
         self,
         table_name: str,
-        data: Union[dict, "TransactionModel", "MessageModel", "VertexBuildModel"],
+        data: Union[dict, "TransactionModel", "DuckDbMessageModel", "VertexBuildModel"],
     ):
         # Make sure the model passed matches the table

@@ -68,12 +68,48 @@ class MonitorService(Service):
     def get_timestamp():
         return datetime.now().strftime("%Y-%m-%d %H:%M:%S")
 
+    def get_messages(
+        self,
+        flow_id: str | None = None,
+        sender: str | None = None,
+        sender_name: str | None = None,
+        session_id: str | None = None,
+        order_by: str | None = "timestamp",
+        order: str | None = "DESC",
+        limit: int | None = None,
+    ):
+        query = "SELECT index, flow_id, sender_name, sender, session_id, text, files, timestamp FROM messages"
+        conditions = []
+        if sender:
+            conditions.append(f"sender = '{sender}'")
+        if sender_name:
+            conditions.append(f"sender_name = '{sender_name}'")
+        if session_id:
+            conditions.append(f"session_id = '{session_id}'")
+        if flow_id:
+            conditions.append(f"flow_id = '{flow_id}'")
+
+        if conditions:
+            query += " WHERE " + " AND ".join(conditions)
+
+        if order_by and order:
+            # Make sure the order is from newest to oldest
+            query += f" ORDER BY {order_by} {order.upper()}"
+
+        if limit is not None:
+            query += f" LIMIT {limit}"
+
+        with duckdb.connect(str(self.db_path), read_only=True) as conn:
+            df = conn.execute(query).df()
+
+        return df
+
     def get_vertex_builds(
         self,
-        flow_id: Optional[str] = None,
-        vertex_id: Optional[str] = None,
-        valid: Optional[bool] = None,
-        order_by: Optional[str] = "timestamp",
+        flow_id: str | None = None,
+        vertex_id: str | None = None,
+        valid: bool | None = None,
+        order_by: str | None = "timestamp",
     ):
         query = "SELECT id, index,flow_id, valid, params, data, artifacts, timestamp FROM vertex_builds"
         conditions = []

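Note that the new query builder interpolates filter values straight into the SQL string. Purely as a hedged sketch (not what the hunk does), the same filters expressed with DuckDB's bound parameters would look like this:

    import duckdb

    def fetch_messages(db_path: str, sender: str | None = None, session_id: str | None = None):
        query = "SELECT * FROM messages"
        conditions: list[str] = []
        params: list[str] = []
        for column, value in (("sender", sender), ("session_id", session_id)):
            if value:
                conditions.append(f"{column} = ?")  # placeholder instead of an interpolated value
                params.append(value)
        if conditions:
            query += " WHERE " + " AND ".join(conditions)
        with duckdb.connect(db_path, read_only=True) as conn:
            return conn.execute(query, params).df()
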
@@ -96,7 +132,7 @@ class MonitorService(Service):
 
         return df.to_dict(orient="records")
 
-    def delete_vertex_builds(self, flow_id: Optional[str] = None):
+    def delete_vertex_builds(self, flow_id: str | None = None):
         query = "DELETE FROM vertex_builds"
         if flow_id:
             query += f" WHERE flow_id = '{flow_id}'"

@@ -109,7 +145,7 @@ class MonitorService(Service):
 
         return self.exec_query(query, read_only=False)
 
-    def delete_messages(self, message_ids: Union[List[int], str]):
+    def delete_messages(self, message_ids: list[int] | str):
         if isinstance(message_ids, list):
             # If message_ids is a list, join the string representations of the integers
             ids_str = ",".join(map(str, message_ids))

@@ -132,11 +168,11 @@ class MonitorService(Service):
 
     def get_transactions(
         self,
-        source: Optional[str] = None,
-        target: Optional[str] = None,
-        status: Optional[str] = None,
-        order_by: Optional[str] = "timestamp",
-        flow_id: Optional[str] = None,
+        source: str | None = None,
+        target: str | None = None,
+        status: str | None = None,
+        order_by: str | None = "timestamp",
+        flow_id: str | None = None,
     ):
         query = (
             "SELECT index,flow_id, status, error, timestamp, vertex_id, inputs, outputs, target_id FROM transactions"

79 src/backend/base/poetry.lock generated

@@ -112,13 +112,13 @@ frozenlist = ">=1.1.0"
 
 [[package]]
 name = "alembic"
-version = "1.13.1"
+version = "1.13.2"
 description = "A database migration tool for SQLAlchemy."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"},
-    {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"},
+    {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"},
+    {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"},
 ]
 
 [package.dependencies]
@@ -739,6 +739,20 @@ typer = ">=0.12.3"
 [package.extras]
 standard = ["fastapi", "uvicorn[standard] (>=0.15.0)"]
 
+[[package]]
+name = "firecrawl-py"
+version = "0.0.16"
+description = "Python SDK for Firecrawl API"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "firecrawl_py-0.0.16-py3-none-any.whl", hash = "sha256:9024f483b501852a6b9c4e6cdfc9e8dde452d922afac357080bb278a0c9c2a26"},
+    {file = "firecrawl_py-0.0.16.tar.gz", hash = "sha256:6c662fa0a549bc7f5c0acb704baba6731869ca0451094034264dfc1b4eb086e4"},
+]
+
+[package.dependencies]
+requests = "*"
+
 [[package]]
 name = "frozenlist"
 version = "1.4.1"
@@ -1158,19 +1172,19 @@ files = [
 
 [[package]]
 name = "langchain"
-version = "0.2.5"
+version = "0.2.6"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain-0.2.5-py3-none-any.whl", hash = "sha256:9aded9a65348254e1c93dcdaacffe4d1b6a5e7f74ef80c160c88ff78ad299228"},
-    {file = "langchain-0.2.5.tar.gz", hash = "sha256:ffdbf4fcea46a10d461bcbda2402220fcfd72a0c70e9f4161ae0510067b9b3bd"},
+    {file = "langchain-0.2.6-py3-none-any.whl", hash = "sha256:f86e8a7afd3e56f8eb5ba47f01dd00144fb9fc2f1db9873bd197347be2857aa4"},
+    {file = "langchain-0.2.6.tar.gz", hash = "sha256:867f6add370c1e3911b0e87d3dd0e36aec1e8f513bf06131340fe8f151d89dc5"},
 ]
 
 [package.dependencies]
 aiohttp = ">=3.8.3,<4.0.0"
 async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
-langchain-core = ">=0.2.7,<0.3.0"
+langchain-core = ">=0.2.10,<0.3.0"
 langchain-text-splitters = ">=0.2.0,<0.3.0"
 langsmith = ">=0.1.17,<0.2.0"
 numpy = [
@@ -1181,24 +1195,24 @@ pydantic = ">=1,<3"
 PyYAML = ">=5.3"
 requests = ">=2,<3"
 SQLAlchemy = ">=1.4,<3"
-tenacity = ">=8.1.0,<9.0.0"
+tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
 
 [[package]]
 name = "langchain-community"
-version = "0.2.5"
+version = "0.2.6"
 description = "Community contributed LangChain integrations."
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_community-0.2.5-py3-none-any.whl", hash = "sha256:bf37a334952e42c7676d083cf2d2c4cbfbb7de1949c4149fe19913e2b06c485f"},
-    {file = "langchain_community-0.2.5.tar.gz", hash = "sha256:476787b8c8c213b67e7b0eceb53346e787f00fbae12d8e680985bd4f93b0bf64"},
+    {file = "langchain_community-0.2.6-py3-none-any.whl", hash = "sha256:758cc800acfe5dd396bf8ba1b57c4792639ead0eab48ed0367f0732ec6ee1f68"},
+    {file = "langchain_community-0.2.6.tar.gz", hash = "sha256:40ce09a50ed798aa651ddb34c8978200fa8589b9813c7a28ce8af027bbf249f0"},
 ]
 
 [package.dependencies]
 aiohttp = ">=3.8.3,<4.0.0"
 dataclasses-json = ">=0.5.7,<0.7"
-langchain = ">=0.2.5,<0.3.0"
-langchain-core = ">=0.2.7,<0.3.0"
+langchain = ">=0.2.6,<0.3.0"
+langchain-core = ">=0.2.10,<0.3.0"
 langsmith = ">=0.1.0,<0.2.0"
 numpy = [
     {version = ">=1,<2", markers = "python_version < \"3.12\""},
@@ -1207,17 +1221,17 @@ numpy = [
 PyYAML = ">=5.3"
 requests = ">=2,<3"
 SQLAlchemy = ">=1.4,<3"
-tenacity = ">=8.1.0,<9.0.0"
+tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
 
 [[package]]
 name = "langchain-core"
-version = "0.2.9"
+version = "0.2.10"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_core-0.2.9-py3-none-any.whl", hash = "sha256:426a5a4fea95a5db995ba5ab560b76edd4998fb6fe52ccc28ac987092a4cbfcd"},
-    {file = "langchain_core-0.2.9.tar.gz", hash = "sha256:f1c59082642921727844e1cd0eb36d451edd1872c20e193aa3142aac03495986"},
+    {file = "langchain_core-0.2.10-py3-none-any.whl", hash = "sha256:6eb72086b6bc86db9812da98f79e507c2209a15c0112aefd214a04182ada8586"},
+    {file = "langchain_core-0.2.10.tar.gz", hash = "sha256:33d1fc234ab58c80476eb5bbde2107ef522a2ce8f46bdf47d9e1bd21e054208f"},
 ]
 
 [package.dependencies]
@@ -1233,35 +1247,32 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
 
 [[package]]
 name = "langchain-experimental"
-version = "0.0.61"
+version = "0.0.62"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_experimental-0.0.61-py3-none-any.whl", hash = "sha256:f9c516f528f55919743bd56fe1689a53bf74ae7f8902d64b9d8aebc61249cbe2"},
-    {file = "langchain_experimental-0.0.61.tar.gz", hash = "sha256:e9538efb994be5db3045cc582cddb9787c8299c86ffeee9d3779b7f58eef2226"},
+    {file = "langchain_experimental-0.0.62-py3-none-any.whl", hash = "sha256:9240f9e3490e819976f20a37863970036e7baacb7104b9eb6833d19ab6d518c9"},
+    {file = "langchain_experimental-0.0.62.tar.gz", hash = "sha256:9737fbc8429d24457ea4d368e3c9ba9ed1cace0564fb5f1a96a3027a588bd0ac"},
 ]
 
 [package.dependencies]
-langchain-community = ">=0.2.5,<0.3.0"
-langchain-core = ">=0.2.7,<0.3.0"
+langchain-community = ">=0.2.6,<0.3.0"
+langchain-core = ">=0.2.10,<0.3.0"
 
 [[package]]
 name = "langchain-text-splitters"
-version = "0.2.1"
+version = "0.2.2"
 description = "LangChain text splitting utilities"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_text_splitters-0.2.1-py3-none-any.whl", hash = "sha256:c2774a85f17189eaca50339629d2316d13130d4a8d9f1a1a96f3a03670c4a138"},
-    {file = "langchain_text_splitters-0.2.1.tar.gz", hash = "sha256:06853d17d7241ecf5c97c7b6ef01f600f9b0fb953dd997838142a527a4f32ea4"},
+    {file = "langchain_text_splitters-0.2.2-py3-none-any.whl", hash = "sha256:1c80d4b11b55e2995f02d2a326c0323ee1eeff24507329bb22924e420c782dff"},
+    {file = "langchain_text_splitters-0.2.2.tar.gz", hash = "sha256:a1e45de10919fa6fb080ef0525deab56557e9552083600455cb9fa4238076140"},
 ]
 
 [package.dependencies]
-langchain-core = ">=0.2.0,<0.3.0"
-
-[package.extras]
-extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"]
+langchain-core = ">=0.2.10,<0.3.0"
 
 [[package]]
 name = "langchainhub"
@@ -2468,13 +2479,13 @@ pyasn1 = ">=0.1.3"
 
 [[package]]
 name = "sentry-sdk"
-version = "2.6.0"
+version = "2.7.0"
 description = "Python client for Sentry (https://sentry.io)"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "sentry_sdk-2.6.0-py2.py3-none-any.whl", hash = "sha256:422b91cb49378b97e7e8d0e8d5a1069df23689d45262b86f54988a7db264e874"},
-    {file = "sentry_sdk-2.6.0.tar.gz", hash = "sha256:65cc07e9c6995c5e316109f138570b32da3bd7ff8d0d0ee4aaf2628c3dd8127d"},
+    {file = "sentry_sdk-2.7.0-py2.py3-none-any.whl", hash = "sha256:db9594c27a4d21c1ebad09908b1f0dc808ef65c2b89c1c8e7e455143262e37c1"},
+    {file = "sentry_sdk-2.7.0.tar.gz", hash = "sha256:d846a211d4a0378b289ced3c434480945f110d0ede00450ba631fc2852e7a0d4"},
 ]
 
 [package.dependencies]
@@ -2506,7 +2517,7 @@ langchain = ["langchain (>=0.0.210)"]
 loguru = ["loguru (>=0.5)"]
 openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
 opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
-opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"]
+opentelemetry-experimental = ["opentelemetry-instrumentation-aio-pika (==0.46b0)", "opentelemetry-instrumentation-aiohttp-client (==0.46b0)", "opentelemetry-instrumentation-aiopg (==0.46b0)", "opentelemetry-instrumentation-asgi (==0.46b0)", "opentelemetry-instrumentation-asyncio (==0.46b0)", "opentelemetry-instrumentation-asyncpg (==0.46b0)", "opentelemetry-instrumentation-aws-lambda (==0.46b0)", "opentelemetry-instrumentation-boto (==0.46b0)", "opentelemetry-instrumentation-boto3sqs (==0.46b0)", "opentelemetry-instrumentation-botocore (==0.46b0)", "opentelemetry-instrumentation-cassandra (==0.46b0)", "opentelemetry-instrumentation-celery (==0.46b0)", "opentelemetry-instrumentation-confluent-kafka (==0.46b0)", "opentelemetry-instrumentation-dbapi (==0.46b0)", "opentelemetry-instrumentation-django (==0.46b0)", "opentelemetry-instrumentation-elasticsearch (==0.46b0)", "opentelemetry-instrumentation-falcon (==0.46b0)", "opentelemetry-instrumentation-fastapi (==0.46b0)", "opentelemetry-instrumentation-flask (==0.46b0)", "opentelemetry-instrumentation-grpc (==0.46b0)", "opentelemetry-instrumentation-httpx (==0.46b0)", "opentelemetry-instrumentation-jinja2 (==0.46b0)", "opentelemetry-instrumentation-kafka-python (==0.46b0)", "opentelemetry-instrumentation-logging (==0.46b0)", "opentelemetry-instrumentation-mysql (==0.46b0)", "opentelemetry-instrumentation-mysqlclient (==0.46b0)", "opentelemetry-instrumentation-pika (==0.46b0)", "opentelemetry-instrumentation-psycopg (==0.46b0)", "opentelemetry-instrumentation-psycopg2 (==0.46b0)", "opentelemetry-instrumentation-pymemcache (==0.46b0)", "opentelemetry-instrumentation-pymongo (==0.46b0)", "opentelemetry-instrumentation-pymysql (==0.46b0)", "opentelemetry-instrumentation-pyramid (==0.46b0)", "opentelemetry-instrumentation-redis (==0.46b0)", "opentelemetry-instrumentation-remoulade (==0.46b0)", "opentelemetry-instrumentation-requests (==0.46b0)", "opentelemetry-instrumentation-sklearn (==0.46b0)", "opentelemetry-instrumentation-sqlalchemy (==0.46b0)", "opentelemetry-instrumentation-sqlite3 (==0.46b0)", "opentelemetry-instrumentation-starlette (==0.46b0)", "opentelemetry-instrumentation-system-metrics (==0.46b0)", "opentelemetry-instrumentation-threading (==0.46b0)", "opentelemetry-instrumentation-tornado (==0.46b0)", "opentelemetry-instrumentation-tortoiseorm (==0.46b0)", "opentelemetry-instrumentation-urllib (==0.46b0)", "opentelemetry-instrumentation-urllib3 (==0.46b0)", "opentelemetry-instrumentation-wsgi (==0.46b0)"]
 pure-eval = ["asttokens", "executing", "pure-eval"]
 pymongo = ["pymongo (>=3.1)"]
 pyspark = ["pyspark (>=2.4.4)"]
@@ -3235,4 +3246,4 @@ local = []
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<3.13"
-content-hash = "4f566531a8539ddc81cb91a7e7f9b723c84679f0af5bb8619f7b02f9ffc6cfaa"
+content-hash = "7e46144d27c633214f00e73e496c0e4d56db1fb47032a21861677ec275b79d86"

@@ -64,6 +64,7 @@ pyperclip = "^1.8.2"
 uncurl = "^0.0.11"
 sentry-sdk = {extras = ["fastapi", "loguru"], version = "^2.5.1"}
 chardet = "^5.2.0"
+firecrawl-py = "^0.0.16"
 
 
 [tool.poetry.extras]

@@ -4,7 +4,7 @@ import { Label } from "../../components/ui/label";
 import { Textarea } from "../../components/ui/textarea";
 import useFlowsManagerStore from "../../stores/flowsManagerStore";
 import { InputProps } from "../../types/components";
-import { cn } from "../../utils/utils";
+import { cn, isEndpointNameValid } from "../../utils/utils";
 
 export const EditFlowSettings: React.FC<InputProps> = ({
   name,
@@ -17,7 +17,7 @@ export const EditFlowSettings: React.FC<InputProps> = ({
   setEndpointName,
 }: InputProps): JSX.Element => {
   const [isMaxLength, setIsMaxLength] = useState(false);
-  const [isEndpointNameValid, setIsEndpointNameValid] = useState(true);
+  const [validEndpointName, setValidEndpointName] = useState(true);
   const [isInvalidName, setIsInvalidName] = useState(false);
   const currentFlow = useFlowsManagerStore((state) => state.currentFlow);
 
@@ -34,10 +34,6 @@ export const EditFlowSettings: React.FC<InputProps> = ({
         invalid = true;
         break;
       }
-      if (value === currentFlow?.name) {
-        invalid = true;
-        break;
-      }
       invalid = false;
     }
     setIsInvalidName(invalid);
@@ -51,12 +47,8 @@ export const EditFlowSettings: React.FC<InputProps> = ({
   const handleEndpointNameChange = (event: ChangeEvent<HTMLInputElement>) => {
     // Validate the endpoint name
     // use this regex r'^[a-zA-Z0-9_-]+$'
-    const isValid =
-      (/^[a-zA-Z0-9_-]+$/.test(event.target.value) &&
-        event.target.value.length <= maxLength) ||
-      // empty is also valid
-      event.target.value.length === 0;
-    setIsEndpointNameValid(isValid);
+    const isValid = isEndpointNameValid(event.target.value, maxLength);
+    setValidEndpointName(isValid);
     setEndpointName!(event.target.value);
   };
 

@@ -115,21 +107,21 @@ export const EditFlowSettings: React.FC<InputProps> = ({
           }}
         />
       ) : (
-        <span
+        <div
           className={cn(
-            "font-normal text-muted-foreground word-break-break-word",
+            "max-h-[250px] overflow-auto font-normal text-muted-foreground word-break-break-word",
             description === "" ? "font-light italic" : "",
           )}
         >
           {description === "" ? "No description" : description}
-        </span>
+        </div>
       )}
     </Label>
     {setEndpointName && (
       <Label>
         <div className="edit-flow-arrangement mt-3">
           <span className="font-medium">Endpoint Name</span>
-          {!isEndpointNameValid && (
+          {!validEndpointName && (
             <span className="edit-flow-span">
               Invalid endpoint name. Use only letters, numbers, hyphens, and
               underscores ({maxLength} characters max).

@@ -1112,7 +1112,7 @@ export async function getMessagesTable(
   return { rows: rowsOrganized, columns };
 }
 
-export async function deleteMessagesFn(ids: number[]) {
+export async function deleteMessagesFn(ids: string[]) {
   try {
     return await api.delete(`${BASE_URL_API}monitor/messages`, {
       data: ids,
@@ -1124,5 +1124,5 @@ export async function deleteMessagesFn(ids: number[]) {
 }
 
 export async function updateMessageApi(data: Message) {
-  return await api.post(`${BASE_URL_API}monitor/messages/${data.index}`, data);
+  return await api.post(`${BASE_URL_API}monitor/messages/${data.id}`, data);
 }

61 src/frontend/src/icons/Firecrawl/FirecrawlLogo.jsx Normal file

@@ -0,0 +1,61 @@
+const SvgFirecrawlLogo = (props) => (
+  <svg
+    viewBox="-33 0 255 255"
+    width="24"
+    height="24"
+    xmlns="http://www.w3.org/2000/svg"
+    xmlnsXlink="http://www.w3.org/1999/xlink"
+    preserveAspectRatio="xMidYMid"
+  >
+    <defs>
+      <style>
+        {`
+          .cls-3 {
+            fill: url(#linear-gradient-1);
+          }
+
+          .cls-4 {
+            fill: #fc9502;
+          }
+
+          .cls-5 {
+            fill: #fce202;
+          }
+        `}
+      </style>
+
+      <linearGradient
+        id="linear-gradient-1"
+        gradientUnits="userSpaceOnUse"
+        x1="94.141"
+        y1="255"
+        x2="94.141"
+        y2="0.188"
+      >
+        <stop offset="0" stopColor="#ff4c0d" />
+        <stop offset="1" stopColor="#fc9502" />
+      </linearGradient>
+    </defs>
+    <g id="fire">
+      <path
+        d="M187.899,164.809 C185.803,214.868 144.574,254.812 94.000,254.812 C42.085,254.812 -0.000,211.312 -0.000,160.812 C-0.000,154.062 -0.121,140.572 10.000,117.812 C16.057,104.191 19.856,95.634 22.000,87.812 C23.178,83.513 25.469,76.683 32.000,87.812 C35.851,94.374 36.000,103.812 36.000,103.812 C36.000,103.812 50.328,92.817 60.000,71.812 C74.179,41.019 62.866,22.612 59.000,9.812 C57.662,5.384 56.822,-2.574 66.000,0.812 C75.352,4.263 100.076,21.570 113.000,39.812 C131.445,65.847 138.000,90.812 138.000,90.812 C138.000,90.812 143.906,83.482 146.000,75.812 C148.365,67.151 148.400,58.573 155.999,67.813 C163.226,76.600 173.959,93.113 180.000,108.812 C190.969,137.321 187.899,164.809 187.899,164.809 Z"
+        id="path-1"
+        className="cls-3"
+        fillRule="evenodd"
+      />
+      <path
+        d="M94.000,254.812 C58.101,254.812 29.000,225.711 29.000,189.812 C29.000,168.151 37.729,155.000 55.896,137.166 C67.528,125.747 78.415,111.722 83.042,102.172 C83.953,100.292 86.026,90.495 94.019,101.966 C98.212,107.982 104.785,118.681 109.000,127.812 C116.266,143.555 118.000,158.812 118.000,158.812 C118.000,158.812 125.121,154.616 130.000,143.812 C131.573,140.330 134.753,127.148 143.643,140.328 C150.166,150.000 159.127,167.390 159.000,189.812 C159.000,225.711 129.898,254.812 94.000,254.812 Z"
+        id="path-2"
+        className="cls-4"
+        fillRule="evenodd"
+      />
+      <path
+        d="M95.000,183.812 C104.250,183.812 104.250,200.941 116.000,223.812 C123.824,239.041 112.121,254.812 95.000,254.812 C77.879,254.812 69.000,240.933 69.000,223.812 C69.000,206.692 85.750,183.812 95.000,183.812 Z"
+        id="path-3"
+        className="cls-5"
+        fillRule="evenodd"
+      />
+    </g>
+  </svg>
+);
+export default SvgFirecrawlLogo;

28 src/frontend/src/icons/Firecrawl/firecraw-logo.svg Normal file

@@ -0,0 +1,28 @@
+<svg width="800px" height="800px" viewBox="-33 0 255 255" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
+    <defs>
+        <style>
+
+            .cls-3 {
+                fill: url(#linear-gradient-1);
+            }
+
+            .cls-4 {
+                fill: #fc9502;
+            }
+
+            .cls-5 {
+                fill: #fce202;
+            }
+        </style>
+
+        <linearGradient id="linear-gradient-1" gradientUnits="userSpaceOnUse" x1="94.141" y1="255" x2="94.141" y2="0.188">
+            <stop offset="0" stop-color="#ff4c0d"/>
+            <stop offset="1" stop-color="#fc9502"/>
+        </linearGradient>
+    </defs>
+    <g id="fire">
+        <path d="M187.899,164.809 C185.803,214.868 144.574,254.812 94.000,254.812 C42.085,254.812 -0.000,211.312 -0.000,160.812 C-0.000,154.062 -0.121,140.572 10.000,117.812 C16.057,104.191 19.856,95.634 22.000,87.812 C23.178,83.513 25.469,76.683 32.000,87.812 C35.851,94.374 36.000,103.812 36.000,103.812 C36.000,103.812 50.328,92.817 60.000,71.812 C74.179,41.019 62.866,22.612 59.000,9.812 C57.662,5.384 56.822,-2.574 66.000,0.812 C75.352,4.263 100.076,21.570 113.000,39.812 C131.445,65.847 138.000,90.812 138.000,90.812 C138.000,90.812 143.906,83.482 146.000,75.812 C148.365,67.151 148.400,58.573 155.999,67.813 C163.226,76.600 173.959,93.113 180.000,108.812 C190.969,137.321 187.899,164.809 187.899,164.809 Z" id="path-1" class="cls-3" fill-rule="evenodd"/>
+        <path d="M94.000,254.812 C58.101,254.812 29.000,225.711 29.000,189.812 C29.000,168.151 37.729,155.000 55.896,137.166 C67.528,125.747 78.415,111.722 83.042,102.172 C83.953,100.292 86.026,90.495 94.019,101.966 C98.212,107.982 104.785,118.681 109.000,127.812 C116.266,143.555 118.000,158.812 118.000,158.812 C118.000,158.812 125.121,154.616 130.000,143.812 C131.573,140.330 134.753,127.148 143.643,140.328 C150.166,150.000 159.127,167.390 159.000,189.812 C159.000,225.711 129.898,254.812 94.000,254.812 Z" id="path-2" class="cls-4" fill-rule="evenodd"/>
+        <path d="M95.000,183.812 C104.250,183.812 104.250,200.941 116.000,223.812 C123.824,239.041 112.121,254.812 95.000,254.812 C77.879,254.812 69.000,240.933 69.000,223.812 C69.000,206.692 85.750,183.812 95.000,183.812 Z" id="path-3" class="cls-5" fill-rule="evenodd"/>
+    </g>
+</svg>

9 src/frontend/src/icons/Firecrawl/index.tsx Normal file

@@ -0,0 +1,9 @@
+import React, { forwardRef } from "react";
+import SvgFirecrawlLogo from "./FirecrawlLogo";
+
+export const FirecrawlIcon = forwardRef<
+  SVGSVGElement,
+  React.PropsWithChildren<{}>
+>((props, ref) => {
+  return <SvgFirecrawlLogo ref={ref} {...props} />;
+});

@@ -10,7 +10,7 @@ const useRemoveSession = (setSuccessData, setErrorData) => {
       await deleteMessagesFn(
         messages
           .filter((msg) => msg.session_id === session_id)
-          .map((msg) => msg.index),
+          .map((msg) => msg.id),
       );
       deleteSession(session_id);
       setSuccessData({

@@ -17,7 +17,7 @@ export default function SessionView({ rows }: { rows: Array<any> }) {
   const setErrorData = useAlertStore((state) => state.setErrorData);
   const setSuccessData = useAlertStore((state) => state.setSuccessData);
 
-  const [selectedRows, setSelectedRows] = useState<number[]>([]);
+  const [selectedRows, setSelectedRows] = useState<string[]>([]);
 
   const { handleRemoveMessages } = useRemoveMessages(
     setSelectedRows,
@@ -52,7 +52,8 @@ export default function SessionView({ rows }: { rows: Array<any> }) {
         ]}
         overlayNoRowsTemplate="No data available"
         onSelectionChanged={(event: SelectionChangedEvent) => {
-          setSelectedRows(event.api.getSelectedRows().map((row) => row.index));
+          console.log(event.api.getSelectedRows());
+          setSelectedRows(event.api.getSelectedRows().map((row) => row.id));
         }}
         rowSelection="multiple"
         suppressRowClickSelection={true}
@@ -6,6 +6,7 @@ import useAlertStore from "../../stores/alertStore";
 import useFlowsManagerStore from "../../stores/flowsManagerStore";
 import { FlowSettingsPropsType } from "../../types/components";
 import { FlowType } from "../../types/flow";
+import { isEndpointNameValid } from "../../utils/utils";
 import BaseModal from "../baseModal";
 
 export default function FlowSettingsModal({
@@ -22,13 +23,17 @@ export default function FlowSettingsModal({
 
   const [name, setName] = useState(currentFlow!.name);
   const [description, setDescription] = useState(currentFlow!.description);
-  const [endpoint_name, setEndpointName] = useState(currentFlow!.endpoint_name);
+  const [endpoint_name, setEndpointName] = useState(
+    currentFlow!.endpoint_name ?? "",
+  );
   const [isSaving, setIsSaving] = useState(false);
+  const [disableSave, setDisableSave] = useState(true);
   function handleClick(): void {
     setIsSaving(true);
     currentFlow!.name = name;
     currentFlow!.description = description;
-    currentFlow!.endpoint_name = endpoint_name;
+    currentFlow!.endpoint_name =
+      endpoint_name && endpoint_name.length > 0 ? endpoint_name : null;
     saveFlow(currentFlow!)
       ?.then(() => {
         setOpen(false);
@@ -54,6 +59,18 @@ export default function FlowSettingsModal({
     setNameList(tempNameList.filter((name) => name !== currentFlow!.name));
   }, [flows]);
 
+  useEffect(() => {
+    if (
+      (!nameLists.includes(name) && currentFlow?.name !== name) ||
+      currentFlow?.description !== description ||
+      ((currentFlow?.endpoint_name ?? "") !== endpoint_name &&
+        isEndpointNameValid(endpoint_name ?? "", 50))
+    ) {
+      setDisableSave(false);
+    } else {
+      setDisableSave(true);
+    }
+  }, [nameLists, currentFlow, description, endpoint_name, name]);
   return (
     <BaseModal
       open={open}
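The effect above recomputes the Save button's enabled state on every edit. The `?? ""` normalization is the subtle part: a flow that never had an endpoint now stores `endpoint_name` as null, while the controlled input always holds a string, so null and "" must compare as equal or an untouched form would look dirty. A minimal sketch of the same check as a pure function (the type and function names here are illustrative, not from the diff):

// Illustrative sketch of the modal's dirty-check, with the null-vs-empty
// normalization made explicit. isEndpointNameValid mirrors the helper that
// this diff adds to utils further down.
type FlowLike = {
  name: string;
  description: string;
  endpoint_name?: string | null;
};

const isEndpointNameValid = (name: string, maxLength: number): boolean =>
  (/^[a-zA-Z0-9_-]+$/.test(name) && name.length <= maxLength) ||
  name.length === 0;

function canSave(
  flow: FlowLike,
  form: { name: string; description: string; endpointName: string },
  takenNames: string[],
): boolean {
  // null on the flow and "" in the form mean the same thing: no endpoint.
  const endpointChanged = (flow.endpoint_name ?? "") !== form.endpointName;
  return (
    (!takenNames.includes(form.name) && flow.name !== form.name) ||
    flow.description !== form.description ||
    (endpointChanged && isEndpointNameValid(form.endpointName, 50))
  );
}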
@@ -80,8 +97,8 @@ export default function FlowSettingsModal({
       <BaseModal.Footer
         submit={{
           label: "Save",
-          disabled: nameLists.includes(name) || name === currentFlow!.name,
+          dataTestId: "save-flow-settings",
+          disabled: disableSave,
           loading: isSaving,
         }}
       />
@@ -128,7 +128,7 @@ export default function ShareModal({
       successShare,
       (err) => {
         setErrorData({
-          title: "Error sharing " + is_component ? "component" : "flow",
+          title: "Error sharing " + (is_component ? "component" : "flow"),
           list: [err["response"]["data"]["detail"]],
         });
       },
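The change above fixes an operator-precedence bug: `+` binds tighter than the conditional operator, so the old expression was parsed as `("Error sharing " + is_component) ? "component" : "flow"`. The concatenated string is always truthy, so the title was always "component" and the "Error sharing " prefix was discarded. A standalone illustration (not from the diff):

// Precedence demo: string concatenation happens before ?: unless forced.
const is_component = false;

const buggy = "Error sharing " + is_component ? "component" : "flow";
// buggy === "component": the truthy concatenated string selects the first branch.

const fixed = "Error sharing " + (is_component ? "component" : "flow");
// fixed === "Error sharing flow"

console.log(buggy, fixed);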
@@ -2,10 +2,10 @@ import { deleteMessagesFn } from "../../../../../controllers/API";
 import { useMessagesStore } from "../../../../../stores/messagesStore";
 
 const useRemoveMessages = (
-  setSelectedRows: (data: number[]) => void,
+  setSelectedRows: (data: string[]) => void,
   setSuccessData: (data: { title: string }) => void,
   setErrorData: (data: { title: string }) => void,
-  selectedRows: number[],
+  selectedRows: string[],
 ) => {
   const deleteMessages = useMessagesStore((state) => state.removeMessages);
 
@@ -21,7 +21,7 @@ export default function MessagesPage() {
   const setErrorData = useAlertStore((state) => state.setErrorData);
   const setSuccessData = useAlertStore((state) => state.setSuccessData);
 
-  const [selectedRows, setSelectedRows] = useState<number[]>([]);
+  const [selectedRows, setSelectedRows] = useState<string[]>([]);
 
   const { handleRemoveMessages } = useRemoveMessages(
     setSelectedRows,
@@ -29,7 +29,7 @@ export const useMessagesStore = create<MessagesStoreType>((set, get) => ({
   updateMessage: (message) => {
     set(() => ({
       messages: get().messages.map((msg) =>
-        msg.index === message.index ? message : msg,
+        msg.id === message.id ? message : msg,
       ),
     }));
   },
@@ -41,7 +41,7 @@ export const useMessagesStore = create<MessagesStoreType>((set, get) => ({
     try {
       set((state) => {
         const updatedMessages = state.messages.filter(
-          (msg) => !ids.includes(msg.index),
+          (msg) => !ids.includes(msg.id),
         );
         get().setMessages(updatedMessages);
         resolve(updatedMessages);
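These two store hunks belong to a migration that runs through the whole diff: messages were previously addressed by a numeric index and are now addressed by their string id, so selection state, hook parameters, and store methods all move from number[] to string[]. A trimmed, self-contained sketch of the resulting store shape (assuming zustand, which the real store uses; the Message fields here are abbreviated):

// Sketch of the id-based messages store after the migration, reduced to the
// two methods touched above.
import { create } from "zustand";

type Message = { id: string; flow_id: string; message: string };

type MessagesState = {
  messages: Message[];
  updateMessage: (message: Message) => void;
  removeMessages: (ids: string[]) => void;
};

const useMessagesStore = create<MessagesState>((set, get) => ({
  messages: [],
  updateMessage: (message) =>
    set({
      // Replace the message whose string id matches; leave the rest alone.
      messages: get().messages.map((msg) =>
        msg.id === message.id ? message : msg,
      ),
    }),
  removeMessages: (ids) =>
    set({
      // Keep only messages whose id is not in the deletion list.
      messages: get().messages.filter((msg) => !ids.includes(msg.id)),
    }),
}));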
@@ -303,7 +303,7 @@ export type IconComponentProps = {
 export type InputProps = {
   name: string | null;
   description: string | null;
-  endpointName?: string;
+  endpointName?: string | null;
   maxLength?: number;
   setName?: (name: string) => void;
   setDescription?: (description: string) => void;
@@ -7,7 +7,7 @@ export type FlowType = {
   id: string;
   data: ReactFlowJsonObject | null;
   description: string;
-  endpoint_name?: string;
+  endpoint_name?: string | null;
   style?: FlowStyleType;
   is_component?: boolean;
   last_tested_version?: string;
@@ -1,7 +1,6 @@
 type Message = {
   artifacts: Record<string, any>;
   flow_id: string;
-  index: number;
   message: string;
   sender: string;
   sender_name: string;
@@ -8,7 +8,7 @@ export type MessagesStoreType = {
   removeMessage: (message: Message) => void;
   updateMessage: (message: Message) => void;
   clearMessages: () => void;
-  removeMessages: (ids: number[]) => void;
+  removeMessages: (ids: string[]) => void;
   columns: Array<ColDef | ColGroupDef>;
   setColumns: (columns: Array<ColDef | ColGroupDef>) => void;
   deleteSession: (id: string) => void;
@@ -171,6 +171,7 @@ import { CouchbaseIcon } from "../icons/Couchbase";
 import { ElasticsearchIcon } from "../icons/ElasticsearchStore";
 import { EvernoteIcon } from "../icons/Evernote";
 import { FBIcon } from "../icons/FacebookMessenger";
+import { FirecrawlIcon } from "../icons/Firecrawl";
 import { GitBookIcon } from "../icons/GitBook";
 import { GoogleIcon } from "../icons/Google";
 import { GoogleGenerativeAIIcon } from "../icons/GoogleGenerativeAI";
@@ -363,6 +364,8 @@ export const nodeIconsLucide: iconsType = {
   CohereEmbeddings: CohereIcon,
   EverNoteLoader: EvernoteIcon,
   FacebookChatLoader: FBIcon,
+  FirecrawlCrawlApi: FirecrawlIcon,
+  FirecrawlScrapeApi: FirecrawlIcon,
   GitbookLoader: GitBookIcon,
   GoogleSearchAPIWrapper: GoogleIcon,
   GoogleSearchResults: GoogleIcon,
@@ -463,3 +463,11 @@ export const logTypeIsError = (
   return isErrorLog(outputs);
 }
 };
+
+export function isEndpointNameValid(name: string, maxLength: number): boolean {
+  return (
+    (/^[a-zA-Z0-9_-]+$/.test(name) && name.length <= maxLength) ||
+    // empty is also valid
+    name.length === 0
+  );
+}
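The new helper accepts letters, digits, underscores, and hyphens up to the length cap, and explicitly treats the empty string as valid, since an empty input means the flow simply has no endpoint name. Expected results for representative inputs, shown as a sketch (the values follow directly from the regex above; assumes the helper is imported from utils):

// Behavior of isEndpointNameValid for representative inputs.
isEndpointNameValid("my-flow_v2", 50);   // true: letters, digits, "-", "_"
isEndpointNameValid("", 50);             // true: empty means no endpoint name
isEndpointNameValid("has spaces", 50);   // false: whitespace is rejected
isEndpointNameValid("a".repeat(51), 50); // false: longer than maxLength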
@@ -26,3 +26,15 @@ def test_webhook_endpoint(client, added_webhook_test):
     response = client.post(endpoint, json=payload)
     assert response.status_code == 202
     assert not file_path.exists()
+
+
+def test_webhook_with_random_payload(client, added_webhook_test):
+    endpoint_name = added_webhook_test["endpoint_name"]
+    endpoint = f"api/v1/webhook/{endpoint_name}"
+    # Just test that "Random Payload"
+    # returns 202
+    response = client.post(
+        endpoint,
+        json="Random Payload",
+    )
+    assert response.status_code == 202
@@ -35,16 +35,20 @@ def created_messages(session):
     return messages_read
 
 
-def test_get_messages(session):
-    add_messages(Message(text="Test message 1", sender="User", sender_name="User", session_id="session_id2"))
-    add_messages(Message(text="Test message 2", sender="User", sender_name="User", session_id="session_id2"))
+def test_get_messages():
+    add_messages(
+        [
+            Message(text="Test message 1", sender="User", sender_name="User", session_id="session_id2"),
+            Message(text="Test message 2", sender="User", sender_name="User", session_id="session_id2"),
+        ]
+    )
     messages = get_messages(sender="User", session_id="session_id2", limit=2)
     assert len(messages) == 2
     assert messages[0].text == "Test message 1"
     assert messages[1].text == "Test message 2"
 
 
-def test_add_messages(session):
+def test_add_messages():
     message = Message(text="New Test message", sender="User", sender_name="User", session_id="new_session_id")
     messages = add_messages(message)
     assert len(messages) == 1
@@ -65,7 +69,7 @@ def test_delete_messages(session):
     assert len(messages) == 0
 
 
-def test_store_message(session):
+def test_store_message():
     message = Message(text="Stored message", sender="User", sender_name="User", session_id="stored_session_id")
     stored_messages = store_message(message)
     assert len(stored_messages) == 1