Merge branch 'feature/output_dropdown' into fix_build_fe

This commit is contained in:
ogabrielluiz 2024-05-31 16:25:43 -03:00
commit 1fa3a6a379
55 changed files with 890 additions and 498 deletions

View file

@ -52,3 +52,12 @@ jobs:
push: true
file: ${{ env.DOCKERFILE }}
tags: ${{ env.TAGS }}
- name: Wait for Docker Hub to propagate
run: sleep 120
- name: Restart HuggingFace Spaces Build
# There's a script in ./scripts/factory_reset_space.py that will reset the build
# using the HUGGINGFACE_API_TOKEN secret
run: |
python ./scripts/factory_reset_space.py
env:
HUGGINGFACE_API_TOKEN: ${{ secrets.HUGGINGFACE_API_TOKEN }}

View file

@ -25,13 +25,6 @@ repos:
args:
- --fix=lf
- id: trailing-whitespace
- id: pretty-format-json
exclude: ^tsconfig.*.json
args:
- --autofix
- --indent=4
- --no-sort-keys
- id: check-merge-conflict
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.4.2

View file

@ -3,7 +3,7 @@
# [![Langflow](./docs/static/img/hero.png)](https://www.langflow.org)
<p align="center"><strong>
A visual framework for building Gen-AI and RAG apps with LangChain
A visual framework for building multi-agent and RAG applications
</strong></p>
<p align="center" style="font-size: 12px;">
Open-source, Python-powered, fully customizable, LLM and vector store agnostic
@ -42,7 +42,7 @@
You can install Langflow with pip:
```shell
# Make sure you have Python 3.10 or greater installed on your system.
# Make sure you have >=Python 3.10 installed on your system.
# Install the pre-release version (recommended for the latest updates)
python -m pip install langflow --pre --force-reinstall

View file

@ -105,6 +105,8 @@ The default list at the moment is:
- PINECONE_API_KEY
- SEARCHAPI_API_KEY
- SERPAPI_API_KEY
- UPSTASH_VECTOR_REST_URL
- UPSTASH_VECTOR_REST_TOKEN
- VECTARA_CUSTOMER_ID
- VECTARA_CORPUS_ID
- VECTARA_API_KEY

363
poetry.lock generated
View file

@ -167,20 +167,19 @@ files = [
[[package]]
name = "anthropic"
version = "0.26.1"
version = "0.28.0"
description = "The official Python library for the anthropic API"
optional = false
python-versions = ">=3.7"
files = [
{file = "anthropic-0.26.1-py3-none-any.whl", hash = "sha256:2812b9b250b551ed8a1f0a7e6ae3f005654098994f45ebca5b5808bd154c9628"},
{file = "anthropic-0.26.1.tar.gz", hash = "sha256:26680ff781a6f678a30a1dccd0743631e602b23a47719439ffdef5335fa167d8"},
{file = "anthropic-0.28.0-py3-none-any.whl", hash = "sha256:2b620b21aee3d20c5d8005483c34df239d53ae895687113b26b8a36892a7e20f"},
]
[package.dependencies]
anyio = ">=3.5.0,<5"
distro = ">=1.7.0,<2"
httpx = ">=0.23.0,<1"
jiter = ">=0.1.0,<1"
jiter = ">=0.4.0,<1"
pydantic = ">=1.9.0,<3"
sniffio = "*"
tokenizers = ">=0.13.0"
@ -472,17 +471,17 @@ files = [
[[package]]
name = "boto3"
version = "1.34.114"
version = "1.34.116"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "boto3-1.34.114-py3-none-any.whl", hash = "sha256:4460958d2b0c53bd2195b23ed5d45db2350e514486fe8caeb38b285b30742280"},
{file = "boto3-1.34.114.tar.gz", hash = "sha256:eeb11bca9b19d12baf93436fb8a16b8b824f1f7e8b9bcc722607e862c46b1b08"},
{file = "boto3-1.34.116-py3-none-any.whl", hash = "sha256:e7f5ab2d1f1b90971a2b9369760c2c6bae49dae98c084a5c3f5c78e3968ace15"},
{file = "boto3-1.34.116.tar.gz", hash = "sha256:53cb8aeb405afa1cd2b25421e27a951aeb568026675dec020587861fac96ac87"},
]
[package.dependencies]
botocore = ">=1.34.114,<1.35.0"
botocore = ">=1.34.116,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@ -491,13 +490,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.34.114"
version = "1.34.116"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
{file = "botocore-1.34.114-py3-none-any.whl", hash = "sha256:606d1e55984d45e41a812badee292755f4db0233eed9cca63ea3bb8f5755507f"},
{file = "botocore-1.34.114.tar.gz", hash = "sha256:5705f74fda009656a218ffaf4afd81228359160f2ab806ab8222d07e9da3a73b"},
{file = "botocore-1.34.116-py3-none-any.whl", hash = "sha256:ec4d42c816e9b2d87a2439ad277e7dda16a4a614ef6839cf66f4c1a58afa547c"},
{file = "botocore-1.34.116.tar.gz", hash = "sha256:269cae7ba99081519a9f87d7298e238d9e68ba94eb4f8ddfa906224c34cb8b6c"},
]
[package.dependencies]
@ -2595,13 +2594,13 @@ httplib2 = ">=0.19.0"
[[package]]
name = "google-cloud-aiplatform"
version = "1.52.0"
version = "1.53.0"
description = "Vertex AI API client library"
optional = false
python-versions = ">=3.8"
files = [
{file = "google-cloud-aiplatform-1.52.0.tar.gz", hash = "sha256:932a56e3050b4bc9a2c0630e6af3c0bd52f0bcf72b5dc01c059874231099edd3"},
{file = "google_cloud_aiplatform-1.52.0-py2.py3-none-any.whl", hash = "sha256:8c62f5d0ec39e008737ebba4875105ed7563dd0958f591f95dc7816e4b30f92a"},
{file = "google-cloud-aiplatform-1.53.0.tar.gz", hash = "sha256:574cfad8ac5fa5d57ef717f5335ce05636a5fa9b8aeea0f5c325b46b9448e6b1"},
{file = "google_cloud_aiplatform-1.53.0-py2.py3-none-any.whl", hash = "sha256:9dfb1f110e6d4795b45afcfab79108fc5c8ed9aa4eaf899e433bc2ca1b76c778"},
]
[package.dependencies]
@ -3545,22 +3544,22 @@ files = [
[[package]]
name = "importlib-metadata"
version = "7.0.0"
version = "7.1.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"},
{file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"},
{file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"},
{file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"},
]
[package.dependencies]
zipp = ">=0.5"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
[[package]]
name = "importlib-resources"
@ -3637,13 +3636,13 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio
[[package]]
name = "ipython"
version = "8.24.0"
version = "8.25.0"
description = "IPython: Productive Interactive Computing"
optional = false
python-versions = ">=3.10"
files = [
{file = "ipython-8.24.0-py3-none-any.whl", hash = "sha256:d7bf2f6c4314984e3e02393213bab8703cf163ede39672ce5918c51fe253a2a3"},
{file = "ipython-8.24.0.tar.gz", hash = "sha256:010db3f8a728a578bb641fdd06c063b9fb8e96a9464c63aec6310fbcb5e80501"},
{file = "ipython-8.25.0-py3-none-any.whl", hash = "sha256:53eee7ad44df903a06655871cbab66d156a051fd86f3ec6750470ac9604ac1ab"},
{file = "ipython-8.25.0.tar.gz", hash = "sha256:c6ed726a140b6e725b911528f80439c534fac915246af3efc39440a6b0f9d716"},
]
[package.dependencies]
@ -3662,7 +3661,7 @@ typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""}
[package.extras]
all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"]
black = ["black"]
doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"]
doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"]
kernel = ["ipykernel"]
matplotlib = ["matplotlib"]
nbconvert = ["nbconvert"]
@ -3722,72 +3721,72 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "jiter"
version = "0.4.0"
version = "0.4.1"
description = "Fast iterable JSON parser."
optional = false
python-versions = ">=3.8"
files = [
{file = "jiter-0.4.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4aa6226d82a4a4505078c0bd5947bad65399635fc5cd4b226512e41753624edf"},
{file = "jiter-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:947111ac906740a948e7b63799481acd3d5ef666ccb178d146e25718640b7408"},
{file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69572ffb4e84ae289a7422b9af4ea123cae2ce0772228859b37d4b26b4bc92ea"},
{file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba6046cbb5d1baa5a781b846f7e5438596a332f249a857d63f86ef5d1d9563b0"},
{file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4f346e54602782e66d07df0d1c7389384fd93680052ed6170da2c6dc758409e"},
{file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49110ce693f07e97d61089d894cea05a0b9894d5ccc6ac6fc583028726c8c8af"},
{file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e358df6fd129f3a4e087539f086355ad0107e5da16dbc8bc857d94222eaeed5"},
{file = "jiter-0.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb852ca39a48f3c049def56f0d1771b32e948e4f429a782d14ef4cc64cfd26e"},
{file = "jiter-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:44dc045decb2545bffe2da04ea4c36d9438d3f3d49fc47ed423ea75c352b712e"},
{file = "jiter-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:413adb15372ac63db04373240f40925788e4282c997eeafc2040530049a0a599"},
{file = "jiter-0.4.0-cp310-none-win32.whl", hash = "sha256:0b48ea71673a97b897e4b94bbc871e62495a5a85f836c9f90712a4c70aa3ef7e"},
{file = "jiter-0.4.0-cp310-none-win_amd64.whl", hash = "sha256:6a1c84b44afafaf0ba6223679cf17af664b889da14da31d8af3595fd977d96fa"},
{file = "jiter-0.4.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b2cc498345fa37ca23fbc20271a553aa46e6eb00924600f49b7dc4b2aa8952ee"},
{file = "jiter-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:69f7221ac09ab421abf04f89942026868297c568133998fb181bcf435760cbf3"},
{file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d01c52f3e5a56ae73af36bd13797dd1a56711eb522748e5e84d15425b3f10"},
{file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:39be97d5ce0c4d0dae28c23c03a0af0501a725589427e99763f99c42e18aa402"},
{file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eac2ed1ec1e577b92b7ea2d4e6de8aec0c1164defd8af8affdc8ec0f0ec2904a"},
{file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6258837d184c92c9cb91c983c310ad7269d41afb49d34f00ca9246e073943a03"},
{file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123c2a77b066bf17a4d021e238e8351058cfa56b90ac04f2522d120dc64ea055"},
{file = "jiter-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2df939f792c7a40e55f36700417db551b9f6b84d348990fa0f2c608adeb1f11b"},
{file = "jiter-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cb1b09b16d40cf9ba1d11ba11e5b96ad29286a6a1c4ad5e6a2aef5e352a89f5d"},
{file = "jiter-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0efb4208889ebdbf933bf08dbcbc16e64ffd34c8e2b28044ee142789a9dc3a67"},
{file = "jiter-0.4.0-cp311-none-win32.whl", hash = "sha256:20545ac1b68e7e5b066a1e8347840c9cebdd02ace65faae2e655fc02ec5c915c"},
{file = "jiter-0.4.0-cp311-none-win_amd64.whl", hash = "sha256:6b300f9887c8e4431cd03a974ea3e4f9958885636003c3864220a9b2d2f8462b"},
{file = "jiter-0.4.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:923432a0563bbae404ff25bb010e348514a69bfab979f2f8119b23b625dbf6d9"},
{file = "jiter-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab8bb0ec8b97cec4422dc8b37b525442d969244488c805b834609ab0ccd788e2"},
{file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b857adb127b9c533907226791eafa79c5038c3eb5a477984994bf7c4715ba518"},
{file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2609cc0d1d8d470e921ff9a604afeb4c701bbe13e00bd9834d5aa6e7ea732a9b"},
{file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d39e99f8b7df46a119b6f84321f6ba01f16fa46abfa765d44c05c486d8e66829"},
{file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:56de8b518ebfe76a70f856741f6de248ce396c50a87acef827b6e8388e3a502d"},
{file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488b7e777be47f67ce1a1f8f8eb907f9bbd81af5c03784a9bab09d025c250233"},
{file = "jiter-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ea35e0ecbb5dadd457855eb980dcc548c14cf5341bcd22a43814cb56f2bcc79"},
{file = "jiter-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e1a9e9ee69c80b63951c93226b68d0e955953f64fe758bad2afe7ef7f9016af9"},
{file = "jiter-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:78e2f3cc2a32a21d43ccc5afcf66f5d17e827ccc4e6d21c0b353bdad2c7dcc9c"},
{file = "jiter-0.4.0-cp312-none-win32.whl", hash = "sha256:eeaa7a2b47a99f4ebbb4142bb58b95617e09f24c87570f6a57d2770687c9ddbe"},
{file = "jiter-0.4.0-cp312-none-win_amd64.whl", hash = "sha256:8d4a78b385b93ff59a67215d26000fcb4789a388fca3730d1b60fab17fc81e3c"},
{file = "jiter-0.4.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ebf20a3fac1089ce26963bf04140da0f803d55332ec69d59c5a87cf1a87d29c4"},
{file = "jiter-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d62244ffc6a168187452277adeefb7b2c30170689c6bf543a51e98e8c17ddab7"},
{file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40b2cde77446a41cec595739fd168be87edff2428eaf7c3438231224dd0ab7a5"},
{file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e51fc0a22021ec8905b9b00a2f7d25756f2ff7a653e35a790a2067ae126b51f6"},
{file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a56e6f980b89d7cfe5c43811dcf52d6f37b319428a4540511235dafda9ea7808"},
{file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fec16adab8d3d3d6d74e3711a1f380836ebeab2a20e3f88cfe2ec5094d8b84"},
{file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e3de515801c954e8f1dc1f575282a4a86df9e782d4993ea1ed2be9a8dedaa0"},
{file = "jiter-0.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17e0ad8abf0bb04d81810eaeaab35d2c99b5da11fcd1058e0a389607ff6503b0"},
{file = "jiter-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8dc0132b728f3b3e90ff0d1874504cd49c78f3553bf3745168a7fc0b4cf674e1"},
{file = "jiter-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81a883104aa96e494d3d28eaf7070780d03ecee8ccfdfaf7e4899710340c47f1"},
{file = "jiter-0.4.0-cp38-none-win32.whl", hash = "sha256:a044c53ab1aaa4af624ac9574181b5bad8e260aea7e03104738156511433deba"},
{file = "jiter-0.4.0-cp38-none-win_amd64.whl", hash = "sha256:d920035c869053e3d9a0b3ff94384d16a8ef5fde3dea55f97bd29916f6e27554"},
{file = "jiter-0.4.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:091e978f4e586a2f1c69bf940d45f4e6a23455877172a0ab7d6de04a3b119299"},
{file = "jiter-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79134b2d601309bcbe3304a262d7d228ad61d53c80883231c637773000a6d683"},
{file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c471473e0b05058b5d729ff04271b6d45a575ac8bd9948563268c734b380ac7e"},
{file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb84b8930da8b32b0b1fdff9817e2c4b47e8981b5647ad11c4975403416e4112"},
{file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f2805e28941751ebfe0948596a64cde4cfb9b84bea5282affd020063e659c96"},
{file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42ef59f9e513bf081a8b5c5578933ea9c3a63e559e6e3501a3e72edcd456ff5e"},
{file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae12e3906f9e565120ab569de261b738e3a1ec50c40e30c67499e4f893e9a8c"},
{file = "jiter-0.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:264dc1324f45a793bc89af4f653225229eb17bca9ec7107dce6c8fb4fe68d20f"},
{file = "jiter-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9a1c172ec47d846e25881dfbd52438ddb690da4ea04d185e477abd3db6c32f8a"},
{file = "jiter-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ccde31d0bc114aedad0dbd71b7f63ba0f0eecd7ec9ae1926a0ca01c1eb2854e7"},
{file = "jiter-0.4.0-cp39-none-win32.whl", hash = "sha256:13139b05792fbc13a0f9a5b4c89823ea0874141decae1b8f693f12bb1d28e061"},
{file = "jiter-0.4.0-cp39-none-win_amd64.whl", hash = "sha256:3a729b2631c6d5551a41069697415fee9659c3eadc9ab87369376ba51930cd00"},
{file = "jiter-0.4.0.tar.gz", hash = "sha256:68203e02e0419bc3eca717c580c2d8f615aeee1150e2a1fb68d6600a7e52a37c"},
{file = "jiter-0.4.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3c2370cd8826b484f3fc6ed729cb58510ba24b4bc277c92323a57d35cf4df223"},
{file = "jiter-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3587af23140a2eb282bba980010dae60f3b8b1579a034c5e869e9b94220a5972"},
{file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df8788d34545d47de864032a78bae49a14b66b67196c73cd95f1c1e3081d9c73"},
{file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91bf2d31e906a3ca26fc8ee0cb979e0e51b12aa7e83999c6afea047538f95e5c"},
{file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8586e68702666b6acd919c65f718a09603adcfd8b4c7026bade2441d9e7bd34e"},
{file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:639b766bde088546b5205fd31608502b5b42abee3294b43cc95c6ea8f9a257c3"},
{file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cb32457296351c98da289d21a092a6c53c75beb80e7127c8e16224ee342c7c7"},
{file = "jiter-0.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:583263bd81bce5426806cf27ba85e4b97746797fae13c71e50a8689e06e57f81"},
{file = "jiter-0.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c0e413999a819ccef9b5fd22ef4b9b8c48a98e49da4d09b43ebce286d0d80e26"},
{file = "jiter-0.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5e50468d5acfef335ba8bc3892bb304354c38ba18acb3f7ae428451e47136e49"},
{file = "jiter-0.4.1-cp310-none-win32.whl", hash = "sha256:b2ac90b94dd717644c61c8ed0c2ec6e9505bd7314b91a1549680d7f1cb8f1da4"},
{file = "jiter-0.4.1-cp310-none-win_amd64.whl", hash = "sha256:2509868b8dacf4f65d04b4d951d390f30f403a87a997a14e2db2d232c7a468a7"},
{file = "jiter-0.4.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b56e4f2fa5767976f2332e9e067010ddfe1379b6456b5458123ba50657c33e02"},
{file = "jiter-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f813b49db21c946aa010accc54b8e5c9d0007be252bda4738159fa6c65d6d396"},
{file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2933c04ebd77b3e9cf34f80ba45c093739c687c9c5a4fd0a8c701a3bfd90940"},
{file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b02ddd65513705ec38211ea48ffc0fde41aa46166d9f7706972daf97b57c8599"},
{file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:88d06af883524e5429d75395bb4ee6ddeda4c30818b2f3e3b8f4afa2dd8f28c0"},
{file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd333eca1090cf21e6359721eecbb2a7fe031cc4db3dd595081430b4a59371c5"},
{file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdc90017cd22dca6b40f2f8518b38363e78aee3cb32f84e1cb08900a598ca91b"},
{file = "jiter-0.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aedce5b11ca58853d46461e1880079836bfab4e132be2b7d2093ec193081bbc8"},
{file = "jiter-0.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e55b2f4d2d5066979b0e0e58d85e3fffd0f6e6a0523aab7e0ce75950259387da"},
{file = "jiter-0.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3c85c586f1cd71c2a1e78756f6857119947b532379bd9be4338bf3dacf1e87f"},
{file = "jiter-0.4.1-cp311-none-win32.whl", hash = "sha256:37875f56222f2bb61410e15196d9b91510ccca322c391f3d20c91d667130d15e"},
{file = "jiter-0.4.1-cp311-none-win_amd64.whl", hash = "sha256:b71758befea8dbdc10e0fb40a776e085eed0e74afef42468ebb58562289e9190"},
{file = "jiter-0.4.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:93a8869c18a3721e41d7adb289c5c71aea8887eb368a3411219a0afb62955cbe"},
{file = "jiter-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ffbc61349f2f27676d40d68e8ef83fc2a9dd2c1464962b1d1b1d8504bccbf85"},
{file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f1f33e9fd4de4369a8d00fdf2571a8246a942095fb2a9d4cd25135ee675c85"},
{file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8f91a19eba23b4a1bb1e5b64c19cfdbf46604180e5dee40548b53ca13afd2d9"},
{file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a60f8e495448d8e02d291fa9a8522cfe775a10210ba428994f383965e6f6e65"},
{file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7387998c6585ce0f02ae4f5338fabf72b99494860c347f27bc34720290eafb15"},
{file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7cbf41da6506b42db21a1a0befa48e16384591e84e80db002a826ccf07668f1"},
{file = "jiter-0.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:793ae2499722b9fc31e300abd07418902512109bca17f617598a31a9e17bddce"},
{file = "jiter-0.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:19f7953b8ada7ee109764ad91d4afb1a9f69b77cde0b890844744c513612dbf8"},
{file = "jiter-0.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dcd3d6a142d7b267a8c5f1e28d02759e2e29343b095f6d8aaf463333a842e1f8"},
{file = "jiter-0.4.1-cp312-none-win32.whl", hash = "sha256:fffdf137c3ab7f0c5facb7c478b57ad3e1eb9b149daff48687844de77b78ab70"},
{file = "jiter-0.4.1-cp312-none-win_amd64.whl", hash = "sha256:fde004e47a801512c4167f188a6372960374fbd59e635753b3ee536e81953eb3"},
{file = "jiter-0.4.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b429ba25e05ca28d5e7efa4249032746ac28ec6ad68017ed3ea009989c597911"},
{file = "jiter-0.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:27df9925d0282c80bdd41613ace7cd799bd6355acdfe25cc48ec16843541999e"},
{file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb68736a0e2b00eda83937c1937f999e8d7dab68820c04343ac2e2eb2c5c2193"},
{file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c218458ac32ce0b495f013293867649b40c067a4d7533fa0d70a46f7194febae"},
{file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebead86e80e352753f6e6f78ca96c12d764a8dbbc7c4b25938ce657ab0e4e879"},
{file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf58f878d43294bea400a9df86ef7796dd2e67969109bce22d337ca77372c69"},
{file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba671e60570cd99b8ed83ce0d82703040dc34c793229ac607f09683ba1981163"},
{file = "jiter-0.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef0bd8b68ad14f045544989b6ad3758bee6dc01f6924bce5b4fd7060b0a09b1b"},
{file = "jiter-0.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7888f165a0fe285e015ee18cfcb8b5432c4fa389235b4c24c339ca0cc51ba979"},
{file = "jiter-0.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d9c443b2a71a8c3ab6578f5faf7725ad5f63dbb92d87f820eec56de9da0560f"},
{file = "jiter-0.4.1-cp38-none-win32.whl", hash = "sha256:6f618d1b04493bc9196e466ef59e0a6388eb85e936d1a61833449677643bbdd9"},
{file = "jiter-0.4.1-cp38-none-win_amd64.whl", hash = "sha256:46b6364a0b2a81cc259768bda131e8528aa3af4312f23f7e10aa04d24f54bbb1"},
{file = "jiter-0.4.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6680785a9273a87e463c86a962042d620c00c7bb8100dde1a4c78b2184cdd613"},
{file = "jiter-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:36b10d945b9ccd2e9f2720e37395daf9e63cfa47e5e0e2887c4931888f0800cd"},
{file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78820599693bda34be17119abf9fad1f02e501b4816e47addbee9c5c768fb361"},
{file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68697317170d8f851dfe978ba278b886e54e837ecd2a80c4a33ae780a0f19526"},
{file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d126ffc3876cfc1fba6ae2be37f2532b5db593a96cf4b845724b50b44339c4fd"},
{file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b082223f2e7e6f506d837df935f58f25cabf0a2b35902b4ec73fb561fbf2694a"},
{file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13314287782782be8439dfafca50f13fcab18046227068a3a8e8d8ac888f092b"},
{file = "jiter-0.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3da1346375605926f1ca4604d154ff41f5e3b933c6e01005e534bca2197d919f"},
{file = "jiter-0.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9b67a97fbce3ec35ee97439c8b786393f71ecbe7458d5e9279d4c172772eac36"},
{file = "jiter-0.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7b0f34544923bff0f3393aa3d60087686d86089c9361f6530bb5d19ebfb3db47"},
{file = "jiter-0.4.1-cp39-none-win32.whl", hash = "sha256:b0c93ef95b896a4ce5edff23071e4dcad77c9e9262fcb6ca2b050f781e8335a9"},
{file = "jiter-0.4.1-cp39-none-win_amd64.whl", hash = "sha256:3db5c83c8655ce031943b6f08434dac1a91e1477b0df452de0c44f3390a9b22c"},
{file = "jiter-0.4.1.tar.gz", hash = "sha256:741851cf5f37cf3583f2a56829d734c9fd17334770c9a326e6d25291603d4278"},
]
[[package]]
@ -4075,17 +4074,17 @@ langchain-core = ">=0.1.43,<0.3"
[[package]]
name = "langchain-astradb"
version = "0.3.2"
version = "0.3.3"
description = "An integration package connecting Astra DB and LangChain"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_astradb-0.3.2-py3-none-any.whl", hash = "sha256:15afc5c0105e863e8f57bf8686490c00be47ed05e47d3263ad1577f2031c0dd5"},
{file = "langchain_astradb-0.3.2.tar.gz", hash = "sha256:4316f2c59402779a347a811e1b5470a0570348cb89baac17472d860b63188122"},
{file = "langchain_astradb-0.3.3-py3-none-any.whl", hash = "sha256:39deef1253947ef1bfaf3c27881ecdf07621d96c2cf37814aed9e506a9bee217"},
{file = "langchain_astradb-0.3.3.tar.gz", hash = "sha256:f9a996ec4bef134896195430adeb7f264389c368a03d2ea91356837e8ddde091"},
]
[package.dependencies]
astrapy = ">=1,<2"
astrapy = ">=1.2,<2.0"
langchain-core = ">=0.1.31,<0.3"
numpy = ">=1,<2"
@ -4150,18 +4149,18 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.
[[package]]
name = "langchain-core"
version = "0.2.1"
version = "0.2.3"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_core-0.2.1-py3-none-any.whl", hash = "sha256:3521e1e573988c47399fca9739270c5d34f8ecec147253ad829eb9ff288f76d5"},
{file = "langchain_core-0.2.1.tar.gz", hash = "sha256:49383126168d934559a543ce812c485048d9e6ac9b6798fbf3d4a72b6bba5b0c"},
{file = "langchain_core-0.2.3-py3-none-any.whl", hash = "sha256:22189b5a3a30bfd65eb995f95e627f7c2c3acb322feb89f5f5f2fb7df21833a7"},
{file = "langchain_core-0.2.3.tar.gz", hash = "sha256:fbc75a64b9c0b7655d96ca57a707df1e6c09efc1539c36adbd73260612549810"},
]
[package.dependencies]
jsonpatch = ">=1.33,<2.0"
langsmith = ">=0.1.0,<0.2.0"
langsmith = ">=0.1.65,<0.2.0"
packaging = ">=23.2,<24.0"
pydantic = ">=1,<3"
PyYAML = ">=5.3"
@ -4259,18 +4258,18 @@ tokenizers = ">=0.15.1,<1"
[[package]]
name = "langchain-openai"
version = "0.1.7"
version = "0.1.8"
description = "An integration package connecting OpenAI and LangChain"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_openai-0.1.7-py3-none-any.whl", hash = "sha256:39c3cb22bb739900ae8294d4d9939a6138c0ca7ad11198e57038eb14c08d04ec"},
{file = "langchain_openai-0.1.7.tar.gz", hash = "sha256:fd7e1c33ba8e2cab4b2154f3a2fd4a0d9cc6518b41cf49bb87255f9f732a4896"},
{file = "langchain_openai-0.1.8-py3-none-any.whl", hash = "sha256:8125c84223e9f43b05defbca64eedbcf362fd78a680de6c25e64f973b34a8063"},
{file = "langchain_openai-0.1.8.tar.gz", hash = "sha256:a11fcce15def7917c44232abda6baaa63dfc79fe44be1531eea650d39a44cd95"},
]
[package.dependencies]
langchain-core = ">=0.1.46,<0.3"
openai = ">=1.24.0,<2.0.0"
langchain-core = ">=0.2.2,<0.3"
openai = ">=1.26.0,<2.0.0"
tiktoken = ">=0.7,<1"
[[package]]
@ -4323,7 +4322,7 @@ types-requests = ">=2.31.0.2,<3.0.0.0"
[[package]]
name = "langflow-base"
version = "0.0.53"
version = "0.0.54"
description = "A Python package with a built-in web application"
optional = false
python-versions = ">=3.10,<3.13"
@ -4380,13 +4379,13 @@ url = "src/backend/base"
[[package]]
name = "langfuse"
version = "2.33.0"
version = "2.33.1"
description = "A client library for accessing langfuse"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langfuse-2.33.0-py3-none-any.whl", hash = "sha256:362e3078c5a891df0b7ba3c9ce82f046d1f0274eab3d55337e443fff526f18ad"},
{file = "langfuse-2.33.0.tar.gz", hash = "sha256:3ca2ef8539a8f28cb80135f4b46b80d5585ce183f8e2035f318be296d09d7d88"},
{file = "langfuse-2.33.1-py3-none-any.whl", hash = "sha256:61ff3ff4b9c9c195028c981cba892106fdf90028e3950209a15f0ae06a378a36"},
{file = "langfuse-2.33.1.tar.gz", hash = "sha256:444a870e8b13ad37df710931389ecd3bad9997e550edf3c3178b5a0bd7ada013"},
]
[package.dependencies]
@ -4404,13 +4403,13 @@ openai = ["openai (>=0.27.8)"]
[[package]]
name = "langsmith"
version = "0.1.63"
version = "0.1.67"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langsmith-0.1.63-py3-none-any.whl", hash = "sha256:7810afdf5e3f3b472fc581a29371fb96cd843dde2149e048d1b9610325159d1e"},
{file = "langsmith-0.1.63.tar.gz", hash = "sha256:a609405b52f6f54df442a142cbf19ab38662d54e532f96028b4c546434d4afdf"},
{file = "langsmith-0.1.67-py3-none-any.whl", hash = "sha256:7eb2e1c1b375925ff47700ed8071e10c15e942e9d1d634b4a449a9060364071a"},
{file = "langsmith-0.1.67.tar.gz", hash = "sha256:149558669a2ac4f21471cd964e61072687bba23b7c1ccb51f190a8f59b595b39"},
]
[package.dependencies]
@ -4420,13 +4419,13 @@ requests = ">=2,<3"
[[package]]
name = "litellm"
version = "1.39.2"
version = "1.39.5"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
files = [
{file = "litellm-1.39.2-py3-none-any.whl", hash = "sha256:843cb9a4d45c89ba6da95529815ec83ee7e4b7fe07aa0ed633102f600fddd9ad"},
{file = "litellm-1.39.2.tar.gz", hash = "sha256:96c4f3d522ccf32817357b1e9f5f63fa36a4a884f336314e1f6d66c0576d689e"},
{file = "litellm-1.39.5-py3-none-any.whl", hash = "sha256:1e8dd43c5d257fa8d7a0039b20aed7aeed4463d53608d1ba4ac233f1967a5330"},
{file = "litellm-1.39.5.tar.gz", hash = "sha256:8f4ea9fe21d67890e81a578e12c30b4172260ff35971dc7c3edf7eb69167d3be"},
]
[package.dependencies]
@ -5584,13 +5583,13 @@ sympy = "*"
[[package]]
name = "openai"
version = "1.30.4"
version = "1.30.5"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
{file = "openai-1.30.4-py3-none-any.whl", hash = "sha256:fb2635efd270efaf9fac2e07558d7948373b940637d3ae3ab624c1a983d4f03f"},
{file = "openai-1.30.4.tar.gz", hash = "sha256:f3488d9a1c4e0d332b019377d27d7cb4b3d6103fd5d0a416c7ceac780d1d9b88"},
{file = "openai-1.30.5-py3-none-any.whl", hash = "sha256:2ad95e926de0d2e09cde632a9204b0a6dca4a03c2cdcc84329b01f355784355a"},
{file = "openai-1.30.5.tar.gz", hash = "sha256:5366562eb2c5917e6116ae0391b7ae6e3acd62b0ae3f565ada32b35d8fcfa106"},
]
[package.dependencies]
@ -5607,42 +5606,42 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
[[package]]
name = "opentelemetry-api"
version = "1.24.0"
version = "1.25.0"
description = "OpenTelemetry Python API"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_api-1.24.0-py3-none-any.whl", hash = "sha256:0f2c363d98d10d1ce93330015ca7fd3a65f60be64e05e30f557c61de52c80ca2"},
{file = "opentelemetry_api-1.24.0.tar.gz", hash = "sha256:42719f10ce7b5a9a73b10a4baf620574fb8ad495a9cbe5c18d76b75d8689c67e"},
{file = "opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737"},
{file = "opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869"},
]
[package.dependencies]
deprecated = ">=1.2.6"
importlib-metadata = ">=6.0,<=7.0"
importlib-metadata = ">=6.0,<=7.1"
[[package]]
name = "opentelemetry-exporter-otlp-proto-common"
version = "1.24.0"
version = "1.25.0"
description = "OpenTelemetry Protobuf encoding"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_exporter_otlp_proto_common-1.24.0-py3-none-any.whl", hash = "sha256:e51f2c9735054d598ad2df5d3eca830fecfb5b0bda0a2fa742c9c7718e12f641"},
{file = "opentelemetry_exporter_otlp_proto_common-1.24.0.tar.gz", hash = "sha256:5d31fa1ff976cacc38be1ec4e3279a3f88435c75b38b1f7a099a1faffc302461"},
{file = "opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl", hash = "sha256:15637b7d580c2675f70246563363775b4e6de947871e01d0f4e3881d1848d693"},
{file = "opentelemetry_exporter_otlp_proto_common-1.25.0.tar.gz", hash = "sha256:c93f4e30da4eee02bacd1e004eb82ce4da143a2f8e15b987a9f603e0a85407d3"},
]
[package.dependencies]
opentelemetry-proto = "1.24.0"
opentelemetry-proto = "1.25.0"
[[package]]
name = "opentelemetry-exporter-otlp-proto-grpc"
version = "1.24.0"
version = "1.25.0"
description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0-py3-none-any.whl", hash = "sha256:f40d62aa30a0a43cc1657428e59fcf82ad5f7ea8fff75de0f9d9cb6f739e0a3b"},
{file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0.tar.gz", hash = "sha256:217c6e30634f2c9797999ea9da29f7300479a94a610139b9df17433f915e7baa"},
{file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0-py3-none-any.whl", hash = "sha256:3131028f0c0a155a64c430ca600fd658e8e37043cb13209f0109db5c1a3e4eb4"},
{file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0.tar.gz", hash = "sha256:c0b1661415acec5af87625587efa1ccab68b873745ca0ee96b69bb1042087eac"},
]
[package.dependencies]
@ -5650,22 +5649,19 @@ deprecated = ">=1.2.6"
googleapis-common-protos = ">=1.52,<2.0"
grpcio = ">=1.0.0,<2.0.0"
opentelemetry-api = ">=1.15,<2.0"
opentelemetry-exporter-otlp-proto-common = "1.24.0"
opentelemetry-proto = "1.24.0"
opentelemetry-sdk = ">=1.24.0,<1.25.0"
[package.extras]
test = ["pytest-grpc"]
opentelemetry-exporter-otlp-proto-common = "1.25.0"
opentelemetry-proto = "1.25.0"
opentelemetry-sdk = ">=1.25.0,<1.26.0"
[[package]]
name = "opentelemetry-instrumentation"
version = "0.45b0"
version = "0.46b0"
description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_instrumentation-0.45b0-py3-none-any.whl", hash = "sha256:06c02e2c952c1b076e8eaedf1b82f715e2937ba7eeacab55913dd434fbcec258"},
{file = "opentelemetry_instrumentation-0.45b0.tar.gz", hash = "sha256:6c47120a7970bbeb458e6a73686ee9ba84b106329a79e4a4a66761f933709c7e"},
{file = "opentelemetry_instrumentation-0.46b0-py3-none-any.whl", hash = "sha256:89cd721b9c18c014ca848ccd11181e6b3fd3f6c7669e35d59c48dc527408c18b"},
{file = "opentelemetry_instrumentation-0.46b0.tar.gz", hash = "sha256:974e0888fb2a1e01c38fbacc9483d024bb1132aad92d6d24e2e5543887a7adda"},
]
[package.dependencies]
@ -5675,55 +5671,55 @@ wrapt = ">=1.0.0,<2.0.0"
[[package]]
name = "opentelemetry-instrumentation-asgi"
version = "0.45b0"
version = "0.46b0"
description = "ASGI instrumentation for OpenTelemetry"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_instrumentation_asgi-0.45b0-py3-none-any.whl", hash = "sha256:8be1157ed62f0db24e45fdf7933c530c4338bd025c5d4af7830e903c0756021b"},
{file = "opentelemetry_instrumentation_asgi-0.45b0.tar.gz", hash = "sha256:97f55620f163fd3d20323e9fd8dc3aacc826c03397213ff36b877e0f4b6b08a6"},
{file = "opentelemetry_instrumentation_asgi-0.46b0-py3-none-any.whl", hash = "sha256:f13c55c852689573057837a9500aeeffc010c4ba59933c322e8f866573374759"},
{file = "opentelemetry_instrumentation_asgi-0.46b0.tar.gz", hash = "sha256:02559f30cf4b7e2a737ab17eb52aa0779bcf4cc06573064f3e2cb4dcc7d3040a"},
]
[package.dependencies]
asgiref = ">=3.0,<4.0"
opentelemetry-api = ">=1.12,<2.0"
opentelemetry-instrumentation = "0.45b0"
opentelemetry-semantic-conventions = "0.45b0"
opentelemetry-util-http = "0.45b0"
opentelemetry-instrumentation = "0.46b0"
opentelemetry-semantic-conventions = "0.46b0"
opentelemetry-util-http = "0.46b0"
[package.extras]
instruments = ["asgiref (>=3.0,<4.0)"]
[[package]]
name = "opentelemetry-instrumentation-fastapi"
version = "0.45b0"
version = "0.46b0"
description = "OpenTelemetry FastAPI Instrumentation"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_instrumentation_fastapi-0.45b0-py3-none-any.whl", hash = "sha256:77d9c123a363129148f5f66d44094f3d67aaaa2b201396d94782b4a7f9ce4314"},
{file = "opentelemetry_instrumentation_fastapi-0.45b0.tar.gz", hash = "sha256:5a6b91e1c08a01601845fcfcfdefd0a2aecdb3c356d4a436a3210cb58c21487e"},
{file = "opentelemetry_instrumentation_fastapi-0.46b0-py3-none-any.whl", hash = "sha256:e0f5d150c6c36833dd011f0e6ef5ede6d7406c1aed0c7c98b2d3b38a018d1b33"},
{file = "opentelemetry_instrumentation_fastapi-0.46b0.tar.gz", hash = "sha256:928a883a36fc89f9702f15edce43d1a7104da93d740281e32d50ffd03dbb4365"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
opentelemetry-instrumentation = "0.45b0"
opentelemetry-instrumentation-asgi = "0.45b0"
opentelemetry-semantic-conventions = "0.45b0"
opentelemetry-util-http = "0.45b0"
opentelemetry-instrumentation = "0.46b0"
opentelemetry-instrumentation-asgi = "0.46b0"
opentelemetry-semantic-conventions = "0.46b0"
opentelemetry-util-http = "0.46b0"
[package.extras]
instruments = ["fastapi (>=0.58,<1.0)"]
[[package]]
name = "opentelemetry-proto"
version = "1.24.0"
version = "1.25.0"
description = "OpenTelemetry Python Proto"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_proto-1.24.0-py3-none-any.whl", hash = "sha256:bcb80e1e78a003040db71ccf83f2ad2019273d1e0828089d183b18a1476527ce"},
{file = "opentelemetry_proto-1.24.0.tar.gz", hash = "sha256:ff551b8ad63c6cabb1845ce217a6709358dfaba0f75ea1fa21a61ceddc78cab8"},
{file = "opentelemetry_proto-1.25.0-py3-none-any.whl", hash = "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f"},
{file = "opentelemetry_proto-1.25.0.tar.gz", hash = "sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3"},
]
[package.dependencies]
@ -5731,40 +5727,43 @@ protobuf = ">=3.19,<5.0"
[[package]]
name = "opentelemetry-sdk"
version = "1.24.0"
version = "1.25.0"
description = "OpenTelemetry Python SDK"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_sdk-1.24.0-py3-none-any.whl", hash = "sha256:fa731e24efe832e98bcd90902085b359dcfef7d9c9c00eb5b9a18587dae3eb59"},
{file = "opentelemetry_sdk-1.24.0.tar.gz", hash = "sha256:75bc0563affffa827700e0f4f4a68e1e257db0df13372344aebc6f8a64cde2e5"},
{file = "opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9"},
{file = "opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7"},
]
[package.dependencies]
opentelemetry-api = "1.24.0"
opentelemetry-semantic-conventions = "0.45b0"
opentelemetry-api = "1.25.0"
opentelemetry-semantic-conventions = "0.46b0"
typing-extensions = ">=3.7.4"
[[package]]
name = "opentelemetry-semantic-conventions"
version = "0.45b0"
version = "0.46b0"
description = "OpenTelemetry Semantic Conventions"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_semantic_conventions-0.45b0-py3-none-any.whl", hash = "sha256:a4a6fb9a7bacd9167c082aa4681009e9acdbfa28ffb2387af50c2fef3d30c864"},
{file = "opentelemetry_semantic_conventions-0.45b0.tar.gz", hash = "sha256:7c84215a44ac846bc4b8e32d5e78935c5c43482e491812a0bb8aaf87e4d92118"},
{file = "opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = "sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07"},
{file = "opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = "sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa"},
]
[package.dependencies]
opentelemetry-api = "1.25.0"
[[package]]
name = "opentelemetry-util-http"
version = "0.45b0"
version = "0.46b0"
description = "Web util for OpenTelemetry"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_util_http-0.45b0-py3-none-any.whl", hash = "sha256:6628868b501b3004e1860f976f410eeb3d3499e009719d818000f24ce17b6e33"},
{file = "opentelemetry_util_http-0.45b0.tar.gz", hash = "sha256:4ce08b6a7d52dd7c96b7705b5b4f06fdb6aa3eac1233b3b0bfef8a0cab9a92cd"},
{file = "opentelemetry_util_http-0.46b0-py3-none-any.whl", hash = "sha256:8dc1949ce63caef08db84ae977fdc1848fe6dc38e6bbaad0ae3e6ecd0d451629"},
{file = "opentelemetry_util_http-0.46b0.tar.gz", hash = "sha256:03b6e222642f9c7eae58d9132343e045b50aca9761fcb53709bd2b663571fdf6"},
]
[[package]]
@ -6630,13 +6629,13 @@ pyasn1 = ">=0.4.6,<0.7.0"
[[package]]
name = "pyautogen"
version = "0.2.27"
version = "0.2.28"
description = "Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework"
optional = false
python-versions = "<3.13,>=3.8"
files = [
{file = "pyautogen-0.2.27-py3-none-any.whl", hash = "sha256:9eb5c38544a0f79475c43442f9c5af2623165e32a7b9dd24ec141492f603a630"},
{file = "pyautogen-0.2.27.tar.gz", hash = "sha256:a8939d14fed1893109738a4c34ce490bfc6d869fd8a4ecb22932b86c81d9a5a5"},
{file = "pyautogen-0.2.28-py3-none-any.whl", hash = "sha256:69dffa4053096f496a50c8a252bbe23105b58fd6ffbb422fa8c043ecf3fc732b"},
{file = "pyautogen-0.2.28.tar.gz", hash = "sha256:f74686a981f2b6046a9cf6aff5a5e61615ec60d5559a49e7474467fbdf4e077b"},
]
[package.dependencies]
@ -6645,6 +6644,7 @@ docker = "*"
flaml = "*"
numpy = ">=1.17.0,<2"
openai = ">=1.3"
packaging = "*"
pydantic = ">=1.10,<2.6.0 || >2.6.0,<3"
python-dotenv = "*"
termcolor = "*"
@ -6658,11 +6658,12 @@ gemini = ["google-generativeai (>=0.5,<1)", "pillow", "pydantic"]
graph = ["matplotlib", "networkx"]
jupyter-executor = ["ipykernel (>=6.29.0)", "jupyter-client (>=8.6.0)", "jupyter-kernel-gateway", "requests", "websocket-client"]
lmm = ["pillow", "replicate"]
long-context = ["llmlingua (<0.3)"]
mathchat = ["pydantic (==1.10.9)", "sympy", "wolframalpha"]
redis = ["redis"]
retrievechat = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pypdf", "sentence-transformers"]
retrievechat-pgvector = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pgvector (>=0.2.5)", "psycopg (>=3.1.18)", "pypdf", "sentence-transformers"]
retrievechat-qdrant = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pypdf", "qdrant-client[fastembed]", "sentence-transformers"]
retrievechat = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "protobuf (==4.25.3)", "pypdf", "sentence-transformers"]
retrievechat-pgvector = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pgvector (>=0.2.5)", "protobuf (==4.25.3)", "psycopg (>=3.1.18)", "pypdf", "sentence-transformers"]
retrievechat-qdrant = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "protobuf (==4.25.3)", "pypdf", "qdrant-client[fastembed]", "sentence-transformers"]
teachable = ["chromadb"]
test = ["ipykernel", "nbconvert", "nbformat", "pandas", "pre-commit", "pytest (>=6.1.1,<8)", "pytest-asyncio", "pytest-cov (>=5)"]
types = ["ipykernel (>=6.29.0)", "jupyter-client (>=8.6.0)", "jupyter-kernel-gateway", "mypy (==1.9.0)", "pytest (>=6.1.1,<8)", "requests", "websocket-client"]
@ -8391,17 +8392,17 @@ httpx = ">=0.24,<0.28"
[[package]]
name = "sympy"
version = "1.12"
version = "1.12.1"
description = "Computer algebra system (CAS) in Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"},
{file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"},
{file = "sympy-1.12.1-py3-none-any.whl", hash = "sha256:9b2cbc7f1a640289430e13d2a56f02f867a1da0190f2f99d8968c2f74da0e515"},
{file = "sympy-1.12.1.tar.gz", hash = "sha256:2877b03f998cd8c08f07cd0de5b767119cd3ef40d09f41c30d722f6686b0fb88"},
]
[package.dependencies]
mpmath = ">=0.19"
mpmath = ">=1.1.0,<1.4.0"
[[package]]
name = "tbb"
@ -9174,6 +9175,20 @@ files = [
pyperclip = "*"
six = "*"
[[package]]
name = "upstash-vector"
version = "0.4.0"
description = "Serverless Vector SDK from Upstash"
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "upstash_vector-0.4.0-py3-none-any.whl", hash = "sha256:1ba11d8fc7d036bf93fde741b862c9e04ad962397dc600d1dc7546b63a84da82"},
{file = "upstash_vector-0.4.0.tar.gz", hash = "sha256:a8ae11b2d3989c2615f1f06c66af39da763af7f7239b625fede621bf2fbb997d"},
]
[package.dependencies]
httpx = ">=0.24.0,<0.28"
[[package]]
name = "uritemplate"
version = "4.1.1"
@ -9944,18 +9959,18 @@ pydantic = ">=2.0.0"
[[package]]
name = "zipp"
version = "3.19.0"
version = "3.19.1"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
{file = "zipp-3.19.0-py3-none-any.whl", hash = "sha256:96dc6ad62f1441bcaccef23b274ec471518daf4fbbc580341204936a5a3dddec"},
{file = "zipp-3.19.0.tar.gz", hash = "sha256:952df858fb3164426c976d9338d3961e8e8b3758e2e059e0f754b8c4262625ee"},
{file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"},
{file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[[package]]
name = "zope-event"
@ -10035,4 +10050,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
content-hash = "36778b105f6f6e5efd0c1d37651d7b97defb0bc0db74b868a41e38de22251924"
content-hash = "476c95dc8c6adb597a0cd2783eab65c02e0398fc144aa74d56a4cb36032f496f"

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "1.0.0a42"
version = "1.0.0a43"
description = "A Python package with a built-in web application"
authors = ["Langflow <contact@langflow.org>"]
maintainers = [
@ -85,6 +85,7 @@ couchbase = "^4.2.1"
youtube-transcript-api = "^0.6.2"
markdown = "^3.6"
langchain-chroma = "^0.1.1"
upstash-vector = "^0.4.0"
[tool.poetry.group.dev.dependencies]

View file

@ -0,0 +1,16 @@
import os

from huggingface_hub import HfApi
from rich import print

# Restart (factory reboot) the Langflow preview HuggingFace Space.
#
# The token must come from the environment (the CI workflow provides the
# HUGGINGFACE_API_TOKEN secret). A hardcoded fallback token was removed:
# credentials must never be committed to source control, and the previous
# env-var name was misspelled ("HUGGINFACE_API_TOKEN"), so the secret was
# silently ignored and the leaked token was always used.
token = os.getenv("HUGGINGFACE_API_TOKEN")
if not token:
    raise SystemExit("HUGGINGFACE_API_TOKEN environment variable is not set")

# Configure an HfApi client. The endpoint can point at a Private Hub.
hf_api = HfApi(
    endpoint="https://huggingface.co",
    token=token,
)

# factory_reboot=True wipes the Space's cache and rebuilds it from scratch.
space_runtime = hf_api.restart_space("Langflow/Langflow-Preview", factory_reboot=True)
print(space_runtime)

View file

@ -0,0 +1,45 @@
"""Add webhook columns
Revision ID: 631faacf5da2
Revises: 1c79524817ed
Create Date: 2024-04-22 15:14:43.454784
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = "631faacf5da2"
down_revision: Union[str, None] = "1c79524817ed"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add a nullable ``webhook`` boolean column to the ``flow`` table.

    The migration is idempotent: it is a no-op when the table is missing or
    the column already exists.
    """
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)  # type: ignore
    table_names = inspector.get_table_names()
    # ### commands auto generated by Alembic - please adjust! ###
    if "flow" not in table_names:
        # get_columns() would raise on a missing table, so bail out first.
        return
    column_names = [column["name"] for column in inspector.get_columns("flow")]
    if "webhook" not in column_names:
        # batch_alter_table is required for SQLite, which cannot ALTER in place.
        with op.batch_alter_table("flow", schema=None) as batch_op:
            batch_op.add_column(sa.Column("webhook", sa.Boolean(), nullable=True))
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the ``webhook`` column from the ``flow`` table if it exists.

    Idempotent: a no-op when the table is missing or the column is absent.
    """
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)  # type: ignore
    table_names = inspector.get_table_names()
    # ### commands auto generated by Alembic - please adjust! ###
    if "flow" not in table_names:
        # get_columns() would raise on a missing table, so bail out first.
        return
    column_names = [column["name"] for column in inspector.get_columns("flow")]
    if "webhook" in column_names:
        # batch_alter_table is required for SQLite, which cannot ALTER in place.
        with op.batch_alter_table("flow", schema=None) as batch_op:
            batch_op.drop_column("webhook")
    # ### end Alembic commands ###

View file

@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Annotated, List, Optional, Union
from uuid import UUID
import sqlalchemy as sa
from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile, status
from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException, Request, UploadFile, status
from loguru import logger
from sqlmodel import Session, select
@ -22,11 +22,14 @@ from langflow.api.v1.schemas import (
from langflow.custom import CustomComponent
from langflow.custom.utils import build_custom_component_template
from langflow.graph.graph.base import Graph
from langflow.graph.schema import RunOutputs
from langflow.helpers.flow import get_flow_by_id_or_endpoint_name
from langflow.processing.process import process_tweaks, run_graph_internal
from langflow.schema.graph import Tweaks
from langflow.services.auth.utils import api_key_security, get_current_active_user
from langflow.services.cache.utils import save_uploaded_file
from langflow.services.database.models.flow import Flow
from langflow.services.database.models.flow.utils import get_all_webhook_components_in_flow, get_flow_by_id
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session, get_session_service, get_settings_service, get_task_service
from langflow.services.session.service import SessionService
@ -53,10 +56,75 @@ async def get_all(
raise HTTPException(status_code=500, detail=str(exc)) from exc
async def simple_run_flow(
    db: Session,
    flow: Flow,
    input_request: SimplifiedAPIRequest,
    session_service: SessionService,
    stream: bool = False,
    api_key_user: Optional[User] = None,
):
    """Run a flow from a simplified API request, optionally resuming a session.

    Args:
        db: Database session (kept for interface compatibility with callers
            that schedule this as a background task).
        flow: The flow to execute; must carry graph ``data`` unless a cached
            session is being resumed.
        input_request: Carries the input value/type, output selection, tweaks
            and an optional session id.
        session_service: Used to load a previously cached (graph, artifacts)
            pair when ``input_request.session_id`` is set.
        stream: Whether results should be streamed.
        api_key_user: Optional authenticated user; its id is attached to the
            graph for ownership/tracing.

    Returns:
        RunResponse with the outputs of the selected components and the
        (possibly newly created) session id.

    Raises:
        ValueError: If the session or flow data cannot be found, or the flow
            id is not a valid UUID.
    """
    flow_id_str = str(flow.id)
    try:
        task_result: List[RunOutputs] = []
        artifacts = {}
        user_id = api_key_user.id if api_key_user else None
        if input_request.session_id:
            # Resume a cached session: reuse its graph and artifacts.
            session_data = await session_service.load_session(input_request.session_id, flow_id=flow_id_str)
            graph, artifacts = session_data if session_data else (None, None)
            if graph is None:
                raise ValueError(f"Session {input_request.session_id} not found")
        else:
            if flow.data is None:
                raise ValueError(f"Flow {flow_id_str} has no data")
            graph_data = process_tweaks(flow.data, input_request.tweaks or {})
            graph = Graph.from_payload(graph_data, flow_id=flow_id_str, user_id=str(user_id))
        inputs = [
            InputValueRequest(components=[], input_value=input_request.input_value, type=input_request.input_type)
        ]
        # Select which components should return output:
        # - an explicit output_component wins;
        # - output_type "debug" returns every vertex;
        # - "any" returns every output vertex (chat or text);
        # - "chat"/"text" return only output vertices whose id matches the type.
        if input_request.output_component:
            outputs = [input_request.output_component]
        else:
            outputs = [
                vertex.id
                for vertex in graph.vertices
                if input_request.output_type == "debug"
                or (
                    vertex.is_output
                    and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower())
                )
            ]
        task_result, session_id = await run_graph_internal(
            graph=graph,
            flow_id=flow_id_str,
            session_id=input_request.session_id,
            inputs=inputs,
            outputs=outputs,
            artifacts=artifacts,
            session_service=session_service,
            stream=stream,
        )
        return RunResponse(outputs=task_result, session_id=session_id)
    except sa.exc.StatementError as exc:
        # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
        if "badly formed hexadecimal UUID string" in str(exc):
            logger.error(f"Flow ID {flow_id_str} is not a valid UUID")
            # The flow id is not a valid UUID, so the flow cannot be found.
            raise ValueError(str(exc)) from exc
        # Fix: previously any other StatementError was silently swallowed and
        # the coroutine implicitly returned None; re-raise instead.
        raise
@router.post("/run/{flow_id_or_name}", response_model=RunResponse, response_model_exclude_none=True)
async def simplified_run_flow(
db: Annotated[Session, Depends(get_session)],
flow_id_or_name: str,
flow: Annotated[Flow, Depends(get_flow_by_id_or_endpoint_name)],
input_request: SimplifiedAPIRequest = SimplifiedAPIRequest(),
stream: bool = False,
api_key_user: User = Depends(api_key_security),
@ -67,7 +135,7 @@ async def simplified_run_flow(
### Parameters:
- `db` (Session): Database session for executing queries.
- `flow_id` (str): Unique identifier of the flow to be executed.
- `flow_id_or_name` (str): ID or endpoint name of the flow to run.
- `input_request` (SimplifiedAPIRequest): Request object containing input values, types, output selection, tweaks, and session ID.
- `api_key_user` (User): User object derived from the provided API key, used for authentication.
- `session_service` (SessionService): Service for managing flow sessions, essential for session reuse and caching.
@ -110,89 +178,21 @@ async def simplified_run_flow(
This endpoint provides a powerful interface for executing flows with enhanced flexibility and efficiency, supporting a wide range of applications by allowing for dynamic input and output configuration along with performance optimizations through session management and caching.
"""
session_id = input_request.session_id
endpoint_name = None
flow_id_str = None
try:
try:
flow_id = UUID(flow_id_or_name)
except ValueError:
endpoint_name = flow_id_or_name
flow = db.exec(
select(Flow).where(Flow.endpoint_name == endpoint_name).where(Flow.user_id == api_key_user.id)
).first()
if flow is None:
raise ValueError(f"Flow with endpoint name {endpoint_name} not found")
flow_id = flow.id
flow_id_str = str(flow_id)
artifacts = {}
if input_request.session_id:
session_data = await session_service.load_session(input_request.session_id, flow_id=flow_id_str)
graph, artifacts = session_data if session_data else (None, None)
if graph is None:
raise ValueError(f"Session {input_request.session_id} not found")
else:
# Get the flow that matches the flow_id and belongs to the user
# flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first()
flow = db.exec(select(Flow).where(Flow.id == flow_id_str).where(Flow.user_id == api_key_user.id)).first()
if flow is None:
raise ValueError(f"Flow {flow_id_str} not found")
if flow.data is None:
raise ValueError(f"Flow {flow_id_str} has no data")
graph_data = flow.data
graph_data = process_tweaks(graph_data, input_request.tweaks or {}, stream=stream)
graph = Graph.from_payload(graph_data, flow_id=flow_id_str, user_id=str(api_key_user.id))
inputs = [
InputValueRequest(components=[], input_value=input_request.input_value, type=input_request.input_type)
]
# outputs is a list of all components that should return output
# we need to get them by checking their type
# if the output type is debug, we return all outputs
# if the output type is any, we return all outputs that are either chat or text
# if the output type is chat or text, we return only the outputs that match the type
if input_request.output_component:
outputs = [input_request.output_component]
else:
outputs = [
vertex.id
for vertex in graph.vertices
if input_request.output_type == "debug"
or (
vertex.is_output
and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower())
)
]
task_result, session_id = await run_graph_internal(
graph=graph,
flow_id=flow_id_str,
session_id=input_request.session_id,
inputs=inputs,
outputs=outputs,
artifacts=artifacts,
return await simple_run_flow(
db=db,
flow=flow,
input_request=input_request,
session_service=session_service,
stream=stream,
api_key_user=api_key_user,
)
return RunResponse(outputs=task_result, session_id=session_id)
except sa.exc.StatementError as exc:
# StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
if "badly formed hexadecimal UUID string" in str(exc):
logger.error(f"Flow ID {flow_id_str} is not a valid UUID")
# This means the Flow ID is not a valid UUID which means it can't find the flow
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
except ValueError as exc:
if flow_id_str and f"Flow {flow_id_str} not found" in str(exc):
logger.error(f"Flow {flow_id_str} not found")
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
elif endpoint_name and f"Flow with endpoint name {endpoint_name} not found" in str(exc):
logger.error(f"Flow with endpoint name {endpoint_name} not found")
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
elif session_id and f"Session {session_id} not found" in str(exc):
logger.error(f"Session {session_id} not found")
if "badly formed hexadecimal UUID string" in str(exc):
# This means the Flow ID is not a valid UUID which means it can't find the flow
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
if "not found" in str(exc):
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
else:
logger.exception(exc)
@ -202,6 +202,68 @@ async def simplified_run_flow(
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc
@router.post("/webhook/{flow_id}", response_model=dict, status_code=HTTPStatus.ACCEPTED)
async def webhook_run_flow(
    db: Annotated[Session, Depends(get_session)],
    flow: Annotated[Flow, Depends(get_flow_by_id)],
    request: Request,
    background_tasks: BackgroundTasks,
    session_service: SessionService = Depends(get_session_service),
):
    """
    Run a flow using a webhook request.

    The raw JSON body is forwarded to every Webhook component in the flow via
    tweaks, and the flow itself is executed in a background task; the response
    only acknowledges acceptance (HTTP 202) and does not carry flow outputs.

    Args:
        db (Session): The database session, passed through to the background run.
        flow (Flow): The flow to be executed. Defaults to Depends(get_flow_by_id).
        request (Request): The incoming HTTP request; its body is the webhook payload.
        background_tasks (BackgroundTasks): The background tasks manager.
        session_service (SessionService, optional): The session service. Defaults to Depends(get_session_service).

    Returns:
        dict: A dictionary containing the status of the task.

    Raises:
        HTTPException: 400 if the body is empty, 500 for any other processing error.
    """
    try:
        logger.debug("Received webhook request")
        # Read the raw body first so an empty payload can be rejected early.
        data = await request.body()
        if not data:
            logger.error("Request body is empty")
            raise ValueError(
                "Request body is empty. You should provide a JSON payload containing the flow ID.",
            )

        # get all webhook components in the flow
        webhook_components = get_all_webhook_components_in_flow(flow.data)
        tweaks = {}

        # Re-parse the body as JSON (FastAPI caches the body, so this does not
        # read the stream twice). The decoded raw text is injected into every
        # webhook component's "data" field through tweaks.
        data_dict = await request.json()
        for component in webhook_components:
            tweaks[component["id"]] = {"data": data.decode() if isinstance(data, bytes) else data}
        input_request = SimplifiedAPIRequest(
            input_value=data_dict.get("input_value", ""),
            input_type=data_dict.get("input_type", "chat"),
            output_type=data_dict.get("output_type", "chat"),
            tweaks=tweaks,
            session_id=data_dict.get("session_id"),
        )
        logger.debug("Starting background task")
        # Execution happens after the response is sent; no api_key_user is
        # passed, so the run is attributed to no user (user_id=None).
        background_tasks.add_task(
            simple_run_flow,
            db=db,
            flow=flow,
            input_request=input_request,
            session_service=session_service,
        )
        return {"message": "Task started in the background", "status": "in progress"}
    except Exception as exc:
        # Dispatch on the message text: known client errors become 400,
        # everything else is logged and surfaced as 500.
        if "Flow ID is required" in str(exc) or "Request body is empty" in str(exc):
            raise HTTPException(status_code=400, detail=str(exc)) from exc
        logger.exception(exc)
        raise HTTPException(status_code=500, detail=str(exc)) from exc
@router.post("/run/advanced/{flow_id}", response_model=RunResponse, response_model_exclude_none=True)
async def experimental_run_flow(
session: Annotated[Session, Depends(get_session)],

View file

@ -13,6 +13,7 @@ from langflow.api.v1.schemas import FlowListCreate, FlowListIds, FlowListRead
from langflow.initial_setup.setup import STARTER_FOLDER_NAME
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.flow import Flow, FlowCreate, FlowRead, FlowUpdate
from langflow.services.database.models.flow.utils import get_webhook_component_in_flow
from langflow.services.database.models.folder.constants import DEFAULT_FOLDER_NAME
from langflow.services.database.models.folder.model import Folder
from langflow.services.database.models.user.model import User
@ -150,6 +151,8 @@ def update_flow(
for key, value in flow_data.items():
if value is not None:
setattr(db_flow, key, value)
webhook_component = get_webhook_component_in_flow(db_flow.data)
db_flow.webhook = webhook_component is not None
db_flow.updated_at = datetime.now(timezone.utc)
if db_flow.folder_id is None:
default_folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME)).first()

View file

@ -1,5 +1,4 @@
from typing import List
from uuid import UUID
import orjson
from fastapi import APIRouter, Depends, File, HTTPException, Response, UploadFile, status
@ -88,7 +87,7 @@ def read_folders(
def read_folder(
*,
session: Session = Depends(get_session),
folder_id: UUID,
folder_id: str,
current_user: User = Depends(get_current_active_user),
):
try:
@ -106,7 +105,7 @@ def read_folder(
def update_folder(
*,
session: Session = Depends(get_session),
folder_id: UUID,
folder_id: str,
folder: FolderUpdate, # Assuming FolderUpdate is a Pydantic model defining updatable fields
current_user: User = Depends(get_current_active_user),
):
@ -155,7 +154,7 @@ def update_folder(
def delete_folder(
*,
session: Session = Depends(get_session),
folder_id: UUID,
folder_id: str,
current_user: User = Depends(get_current_active_user),
):
try:
@ -177,7 +176,7 @@ def delete_folder(
async def download_file(
*,
session: Session = Depends(get_session),
folder_id: UUID,
folder_id: str,
current_user: User = Depends(get_current_active_user),
):
"""Download all flows from folder."""

View file

@ -0,0 +1,39 @@
import json
import uuid
from typing import Any, Optional
from langflow.custom import CustomComponent
from langflow.schema.dotdict import dotdict
from langflow.schema.schema import Record
class WebhookComponent(CustomComponent):
    """Flow entry point that accepts a JSON payload posted to the webhook endpoint."""

    display_name = "Webhook Input"
    description = "Defines a webhook input for the flow."

    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
        """Assign a fresh random hex id whenever the ``webhook_id`` field is rebuilt."""
        if field_name == "webhook_id":
            build_config["webhook_id"]["value"] = uuid.uuid4().hex
        return build_config

    def build_config(self):
        """Expose a single multiline ``data`` field used to test the webhook manually."""
        return {
            "data": {
                "display_name": "Data",
                "info": "Use this field to quickly test the webhook component by providing a JSON payload.",
                "multiline": True,
            }
        }

    def build(self, data: Optional[str] = "") -> Record:
        """Parse ``data`` as JSON and wrap the result in a Record.

        Malformed JSON is not fatal: the raw text is kept under a ``payload``
        key and the component status reports the parse failure instead.
        """
        status_message = ""
        try:
            parsed = json.loads(data or "{}")
        except json.JSONDecodeError:
            parsed = {"payload": data}
            status_message = f"Invalid JSON payload. Please check the format.\n\n{data}"
        if not status_message:
            status_message = json.dumps(parsed, indent=2)
        self.status = status_message
        return Record(data=parsed)

View file

@ -1,7 +1,8 @@
from .APIRequest import APIRequest
from .Directory import DirectoryComponent
from .File import FileComponent
from .Webhook import WebhookComponent
from .URL import URLComponent
__all__ = ["APIRequest", "DirectoryComponent", "FileComponent", "URLComponent"]
__all__ = ["APIRequest", "DirectoryComponent", "FileComponent", "URLComponent", "WebhookComponent"]

View file

@ -0,0 +1,79 @@
from typing import List, Optional
from langchain_core.embeddings import Embeddings
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Upstash import UpstashVectorStoreComponent
from langflow.field_typing import Text
from langflow.schema import Record
class UpstashSearchComponent(UpstashVectorStoreComponent, LCVectorStoreComponent):
    """Search an Upstash Vector Store for documents similar to the input text."""

    display_name: str = "Upstash Search"
    description: str = "Search an Upstash Vector Store for similar documents."

    def build_config(self):
        """Return the UI field configuration for this component.

        Returns:
        - dict: field name -> display/advanced options shown in the editor.
        """
        return {
            "search_type": {
                "display_name": "Search Type",
                "options": ["Similarity", "MMR"],
            },
            "input_value": {"display_name": "Input"},
            "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
            "embedding": {
                "display_name": "Embedding",
                "input_types": ["Embeddings"],
                "info": "To use Upstash's embeddings, don't provide an embedding.",
            },
            "index_url": {
                "display_name": "Index URL",
                "info": "The URL of the Upstash index.",
            },
            "index_token": {
                "display_name": "Index Token",
                "info": "The token for the Upstash index.",
            },
            "number_of_results": {
                "display_name": "Number of Results",
                "info": "Number of results to return.",
                "advanced": True,
            },
            "text_key": {
                "display_name": "Text Key",
                "info": "The key in the record to use as text.",
                "advanced": True,
            },
        }

    def build(  # type: ignore[override]
        self,
        input_value: Text,
        search_type: str,
        text_key: str = "text",
        index_url: Optional[str] = None,
        index_token: Optional[str] = None,
        embedding: Optional[Embeddings] = None,
        number_of_results: int = 4,
    ) -> List[Record]:
        """Construct the underlying Upstash store, then run the requested search.

        Raises:
            ValueError: when the parent component fails to produce a store.
        """
        store = super().build(
            embedding=embedding,
            text_key=text_key,
            index_url=index_url,
            index_token=index_token,
        )
        if not store:
            raise ValueError("Failed to load the Upstash Vector Store.")
        return self.search_with_vector_store(
            input_value=input_value,
            search_type=search_type,
            vector_store=store,
            k=number_of_results,
        )

View file

@ -0,0 +1,89 @@
from typing import List, Optional, Union
from langchain_community.vectorstores.upstash import UpstashVectorStore
from langchain_core.embeddings import Embeddings
from langchain_core.retrievers import BaseRetriever
from langchain_core.vectorstores import VectorStore
from langflow.custom import CustomComponent
from langflow.schema.schema import Record
class UpstashVectorStoreComponent(CustomComponent):
    """
    A custom component for implementing a Vector Store using Upstash.
    """

    display_name: str = "Upstash"
    description: str = "Create and Utilize an Upstash Vector Store"

    def build_config(self):
        """
        Builds the configuration for the component.

        Returns:
        - dict: A dictionary containing the configuration options for the component.
        """
        return {
            "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
            "embedding": {
                "display_name": "Embedding",
                "input_types": ["Embeddings"],
                "info": "To use Upstash's embeddings, don't provide an embedding.",
            },
            "index_url": {
                "display_name": "Index URL",
                "info": "The URL of the Upstash index.",
            },
            "index_token": {
                "display_name": "Index Token",
                "info": "The token for the Upstash index.",
            },
            "text_key": {
                "display_name": "Text Key",
                "info": "The key in the record to use as text.",
                "advanced": True,
            },
        }

    def build(
        self,
        inputs: Optional[List[Record]] = None,
        text_key: str = "text",
        index_url: Optional[str] = None,
        index_token: Optional[str] = None,
        embedding: Optional[Embeddings] = None,
    ) -> Union[VectorStore, BaseRetriever]:
        """Create an UpstashVectorStore, optionally seeding it with the given records.

        When no ``embedding`` model is supplied, the store is configured to use
        Upstash's server-side embeddings instead of a local model.
        """
        # Normalize inputs: Records become LangChain Documents; anything else
        # is passed through unchanged.
        documents = []
        for _input in inputs or []:
            if isinstance(_input, Record):
                documents.append(_input.to_lc_document())
            else:
                documents.append(_input)

        use_upstash_embedding = embedding is None
        if not documents:
            # NOTE(review): when `embedding` is None, `embedding or
            # use_upstash_embedding` evaluates to the boolean True — presumably
            # UpstashVectorStore treats `embedding=True` as "use Upstash's
            # built-in embeddings"; confirm against the langchain-community API.
            upstash_vs = UpstashVectorStore(
                embedding=embedding or use_upstash_embedding,
                text_key=text_key,
                index_url=index_url,
                index_token=index_token,
            )
        else:
            if use_upstash_embedding:
                # Server-side embeddings: create the store, then index the docs.
                upstash_vs = UpstashVectorStore(
                    embedding=use_upstash_embedding,
                    text_key=text_key,
                    index_url=index_url,
                    index_token=index_token,
                )
                upstash_vs.add_documents(documents)
            elif embedding:
                # Local embedding model: build and index in one call.
                upstash_vs = UpstashVectorStore.from_documents(
                    documents=documents,  # type: ignore
                    embedding=embedding,
                    text_key=text_key,
                    index_url=index_url,
                    index_token=index_token,
                )
        return upstash_vs

View file

@ -164,6 +164,11 @@ def add_new_custom_field(
if field_type == "bool" and field_value is None:
field_value = False
if field_type == "SecretStr":
field_config["password"] = True
field_config["load_from_db"] = True
field_config["input_types"] = ["Text"]
# If options is a list, then it's a dropdown
# If options is None, then it's a list of strings
is_list = isinstance(field_config.get("options"), list)

View file

@ -20,6 +20,7 @@ from langflow.schema.schema import INPUT_FIELD_NAME, InputType
from langflow.services.cache.utils import CacheMiss
from langflow.services.chat.service import ChatService
from langflow.services.deps import get_chat_service
from langflow.services.monitor.utils import log_transaction
if TYPE_CHECKING:
from langflow.graph.schema import ResultData
@ -763,9 +764,11 @@ class Graph:
next_runnable_vertices, top_level_vertices = await self.get_next_and_top_level_vertices(
lock, set_cache_coro, vertex
)
log_transaction(vertex, status="success")
return next_runnable_vertices, top_level_vertices, result_dict, params, valid, artifacts, vertex
except Exception as exc:
logger.exception(f"Error building vertex: {exc}")
log_transaction(vertex, status="failure", error=str(exc))
raise exc
async def get_next_and_top_level_vertices(

View file

@ -703,7 +703,8 @@ class Vertex:
self._finalize_build()
return await self.get_requester_result(requester)
result = await self.get_requester_result(requester)
return result
async def get_requester_result(self, requester: Optional["Vertex"]):
# If the requester is None, this means that

View file

@ -1,13 +1,14 @@
from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple, Type, Union, cast
from uuid import UUID
from fastapi import Depends, HTTPException
from pydantic.v1 import BaseModel, Field, create_model
from sqlmodel import select
from sqlmodel import Session, select
from langflow.graph.schema import RunOutputs
from langflow.schema.schema import INPUT_FIELD_NAME, Record
from langflow.services.database.models.flow.model import Flow
from langflow.services.deps import session_scope
from langflow.services.database.models.flow import Flow
from langflow.services.deps import get_session, session_scope
if TYPE_CHECKING:
from langflow.graph.graph.base import Graph
@ -235,3 +236,22 @@ def get_arg_names(inputs: List["Vertex"]) -> List[dict[str, str]]:
{"component_name": input_.display_name, "arg_name": input_.display_name.lower().replace(" ", "_")}
for input_ in inputs
]
def get_flow_by_id_or_endpoint_name(
    flow_id_or_name: str, db: Session = Depends(get_session), user_id: Optional[UUID] = None
) -> Flow:
    """Resolve a Flow either by UUID primary key or by its endpoint name.

    Raises:
        HTTPException: 404 when no flow matches the identifier.
    """
    try:
        flow = db.get(Flow, UUID(flow_id_or_name))
    except ValueError:
        # Identifier is not a UUID — look the flow up by name instead,
        # optionally scoped to the given user.
        query = select(Flow).where(Flow.name == flow_id_or_name)
        if user_id:
            query = query.where(Flow.user_id == user_id)
        flow = db.exec(query).first()
    if flow is None:
        raise HTTPException(status_code=404, detail=f"Flow identifier {flow_id_or_name} not found")
    return flow

View file

@ -14,7 +14,11 @@ from rich import print as rprint
from starlette.middleware.base import BaseHTTPMiddleware
from langflow.api import router
from langflow.initial_setup.setup import create_or_update_starter_projects, initialize_super_user_if_needed, load_flows_from_directory
from langflow.initial_setup.setup import (
create_or_update_starter_projects,
initialize_super_user_if_needed,
load_flows_from_directory,
)
from langflow.interface.utils import setup_llm_caching
from langflow.services.plugins.langfuse_plugin import LangfuseInstance
from langflow.services.utils import initialize_services, teardown_services
@ -33,20 +37,13 @@ class JavaScriptMIMETypeMiddleware(BaseHTTPMiddleware):
return response
def get_lifespan(fix_migration=False, socketio_server=None):
try:
from langflow.version import __version__ # type: ignore
except ImportError:
from importlib.metadata import version
__version__ = version("langflow-base")
def get_lifespan(fix_migration=False, socketio_server=None, version=None):
@asynccontextmanager
async def lifespan(app: FastAPI):
nest_asyncio.apply()
# Startup message
if __version__:
rprint(f"[bold green]Starting Langflow v{__version__}...[/bold green]")
if version:
rprint(f"[bold green]Starting Langflow v{version}...[/bold green]")
else:
rprint("[bold green]Starting Langflow...[/bold green]")
try:
@ -70,11 +67,17 @@ def get_lifespan(fix_migration=False, socketio_server=None):
def create_app():
"""Create the FastAPI app and include the router."""
try:
from langflow.version import __version__ # type: ignore
except ImportError:
from importlib.metadata import version
__version__ = version("langflow-base")
configure()
socketio_server = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True)
lifespan = get_lifespan(socketio_server=socketio_server)
app = FastAPI(lifespan=lifespan)
lifespan = get_lifespan(socketio_server=socketio_server, version=__version__)
app = FastAPI(lifespan=lifespan, title="Langflow", version=__version__)
origins = ["*"]
app.add_middleware(

View file

@ -28,6 +28,7 @@ class FlowBase(SQLModel):
data: Optional[Dict] = Field(default=None, nullable=True)
is_component: Optional[bool] = Field(default=False, nullable=True)
updated_at: Optional[datetime] = Field(default_factory=lambda: datetime.now(timezone.utc), nullable=True)
webhook: Optional[bool] = Field(default=False, nullable=True, description="Can be used on the webhook endpoint")
folder_id: Optional[UUID] = Field(default=None, nullable=True)
endpoint_name: Optional[str] = Field(default=None, nullable=True, index=True)
@ -114,10 +115,15 @@ class FlowBase(SQLModel):
# updated_at can be serialized to JSON
@field_serializer("updated_at")
def serialize_dt(self, dt: datetime, _info):
if dt is None:
return None
return dt.isoformat()
def serialize_datetime(value):
if isinstance(value, datetime):
# I'm getting 2024-05-29T17:57:17.631346
# and I want 2024-05-29T17:57:17-05:00
value = value.replace(microsecond=0)
if value.tzinfo is None:
value = value.replace(tzinfo=timezone.utc)
return value.isoformat()
return value
@field_validator("updated_at", mode="before")
def validate_dt(cls, v):

View file

@ -0,0 +1,33 @@
from typing import Optional
from fastapi import Depends
from sqlmodel import Session
from langflow.services.deps import get_session
from .model import Flow
def get_flow_by_id(session: Session = Depends(get_session), flow_id: Optional[str] = None) -> Flow | None:
    """Fetch a single Flow by primary key.

    Args:
        session: Database session (injected via FastAPI ``Depends``).
        flow_id: Primary key of the flow to fetch.

    Returns:
        The matching ``Flow``, or ``None`` if no row has that id.

    Raises:
        ValueError: If ``flow_id`` is not provided.
    """
    if flow_id is None:
        raise ValueError("Flow id is required.")
    return session.get(Flow, flow_id)
def get_webhook_component_in_flow(flow_data: dict):
    """Return the first webhook node in ``flow_data``, or ``None``.

    A node counts as a webhook when the substring ``"Webhook"`` appears in
    its ``id``. Nodes without an ``id`` are skipped: previously
    ``"Webhook" in node.get("id")`` raised ``TypeError`` when ``id`` was
    missing (``in None``).
    """
    for node in flow_data.get("nodes", []):
        if "Webhook" in node.get("id", ""):
            return node
    return None
def get_all_webhook_components_in_flow(flow_data: dict | None):
"""Get all webhook components in flow data."""
if not flow_data:
return []
return [node for node in flow_data.get("nodes", []) if "Webhook" in node.get("id")]

View file

@ -8,6 +8,7 @@ from langflow.services.deps import get_monitor_service
if TYPE_CHECKING:
from langflow.api.v1.schemas import ResultDataResponse
from langflow.graph.vertex.base import Vertex
INDEX_KEY = "index"
@ -165,3 +166,35 @@ async def log_vertex_build(
monitor_service.add_row(table_name="vertex_builds", data=row)
except Exception as e:
logger.exception(f"Error logging vertex build: {e}")
def build_clean_params(target: "Vertex") -> dict:
    """Return a copy of ``target.params`` restricted to plain-Python values.

    Only str, int, bool, float, list and dict values are kept; list elements
    are filtered by the same rule so custom objects never leak through.
    """
    allowed = (str, int, bool, float, list, dict)
    cleaned: dict = {}
    for name, value in target.params.items():
        if not isinstance(value, allowed):
            continue
        if isinstance(value, list):
            value = [element for element in value if isinstance(element, allowed)]
        cleaned[name] = value
    return cleaned
def log_transaction(vertex: "Vertex", status, error=None):
    """Best-effort recording of a vertex build transaction in the monitor DB.

    Failures are logged and swallowed so monitoring never breaks a build.
    """
    try:
        service = get_monitor_service()
        row = {
            "vertex_id": vertex.id,
            "inputs": build_clean_params(vertex),
            "output": str(vertex.result),
            "timestamp": service.get_timestamp(),
            "status": status,
            "error": error,
        }
        service.add_row(table_name="transactions", data=row)
    except Exception as e:
        logger.error(f"Error logging transaction: {e}")

View file

@ -17,6 +17,8 @@ VARIABLES_TO_GET_FROM_ENVIRONMENT = [
"PINECONE_API_KEY",
"SEARCHAPI_API_KEY",
"SERPAPI_API_KEY",
"UPSTASH_VECTOR_REST_URL",
"UPSTASH_VECTOR_REST_TOKEN",
"VECTARA_CUSTOMER_ID",
"VECTARA_CORPUS_ID",
"VECTARA_API_KEY",

View file

@ -1224,18 +1224,18 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.
[[package]]
name = "langchain-core"
version = "0.2.1"
version = "0.2.3"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_core-0.2.1-py3-none-any.whl", hash = "sha256:3521e1e573988c47399fca9739270c5d34f8ecec147253ad829eb9ff288f76d5"},
{file = "langchain_core-0.2.1.tar.gz", hash = "sha256:49383126168d934559a543ce812c485048d9e6ac9b6798fbf3d4a72b6bba5b0c"},
{file = "langchain_core-0.2.3-py3-none-any.whl", hash = "sha256:22189b5a3a30bfd65eb995f95e627f7c2c3acb322feb89f5f5f2fb7df21833a7"},
{file = "langchain_core-0.2.3.tar.gz", hash = "sha256:fbc75a64b9c0b7655d96ca57a707df1e6c09efc1539c36adbd73260612549810"},
]
[package.dependencies]
jsonpatch = ">=1.33,<2.0"
langsmith = ">=0.1.0,<0.2.0"
langsmith = ">=0.1.65,<0.2.0"
packaging = ">=23.2,<24.0"
pydantic = ">=1,<3"
PyYAML = ">=5.3"
@ -1281,13 +1281,13 @@ extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"]
[[package]]
name = "langchainhub"
version = "0.1.16"
version = "0.1.17"
description = "The LangChain Hub API client"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchainhub-0.1.16-py3-none-any.whl", hash = "sha256:a4379a1879cc6b441b8d02cc65e28a54f160fba61c9d1d4b0eddc3a276dff99a"},
{file = "langchainhub-0.1.16.tar.gz", hash = "sha256:9f11e68fddb575e70ef4b28800eedbd9eeb180ba508def04f7153ea5b246b6fc"},
{file = "langchainhub-0.1.17-py3-none-any.whl", hash = "sha256:4c609b3948252c71670f0d98f73413b515cfd2f6701a7b40ce959203e6133e04"},
{file = "langchainhub-0.1.17.tar.gz", hash = "sha256:af7df0cb1cebc7a6e0864e8632ae48ecad39ed96568f699c78657b9d04e50b46"},
]
[package.dependencies]
@ -1296,13 +1296,13 @@ types-requests = ">=2.31.0.2,<3.0.0.0"
[[package]]
name = "langsmith"
version = "0.1.63"
version = "0.1.67"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langsmith-0.1.63-py3-none-any.whl", hash = "sha256:7810afdf5e3f3b472fc581a29371fb96cd843dde2149e048d1b9610325159d1e"},
{file = "langsmith-0.1.63.tar.gz", hash = "sha256:a609405b52f6f54df442a142cbf19ab38662d54e532f96028b4c546434d4afdf"},
{file = "langsmith-0.1.67-py3-none-any.whl", hash = "sha256:7eb2e1c1b375925ff47700ed8071e10c15e942e9d1d634b4a449a9060364071a"},
{file = "langsmith-0.1.67.tar.gz", hash = "sha256:149558669a2ac4f21471cd964e61072687bba23b7c1ccb51f190a8f59b595b39"},
]
[package.dependencies]
@ -2466,13 +2466,13 @@ files = [
[[package]]
name = "requests"
version = "2.32.2"
version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
files = [
{file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"},
{file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"},
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]
[package.dependencies]

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow-base"
version = "0.0.53"
version = "0.0.54"
description = "A Python package with a built-in web application"
authors = ["Langflow <contact@langflow.org>"]
maintainers = [

View file

@ -15,7 +15,7 @@ export default function FolderAccordionComponent({
options,
}: AccordionComponentType): JSX.Element {
const [value, setValue] = useState(
open.length === 0 ? "" : getOpenAccordion(),
open.length === 0 ? "" : getOpenAccordion()
);
function getOpenAccordion(): string {

View file

@ -1,7 +1,6 @@
import { storeComponent } from "../../../../types/store";
import { cn } from "../../../../utils/utils";
import ForwardedIconComponent from "../../../genericIconComponent";
import ShadTooltip from "../../../shadTooltipComponent";
import { Card, CardHeader, CardTitle } from "../../../ui/card";
export default function DragCardComponent({ data }: { data: storeComponent }) {
@ -11,7 +10,7 @@ export default function DragCardComponent({ data }: { data: storeComponent }) {
draggable
//TODO check color schema
className={cn(
"group relative flex flex-col justify-between overflow-hidden transition-all hover:bg-muted/50 hover:shadow-md hover:dark:bg-[#ffffff10]",
"group relative flex flex-col justify-between overflow-hidden transition-all hover:bg-muted/50 hover:shadow-md hover:dark:bg-[#ffffff10]"
)}
>
<div>
@ -23,7 +22,7 @@ export default function DragCardComponent({ data }: { data: storeComponent }) {
"visible flex-shrink-0",
data.is_component
? "mx-0.5 h-6 w-6 text-component-icon"
: "h-7 w-7 flex-shrink-0 text-flow-icon",
: "h-7 w-7 flex-shrink-0 text-flow-icon"
)}
name={data.is_component ? "ToyBrick" : "Group"}
/>

View file

@ -18,7 +18,7 @@ export default function CodeAreaComponent({
setOpen,
}: CodeAreaComponentType) {
const [myValue, setMyValue] = useState(
typeof value == "string" ? value : JSON.stringify(value),
typeof value == "string" ? value : JSON.stringify(value)
);
useEffect(() => {
if (disabled && myValue !== "") {

View file

@ -59,7 +59,7 @@ export default function Dropdown({
? "dropdown-component-outline"
: "dropdown-component-false-outline",
"w-full justify-between font-normal",
editNode ? "input-edit-node" : "py-2",
editNode ? "input-edit-node" : "py-2"
)}
>
<span data-testid={`value-dropdown-` + id}>
@ -107,7 +107,7 @@ export default function Dropdown({
name="Check"
className={cn(
"ml-auto h-4 w-4 text-primary",
value === option ? "opacity-100" : "opacity-0",
value === option ? "opacity-100" : "opacity-0"
)}
/>
</CommandItem>

View file

@ -18,7 +18,7 @@ export const ForwardedIconComponent = memo(
strokeWidth,
id = "",
}: IconComponentProps,
ref,
ref
) => {
const [showFallback, setShowFallback] = useState(false);
@ -65,8 +65,8 @@ export const ForwardedIconComponent = memo(
/>
</Suspense>
);
},
),
}
)
);
export default ForwardedIconComponent;

View file

@ -132,7 +132,7 @@ export const MenuBar = ({}: {}): JSX.Element => {
title: UPLOAD_ERROR_ALERT,
list: [error],
});
},
}
);
}}
>
@ -214,7 +214,7 @@ export const MenuBar = ({}: {}): JSX.Element => {
name={isBuilding || saveLoading ? "Loader2" : "CheckCircle2"}
className={cn(
"h-4 w-4",
isBuilding || saveLoading ? "animate-spin" : "animate-wiggle",
isBuilding || saveLoading ? "animate-spin" : "animate-wiggle"
)}
/>
{printByBuildStatus()}

View file

@ -32,11 +32,11 @@ export default function HorizontalScrollFadeComponent({
fadeContainerRef.current.classList.toggle(
"fade-left",
isScrollable && !atStart,
isScrollable && !atStart
);
fadeContainerRef.current.classList.toggle(
"fade-right",
isScrollable && !atEnd,
isScrollable && !atEnd
);
};

View file

@ -104,8 +104,8 @@ export default function InputFileComponent({
editNode
? "input-edit-node input-dialog text-muted-foreground"
: disabled
? "input-disable input-dialog primary-input"
: "input-dialog primary-input text-muted-foreground"
? "input-disable input-dialog primary-input"
: "input-dialog primary-input text-muted-foreground"
}
>
{myValue !== "" ? myValue : "No file"}

View file

@ -19,15 +19,15 @@ export default function InputGlobalComponent({
editNode = false,
}: InputGlobalComponentType): JSX.Element {
const globalVariablesEntries = useGlobalVariablesStore(
(state) => state.globalVariablesEntries,
(state) => state.globalVariablesEntries
);
const getVariableId = useGlobalVariablesStore((state) => state.getVariableId);
const unavaliableFields = useGlobalVariablesStore(
(state) => state.unavaliableFields,
(state) => state.unavaliableFields
);
const removeGlobalVariable = useGlobalVariablesStore(
(state) => state.removeGlobalVariable,
(state) => state.removeGlobalVariable
);
const setErrorData = useAlertStore((state) => state.setErrorData);
@ -130,7 +130,7 @@ export default function InputGlobalComponent({
<ForwardedIconComponent
name="Trash2"
className={cn(
"h-4 w-4 text-primary opacity-0 hover:text-status-red group-hover:opacity-100",
"h-4 w-4 text-primary opacity-0 hover:text-status-red group-hover:opacity-100"
)}
aria-hidden="true"
/>

View file

@ -2,8 +2,8 @@ import { ColDef, ColGroupDef } from "ag-grid-community";
import "ag-grid-community/styles/ag-grid.css"; // Mandatory CSS required by the grid
import "ag-grid-community/styles/ag-theme-balham.css"; // Optional Theme applied to the grid
import { FlowPoolObjectType } from "../../types/chat";
import TableComponent from "../tableComponent";
import { extractColumnsFromRows } from "../../utils/utils";
import TableComponent from "../tableComponent";
function RecordsOutputComponent({
flowPool,

View file

@ -21,7 +21,7 @@ const SideBarButtonsComponent = ({ items }: SideBarButtonsComponentProps) => {
data-testid={`sidebar-nav-${item.title}`}
className={cn(
buttonVariants({ variant: "ghost" }),
"!w-[200px] cursor-pointer justify-start gap-2 border border-transparent hover:border-border hover:bg-transparent",
"!w-[200px] cursor-pointer justify-start gap-2 border border-transparent hover:border-border hover:bg-transparent"
)}
>
{item.title}

View file

@ -5,7 +5,6 @@ import DateReader from "../dateReaderComponent";
import NumberReader from "../numberReader";
import ObjectRender from "../objectRender";
import StringReader from "../stringReaderComponent";
import { Label } from "../ui/label";
import { Badge } from "../ui/badge";
export default function TableAutoCellRender({
@ -35,7 +34,7 @@ export default function TableAutoCellRender({
variant="outline"
size="sq"
className={cn(
"min-w-min bg-success-background text-success-foreground hover:bg-success-background",
"min-w-min bg-success-background text-success-foreground hover:bg-success-background"
)}
>
{value}

View file

@ -48,7 +48,7 @@ export function TagsSelector({
className={cn(
selectedTags.some((category) => category === tag.name)
? "min-w-min bg-beta-foreground text-background hover:bg-beta-foreground"
: "",
: ""
)}
>
{tag.name}

View file

@ -1,5 +1,5 @@
import * as React from "react";
import { cva, type VariantProps } from "class-variance-authority";
import * as React from "react";
import { cn } from "../../utils/utils";
const alertVariants = cva(
@ -15,7 +15,7 @@ const alertVariants = cva(
defaultVariants: {
variant: "default",
},
},
}
);
const Alert = React.forwardRef<
@ -55,4 +55,4 @@ const AlertDescription = React.forwardRef<
));
AlertDescription.displayName = "AlertDescription";
export { Alert, AlertTitle, AlertDescription };
export { Alert, AlertDescription, AlertTitle };

View file

@ -13,7 +13,7 @@ const Checkbox = React.forwardRef<
ref={ref}
className={cn(
"peer h-4 w-4 shrink-0 rounded-sm border border-primary ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 data-[state=checked]:bg-primary data-[state=checked]:text-primary-foreground",
className,
className
)}
{...props}
>
@ -37,7 +37,7 @@ const CheckBoxDiv = ({
className={cn(
className,
"peer h-4 w-4 shrink-0 rounded-sm border border-primary ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50",
checked ? "bg-primary text-primary-foreground" : "",
checked ? "bg-primary text-primary-foreground" : ""
)}
>
{checked && (

View file

@ -24,7 +24,7 @@ const AccordionTrigger = React.forwardRef<
<div
className={cn(
" flex flex-1 cursor-pointer items-center justify-between border-[1px] py-2 text-sm font-medium data-[state=closed]:rounded-md data-[state=open]:rounded-t-md data-[state=open]:border-b-0 data-[state=open]:bg-muted [&[data-state=open]>svg]:rotate-180",
className,
className
)}
>
{children}
@ -43,7 +43,7 @@ const AccordionContent = React.forwardRef<
ref={ref}
className={cn(
"data-[state=closed]:animate-accordion-up data-[state=open]:animate-accordion-down overflow-hidden border-[1px] text-sm data-[state=open]:rounded-b-md data-[state=open]:border-t-0 data-[state=open]:bg-muted",
className,
className
)}
{...props}
>

View file

@ -16,18 +16,18 @@ const Form = FormProvider;
type FormFieldContextValue<
TFieldValues extends FieldValues = FieldValues,
TName extends FieldPath<TFieldValues> = FieldPath<TFieldValues>,
TName extends FieldPath<TFieldValues> = FieldPath<TFieldValues>
> = {
name: TName;
};
const FormFieldContext = React.createContext<FormFieldContextValue>(
{} as FormFieldContextValue,
{} as FormFieldContextValue
);
const FormField = <
TFieldValues extends FieldValues = FieldValues,
TName extends FieldPath<TFieldValues> = FieldPath<TFieldValues>,
TName extends FieldPath<TFieldValues> = FieldPath<TFieldValues>
>({
...props
}: ControllerProps<TFieldValues, TName>) => {
@ -66,7 +66,7 @@ type FormItemContextValue = {
};
const FormItemContext = React.createContext<FormItemContextValue>(
{} as FormItemContextValue,
{} as FormItemContextValue
);
const FormItem = React.forwardRef<

View file

@ -606,84 +606,6 @@ export const CONTROL_NEW_USER = {
export const tabsCode = [];
export function tabsArray(codes: string[], method: number) {
  // Bug fix: the old guard `if (!method) return;` also rejected method === 0,
  // which made the dedicated `method === 0` branch unreachable dead code.
  // Only bail out when no method selector was supplied at all.
  if (method === undefined || method === null) return;
  // Tabs shared by both variants; the non-zero variant appends a Tweaks tab.
  const commonTabs = [
    {
      name: "cURL",
      mode: "bash",
      image: "https://curl.se/logo/curl-symbol-transparent.png",
      language: "sh",
      code: codes[0],
    },
    {
      name: "Python API",
      mode: "python",
      image:
        "https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w",
      language: "py",
      code: codes[1],
    },
    {
      name: "Python Code",
      mode: "python",
      image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
      language: "py",
      code: codes[2],
    },
    {
      name: "Chat Widget HTML",
      description:
        "Insert this code anywhere in your &lt;body&gt; tag. To use with react and other libs, check our <a class='link-color' href='https://langflow.org/guidelines/widget'>documentation</a>.",
      mode: "html",
      image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
      language: "py",
      code: codes[3],
    },
  ];
  if (method === 0) {
    return commonTabs;
  }
  return [
    ...commonTabs,
    {
      name: "Tweaks",
      mode: "python",
      image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
      language: "py",
      code: codes[4],
    },
  ];
}
export const FETCH_ERROR_MESSAGE = "Couldn't establish a connection.";
export const FETCH_ERROR_DESCRIPION =
"Check if everything is working properly and try again.";

View file

@ -0,0 +1 @@
export const TEXT_FIELD_TYPES: string[] = ["str", "SecretStr"];

View file

@ -43,6 +43,7 @@ import useHandleNodeClass from "../../../hooks/use-handle-node-class";
import useHandleRefreshButtonPress from "../../../hooks/use-handle-refresh-buttons";
import HandleTooltips from "../HandleTooltipComponent";
import OutputComponent from "../OutputComponent";
import { TEXT_FIELD_TYPES } from "./constants";
export default function ParameterComponent({
left,
@ -80,7 +81,7 @@ export default function ParameterComponent({
debouncedHandleUpdateValues,
setNode,
isLoading,
setIsLoading,
setIsLoading
);
const { handleNodeClass: handleNodeClassHook } = useHandleNodeClass(
@ -88,7 +89,7 @@ export default function ParameterComponent({
name,
takeSnapshot,
setNode,
updateNodeInternals,
updateNodeInternals
);
const { handleRefreshButtonPress: handleRefreshButtonPressHook } =
@ -97,7 +98,7 @@ export default function ParameterComponent({
let disabled =
edges.some(
(edge) =>
edge.targetHandle === scapedJSONStringfy(proxy ? { ...id, proxy } : id),
edge.targetHandle === scapedJSONStringfy(proxy ? { ...id, proxy } : id)
) ?? false;
const handleRefreshButtonPress = async (name, data) => {
@ -108,7 +109,7 @@ export default function ParameterComponent({
const handleOnNewValue = async (
newValue: string | string[] | boolean | Object[],
skipSnapshot: boolean | undefined = false,
skipSnapshot: boolean | undefined = false
): Promise<void> => {
handleOnNewValueHook(newValue, skipSnapshot);
};
@ -190,14 +191,14 @@ export default function ParameterComponent({
className={classNames(
left ? "my-12 -ml-0.5 " : " my-12 -mr-0.5 ",
"h-3 w-3 rounded-full border-2 bg-background",
!showNode ? "mt-0" : "",
!showNode ? "mt-0" : ""
)}
style={{
borderColor: color ?? nodeColors.unknown,
}}
onClick={() => {
setFilterEdge(
groupByFamily(myData, tooltipTitle!, left, nodes!),
groupByFamily(myData, tooltipTitle!, left, nodes!)
);
}}
></Handle>
@ -282,12 +283,12 @@ export default function ParameterComponent({
}
className={classNames(
left ? "-ml-0.5" : "-mr-0.5",
"h-3 w-3 rounded-full border-2 bg-background",
"h-3 w-3 rounded-full border-2 bg-background"
)}
style={{ borderColor: color ?? nodeColors.unknown }}
onClick={() => {
setFilterEdge(
groupByFamily(myData, tooltipTitle!, left, nodes!),
groupByFamily(myData, tooltipTitle!, left, nodes!)
);
}}
/>
@ -299,7 +300,7 @@ export default function ParameterComponent({
<Case
condition={
left === true &&
type === "str" &&
TEXT_FIELD_TYPES.includes(type ?? "") &&
!data.node?.template[name]?.options
}
>
@ -345,8 +346,7 @@ export default function ParameterComponent({
name={name}
data={data}
button_text={
data.node?.template[name]?.refresh_button_text ??
"Refresh"
data.node?.template[name].refresh_button_text
}
className="extra-side-bar-buttons mt-1"
handleUpdateValues={handleRefreshButtonPress}
@ -394,8 +394,7 @@ export default function ParameterComponent({
name={name}
data={data}
button_text={
data.node?.template[name]?.refresh_button_text ??
"Refresh"
data.node?.template[name].refresh_button_text
}
className="extra-side-bar-buttons ml-2 mt-1"
handleUpdateValues={handleRefreshButtonPress}

View file

@ -24,7 +24,7 @@ const TooltipRenderComponent = ({ item, index, left }) => {
<span
key={index}
className={classNames(
index > 0 ? "mt-2 flex items-center" : "mt-3 flex items-center",
index > 0 ? "mt-2 flex items-center" : "mt-3 flex items-center"
)}
>
<div

View file

@ -8,7 +8,7 @@ const useFetchDataOnMount = (
name,
handleUpdateValues,
setNode,
setIsLoading,
setIsLoading
) => {
const setErrorData = useAlertStore((state) => state.setErrorData);

View file

@ -9,8 +9,7 @@ const useHandleOnNewValue = (
handleUpdateValues,
debouncedHandleUpdateValues,
setNode,
isLoading,
setIsLoading,
setIsLoading
) => {
const setErrorData = useAlertStore((state) => state.setErrorData);

View file

@ -5,7 +5,7 @@ const useHandleNodeClass = (
name,
takeSnapshot,
setNode,
updateNodeInternals,
updateNodeInternals
) => {
const handleNodeClass = (newNodeClass, code) => {
if (!data.node) return;

View file

@ -4,7 +4,7 @@
* @param {boolean} isAuth - If the API is authenticated
* @returns {string} - The curl code
*/
export default function getCurlCode(
export function getCurlRunCode(
flowId: string,
isAuth: boolean,
tweaksBuildedObject,
@ -13,9 +13,9 @@ export default function getCurlCode(
const tweaksObject = tweaksBuildedObject[0];
// show the endpoint name in the curl command if it exists
return `curl -X POST \\
${window.location.protocol}//${window.location.host}/api/v1/run/${
"${window.location.protocol}//${window.location.host}/api/v1/run/${
endpointName || flowId
}?stream=false \\
}?stream=false" \\
-H 'Content-Type: application/json'\\${
!isAuth ? `\n -H 'x-api-key: <your api key>'\\` : ""
}
@ -25,3 +25,24 @@ export default function getCurlCode(
"tweaks": ${JSON.stringify(tweaksObject, null, 2)}}'
`;
}
/**
 * Generates a cURL command for making a POST request to a flow's webhook endpoint.
 *
 * @param {string} flowId - The ID of the flow; used in the URL when no endpoint name is provided.
 * @param {boolean} isAuth - Whether the API handles auth automatically; when false, an
 *   `x-api-key` placeholder header is included in the generated command.
 * @param {string} [endpointName] - Optional endpoint name; takes precedence over flowId in the URL.
 * @returns {string} The cURL command.
 */
export function getCurlWebhookCode(flowId, isAuth, endpointName?: string) {
return `curl -X POST \\
"${window.location.protocol}//${window.location.host}/api/v1/webhook/${
endpointName || flowId
}" \\
-H 'Content-Type: application/json'\\${
!isAuth ? `\n  -H 'x-api-key: <your api key>'\\` : ""
}
-d '{"any": "data"}'
`;
}

View file

@ -1,43 +1,11 @@
export default function tabsArray(codes: string[], method: number) {
if (!method) return;
if (method === 0) {
return [
{
name: "cURL",
mode: "bash",
image: "https://curl.se/logo/curl-symbol-transparent.png",
language: "sh",
code: codes[0],
},
{
name: "Python API",
mode: "python",
image:
"https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w",
language: "py",
code: codes[1],
},
{
name: "Python Code",
mode: "python",
image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
language: "py",
code: codes[2],
},
{
name: "Chat Widget HTML",
description:
"Insert this code anywhere in your &lt;body&gt; tag. To use with react and other libs, check our <a class='link-color' href='https://langflow.org/guidelines/widget'>documentation</a>.",
mode: "html",
image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
language: "py",
code: codes[3],
},
];
}
return [
export function createTabsArray(
codes,
includeWebhookCurl = false,
includeTweaks = false
) {
const tabs = [
{
name: "cURL",
name: "Run cURL",
mode: "bash",
image: "https://curl.se/logo/curl-symbol-transparent.png",
language: "sh",
@ -49,14 +17,14 @@ export default function tabsArray(codes: string[], method: number) {
image:
"https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w",
language: "py",
code: codes[1],
code: codes[2],
},
{
name: "Python Code",
mode: "python",
language: "py",
image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
code: codes[2],
language: "py",
code: codes[3],
},
{
name: "Chat Widget HTML",
@ -64,15 +32,30 @@ export default function tabsArray(codes: string[], method: number) {
"Insert this code anywhere in your &lt;body&gt; tag. To use with react and other libs, check our <a class='link-color' href='https://langflow.org/guidelines/widget'>documentation</a>.",
mode: "html",
image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
language: "py",
code: codes[3],
language: "html",
code: codes[4],
},
{
];
if (includeWebhookCurl) {
tabs.splice(1, 0, {
name: "Webhook cURL",
mode: "bash",
image: "https://curl.se/logo/curl-symbol-transparent.png",
language: "sh",
code: codes[1],
});
}
if (includeTweaks) {
tabs.push({
name: "Tweaks",
mode: "python",
image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
language: "py",
code: codes[4],
},
];
code: codes[5],
});
}
return tabs;
}

View file

@ -18,13 +18,13 @@ import { buildContent } from "../utils/build-content";
import { buildTweaks } from "../utils/build-tweaks";
import { checkCanBuildTweakObject } from "../utils/check-can-build-tweak-object";
import { getChangesType } from "../utils/get-changes-types";
import getCurlCode from "../utils/get-curl-code";
import { getCurlRunCode, getCurlWebhookCode } from "../utils/get-curl-code";
import { getNodesWithDefaultValue } from "../utils/get-nodes-with-default-value";
import getPythonApiCode from "../utils/get-python-api-code";
import getPythonCode from "../utils/get-python-code";
import { getValue } from "../utils/get-value";
import getWidgetCode from "../utils/get-widget-code";
import tabsArray from "../utils/tabs-array";
import { createTabsArray } from "../utils/tabs-array";
const ApiModal = forwardRef(
(
@ -47,23 +47,33 @@ const ApiModal = forwardRef(
const [open, setOpen] = useState(false);
const [activeTab, setActiveTab] = useState("0");
const pythonApiCode = getPythonApiCode(flow?.id, autoLogin, tweak);
const curl_code = getCurlCode(
const curl_run_code = getCurlRunCode(
flow?.id,
autoLogin,
tweak,
flow?.endpoint_name
);
const curl_webhook_code = getCurlWebhookCode(
flow?.id,
autoLogin,
flow?.endpoint_name
);
const pythonCode = getPythonCode(flow?.name, tweak);
const widgetCode = getWidgetCode(flow?.id, flow?.name, autoLogin);
console.log("flow", flow);
const includeWebhook = flow.webhook;
const tweaksCode = buildTweaks(flow);
const codesArray = [
curl_code,
curl_run_code,
curl_webhook_code,
pythonApiCode,
pythonCode,
widgetCode,
pythonCode,
];
const [tabs, setTabs] = useState(tabsArray(codesArray, 0));
const [tabs, setTabs] = useState(
createTabsArray(codesArray, includeWebhook)
);
const canShowTweaks =
flow &&
@ -93,9 +103,9 @@ const ApiModal = forwardRef(
if (Object.keys(tweaksCode).length > 0) {
setActiveTab("0");
setTabs(tabsArray(codesArray, 1));
setTabs(createTabsArray(codesArray, includeWebhook, true));
} else {
setTabs(tabsArray(codesArray, 1));
setTabs(createTabsArray(codesArray, includeWebhook, true));
}
}, [flow["data"]!["nodes"], open]);
@ -166,7 +176,7 @@ const ApiModal = forwardRef(
const addCodes = (cloneTweak) => {
const pythonApiCode = getPythonApiCode(flow?.id, autoLogin, cloneTweak);
const curl_code = getCurlCode(
const curl_code = getCurlRunCode(
flow?.id,
autoLogin,
cloneTweak,

View file

@ -19,6 +19,7 @@ export type FlowType = {
icon?: string;
icon_bg_color?: string;
folder_id?: string;
webhook?: boolean;
};
export type NodeType = {

View file

@ -4,7 +4,6 @@ from uuid import UUID, uuid4
import pytest
from fastapi import status
from fastapi.testclient import TestClient
from langflow.custom.directory_reader.directory_reader import DirectoryReader
from langflow.services.deps import get_settings_service
@ -685,7 +684,7 @@ def test_run_flow_with_caching_invalid_flow_id(client: TestClient, created_api_k
assert response.status_code == status.HTTP_404_NOT_FOUND
data = response.json()
assert "detail" in data
assert f"Flow {invalid_flow_id} not found" in data["detail"]
assert f"Flow identifier {invalid_flow_id} not found" in data["detail"]
def test_run_flow_with_caching_invalid_input_format(client: TestClient, starter_project, created_api_key):