Merge branch 'chat_and_cache' into 186-endpoints-for-node-validation-and-debugging

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-04-26 01:28:43 -03:00 committed by GitHub
commit 5dbca85512
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
55 changed files with 1736 additions and 606 deletions

View file

@ -27,7 +27,7 @@ run_frontend:
cd src/frontend && npm start
run_backend:
poetry run uvicorn langflow.main:app --port 5003 --reload
poetry run uvicorn langflow.main:app --port 5003 --reload --log-level debug
build_frontend:
cd src/frontend && CI='' npm run build

View file

@ -1,11 +0,0 @@
#! /bin/bash
cd src/frontend
docker build -t logspace/frontend_build -f build.Dockerfile .
cd ../backend
docker build -t logspace/backend_build -f build.Dockerfile .
cd ../../
VERSION=$(toml get --toml-path pyproject.toml tool.poetry.version)
docker build --build-arg VERSION=$VERSION -t ibiscp/langflow:$VERSION .
docker push ibiscp/langflow:$VERSION

6
package-lock.json generated Normal file
View file

@ -0,0 +1,6 @@
{
"name": "reactFlow",
"lockfileVersion": 3,
"requires": true,
"packages": {}
}

161
poetry.lock generated
View file

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
# This file is automatically @generated by Poetry and should not be changed by hand.
[[package]]
name = "aiohttp"
@ -3837,7 +3837,7 @@ files = [
]
[package.dependencies]
greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""}
greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
[package.extras]
aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
@ -4016,6 +4016,10 @@ category = "main"
optional = false
python-versions = ">=3.8.0"
files = [
{file = "torch-2.0.0-1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c9090bda7d2eeeecd74f51b721420dbeb44f838d4536cc1b284e879417e3064a"},
{file = "torch-2.0.0-1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:bd42db2a48a20574d2c33489e120e9f32789c4dc13c514b0c44272972d14a2d7"},
{file = "torch-2.0.0-1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8969aa8375bcbc0c2993e7ede0a7f889df9515f18b9b548433f412affed478d9"},
{file = "torch-2.0.0-1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ab2da16567cb55b67ae39e32d520d68ec736191d88ac79526ca5874754c32203"},
{file = "torch-2.0.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:7a9319a67294ef02459a19738bbfa8727bb5307b822dadd708bc2ccf6c901aca"},
{file = "torch-2.0.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9f01fe1f6263f31bd04e1757946fd63ad531ae37f28bb2dbf66f5c826ee089f4"},
{file = "torch-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:527f4ae68df7b8301ee6b1158ca56350282ea633686537b30dbb5d7b4a52622a"},
@ -4233,6 +4237,15 @@ category = "main"
optional = false
python-versions = "*"
files = [
{file = "triton-2.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38806ee9663f4b0f7cd64790e96c579374089e58f49aac4a6608121aa55e2505"},
{file = "triton-2.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:226941c7b8595219ddef59a1fdb821e8c744289a132415ddd584facedeb475b1"},
{file = "triton-2.0.0-1-cp36-cp36m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4c9fc8c89874bc48eb7e7b2107a9b8d2c0bf139778637be5bfccb09191685cfd"},
{file = "triton-2.0.0-1-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d2684b6a60b9f174f447f36f933e9a45f31db96cb723723ecd2dcfd1c57b778b"},
{file = "triton-2.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9d4978298b74fcf59a75fe71e535c092b023088933b2f1df933ec32615e4beef"},
{file = "triton-2.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:74f118c12b437fb2ca25e1a04759173b517582fcf4c7be11913316c764213656"},
{file = "triton-2.0.0-1-pp37-pypy37_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9618815a8da1d9157514f08f855d9e9ff92e329cd81c0305003eb9ec25cc5add"},
{file = "triton-2.0.0-1-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1aca3303629cd3136375b82cb9921727f804e47ebee27b2677fef23005c3851a"},
{file = "triton-2.0.0-1-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e3e13aa8b527c9b642e3a9defcc0fbd8ffbe1c80d8ac8c15a01692478dc64d8a"},
{file = "triton-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f05a7e64e4ca0565535e3d5d3405d7e49f9d308505bb7773d21fb26a4c008c2"},
{file = "triton-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4b99ca3c6844066e516658541d876c28a5f6e3a852286bbc97ad57134827fd"},
{file = "triton-2.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47b4d70dc92fb40af553b4460492c31dc7d3a114a979ffb7a5cdedb7eb546c08"},
@ -4530,82 +4543,82 @@ files = [
[[package]]
name = "websockets"
version = "11.0.1"
version = "11.0.2"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "websockets-11.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d30cc1a90bcbf9e22e1f667c1c5a7428e2d37362288b4ebfd5118eb0b11afa9"},
{file = "websockets-11.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dc77283a7c7b2b24e00fe8c3c4f7cf36bba4f65125777e906aae4d58d06d0460"},
{file = "websockets-11.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0929c2ebdf00cedda77bf77685693e38c269011236e7c62182fee5848c29a4fa"},
{file = "websockets-11.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db234da3aff01e8483cf0015b75486c04d50dbf90004bd3e5b46d384e1bd6c9e"},
{file = "websockets-11.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7fdfbed727ce6b4b5e6622d15a6efb2098b2d9e22ba4dc54b2e3ce80f982045"},
{file = "websockets-11.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5f3d0d177b3db3d1d02cce7ba6c0063586499ac28afe0c992be74ffc40d9257"},
{file = "websockets-11.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25ea5dbd3b00c56b034639dc6fe4d1dd095b8205bab1782d9a47cb020695fdf4"},
{file = "websockets-11.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:dbeada3b8f1f6d9497840f761906c4236f912a42da4515520168bc7c525b52b0"},
{file = "websockets-11.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:892959b627eedcdf98ac7022f9f71f050a59624b380b67862da10c32ea3c221a"},
{file = "websockets-11.0.1-cp310-cp310-win32.whl", hash = "sha256:fc0a96a6828bfa6f1ccec62b54630bcdcc205d483f5a8806c0a8abb26101c54b"},
{file = "websockets-11.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:3a88375b648a2c479532943cc19a018df1e5fcea85d5f31963c0b22794d1bdc1"},
{file = "websockets-11.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3cf18bbd44b36749b7b66f047a30a40b799b8c0bd9a1b9173cba86a234b4306b"},
{file = "websockets-11.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:deb0dd98ea4e76b833f0bfd7a6042b51115360d5dfcc7c1daa72dfc417b3327a"},
{file = "websockets-11.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45a85dc6b3ff76239379feb4355aadebc18d6e587c8deb866d11060755f4d3ea"},
{file = "websockets-11.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d68bd2a3e9fff6f7043c0a711cb1ebba9f202c196a3943d0c885650cd0b6464"},
{file = "websockets-11.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfd0b9b18d64c51e5cd322e16b5bf4fe490db65c9f7b18fd5382c824062ead7e"},
{file = "websockets-11.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef0e6253c36e42f2637cfa3ff9b3903df60d05ec040c718999f6a0644ce1c497"},
{file = "websockets-11.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:12180bc1d72c6a9247472c1dee9dfd7fc2e23786f25feee7204406972d8dab39"},
{file = "websockets-11.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a797da96d4127e517a5cb0965cd03fd6ec21e02667c1258fa0579501537fbe5c"},
{file = "websockets-11.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:07cc20655fb16aeef1a8f03236ba8671c61d332580b996b6396a5b7967ba4b3d"},
{file = "websockets-11.0.1-cp311-cp311-win32.whl", hash = "sha256:a01c674e0efe0f14aec7e722ed0e0e272fa2f10e8ea8260837e1f4f5dc4b3e53"},
{file = "websockets-11.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:2796f097841619acf053245f266a4f66cb27c040f0d9097e5f21301aab95ff43"},
{file = "websockets-11.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:54d084756c50dfc8086dce97b945f210ca43950154e1e04a44a30c6e6a2bcbb1"},
{file = "websockets-11.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fe2aed5963ca267c40a2d29b1ee4e8ab008ac8d5daa284fdda9275201b8a334"},
{file = "websockets-11.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e92dbac318a84fef722f38ca57acef19cbb89527aba5d420b96aa2656970ee"},
{file = "websockets-11.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec4e87eb9916b481216b1fede7d8913be799915f5216a0c801867cbed8eeb903"},
{file = "websockets-11.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d4e0990b6a04b07095c969969da659eecf9069cf8e7b8f49c8f5ee1bb50e3352"},
{file = "websockets-11.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c90343fd0774749d23c1891dd8b3e9210f9afd30986673ce0f9d5857f5cb1562"},
{file = "websockets-11.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ac042e8ba9d7f2618e84af27927fdce0f3e03528eb74f343977486c093868389"},
{file = "websockets-11.0.1-cp37-cp37m-win32.whl", hash = "sha256:385c5391becb9b58e0a4f33345e12762fd857ccf9fbf6fee428669929ba45e4c"},
{file = "websockets-11.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:aef1602db81096ce3d3847865128c8879635bdad7963fb2b7df290edb9e9150a"},
{file = "websockets-11.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:52ba83ea132390e426f9a7b48848248a2dc0e7120ca8c65d5a8fc1efaa4eb51b"},
{file = "websockets-11.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:007ed0d62f7e06eeb6e3a848b0d83b9fbd9e14674a59a61326845f27d20d7452"},
{file = "websockets-11.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f888b9565ca1d1c25ab827d184f57f4772ffbfa6baf5710b873b01936cc335ee"},
{file = "websockets-11.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db78535b791840a584c48cf3f4215eae38a7e2f43271ecd27ce4ba8a798beaaa"},
{file = "websockets-11.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa1c23ed3a02732fba906ec337df65d4cc23f9f453635e1a803c285b59c7d987"},
{file = "websockets-11.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e039f106d48d3c241f1943bccfb383bd38ec39900d6dcaad0c73cc5fe129f346"},
{file = "websockets-11.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b0ed24a3aa4213029e100257e5e73c5f912e70ca35630081de94b7f9e2cf4a9b"},
{file = "websockets-11.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5a3022f9291bf2d35ebf65929297d625e68effd3a5647b8eb8b89d51b09394c"},
{file = "websockets-11.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1cb23597819f68ac6a6d133a002a1b3ef12a22850236b083242c93f81f206d5a"},
{file = "websockets-11.0.1-cp38-cp38-win32.whl", hash = "sha256:349dd1fa56a30d530555988be98013688de67809f384671883f8bf8b8c9de984"},
{file = "websockets-11.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:87ae582cf2319e45bc457a57232daded27a3c771263cab42fb8864214bbd74ea"},
{file = "websockets-11.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a88815a0c6253ad1312ef186620832fb347706c177730efec34e3efe75e0e248"},
{file = "websockets-11.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5a6fa353b5ef36970c3bd1cd7cecbc08bb8f2f1a3d008b0691208cf34ebf5b0"},
{file = "websockets-11.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5ffe6fc5e5fe9f2634cdc59b805e4ba1fcccf3a5622f5f36c3c7c287f606e283"},
{file = "websockets-11.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b138f4bf8a64c344e12c76283dac279d11adab89ac62ae4a32ac8490d3c94832"},
{file = "websockets-11.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aedd94422745da60672a901f53de1f50b16e85408b18672b9b210db4a776b5a6"},
{file = "websockets-11.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4667d4e41fa37fa3d836b2603b8b40d6887fa4838496d48791036394f7ace39"},
{file = "websockets-11.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:43e0de552be624e5c0323ff4fcc9f0b4a9a6dc6e0116b8aa2cbb6e0d3d2baf09"},
{file = "websockets-11.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ceeef57b9aec8f27e523de4da73c518ece7721aefe7064f18aa28baabfe61b94"},
{file = "websockets-11.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9d91279d57f6546eaf43671d1de50621e0578f13c2f17c96c458a72d170698d7"},
{file = "websockets-11.0.1-cp39-cp39-win32.whl", hash = "sha256:29282631da3bfeb5db497e4d3d94d56ee36222fbebd0b51014e68a2e70736fb1"},
{file = "websockets-11.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:e2654e94c705ce9b768441d8e3a387a84951ca1056efdc4a26a4a6ee723c01b6"},
{file = "websockets-11.0.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:60a19d4ff5f451254f8623f6aa4169065f73a50ec7b59ab6b9dcddff4aa00267"},
{file = "websockets-11.0.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a58e83f82098d062ae5d4cbe7073b8783999c284d6f079f2fefe87cd8957ac8"},
{file = "websockets-11.0.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b91657b65355954e47f0df874917fa200426b3a7f4e68073326a8cfc2f6deef8"},
{file = "websockets-11.0.1-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b8e1ee01eb5b8be5c8a69ae26b0820dbc198d092ad50b3451adc3cdd55d455"},
{file = "websockets-11.0.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5d8d5d17371ed9eb9f0e3a8d326bdf8172700164c2e705bc7f1905a719a189be"},
{file = "websockets-11.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e53419201c6c1439148feb99de6b307651a88b8defd41348cc23bbe2a290de1d"},
{file = "websockets-11.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718d19c494637f28e651031b3df6a791b9e86e0097c65ed5e8ec49b400b1210e"},
{file = "websockets-11.0.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7b2544eb3e7bc39ce59812371214cd97762080dab90c3afc857890039384753"},
{file = "websockets-11.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec4a887d2236e3878c07033ad5566f6b4d5d954b85f92a219519a1745d0c93e9"},
{file = "websockets-11.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ae59a9f0a77ecb0cbdedea7d206a547ff136e8bfbc7d2d98772fb02d398797bb"},
{file = "websockets-11.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ef35cef161f76031f833146f895e7e302196e01c704c00d269c04d8e18f3ac37"},
{file = "websockets-11.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79b6548e57ab18f071b9bfe3ffe02af7184dd899bc674e2817d8fe7e9e7489ec"},
{file = "websockets-11.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8d9793f3fb0da16232503df14411dabafed5a81fc9077dc430cfc6f60e71179"},
{file = "websockets-11.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42aa05e890fcf1faed8e535c088a1f0f27675827cbacf62d3024eb1e6d4c9e0c"},
{file = "websockets-11.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5d4f4b341100d313b08149d7031eb6d12738ac758b0c90d2f9be8675f401b019"},
{file = "websockets-11.0.1-py3-none-any.whl", hash = "sha256:85b4127f7da332feb932eee833c70e5e1670469e8c9de7ef3874aa2a91a6fbb2"},
{file = "websockets-11.0.1.tar.gz", hash = "sha256:369410925b240b30ef1c1deadbd6331e9cd865ad0b8966bf31e276cc8e0da159"},
{file = "websockets-11.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:580cc95c58118f8c39106be71e24d0b7e1ad11a155f40a2ee687f99b3e5e432e"},
{file = "websockets-11.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:143782041e95b63083b02107f31cda999f392903ae331de1307441f3a4557d51"},
{file = "websockets-11.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8df63dcd955eb6b2e371d95aacf8b7c535e482192cff1b6ce927d8f43fb4f552"},
{file = "websockets-11.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9b2dced5cbbc5094678cc1ec62160f7b0fe4defd601cd28a36fde7ee71bbb5"},
{file = "websockets-11.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0eeeea3b01c97fd3b5049a46c908823f68b59bf0e18d79b231d8d6764bc81ee"},
{file = "websockets-11.0.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:502683c5dedfc94b9f0f6790efb26aa0591526e8403ad443dce922cd6c0ec83b"},
{file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3cc3e48b6c9f7df8c3798004b9c4b92abca09eeea5e1b0a39698f05b7a33b9d"},
{file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:808b8a33c961bbd6d33c55908f7c137569b09ea7dd024bce969969aa04ecf07c"},
{file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:34a6f8996964ccaa40da42ee36aa1572adcb1e213665e24aa2f1037da6080909"},
{file = "websockets-11.0.2-cp310-cp310-win32.whl", hash = "sha256:8f24cd758cbe1607a91b720537685b64e4d39415649cac9177cd1257317cf30c"},
{file = "websockets-11.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:3b87cd302f08ea9e74fdc080470eddbed1e165113c1823fb3ee6328bc40ca1d3"},
{file = "websockets-11.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3565a8f8c7bdde7c29ebe46146bd191290413ee6f8e94cf350609720c075b0a1"},
{file = "websockets-11.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f97e03d4d5a4f0dca739ea274be9092822f7430b77d25aa02da6775e490f6846"},
{file = "websockets-11.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f392587eb2767afa8a34e909f2fec779f90b630622adc95d8b5e26ea8823cb8"},
{file = "websockets-11.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7742cd4524622cc7aa71734b51294644492a961243c4fe67874971c4d3045982"},
{file = "websockets-11.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46dda4bc2030c335abe192b94e98686615f9274f6b56f32f2dd661fb303d9d12"},
{file = "websockets-11.0.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6b2bfa1d884c254b841b0ff79373b6b80779088df6704f034858e4d705a4802"},
{file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1df2413266bf48430ef2a752c49b93086c6bf192d708e4a9920544c74cd2baa6"},
{file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf45d273202b0c1cec0f03a7972c655b93611f2e996669667414557230a87b88"},
{file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a09cce3dacb6ad638fdfa3154d9e54a98efe7c8f68f000e55ca9c716496ca67"},
{file = "websockets-11.0.2-cp311-cp311-win32.whl", hash = "sha256:2174a75d579d811279855df5824676d851a69f52852edb0e7551e0eeac6f59a4"},
{file = "websockets-11.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:c78ca3037a954a4209b9f900e0eabbc471fb4ebe96914016281df2c974a93e3e"},
{file = "websockets-11.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2100b02d1aaf66dc48ff1b2a72f34f6ebc575a02bc0350cc8e9fbb35940166"},
{file = "websockets-11.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dca9708eea9f9ed300394d4775beb2667288e998eb6f542cdb6c02027430c599"},
{file = "websockets-11.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:320ddceefd2364d4afe6576195201a3632a6f2e6d207b0c01333e965b22dbc84"},
{file = "websockets-11.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a573c8d71b7af937852b61e7ccb37151d719974146b5dc734aad350ef55a02"},
{file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:13bd5bebcd16a4b5e403061b8b9dcc5c77e7a71e3c57e072d8dff23e33f70fba"},
{file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:95c09427c1c57206fe04277bf871b396476d5a8857fa1b99703283ee497c7a5d"},
{file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2eb042734e710d39e9bc58deab23a65bd2750e161436101488f8af92f183c239"},
{file = "websockets-11.0.2-cp37-cp37m-win32.whl", hash = "sha256:5875f623a10b9ba154cb61967f940ab469039f0b5e61c80dd153a65f024d9fb7"},
{file = "websockets-11.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:634239bc844131863762865b75211a913c536817c0da27f691400d49d256df1d"},
{file = "websockets-11.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3178d965ec204773ab67985a09f5696ca6c3869afeed0bb51703ea404a24e975"},
{file = "websockets-11.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:955fcdb304833df2e172ce2492b7b47b4aab5dcc035a10e093d911a1916f2c87"},
{file = "websockets-11.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb46d2c7631b2e6f10f7c8bac7854f7c5e5288f024f1c137d4633c79ead1e3c0"},
{file = "websockets-11.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25aae96c1060e85836552a113495db6d857400288161299d77b7b20f2ac569f2"},
{file = "websockets-11.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2abeeae63154b7f63d9f764685b2d299e9141171b8b896688bd8baec6b3e2303"},
{file = "websockets-11.0.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daa1e8ea47507555ed7a34f8b49398d33dff5b8548eae3de1dc0ef0607273a33"},
{file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:954eb789c960fa5daaed3cfe336abc066941a5d456ff6be8f0e03dd89886bb4c"},
{file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3ffe251a31f37e65b9b9aca5d2d67fd091c234e530f13d9dce4a67959d5a3fba"},
{file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf6385f677ed2e0b021845b36f55c43f171dab3a9ee0ace94da67302f1bc364"},
{file = "websockets-11.0.2-cp38-cp38-win32.whl", hash = "sha256:aa7b33c1fb2f7b7b9820f93a5d61ffd47f5a91711bc5fa4583bbe0c0601ec0b2"},
{file = "websockets-11.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:220d5b93764dd70d7617f1663da64256df7e7ea31fc66bc52c0e3750ee134ae3"},
{file = "websockets-11.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fb4480556825e4e6bf2eebdbeb130d9474c62705100c90e59f2f56459ddab42"},
{file = "websockets-11.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec00401846569aaf018700249996143f567d50050c5b7b650148989f956547af"},
{file = "websockets-11.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87c69f50281126dcdaccd64d951fb57fbce272578d24efc59bce72cf264725d0"},
{file = "websockets-11.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:232b6ba974f5d09b1b747ac232f3a3d8f86de401d7b565e837cc86988edf37ac"},
{file = "websockets-11.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:392d409178db1e46d1055e51cc850136d302434e12d412a555e5291ab810f622"},
{file = "websockets-11.0.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4fe2442091ff71dee0769a10449420fd5d3b606c590f78dd2b97d94b7455640"},
{file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ede13a6998ba2568b21825809d96e69a38dc43184bdeebbde3699c8baa21d015"},
{file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4c54086b2d2aec3c3cb887ad97e9c02c6be9f1d48381c7419a4aa932d31661e4"},
{file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e37a76ccd483a6457580077d43bc3dfe1fd784ecb2151fcb9d1c73f424deaeba"},
{file = "websockets-11.0.2-cp39-cp39-win32.whl", hash = "sha256:d1881518b488a920434a271a6e8a5c9481a67c4f6352ebbdd249b789c0467ddc"},
{file = "websockets-11.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:25e265686ea385f22a00cc2b719b880797cd1bb53b46dbde969e554fb458bfde"},
{file = "websockets-11.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ce69f5c742eefd039dce8622e99d811ef2135b69d10f9aa79fbf2fdcc1e56cd7"},
{file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b985ba2b9e972cf99ddffc07df1a314b893095f62c75bc7c5354a9c4647c6503"},
{file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b52def56d2a26e0e9c464f90cadb7e628e04f67b0ff3a76a4d9a18dfc35e3dd"},
{file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70a438ef2a22a581d65ad7648e949d4ccd20e3c8ed7a90bbc46df4e60320891"},
{file = "websockets-11.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:752fbf420c71416fb1472fec1b4cb8631c1aa2be7149e0a5ba7e5771d75d2bb9"},
{file = "websockets-11.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dd906b0cdc417ea7a5f13bb3c6ca3b5fd563338dc596996cb0fdd7872d691c0a"},
{file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e79065ff6549dd3c765e7916067e12a9c91df2affea0ac51bcd302aaf7ad207"},
{file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46388a050d9e40316e58a3f0838c63caacb72f94129eb621a659a6e49bad27ce"},
{file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c7de298371d913824f71b30f7685bb07ad13969c79679cca5b1f7f94fec012f"},
{file = "websockets-11.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6d872c972c87c393e6a49c1afbdc596432df8c06d0ff7cd05aa18e885e7cfb7c"},
{file = "websockets-11.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b444366b605d2885f0034dd889faf91b4b47668dd125591e2c64bfde611ac7e1"},
{file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b967a4849db6b567dec3f7dd5d97b15ce653e3497b8ce0814e470d5e074750"},
{file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2acdc82099999e44fa7bd8c886f03c70a22b1d53ae74252f389be30d64fd6004"},
{file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:518ed6782d9916c5721ebd61bb7651d244178b74399028302c8617d0620af291"},
{file = "websockets-11.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:58477b041099bb504e1a5ddd8aa86302ed1d5c6995bdd3db2b3084ef0135d277"},
{file = "websockets-11.0.2-py3-none-any.whl", hash = "sha256:5004c087d17251938a52cce21b3dbdabeecbbe432ce3f5bbbf15d8692c36eac9"},
{file = "websockets-11.0.2.tar.gz", hash = "sha256:b1a69701eb98ed83dd099de4a686dc892c413d974fa31602bc00aca7cb988ac9"},
]
[[package]]
@ -4801,4 +4814,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "f0a23552d8cbd3d38722a69698cf823cdd19a90b707538837da64a933f8d13b4"
content-hash = "1320f2d5466c6569ee0563cc81b28d8c8897c8f07b4e0912c231c6e4033a2bf8"

View file

@ -47,6 +47,7 @@ fake-useragent = "^1.1.3"
docstring-parser = "^0.15"
psycopg2-binary = "^2.9.6"
pyarrow = "^11.0.0"
websockets = "^11.0.2"
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"

View file

@ -1,52 +0,0 @@
# `python-base` sets up all our shared environment variables
FROM python:3.10-slim
# python
ENV PYTHONUNBUFFERED=1 \
# prevents python creating .pyc files
PYTHONDONTWRITEBYTECODE=1 \
\
# pip
PIP_NO_CACHE_DIR=off \
PIP_DISABLE_PIP_VERSION_CHECK=on \
PIP_DEFAULT_TIMEOUT=100 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
POETRY_VERSION=1.4.0 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root
# it gets named `.venv`
POETRY_VIRTUALENVS_IN_PROJECT=true \
# do not ask any interactive question
POETRY_NO_INTERACTION=1 \
\
# paths
# this is where our requirements + virtual environment will live
PYSETUP_PATH="/opt/pysetup" \
VENV_PATH="/opt/pysetup/.venv"
# prepend poetry and venv to path
ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
RUN apt-get update \
&& apt-get install --no-install-recommends -y \
# deps for installing poetry
curl \
# deps for building python deps
build-essential libpq-dev
# install poetry - respects $POETRY_VERSION & $POETRY_HOME
RUN curl -sSL https://install.python-poetry.org | python3 -
# copy project requirement files here to ensure they will be cached.
WORKDIR /app
COPY poetry.lock pyproject.toml ./
COPY langflow/ ./langflow
# poetry install
RUN poetry install --without dev
# build wheel
RUN poetry build -f wheel

View file

@ -1,6 +0,0 @@
#! /bin/bash
docker build -t logspace/backend_build -f build.Dockerfile .
VERSION=$(toml get --toml-path pyproject.toml tool.poetry.version)
docker build --build-arg VERSION=$VERSION -t ibiscp/langflow:$VERSION .
docker push ibiscp/langflow:$VERSION

View file

@ -1 +1,4 @@
from langflow.interface.loading import load_flow_from_json # noqa
from langflow.interface.loading import load_flow_from_json
from langflow.cache import cache_manager
__all__ = ["load_flow_from_json", "cache_manager"]

View file

@ -3,6 +3,10 @@ from pydantic import BaseModel, validator
from langflow.graph.utils import extract_input_variables_from_prompt
class CacheResponse(BaseModel):
data: dict
class Code(BaseModel):
code: str

View file

@ -0,0 +1,16 @@
from typing import Any
from langchain.callbacks.base import AsyncCallbackHandler
from langflow.api.schemas import ChatResponse
# https://github.com/hwchase17/chat-langchain/blob/master/callback.py
class StreamingLLMCallbackHandler(AsyncCallbackHandler):
"""Callback handler for streaming LLM responses."""
def __init__(self, websocket):
self.websocket = websocket
async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
resp = ChatResponse(message=token, type="stream", intermediate_steps="")
await self.websocket.send_json(resp.dict())

View file

@ -0,0 +1,18 @@
from fastapi import APIRouter, WebSocket
from langflow.api.chat_manager import ChatManager
from langflow.utils.logger import logger
router = APIRouter()
chat_manager = ChatManager()
@router.websocket("/chat/{client_id}")
async def websocket_endpoint(client_id: str, websocket: WebSocket):
    """Entry point for the per-client chat websocket.

    Delegates the whole connection lifecycle to the shared ``chat_manager``.
    Any failure is logged with its stack trace and re-raised so the
    framework can close the socket with an error.
    """
    try:
        await chat_manager.handle_websocket(client_id, websocket)
    except Exception as exc:
        # Keep the full traceback in the logs before propagating.
        logger.exception(exc)
        raise

View file

@ -0,0 +1,207 @@
import asyncio
from typing import Dict, List
from collections import defaultdict
from fastapi import WebSocket
import json
from langflow.api.schemas import ChatMessage, ChatResponse, FileResponse
from langflow.cache.manager import Subject
from langflow.interface.run import (
get_result_and_steps,
load_or_build_langchain_object,
)
from langflow.interface.utils import pil_to_base64, try_setting_streaming_options
from langflow.utils.logger import logger
from langflow.cache import cache_manager
class ChatHistory(Subject):
    """Observable, per-client store of chat messages.

    Observers attached via ``Subject.attach`` are notified whenever a
    regular message is appended; ``FileResponse`` payloads are stored
    silently.
    """

    def __init__(self):
        super().__init__()
        # One message list per client id; unknown clients start empty.
        self.history: Dict[str, List[ChatMessage]] = defaultdict(list)

    def add_message(self, client_id: str, message: ChatMessage):
        """Append ``message`` to ``client_id``'s history and notify observers.

        ``FileResponse`` entries are recorded without notifying, so
        observers only react to textual chat traffic.
        """
        self.history[client_id].append(message)
        if isinstance(message, FileResponse):
            return
        self.notify()

    def get_history(self, client_id: str, filter=True) -> List[ChatMessage]:
        """Return the stored messages for ``client_id``.

        With ``filter`` set (the default), transient ``"start"`` and
        ``"stream"`` messages are dropped; otherwise the live list is
        returned as-is. Unknown clients yield an empty list.
        """
        messages = self.history.get(client_id, [])
        if not messages:
            return []
        if filter:
            return [m for m in messages if m.type not in ("start", "stream")]
        return messages
class ChatManager:
    """Coordinates chat websockets, per-client history and the object cache.

    Keeps one active websocket per client id, mirrors cache additions into
    the chat history as file messages, and pushes history updates back to
    the connected client.
    """

    def __init__(self):
        # client_id -> open websocket for that client.
        self.active_connections: Dict[str, WebSocket] = {}
        self.chat_history = ChatHistory()
        self.chat_history.attach(self.on_chat_history_update)
        # Shared singleton cache; observed so cached artifacts (images,
        # dataframes) surface in the chat as file responses.
        self.cache_manager = cache_manager
        self.cache_manager.attach(self.update)

    def on_chat_history_update(self):
        """History observer: send the last chat message to its client."""
        client_id = self.cache_manager.current_client_id
        if client_id in self.active_connections:
            chat_response = self.chat_history.get_history(client_id, filter=False)[-1]
            if chat_response.is_bot:
                # Convert FileResponse payloads into wire-friendly formats.
                if isinstance(chat_response, FileResponse):
                    # pandas objects become CSV text.
                    if chat_response.data_type == "pandas":
                        chat_response.data = chat_response.data.to_csv()
                    elif chat_response.data_type == "image":
                        # Base64 encode the image for JSON transport.
                        chat_response.data = pil_to_base64(chat_response.data)
                # This observer may fire off the event-loop thread, so hand
                # the send coroutine to the loop thread-safely.
                loop = asyncio.get_event_loop()
                coroutine = self.send_json(client_id, chat_response)
                asyncio.run_coroutine_threadsafe(coroutine, loop)

    def update(self):
        """Cache observer: record the newest cached object as a file message."""
        if self.cache_manager.current_client_id in self.active_connections:
            self.last_cached_object_dict = self.cache_manager.get_last()
            # Add a new FileResponse carrying the cached object.
            chat_response = FileResponse(
                message=None,
                type="file",
                data=self.last_cached_object_dict["obj"],
                data_type=self.last_cached_object_dict["type"],
            )
            self.chat_history.add_message(
                self.cache_manager.current_client_id, chat_response
            )

    async def connect(self, client_id: str, websocket: WebSocket):
        """Accept a websocket and register it for ``client_id``."""
        await websocket.accept()
        self.active_connections[client_id] = websocket

    def disconnect(self, client_id: str):
        """Forget the websocket registered for ``client_id``."""
        del self.active_connections[client_id]

    async def send_message(self, client_id: str, message: str):
        """Send a plain-text frame to ``client_id``."""
        websocket = self.active_connections[client_id]
        await websocket.send_text(message)

    async def send_json(self, client_id: str, message: ChatMessage):
        """Send a chat message to ``client_id`` as JSON."""
        websocket = self.active_connections[client_id]
        await websocket.send_json(message.dict())

    async def process_message(self, client_id: str, payload: Dict):
        """Run one chat turn.

        Records the user message, builds/runs the langchain graph, then
        appends the final response (plus any file artifacts produced during
        the turn) to the history. Re-raises whatever the graph execution
        raises, after logging it.
        """
        # Process the graph data and chat message.
        chat_message = payload.pop("message", "")
        chat_message = ChatMessage(message=chat_message)

        # BUGFIX: determine whether this is the first message *before*
        # appending to the history. The previous code checked afterwards,
        # so the just-added human message always made the flag False and
        # the first-message path was never taken.
        is_first_message = len(self.chat_history.get_history(client_id=client_id)) == 0

        self.chat_history.add_message(client_id, chat_message)
        graph_data = payload
        start_resp = ChatResponse(message=None, type="start", intermediate_steps="")
        self.chat_history.add_message(client_id, start_resp)

        # Generate result and thought.
        try:
            logger.debug("Generating result and thought")
            result, intermediate_steps = await process_graph(
                graph_data=graph_data,
                is_first_message=is_first_message,
                chat_message=chat_message,
                websocket=self.active_connections[client_id],
            )
        except Exception as e:
            # Log stack trace before propagating to the websocket handler.
            logger.exception(e)
            raise e

        # Send a response back to the frontend, if needed.
        intermediate_steps = intermediate_steps or ""
        history = self.chat_history.get_history(client_id, filter=False)
        file_responses = []
        if history:
            # Walk backwards collecting file artifacts produced during this
            # turn, stopping at the "start" marker that opened it.
            for msg in reversed(history):
                if isinstance(msg, FileResponse):
                    if msg.data_type == "image":
                        # Base64 encode the image for JSON transport.
                        msg.data = pil_to_base64(msg.data)
                    file_responses.append(msg)
                if msg.type == "start":
                    break
        response = ChatResponse(
            message=result or "",
            intermediate_steps=intermediate_steps.strip(),
            type="end",
            files=file_responses,
        )
        self.chat_history.add_message(client_id, response)

    async def handle_websocket(self, client_id: str, websocket: WebSocket):
        """Own the full lifecycle of one chat websocket connection."""
        await self.connect(client_id, websocket)
        try:
            chat_history = self.chat_history.get_history(client_id)
            # Serialize the stored models before sending the backlog.
            chat_history = [chat.dict() for chat in chat_history]
            await websocket.send_json(chat_history)

            while True:
                json_payload = await websocket.receive_json()
                try:
                    payload = json.loads(json_payload)
                except TypeError:
                    # receive_json may already return a parsed object.
                    payload = json_payload
                if "clear_history" in payload:
                    self.chat_history.history[client_id] = []
                    continue
                # Scope the cache to this client while the turn runs.
                with self.cache_manager.set_client_id(client_id):
                    await self.process_message(client_id, payload)
        except Exception as e:
            # Handle any exceptions that might occur: log, tell the client,
            # then propagate so the framework tears down the connection.
            logger.exception(e)
            await self.send_message(client_id, str(e))
            raise e
        finally:
            # NOTE(review): if the client already dropped, close() may raise
            # here — confirm against the websocket framework's behavior.
            await self.active_connections[client_id].close(
                code=1000, reason="Client disconnected"
            )
            self.disconnect(client_id)
async def process_graph(
    graph_data: Dict,
    is_first_message: bool,
    chat_message: ChatMessage,
    websocket: WebSocket,
):
    """Build (or fetch from cache) the langchain object for ``graph_data``
    and run it on the incoming chat message.

    Streaming is wired to ``websocket`` when the underlying LLM supports
    it. Returns a ``(result, intermediate_steps)`` tuple; raises
    ``ValueError`` with a user-facing message when the object could not be
    loaded.
    """
    built = load_or_build_langchain_object(graph_data, is_first_message)
    built = try_setting_streaming_options(built, websocket)
    logger.debug("Loaded langchain object")

    if built is None:
        # User-facing error: the graph could not be turned into a runnable object.
        raise ValueError(
            "There was an error loading the langchain_object. Please, check all the nodes and try again."
        )

    # Generate result and thought.
    try:
        logger.debug("Generating result and thought")
        outcome = get_result_and_steps(built, chat_message.message or "")
        logger.debug("Generated result and intermediate_steps")
    except Exception as exc:
        # Keep the stack trace in the logs before propagating.
        logger.exception(exc)
        raise
    result, intermediate_steps = outcome
    return result, intermediate_steps

View file

@ -0,0 +1,40 @@
from typing import Any, Union
from pydantic import BaseModel, validator
class ChatMessage(BaseModel):
    """Chat message schema (base class for responses and file payloads)."""

    # True when the message originates from the bot rather than the user.
    is_bot: bool = False
    # Message text; None is allowed (e.g. for control messages).
    message: Union[str, None] = None
    # Message kind; subclasses override this (see ChatResponse/FileResponse).
    type: str = "human"
class ChatResponse(ChatMessage):
    """Chat response schema."""

    # Serialized reasoning/tool trace shown to the user; may be "".
    intermediate_steps: str
    # Response kind; restricted by the validator below.
    type: str
    is_bot: bool = True
    # File payloads attached to this response.
    files: list = []

    @validator("type")
    def validate_message_type(cls, v):
        # Restrict response types to the set the frontend understands.
        if v not in ["start", "stream", "end", "error", "info", "file"]:
            raise ValueError("type must be start, stream, end, error, info, or file")
        return v
class FileResponse(ChatMessage):
    """File response schema.

    Carries a cached artifact (image, CSV text, or pandas object) to the
    frontend as a chat message of type ``"file"``.
    """

    # The raw payload; converted to a wire-friendly form before sending.
    data: Any
    # Payload kind; restricted by the validator below.
    data_type: str
    type: str = "file"
    is_bot: bool = True

    @validator("data_type")
    def validate_data_type(cls, v):
        # BUGFIX: "pandas" was rejected here, but CacheManager.add_pandas
        # stores entries with type "pandas" and ChatManager.update passes
        # that value straight into FileResponse, so pandas artifacts raised
        # a validation error before they could be converted to CSV.
        if v not in ["image", "csv", "pandas"]:
            raise ValueError("data_type must be image, csv, or pandas")
        return v

View file

@ -0,0 +1 @@
from langflow.cache.manager import cache_manager # noqa

View file

@ -2,13 +2,18 @@ import base64
import contextlib
import functools
import hashlib
import json
import os
import tempfile
from collections import OrderedDict
from pathlib import Path
from typing import Any
from PIL import Image
import dill
import pandas as pd # type: ignore
import dill # type: ignore
CACHE = {}
def create_cache_folder(func):

127
src/backend/langflow/cache/manager.py vendored Normal file
View file

@ -0,0 +1,127 @@
from contextlib import contextmanager
from typing import Any, Awaitable, Callable, List
from PIL import Image
import pandas as pd
class Subject:
    """Minimal synchronous observer registry (observer-pattern base class)."""

    def __init__(self):
        # Zero-argument callbacks invoked, in order, on every notify().
        self.observers: List[Callable[[], None]] = []

    def attach(self, observer: Callable[[], None]):
        """Register ``observer`` to be called on future notifications."""
        self.observers.append(observer)

    def detach(self, observer: Callable[[], None]):
        """Unregister a previously attached ``observer``."""
        self.observers.remove(observer)

    def notify(self):
        """Invoke every attached observer, skipping any ``None`` entries."""
        for callback in self.observers:
            if callback is not None:
                callback()
class AsyncSubject:
    """Observer registry whose callbacks are awaited (async observer pattern)."""

    def __init__(self):
        # Coroutine functions awaited, in order, on every notify().
        self.observers: List[Callable[[], Awaitable]] = []

    def attach(self, observer: Callable[[], Awaitable]):
        """Register an async ``observer`` for future notifications."""
        self.observers.append(observer)

    def detach(self, observer: Callable[[], Awaitable]):
        """Unregister a previously attached ``observer``."""
        self.observers.remove(observer)

    async def notify(self):
        """Await every attached observer sequentially, skipping ``None``."""
        for callback in self.observers:
            if callback is not None:
                await callback()
class CacheManager(Subject):
    """Per-client object cache that notifies observers whenever an item lands.

    The "current" client is selected with the ``set_client_id`` context
    manager; the ``add_*`` and ``get*`` methods then operate on that
    client's bucket.
    """

    def __init__(self):
        super().__init__()
        # client_id -> {name: {"obj": ..., "type": ...}}
        self.CACHE = {}
        self.current_client_id = None
        self.current_cache = {}

    @contextmanager
    def set_client_id(self, client_id: str):
        """
        Context manager to set the current client_id and associated cache.

        Args:
            client_id (str): The client identifier.
        """
        saved_client_id = self.current_client_id
        self.current_client_id = client_id
        self.current_cache = self.CACHE.setdefault(client_id, {})
        try:
            yield
        finally:
            # Restore whichever client (if any) was active before.
            self.current_client_id = saved_client_id
            self.current_cache = self.CACHE.get(self.current_client_id, {})

    def add_pandas(self, name: str, obj: Any):
        """
        Add a pandas DataFrame or Series to the current client's cache.

        Args:
            name (str): The cache key.
            obj (Any): The pandas DataFrame or Series object.
        """
        if not isinstance(obj, (pd.DataFrame, pd.Series)):
            raise ValueError("Object is not a pandas DataFrame or Series")
        self.current_cache[name] = {"obj": obj, "type": "pandas"}
        self.notify()

    def add_image(self, name: str, obj: Any):
        """
        Add a PIL Image to the current client's cache.

        Args:
            name (str): The cache key.
            obj (Any): The PIL Image object.
        """
        if not isinstance(obj, Image.Image):
            raise ValueError("Object is not a PIL Image")
        self.current_cache[name] = {"obj": obj, "type": "image"}
        self.notify()

    def get(self, name: str):
        """
        Get an object from the current client's cache.

        Args:
            name (str): The cache key.

        Returns:
            The cached entry stored under ``name``.
        """
        return self.current_cache[name]

    def get_last(self):
        """
        Get the last added item in the current client's cache.

        Returns:
            The most recently inserted cache entry.
        """
        entries = list(self.current_cache.values())
        return entries[-1]


# Module-level singleton shared across the application.
cache_manager = CacheManager()

View file

@ -4,16 +4,18 @@
# - Build each inner agent first, then build the outer agent
import contextlib
import inspect
import types
import warnings
from copy import deepcopy
from typing import Any, Dict, List, Optional
from langflow.cache import utils as cache_utils
from langflow.cache import base as cache_utils
from langflow.graph.constants import DIRECT_TYPES
from langflow.interface import loading
from langflow.interface.listing import ALL_TYPES_DICT
from langflow.utils.logger import logger
from langflow.utils.util import sync_to_async
class Node:
@ -158,13 +160,20 @@ class Node:
continue
result = value.build()
# If the key is "func", then we need to use the run method
if key == "func" and not isinstance(result, types.FunctionType):
# func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
# so we need to check if there is an attribute called run
if hasattr(result, "run"):
result = result.run # type: ignore
elif hasattr(result, "get_function"):
result = result.get_function() # type: ignore
if key == "func":
if not isinstance(result, types.FunctionType):
# func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
# so we need to check if there is an attribute called run
if hasattr(result, "run"):
result = result.run # type: ignore
elif hasattr(result, "get_function"):
result = result.get_function() # type: ignore
elif inspect.iscoroutinefunction(result):
self.params["coroutine"] = result
else:
# turn result which is a function into a coroutine
# so that it can be awaited
self.params["coroutine"] = sync_to_async(result)
self.params[key] = result
elif isinstance(value, list) and all(

View file

@ -20,6 +20,7 @@ from langchain.llms.loading import load_llm_from_config
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.importing.utils import import_by_type
from langflow.interface.run import fix_memory_inputs
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.types import get_type_list
from langflow.interface.utils import load_file_into_dict
@ -106,7 +107,19 @@ def load_flow_from_json(path: str, build=True):
# Nodes, edges and root node
edges = data_graph["edges"]
graph = Graph(nodes, edges)
return graph.build() if build else graph
if build:
langchain_object = graph.build()
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = False
fix_memory_inputs(langchain_object)
return langchain_object
return graph
def replace_zero_shot_prompt_with_prompt_template(nodes):

View file

@ -3,7 +3,7 @@ import io
from typing import Any, Dict
from chromadb.errors import NotEnoughElementsException
from langflow.cache.utils import compute_dict_hash, load_cache, memoize_dict
from langflow.cache.base import compute_dict_hash, load_cache, memoize_dict
from langflow.graph.graph import Graph
from langflow.interface import loading
from langflow.utils.logger import logger
@ -32,7 +32,7 @@ def load_or_build_langchain_object(data_graph, is_first_message=False):
return build_langchain_object_with_caching(data_graph)
@memoize_dict(maxsize=1)
@memoize_dict(maxsize=10)
def build_langchain_object_with_caching(data_graph):
"""
Build langchain object from data_graph.
@ -87,7 +87,7 @@ def process_graph(data_graph: Dict[str, Any]):
# Generate result and thought
logger.debug("Generating result and thought")
result, thought = get_result_and_thought_using_graph(langchain_object, message)
result, thought = get_result_and_steps(langchain_object, message)
logger.debug("Generated result and thought")
# Save langchain_object to cache
@ -118,7 +118,7 @@ def process_graph_cached(data_graph: Dict[str, Any]):
# Generate result and thought
logger.debug("Generating result and thought")
result, thought = get_result_and_thought_using_graph(langchain_object, message)
result, thought = get_result_and_steps(langchain_object, message)
logger.debug("Generated result and thought")
return {"result": str(result), "thought": thought.strip()}
@ -184,7 +184,7 @@ def fix_memory_inputs(langchain_object):
update_memory_keys(langchain_object, possible_new_mem_key)
def get_result_and_thought_using_graph(langchain_object, message: str):
def get_result_and_steps(langchain_object, message: str):
"""Get result and thought from extracted json"""
try:
if hasattr(langchain_object, "verbose"):
@ -240,6 +240,61 @@ def get_result_and_thought_using_graph(langchain_object, message: str):
return result, thought
def async_get_result_and_steps(langchain_object, message: str):
    """Get result and thought from extracted json.

    Runs ``langchain_object`` on ``message`` with stdout captured, and
    returns ``(result, thought)`` where ``thought`` is either formatted
    intermediate steps or the captured verbose output.

    NOTE(review): despite its name this is declared as a plain sync
    function (no async/await); the ``acall`` branch below is commented
    out — confirm whether it was meant to be ``async def``.
    """
    try:
        # Force verbose output so the captured stdout carries the "thought".
        if hasattr(langchain_object, "verbose"):
            langchain_object.verbose = True

        chat_input = None
        memory_key = ""
        if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
            memory_key = langchain_object.memory.memory_key

        # Build the chain input: a {key: message} dict for the first
        # non-memory input key, or the bare string when no keys exist.
        if hasattr(langchain_object, "input_keys"):
            for key in langchain_object.input_keys:
                if key not in [memory_key, "chat_history"]:
                    chat_input = {key: message}
        else:
            chat_input = message  # type: ignore

        if hasattr(langchain_object, "return_intermediate_steps"):
            # https://github.com/hwchase17/langchain/issues/2068
            # Deactivating until we have a frontend solution
            # to display intermediate steps
            langchain_object.return_intermediate_steps = False

        fix_memory_inputs(langchain_object)

        # Capture everything printed during the run; it becomes the
        # fallback "thought" text when no intermediate steps are returned.
        with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
            try:
                # if hasattr(langchain_object, "acall"):
                #     output = await langchain_object.acall(chat_input)
                # else:
                output = langchain_object(chat_input)
            except ValueError as exc:
                # make the error message more informative
                logger.debug(f"Error: {str(exc)}")
                # Retry with .run(), which accepts a plain string input.
                output = langchain_object.run(chat_input)
            intermediate_steps = (
                output.get("intermediate_steps", []) if isinstance(output, dict) else []
            )
            result = (
                output.get(langchain_object.output_keys[0])
                if isinstance(output, dict)
                else output
            )
            if intermediate_steps:
                thought = format_intermediate_steps(intermediate_steps)
            else:
                # No structured steps: use whatever the chain printed.
                thought = output_buffer.getvalue()
    except Exception as exc:
        # Wrap any failure in a user-facing ValueError, keeping the cause.
        raise ValueError(f"Error: {str(exc)}") from exc
    return result, thought
def get_result_and_thought(extracted_json: Dict[str, Any], message: str):
"""Get result and thought from extracted json"""
try:

View file

@ -1,5 +1,12 @@
import base64
from io import BytesIO
import json
import os
from PIL.Image import Image
from langchain.callbacks.base import AsyncCallbackManager
from langchain.chat_models import AzureChatOpenAI, ChatOpenAI
from langchain.llms import AzureOpenAI, OpenAI
from langflow.api.callback import StreamingLLMCallbackHandler
import yaml
@ -20,3 +27,30 @@ def load_file_into_dict(file_path: str) -> dict:
raise ValueError("Unsupported file type. Please provide a JSON or YAML file.")
return data
def pil_to_base64(image: Image) -> str:
    """Encode a PIL image as a base64 string of its PNG bytes."""
    buffer = BytesIO()
    image.save(buffer, format="PNG")
    encoded = base64.b64encode(buffer.getvalue())
    return encoded.decode("utf-8")
def try_setting_streaming_options(langchain_object, websocket):
    """Enable token streaming over ``websocket`` when the LLM supports it.

    Locates the LLM on ``langchain_object`` (directly, or via its
    ``llm_chain``) and, for OpenAI-family models, turns on streaming and
    installs a websocket-backed callback manager.

    Returns the (possibly mutated) ``langchain_object``.
    """
    # First we need to find the LLM.
    llm = None
    if hasattr(langchain_object, "llm"):
        llm = langchain_object.llm
    elif hasattr(langchain_object, "llm_chain") and hasattr(
        langchain_object.llm_chain, "llm"
    ):
        llm = langchain_object.llm_chain.llm

    if isinstance(llm, (OpenAI, ChatOpenAI, AzureOpenAI, AzureChatOpenAI)):
        # BUGFIX: the old code did
        #   llm.streaming = bool(hasattr(llm, "streaming"))
        # which conflated "the attribute exists" with the desired value and
        # could even create a streaming=False attribute on models lacking
        # it. Only enable streaming when the model exposes the flag.
        if hasattr(llm, "streaming"):
            llm.streaming = True
        stream_handler = StreamingLLMCallbackHandler(websocket)
        stream_manager = AsyncCallbackManager([stream_handler])
        llm.callback_manager = stream_manager

    return langchain_object

View file

@ -3,6 +3,7 @@ from fastapi.middleware.cors import CORSMiddleware
from langflow.api.endpoints import router as endpoints_router
from langflow.api.validate import router as validate_router
from langflow.api.chat import router as chat_router
def create_app():
@ -23,6 +24,7 @@ def create_app():
app.include_router(endpoints_router)
app.include_router(validate_router)
app.include_router(chat_router)
return app

View file

@ -1,3 +1,5 @@
import asyncio
from functools import partial, wraps
import importlib
import inspect
import re
@ -301,3 +303,15 @@ def update_verbose(d: dict, new_value: bool) -> dict:
elif k == "verbose":
d[k] = new_value
return d
def sync_to_async(func):
    """Wrap a synchronous callable in a coroutine function.

    The returned coroutine simply calls ``func`` with the same arguments,
    so it can be awaited by async callers. Metadata (name, docstring) is
    preserved via ``functools.wraps``.
    """

    @wraps(func)
    async def awaitable(*args, **kwargs):
        result = func(*args, **kwargs)
        return result

    return awaitable

View file

@ -155,7 +155,7 @@ def create_function(code, function_name):
exec_globals[function_name] = locals()[function_name]
# Return a function that imports necessary modules and calls the target function
def wrapped_function(*args, **kwargs):
async def wrapped_function(*args, **kwargs):
for module_name, module in exec_globals.items():
if isinstance(module, type(importlib)):
globals()[module_name] = module

View file

@ -1,8 +0,0 @@
#! /bin/bash
poetry remove langchain
docker build -t logspace/backend_build -f build.Dockerfile .
VERSION=$(toml get --toml-path pyproject.toml tool.poetry.version)
docker build --build-arg VERSION=$VERSION -t ibiscp/langflow:$VERSION .
docker run -p 5003:80 -d ibiscp/langflow:$VERSION
poetry add --editable ../../../langchain

View file

@ -1,5 +0,0 @@
FROM node:14-alpine
WORKDIR /app
COPY . /app
RUN npm install
RUN npm run build

View file

@ -1,11 +0,0 @@
#! /bin/bash
# Read the contents of the JSON file
json=$(cat package.json)
# Extract the value of the "version" field using jq
VERSION=$(echo "$json" | jq -r '.version')
docker build -t logspace/frontend_build -f build.Dockerfile .
docker build --build-arg VERSION=$VERSION -t ibiscp/langflow_frontend:$VERSION .
docker push ibiscp/langflow_frontend:$VERSION

View file

@ -14,6 +14,7 @@
"@heroicons/react": "^2.0.15",
"@mui/material": "^5.11.9",
"@tailwindcss/forms": "^0.5.3",
"@tailwindcss/line-clamp": "^0.4.4",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
@ -24,13 +25,14 @@
"ace-builds": "^1.16.0",
"ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
"base64-js": "^1.5.1",
"lodash": "^4.17.21",
"react": "^18.2.0",
"react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
"react-icons": "^4.7.1",
"react-icons": "^4.8.0",
"react-laag": "^2.0.5",
"react-router-dom": "^6.8.1",
"react-scripts": "5.0.1",
@ -3931,6 +3933,14 @@
"tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1"
}
},
"node_modules/@tailwindcss/line-clamp": {
"version": "0.4.4",
"resolved": "https://registry.npmjs.org/@tailwindcss/line-clamp/-/line-clamp-0.4.4.tgz",
"integrity": "sha512-5U6SY5z8N42VtrCrKlsTAA35gy2VSyYtHWCsg1H87NU1SXnEfekTVlrga9fzUDrrHcGi2Lb5KenUWb4lRQT5/g==",
"peerDependencies": {
"tailwindcss": ">=2.0.0 || >=3.0.0 || >=3.0.0-alpha.1"
}
},
"node_modules/@testing-library/dom": {
"version": "8.20.0",
"resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.20.0.tgz",
@ -5854,6 +5864,25 @@
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/batch": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz",
@ -15032,9 +15061,9 @@
"integrity": "sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg=="
},
"node_modules/react-icons": {
"version": "4.7.1",
"resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.7.1.tgz",
"integrity": "sha512-yHd3oKGMgm7zxo3EA7H2n7vxSoiGmHk5t6Ou4bXsfcgWyhfDKMpyKfhHR6Bjnn63c+YXBLBPUql9H4wPJM6sXw==",
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.8.0.tgz",
"integrity": "sha512-N6+kOLcihDiAnj5Czu637waJqSnwlMNROzVZMhfX68V/9bu9qHaMIJC4UdozWoOk57gahFCNHwVvWzm0MTzRjg==",
"peerDependencies": {
"react": "*"
}

View file

@ -9,6 +9,7 @@
"@heroicons/react": "^2.0.15",
"@mui/material": "^5.11.9",
"@tailwindcss/forms": "^0.5.3",
"@tailwindcss/line-clamp": "^0.4.4",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
@ -19,13 +20,14 @@
"ace-builds": "^1.16.0",
"ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
"base64-js": "^1.5.1",
"lodash": "^4.17.21",
"react": "^18.2.0",
"react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
"react-icons": "^4.7.1",
"react-icons": "^4.8.0",
"react-laag": "^2.0.5",
"react-router-dom": "^6.8.1",
"react-scripts": "5.0.1",
@ -60,5 +62,5 @@
"last 1 safari version"
]
},
"proxy": "http://backend:7860"
"proxy": "http://127.0.0.1:5003"
}

View file

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" style="margin: auto; background: none; display: block; shape-rendering: auto;" width="271px" height="271px" viewBox="0 0 100 100" preserveAspectRatio="xMidYMid">
<defs>
<filter id="ldio-978hsxudfzl-filter" x="-100%" y="-100%" width="300%" height="300%" color-interpolation-filters="sRGB">
<feGaussianBlur in="SourceGraphic" stdDeviation="3.6"></feGaussianBlur>
<feComponentTransfer result="cutoff">
<feFuncA type="table" tableValues="0 0 0 0 0 0 1 1 1 1 1"></feFuncA>
</feComponentTransfer>
</filter>
</defs>
<g filter="url(#ldio-978hsxudfzl-filter)"><g transform="translate(50 50)">
<g>
<circle cx="8" cy="0" r="5" fill="#2edbb5">
<animate attributeName="r" keyTimes="0;0.5;1" values="5.3999999999999995;12.6;5.3999999999999995" dur="5s" repeatCount="indefinite" begin="-0.2s"></animate>
</circle>
<animateTransform attributeName="transform" type="rotate" keyTimes="0;1" values="0;360" dur="5s" repeatCount="indefinite" begin="0s"></animateTransform>
</g>
</g><g transform="translate(50 50)">
<g>
<circle cx="8" cy="0" r="5" fill="#1d99ff">
<animate attributeName="r" keyTimes="0;0.5;1" values="5.3999999999999995;12.6;5.3999999999999995" dur="2.5s" repeatCount="indefinite" begin="-0.15000000000000002s"></animate>
</circle>
<animateTransform attributeName="transform" type="rotate" keyTimes="0;1" values="0;360" dur="2.5s" repeatCount="indefinite" begin="-0.05s"></animateTransform>
</g>
</g><g transform="translate(50 50)">
<g>
<circle cx="8" cy="0" r="5" fill="#4f41ff">
<animate attributeName="r" keyTimes="0;0.5;1" values="5.3999999999999995;12.6;5.3999999999999995" dur="1.6666666666666665s" repeatCount="indefinite" begin="-0.1s"></animate>
</circle>
<animateTransform attributeName="transform" type="rotate" keyTimes="0;1" values="0;360" dur="1.6666666666666665s" repeatCount="indefinite" begin="-0.1s"></animateTransform>
</g>
</g><g transform="translate(50 50)">
<g>
<circle cx="8" cy="0" r="5" fill="#8400ff">
<animate attributeName="r" keyTimes="0;0.5;1" values="5.3999999999999995;12.6;5.3999999999999995" dur="1.25s" repeatCount="indefinite" begin="-0.05s"></animate>
</circle>
<animateTransform attributeName="transform" type="rotate" keyTimes="0;1" values="0;360" dur="1.25s" repeatCount="indefinite" begin="-0.15000000000000002s"></animateTransform>
</g>
</g></g>
<!-- [ldio] generated by https://loading.io/ --></svg>

After

Width:  |  Height:  |  Size: 2.5 KiB

View file

@ -1,6 +1,7 @@
import { ReactElement } from "react";
import { LightTooltip } from "../LightTooltipComponent";
import { TooltipComponentType } from "../../types/components";
export default function Tooltip({ children, title }:{children:ReactElement,title:string}) {
return <LightTooltip title={title} arrow>{children}</LightTooltip>;
// Thin wrapper around LightTooltip: forwards title/placement, always shows
// the arrow, and renders the wrapped children as the tooltip target.
export default function Tooltip({ children, title,placement }:TooltipComponentType) {
  return <LightTooltip placement={placement} title={title} arrow>{children}</LightTooltip>;
}

View file

@ -0,0 +1,37 @@
import { Transition } from "@headlessui/react";
import { Bars3CenterLeftIcon, ChatBubbleBottomCenterTextIcon } from "@heroicons/react/24/outline";
import { nodeColors } from "../../../utils";
import { PopUpContext } from "../../../contexts/popUpContext";
import { useContext } from "react";
import ChatModal from "../../../modals/chatModal";
// Floating trigger button that slides in from the bottom-right whenever the
// chat window is closed (show={!open}); clicking it re-opens the chat.
export default function ChatTrigger({open, setOpen,flow}){
  // NOTE(review): `openPopUp` and the `flow` prop are not used in this
  // component body — confirm whether they can be dropped.
  const {openPopUp} = useContext(PopUpContext)
  return(<Transition
    show={!open}
    appear={true}
    enter="transition ease-out duration-300"
    enterFrom="translate-y-96"
    enterTo="translate-y-0"
    leave="transition ease-in duration-300"
    leaveFrom="translate-y-0"
    leaveTo="translate-y-96"
  >
    <div className="absolute bottom-2 right-3">
      {/* Chat-colored circular button */}
      <div style={{backgroundColor:nodeColors['chat']}} className="border flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full dark:bg-gray-800 dark:border-gray-600 dark:text-white">
        <button
          onClick={() => {
            setOpen(true);
          }}
        >
          <div className="flex gap-3 items-center">
            <ChatBubbleBottomCenterTextIcon
              className="h-6 w-6 mt-1"
              style={{ color: "white" }}
            />
          </div>
        </button>
      </div>
    </div>
  </Transition>)
}

View file

@ -1,291 +1,32 @@
import { Transition } from "@headlessui/react";
import {
Bars3CenterLeftIcon,
LockClosedIcon,
PaperAirplaneIcon,
XMarkIcon,
} from "@heroicons/react/24/outline";
import {
MouseEventHandler,
useContext,
useEffect,
useRef,
useState,
} from "react";
import { sendAll } from "../../controllers/API";
import { alertContext } from "../../contexts/alertContext";
import { classNames, nodeColors, snakeToNormalCase } from "../../utils";
import { TabsContext } from "../../contexts/tabsContext";
import { ChatType } from "../../types/chat";
import ChatMessage from "./chatMessage";
import { NodeType } from "../../types/flow";
import { ChatMessageType, ChatType } from "../../types/chat";
import ChatTrigger from "./chatTrigger";
import ChatModal from "../../modals/chatModal";
const _ = require("lodash");
export default function Chat({ flow, reactFlowInstance }: ChatType) {
const { updateFlow, lockChat, setLockChat, flows, tabIndex } =
useContext(TabsContext);
const [saveChat, setSaveChat] = useState(false);
const [open, setOpen] = useState(true);
const [chatValue, setChatValue] = useState("");
const [chatHistory, setChatHistory] = useState(flow.chat);
const { setErrorData, setNoticeData } = useContext(alertContext);
const addChatHistory = (
message: string,
isSend: boolean,
thought?: string
) => {
let tabsChange = false;
setChatHistory((old) => {
let newChat = _.cloneDeep(old);
if (JSON.stringify(flow.chat) !== JSON.stringify(old)) {
tabsChange = true;
return old;
}
if (thought) {
newChat.push({ message, isSend, thought });
} else {
newChat.push({ message, isSend });
}
return newChat;
});
if (tabsChange) {
if (thought) {
updateFlow({
..._.cloneDeep(flow),
chat: [...flow.chat, { isSend, message, thought }],
});
} else {
updateFlow({
..._.cloneDeep(flow),
chat: [...flow.chat, { isSend, message }],
});
}
}
setSaveChat((chat) => !chat);
};
export default function Chat({ flow }: ChatType) {
const [open, setOpen] = useState(false);
useEffect(() => {
updateFlow({ ..._.cloneDeep(flow), chat: chatHistory });
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [saveChat]);
useEffect(() => {
setChatHistory(flow.chat);
}, [flow]);
useEffect(() => {
if (ref.current) ref.current.scrollIntoView({ behavior: "smooth" });
}, [chatHistory]);
function validateNode(n: NodeType): Array<string> {
if (!n.data?.node?.template || !Object.keys(n.data.node.template)) {
setNoticeData({
title:
"We've noticed a potential issue with a node in the flow. Please review it and, if necessary, submit a bug report with your exported flow file. Thank you for your help!",
});
return [];
}
const {
type,
node: { template },
} = n.data;
return Object.keys(template).reduce(
(errors: Array<string>, t) =>
errors.concat(
(template[t].required && template[t].show) &&
(!template[t].value || template[t].value === "") &&
!reactFlowInstance
.getEdges()
.some(
(e) =>
e.targetHandle.split("|")[1] === t &&
e.targetHandle.split("|")[2] === n.id
)
? [
`${type} is missing ${
template.display_name
? template.display_name
: snakeToNormalCase(template[t].name)
}.`,
]
: []
),
[] as string[]
);
}
function validateNodes() {
return reactFlowInstance
.getNodes()
.flatMap((n: NodeType) => validateNode(n));
}
const ref = useRef(null);
function sendMessage() {
if (chatValue !== "") {
let nodeValidationErrors = validateNodes();
if (nodeValidationErrors.length === 0) {
setLockChat(true);
let message = chatValue;
setChatValue("");
addChatHistory(message, true);
sendAll({
...reactFlowInstance.toObject(),
message,
chatHistory,
name: flow.name,
description: flow.description,
})
.then((r) => {
addChatHistory(r.data.result, false, r.data.thought);
setLockChat(false);
})
.catch((error) => {
setErrorData({
title: error.message ?? "Unknown Error",
list: [error.response.data.detail],
});
setLockChat(false);
let lastMessage;
setChatHistory((chatHistory) => {
let newChat = chatHistory;
lastMessage = newChat.pop().message;
return newChat;
});
setChatValue(lastMessage);
});
} else {
setErrorData({
title: "Oops! Looks like you missed some required information:",
list: nodeValidationErrors,
});
}
} else {
setErrorData({
title: "Error sending message",
list: ["The message cannot be empty."],
});
}
}
function clearChat() {
setChatHistory([]);
updateFlow({ ..._.cloneDeep(flow), chat: [] });
}
const handleKeyDown = (event: KeyboardEvent) => {
if (event.key === "K" && event.shiftKey && (event.metaKey||event.ctrlKey)) {
setOpen(oldState=>!oldState);
}
};
document.addEventListener("keydown", handleKeyDown);
return () => {
document.removeEventListener("keydown", handleKeyDown);
};
}, []);
return (
<>
<Transition
show={open}
appear={true}
enter="transition ease-out duration-300"
enterFrom="translate-y-96"
enterTo="translate-y-0"
leave="transition ease-in duration-300"
leaveFrom="translate-y-0"
leaveTo="translate-y-96"
>
<div className="w-[340px] absolute bottom-0 right-1">
<div className="border dark:border-gray-700 h-full rounded-xl rounded-b-none bg-white dark:bg-gray-800 shadow">
<div
onClick={() => {
setOpen(false);
}}
className="flex justify-between cursor-pointer items-center px-5 py-2 border-b dark:border-b-gray-700"
>
<div className="flex gap-3 text-lg dark:text-white font-medium items-center">
<Bars3CenterLeftIcon
className="h-5 w-5 mt-1"
style={{ color: nodeColors["chat"] }}
/>
Chat
</div>
<button
className="hover:text-blue-500 dark:text-white"
onClick={(e) => {
e.stopPropagation();
clearChat();
}}
>
Clear
</button>
</div>
<div className="w-full h-[400px] flex gap-3 mb-auto overflow-y-auto scrollbar-hide flex-col bg-gray-50 dark:bg-gray-900 p-3 py-5">
{chatHistory.map((c, i) => (
<ChatMessage chat={c} key={i} />
))}
<div ref={ref}></div>
</div>
<div className="w-full bg-white dark:bg-gray-800 border-t dark:border-t-gray-600 flex items-center justify-between p-3">
<div className="relative w-full mt-1 rounded-md shadow-sm">
<input
onKeyDown={(event) => {
if (event.key === "Enter" && !lockChat) {
sendMessage();
}
}}
type="text"
disabled={lockChat}
value={lockChat ? "Thinking..." : chatValue}
onChange={(e) => {
setChatValue(e.target.value);
}}
className={classNames(
lockChat ? "bg-gray-500 text-white" : "dark:bg-gray-700",
"form-input block w-full rounded-md border-gray-300 dark:border-gray-600 dark:text-white pr-10 sm:text-sm"
)}
placeholder={"Send a message..."}
/>
<div className="absolute inset-y-0 right-0 flex items-center pr-3">
<button disabled={lockChat} onClick={() => sendMessage()}>
{lockChat ? (
<LockClosedIcon
className="h-5 w-5 text-gray-400 dark:hover:text-gray-300 animate-pulse"
aria-hidden="true"
/>
) : (
<PaperAirplaneIcon
className="h-5 w-5 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
aria-hidden="true"
/>
)}
</button>
</div>
</div>
</div>
</div>
</div>
</Transition>
<Transition
show={!open}
appear={true}
enter="transition ease-out duration-300"
enterFrom="translate-y-96"
enterTo="translate-y-0"
leave="transition ease-in duration-300"
leaveFrom="translate-y-0"
leaveTo="translate-y-96"
>
<div className="absolute bottom-0 right-1">
<div className="border flex justify-center align-center py-1 px-3 rounded-xl rounded-b-none bg-white dark:bg-gray-800 dark:border-gray-600 dark:text-white shadow">
<button
onClick={() => {
setOpen(true);
}}
>
<div className="flex gap-3 items-center">
<Bars3CenterLeftIcon
className="h-6 w-6 mt-1"
style={{ color: nodeColors["chat"] }}
/>
Chat
</div>
</button>
</div>
</div>
</Transition>
<ChatModal key={flow.id} flow={flow} open={open} setOpen={setOpen} />
<ChatTrigger open={open} setOpen={setOpen} flow={flow} />
</>
);
}

View file

@ -10,6 +10,7 @@ export default function LoadingComponent({remSize}:LoadingComponentProps){
<path d="M100 50.5908C100 78.2051 77.6142 100.591 50 100.591C22.3858 100.591 0 78.2051 0 50.5908C0 22.9766 22.3858 0.59082 50 0.59082C77.6142 0.59082 100 22.9766 100 50.5908ZM9.08144 50.5908C9.08144 73.1895 27.4013 91.5094 50 91.5094C72.5987 91.5094 90.9186 73.1895 90.9186 50.5908C90.9186 27.9921 72.5987 9.67226 50 9.67226C27.4013 9.67226 9.08144 27.9921 9.08144 50.5908Z" fill="currentColor"/>
<path d="M93.9676 39.0409C96.393 38.4038 97.8624 35.9116 97.0079 33.5539C95.2932 28.8227 92.871 24.3692 89.8167 20.348C85.8452 15.1192 80.8826 10.7238 75.2124 7.41289C69.5422 4.10194 63.2754 1.94025 56.7698 1.05124C51.7666 0.367541 46.6976 0.446843 41.7345 1.27873C39.2613 1.69328 37.813 4.19778 38.4501 6.62326C39.0873 9.04874 41.5694 10.4717 44.0505 10.1071C47.8511 9.54855 51.7191 9.52689 55.5402 10.0491C60.8642 10.7766 65.9928 12.5457 70.6331 15.2552C75.2735 17.9648 79.3347 21.5619 82.5849 25.841C84.9175 28.9121 86.7997 32.2913 88.1811 35.8758C89.083 38.2158 91.5421 39.6781 93.9676 39.0409Z" fill="currentFill"/>
</svg>
<br></br>
<span className="animate-pulse text-blue-600 text-lg">Loading...</span>
</div>
)

View file

@ -13,12 +13,10 @@ export default function ContextWrapper({ children }: { children: ReactNode }) {
<DarkProvider>
<LocationProvider>
<AlertProvider>
<TabsProvider>
<PopUpProvider>
<TabsProvider>
<TypesProvider>
{children}
<PopUpProvider>{children}</PopUpProvider>
</TypesProvider>
</PopUpProvider>
</TabsProvider>
</AlertProvider>
</LocationProvider>

View file

@ -1,11 +1,19 @@
import { createContext, useEffect, useState, useRef, ReactNode, useContext } from "react";
import {
createContext,
useEffect,
useState,
useRef,
ReactNode,
useContext,
} from "react";
import { FlowType } from "../types/flow";
import { TabsContextType } from "../types/tabs";
import { normalCaseToSnakeCase } from "../utils";
import { alertContext } from "./alertContext";
const { v4: uuidv4 } = require('uuid');
const TabsContextInitialValue: TabsContextType = {
save:()=>{},
save: () => {},
tabIndex: 0,
setTabIndex: (index: number) => {},
flows: [],
@ -13,11 +21,9 @@ const TabsContextInitialValue: TabsContextType = {
addFlow: (flowData?: any) => {},
updateFlow: (newFlow: FlowType) => {},
incrementNodeId: () => 0,
downloadFlow: (flow:FlowType) => {},
downloadFlow: (flow: FlowType) => {},
uploadFlow: () => {},
lockChat: false,
setLockChat:(prevState:boolean)=>{},
hardReset:()=>{},
hardReset: () => {},
};
export const TabsContext = createContext<TabsContextType>(
@ -25,31 +31,28 @@ export const TabsContext = createContext<TabsContextType>(
);
export function TabsProvider({ children }: { children: ReactNode }) {
const {setNoticeData} = useContext(alertContext)
const { setNoticeData } = useContext(alertContext);
const [tabIndex, setTabIndex] = useState(0);
const [flows, setFlows] = useState<Array<FlowType>>([]);
const [id, setId] = useState(0);
const [lockChat, setLockChat] = useState(false);
const [id, setId] = useState("");
const newNodeId = useRef(0);
function incrementNodeId() {
newNodeId.current = newNodeId.current + 1;
return newNodeId.current;
}
function save(){
function save() {
if (flows.length !== 0)
window.localStorage.setItem(
"tabsData",
JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current })
);
window.localStorage.setItem(
"tabsData",
JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current })
);
}
useEffect(() => {
//save tabs locally
save()
save();
}, [flows, id, tabIndex, newNodeId]);
useEffect(() => {
//get tabs locally saved
let cookie = window.localStorage.getItem("tabsData");
@ -61,15 +64,17 @@ export function TabsProvider({ children }: { children: ReactNode }) {
newNodeId.current = cookieObject.nodeId;
}
}, []);
function hardReset(){
newNodeId.current=0;
setTabIndex(0);setFlows([]);setId(0);
function hardReset() {
newNodeId.current = 0;
setTabIndex(0);
setFlows([]);
setId(uuidv4());
}
/**
* Downloads the current flow as a JSON file
*/
function downloadFlow(flow:FlowType) {
function downloadFlow(flow: FlowType) {
// create a data URI with the current flow data
const jsonString = `data:text/json;chatset=utf-8,${encodeURIComponent(
JSON.stringify(flow)
@ -82,7 +87,9 @@ export function TabsProvider({ children }: { children: ReactNode }) {
// simulate a click on the link element to trigger the download
link.click();
setNoticeData({title:"Warning: Critical data,JSON file may including API keys."})
setNoticeData({
title: "Warning: Critical data,JSON file may including API keys.",
});
}
/**
@ -139,19 +146,18 @@ export function TabsProvider({ children }: { children: ReactNode }) {
function addFlow(flow?: FlowType) {
// Get data from the flow or set it to null if there's no flow provided.
const data = flow?.data ? flow.data : null;
const description = flow?.description?flow.description:""
const description = flow?.description ? flow.description : "";
// Create a new flow with a default name if no flow is provided.
let newFlow: FlowType = {
description,
name: "New Flow",
name: flow?.name ?? "New Flow",
id: id.toString(),
data,
chat: flow ? flow.chat : [],
};
// Increment the ID counter.
setId((old) => old + 1);
setId(uuidv4());
// Add the new flow to the list of flows.
setFlows((prevState) => {
@ -171,10 +177,9 @@ export function TabsProvider({ children }: { children: ReactNode }) {
const newFlows = [...prevState];
const index = newFlows.findIndex((flow) => flow.id === newFlow.id);
if (index !== -1) {
newFlows[index].description = newFlow.description??""
newFlows[index].description = newFlow.description ?? "";
newFlows[index].data = newFlow.data;
newFlows[index].name = newFlow.name;
newFlows[index].chat = newFlow.chat;
}
return newFlows;
});
@ -185,8 +190,6 @@ export function TabsProvider({ children }: { children: ReactNode }) {
value={{
save,
hardReset,
lockChat,
setLockChat,
tabIndex,
setTabIndex,
flows,

View file

@ -1,6 +1,7 @@
import { PromptTypeAPI, errorsTypeAPI } from './../../types/api/index';
import { APIObjectType, sendAllProps } from '../../types/api/index';
import axios, { AxiosResponse } from "axios";
import { FlowType } from '../../types/flow';
export async function getAll():Promise<AxiosResponse<APIObjectType>> {
return await axios.get(`/all`);
@ -18,4 +19,22 @@ export async function checkCode(code:string):Promise<AxiosResponse<errorsTypeAPI
export async function checkPrompt(template:string):Promise<AxiosResponse<PromptTypeAPI>>{
return await axios.post('/validate/prompt',{template})
}
}
/**
 * Fetches the prebuilt example flows from the langflow_examples GitHub
 * repository: lists the `examples` directory through the GitHub contents
 * API, then downloads every `.json` entry in parallel.
 *
 * @returns the parsed example flows
 */
export async function getExamples(): Promise<FlowType[]> {
  const repoUrl =
    'https://api.github.com/repos/logspace-ai/langflow_examples/contents/examples';
  const listing = await axios.get(repoUrl);
  // Keep only JSON entries and fetch each one's raw content concurrently.
  const downloads = listing.data
    .filter((entry: any) => entry.name.endsWith('.json'))
    .map((entry: any) =>
      axios.get(entry.download_url).then((res) => res.data)
    );
  return Promise.all(downloads);
}

View file

@ -0,0 +1,55 @@
import { LockClosedIcon, PaperAirplaneIcon } from "@heroicons/react/24/outline";
import { classNames } from "../../../utils";
import { useRef } from "react";
/**
 * Text input row for the chat modal: a single-line-height textarea plus a
 * send button. Enter sends the message; Shift+Enter inserts a newline.
 * While `lockChat` is true (a response is pending) the input is disabled
 * and shows "Thinking...".
 */
export default function ChatInput({
  lockChat,
  chatValue,
  sendMessage,
  setChatValue,
}: {
  lockChat: boolean;
  chatValue: string;
  sendMessage: Function;
  setChatValue: Function;
}) {
  const inputRef = useRef(null);
  return (
    <>
      <textarea
        onKeyDown={(event) => {
          if (event.key === "Enter" && !lockChat && !event.shiftKey) {
            // Prevent the default keydown behavior: without this the
            // textarea receives a trailing newline after the message is
            // sent and the value has been cleared.
            event.preventDefault();
            sendMessage();
          }
        }}
        ref={inputRef}
        disabled={lockChat}
        style={{ resize: "none" }}
        value={lockChat ? "Thinking..." : chatValue}
        onChange={(e) => {
          setChatValue(e.target.value);
        }}
        className={classNames(
          lockChat ? "bg-gray-500 text-white" : "dark:bg-gray-700",
          "form-input block w-full custom-scroll h-10 rounded-md border-gray-300 dark:border-gray-600 dark:text-white pr-10 sm:text-sm"
        )}
        placeholder={"Send a message..."}
      />
      <div className="absolute inset-y-0 right-0 flex items-center pr-3">
        <button disabled={lockChat} onClick={() => sendMessage()}>
          {lockChat ? (
            <LockClosedIcon
              className="h-5 w-5 text-gray-400 dark:hover:text-gray-300 animate-pulse"
              aria-hidden="true"
            />
          ) : (
            <PaperAirplaneIcon
              className="h-5 w-5 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
              aria-hidden="true"
            />
          )}
        </button>
      </div>
    </>
  );
}

View file

@ -0,0 +1,82 @@
import { ChatBubbleOvalLeftEllipsisIcon } from "@heroicons/react/24/outline";
import { useState } from "react";
import { ChatMessageType } from "../../../types/chat";
import { classNames } from "../../../utils";
import AiIcon from "../../../assets/Gooey Ring-5s-271px.svg";
import { UserIcon } from "@heroicons/react/24/solid";
import FileCard from "../fileComponent";
var Convert = require("ansi-to-html");
var convert = new Convert({ newline: true });
/**
 * Renders one chat message. Bot messages (chat.isSend === false) may carry
 * `thought` (the agent's intermediate steps, ANSI-colored text converted to
 * HTML) toggled by clicking the bubble icon, and `files` rendered as
 * downloadable cards. User messages render as plain text.
 */
export default function ChatMessage({ chat }: { chat: ChatMessageType }) {
  // `hidden` === true collapses the intermediate-steps panel.
  const [hidden, setHidden] = useState(true);
  return (
    <div
      className={classNames(
        "w-full py-2 pl-2 flex",
        chat.isSend ? "bg-white" : "bg-gray-200"
      )}
    >
      <div
        className={classNames(
          "rounded-full w-8 h-8 flex items-center my-3 justify-center",
          chat.isSend ? "bg-gray-900" : "bg-gray-200"
        )}
      >
        {/* alt="" marks the avatar as decorative for screen readers. */}
        {!chat.isSend && <img className="scale-150" src={AiIcon} alt="" />}
        {chat.isSend && <UserIcon className="w-6 h-6 -mb-1 text-gray-200" />}
      </div>
      {!chat.isSend ? (
        <div className="w-full text-start flex items-center">
          <div className=" relative text-start inline-block text-gray-600 text-sm font-normal">
            {hidden && chat.thought && chat.thought !== "" && (
              <div
                onClick={() => setHidden((prev) => !prev)}
                className="absolute -top-1 -left-2 cursor-pointer"
              >
                <ChatBubbleOvalLeftEllipsisIcon className="w-5 h-5 animate-bounce" />
              </div>
            )}
            {chat.thought && chat.thought !== "" && !hidden && (
              <div
                onClick={() => setHidden((prev) => !prev)}
                className=" text-start inline-block rounded-md h-full border border-gray-300
                bg-gray-100 w-[95%] pb-3 pt-3 px-2 ml-3 cursor-pointer scrollbar-hide overflow-scroll"
                // NOTE(review): ansi-to-html does not escape HTML by default
                // (escapeXML option); if `thought` can contain attacker-
                // controlled markup this is an XSS vector — TODO confirm.
                dangerouslySetInnerHTML={{
                  __html: convert.toHtml(chat.thought),
                }}
              ></div>
            )}
            {chat.thought && chat.thought !== "" && !hidden && <br></br>}
            <div className="w-full px-4 pb-3 pt-3 pr-8">
              <span>
                {chat.message}
                {chat.files && (
                  <div className="my-2 w-full">
                    {chat.files.map((file, index) => {
                      return (
                        <div key={index} className="my-2 w-full">
                          <FileCard
                            fileName={"Generated File"}
                            fileType={file.data_type}
                            content={file.data}
                          />
                        </div>
                      );
                    })}
                  </div>
                )}
              </span>
            </div>
          </div>
        </div>
      ) : (
        <div className="w-full flex items-center">
          <div className="text-start inline-block px-3 text-sm text-gray-600 dark:text-white dark:bg-gray-700">
            {chat.message}
          </div>
        </div>
      )}
    </div>
  );
}

View file

@ -0,0 +1,36 @@
import { CloudArrowDownIcon, DocumentIcon } from "@heroicons/react/24/outline";
import * as base64js from 'base64-js';
/**
 * Card for a generated file attachment. Clicking the card decodes the
 * base64 `content` and triggers a browser download of the bytes.
 *
 * @param fileName - display name, also used as the download file name
 * @param content  - base64-encoded file bytes
 * @param fileType - display label for the file's type
 */
export default function FileCard({ fileName, content, fileType }) {
  const handleDownload = () => {
    // toByteArray already returns a Uint8Array; no extra copy is needed.
    const byteArray = base64js.toByteArray(content);
    const blob = new Blob([byteArray], { type: 'application/octet-stream' });
    const url = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.href = url;
    // NOTE(review): extension is hard-coded to ".png" regardless of
    // fileType — presumably all generated files are images; verify
    // against the backend before widening this.
    link.download = fileName + ".png";
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
    // Release the object URL so the blob can be garbage-collected.
    URL.revokeObjectURL(url);
  };
  return (
    <button
      onClick={handleDownload}
      className="bg-gray-100 shadow rounded w-1/2 text-gray-700 hover:drop-shadow-lg px-2 py-2 flex justify-between items-center border border-gray-300"
    >
      <div className="flex gap-2 text-current items-center w-full mr-2">
        {" "}
        <DocumentIcon className="w-8 h-8" />
        <div className="flex flex-col items-start">
          {" "}
          <div className="truncate text-sm text-current">{fileName}</div>
          <div className="truncate text-xs text-gray-500">{fileType}</div>
        </div>
        <CloudArrowDownIcon className="w-6 h-6 text-current ml-auto" />
      </div>
    </button>
  );
}

View file

@ -0,0 +1,330 @@
import { Dialog, Transition } from "@headlessui/react";
import {
ChatBubbleOvalLeftEllipsisIcon,
LockClosedIcon,
PaperAirplaneIcon,
} from "@heroicons/react/24/outline";
import { Fragment, useContext, useEffect, useRef, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import { FlowType, NodeType } from "../../types/flow";
import { TabsContext } from "../../contexts/tabsContext";
import { alertContext } from "../../contexts/alertContext";
import { classNames, snakeToNormalCase } from "../../utils";
import { typesContext } from "../../contexts/typesContext";
import ChatMessage from "./chatMessage";
import { FaEraser } from "react-icons/fa";
import { sendAllProps } from "../../types/api";
import { ChatMessageType, ChatType } from "../../types/chat";
import ChatInput from "./chatInput";
const _ = require("lodash");
/**
 * Full-screen chat dialog for a flow. Opens a WebSocket to the backend
 * chat endpoint, streams messages in and out, validates the flow's nodes
 * before sending, and renders the conversation with ChatMessage/ChatInput.
 */
export default function ChatModal({
  flow,
  open,
  setOpen,
}: {
  open: boolean;
  setOpen: Function;
  flow: FlowType;
}) {
  const { updateFlow } = useContext(TabsContext);
  const [chatValue, setChatValue] = useState("");
  const [chatHistory, setChatHistory] = useState<ChatMessageType[]>([]);
  const { reactFlowInstance } = useContext(typesContext);
  const { setErrorData, setNoticeData } = useContext(alertContext);
  const [ws, setWs] = useState<WebSocket | null>(null);
  // True while a response is pending; disables the input.
  const [lockChat, setLockChat] = useState(false);

  // Appends one entry to the local history. `thought` carries the agent's
  // intermediate steps; `files` carries generated attachments.
  const addChatHistory = (
    message: string,
    isSend: boolean,
    thought?: string,
    files?: Array<any>
  ) => {
    setChatHistory((old) => {
      let newChat = _.cloneDeep(old);
      if (files) {
        newChat.push({ message, isSend, files });
      } else if (thought) {
        newChat.push({ message, isSend, thought });
      } else {
        newChat.push({ message, isSend });
      }
      return newChat;
    });
  };

  // Opens the chat WebSocket for this flow and wires up its handlers.
  // NOTE(review): the backend address is hard-coded for local development;
  // presumably it should be derived from window.location — TODO confirm.
  function connectWS() {
    const newWs = new WebSocket(`ws://127.0.0.1:5003/chat/${flow.id}`);
    newWs.onopen = () => {
      console.log("WebSocket connection established!");
    };
    newWs.onmessage = (event) => {
      try {
        setLockChat(false);
        const data = JSON.parse(event.data);
        console.log("Received data:", data);
        // An array payload is the stored chat history for this flow:
        // rebuild the local history from it wholesale.
        if (Array.isArray(data)) {
          console.log(data);
          setChatHistory((_) => {
            let newChatHistory: ChatMessageType[] = [];
            data.forEach(
              (chatItem: {
                intermediate_steps?: "string";
                is_bot: boolean;
                message: string;
                type: string;
                files?: Array<any>;
              }) => {
                if (chatItem.message) {
                  newChatHistory.push(
                    chatItem.files
                      ? {
                          isSend: !chatItem.is_bot,
                          message: chatItem.message,
                          thought: chatItem.intermediate_steps,
                          files: chatItem.files,
                        }
                      : {
                          isSend: !chatItem.is_bot,
                          message: chatItem.message,
                          thought: chatItem.intermediate_steps,
                        }
                  );
                }
              }
            );
            return newChatHistory;
          });
        }
        // "end" marks a completed bot response.
        if (data.type === "end") {
          if (data.files) {
            addChatHistory(
              data.message,
              false,
              data.intermediate_steps,
              data.files
            );
          } else {
            addChatHistory(data.message, false, data.intermediate_steps);
          }
        }
        // Use strict equality; the loose `==` added no value here.
        if (data.type === "file") {
          console.log(data);
        }
      } catch (error) {
        // Non-JSON payloads are treated as error text, except the benign
        // close-code message "Error: 1005".
        if (event.data !== "Error: 1005") {
          setErrorData({ title: event.data });
        }
      }
    };
    newWs.onclose = (_) => {
      // NOTE(review): `open` is captured when this socket was created, so
      // this reconnect check may act on a stale value after the modal is
      // toggled — TODO confirm.
      if (open) {
        setLockChat(false);
        setTimeout(() => {
          connectWS();
        }, 100);
      }
    };
    setWs(newWs);
    return newWs;
  }

  // Connect on mount, close the socket on unmount.
  useEffect(() => {
    let newWs = connectWS();
    return () => {
      newWs.close();
    };
  }, []);

  // Sends the flow graph plus the new message over the socket.
  async function sendAll(data: sendAllProps) {
    if (ws) {
      ws.send(JSON.stringify(data));
    }
  }

  // Keep the newest message scrolled into view.
  useEffect(() => {
    if (ref.current) ref.current.scrollIntoView({ behavior: "smooth" });
  }, [chatHistory]);

  // If the socket died while we were waiting, release the input lock.
  useEffect(() => {
    if (ws && ws.readyState === ws.CLOSED) {
      setLockChat(false);
    }
  }, [lockChat]);

  /**
   * Returns the list of human-readable validation errors for one node:
   * a required, visible template field with no value and no incoming edge
   * targeting it produces an error. A malformed node yields a notice and
   * no errors.
   */
  function validateNode(n: NodeType): Array<string> {
    if (!n.data?.node?.template || !Object.keys(n.data.node.template)) {
      setNoticeData({
        title:
          "We've noticed a potential issue with a node in the flow. Please review it and, if necessary, submit a bug report with your exported flow file. Thank you for your help!",
      });
      return [];
    }

    const {
      type,
      node: { template },
    } = n.data;

    return Object.keys(template).reduce(
      (errors: Array<string>, t) =>
        errors.concat(
          template[t].required &&
            template[t].show &&
            (!template[t].value || template[t].value === "") &&
            // An incoming edge whose target handle names this field and
            // this node satisfies the requirement.
            !reactFlowInstance
              .getEdges()
              .some(
                (e) =>
                  e.targetHandle.split("|")[1] === t &&
                  e.targetHandle.split("|")[2] === n.id
              )
            ? [
                `${type} is missing ${
                  template.display_name
                    ? template.display_name
                    : snakeToNormalCase(template[t].name)
                }.`,
              ]
            : []
        ),
      [] as string[]
    );
  }

  // Validation errors across every node in the current flow.
  function validateNodes() {
    return reactFlowInstance
      .getNodes()
      .flatMap((n: NodeType) => validateNode(n));
  }

  const ref = useRef(null);

  // Validates the flow, then ships the message (plus the serialized flow)
  // to the backend. Surfaces validation or empty-message errors instead.
  function sendMessage() {
    if (chatValue !== "") {
      let nodeValidationErrors = validateNodes();
      if (nodeValidationErrors.length === 0) {
        setLockChat(true);
        let message = chatValue;
        setChatValue("");
        addChatHistory(message, true);
        sendAll({
          ...reactFlowInstance.toObject(),
          message,
          chatHistory,
          name: flow.name,
          description: flow.description,
        });
      } else {
        setErrorData({
          title: "Oops! Looks like you missed some required information:",
          list: nodeValidationErrors,
        });
      }
    } else {
      setErrorData({
        title: "Error sending message",
        list: ["The message cannot be empty."],
      });
    }
  }

  // Clears the local history and asks the backend to drop the stored one.
  function clearChat() {
    setChatHistory([]);
    // Guard the send: `ws` starts as null and may be CONNECTING/CLOSED;
    // the unguarded call would throw in those states.
    if (ws && ws.readyState === WebSocket.OPEN) {
      ws.send(JSON.stringify({ clear_history: true }));
    }
  }

  const { closePopUp } = useContext(PopUpContext);

  // Propagates open/close to the parent and tears the popup down after
  // the 300 ms leave transition has played.
  function setModalOpen(x: boolean) {
    setOpen(x);
    if (x === false) {
      setTimeout(() => {
        closePopUp();
      }, 300);
    }
  }

  return (
    <Transition.Root show={open} appear={true} as={Fragment}>
      <Dialog
        as="div"
        className="relative z-10"
        onClose={setModalOpen}
        initialFocus={ref}
      >
        <Transition.Child
          as={Fragment}
          enter="ease-out duration-300"
          enterFrom="opacity-0"
          enterTo="opacity-100"
          leave="ease-in duration-200"
          leaveFrom="opacity-100"
          leaveTo="opacity-0"
        >
          <div className="fixed inset-0 bg-black backdrop-blur-sm dark:bg-gray-600 dark:bg-opacity-80 bg-opacity-80 transition-opacity" />
        </Transition.Child>

        <div className="fixed inset-0 z-10 overflow-y-auto">
          <div className="flex h-full items-end justify-center p-4 text-center sm:items-center sm:p-0">
            <Transition.Child
              as={Fragment}
              enter="ease-out duration-300"
              enterFrom="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
              enterTo="opacity-100 translate-y-0 sm:scale-100"
              leave="ease-in duration-200"
              leaveFrom="opacity-100 translate-y-0 sm:scale-100"
              leaveTo="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
            >
              <Dialog.Panel className=" drop-shadow-2xl relative flex flex-col justify-between transform h-[95%] overflow-hidden rounded-lg bg-white dark:bg-gray-800 text-left shadow-xl transition-all sm:my-8 w-[690px]">
                <div className="relative w-full">
                  <button
                    onClick={() => clearChat()}
                    className="absolute top-2 right-2 hover:text-red-500"
                  >
                    <FaEraser className="w-4 h-4" />
                  </button>
                </div>
                <div className="w-full h-full bg-white dark:bg-gray-800 border-t dark:border-t-gray-600 flex-col flex items-center overflow-scroll scrollbar-hide">
                  {chatHistory.length > 0 ? (
                    chatHistory.map((c, i) => <ChatMessage chat={c} key={i} />)
                  ) : (
                    <div className="flex flex-col h-full text-center justify-center w-full items-center align-middle ">
                      <span>
                        👋 <span className="text-gray-600 text-lg">LangFlow Chat</span>
                      </span>
                      <br />
                      <div className="bg-gray-100 rounded-md w-2/4 px-6 py-8 border border-gray-200">
                        <span className="text-base text-gray-500">
                          Start a conversation and click the agents thoughts{" "}
                          <span>
                            <ChatBubbleOvalLeftEllipsisIcon className="w-6 h-6 inline animate-bounce " />
                          </span>{" "}
                          to inspect the chaining process.
                        </span>
                      </div>
                    </div>
                  )}
                  <div ref={ref}></div>
                </div>
                <div className="w-full bg-white dark:bg-gray-800 border-t dark:border-t-gray-600 flex-col flex items-center justify-between p-3">
                  <div className="relative w-full mt-1 rounded-md shadow-sm">
                    <ChatInput
                      chatValue={chatValue}
                      lockChat={lockChat}
                      sendMessage={sendMessage}
                      setChatValue={setChatValue}
                    />
                  </div>
                </div>
              </Dialog.Panel>
            </Transition.Child>
          </div>
        </div>
      </Dialog>
    </Transition.Root>
  );
}

View file

@ -1,6 +1,7 @@
import React, { ReactNode } from "react";
import { DocumentDuplicateIcon } from "@heroicons/react/solid";
import { classNames } from "../../../utils";
import Tooltip from "../../../components/TooltipComponent";
export default function ButtonBox({
onClick,
@ -9,7 +10,8 @@ export default function ButtonBox({
icon,
bgColor,
textColor,
deactivate
deactivate,
size,
}: {
onClick: () => void;
title: string;
@ -17,31 +19,103 @@ export default function ButtonBox({
icon: ReactNode;
bgColor: string;
textColor: string;
deactivate?:boolean;
deactivate?: boolean;
size: "small" | "medium" | "big";
}) {
let bigCircle: string;
let smallCircle: string;
let titleFontSize: string;
let descriptionFontSize: string;
let padding: string;
let marginTop: string;
let height: string;
let widht: string;
switch (size) {
case "small":
bigCircle = "h-12 w-12";
smallCircle = "h-8 w-8";
titleFontSize = "text-sm";
descriptionFontSize = "text-xs";
padding = "p-2";
marginTop = "mt-2";
height = "h-36";
widht = "w-32";
break;
case "medium":
bigCircle = "h-16 w-16";
smallCircle = "h-12 w-12";
titleFontSize = "text-base";
descriptionFontSize = "text-sm";
padding = "p-4";
marginTop = "mt-3";
height = "h-44";
widht = "w-36";
break;
case "big":
bigCircle = "h-20 w-20";
smallCircle = "h-16 w-16";
titleFontSize = "text-lg";
descriptionFontSize = "text-sm";
padding = "p-8";
marginTop = "mt-6";
height = "h-56";
widht = "w-44";
break;
default:
bigCircle = "h-20 w-20";
smallCircle = "h-16 w-16";
titleFontSize = "text-lg";
descriptionFontSize = "text-sm";
padding = "p-8";
marginTop = "mt-6";
height = "h-56";
widht = "w-44";
break;
}
return (
<button disabled={deactivate} onClick={onClick}>
<div
className={classNames(
"col-span-1 flex flex-col divide-y divide-gray-200 rounded-lg text-center shadow border border-gray-300 hover:shadow-lg transform hover:scale-105",
bgColor
)}
>
<div className="flex flex-1 flex-col p-8">
<div className="mx-auto flex items-center justify-center h-20 w-20 bg-white/30 rounded-full">
<div className="mx-auto flex items-center justify-center h-16 w-16 bg-white rounded-full">
<div className={textColor}>
{icon}
</div>
<Tooltip title={description} placement="bottom">
<div
className={classNames(
"col-span-1 flex flex-col divide-y divide-gray-200 rounded-lg text-center shadow border border-gray-300 hover:shadow-lg transform hover:scale-105",
bgColor,
height,
widht
)}
>
<div className={`flex flex-1 flex-col ${padding}`}>
<div
className={`mx-auto flex items-center justify-center ${bigCircle} bg-white/30 rounded-full`}
>
<div
className={`mx-auto flex items-center justify-center ${smallCircle} bg-white rounded-full`}
>
<div className={textColor}>{icon}</div>
</div>
</div>
<h3
className={classNames(
"font-semibold text-white",
titleFontSize,
marginTop
)}
>
{title}
</h3>
<div className="mt-1 flex flex-grow flex-col justify-between">
<dt className="sr-only">{title}</dt>
{/* <dd
className={classNames(
"text-gray-100 line-clamp-2",
descriptionFontSize
)}
>
{deactivate ? "Coming soon" : description}
</dd> */}
</div>
</div>
<h3 className="mt-6 text-lg font-semibold text-white">{title}</h3>
<div className="mt-1 flex flex-grow flex-col justify-between">
<dt className="sr-only">{title}</dt>
<dd className="text-sm text-gray-100">{deactivate? "Coming soon":description}</dd>
</div>
</div>
</div>
</Tooltip>
</button>
);
}

View file

@ -3,19 +3,30 @@ import {
XMarkIcon,
ArrowDownTrayIcon,
DocumentDuplicateIcon,
ComputerDesktopIcon,
ComputerDesktopIcon,
ArrowUpTrayIcon,
ArrowLeftIcon,
} from "@heroicons/react/24/outline";
import { Fragment, useContext, useRef, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import { TabsContext } from "../../contexts/tabsContext";
import ButtonBox from "./buttonBox";
import { getExamples } from "../../controllers/API";
import { error } from "console";
import { alertContext } from "../../contexts/alertContext";
import LoadingComponent from "../../components/loadingComponent";
import { FlowType } from "../../types/flow";
import { classNames } from "../../utils";
export default function ImportModal() {
const [open, setOpen] = useState(true);
const { setErrorData } = useContext(alertContext);
const { closePopUp } = useContext(PopUpContext);
const ref = useRef();
const {uploadFlow} = useContext(TabsContext)
const [showExamples, setShowExamples] = useState(false);
const [loadingExamples, setLoadingExamples] = useState(false);
const [examples, setExamples] = useState<FlowType[]>([]);
const { uploadFlow, addFlow } = useContext(TabsContext);
function setModalOpen(x: boolean) {
setOpen(x);
if (x === false) {
@ -24,6 +35,22 @@ export default function ImportModal() {
}, 300);
}
}
function handleExamples() {
setLoadingExamples(true);
getExamples()
.then((result) => {
setLoadingExamples(false);
setExamples(result);
})
.catch((error) =>
setErrorData({
title: "there was an error loading examples, please try again",
list: [error.message],
})
);
}
return (
<Transition.Root show={open} appear={true} as={Fragment}>
<Dialog
@ -68,6 +95,41 @@ export default function ImportModal() {
<XMarkIcon className="h-6 w-6" aria-hidden="true" />
</button>
</div>
{showExamples && (
<>
<div className="z-50 absolute top-2 left-0 hidden pt-4 pl-4 sm:block">
<button
type="button"
className="rounded-md text-gray-400 hover:text-gray-500 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2"
onClick={() => {
setShowExamples(false);
}}
>
<span className="sr-only">Close</span>
<ArrowLeftIcon className="h-6 w-6" aria-hidden="true" />
</button>
</div>
<div className="z-50 absolute bottom-2 left-0 hidden pt-4 pl-2 sm:block">
<a
href="https://github.com/logspace-ai/langflow_examples"
target="_blank"
>
<svg
width="24"
viewBox="0 0 98 96"
xmlns="http://www.w3.org/2000/svg"
>
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M48.854 0C21.839 0 0 22 0 49.217c0 21.756 13.993 40.172 33.405 46.69 2.427.49 3.316-1.059 3.316-2.362 0-1.141-.08-5.052-.08-9.127-13.59 2.934-16.42-5.867-16.42-5.867-2.184-5.704-5.42-7.17-5.42-7.17-4.448-3.015.324-3.015.324-3.015 4.934.326 7.523 5.052 7.523 5.052 4.367 7.496 11.404 5.378 14.235 4.074.404-3.178 1.699-5.378 3.074-6.6-10.839-1.141-22.243-5.378-22.243-24.283 0-5.378 1.94-9.778 5.014-13.2-.485-1.222-2.184-6.275.486-13.038 0 0 4.125-1.304 13.426 5.052a46.97 46.97 0 0 1 12.214-1.63c4.125 0 8.33.571 12.213 1.63 9.302-6.356 13.427-5.052 13.427-5.052 2.67 6.763.97 11.816.485 13.038 3.155 3.422 5.015 7.822 5.015 13.2 0 18.905-11.404 23.06-22.324 24.283 1.78 1.548 3.316 4.481 3.316 9.126 0 6.6-.08 11.897-.08 13.526 0 1.304.89 2.853 3.316 2.364 19.412-6.52 33.405-24.935 33.405-46.691C97.707 22 75.788 0 48.854 0z"
fill="#24292f"
/>
</svg>
</a>
</div>
</>
)}
<div className="h-full w-full flex flex-col justify-center items-center">
<div className="flex w-full pb-4 z-10 justify-center shadow-sm">
<div className="mx-auto mt-4 flex h-12 w-12 flex-shrink-0 items-center justify-center rounded-full bg-blue-100 dark:bg-gray-900 sm:mx-0 sm:h-10 sm:w-10">
@ -81,36 +143,81 @@ export default function ImportModal() {
as="h3"
className="text-lg font-medium dark:text-white leading-10 text-gray-900"
>
Import from
{showExamples ?"Select an example":"Import from"}
</Dialog.Title>
</div>
</div>
<div className="h-full w-full bg-gray-200 dark:bg-gray-900 p-4 gap-4 flex flex-row justify-center items-center">
<div className="flex h-full w-full justify-evenly items-center">
<ButtonBox
deactivate
bgColor="bg-slate-400"
description="Prebuilt Examples"
icon={
<DocumentDuplicateIcon className="h-10 w-10 flex-shrink-0" />
}
onClick={() => console.log("sdsds")}
textColor="text-slate-400"
title="Examples"
></ButtonBox>
<ButtonBox
bgColor="bg-blue-500"
description="Import from Local"
icon={
<ComputerDesktopIcon className="h-10 w-10 flex-shrink-0" />
}
onClick={() => {uploadFlow();setModalOpen(false)}}
textColor="text-blue-500"
title="Local file"
></ButtonBox>
</div>
<div
className={classNames(
"h-full w-full bg-gray-200 dark:bg-gray-900 gap-4",
showExamples && !loadingExamples
? "flex flex-row start justify-start items-start p-9 flex-wrap overflow-auto"
: "flex flex-row justify-center items-center p-4"
)}
>
{!showExamples && (
<div className="flex h-full w-full justify-evenly items-center">
<ButtonBox
size="big"
bgColor="bg-emerald-500"
description="Prebuilt Examples"
icon={
<DocumentDuplicateIcon className="h-10 w-10 flex-shrink-0" />
}
onClick={() => {
setShowExamples(true);
handleExamples();
}}
textColor="text-emerald-400"
title="Examples"
></ButtonBox>
<ButtonBox
size="big"
bgColor="bg-blue-500"
description="Import from Local"
icon={
<ComputerDesktopIcon className="h-10 w-10 flex-shrink-0" />
}
onClick={() => {
uploadFlow();
setModalOpen(false);
}}
textColor="text-blue-500"
title="Local file"
></ButtonBox>
</div>
)}
{showExamples && loadingExamples && (
<div className="flex align-middle justify-center items-center">
<LoadingComponent remSize={30} />
</div>
)}
{showExamples &&
!loadingExamples &&
examples.map((example, index) => {
return (
<div id="index">
{" "}
<ButtonBox
size="small"
bgColor="bg-emerald-500"
description={
example.description ?? "Prebuilt Examples"
}
icon={
<DocumentDuplicateIcon className="h-6 w-6 flex-shrink-0" />
}
onClick={() => {
addFlow(example);
setModalOpen(false);
}}
textColor="text-emerald-400"
title={example.name}
></ButtonBox>
</div>
);
})}
</div>
</div>
</Dialog.Panel>
</Transition.Child>

View file

@ -18,7 +18,8 @@ export default function TabComponent({ selected, flow, onClick }:{flow:FlowType,
className="dark:text-white flex justify-between select-none truncate w-44 items-center px-4 my-1.5 border-x border-x-gray-300 dark:border-x-gray-600 -ml-px"
onClick={onClick}
>
{flow.name}
<span className="w-32 truncate text-left">{flow.name}</span>
<button
onClick={(e) => {
e.stopPropagation();

View file

@ -60,18 +60,18 @@ export default function TabsManagerComponent() {
/>
)
}
className="flex items-center gap-1 pr-2 border-gray-400 border-r text-sm text-gray-400 hover:text-gray-500"
className="flex items-center gap-1 pr-2 border-gray-400 border-r text-sm text-gray-600 hover:text-gray-500"
>
Import <ArrowUpTrayIcon className="w-5 h-5" />
</button>
<button
onClick={() =>openPopUp(<ExportModal/>)}
className="flex items-center gap-1 mr-2 text-sm text-gray-400 hover:text-gray-500"
className="flex items-center gap-1 mr-2 text-sm text-gray-600 hover:text-gray-500"
>
Export <ArrowDownTrayIcon className="h-5 w-5" />
</button>
<button
className="text-gray-400 hover:text-gray-500 "
className="text-gray-600 hover:text-gray-500 "
onClick={() => {
setDark(!dark);
}}
@ -83,7 +83,7 @@ export default function TabsManagerComponent() {
)}
</button>
<button
className="text-gray-400 hover:text-gray-500 relative"
className="text-gray-600 hover:text-gray-500 relative"
onClick={(event: React.MouseEvent<HTMLElement>) => {
setNotificationCenter(false);
const top = (event.target as Element).getBoundingClientRect().top;

5
src/frontend/src/svg.d.ts vendored Normal file
View file

@ -0,0 +1,5 @@
declare module '*.svg' {
const content: any;
export default content;
}

View file

@ -1,5 +1,10 @@
import { ReactFlowInstance } from 'reactflow';
import { ReactFlowInstance } from "reactflow";
import { FlowType } from "../flow";
export type ChatType = {flow:FlowType,reactFlowInstance:ReactFlowInstance}
export type ChatMessageType = { message: string; isSend: boolean, thought?:string }
export type ChatType = { flow: FlowType; reactFlowInstance: ReactFlowInstance };
export type ChatMessageType = {
message: string;
isSend: boolean;
thought?: string;
files?: Array<{ data: string; type: string; data_type: string }>;
};

View file

@ -65,3 +65,17 @@ export type FloatComponentType = {
disabled?: boolean;
onChange: (value: string) => void;
};
export type TooltipComponentType={children:ReactElement,title:string,placement?:
| 'bottom-end'
| 'bottom-start'
| 'bottom'
| 'left-end'
| 'left-start'
| 'left'
| 'right-end'
| 'right-start'
| 'right'
| 'top-end'
| 'top-start'
| 'top';}

View file

@ -6,7 +6,6 @@ export type FlowType = {
name: string;
id: string;
data: ReactFlowJsonObject;
chat: Array<ChatMessageType>;
description:string;
};
export type NodeType = {id:string,type:string,position:XYPosition,data:NodeDataType}

View file

@ -6,12 +6,10 @@ export type TabsContextType = {
setTabIndex: (index: number) => void;
flows: Array<FlowType>;
removeFlow: (id: string) => void;
addFlow: (flowData?: any) => void;
addFlow: (flowData?: FlowType) => void;
updateFlow: (newFlow: FlowType) => void;
incrementNodeId: () => number;
downloadFlow: (flow:FlowType) => void;
uploadFlow: () => void;
lockChat:boolean;
setLockChat:(prevState:boolean)=>void;
hardReset:()=>void;
};

View file

@ -78,7 +78,7 @@ export const nodeColors: {[char: string]: string} = {
tools: "#FF3434",
memories: "#F5B85A",
advanced: "#000000",
chat: "#454173",
chat: "#198BF6",
thought:"#272541",
embeddings:"#42BAA7",
documentloaders:"#7AAE42",

View file

@ -52,11 +52,27 @@ module.exports = {
margin: 0,
},
},
".password": {
"-webkit-text-security": "disc",
"font-family": "text-security-disc",
'.password':{
"-webkit-text-security":"disc",
"font-family": "text-security-disc"
},
});
}),
'.custom-scroll':{
'&::-webkit-scrollbar': {
'width': '8px',
},
'&::-webkit-scrollbar-track': {
'backgroundColor': '#f1f1f1',
},
'&::-webkit-scrollbar-thumb': {
'backgroundColor': '#ccc',
'borderRadius': '999px',
},
'&::-webkit-scrollbar-thumb:hover': {
'backgroundColor': '#bbb'
}
}
})
}),require('@tailwindcss/line-clamp')
],
};

View file

@ -1,4 +1,8 @@
import json
from pathlib import Path
from typing import AsyncGenerator
from httpx import AsyncClient
import pytest
from fastapi.testclient import TestClient
@ -21,6 +25,15 @@ def get_text():
"""
@pytest.fixture()
async def async_client() -> AsyncGenerator:
from langflow.main import create_app
app = create_app()
async with AsyncClient(app=app, base_url="http://testserver") as client:
yield client
# Create client fixture for FastAPI
@pytest.fixture(scope="module")
def client():
@ -30,3 +43,37 @@ def client():
with TestClient(app) as client:
yield client
def get_graph(_type="basic"):
"""Get a graph from a json file"""
from langflow.graph.graph import Graph
if _type == "basic":
path = pytest.BASIC_EXAMPLE_PATH
elif _type == "complex":
path = pytest.COMPLEX_EXAMPLE_PATH
elif _type == "openapi":
path = pytest.OPENAPI_EXAMPLE_PATH
with open(path, "r") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
nodes = data_graph["nodes"]
edges = data_graph["edges"]
return Graph(nodes, edges)
@pytest.fixture
def basic_graph():
return get_graph()
@pytest.fixture
def complex_graph():
return get_graph("complex")
@pytest.fixture
def openapi_graph():
return get_graph("openapi")

View file

@ -3,7 +3,7 @@ import tempfile
from pathlib import Path
import pytest
from langflow.cache.utils import PREFIX, save_cache
from langflow.cache.base import PREFIX, save_cache
from langflow.interface.run import load_langchain_object

View file

@ -15,7 +15,7 @@ from langflow.graph.nodes import (
ToolNode,
WrapperNode,
)
from langflow.interface.run import get_result_and_thought_using_graph
from langflow.interface.run import get_result_and_steps
from langflow.utils.payload import build_json, get_root_node
# Test cases for the graph module
@ -24,38 +24,6 @@ from langflow.utils.payload import build_json, get_root_node
# BASIC_EXAMPLE_PATH, COMPLEX_EXAMPLE_PATH, OPENAPI_EXAMPLE_PATH
def get_graph(_type="basic"):
"""Get a graph from a json file"""
if _type == "basic":
path = pytest.BASIC_EXAMPLE_PATH
elif _type == "complex":
path = pytest.COMPLEX_EXAMPLE_PATH
elif _type == "openapi":
path = pytest.OPENAPI_EXAMPLE_PATH
with open(path, "r") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
nodes = data_graph["nodes"]
edges = data_graph["edges"]
return Graph(nodes, edges)
@pytest.fixture
def basic_graph():
return get_graph()
@pytest.fixture
def complex_graph():
return get_graph("complex")
@pytest.fixture
def openapi_graph():
return get_graph("openapi")
def get_node_by_type(graph, node_type: Type[Node]) -> Union[Node, None]:
"""Get a node by type"""
return next((node for node in graph.nodes if isinstance(node, node_type)), None)
@ -441,7 +409,7 @@ def test_get_result_and_thought(basic_graph):
# now build again and check if FakeListLLM was used
# Get the result and thought
result, thought = get_result_and_thought_using_graph(langchain_object, message)
result, thought = get_result_and_steps(langchain_object, message)
# The result should be a str
assert isinstance(result, str)
# The thought should be a Thought

47
tests/test_websocket.py Normal file
View file

@ -0,0 +1,47 @@
import json
from unittest.mock import patch
from langflow.api.schemas import ChatMessage
from fastapi.testclient import TestClient
def test_websocket_connection(client: TestClient):
with client.websocket_connect("/ws/test_client") as websocket:
assert websocket.scope["client"] == ["testclient", 50000]
assert websocket.scope["path"] == "/ws/test_client"
def test_chat_history(client: TestClient):
chat_history = []
# Mock the process_graph function to return a specific value
with patch("langflow.api.chat_manager.process_graph") as mock_process_graph:
mock_process_graph.return_value = ("Hello, I'm a mock response!", "")
with client.websocket_connect("/ws/test_client") as websocket:
# First message should be the history
history = websocket.receive_json()
assert json.loads(history) == [] # Empty history
# Send a message
payload = {"message": "Hello"}
websocket.send_json(json.dumps(payload))
# Receive the response from the server
response = websocket.receive_json()
assert json.loads(response) == {
"is_bot": True,
"message": None,
"intermediate_steps": "",
"type": "start",
}
# Send another message
payload = {"message": "How are you?"}
websocket.send_json(json.dumps(payload))
# Receive the response from the server
response = websocket.receive_json()
assert json.loads(response) == {
"is_bot": True,
"message": "Hello, I'm a mock response!",
"intermediate_steps": "",
"type": "end",
}