Add LangWatch Integration (#2608)

* First implementation of LangWatch tracer

* Update to langwatch 0.1.4 to use root span for better control of the all-wrapping trace, workaround for llm not being used for an actual call and better ids on langwatch ui

* Remove dependency from backend base internal and add langwatch docs

* [autofix.ci] apply automated fixes

* Fix using session id for metadata, except if the same as flow_id

* Fix wrong error variable and support for python <3.10 is not necessary given langflow pyproject restrictions

* Bump langwatch to v0.1.4

* [autofix.ci] apply automated fixes

* Remove extra line that popped up

* Fix missing log parameter on method

* Fix mypy issues

* Bump langwatch to v0.1.7 to truncate large documents

* Move docs to the right folder

* chore: update lock

* chore: Update clarifai-grpc to version 10.6.4, cohere to version 5.6.1, langwatch to version 0.1.3, and litellm to version 1.41.23

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
This commit is contained in:
Rogério Chaves 2024-07-16 19:28:57 +02:00 committed by GitHub
commit 89f2f62041
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 578 additions and 147 deletions

View file

@ -0,0 +1,46 @@
import Admonition from "@theme/Admonition";
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
# LangWatch
LangWatch is an all-in-one LLMOps platform for monitoring, observability, analytics, evaluations and alerting, helping you gain user insights and improve your LLM workflows.
To integrate with Langflow, just add your LangWatch API key as a Langflow environment variable and you are good to go!
## Step-by-step Configuration
1. Obtain your LangWatch API key from https://app.langwatch.com/
2. Add the following key to Langflow .env file:
```bash
LANGWATCH_API_KEY="your-api-key"
```
or export it in your terminal:
```bash
export LANGWATCH_API_KEY="your-api-key"
```
3. Restart Langflow using `langflow run --env-file .env`
4. Run any project and check the LangWatch dashboard for monitoring and observability.
<ZoomableImage
alt="LangWatch Flow Example"
sources={{
light: useBaseUrl("img/langwatch-flow.png"),
dark: useBaseUrl("img/langwatch-flow.png"),
}}
style={{ width: "100%", margin: "20px auto", boxShadow: "rgba(50, 50, 93, 0.2) 0px 0px 27px" }}
/>
<ZoomableImage
alt="LangWatch Trace"
sources={{
light: useBaseUrl("img/langwatch-trace.png"),
dark: useBaseUrl("img/langwatch-trace.png"),
}}
style={{ width: "100%", margin: "20px auto", boxShadow: "rgba(50, 50, 93, 0.2) 0px 0px 27px" }}
/>

BIN
docs/static/img/langwatch-flow.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 156 KiB

BIN
docs/static/img/langwatch-trace.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 149 KiB

172
poetry.lock generated
View file

@ -1083,13 +1083,13 @@ all = ["pycocotools (==2.0.6)"]
[[package]]
name = "clarifai-grpc"
version = "10.6.3"
version = "10.6.4"
description = "Clarifai gRPC API Client"
optional = false
python-versions = ">=3.8"
files = [
{file = "clarifai_grpc-10.6.3-py3-none-any.whl", hash = "sha256:304a8123dfe6fb41953026e0e4b800bffc3998a75408f51f3ab2a6eacfb1781e"},
{file = "clarifai_grpc-10.6.3.tar.gz", hash = "sha256:6be24d21f124678384902e693eb9b5d2e5c57aeef57c098e3a2b94e40a79c198"},
{file = "clarifai_grpc-10.6.4-py3-none-any.whl", hash = "sha256:9fb8c94e6ede5e1005010643f516aa7c1dd3c59ab9282e03ba4932fd929d7584"},
{file = "clarifai_grpc-10.6.4.tar.gz", hash = "sha256:84d92b10fc32c17ecf0c0fc30ec4c9960a6a9ee701f2829f0c372892ae761d14"},
]
[package.dependencies]
@ -1232,13 +1232,13 @@ types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency
[[package]]
name = "cohere"
version = "5.5.8"
version = "5.6.1"
description = ""
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "cohere-5.5.8-py3-none-any.whl", hash = "sha256:e1ed84b90eadd13c6a68ee28e378a0bb955f8945eadc6eb7ee126b3399cafd54"},
{file = "cohere-5.5.8.tar.gz", hash = "sha256:84ce7666ff8fbdf4f41fb5f6ca452ab2639a514bc88967a2854a9b1b820d6ea0"},
{file = "cohere-5.6.1-py3-none-any.whl", hash = "sha256:1c8bcd39a54622d64b83cafb865f102cd2565ce091b0856fd5ce11bf7169109a"},
{file = "cohere-5.6.1.tar.gz", hash = "sha256:5d7efda64f0e512d4cc35aa04b17a6f74b3d8c175a99f2797991a7f31dfac349"},
]
[package.dependencies]
@ -1341,6 +1341,17 @@ files = [
{file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"},
]
[[package]]
name = "coolname"
version = "2.2.0"
description = "Random name and slug generator"
optional = false
python-versions = "*"
files = [
{file = "coolname-2.2.0-py2.py3-none-any.whl", hash = "sha256:4d1563186cfaf71b394d5df4c744f8c41303b6846413645e31d31915cdeb13e8"},
{file = "coolname-2.2.0.tar.gz", hash = "sha256:6c5d5731759104479e7ca195a9b64f7900ac5bead40183c09323c7d0be9e75c7"},
]
[[package]]
name = "couchbase"
version = "4.3.0"
@ -4824,13 +4835,13 @@ pymongo = ">=4.6.1,<5.0"
[[package]]
name = "langchain-nvidia-ai-endpoints"
version = "0.1.3"
version = "0.1.4"
description = "An integration package connecting NVIDIA AI Endpoints and LangChain"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_nvidia_ai_endpoints-0.1.3-py3-none-any.whl", hash = "sha256:16d5a5862156d7e177a55727dc6411356529dda35b729344d7157f6cf8315a24"},
{file = "langchain_nvidia_ai_endpoints-0.1.3.tar.gz", hash = "sha256:a68f53ef2bcbb3360a6217a4a4dad014efc50c73a2bd0fe6896818288d456283"},
{file = "langchain_nvidia_ai_endpoints-0.1.4-py3-none-any.whl", hash = "sha256:e65e21b4ecdd1e5add44520399bffcd4a158194d28021a6e789f76e895fe27d6"},
{file = "langchain_nvidia_ai_endpoints-0.1.4.tar.gz", hash = "sha256:8b2c8b80f00d1d64f5ee25a93df0692cbb6d13e51f00e45332c2851fef77f9f3"},
]
[package.dependencies]
@ -4943,6 +4954,7 @@ jq = {version = "^1.7.0", markers = "sys_platform != \"win32\""}
langchain = "~0.2.0"
langchain-experimental = "^0.0.61"
langchainhub = "~0.1.15"
langwatch = "^0.1.3"
loguru = "^0.7.1"
multiprocess = "^0.70.14"
nest-asyncio = "^1.6.0"
@ -4950,7 +4962,7 @@ opentelemetry-api = "^1.25.0"
opentelemetry-exporter-prometheus = "^0.46b0"
opentelemetry-sdk = "^1.25.0"
orjson = "3.10.0"
pandas = "2.2.0"
pandas = "2.2.2"
passlib = "^1.7.4"
pillow = "^10.2.0"
platformdirs = "^4.2.0"
@ -5007,13 +5019,13 @@ openai = ["openai (>=0.27.8)"]
[[package]]
name = "langsmith"
version = "0.1.85"
version = "0.1.86"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langsmith-0.1.85-py3-none-any.whl", hash = "sha256:c1f94384f10cea96f7b4d33fd3db7ec180c03c7468877d50846f881d2017ff94"},
{file = "langsmith-0.1.85.tar.gz", hash = "sha256:acff31f9e53efa48586cf8e32f65625a335c74d7c4fa306d1655ac18452296f6"},
{file = "langsmith-0.1.86-py3-none-any.whl", hash = "sha256:55ed80cc6e98f9761f9b3ec3c49e01f6745d13e40bef80d9f831acabfd9a8a1e"},
{file = "langsmith-0.1.86.tar.gz", hash = "sha256:2e66577817253327b99b727588c3173fbba217fe0ca07ac6b7cdd23fc4894104"},
]
[package.dependencies]
@ -5024,15 +5036,43 @@ pydantic = [
]
requests = ">=2,<3"
[[package]]
name = "langwatch"
version = "0.1.8"
description = "Python SDK for LangWatch for monitoring your LLMs"
optional = false
python-versions = "<4.0,>=3.9"
files = [
{file = "langwatch-0.1.8-py3-none-any.whl", hash = "sha256:e6e7ffe2f3cd61c477e8ff6de4ad3d1e06f63b0c8f02880d68293fc126ec2bf7"},
{file = "langwatch-0.1.8.tar.gz", hash = "sha256:2ccc3a741ef9bf493946264ab8fff5cb33845e51d4426136218d62b1a4cbd26d"},
]
[package.dependencies]
coolname = ">=2.2.0,<3.0.0"
deprecated = ">=1.2.14,<2.0.0"
httpx = ">=0.27.0,<0.28.0"
nanoid = ">=2.0.0,<3.0.0"
pandas = ">=2.2.2,<3.0.0"
pydantic = ">=2.5.2"
requests = ">=2.31.0,<3.0.0"
retry = ">=0.9.2,<0.10.0"
tqdm = ">=4.66.2,<5.0.0"
[package.extras]
dspy = ["dspy-ai (>=2.4.12,<3.0.0)"]
langchain = ["langchain (>=0.2.0,<0.3.0)"]
litellm = ["litellm (>=1.40.15,<2.0.0)"]
openai = ["openai (>=1.3.7,<2.0.0)"]
[[package]]
name = "litellm"
version = "1.41.22"
version = "1.41.23"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
files = [
{file = "litellm-1.41.22-py3-none-any.whl", hash = "sha256:b7560a5487ad23a2d8d5ce0d810d640488e6264f22f35041c1e3f9ca8933e71b"},
{file = "litellm-1.41.22.tar.gz", hash = "sha256:1cdfffc792535e87a40463fe04205353ebe681ce87fc3245a0436f4c124f6ed6"},
{file = "litellm-1.41.23-py3-none-any.whl", hash = "sha256:ccfe7763e694ae43b50229fc78bb999a18507b654ec2046c55c6e2a2ea48bf96"},
{file = "litellm-1.41.23.tar.gz", hash = "sha256:320afcd172fb936f1297ce135075e3397141cf245cdb936f01871c9d6ed56516"},
]
[package.dependencies]
@ -5917,6 +5957,17 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "nanoid"
version = "2.0.0"
description = "A tiny, secure, URL-friendly, unique string ID generator for Python"
optional = false
python-versions = "*"
files = [
{file = "nanoid-2.0.0-py3-none-any.whl", hash = "sha256:90aefa650e328cffb0893bbd4c236cfd44c48bc1f2d0b525ecc53c3187b653bb"},
{file = "nanoid-2.0.0.tar.gz", hash = "sha256:5a80cad5e9c6e9ae3a41fa2fb34ae189f7cb420b2a5d8f82bd9d23466e4efa68"},
]
[[package]]
name = "nest-asyncio"
version = "1.6.0"
@ -6270,13 +6321,13 @@ sympy = "*"
[[package]]
name = "openai"
version = "1.35.13"
version = "1.35.14"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
{file = "openai-1.35.13-py3-none-any.whl", hash = "sha256:36ec3e93e0d1f243f69be85c89b9221a471c3e450dfd9df16c9829e3cdf63e60"},
{file = "openai-1.35.13.tar.gz", hash = "sha256:c684f3945608baf7d2dcc0ef3ee6f3e27e4c66f21076df0b47be45d57e6ae6e4"},
{file = "openai-1.35.14-py3-none-any.whl", hash = "sha256:adadf8c176e0b8c47ad782ed45dc20ef46438ee1f02c7103c4155cff79c8f68b"},
{file = "openai-1.35.14.tar.gz", hash = "sha256:394ba1dfd12ecec1d634c50e512d24ff1858bbc2674ffcce309b822785a058de"},
]
[package.dependencies]
@ -6628,47 +6679,47 @@ files = [
[[package]]
name = "pandas"
version = "2.2.0"
version = "2.2.2"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
python-versions = ">=3.9"
files = [
{file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"},
{file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"},
{file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"},
{file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"},
{file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"},
{file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"},
{file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"},
{file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"},
{file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"},
{file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"},
{file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"},
{file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"},
{file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"},
{file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"},
{file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"},
{file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"},
{file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"},
{file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"},
{file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"},
{file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"},
{file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"},
{file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"},
{file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"},
{file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"},
{file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"},
{file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"},
{file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"},
{file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"},
{file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"},
{file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
{file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
{file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
{file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"},
{file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"},
{file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"},
{file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"},
{file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"},
{file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"},
{file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"},
{file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"},
{file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"},
{file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"},
{file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"},
{file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"},
{file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"},
{file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"},
{file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
{file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
{file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"},
{file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"},
{file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"},
{file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"},
{file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"},
{file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"},
]
[package.dependencies]
numpy = [
{version = ">=1.22.4,<2", markers = "python_version < \"3.11\""},
{version = ">=1.23.2,<2", markers = "python_version == \"3.11\""},
{version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""},
{version = ">=1.22.4", markers = "python_version < \"3.11\""},
{version = ">=1.23.2", markers = "python_version == \"3.11\""},
{version = ">=1.26.0", markers = "python_version >= \"3.12\""},
]
python-dateutil = ">=2.8.2"
pytz = ">=2020.1"
@ -6693,6 +6744,7 @@ parquet = ["pyarrow (>=10.0.1)"]
performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"]
plot = ["matplotlib (>=3.6.3)"]
postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"]
pyarrow = ["pyarrow (>=10.0.1)"]
spss = ["pyreadstat (>=1.2.0)"]
sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"]
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
@ -6975,18 +7027,18 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p
[[package]]
name = "postgrest"
version = "0.16.8"
version = "0.16.9"
description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "postgrest-0.16.8-py3-none-any.whl", hash = "sha256:c353a24452f51ab9760cf2b884c4b7457a2653ff36444e66b12615bc4cc8e23e"},
{file = "postgrest-0.16.8.tar.gz", hash = "sha256:7b3802a514dc1e0fc8b5bbdeb2c99af35a0bd910e4ddb17855ca4e3422350c84"},
{file = "postgrest-0.16.9-py3-none-any.whl", hash = "sha256:8a20a256e86c4181575d271ddd77152b305313890ecc7d2df5b25aeb330bd9a4"},
{file = "postgrest-0.16.9.tar.gz", hash = "sha256:fee42e89d265e904e823d9602803980016128ff7dde0ce1e869014cf1fd2c19d"},
]
[package.dependencies]
deprecation = ">=2.1.0,<3.0.0"
httpx = ">=0.24,<0.28"
httpx = {version = ">=0.24,<0.28", extras = ["http2"]}
pydantic = ">=1.9,<3.0"
strenum = ">=0.4.9,<0.5.0"
@ -9765,13 +9817,13 @@ typing = ["mypy (>=1.4)", "rich", "twisted"]
[[package]]
name = "supabase"
version = "2.5.1"
version = "2.5.2"
description = "Supabase client for Python."
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "supabase-2.5.1-py3-none-any.whl", hash = "sha256:74a1f24f04fede1967ef084b50dea688228f7b10eb2f9d73350fe2251a865188"},
{file = "supabase-2.5.1.tar.gz", hash = "sha256:c50e0eba5b03de3abd5ac0f887957ca43558ba44c4d17bb44e73ec454b41734c"},
{file = "supabase-2.5.2-py3-none-any.whl", hash = "sha256:321b2278f6f71bdb2ac13f3f36e565f4045a3c099d44aa4f8454ef12c743b445"},
{file = "supabase-2.5.2.tar.gz", hash = "sha256:f97e06d9b821f972e50cfe188e49b2a657cd817d48a5b4604304ebd61b7b87ef"},
]
[package.dependencies]
@ -11685,4 +11737,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
content-hash = "3561ffbe8cc249b7ca946135d550991879ce9767e607f4a474f80119d2ffd5b2"
content-hash = "afe38792a9e202ab55558389026b64beb95d1437b2fd4bccfbb300cbc7ad0677"

View file

@ -99,6 +99,7 @@ langchain-nvidia-ai-endpoints = "^0.1.2"
langchain-google-calendar-tools = "^0.0.1"
langchain-milvus = "^0.1.1"
crewai = {extras = ["tools"], version = "^0.36.0"}
langwatch = "^0.1.7"
[tool.poetry.group.dev.dependencies]

View file

@ -221,7 +221,7 @@ async def build_vertex(
outputs = {output_label: OutputLog(message=message, type="error")}
result_data_response = ResultDataResponse(results={}, outputs=outputs)
artifacts = {}
background_tasks.add_task(graph.end_all_traces, error=message["errorMessage"])
background_tasks.add_task(graph.end_all_traces, error=exc)
# If there's an error building the vertex
# we need to clear the cache
await chat_service.clear_cache(flow_id_str)

View file

@ -123,10 +123,9 @@ class Component(CustomComponent):
async def _build_with_tracing(self):
inputs = self.get_trace_as_inputs()
metadata = self.get_trace_as_metadata()
async with self.tracing_service.trace_context(self.trace_name, self.trace_type, inputs, metadata):
async with self.tracing_service.trace_context(self, self.trace_name, inputs, metadata):
_results, _artifacts = await self._build_results()
trace_name = self.tracing_service.run_name
self.tracing_service.set_outputs(trace_name, _results)
self.tracing_service.set_outputs(self.trace_name, _results)
return _results, _artifacts

View file

@ -1,5 +1,4 @@
import asyncio
import traceback
import uuid
from collections import defaultdict, deque
from datetime import datetime, timezone
@ -257,7 +256,7 @@ class Graph:
async def initialize_run(self):
await self.tracing_service.initialize_tracers()
async def end_all_traces(self, outputs: dict[str, Any] | None = None, error: str | None = None):
async def end_all_traces(self, outputs: dict[str, Any] | None = None, error: Exception | None = None):
if not self.tracing_service:
return
self._end_time = datetime.now(timezone.utc)
@ -354,9 +353,7 @@ class Graph:
await self.process(start_component_id=start_component_id, fallback_to_env_vars=fallback_to_env_vars)
self.increment_run_count()
except Exception as exc:
logger.exception(exc)
tb = traceback.format_exc()
asyncio.create_task(self.end_all_traces(error=f"{exc.__class__.__name__}: {exc}\n\n{tb}"))
asyncio.create_task(self.end_all_traces(error=exc))
raise ValueError(f"Error running graph: {exc}") from exc
finally:
asyncio.create_task(self.end_all_traces())

View file

@ -1,6 +1,10 @@
from abc import ABC, abstractmethod
from typing import Any, Dict
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from langflow.services.tracing.schema import Log
if TYPE_CHECKING:
from langflow.graph.vertex.base import Vertex
class BaseTracer(ABC):
@ -9,17 +13,30 @@ class BaseTracer(ABC):
raise NotImplementedError
@abstractmethod
def ready(self):
def ready(self) -> bool:
raise NotImplementedError
@abstractmethod
def add_trace(
self, trace_name: str, trace_type: str, inputs: Dict[str, Any], metadata: Dict[str, Any] | None = None
self,
trace_id: str,
trace_name: str,
trace_type: str,
inputs: Dict[str, Any],
metadata: Dict[str, Any] | None = None,
vertex: Optional["Vertex"] = None,
):
raise NotImplementedError
@abstractmethod
def end_trace(self, trace_name: str, outputs: Dict[str, Any] | None = None, error: str | None = None):
def end_trace(
self,
trace_id: str,
trace_name: str,
outputs: Dict[str, Any] | None = None,
error: Exception | None = None,
logs: list[Log | dict] = [],
):
raise NotImplementedError
@abstractmethod
@ -27,7 +44,7 @@ class BaseTracer(ABC):
self,
inputs: dict[str, Any],
outputs: Dict[str, Any],
error: str | None = None,
error: Exception | None = None,
metadata: dict[str, Any] | None = None,
):
raise NotImplementedError

View file

@ -5,7 +5,7 @@ import types
from collections import defaultdict
from contextlib import asynccontextmanager
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Any, Dict, Optional
from typing import TYPE_CHECKING, Any, Dict, Optional, cast
from uuid import UUID
from loguru import logger
@ -18,6 +18,10 @@ from langflow.services.tracing.schema import Log
if TYPE_CHECKING:
from langflow.services.monitor.service import MonitorService
from langflow.services.settings.service import SettingsService
from langflow.custom.custom_component.component import Component
from langflow.graph.vertex.base import Vertex
from langwatch.tracer import ContextSpan
from langwatch.types import SpanTypes
class TracingService(Service):
@ -33,7 +37,7 @@ class TracingService(Service):
self.run_name: str | None = None
self.run_id: UUID | None = None
self.project_name = None
self._tracers: dict[str, LangSmithTracer] = {}
self._tracers: dict[str, BaseTracer] = {}
self._logs: dict[str, list[Log | dict[Any, Any]]] = defaultdict(list)
self.logs_queue: asyncio.Queue = asyncio.Queue()
self.running = False
@ -88,6 +92,7 @@ class TracingService(Service):
try:
await self.start()
self._initialize_langsmith_tracer()
self._initialize_langwatch_tracer()
except Exception as e:
logger.debug(f"Error initializing tracers: {e}")
@ -101,6 +106,19 @@ class TracingService(Service):
trace_id=self.run_id,
)
def _initialize_langwatch_tracer(self):
    """Register (or refresh) the LangWatch tracer when LANGWATCH_API_KEY is set.

    A new LangWatchTracer is created when none has been registered yet, or
    when the registered one belongs to a previous run (its trace_id differs
    from the current run_id).
    """
    # Bail out early when no API key is configured. The original condition
    # `A and B or C` parsed as `(A and B) or C`, so with the key unset and
    # no "langwatch" entry present, the `or` clause performed a dict lookup
    # and raised KeyError — and a stale tracer could be recreated without a
    # key. Guarding on the key first restores the intended logic.
    if not os.getenv("LANGWATCH_API_KEY"):
        return
    existing = self._tracers.get("langwatch")
    if existing is not None and existing.trace_id == self.run_id:  # type: ignore
        return
    self._tracers["langwatch"] = LangWatchTracer(
        trace_name=self.run_name,
        trace_type="chain",
        project_name=self.project_name,
        trace_id=self.run_id,
    )
def set_run_name(self, name: str):
self.run_name = name
@ -108,39 +126,50 @@ class TracingService(Service):
self.run_id = run_id
def _start_traces(
self, trace_name: str, trace_type: str, inputs: Dict[str, Any], metadata: Optional[Dict[str, Any]] = None
self,
trace_id: str,
trace_name: str,
trace_type: str,
inputs: Dict[str, Any],
metadata: Optional[Dict[str, Any]] = None,
vertex: Optional["Vertex"] = None,
):
inputs = self._cleanup_inputs(inputs)
self.inputs[trace_name] = inputs
self.inputs_metadata[trace_name] = metadata or {}
for tracer in self._tracers.values():
if not tracer.ready:
if not tracer.ready: # type: ignore
continue
try:
tracer.add_trace(trace_name, trace_type, inputs, metadata)
tracer.add_trace(trace_id, trace_name, trace_type, inputs, metadata, vertex)
except Exception as e:
logger.error(f"Error starting trace {trace_name}: {e}")
def _end_traces(self, trace_name: str, error: str | None = None):
def _end_traces(self, trace_id: str, trace_name: str, error: Exception | None = None):
for tracer in self._tracers.values():
if not tracer.ready:
if not tracer.ready: # type: ignore
continue
try:
tracer.end_trace(
trace_name=trace_name, outputs=self.outputs[trace_name], error=error, logs=self._logs[trace_name]
trace_id=trace_id,
trace_name=trace_name,
outputs=self.outputs[trace_name],
error=error,
logs=self._logs[trace_name],
)
except Exception as e:
logger.error(f"Error ending trace {trace_name}: {e}")
def _end_all_traces(self, outputs: dict, error: str | None = None):
def _end_all_traces(self, outputs: dict, error: Exception | None = None):
for tracer in self._tracers.values():
if not tracer.ready:
if not tracer.ready: # type: ignore
continue
try:
tracer.end(self.inputs, outputs=self.outputs, error=error, metadata=outputs)
except Exception as e:
logger.error(f"Error ending all traces: {e}")
async def end(self, outputs: dict, error: str | None = None):
async def end(self, outputs: dict, error: Exception | None = None):
self._end_all_traces(outputs, error)
self._reset_io()
await self.stop()
@ -151,27 +180,48 @@ class TracingService(Service):
@asynccontextmanager
async def trace_context(
self,
component: "Component",
trace_name: str,
trace_type: str,
inputs: Dict[str, Any],
metadata: Optional[Dict[str, Any]] = None,
):
self._start_traces(trace_name, trace_type, inputs, metadata)
trace_id = trace_name
if component.vertex:
trace_id = component.vertex.id
trace_type = component.trace_type
self._start_traces(
trace_id,
trace_name,
trace_type,
self._cleanup_inputs(inputs),
metadata,
component.vertex,
)
try:
yield self
except Exception as e:
tb = traceback.format_exc()
error_message = f"{e.__class__.__name__}: {e}\n\n{tb}"
self._end_traces(trace_name, error_message)
self._end_traces(trace_id, trace_name, e)
raise e
finally:
self._end_traces(trace_name, None)
self._end_traces(trace_id, trace_name, None)
self._reset_io()
def set_outputs(self, trace_name: str, outputs: Dict[str, Any], output_metadata: Dict[str, Any] | None = None):
def set_outputs(
self,
trace_name: str,
outputs: Dict[str, Any],
output_metadata: Dict[str, Any] | None = None,
):
self.outputs[trace_name] |= outputs or {}
self.outputs_metadata[trace_name] |= output_metadata or {}
def _cleanup_inputs(self, inputs: Dict[str, Any]):
inputs = inputs.copy()
for key in inputs.keys():
if "api_key" in key:
inputs[key] = "*****" # avoid logging api_keys for security reasons
return inputs
class LangSmithTracer(BaseTracer):
def __init__(self, trace_name: str, trace_type: str, project_name: str, trace_id: UUID):
@ -211,7 +261,13 @@ class LangSmithTracer(BaseTracer):
return True
def add_trace(
self, trace_name: str, trace_type: str, inputs: Dict[str, Any], metadata: Dict[str, Any] | None = None
self,
trace_id: str,
trace_name: str,
trace_type: str,
inputs: Dict[str, Any],
metadata: Dict[str, Any] | None = None,
vertex: Optional["Vertex"] = None,
):
if not self._ready:
return
@ -259,9 +315,10 @@ class LangSmithTracer(BaseTracer):
def end_trace(
self,
trace_id: str,
trace_name: str,
outputs: Dict[str, Any] | None = None,
error: str | None = None,
error: Exception | None = None,
logs: list[Log | dict] = [],
):
child = self._children[trace_name]
@ -273,23 +330,196 @@ class LangSmithTracer(BaseTracer):
if logs:
child.add_metadata(self._convert_to_langchain_types({"logs": {log.get("name"): log for log in logs}}))
child.add_metadata(self._convert_to_langchain_types({"outputs": raw_outputs}))
child.end(outputs=processed_outputs, error=error)
child.end(outputs=processed_outputs, error=self._error_to_string(error))
if error:
child.patch()
else:
child.post()
self._child_link[trace_name] = child.get_url()
def _error_to_string(self, error: Optional[Exception]):
error_message = None
if error:
string_stacktrace = traceback.format_exception(error)
error_message = f"{error.__class__.__name__}: {error}\n\n{string_stacktrace}"
return error_message
def end(
self,
inputs: dict[str, Any],
outputs: Dict[str, Any],
error: str | None = None,
error: Exception | None = None,
metadata: dict[str, Any] | None = None,
):
self._run_tree.add_metadata({"inputs": inputs})
if metadata:
self._run_tree.add_metadata(metadata)
self._run_tree.end(outputs=outputs, error=error)
self._run_tree.end(outputs=outputs, error=self._error_to_string(error))
self._run_tree.post()
self._run_link = self._run_tree.get_url()
class LangWatchTracer(BaseTracer):
    """Tracer that reports Langflow runs to LangWatch for LLM observability.

    Each flow run becomes one LangWatch trace; every component execution is
    recorded as a span nested under the trace's root span. If the ``langwatch``
    package is missing or setup fails, the tracer marks itself not ready and
    the methods are expected not to be called (degrades to a no-op).
    """

    # Flow id parsed from the trailing " - <flow_id>" segment of trace_name.
    flow_id: str

    def __init__(self, trace_name: str, trace_type: str, project_name: str, trace_id: UUID):
        """Initialize the LangWatch client, the trace and its root span.

        Args:
            trace_name: Display name in the form "<name> - <flow_id>".
            trace_type: Langflow trace type; mapped to a LangWatch span type.
            project_name: Langflow project name (stored; not sent here).
            trace_id: Unique run id; reused as the LangWatch trace id.
        """
        self.trace_name = trace_name
        self.trace_type = trace_type
        self.project_name = project_name
        self.trace_id = trace_id
        # The last " - "-separated segment of trace_name is the flow id.
        self.flow_id = trace_name.split(" - ")[-1]
        try:
            self._ready = self.setup_langwatch()
            # import after setting up langwatch so we are sure to be available
            import nanoid  # type: ignore

            self.trace = self._client.trace(
                trace_id=str(self.trace_id),
            )
            # Maps Langflow trace ids (vertex ids) to their open LangWatch spans.
            self.spans: dict[str, "ContextSpan"] = {}

            # Strip the trailing flow id from the display name.
            name_without_id = " - ".join(trace_name.split(" - ")[0:-1])
            self.trace.root_span.update(
                span_id=f"{self.flow_id}-{nanoid.generate(size=6)}",  # nanoid to make the span_id globally unique, which is required for LangWatch for now
                name=name_without_id,
                type=self._convert_trace_type(trace_type),
            )
        except Exception as e:
            # Tracing must never break a flow run; degrade to a disabled tracer.
            logger.debug(f"Error setting up LangWatch tracer: {e}")
            self._ready = False

    @property
    def ready(self):
        """Whether the tracer initialized successfully and can record spans."""
        return self._ready

    def setup_langwatch(self):
        """Import the langwatch SDK and keep a module handle; False if unavailable."""
        try:
            import langwatch

            self._client = langwatch
        except ImportError:
            logger.error("Could not import langwatch. Please install it with `pip install langwatch`.")
            return False
        return True

    def _convert_trace_type(self, trace_type: str):
        """Map a Langflow trace type to a LangWatch SpanTypes value.

        Unknown types fall back to the generic "span" type.
        """
        trace_type_: "SpanTypes" = (
            cast("SpanTypes", trace_type)
            if trace_type in ["span", "llm", "chain", "tool", "agent", "guardrail", "rag"]
            else "span"
        )
        return trace_type_

    def add_trace(
        self,
        trace_id: str,
        trace_name: str,
        trace_type: str,
        inputs: Dict[str, Any],
        metadata: Dict[str, Any] | None = None,
        vertex: Optional["Vertex"] = None,
    ):
        """Open a LangWatch span for one component execution.

        The span is parented to the last known span of one of the vertex's
        incoming edges' source vertices when available, otherwise to the
        trace's root span.
        """
        import nanoid

        # If user is not using session_id, then it becomes the same as flow_id, but
        # we don't want to have an infinite thread with all the flow messages
        if "session_id" in inputs and inputs["session_id"] != self.flow_id:
            self.trace.update(metadata=(self.trace.metadata or {}) | {"thread_id": inputs["session_id"]})

        # Drop the trailing " (<id>)" suffix from the component display name.
        name_without_id = " (".join(trace_name.split(" (")[0:-1])
        trace_type_ = self._convert_trace_type(trace_type)
        self.spans[trace_id] = self.trace.span(
            span_id=f"{trace_id}-{nanoid.generate(size=6)}",  # Add a nanoid to make the span_id globally unique, which is required for LangWatch for now
            name=name_without_id,
            type=trace_type_,
            parent=(
                # Pick the most recently opened span among incoming-edge sources.
                [span for key, span in self.spans.items() for edge in vertex.incoming_edges if key == edge.source_id][
                    -1
                ]
                if vertex and len(vertex.incoming_edges) > 0
                else self.trace.root_span
            ),
            input=self._convert_to_langwatch_types(inputs),
        )

        # Record the model name so LangWatch can attribute LLM spans correctly.
        if trace_type_ == "llm" and "model_name" in inputs:
            self.spans[trace_id].update(model=inputs["model_name"])

    def end_trace(
        self,
        trace_id: str,
        trace_name: str,
        outputs: Dict[str, Any] | None = None,
        error: Exception | None = None,
        logs: list[Log | dict] = [],  # NOTE(review): mutable default; never mutated here, but a tuple default would be safer
    ):
        """Close the span opened for ``trace_id``, attaching outputs or the error.

        ``trace_name`` and ``logs`` are accepted for interface compatibility
        but are not forwarded to LangWatch here.
        """
        if self.spans.get(trace_id):
            # Workaround for when model is used just as a component not actually called as an LLM,
            # to prevent LangWatch from calculating the cost based on it when it was in fact never called
            if (
                self.spans[trace_id].type == "llm"
                and outputs
                and "model_output" in outputs
                and "text_output" not in outputs
            ):
                self.spans[trace_id].update(metrics={"prompt_tokens": 0, "completion_tokens": 0})

            self.spans[trace_id].end(output=self._convert_to_langwatch_types(outputs), error=error)

    def end(
        self,
        inputs: dict[str, Any],
        outputs: Dict[str, Any],
        error: Exception | None = None,
        metadata: dict[str, Any] | None = None,
    ):
        """Finish the whole trace: close the root span and flush spans to LangWatch."""
        self.trace.root_span.end(
            input=self._convert_to_langwatch_types(inputs),
            output=self._convert_to_langwatch_types(outputs),
            error=error,
        )
        if metadata and "flow_name" in metadata:
            # Surface the flow name as a LangWatch label for filtering in the UI.
            self.trace.update(metadata=(self.trace.metadata or {}) | {"labels": [f"Flow: {metadata['flow_name']}"]})
        # Spans are sent asynchronously so ending the trace does not block the flow.
        self.trace.deferred_send_spans()

    def _convert_to_langwatch_types(self, io_dict: Optional[Dict[str, Any]]):
        """Convert a Langflow IO dict into plain values LangWatch can serialize.

        Returns None when ``io_dict`` is None.
        """
        from langwatch.utils import autoconvert_typed_values

        if io_dict is None:
            return None
        converted = {}
        for key, value in io_dict.items():
            converted[key] = self._convert_to_langwatch_type(value)
        return autoconvert_typed_values(converted)

    def _convert_to_langwatch_type(self, value):
        """Recursively convert one value (dict/list/Message/Data) for LangWatch.

        A Message carrying a prompt is turned into chat messages when the
        prompt has no remaining input variables; otherwise the serialized
        prompt is used. Other Messages become chat messages (when they have a
        sender) or LangChain documents; Data objects become documents.
        """
        from langflow.schema.message import Message, BaseMessage
        from langwatch.langchain import (
            langchain_messages_to_chat_messages,
            langchain_message_to_chat_message,
        )

        if isinstance(value, dict):
            # NOTE(review): converts the dict's values in place (mutates the
            # argument); iterate over a copy of the items so reassignment is safe.
            for key, _value in value.copy().items():
                _value = self._convert_to_langwatch_type(_value)
                value[key] = _value
        elif isinstance(value, list):
            value = [self._convert_to_langwatch_type(v) for v in value]
        elif isinstance(value, Message):
            if "prompt" in value:
                prompt = value.load_lc_prompt()
                if len(prompt.input_variables) == 0 and all(isinstance(m, BaseMessage) for m in prompt.messages):
                    value = langchain_messages_to_chat_messages([cast(list[BaseMessage], prompt.messages)])
                else:
                    value = cast(dict, value.load_lc_prompt())
            elif value.sender:
                value = langchain_message_to_chat_message(value.to_lc_message())
            else:
                value = cast(dict, value.to_lc_document())
        elif isinstance(value, Data):
            value = cast(dict, value.to_lc_document())
        return value

View file

@ -676,13 +676,13 @@ all = ["pycocotools (==2.0.6)"]
[[package]]
name = "clarifai-grpc"
version = "10.6.3"
version = "10.6.4"
description = "Clarifai gRPC API Client"
optional = false
python-versions = ">=3.8"
files = [
{file = "clarifai_grpc-10.6.3-py3-none-any.whl", hash = "sha256:304a8123dfe6fb41953026e0e4b800bffc3998a75408f51f3ab2a6eacfb1781e"},
{file = "clarifai_grpc-10.6.3.tar.gz", hash = "sha256:6be24d21f124678384902e693eb9b5d2e5c57aeef57c098e3a2b94e40a79c198"},
{file = "clarifai_grpc-10.6.4-py3-none-any.whl", hash = "sha256:9fb8c94e6ede5e1005010643f516aa7c1dd3c59ab9282e03ba4932fd929d7584"},
{file = "clarifai_grpc-10.6.4.tar.gz", hash = "sha256:84d92b10fc32c17ecf0c0fc30ec4c9960a6a9ee701f2829f0c372892ae761d14"},
]
[package.dependencies]
@ -707,13 +707,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "cohere"
version = "5.5.8"
version = "5.6.1"
description = ""
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "cohere-5.5.8-py3-none-any.whl", hash = "sha256:e1ed84b90eadd13c6a68ee28e378a0bb955f8945eadc6eb7ee126b3399cafd54"},
{file = "cohere-5.5.8.tar.gz", hash = "sha256:84ce7666ff8fbdf4f41fb5f6ca452ab2639a514bc88967a2854a9b1b820d6ea0"},
{file = "cohere-5.6.1-py3-none-any.whl", hash = "sha256:1c8bcd39a54622d64b83cafb865f102cd2565ce091b0856fd5ce11bf7169109a"},
{file = "cohere-5.6.1.tar.gz", hash = "sha256:5d7efda64f0e512d4cc35aa04b17a6f74b3d8c175a99f2797991a7f31dfac349"},
]
[package.dependencies]
@ -767,6 +767,17 @@ files = [
{file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"},
]
[[package]]
name = "coolname"
version = "2.2.0"
description = "Random name and slug generator"
optional = false
python-versions = "*"
files = [
{file = "coolname-2.2.0-py2.py3-none-any.whl", hash = "sha256:4d1563186cfaf71b394d5df4c744f8c41303b6846413645e31d31915cdeb13e8"},
{file = "coolname-2.2.0.tar.gz", hash = "sha256:6c5d5731759104479e7ca195a9b64f7900ac5bead40183c09323c7d0be9e75c7"},
]
[[package]]
name = "crewai"
version = "0.36.1"
@ -866,6 +877,17 @@ files = [
marshmallow = ">=3.18.0,<4.0.0"
typing-inspect = ">=0.4.0,<1"
[[package]]
name = "decorator"
version = "5.1.1"
description = "Decorators for Humans"
optional = false
python-versions = ">=3.5"
files = [
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "deprecated"
version = "1.2.14"
@ -1999,13 +2021,13 @@ files = [
[[package]]
name = "huggingface-hub"
version = "0.23.4"
version = "0.23.5"
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
optional = false
python-versions = ">=3.8.0"
files = [
{file = "huggingface_hub-0.23.4-py3-none-any.whl", hash = "sha256:3a0b957aa87150addf0cc7bd71b4d954b78e749850e1e7fb29ebbd2db64ca037"},
{file = "huggingface_hub-0.23.4.tar.gz", hash = "sha256:35d99016433900e44ae7efe1c209164a5a81dbbcd53a52f99c281dcd7ce22431"},
{file = "huggingface_hub-0.23.5-py3-none-any.whl", hash = "sha256:d7a7d337615e11a45cc14a0ce5a605db6b038dc24af42866f731684825226e90"},
{file = "huggingface_hub-0.23.5.tar.gz", hash = "sha256:67a9caba79b71235be3752852ca27da86bd54311d2424ca8afdb8dda056edf98"},
]
[package.dependencies]
@ -2552,13 +2574,13 @@ types-requests = ">=2.31.0.2,<3.0.0.0"
[[package]]
name = "langsmith"
version = "0.1.85"
version = "0.1.86"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langsmith-0.1.85-py3-none-any.whl", hash = "sha256:c1f94384f10cea96f7b4d33fd3db7ec180c03c7468877d50846f881d2017ff94"},
{file = "langsmith-0.1.85.tar.gz", hash = "sha256:acff31f9e53efa48586cf8e32f65625a335c74d7c4fa306d1655ac18452296f6"},
{file = "langsmith-0.1.86-py3-none-any.whl", hash = "sha256:55ed80cc6e98f9761f9b3ec3c49e01f6745d13e40bef80d9f831acabfd9a8a1e"},
{file = "langsmith-0.1.86.tar.gz", hash = "sha256:2e66577817253327b99b727588c3173fbba217fe0ca07ac6b7cdd23fc4894104"},
]
[package.dependencies]
@ -2569,6 +2591,34 @@ pydantic = [
]
requests = ">=2,<3"
[[package]]
name = "langwatch"
version = "0.1.8"
description = "Python SDK for LangWatch for monitoring your LLMs"
optional = false
python-versions = "<4.0,>=3.9"
files = [
{file = "langwatch-0.1.8-py3-none-any.whl", hash = "sha256:e6e7ffe2f3cd61c477e8ff6de4ad3d1e06f63b0c8f02880d68293fc126ec2bf7"},
{file = "langwatch-0.1.8.tar.gz", hash = "sha256:2ccc3a741ef9bf493946264ab8fff5cb33845e51d4426136218d62b1a4cbd26d"},
]
[package.dependencies]
coolname = ">=2.2.0,<3.0.0"
deprecated = ">=1.2.14,<2.0.0"
httpx = ">=0.27.0,<0.28.0"
nanoid = ">=2.0.0,<3.0.0"
pandas = ">=2.2.2,<3.0.0"
pydantic = ">=2.5.2"
requests = ">=2.31.0,<3.0.0"
retry = ">=0.9.2,<0.10.0"
tqdm = ">=4.66.2,<5.0.0"
[package.extras]
dspy = ["dspy-ai (>=2.4.12,<3.0.0)"]
langchain = ["langchain (>=0.2.0,<0.3.0)"]
litellm = ["litellm (>=1.40.15,<2.0.0)"]
openai = ["openai (>=1.3.7,<2.0.0)"]
[[package]]
name = "loguru"
version = "0.7.2"
@ -3157,6 +3207,17 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "nanoid"
version = "2.0.0"
description = "A tiny, secure, URL-friendly, unique string ID generator for Python"
optional = false
python-versions = "*"
files = [
{file = "nanoid-2.0.0-py3-none-any.whl", hash = "sha256:90aefa650e328cffb0893bbd4c236cfd44c48bc1f2d0b525ecc53c3187b653bb"},
{file = "nanoid-2.0.0.tar.gz", hash = "sha256:5a80cad5e9c6e9ae3a41fa2fb34ae189f7cb420b2a5d8f82bd9d23466e4efa68"},
]
[[package]]
name = "nest-asyncio"
version = "1.6.0"
@ -3273,13 +3334,13 @@ sympy = "*"
[[package]]
name = "openai"
version = "1.35.13"
version = "1.35.14"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
{file = "openai-1.35.13-py3-none-any.whl", hash = "sha256:36ec3e93e0d1f243f69be85c89b9221a471c3e450dfd9df16c9829e3cdf63e60"},
{file = "openai-1.35.13.tar.gz", hash = "sha256:c684f3945608baf7d2dcc0ef3ee6f3e27e4c66f21076df0b47be45d57e6ae6e4"},
{file = "openai-1.35.14-py3-none-any.whl", hash = "sha256:adadf8c176e0b8c47ad782ed45dc20ef46438ee1f02c7103c4155cff79c8f68b"},
{file = "openai-1.35.14.tar.gz", hash = "sha256:394ba1dfd12ecec1d634c50e512d24ff1858bbc2674ffcce309b822785a058de"},
]
[package.dependencies]
@ -3576,47 +3637,47 @@ files = [
[[package]]
name = "pandas"
version = "2.2.0"
version = "2.2.2"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
python-versions = ">=3.9"
files = [
{file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"},
{file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"},
{file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"},
{file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"},
{file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"},
{file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"},
{file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"},
{file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"},
{file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"},
{file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"},
{file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"},
{file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"},
{file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"},
{file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"},
{file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"},
{file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"},
{file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"},
{file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"},
{file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"},
{file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"},
{file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"},
{file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"},
{file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"},
{file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"},
{file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"},
{file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"},
{file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"},
{file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"},
{file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"},
{file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
{file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
{file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
{file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"},
{file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"},
{file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"},
{file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"},
{file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"},
{file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"},
{file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"},
{file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"},
{file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"},
{file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"},
{file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"},
{file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"},
{file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"},
{file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"},
{file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
{file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
{file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"},
{file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"},
{file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"},
{file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"},
{file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"},
{file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"},
]
[package.dependencies]
numpy = [
{version = ">=1.22.4,<2", markers = "python_version < \"3.11\""},
{version = ">=1.23.2,<2", markers = "python_version == \"3.11\""},
{version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""},
{version = ">=1.22.4", markers = "python_version < \"3.11\""},
{version = ">=1.23.2", markers = "python_version == \"3.11\""},
{version = ">=1.26.0", markers = "python_version >= \"3.12\""},
]
python-dateutil = ">=2.8.2"
pytz = ">=2020.1"
@ -3641,6 +3702,7 @@ parquet = ["pyarrow (>=10.0.1)"]
performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"]
plot = ["matplotlib (>=3.6.3)"]
postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"]
pyarrow = ["pyarrow (>=10.0.1)"]
spss = ["pyreadstat (>=1.2.0)"]
sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"]
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
@ -3954,6 +4016,17 @@ all = ["apache-bookkeeper-client (>=4.16.1)", "fastavro (>=1.9.2)", "grpcio (>=1
avro = ["fastavro (>=1.9.2)"]
functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"]
[[package]]
name = "py"
version = "1.11.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
[[package]]
name = "pyasn1"
version = "0.6.0"
@ -4624,6 +4697,21 @@ requests = ">=2.0.0"
[package.extras]
rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
[[package]]
name = "retry"
version = "0.9.2"
description = "Easy to use retry decorator."
optional = false
python-versions = "*"
files = [
{file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"},
{file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"},
]
[package.dependencies]
decorator = ">=3.4.2"
py = ">=1.4.26,<2.0.0"
[[package]]
name = "rich"
version = "13.7.1"
@ -5822,4 +5910,4 @@ local = []
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
content-hash = "3bae2ded5bb5a16aadb39c90e5e8d5e4ef072c0b112fd62f5f95efabbfbc594d"
content-hash = "8e59c93824c2f9ddeb114af3bc07e0a8ab351d1fa19c20c40ca8d110face8452"

View file

@ -51,7 +51,7 @@ bcrypt = "4.0.1"
pillow = "^10.2.0"
docstring-parser = "^0.16"
python-jose = "^3.3.0"
pandas = "2.2.0"
pandas = "2.2.2"
multiprocess = "^0.70.14"
duckdb = "^1.0.0"
python-docx = "^1.1.0"
@ -73,6 +73,7 @@ prometheus-client = "^0.20.0"
aiofiles = "^24.1.0"
crewai = "^0.36.0"
setuptools = ">=70"
langwatch = "^0.1.3"
[tool.poetry.extras]
deploy = ["celery", "redis", "flower"]