diff --git a/README.md b/README.md
index 53ce90086..72bb7fcc6 100644
--- a/README.md
+++ b/README.md
@@ -1,46 +1,27 @@
-
+
# ⛓️ Langflow
-~ An effortless way to experiment and prototype [LangChain](https://github.com/hwchase17/langchain) pipelines ~
+
Discover a simpler & smarter way to build around Foundation Models
-
-
-
-
-
-
-
-
+[](https://github.com/logspace-ai/langflow/releases)
+[](https://github.com/logspace-ai/langflow/graphs/contributors)
+[](https://github.com/logspace-ai/langflow/commits)
+[](https://github.com/logspace-ai/langflow/issues)
+[](https://github.com/logspace-ai/langflow)
+[](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/logspace-ai/langflow)
+[](https://opensource.org/licenses/MIT)
+[](https://star-history.com/#logspace-ai/langflow)
+[](https://github.com/logspace-ai/langflow/fork)
+[](https://twitter.com/langflow_ai)
+[](https://discord.com/invite/EqksyE2EX9)
+[](https://huggingface.co/spaces/Logspace/Langflow)
+[](https://codespaces.new/logspace-ai/langflow)
-
-
-
-
+The easiest way to create and customize your flow
-
-
-
-
-
-# Table of Contents
-
-- [⛓️ Langflow](#️-langflow)
-- [Table of Contents](#table-of-contents)
-- [📦 Installation](#-installation)
- - [Locally](#locally)
- - [HuggingFace Spaces](#huggingface-spaces)
-- [🖥️ Command Line Interface (CLI)](#️-command-line-interface-cli)
- - [Usage](#usage)
- - [Environment Variables](#environment-variables)
-- [Deployment](#deployment)
- - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
- - [Deploy on Railway](#deploy-on-railway)
- - [Deploy on Render](#deploy-on-render)
-- [🎨 Creating Flows](#-creating-flows)
-- [👋 Contributing](#-contributing)
-- [📄 License](#-license)
+
# 📦 Installation
@@ -65,7 +46,7 @@ This will install the following dependencies:
- [llama-cpp-python](https://github.com/abetlen/llama-cpp-python)
- [sentence-transformers](https://github.com/UKPLab/sentence-transformers)
-You can still use models from projects like LocalAI
+You can still use models from projects like LocalAI, Ollama, LM Studio, Jan, and others.
Next, run:
@@ -117,7 +98,7 @@ Each option is detailed below:
- `--backend-only`: This parameter, with a default value of `False`, allows running only the backend server without the frontend. It can also be set using the `LANGFLOW_BACKEND_ONLY` environment variable.
- `--store`: This parameter, with a default value of `True`, enables the store features, use `--no-store` to deactivate it. It can be configured using the `LANGFLOW_STORE` environment variable.
-These parameters are important for users who need to customize the behavior of Langflow, especially in development or specialized deployment scenarios. You may want to update the documentation to include these parameters for completeness and clarity.
+These parameters are important for users who need to customize the behavior of Langflow, especially in development or specialized deployment scenarios.
### Environment Variables
@@ -147,19 +128,19 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
# 🎨 Creating Flows
-Creating flows with Langflow is easy. Simply drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://python.langchain.com/docs/integrations/components) to choose from, including LLMs, prompt serializers, agents, and chains.
+Creating flows with Langflow is easy. Simply drag components from the sidebar onto the canvas and connect them to start building your application.
-Explore by editing prompt parameters, link chains and agents, track an agent's thought process, and export your flow.
+Explore by editing prompt parameters, grouping components into a single high-level component, and building your own Custom Components.
-Once you're done, you can export your flow as a JSON file to use with LangChain.
-To do so, click the "Export" button in the top right corner of the canvas, then
-in Python, you can load the flow with:
+Once you're done, you can export your flow as a JSON file.
+
+Load the flow with:
```python
from langflow import load_flow_from_json
flow = load_flow_from_json("path/to/flow.json")
-# Now you can use it like any chain
+# Now you can use it
flow("Hey, have you heard of Langflow?")
```
@@ -167,15 +148,16 @@ flow("Hey, have you heard of Langflow?")
We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible.
+Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions, and showcase your projects! 🦾
+
---
-Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions and showcase your projects! 🦾
-
-
-
-
[](https://star-history.com/#logspace-ai/langflow&Date)
+# 🌟 Contributors
+
+[](https://github.com/logspace-ai/langflow/graphs/contributors)
+
# 📄 License
Langflow is released under the MIT License. See the LICENSE file for details.
diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx
index 112e40ed8..d8c6ff2f5 100644
--- a/docs/docs/components/custom.mdx
+++ b/docs/docs/components/custom.mdx
@@ -81,7 +81,18 @@ The CustomComponent class serves as the foundation for creating custom component
| _`required: bool`_ | Makes the field required. |
| _`info: str`_ | Adds a tooltip to the field. |
| _`file_types: List[str]`_ | This is a requirement if the _`field_type`_ is _file_. Defines which file types will be accepted. For example, _json_, _yaml_ or _yml_. |
- | _`range_spec: langflow.field_typing.RangeSpec`_ | This is a requirement if the _`field_type`_ is _`float`_. Defines the range of values accepted and the step size. If none is defined, the default is _`[-1, 1, 0.1]`_. |
+ | _`range_spec: langflow.field_typing.RangeSpec`_ | This is a requirement if the _`field_type`_ is _`float`_. Defines the range of values accepted and the step size. If none is defined, the default is _`[-1, 1, 0.1]`_. |
+ | _`title_case: bool`_ | Formats the name of the field when _`display_name`_ is not defined. Set it to _`False`_ to keep the name as you set it in the _`build`_ method. |
+
+
+
+ Keys _`options`_ and _`value`_ can receive a method or function that returns a list of strings or a string, respectively. This is useful when you want to dynamically generate the options or the default value of a field. A refresh button will appear next to the field in the component, allowing the user to update the options or the default value.
+
+
+
+
+
+
- The CustomComponent class also provides helpful methods for specific tasks (e.g., to load and use other flows from the Langflow platform):
| Method Name | Description |
@@ -96,6 +107,7 @@ The CustomComponent class serves as the foundation for creating custom component
| -------------- | ----------------------------------------------------------------------------- |
| _`status`_ | Displays the value it receives in the _`build`_ method. Useful for debugging. |
| _`field_order`_ | Defines the order the fields will be displayed in the canvas. |
+ | _`icon`_ | Defines the emoji (for example, _`:rocket:`_) that will be displayed in the canvas. |
diff --git a/docs/docs/contributing/community.md b/docs/docs/contributing/community.md
index 51016f508..6bb62641d 100644
--- a/docs/docs/contributing/community.md
+++ b/docs/docs/contributing/community.md
@@ -12,7 +12,7 @@
## 🐦 Stay tunned for **Langflow** on Twitter
-Follow [@logspace_ai](https://twitter.com/langflow_ai) on **Twitter** to get the latest news about **Langflow**.
+Follow [@langflow_ai](https://twitter.com/langflow_ai) on **Twitter** to get the latest news about **Langflow**.
---
## ⭐️ Star **Langflow** on GitHub
diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx
index f0fee5009..512d2578b 100644
--- a/docs/docs/index.mdx
+++ b/docs/docs/index.mdx
@@ -1,6 +1,6 @@
# 👋 Welcome to Langflow
-Langflow is an easy way to prototype [LangChain](https://github.com/hwchase17/langchain) flows. The drag-and-drop feature allows quick and effortless experimentation, while the built-in chat interface facilitates real-time interaction. It provides options to edit prompt parameters, create chains and agents, track thought processes, and export flows.
+Langflow is an easy way to create flows. The drag-and-drop feature allows quick and effortless experimentation, while the built-in chat interface facilitates real-time interaction. It provides options to edit prompt parameters, create chains and agents, track thought processes, and export flows.
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
@@ -11,7 +11,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
diff --git a/img/new_langflow_demo.gif b/docs/static/img/new_langflow_demo.gif
similarity index 100%
rename from img/new_langflow_demo.gif
rename to docs/static/img/new_langflow_demo.gif
diff --git a/img/langflow-demo.gif b/img/langflow-demo.gif
deleted file mode 100644
index 4cea58628..000000000
Binary files a/img/langflow-demo.gif and /dev/null differ
diff --git a/img/langflow-screen.png b/img/langflow-screen.png
deleted file mode 100644
index 49ef0b053..000000000
Binary files a/img/langflow-screen.png and /dev/null differ
diff --git a/poetry.lock b/poetry.lock
index 8d5e7a89c..3039ac668 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -978,13 +978,13 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
[[package]]
name = "cohere"
-version = "4.50"
+version = "4.51"
description = "Python SDK for the Cohere API"
optional = false
python-versions = ">=3.8,<4.0"
files = [
- {file = "cohere-4.50-py3-none-any.whl", hash = "sha256:790744034c76cabd9c3d8e05c0b2e85733caee64e695e5a9904c4527202c913e"},
- {file = "cohere-4.50.tar.gz", hash = "sha256:64908677069fee23bb5dc968d576eab3ed644278e800bb7dcc3e5a336e5fc206"},
+ {file = "cohere-4.51-py3-none-any.whl", hash = "sha256:71193475ba08b00244bcc6de0e4fa1de869eaa82d6a00e04ab07f64429498268"},
+ {file = "cohere-4.51.tar.gz", hash = "sha256:01fb092ea9038dd4fb360efb3506fad2451ed231cb6774e324b993c9374a550a"},
]
[package.dependencies]
@@ -3464,12 +3464,12 @@ regex = ["regex"]
[[package]]
name = "llama-cpp-python"
-version = "0.2.50"
+version = "0.2.52"
description = "Python bindings for the llama.cpp library"
optional = true
python-versions = ">=3.8"
files = [
- {file = "llama_cpp_python-0.2.50.tar.gz", hash = "sha256:28caf4e665dac62ad1d347061b7a96669af7fb9e7f1e4e8c17e736504e321a51"},
+ {file = "llama_cpp_python-0.2.52.tar.gz", hash = "sha256:cc3f670ea5b315547396b0bbc108fcc9602d19b8af858e03c4c0fae385fb9a04"},
]
[package.dependencies]
@@ -4562,35 +4562,36 @@ full = ["XLMMacroDeobfuscator"]
[[package]]
name = "onnxruntime"
-version = "1.15.1"
+version = "1.17.1"
description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
optional = false
python-versions = "*"
files = [
- {file = "onnxruntime-1.15.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:baad59e6a763237fa39545325d29c16f98b8a45d2dfc524c67631e2e3ba44d16"},
- {file = "onnxruntime-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:568c2db848f619a0a93e843c028e9fb4879929d40b04bd60f9ba6eb8d2e93421"},
- {file = "onnxruntime-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69088d7784bb04dedfd9e883e2c96e4adf8ae0451acdd0abb78d68f59ecc6d9d"},
- {file = "onnxruntime-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cef43737b2cd886d5d718d100f56ec78c9c476c5db5f8f946e95024978fe754"},
- {file = "onnxruntime-1.15.1-cp310-cp310-win32.whl", hash = "sha256:79d7e65abb44a47c633ede8e53fe7b9756c272efaf169758c482c983cca98d7e"},
- {file = "onnxruntime-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:8bc4c47682933a7a2c79808688aad5f12581305e182be552de50783b5438e6bd"},
- {file = "onnxruntime-1.15.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:652b2cb777f76446e3cc41072dd3d1585a6388aeff92b9de656724bc22e241e4"},
- {file = "onnxruntime-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89b86dbed15740abc385055a29c9673a212600248d702737ce856515bdeddc88"},
- {file = "onnxruntime-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5cdd9ee748149a57f4cdfa67187a0d68f75240645a3c688299dcd08742cc98"},
- {file = "onnxruntime-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f748cce6a70ed38c19658615c55f4eedb9192765a4e9c4bd2682adfe980698d"},
- {file = "onnxruntime-1.15.1-cp311-cp311-win32.whl", hash = "sha256:e0312046e814c40066e7823da58075992d51364cbe739eeeb2345ec440c3ac59"},
- {file = "onnxruntime-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:f0980969689cb956c22bd1318b271e1be260060b37f3ddd82c7d63bd7f2d9a79"},
- {file = "onnxruntime-1.15.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:345986cfdbd6f4b20a89b6a6cd9abd3e2ced2926ae0b6e91fefa8149f95c0f09"},
- {file = "onnxruntime-1.15.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d7b3ad75e040f1e95757f69826a11051737b31584938a26d466a0234c6de98"},
- {file = "onnxruntime-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3603d07b829bcc1c14963a76103e257aade8861eb208173b300cc26e118ec2f8"},
- {file = "onnxruntime-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3df0625b9295daf1f7409ea55f72e1eeb38d54f5769add53372e79ddc3cf98d"},
- {file = "onnxruntime-1.15.1-cp38-cp38-win32.whl", hash = "sha256:f68b47fdf1a0406c0292f81ac993e2a2ae3e8b166b436d590eb221f64e8e187a"},
- {file = "onnxruntime-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:52d762d297cc3f731f54fa65a3e329b813164970671547bef6414d0ed52765c9"},
- {file = "onnxruntime-1.15.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:99228f9f03dc1fc8af89a28c9f942e8bd3e97e894e263abe1a32e4ddb1f6363b"},
- {file = "onnxruntime-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:45db7f96febb0cf23e3af147f35c4f8de1a37dd252d1cef853c242c2780250cd"},
- {file = "onnxruntime-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bafc112a36db25c821b90ab747644041cb4218f6575889775a2c12dd958b8c3"},
- {file = "onnxruntime-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985693d18f2d46aa34fd44d7f65ff620660b2c8fa4b8ec365c2ca353f0fbdb27"},
- {file = "onnxruntime-1.15.1-cp39-cp39-win32.whl", hash = "sha256:708eb31b0c04724bf0f01c1309a9e69bbc09b85beb750e5662c8aed29f1ff9fd"},
- {file = "onnxruntime-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:73d6de4c42dfde1e9dbea04773e6dc23346c8cda9c7e08c6554fafc97ac60138"},
+ {file = "onnxruntime-1.17.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d43ac17ac4fa3c9096ad3c0e5255bb41fd134560212dc124e7f52c3159af5d21"},
+ {file = "onnxruntime-1.17.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55b5e92a4c76a23981c998078b9bf6145e4fb0b016321a8274b1607bd3c6bd35"},
+ {file = "onnxruntime-1.17.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ebbcd2bc3a066cf54e6f18c75708eb4d309ef42be54606d22e5bdd78afc5b0d7"},
+ {file = "onnxruntime-1.17.1-cp310-cp310-win32.whl", hash = "sha256:5e3716b5eec9092e29a8d17aab55e737480487deabfca7eac3cd3ed952b6ada9"},
+ {file = "onnxruntime-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:fbb98cced6782ae1bb799cc74ddcbbeeae8819f3ad1d942a74d88e72b6511337"},
+ {file = "onnxruntime-1.17.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:36fd6f87a1ecad87e9c652e42407a50fb305374f9a31d71293eb231caae18784"},
+ {file = "onnxruntime-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99a8bddeb538edabc524d468edb60ad4722cff8a49d66f4e280c39eace70500b"},
+ {file = "onnxruntime-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd7fddb4311deb5a7d3390cd8e9b3912d4d963efbe4dfe075edbaf18d01c024e"},
+ {file = "onnxruntime-1.17.1-cp311-cp311-win32.whl", hash = "sha256:606a7cbfb6680202b0e4f1890881041ffc3ac6e41760a25763bd9fe146f0b335"},
+ {file = "onnxruntime-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:53e4e06c0a541696ebdf96085fd9390304b7b04b748a19e02cf3b35c869a1e76"},
+ {file = "onnxruntime-1.17.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:40f08e378e0f85929712a2b2c9b9a9cc400a90c8a8ca741d1d92c00abec60843"},
+ {file = "onnxruntime-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac79da6d3e1bb4590f1dad4bb3c2979d7228555f92bb39820889af8b8e6bd472"},
+ {file = "onnxruntime-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ae9ba47dc099004e3781f2d0814ad710a13c868c739ab086fc697524061695ea"},
+ {file = "onnxruntime-1.17.1-cp312-cp312-win32.whl", hash = "sha256:2dff1a24354220ac30e4a4ce2fb1df38cb1ea59f7dac2c116238d63fe7f4c5ff"},
+ {file = "onnxruntime-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:6226a5201ab8cafb15e12e72ff2a4fc8f50654e8fa5737c6f0bd57c5ff66827e"},
+ {file = "onnxruntime-1.17.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:cd0c07c0d1dfb8629e820b05fda5739e4835b3b82faf43753d2998edf2cf00aa"},
+ {file = "onnxruntime-1.17.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:617ebdf49184efa1ba6e4467e602fbfa029ed52c92f13ce3c9f417d303006381"},
+ {file = "onnxruntime-1.17.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9dae9071e3facdf2920769dceee03b71c684b6439021defa45b830d05e148924"},
+ {file = "onnxruntime-1.17.1-cp38-cp38-win32.whl", hash = "sha256:835d38fa1064841679433b1aa8138b5e1218ddf0cfa7a3ae0d056d8fd9cec713"},
+ {file = "onnxruntime-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:96621e0c555c2453bf607606d08af3f70fbf6f315230c28ddea91754e17ad4e6"},
+ {file = "onnxruntime-1.17.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:7a9539935fb2d78ebf2cf2693cad02d9930b0fb23cdd5cf37a7df813e977674d"},
+ {file = "onnxruntime-1.17.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45c6a384e9d9a29c78afff62032a46a993c477b280247a7e335df09372aedbe9"},
+ {file = "onnxruntime-1.17.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4e19f966450f16863a1d6182a685ca33ae04d7772a76132303852d05b95411ea"},
+ {file = "onnxruntime-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e2ae712d64a42aac29ed7a40a426cb1e624a08cfe9273dcfe681614aa65b07dc"},
+ {file = "onnxruntime-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7e9f7fb049825cdddf4a923cfc7c649d84d63c0134315f8e0aa9e0c3004672c"},
]
[package.dependencies]
@@ -7458,13 +7459,13 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
[[package]]
name = "supabase"
-version = "2.3.6"
+version = "2.3.7"
description = "Supabase client for Python."
optional = false
python-versions = ">=3.8,<4.0"
files = [
- {file = "supabase-2.3.6-py3-none-any.whl", hash = "sha256:4288ba658c1c7f33ba3c232a6b9eae8b549ba3355ee8ff061bad9085e5d1326e"},
- {file = "supabase-2.3.6.tar.gz", hash = "sha256:eb3c0d6f087b5da3b84a04da3430ee55a854d81f92f2bbc6c8b45fcf34be6f85"},
+ {file = "supabase-2.3.7-py3-none-any.whl", hash = "sha256:a4616aa9149231d20f6e61884b90b7e5bdbde0ef0c2f0c12ced14536f39055bc"},
+ {file = "supabase-2.3.7.tar.gz", hash = "sha256:d70dc986b7cd2a97c1916da1fa0ea6abae25690521cc9dd78016ab0e0c07116e"},
]
[package.dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index deb9ffd31..44ec7d19d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
-version = "0.6.7a5"
+version = "0.6.7"
description = "A Python package with a built-in web application"
authors = ["Logspace "]
maintainers = [
diff --git a/src/backend/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py b/src/backend/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
index 0d28d5b9b..579e3c0b3 100644
--- a/src/backend/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
+++ b/src/backend/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
@@ -1,7 +1,8 @@
from typing import Optional
-from langflow import CustomComponent
-from langchain.llms.huggingface_endpoint import HuggingFaceEndpoint
+
from langchain.llms.base import BaseLLM
+from langchain.llms.huggingface_endpoint import HuggingFaceEndpoint
+from langflow import CustomComponent
class HuggingFaceEndpointsComponent(CustomComponent):
@@ -31,11 +32,11 @@ class HuggingFaceEndpointsComponent(CustomComponent):
model_kwargs: Optional[dict] = None,
) -> BaseLLM:
try:
- output = HuggingFaceEndpoint(
+ output = HuggingFaceEndpoint( # type: ignore
endpoint_url=endpoint_url,
task=task,
huggingfacehub_api_token=huggingfacehub_api_token,
- model_kwargs=model_kwargs,
+ model_kwargs=model_kwargs or {},
)
except Exception as e:
raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
diff --git a/src/backend/langflow/interface/custom/utils.py b/src/backend/langflow/interface/custom/utils.py
index 45e81d151..4e8063c0d 100644
--- a/src/backend/langflow/interface/custom/utils.py
+++ b/src/backend/langflow/interface/custom/utils.py
@@ -27,14 +27,18 @@ from langflow.utils import validate
from langflow.utils.util import get_base_classes
-def add_output_types(frontend_node: CustomComponentFrontendNode, return_types: List[str]):
+def add_output_types(
+ frontend_node: CustomComponentFrontendNode, return_types: List[str]
+):
"""Add output types to the frontend node"""
for return_type in return_types:
if return_type is None:
raise HTTPException(
status_code=400,
detail={
- "error": ("Invalid return type. Please check your code and try again."),
+ "error": (
+ "Invalid return type. Please check your code and try again."
+ ),
"traceback": traceback.format_exc(),
},
)
@@ -63,14 +67,18 @@ def reorder_fields(frontend_node: CustomComponentFrontendNode, field_order: List
frontend_node.template.fields = reordered_fields
-def add_base_classes(frontend_node: CustomComponentFrontendNode, return_types: List[str]):
+def add_base_classes(
+ frontend_node: CustomComponentFrontendNode, return_types: List[str]
+):
"""Add base classes to the frontend node"""
for return_type_instance in return_types:
if return_type_instance is None:
raise HTTPException(
status_code=400,
detail={
- "error": ("Invalid return type. Please check your code and try again."),
+ "error": (
+ "Invalid return type. Please check your code and try again."
+ ),
"traceback": traceback.format_exc(),
},
)
@@ -145,10 +153,14 @@ def add_new_custom_field(
# If options is a list, then it's a dropdown
# If options is None, then it's a list of strings
is_list = isinstance(field_config.get("options"), list)
- field_config["is_list"] = is_list or field_config.get("is_list", False) or field_contains_list
+ field_config["is_list"] = (
+ is_list or field_config.get("is_list", False) or field_contains_list
+ )
if "name" in field_config:
- warnings.warn("The 'name' key in field_config is used to build the object and can't be changed.")
+ warnings.warn(
+ "The 'name' key in field_config is used to build the object and can't be changed."
+ )
required = field_config.pop("required", field_required)
placeholder = field_config.pop("placeholder", "")
@@ -179,7 +191,9 @@ def add_extra_fields(frontend_node, field_config, function_args):
if "name" not in extra_field or extra_field["name"] == "self":
continue
- field_name, field_type, field_value, field_required = get_field_properties(extra_field)
+ field_name, field_type, field_value, field_required = get_field_properties(
+ extra_field
+ )
config = field_config.get(field_name, {})
frontend_node = add_new_custom_field(
frontend_node,
@@ -217,7 +231,9 @@ def run_build_config(
raise HTTPException(
status_code=400,
detail={
- "error": ("Invalid type convertion. Please check your code and try again."),
+ "error": (
+                        "Invalid type conversion. Please check your code and try again."
+ ),
"traceback": traceback.format_exc(),
},
) from exc
@@ -245,7 +261,9 @@ def run_build_config(
raise HTTPException(
status_code=400,
detail={
- "error": ("Invalid type convertion. Please check your code and try again."),
+ "error": (
+                        "Invalid type conversion. Please check your code and try again."
+ ),
"traceback": traceback.format_exc(),
},
) from exc
@@ -300,16 +318,24 @@ def build_custom_component_template(
frontend_node = build_frontend_node(custom_component.template_config)
logger.debug("Updated attributes")
- field_config, custom_instance = run_build_config(custom_component, user_id=user_id, update_field=update_field)
+ field_config, custom_instance = run_build_config(
+ custom_component, user_id=user_id, update_field=update_field
+ )
logger.debug("Built field config")
entrypoint_args = custom_component.get_function_entrypoint_args
add_extra_fields(frontend_node, field_config, entrypoint_args)
- frontend_node = add_code_field(frontend_node, custom_component.code, field_config.get("code", {}))
+ frontend_node = add_code_field(
+ frontend_node, custom_component.code, field_config.get("code", {})
+ )
- add_base_classes(frontend_node, custom_component.get_function_entrypoint_return_type)
- add_output_types(frontend_node, custom_component.get_function_entrypoint_return_type)
+ add_base_classes(
+ frontend_node, custom_component.get_function_entrypoint_return_type
+ )
+ add_output_types(
+ frontend_node, custom_component.get_function_entrypoint_return_type
+ )
logger.debug("Added base classes")
reorder_fields(frontend_node, custom_instance._get_field_order())
@@ -321,7 +347,9 @@ def build_custom_component_template(
raise HTTPException(
status_code=400,
detail={
- "error": ("Invalid type convertion. Please check your code and try again."),
+ "error": (
+                        "Invalid type conversion. Please check your code and try again."
+ ),
"traceback": traceback.format_exc(),
},
) from exc
@@ -345,7 +373,9 @@ def build_custom_components(settings_service):
if not settings_service.settings.COMPONENTS_PATH:
return {}
- logger.info(f"Building custom components from {settings_service.settings.COMPONENTS_PATH}")
+ logger.info(
+ f"Building custom components from {settings_service.settings.COMPONENTS_PATH}"
+ )
custom_components_from_file = {}
processed_paths = set()
for path in settings_service.settings.COMPONENTS_PATH:
@@ -356,7 +386,9 @@ def build_custom_components(settings_service):
custom_component_dict = build_custom_component_list_from_path(path_str)
if custom_component_dict:
category = next(iter(custom_component_dict))
- logger.info(f"Loading {len(custom_component_dict[category])} component(s) from category {category}")
+ logger.info(
+ f"Loading {len(custom_component_dict[category])} component(s) from category {category}"
+ )
custom_components_from_file = merge_nested_dicts_with_renaming(
custom_components_from_file, custom_component_dict
)
@@ -373,7 +405,7 @@ def update_field_dict(field_dict):
field_dict["refresh"] = True
if "value" in field_dict and callable(field_dict["value"]):
- field_dict["value"] = field_dict["value"](field_dict.get("options", []))
+ field_dict["value"] = field_dict["value"]()
field_dict["refresh"] = True
# Let's check if "range_spec" is a RangeSpec object
diff --git a/src/backend/langflow/template/field/base.py b/src/backend/langflow/template/field/base.py
index a9bec5507..4317ac464 100644
--- a/src/backend/langflow/template/field/base.py
+++ b/src/backend/langflow/template/field/base.py
@@ -68,7 +68,9 @@ class TemplateField(BaseModel):
refresh: Optional[bool] = None
"""Specifies if the field should be refreshed. Defaults to False."""
- range_spec: Optional[RangeSpec] = Field(default=None, serialization_alias="rangeSpec")
+ range_spec: Optional[RangeSpec] = Field(
+ default=None, serialization_alias="rangeSpec"
+ )
"""Range specification for the field. Defaults to None."""
title_case: bool = False
@@ -115,6 +117,10 @@ class TemplateField(BaseModel):
if not isinstance(value, list):
raise ValueError("file_types must be a list")
return [
- (f".{file_type}" if isinstance(file_type, str) and not file_type.startswith(".") else file_type)
+ (
+ f".{file_type}"
+ if isinstance(file_type, str) and not file_type.startswith(".")
+ else file_type
+ )
for file_type in value
]
diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json
index f18616611..bb825a587 100644
--- a/src/frontend/package-lock.json
+++ b/src/frontend/package-lock.json
@@ -12085,4 +12085,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx
index ca8f7489f..09d1f0423 100644
--- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx
+++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx
@@ -24,6 +24,7 @@ import {
generateNodeFromFlow,
getNodeId,
isValidConnection,
+ reconnectEdges,
validateSelection,
} from "../../../../utils/reactflowUtils";
import { getRandomName, isWrappedWithClass } from "../../../../utils/utils";
@@ -394,7 +395,7 @@ export default function Page({
if (
validateSelection(lastSelection!, edges).length === 0
) {
- const { newFlow } = generateFlow(
+ const { newFlow, removedEdges } = generateFlow(
lastSelection!,
nodes,
edges,
@@ -404,6 +405,10 @@ export default function Page({
newFlow,
getNodeId
);
+ const newEdges = reconnectEdges(
+ newGroupNode,
+ removedEdges
+ );
setNodes((oldNodes) => [
...oldNodes.filter(
(oldNodes) =>
@@ -414,16 +419,17 @@ export default function Page({
),
newGroupNode,
]);
- setEdges((oldEdges) =>
- oldEdges.filter(
+ setEdges((oldEdges) => [
+ ...oldEdges.filter(
(oldEdge) =>
!lastSelection!.nodes.some(
(selectionNode) =>
selectionNode.id === oldEdge.target ||
selectionNode.id === oldEdge.source
)
- )
- );
+ ),
+ ...newEdges,
+ ]);
} else {
setErrorData({
title: "Invalid selection",
diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts
index 75314b274..5b38290a7 100644
--- a/src/frontend/src/utils/reactflowUtils.ts
+++ b/src/frontend/src/utils/reactflowUtils.ts
@@ -597,8 +597,7 @@ export function generateFlow(
const newFlowData = { nodes, edges, viewport: { zoom: 1, x: 0, y: 0 } };
const uid = new ShortUniqueId({ length: 5 });
/* remove edges that are not connected to selected nodes on both ends
- in future we can save this edges to when ungrouping reconect to the old nodes
- */
+ */
newFlowData.edges = selection.edges.filter(
(edge) =>
selection.nodes.some((node) => node.id === edge.target) &&
@@ -619,12 +618,48 @@ export function generateFlow(
// in the future we can use a better aproach using a set
return {
newFlow,
- removedEdges: selection.edges.filter(
- (edge) => !newFlowData.edges.includes(edge)
+ removedEdges: edges.filter(
+ (edge) =>
+ (selection.nodes.some((node) => node.id === edge.target) ||
+ selection.nodes.some((node) => node.id === edge.source)) &&
+ newFlowData.edges.every((e) => e.id !== edge.id)
),
};
}
+export function reconnectEdges(groupNode: NodeType, excludedEdges: Edge[]) {
+ let newEdges = cloneDeep(excludedEdges);
+ if (!groupNode.data.node!.flow) return [];
+ const { nodes, edges } = groupNode.data.node!.flow!.data!;
+ const lastNode = findLastNode(groupNode.data.node!.flow!.data!);
+ newEdges.forEach((edge) => {
+ if (lastNode && edge.source === lastNode.id) {
+ edge.source = groupNode.id;
+ let newSourceHandle: sourceHandleType = scapeJSONParse(
+ edge.sourceHandle!
+ );
+ newSourceHandle.id = groupNode.id;
+ edge.sourceHandle = scapedJSONStringfy(newSourceHandle);
+ edge.data.sourceHandle = newSourceHandle;
+ }
+ if (nodes.some((node) => node.id === edge.target)) {
+ const targetNode = nodes.find((node) => node.id === edge.target)!;
+      const targetHandle: targetHandleType = scapeJSONParse(
+        edge.targetHandle!
+      );
+ const proxy = { id: targetNode.id, field: targetHandle.fieldName };
+ let newTargetHandle: targetHandleType = cloneDeep(targetHandle);
+ newTargetHandle.id = groupNode.id;
+ newTargetHandle.proxy = proxy;
+ edge.target = groupNode.id;
+ newTargetHandle.fieldName = targetHandle.fieldName + "_" + targetNode.id;
+ edge.targetHandle = scapedJSONStringfy(newTargetHandle);
+ edge.data.targetHandle = newTargetHandle;
+ }
+ });
+ return newEdges;
+}
+
export function filterFlow(
selection: OnSelectionChangeParams,
setNodes: (update: Node[] | ((oldState: Node[]) => Node[])) => void,