Merge branch 'python_custom_node_component' into ChatWidgetAPI
This commit is contained in:
commit
397c665536
102 changed files with 6034 additions and 671 deletions
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -251,3 +251,5 @@ langflow.db
|
|||
|
||||
# docusaurus
|
||||
.docusaurus/
|
||||
|
||||
/tmp/*
|
||||
|
|
|
|||
3
.vscode/launch.json
vendored
3
.vscode/launch.json
vendored
|
|
@ -6,7 +6,8 @@
|
|||
"request": "launch",
|
||||
"module": "uvicorn",
|
||||
"args": [
|
||||
"langflow.main:app",
|
||||
"--factory",
|
||||
"langflow.main:create_app",
|
||||
"--port",
|
||||
"7860",
|
||||
"--reload",
|
||||
|
|
|
|||
2
Makefile
2
Makefile
|
|
@ -46,7 +46,7 @@ install_backend:
|
|||
|
||||
backend:
|
||||
make install_backend
|
||||
poetry run uvicorn src.backend.langflow.main:app --port 7860 --reload --log-level debug
|
||||
poetry run uvicorn --factory src.backend.langflow.main:create_app --port 7860 --reload --log-level debug
|
||||
|
||||
build_and_run:
|
||||
echo 'Removing dist folder'
|
||||
|
|
|
|||
|
|
@ -39,6 +39,8 @@
|
|||
- [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
|
||||
- [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
|
||||
- [API Usage](#api-usage)
|
||||
- [Deploy on Railway](#deploy-on-railway)
|
||||
- [Deploy on Render](#deploy-on-render)
|
||||
- [🎨 Creating Flows](#-creating-flows)
|
||||
- [👋 Contributing](#-contributing)
|
||||
- [📄 License](#-license)
|
||||
|
|
@ -87,6 +89,7 @@ Each option is detailed below:
|
|||
- `--config`: Defines the path to the configuration file. The default is `config.yaml`.
|
||||
- `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`.
|
||||
- `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`.
|
||||
- `component-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENT_PATH` environment variable. The default is `langflow/components`.
|
||||
- `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`.
|
||||
- `--cache`: Selects the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`.
|
||||
- `--jcloud/--no-jcloud`: Toggles the option to deploy on Jina AI Cloud. The default is `no-jcloud`.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import ThemedImage from "@theme/ThemedImage";
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Chains
|
||||
|
||||
|
|
@ -12,22 +13,23 @@ Chains, in the context of language models, refer to a series of calls made to a
|
|||
|
||||
The `CombineDocsChain` incorporates methods to combine or aggregate loaded documents for question-answering functionality.
|
||||
|
||||
:::info
|
||||
<Admonition type="info">
|
||||
|
||||
Works as a proxy of LangChain’s [documents](https://python.langchain.com/docs/modules/chains/document/) chains generated by the `load_qa_chain` function.
|
||||
|
||||
:::
|
||||
</Admonition>
|
||||
|
||||
**Params**
|
||||
|
||||
- **LLM:** Language Model to use in the chain.
|
||||
- **chain_type:** The chain type to be used. Each one of them applies a different “combination strategy”.
|
||||
- **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (“stuff" as in "to stuff" or "to fill") is the most straightforward of *the* document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls.
|
||||
- **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary.
|
||||
- **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned.
|
||||
- **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer.
|
||||
|
||||
Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents.
|
||||
- **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (“stuff" as in "to stuff" or "to fill") is the most straightforward of _the_ document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls.
|
||||
- **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary.
|
||||
- **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned.
|
||||
- **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer.
|
||||
|
||||
Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents.
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -41,7 +43,7 @@ The `ConversationChain` is a straightforward chain for interactive conversations
|
|||
- **Memory:** Default memory store.
|
||||
- **input_key:** Used to specify the key under which the user input will be stored in the conversation memory. It allows you to provide the user's input to the chain for processing and generating a response.
|
||||
- **output_key:** Used to specify the key under which the generated response will be stored in the conversation memory. It allows you to retrieve the response using the specified key.
|
||||
- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can be helpful for debugging and understanding the chain's behavior. If set to False, it will suppress the verbose output — defaults to `False`.
|
||||
- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can be helpful for debugging and understanding the chain's behavior. If set to False, it will suppress the verbose output — defaults to `False`.
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -49,11 +51,11 @@ The `ConversationChain` is a straightforward chain for interactive conversations
|
|||
|
||||
The `ConversationalRetrievalChain` extracts information and provides answers by combining document search and question-answering abilities.
|
||||
|
||||
:::info
|
||||
<Admonition type="info">
|
||||
|
||||
A retriever is a component that finds documents based on a query. It doesn't store the documents themselves, but it returns the ones that match the query.
|
||||
|
||||
:::
|
||||
</Admonition >
|
||||
|
||||
**Params**
|
||||
|
||||
|
|
@ -61,12 +63,13 @@ A retriever is a component that finds documents based on a query. It doesn't sto
|
|||
- **Memory:** Default memory store.
|
||||
- **Retriever:** The retriever used to fetch relevant documents.
|
||||
- **chain_type:** The chain type to be used. Each one of them applies a different “combination strategy”.
|
||||
- **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (“stuff" as in "to stuff" or "to fill") is the most straightforward of *the* document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls.
|
||||
- **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary.
|
||||
- **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned.
|
||||
- **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer.
|
||||
|
||||
Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents.
|
||||
- **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (“stuff" as in "to stuff" or "to fill") is the most straightforward of _the_ document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls.
|
||||
- **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary.
|
||||
- **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned.
|
||||
- **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer.
|
||||
|
||||
Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents.
|
||||
|
||||
- **return_source_documents:** Used to specify whether or not to include the source documents that were used to answer the question in the output. When set to `True`, source documents will be included in the output along with the generated answer. This can be useful for providing additional context or references to the user — defaults to `True`.
|
||||
- **verbose:** Whether or not to run in verbose mode. In verbose mode, intermediate logs will be printed to the console — defaults to `False`.
|
||||
|
|
@ -108,17 +111,17 @@ The `LLMMathChain` works by using the language model with an `LLMChain` to under
|
|||
|
||||
`RetrievalQA` is a chain used to find relevant documents or information to answer a given query. The retriever is responsible for returning the relevant documents based on the query, and the QA component then extracts the answer from those documents. The retrieval QA system combines the capabilities of both the retriever and the QA component to provide accurate and relevant answers to user queries.
|
||||
|
||||
:::info
|
||||
<Admonition type="info">
|
||||
|
||||
A retriever is a component that finds documents based on a query. It doesn't store the documents themselves, but it returns the ones that match the query.
|
||||
|
||||
:::
|
||||
</Admonition >
|
||||
|
||||
**Params**
|
||||
|
||||
- **Combine Documents Chain:** Chain to use to combine the documents.
|
||||
- **Memory:** Default memory store.
|
||||
- **Retriever:** The retriever used to fetch relevant documents.
|
||||
- **Retriever:** The retriever used to fetch relevant documents.
|
||||
- **input_key:** This parameter is used to specify the key in the input data that contains the question. It is used to retrieve the question from the input data and pass it to the question-answering model for generating the answer — defaults to `query`.
|
||||
- **output_key:** This parameter is used to specify the key in the output data where the generated answer will be stored. It is used to retrieve the answer from the output data after the question-answering model has generated it — defaults to `result`.
|
||||
- **return_source_documents:** Used to specify whether or not to include the source documents that were used to answer the question in the output. When set to `True`, source documents will be included in the output along with the generated answer. This can be useful for providing additional context or references to the user — defaults to `True`.
|
||||
|
|
@ -134,4 +137,4 @@ The `SQLDatabaseChain` finds answers to questions using a SQL database. It works
|
|||
|
||||
- **Db:** SQL Database to connect to.
|
||||
- **LLM:** Language Model to use in the chain.
|
||||
- **Prompt:** Prompt template to translate natural language to SQL.
|
||||
- **Prompt:** Prompt template to translate natural language to SQL.
|
||||
|
|
|
|||
17
docs/docs/components/custom.mdx
Normal file
17
docs/docs/components/custom.mdx
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Custom Component
|
||||
|
||||
---
|
||||
|
||||
Used to create a custom component. The code is the class that will be converted to a Custom Component with the fields and formatting you define.
|
||||
|
||||
**Params**
|
||||
|
||||
- **Code:** The code of the component.
|
||||
|
||||
<Admonition type="info" label="Tip">
|
||||
|
||||
[Learn more about Custom Components](../guidelines/custom-component)
|
||||
|
||||
</Admonition>
|
||||
|
|
@ -1,3 +1,5 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Prompts
|
||||
|
||||
A prompt refers to the input given to a language model. It is constructed from multiple components and can be parametrized using prompt templates. A prompt template is a reproducible way to generate prompts and allow for easy customization through input variables.
|
||||
|
|
@ -8,8 +10,10 @@ A prompt refers to the input given to a language model. It is constructed from m
|
|||
|
||||
The `PromptTemplate` component allows users to create prompts and define variables that provide control over instructing the model. The template can take in a set of variables from the end user and generates the prompt once the conversation is initiated.
|
||||
|
||||
:::info
|
||||
Once a variable is defined in the prompt template, it becomes a component input of its own. Check out [Prompt Customization](../guidelines/prompt-customization.mdx) to learn more.
|
||||
:::
|
||||
<Admonition type="info">
|
||||
Once a variable is defined in the prompt template, it becomes a component
|
||||
input of its own. Check out [Prompt
|
||||
Customization](../guidelines/prompt-customization.mdx) to learn more.
|
||||
</Admonition>
|
||||
|
||||
- **template:** Template used to format an individual request.
|
||||
- **template:** Template used to format an individual request.
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Buffer Memory
|
||||
|
||||
For certain applications, retaining past interactions is crucial. For that, chains and agents may accept a memory component as one of their input parameters. The `ConversationBufferMemory` component is one of them. It stores messages and extracts them into variables.
|
||||
|
|
@ -17,9 +19,10 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
|
||||
#### <a target="\_blank" href="json_files/Buffer_Memory.json" download>Download Flow</a>
|
||||
|
||||
:::note LangChain Components 🦜🔗
|
||||
<Admonition type="note" title="LangChain Components 🦜🔗">
|
||||
|
||||
- [`ConversationBufferMemory`](https://python.langchain.com/docs/modules/memory/how_to/buffer)
|
||||
- [`ConversationChain`](https://python.langchain.com/docs/modules/chains/)
|
||||
- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai)
|
||||
:::
|
||||
|
||||
</Admonition>
|
||||
|
|
|
|||
|
|
@ -1,10 +1,14 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Conversation Chain
|
||||
|
||||
This example shows how to instantiate a simple `ConversationChain` component using a Language Model (LLM). Once the Node Status turns green 🟢, the chat will be ready to take in user messages. Here, we used `ChatOpenAI` to act as the required LLM input, but you can use any LLM for this purpose.
|
||||
|
||||
:::info
|
||||
<Admonition type="info">
|
||||
|
||||
Make sure to always get the API key from the provider.
|
||||
:::
|
||||
|
||||
</Admonition>
|
||||
|
||||
## ⛓️ Langflow Example
|
||||
|
||||
|
|
@ -21,8 +25,9 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
|
||||
#### <a target="\_blank" href="json_files/Basic_Chat.json" download>Download Flow</a>
|
||||
|
||||
:::note LangChain Components 🦜🔗
|
||||
<Admonition type="note" title="LangChain Components 🦜🔗">
|
||||
|
||||
- [`ConversationChain`](https://python.langchain.com/docs/modules/chains/)
|
||||
- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai)
|
||||
:::
|
||||
|
||||
</Admonition>
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# CSV Loader
|
||||
|
||||
The `VectorStoreAgent` component retrieves information from one or more vector stores. This example shows a `VectorStoreAgent` connected to a CSV file through the `Chroma` vector store. Process description:
|
||||
|
|
@ -7,13 +9,18 @@ The `VectoStoreAgent` component retrieves information from one or more vector st
|
|||
- These chunks feed the `Chroma` vector store, which converts them into vectors and stores them for fast indexing.
|
||||
- Finally, the agent accesses the information of the vector store through the `VectorStoreInfo` tool.
|
||||
|
||||
:::info
|
||||
The vector store is used for efficient semantic search, while `VectorStoreInfo` carries information about it, such as its name and description. Embeddings are a way to represent words, phrases, or any entities in a vector space. Learn more about them [here](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings).
|
||||
:::
|
||||
<Admonition type="info">
|
||||
The vector store is used for efficient semantic search, while
|
||||
`VectorStoreInfo` carries information about it, such as its name and
|
||||
description. Embeddings are a way to represent words, phrases, or any entities
|
||||
in a vector space. Learn more about them
|
||||
[here](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings).
|
||||
</Admonition>
|
||||
|
||||
:::tip
|
||||
Once you build this flow, ask questions about the data in the chat interface (e.g., number of rows or columns).
|
||||
:::
|
||||
<Admonition type="tip">
|
||||
Once you build this flow, ask questions about the data in the chat interface
|
||||
(e.g., number of rows or columns).
|
||||
</Admonition>
|
||||
|
||||
## ⛓️ Langflow Example
|
||||
|
||||
|
|
@ -30,7 +37,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
|
||||
#### <a target="\_blank" href="json_files/CSV_Loader.json" download>Download Flow</a>
|
||||
|
||||
:::note LangChain Components 🦜🔗
|
||||
<Admonition type="note" title="LangChain Components 🦜🔗">
|
||||
|
||||
- [`CSVLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv)
|
||||
- [`CharacterTextSplitter`](https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter)
|
||||
|
|
@ -39,4 +46,5 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
- [`VectorStoreInfo`](https://python.langchain.com/docs/modules/data_connection/vectorstores/)
|
||||
- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
|
||||
- [`VectorStoreAgent`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore)
|
||||
:::
|
||||
|
||||
</Admonition>
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# MidJourney Prompt Chain
|
||||
|
||||
The `MidJourneyPromptChain` can be used to generate imaginative and detailed MidJourney prompts.
|
||||
|
|
@ -14,9 +16,11 @@ And get a response such as:
|
|||
Imagine a mysterious forest, the trees are tall and ancient, their branches reaching up to the sky. Through the darkness, a dragon emerges from the shadows, its scales shimmering in the moonlight. Its wingspan is immense, and its eyes glow with a fierce intensity. It is a majestic and powerful creature, one that commands both respect and fear.
|
||||
```
|
||||
|
||||
:::tip
|
||||
Notice that the `ConversationSummaryMemory` stores a summary of the conversation over time. Try using it to create better prompts as the conversation goes on.
|
||||
:::
|
||||
<Admonition type="tip">
|
||||
Notice that the `ConversationSummaryMemory` stores a summary of the
|
||||
conversation over time. Try using it to create better prompts as the
|
||||
conversation goes on.
|
||||
</Admonition>
|
||||
|
||||
## ⛓️ Langflow Example
|
||||
|
||||
|
|
@ -33,8 +37,9 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
|
||||
#### <a target="\_blank" href="json_files/MidJourney_Prompt_Chain.json" download>Download Flow</a>
|
||||
|
||||
:::note LangChain Components 🦜🔗
|
||||
<Admonition type="note" title="LangChain Components 🦜🔗">
|
||||
|
||||
- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
|
||||
- [`ConversationSummaryMemory`](https://python.langchain.com/docs/modules/memory/how_to/summary)
|
||||
:::
|
||||
|
||||
</Admonition>
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Multiple Vector Stores
|
||||
|
||||
The example below shows an agent operating with two vector stores built upon different data sources.
|
||||
|
||||
The `TextLoader` loads a TXT file, while the `WebBaseLoader` pulls text from webpages into a document format to be accessed downstream. The `Chroma` vector stores are created analogous to what we have demonstrated in our [CSV Loader](/examples/csv-loader.mdx) example. Finally, the `VectorStoreRouterAgent` constructs an agent that routes between the vector stores.
|
||||
|
||||
:::info
|
||||
Get the TXT file used [here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt).
|
||||
:::
|
||||
<Admonition type="info">
|
||||
Get the TXT file used
|
||||
[here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt).
|
||||
</Admonition>
|
||||
|
||||
URL used by the `WebBaseLoader`:
|
||||
|
||||
|
|
@ -14,13 +17,15 @@ URL used by the `WebBaseLoader`:
|
|||
https://pt.wikipedia.org/wiki/Harry_Potter
|
||||
```
|
||||
|
||||
:::tip
|
||||
When you build the flow, request information about one of the sources. The agent should be able to use the correct source to generate a response.
|
||||
:::
|
||||
<Admonition type="tip">
|
||||
When you build the flow, request information about one of the sources. The
|
||||
agent should be able to use the correct source to generate a response.
|
||||
</Admonition>
|
||||
|
||||
:::info
|
||||
Learn more about Multiple Vector Stores [here](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore?highlight=Multiple%20Vector%20Stores#multiple-vectorstores).
|
||||
:::
|
||||
<Admonition type="info">
|
||||
Learn more about Multiple Vector Stores
|
||||
[here](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore?highlight=Multiple%20Vector%20Stores#multiple-vectorstores).
|
||||
</Admonition>
|
||||
|
||||
## ⛓️ Langflow Example
|
||||
|
||||
|
|
@ -37,7 +42,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
|
||||
#### <a target="\_blank" href="json_files/Multiple_Vector_Stores.json" download>Download Flow</a>
|
||||
|
||||
:::note LangChain Components 🦜🔗
|
||||
<Admonition type="note" title="LangChain Components 🦜🔗">
|
||||
|
||||
- [`WebBaseLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base)
|
||||
- [`TextLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/unstructured_file)
|
||||
|
|
@ -49,4 +54,4 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
- [`VectorStoreRouterToolkit`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore)
|
||||
- [`VectorStoreRouterAgent`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore)
|
||||
|
||||
:::
|
||||
</Admonition>
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Python Function
|
||||
|
||||
Langflow allows you to create a customized tool using the `PythonFunction` connected to a `Tool` component. In this example, Regex is used in Python to validate a pattern.
|
||||
|
|
@ -15,15 +17,19 @@ def is_brazilian_zipcode(zipcode: str) -> bool:
|
|||
return False
|
||||
```
|
||||
|
||||
:::tip
|
||||
When a tool is called, it is often desirable to have its output returned directly to the user. You can do this by setting the **return_direct** flag for a tool to be True.
|
||||
:::
|
||||
<Admonition type="tip">
|
||||
When a tool is called, it is often desirable to have its output returned
|
||||
directly to the user. You can do this by setting the **return_direct** flag
|
||||
for a tool to be True.
|
||||
</Admonition>
|
||||
|
||||
The `AgentInitializer` component is a quick way to construct an agent from the model and tools.
|
||||
|
||||
:::info
|
||||
The `PythonFunction` is a custom component that uses the LangChain 🦜🔗 tool decorator. Learn more about it [here](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools).
|
||||
:::
|
||||
<Admonition type="info">
|
||||
The `PythonFunction` is a custom component that uses the LangChain 🦜🔗 tool
|
||||
decorator. Learn more about it
|
||||
[here](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools).
|
||||
</Admonition>
|
||||
|
||||
## ⛓️ Langflow Example
|
||||
|
||||
|
|
@ -40,9 +46,10 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
|
||||
#### <a target="\_blank" href="json_files/Python_Function.json" download>Download Flow</a>
|
||||
|
||||
:::note LangChain Components 🦜🔗
|
||||
<Admonition type="note" title="LangChain Components 🦜🔗">
|
||||
|
||||
- [`PythonFunctionTool`](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools)
|
||||
- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai)
|
||||
- [`AgentInitializer`](https://python.langchain.com/docs/modules/agents/)
|
||||
:::
|
||||
|
||||
</Admonition>
|
||||
|
|
|
|||
|
|
@ -1,24 +1,29 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Serp API Tool
|
||||
|
||||
The [Serp API](https://serpapi.com/) (Search Engine Results Page) allows developers to scrape results from search engines such as Google, Bing and Yahoo, and can be used in Langflow through the `Search` component.
|
||||
|
||||
:::info
|
||||
To use the Serp API, you first need to sign up [Serp API](https://serpapi.com/) for an API key on the provider's website.
|
||||
:::
|
||||
<Admonition type="info">
|
||||
To use the Serp API, you first need to sign up [Serp
|
||||
API](https://serpapi.com/) for an API key on the provider's website.
|
||||
</Admonition>
|
||||
|
||||
Here, the `ZeroShotPrompt` component specifies a prompt template for the `ZeroShotAgent`. Set a _Prefix_ and _Suffix_ with rules for the agent to obey. In the example, we used default templates.
|
||||
|
||||
The `LLMChain` is a simple chain that takes in a prompt template, formats it with the user input, and returns the response from an LLM.
|
||||
|
||||
:::tip
|
||||
In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the LLM, but feel free to experiment with other Language Models!
|
||||
:::
|
||||
<Admonition type="tip">
|
||||
In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the
|
||||
LLM, but feel free to experiment with other Language Models!
|
||||
</Admonition>
|
||||
|
||||
The `ZeroShotAgent` takes the `LLMChain` and the `Search` tool as inputs, using the tool to find information when necessary.
|
||||
|
||||
:::info
|
||||
Learn more about the Serp API [here](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi).
|
||||
:::
|
||||
<Admonition type="info">
|
||||
Learn more about the Serp API
|
||||
[here](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi).
|
||||
</Admonition>
|
||||
|
||||
## ⛓️ Langflow Example
|
||||
|
||||
|
|
@ -35,11 +40,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
|
|||
|
||||
#### <a target="\_blank" href="json_files/SerpAPI_Tool.json" download>Download Flow</a>
|
||||
|
||||
:::note LangChain Components 🦜🔗
|
||||
<Admonition type="note" title="LangChain Components 🦜🔗">
|
||||
|
||||
- [`ZeroShotPrompt`](https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/)
|
||||
- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
|
||||
- [`LLMChain`](https://python.langchain.com/docs/modules/chains/foundational/llm_chain)
|
||||
- [`Search`](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi)
|
||||
- [`ZeroShotAgent`](https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent)
|
||||
:::
|
||||
|
||||
</Admonition>
|
||||
|
|
|
|||
465
docs/docs/guidelines/custom-component.mdx
Normal file
465
docs/docs/guidelines/custom-component.mdx
Normal file
|
|
@ -0,0 +1,465 @@
|
|||
---
|
||||
description: Custom Components
|
||||
hide_table_of_contents: true
|
||||
---
|
||||
|
||||
# Custom Components
|
||||
|
||||
A Custom Component has almost infinite possibilities. It can be a simple function that takes a string and returns a string,
|
||||
or it can be a complex function that takes other components, calls APIs, and returns a custom object only you know how to use (which might not be ideal).
|
||||
|
||||
Let's take a look at the basic rules, then we'll talk about the ones that are not so basic.
|
||||
|
||||
## TL;DR
|
||||
|
||||
You need to create a class that inherits from _`CustomComponent`_ and has a _`build`_ method.
|
||||
Use the type annotations of the _`build`_ method to create the fields of the component.
|
||||
Use the _`build_config`_ method to create the config fields of the component (if any).
|
||||
|
||||
Here is an example:
|
||||
|
||||
<CH.Code lineNumbers={false}>
|
||||
|
||||
```python
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Best Component"
|
||||
description = "This is the best component ever"
|
||||
|
||||
def build_config(self) -> dict:
|
||||
cool_tool_names = ["Cool Tool",
|
||||
"Cooler Tool",
|
||||
"Coolest Tool"]
|
||||
return {
|
||||
"description": {"multiline": True},
|
||||
"name": {"is_list": True,
|
||||
"options": cool_tool_names}}
|
||||
|
||||
def build(self, name: str, description: str, chain: Chain) -> Tool:
|
||||
return Tool(name=name,
|
||||
description=description,
|
||||
func=chain.run)
|
||||
```
|
||||
|
||||
</CH.Code>
|
||||
|
||||
## Now, let's go over the rules one by one:
|
||||
|
||||
<CH.Scrollycoding rows={20} className={""}>
|
||||
|
||||
## Rule 1
|
||||
|
||||
The script must contain a **single class** that inherits from _`CustomComponent`_.
|
||||
|
||||
```python
|
||||
# focus
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
# focus
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Custom Component"
|
||||
description = "This is a custom component"
|
||||
|
||||
def build_config(self) -> dict:
|
||||
...
|
||||
|
||||
def build(self):
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Rule 2
|
||||
|
||||
The class must have a _`build`_ method which defines the fields of the component and is used to run it.
|
||||
|
||||
```python
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Custom Component"
|
||||
description = "This is a custom component"
|
||||
|
||||
def build_config(self) -> dict:
|
||||
...
|
||||
|
||||
# focus[5:13]
|
||||
def build(self):
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Rule 3
|
||||
|
||||
The type annotations of the _`build`_ method will be used to create the fields of the component.
|
||||
|
||||
The types supported are:
|
||||
|
||||
- _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_
|
||||
- [_`langchain.chains.base.Chain`_](focus://3)
|
||||
- [_`langchain.PromptTemplate`_](focus://4)
|
||||
- [_`langchain.llms.base.BaseLLM`_](focus://5)
|
||||
- [_`langchain.Tool`_](focus://6)
|
||||
- _`langchain.document_loaders.base.BaseLoader`_
|
||||
- _`langchain.schema.Document`_
|
||||
- _`langchain.text_splitter.TextSplitter`_
|
||||
- _`langchain.vectorstores.base.VectorStore`_
|
||||
- _`langchain.embeddings.base.Embeddings`_
|
||||
- _`langchain.schema.BaseRetriever`_
|
||||
|
||||
```python
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Custom Component"
|
||||
description = "This is a custom component"
|
||||
|
||||
def build_config(self) -> dict:
|
||||
...
|
||||
|
||||
# mark
|
||||
def build(self):
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
```python
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Custom Component"
|
||||
description = "This is a custom component"
|
||||
|
||||
# focus
|
||||
def build_config(self) -> dict:
|
||||
...
|
||||
|
||||
def build(self):
|
||||
...
|
||||
```
|
||||
|
||||
## Rule 4
|
||||
|
||||
The class can have a [_`build_config`_](focus://11:19) method
|
||||
|
||||
- The _`build_config`_ method will be used to create the config fields of the component (if any)
|
||||
- It should always return a _`dict`_
|
||||
|
||||
The _`dict`_ should have the following format:
|
||||
|
||||
- The top level keys are the names of the fields
|
||||
- The values are _`dict`_ with the following keys:
|
||||
|
||||
- _`field_type: str`_: The type of the field (can be any of the types supported by the _`build`_ method)
|
||||
- _`is_list: bool`_: If the field is a list.
|
||||
- _`options: List[str]`_: If the field is a list, the options that will be displayed.
|
||||
- _`multiline: bool`_: If the field is a string, if it should be multiline.
|
||||
- _`input_types: List[str]`_: To be used when you want a _`str`_ field to have connectable handles.
|
||||
- _`display_name: str`_: To change the name of the field
|
||||
- _`advanced: bool`_: To hide the field in the default view
|
||||
- _`password: bool`_: To mask the input
|
||||
- _`required: bool`_: To make the field required
|
||||
- _`info: str`_: To add a tooltip to the field
|
||||
- _`file_types: List[str]`_: This is a requirement if the _`field_type`_ is 'file'
|
||||
(must be used in conjunction with _`suffixes`_)
|
||||
|
||||
Example: _`["json", "yaml", "yml"]`_
|
||||
|
||||
- _`suffixes: List[str]`_: This is a requirement if the _`field_type`_ is 'file' (must be used in conjunction with _`file_types`_, and it must be a list of strings like 'json')
|
||||
|
||||
Example: _`[".json", ".yaml", ".yml"]`_
|
||||
|
||||
---
|
||||
|
||||
```python
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Custom Component"
|
||||
description = "This is a custom component"
|
||||
|
||||
# focus
|
||||
def build_config(self) -> dict:
|
||||
...
|
||||
|
||||
def build(self):
|
||||
...
|
||||
```
|
||||
|
||||
# Example
|
||||
|
||||
Now let's create a custom component that creates a Tool from a name, a description and a chain.
|
||||
|
||||
---
|
||||
|
||||
# Change the name
|
||||
|
||||
We can change the name of the component by adding a _`display_name`_ attribute.
|
||||
|
||||
```python focus=9
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Best Component"
|
||||
description = "This is a custom component"
|
||||
|
||||
def build_config(self) -> dict:
|
||||
...
|
||||
|
||||
def build(self):
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
# Change the description
|
||||
|
||||
We can change the description of the component by adding a _`description`_ attribute.
|
||||
|
||||
```python focus=10
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Best Component"
|
||||
description = "This is the best component ever"
|
||||
|
||||
def build_config(self) -> dict:
|
||||
...
|
||||
|
||||
def build(self):
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
# Add a config
|
||||
|
||||
The _`build_config`_ method will be used to configure the fields of the component.
|
||||
|
||||
- _`multiline`_ will add the possibility of editing text in a spacious text editor.
|
||||
|
||||
- _`is_list`_ is a special option that allows you to add many values. When paired with _`options`_ it will transform it into a dropdown menu with the options you provide.
|
||||
If you set the _`value`_ attribute to one of the options, it will be selected by default.
|
||||
|
||||
```python focus=12:19
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Best Component"
|
||||
description = "This is the best component ever"
|
||||
|
||||
def build_config(self) -> dict:
|
||||
cool_tool_names = ["Cool Tool",
|
||||
"Cooler Tool",
|
||||
"Coolest Tool"]
|
||||
return {
|
||||
"description": {"multiline": True},
|
||||
"name": {"is_list": True,
|
||||
"options": cool_tool_names}}
|
||||
|
||||
def build(self):
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
```python focus=21:25
|
||||
from langflow import CustomComponent
|
||||
from langchain.chains import LLMChain
|
||||
from langchain.chains.base import Chain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import Tool
|
||||
|
||||
class BestComponent(CustomComponent):
|
||||
display_name = "Best Component"
|
||||
description = "This is the best component ever"
|
||||
|
||||
def build_config(self) -> dict:
|
||||
cool_tool_names = ["Cool Tool",
|
||||
"Cooler Tool",
|
||||
"Coolest Tool"]
|
||||
return {
|
||||
"description": {"multiline": True},
|
||||
"name": {"is_list": True,
|
||||
"options": cool_tool_names}}
|
||||
|
||||
def build(self, name: str, description: str, chain: Chain) -> Tool:
|
||||
return Tool(name=name,
|
||||
description=description,
|
||||
func=chain.run)
|
||||
```
|
||||
|
||||
# Add the build method
|
||||
|
||||
The parameters used are:
|
||||
|
||||
- name is a string
|
||||
- description is a string
|
||||
- chain is a Chain
|
||||
- The return type is Tool
|
||||
|
||||
We then instantiate a Tool and return it.
|
||||
|
||||
</CH.Scrollycoding>
|
||||
|
||||
## FlowRunner Example
|
||||
|
||||
Now let's see how to create a component that runs other flows.
|
||||
|
||||
<CH.Scrollycoding rows={20} className={""}>
|
||||
|
||||
```python
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
class MyComponent(CustomComponent):
|
||||
display_name = "Custom Component"
|
||||
|
||||
def build_config(self):
|
||||
...
|
||||
|
||||
def build(self):
|
||||
...
|
||||
|
||||
```
|
||||
|
||||
So, let's start by adding the _`display_name`_ and a _`description`_.
|
||||
|
||||
---
|
||||
|
||||
```python
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
# focus
|
||||
class FlowRunner(CustomComponent):
|
||||
# focus
|
||||
display_name = "Flow Runner"
|
||||
# focus
|
||||
description = "Run other flows"
|
||||
|
||||
def build_config(self):
|
||||
...
|
||||
|
||||
def build(self):
|
||||
...
|
||||
|
||||
```
|
||||
|
||||
That's better.
|
||||
|
||||
---
|
||||
|
||||
```python
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
from langchain.schema import Document
|
||||
|
||||
# focus[6:16]
|
||||
class FlowRunner(CustomComponent):
|
||||
# focus[19:35]
|
||||
display_name = "Flow Runner"
|
||||
# focus[18:35]
|
||||
description = "Run other flows"
|
||||
|
||||
def build_config(self):
|
||||
...
|
||||
|
||||
def build(self):
|
||||
...
|
||||
|
||||
```
|
||||
|
||||
Now let's import Document from the schema module which will be our return type for the _`build`_ method.
|
||||
|
||||
---
|
||||
|
||||
```python
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
from langchain.schema import Document
|
||||
|
||||
class FlowRunner(CustomComponent):
|
||||
display_name = "Flow Runner"
|
||||
description = "Run other flows using a document as input."
|
||||
|
||||
def build_config(self):
|
||||
...
|
||||
|
||||
# focus
|
||||
def build(self, flow_name: str, document: Document) -> Document:
|
||||
...
|
||||
|
||||
```
|
||||
|
||||
Let's add the parameters and the return type to the _`build`_ method.
|
||||
|
||||
---
|
||||
|
||||
```python
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
from langchain.schema import Document
|
||||
|
||||
# focus
|
||||
class FlowRunner(CustomComponent):
|
||||
# focus
|
||||
display_name = "Flow Runner"
|
||||
# focus
|
||||
description = "Run other flows using a document as input."
|
||||
|
||||
def build_config(self):
|
||||
...
|
||||
|
||||
def build(self, flow_name: str, document: Document) -> Document:
|
||||
...
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
</CH.Scrollycoding>
|
||||
|
|
@ -2,6 +2,7 @@ import ThemedImage from "@theme/ThemedImage";
|
|||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import ReactPlayer from "react-player";
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Features
|
||||
|
||||
|
|
@ -34,9 +35,10 @@ import ReactPlayer from "react-player";
|
|||
|
||||
Flows can be exported and imported as JSON files.
|
||||
|
||||
:::caution
|
||||
<Admonition type="caution">
|
||||
Watch out for API keys being stored in local files.
|
||||
:::
|
||||
|
||||
</Admonition>
|
||||
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -1,127 +1,141 @@
|
|||
const lightCodeTheme = require("prism-react-renderer/themes/github");
|
||||
|
||||
const { remarkCodeHike } = require("@code-hike/mdx");
|
||||
// With JSDoc @type annotations, IDEs can provide config autocompletion
|
||||
/** @type {import('@docusaurus/types').DocusaurusConfig} */
|
||||
(
|
||||
module.exports = {
|
||||
title: "Langflow Documentation",
|
||||
tagline: "Langflow is a GUI for LangChain, designed with react-flow",
|
||||
favicon: "img/favicon.ico",
|
||||
url: "https://logspace-ai.github.io",
|
||||
baseUrl: "/",
|
||||
onBrokenLinks: "throw",
|
||||
onBrokenMarkdownLinks: "warn",
|
||||
organizationName: "logspace-ai",
|
||||
projectName: "langflow",
|
||||
trailingSlash: false,
|
||||
customFields: {
|
||||
mendableAnonKey: process.env.MENDABLE_ANON_KEY,
|
||||
},
|
||||
i18n: {
|
||||
defaultLocale: "en",
|
||||
locales: ["en"],
|
||||
},
|
||||
presets: [
|
||||
[
|
||||
"@docusaurus/preset-classic",
|
||||
/** @type {import('@docusaurus/preset-classic').Options} */
|
||||
({
|
||||
docs: {
|
||||
routeBasePath: "/",
|
||||
sidebarPath: require.resolve("./sidebars.js"),
|
||||
path: "docs",
|
||||
// sidebarPath: 'sidebars.js',
|
||||
},
|
||||
theme: {
|
||||
customCss: require.resolve("./src/css/custom.css"),
|
||||
},
|
||||
}),
|
||||
],
|
||||
],
|
||||
plugins: [
|
||||
["docusaurus-node-polyfills", { excludeAliases: ["console"] }],
|
||||
"docusaurus-plugin-image-zoom",
|
||||
// ....
|
||||
async function myPlugin(context, options) {
|
||||
return {
|
||||
name: "docusaurus-tailwindcss",
|
||||
configurePostCss(postcssOptions) {
|
||||
// Appends TailwindCSS and AutoPrefixer.
|
||||
postcssOptions.plugins.push(require("tailwindcss"));
|
||||
postcssOptions.plugins.push(require("autoprefixer"));
|
||||
return postcssOptions;
|
||||
},
|
||||
};
|
||||
},
|
||||
],
|
||||
themeConfig:
|
||||
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
|
||||
module.exports = {
|
||||
title: "Langflow Documentation",
|
||||
tagline: "Langflow is a GUI for LangChain, designed with react-flow",
|
||||
favicon: "img/favicon.ico",
|
||||
url: "https://logspace-ai.github.io",
|
||||
baseUrl: "/",
|
||||
onBrokenLinks: "throw",
|
||||
onBrokenMarkdownLinks: "warn",
|
||||
organizationName: "logspace-ai",
|
||||
projectName: "langflow",
|
||||
trailingSlash: false,
|
||||
customFields: {
|
||||
mendableAnonKey: process.env.MENDABLE_ANON_KEY,
|
||||
},
|
||||
i18n: {
|
||||
defaultLocale: "en",
|
||||
locales: ["en"],
|
||||
},
|
||||
presets: [
|
||||
[
|
||||
"@docusaurus/preset-classic",
|
||||
/** @type {import('@docusaurus/preset-classic').Options} */
|
||||
({
|
||||
navbar: {
|
||||
hideOnScroll: true,
|
||||
title: "Langflow",
|
||||
logo: {
|
||||
alt: "Langflow",
|
||||
src: "img/chain.png",
|
||||
},
|
||||
items: [
|
||||
// right
|
||||
{
|
||||
position: "right",
|
||||
href: "https://github.com/logspace-ai/langflow",
|
||||
position: "right",
|
||||
className: "header-github-link",
|
||||
target: "_blank",
|
||||
rel: null,
|
||||
},
|
||||
{
|
||||
position: "right",
|
||||
href: "https://twitter.com/logspace_ai",
|
||||
position: "right",
|
||||
className: "header-twitter-link",
|
||||
target: "_blank",
|
||||
rel: null,
|
||||
},
|
||||
{
|
||||
position: "right",
|
||||
href: "https://discord.gg/EqksyE2EX9",
|
||||
position: "right",
|
||||
className: "header-discord-link",
|
||||
target: "_blank",
|
||||
rel: null,
|
||||
},
|
||||
docs: {
|
||||
beforeDefaultRemarkPlugins: [
|
||||
[
|
||||
remarkCodeHike,
|
||||
{ theme: "monokai", showCopyButton: true, lineNumbers: true },
|
||||
],
|
||||
],
|
||||
routeBasePath: "/",
|
||||
sidebarPath: require.resolve("./sidebars.js"),
|
||||
path: "docs",
|
||||
// sidebarPath: 'sidebars.js',
|
||||
},
|
||||
theme: {
|
||||
customCss: [
|
||||
require.resolve("@code-hike/mdx/styles.css"),
|
||||
require.resolve("./src/css/custom.css"),
|
||||
],
|
||||
},
|
||||
tableOfContents: {
|
||||
minHeadingLevel: 2,
|
||||
maxHeadingLevel: 5,
|
||||
},
|
||||
colorMode: {
|
||||
defaultMode: "light",
|
||||
disableSwitch: true,
|
||||
respectPrefersColorScheme: false,
|
||||
},
|
||||
announcementBar: {
|
||||
content:
|
||||
'⭐️ If you like ⛓️Langflow, star it on <a target="_blank" rel="noopener noreferrer" href="https://github.com/logspace-ai/langflow">GitHub</a>! ⭐️',
|
||||
backgroundColor: "#B53D38", //Mustard Yellow #D19900 #D4B20B - Salmon #E9967A
|
||||
textColor: "#fff",
|
||||
isCloseable: false,
|
||||
},
|
||||
footer: {
|
||||
links: [],
|
||||
copyright: `Copyright © ${new Date().getFullYear()} Logspace.`,
|
||||
},
|
||||
zoom: {
|
||||
selector: ".markdown :not(a) > img:not(.no-zoom)",
|
||||
background: {
|
||||
light: "rgba(240, 240, 240, 0.9)",
|
||||
},
|
||||
config: {},
|
||||
},
|
||||
prism: {
|
||||
theme: lightCodeTheme,
|
||||
},
|
||||
}),
|
||||
}
|
||||
);
|
||||
],
|
||||
],
|
||||
plugins: [
|
||||
["docusaurus-node-polyfills", { excludeAliases: ["console"] }],
|
||||
"docusaurus-plugin-image-zoom",
|
||||
// ....
|
||||
async function myPlugin(context, options) {
|
||||
return {
|
||||
name: "docusaurus-tailwindcss",
|
||||
configurePostCss(postcssOptions) {
|
||||
// Appends TailwindCSS and AutoPrefixer.
|
||||
postcssOptions.plugins.push(require("tailwindcss"));
|
||||
postcssOptions.plugins.push(require("autoprefixer"));
|
||||
return postcssOptions;
|
||||
},
|
||||
};
|
||||
},
|
||||
],
|
||||
themes: ["mdx-v2"],
|
||||
themeConfig:
|
||||
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
|
||||
({
|
||||
navbar: {
|
||||
hideOnScroll: true,
|
||||
title: "Langflow",
|
||||
logo: {
|
||||
alt: "Langflow",
|
||||
src: "img/chain.png",
|
||||
},
|
||||
items: [
|
||||
// right
|
||||
{
|
||||
position: "right",
|
||||
href: "https://github.com/logspace-ai/langflow",
|
||||
position: "right",
|
||||
className: "header-github-link",
|
||||
target: "_blank",
|
||||
rel: null,
|
||||
},
|
||||
{
|
||||
position: "right",
|
||||
href: "https://twitter.com/logspace_ai",
|
||||
position: "right",
|
||||
className: "header-twitter-link",
|
||||
target: "_blank",
|
||||
rel: null,
|
||||
},
|
||||
{
|
||||
position: "right",
|
||||
href: "https://discord.gg/EqksyE2EX9",
|
||||
position: "right",
|
||||
className: "header-discord-link",
|
||||
target: "_blank",
|
||||
rel: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
tableOfContents: {
|
||||
minHeadingLevel: 2,
|
||||
maxHeadingLevel: 5,
|
||||
},
|
||||
colorMode: {
|
||||
defaultMode: "light",
|
||||
disableSwitch: true,
|
||||
respectPrefersColorScheme: false,
|
||||
},
|
||||
announcementBar: {
|
||||
content:
|
||||
'⭐️ If you like ⛓️Langflow, star it on <a target="_blank" rel="noopener noreferrer" href="https://github.com/logspace-ai/langflow">GitHub</a>! ⭐️',
|
||||
backgroundColor: "#B53D38", //Mustard Yellow #D19900 #D4B20B - Salmon #E9967A
|
||||
textColor: "#fff",
|
||||
isCloseable: false,
|
||||
},
|
||||
footer: {
|
||||
links: [],
|
||||
copyright: `Copyright © ${new Date().getFullYear()} Logspace.`,
|
||||
},
|
||||
zoom: {
|
||||
selector: ".markdown :not(a) > img:not(.no-zoom)",
|
||||
background: {
|
||||
light: "rgba(240, 240, 240, 0.9)",
|
||||
},
|
||||
config: {},
|
||||
},
|
||||
// prism: {
|
||||
// theme: require("prism-react-renderer/themes/dracula"),
|
||||
// },
|
||||
docs: {
|
||||
sidebar: {
|
||||
hideable: true,
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
|
|
|||
2033
docs/package-lock.json
generated
2033
docs/package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
|
@ -15,12 +15,13 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@babel/preset-react": "^7.22.3",
|
||||
"@code-hike/mdx": "^0.9.0",
|
||||
"@docusaurus/core": "2.4.1",
|
||||
"@docusaurus/plugin-ideal-image": "^2.4.1",
|
||||
"@docusaurus/preset-classic": "2.4.1",
|
||||
"@docusaurus/theme-classic": "^2.4.1",
|
||||
"@docusaurus/theme-search-algolia": "^2.4.1",
|
||||
"@mdx-js/react": "^1.6.22",
|
||||
"@mdx-js/react": "^2.3.0",
|
||||
"@mendable/search": "^0.0.114",
|
||||
"@pbe/react-yandex-maps": "^1.2.4",
|
||||
"@prismicio/client": "^7.0.1",
|
||||
|
|
@ -28,6 +29,7 @@
|
|||
"autoprefixer": "^10.4.14",
|
||||
"clsx": "^1.2.1",
|
||||
"docusaurus-plugin-image-zoom": "^0.1.4",
|
||||
"docusaurus-theme-mdx-v2": "^0.1.2",
|
||||
"jquery": "^3.7.0",
|
||||
"medium-zoom": "^1.0.8",
|
||||
"node-fetch": "^3.3.1",
|
||||
|
|
@ -67,4 +69,4 @@
|
|||
"engines": {
|
||||
"node": ">=16.14"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ module.exports = {
|
|||
"guidelines/prompt-customization",
|
||||
"guidelines/chat-interface",
|
||||
"guidelines/chat-widget",
|
||||
"guidelines/custom-component",
|
||||
],
|
||||
},
|
||||
{
|
||||
|
|
@ -31,6 +32,7 @@ module.exports = {
|
|||
items: [
|
||||
"components/agents",
|
||||
"components/chains",
|
||||
"components/custom",
|
||||
"components/embeddings",
|
||||
"components/llms",
|
||||
"components/loaders",
|
||||
|
|
|
|||
|
|
@ -3,17 +3,19 @@
|
|||
* bundles Infima by default. Infima is a CSS framework designed to
|
||||
* work well for content-centric websites.
|
||||
*/
|
||||
:root {
|
||||
:root {
|
||||
--ifm-background-color: var(--token-primary-bg-c);
|
||||
--ifm-navbar-link-hover-color: initial;
|
||||
--ifm-navbar-padding-vertical: 0;
|
||||
--ifm-navbar-item-padding-vertical: 0;
|
||||
--ifm-font-family-base: -apple-system, BlinkMacSystemFont, Inter, Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI emoji';
|
||||
--ifm-font-family-monospace: 'SFMono-Regular', 'Roboto Mono', Consolas, 'Liberation Mono', Menlo, Courier, monospace;
|
||||
--ifm-font-family-base: -apple-system, BlinkMacSystemFont, Inter, Helvetica,
|
||||
Arial, sans-serif, "Apple Color Emoji", "Segoe UI emoji";
|
||||
--ifm-font-family-monospace: "SFMono-Regular", "Roboto Mono", Consolas,
|
||||
"Liberation Mono", Menlo, Courier, monospace;
|
||||
}
|
||||
|
||||
.theme-doc-sidebar-item-category.menu__list-item:not(:first-child) {
|
||||
margin-top: 1.5rem!important;
|
||||
margin-top: 1.5rem !important;
|
||||
}
|
||||
|
||||
.docusaurus-highlight-code-line {
|
||||
|
|
@ -31,7 +33,7 @@
|
|||
transform: skewY(6deg);
|
||||
}
|
||||
|
||||
[class^='announcementBar'] {
|
||||
[class^="announcementBar"] {
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
|
|
@ -112,7 +114,7 @@ body {
|
|||
}
|
||||
|
||||
.header-github-link:before {
|
||||
content: '';
|
||||
content: "";
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
display: flex;
|
||||
|
|
@ -126,7 +128,7 @@ body {
|
|||
}
|
||||
|
||||
.header-twitter-link::before {
|
||||
content: '';
|
||||
content: "";
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
display: flex;
|
||||
|
|
@ -140,7 +142,7 @@ body {
|
|||
}
|
||||
|
||||
.header-discord-link::before {
|
||||
content: '';
|
||||
content: "";
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
display: flex;
|
||||
|
|
@ -148,7 +150,6 @@ body {
|
|||
background-size: contain;
|
||||
}
|
||||
|
||||
|
||||
/* Images */
|
||||
.image-rendering-crisp {
|
||||
image-rendering: crisp-edges;
|
||||
|
|
@ -164,7 +165,7 @@ body {
|
|||
.img-center {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
width: 100%,
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.resized-image {
|
||||
|
|
@ -188,4 +189,22 @@ body {
|
|||
.mendable-search {
|
||||
width: 140px;
|
||||
}
|
||||
}
|
||||
}
|
||||
/*
|
||||
.ch-scrollycoding {
|
||||
gap: 10rem !important;
|
||||
} */
|
||||
|
||||
.ch-scrollycoding-content {
|
||||
max-width: 55% !important;
|
||||
min-width: 40% !important;
|
||||
}
|
||||
|
||||
.ch-scrollycoding-sticker {
|
||||
max-width: 60% !important;
|
||||
min-width: 45% !important;
|
||||
}
|
||||
|
||||
.ch-scrollycoding-step-content {
|
||||
min-height: 70px;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
from importlib import metadata
|
||||
from langflow.cache import cache_manager # noqa: E402
|
||||
from langflow.processing.process import load_flow_from_json # noqa: E402
|
||||
from langflow.cache import cache_manager
|
||||
from langflow.processing.process import load_flow_from_json
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
try:
|
||||
__version__ = metadata.version(__package__)
|
||||
|
|
@ -9,5 +10,4 @@ except metadata.PackageNotFoundError:
|
|||
__version__ = ""
|
||||
del metadata # optional, avoids polluting the results of dir(__package__)
|
||||
|
||||
|
||||
__all__ = ["load_flow_from_json", "cache_manager"]
|
||||
__all__ = ["load_flow_from_json", "cache_manager", "CustomComponent"]
|
||||
|
|
|
|||
|
|
@ -2,7 +2,8 @@ import os
|
|||
import sys
|
||||
import time
|
||||
import httpx
|
||||
from multiprocess import Process, cpu_count # type: ignore
|
||||
from langflow.utils.util import get_number_of_workers
|
||||
from multiprocess import Process # type: ignore
|
||||
import platform
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
|
@ -20,18 +21,13 @@ from dotenv import load_dotenv
|
|||
app = typer.Typer()
|
||||
|
||||
|
||||
def get_number_of_workers(workers=None):
|
||||
if workers == -1:
|
||||
workers = (cpu_count() * 2) + 1
|
||||
return workers
|
||||
|
||||
|
||||
def update_settings(
|
||||
config: str,
|
||||
cache: str,
|
||||
dev: bool = False,
|
||||
database_url: Optional[str] = None,
|
||||
remove_api_keys: bool = False,
|
||||
component_path: Optional[Path] = None,
|
||||
):
|
||||
"""Update the settings from a config file."""
|
||||
|
||||
|
|
@ -39,13 +35,19 @@ def update_settings(
|
|||
database_url = database_url or os.getenv("langflow_database_url")
|
||||
|
||||
if config:
|
||||
logger.debug(f"Loading settings from {config}")
|
||||
settings.update_from_yaml(config, dev=dev)
|
||||
if database_url:
|
||||
settings.update_settings(database_url=database_url)
|
||||
if remove_api_keys:
|
||||
logger.debug(f"Setting remove_api_keys to {remove_api_keys}")
|
||||
settings.update_settings(remove_api_keys=remove_api_keys)
|
||||
if cache:
|
||||
logger.debug(f"Setting cache to {cache}")
|
||||
settings.update_settings(cache=cache)
|
||||
if component_path:
|
||||
logger.debug(f"Adding component path {component_path}")
|
||||
settings.update_settings(component_path=component_path)
|
||||
|
||||
|
||||
def load_params():
|
||||
|
|
@ -120,10 +122,15 @@ def serve(
|
|||
"127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
|
||||
),
|
||||
workers: int = typer.Option(
|
||||
1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
|
||||
-1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
|
||||
),
|
||||
timeout: int = typer.Option(60, help="Worker timeout in seconds."),
|
||||
timeout: int = typer.Option(300, help="Worker timeout in seconds."),
|
||||
port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"),
|
||||
component_path: Optional[Path] = typer.Option(
|
||||
Path(__file__).parent,
|
||||
help="Path to the directory containing custom components.",
|
||||
envvar="LANGFLOW_COMPONENT_PATH",
|
||||
),
|
||||
config: str = typer.Option("config.yaml", help="Path to the configuration file."),
|
||||
# .env file param
|
||||
env_file: Path = typer.Option(
|
||||
|
|
@ -181,6 +188,7 @@ def serve(
|
|||
database_url=database_url,
|
||||
remove_api_keys=remove_api_keys,
|
||||
cache=cache,
|
||||
component_path=component_path,
|
||||
)
|
||||
# create path object if path is provided
|
||||
static_files_dir: Optional[Path] = Path(path) if path else None
|
||||
|
|
@ -298,7 +306,7 @@ def run_langflow(host, port, log_level, options, app):
|
|||
Run Langflow server on localhost
|
||||
"""
|
||||
try:
|
||||
if platform.system() in ["Darwin", "Windows"]:
|
||||
if platform.system() in ["Windows"]:
|
||||
# Run using uvicorn on MacOS and Windows
|
||||
# Windows doesn't support gunicorn
|
||||
# MacOS requires an env variable to be set to use gunicorn
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ from langflow.api.v1 import (
|
|||
validate_router,
|
||||
flows_router,
|
||||
flow_styles_router,
|
||||
component_router,
|
||||
)
|
||||
|
||||
router = APIRouter(
|
||||
|
|
@ -14,5 +15,6 @@ router = APIRouter(
|
|||
router.include_router(chat_router)
|
||||
router.include_router(endpoints_router)
|
||||
router.include_router(validate_router)
|
||||
router.include_router(component_router)
|
||||
router.include_router(flows_router)
|
||||
router.include_router(flow_styles_router)
|
||||
|
|
|
|||
|
|
@ -57,3 +57,12 @@ def build_input_keys_response(langchain_object, artifacts):
|
|||
input_keys_response["template"] = langchain_object.prompt.template
|
||||
|
||||
return input_keys_response
|
||||
|
||||
|
||||
def merge_nested_dicts(dict1, dict2):
|
||||
for key, value in dict2.items():
|
||||
if isinstance(value, dict) and isinstance(dict1.get(key), dict):
|
||||
dict1[key] = merge_nested_dicts(dict1[key], value)
|
||||
else:
|
||||
dict1[key] = value
|
||||
return dict1
|
||||
|
|
|
|||
|
|
@ -3,10 +3,12 @@ from langflow.api.v1.validate import router as validate_router
|
|||
from langflow.api.v1.chat import router as chat_router
|
||||
from langflow.api.v1.flows import router as flows_router
|
||||
from langflow.api.v1.flow_styles import router as flow_styles_router
|
||||
from langflow.api.v1.components import router as component_router
|
||||
|
||||
__all__ = [
|
||||
"chat_router",
|
||||
"endpoints_router",
|
||||
"component_router",
|
||||
"validate_router",
|
||||
"flows_router",
|
||||
"flow_styles_router",
|
||||
|
|
|
|||
|
|
@ -91,8 +91,8 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
|
|||
# This is to emulate the stream of tokens
|
||||
for resp in resps:
|
||||
await self.websocket.send_json(resp.dict())
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending response: {exc}")
|
||||
|
||||
async def on_tool_error(
|
||||
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ async def chat(client_id: str, websocket: WebSocket):
|
|||
message = "Please, build the flow before sending messages"
|
||||
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message)
|
||||
except WebSocketException as exc:
|
||||
logger.error(exc)
|
||||
logger.error(f"Websocket error: {exc}")
|
||||
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc))
|
||||
|
||||
|
||||
|
|
@ -56,7 +56,7 @@ async def init_build(graph_data: dict, flow_id: str):
|
|||
|
||||
return InitResponse(flowId=flow_id)
|
||||
except Exception as exc:
|
||||
logger.error(exc)
|
||||
logger.error(f"Error initializing build: {exc}")
|
||||
return HTTPException(status_code=500, detail=str(exc))
|
||||
|
||||
|
||||
|
|
@ -74,7 +74,7 @@ async def build_status(flow_id: str):
|
|||
)
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(exc)
|
||||
logger.error(f"Error checking build status: {exc}")
|
||||
return HTTPException(status_code=500, detail=str(exc))
|
||||
|
||||
|
||||
|
|
@ -177,5 +177,5 @@ async def stream_build(flow_id: str):
|
|||
try:
|
||||
return StreamingResponse(event_stream(flow_id), media_type="text/event-stream")
|
||||
except Exception as exc:
|
||||
logger.error(exc)
|
||||
logger.error(f"Error streaming build: {exc}")
|
||||
raise HTTPException(status_code=500, detail=str(exc))
|
||||
|
|
|
|||
77
src/backend/langflow/api/v1/components.py
Normal file
77
src/backend/langflow/api/v1/components.py
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
from datetime import timezone
|
||||
from typing import List
|
||||
from uuid import UUID
|
||||
from langflow.database.models.component import Component, ComponentModel
|
||||
from langflow.database.base import get_session
|
||||
from sqlmodel import Session, select
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
COMPONENT_NOT_FOUND = "Component not found"
|
||||
COMPONENT_ALREADY_EXISTS = "A component with the same id already exists."
|
||||
COMPONENT_DELETED = "Component deleted"
|
||||
|
||||
|
||||
router = APIRouter(prefix="/components", tags=["Components"])
|
||||
|
||||
|
||||
@router.post("/", response_model=Component)
|
||||
def create_component(component: ComponentModel, db: Session = Depends(get_session)):
|
||||
db_component = Component(**component.dict())
|
||||
try:
|
||||
db.add(db_component)
|
||||
db.commit()
|
||||
db.refresh(db_component)
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=COMPONENT_ALREADY_EXISTS,
|
||||
) from e
|
||||
return db_component
|
||||
|
||||
|
||||
@router.get("/{component_id}", response_model=Component)
|
||||
def read_component(component_id: UUID, db: Session = Depends(get_session)):
|
||||
if component := db.get(Component, component_id):
|
||||
return component
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND)
|
||||
|
||||
|
||||
@router.get("/", response_model=List[Component])
|
||||
def read_components(skip: int = 0, limit: int = 50, db: Session = Depends(get_session)):
|
||||
query = select(Component)
|
||||
query = query.offset(skip).limit(limit)
|
||||
|
||||
return db.execute(query).fetchall()
|
||||
|
||||
|
||||
@router.patch("/{component_id}", response_model=Component)
|
||||
def update_component(
|
||||
component_id: UUID, component: ComponentModel, db: Session = Depends(get_session)
|
||||
):
|
||||
db_component = db.get(Component, component_id)
|
||||
if not db_component:
|
||||
raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND)
|
||||
component_data = component.dict(exclude_unset=True)
|
||||
|
||||
for key, value in component_data.items():
|
||||
setattr(db_component, key, value)
|
||||
|
||||
db_component.update_at = datetime.now(timezone.utc)
|
||||
db.commit()
|
||||
db.refresh(db_component)
|
||||
return db_component
|
||||
|
||||
|
||||
@router.delete("/{component_id}")
|
||||
def delete_component(component_id: UUID, db: Session = Depends(get_session)):
|
||||
component = db.get(Component, component_id)
|
||||
if not component:
|
||||
raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND)
|
||||
db.delete(component)
|
||||
db.commit()
|
||||
return {"detail": COMPONENT_DELETED}
|
||||
|
|
@ -1,17 +1,34 @@
|
|||
from typing import Optional
|
||||
from http import HTTPStatus
|
||||
from typing import Annotated, Optional
|
||||
|
||||
from langflow.cache.utils import save_uploaded_file
|
||||
from langflow.database.models.flow import Flow
|
||||
from langflow.processing.process import process_graph_cached, process_tweaks
|
||||
from langflow.utils.logger import logger
|
||||
from langflow.settings import settings
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body
|
||||
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
from langflow.interface.custom.directory_reader import (
|
||||
CustomComponentPathValueError,
|
||||
)
|
||||
|
||||
from langflow.api.v1.schemas import (
|
||||
ProcessResponse,
|
||||
UploadFileResponse,
|
||||
CustomComponentCode,
|
||||
)
|
||||
|
||||
from langflow.api.utils import merge_nested_dicts
|
||||
|
||||
from langflow.interface.types import (
|
||||
build_langchain_types_dict,
|
||||
build_langchain_template_custom_component,
|
||||
build_langchain_custom_component_list_from_path,
|
||||
)
|
||||
|
||||
from langflow.interface.types import langchain_types_dict
|
||||
from langflow.database.base import get_session
|
||||
from sqlmodel import Session
|
||||
|
||||
|
|
@ -21,7 +38,47 @@ router = APIRouter(tags=["Base"])
|
|||
|
||||
@router.get("/all")
|
||||
def get_all():
|
||||
return langchain_types_dict
|
||||
native_components = build_langchain_types_dict()
|
||||
|
||||
# custom_components is a list of dicts
|
||||
# need to merge all the keys into one dict
|
||||
custom_components_from_file = {}
|
||||
if settings.component_path:
|
||||
custom_component_dicts = [
|
||||
build_langchain_custom_component_list_from_path(str(path))
|
||||
for path in settings.component_path
|
||||
]
|
||||
for custom_component_dict in custom_component_dicts:
|
||||
custom_components_from_file = merge_nested_dicts(
|
||||
custom_components_from_file, custom_component_dict
|
||||
)
|
||||
return merge_nested_dicts(native_components, custom_components_from_file)
|
||||
|
||||
|
||||
@router.get("/load_custom_component_from_path")
|
||||
def get_load_custom_component_from_path(path: str):
|
||||
try:
|
||||
data = build_langchain_custom_component_list_from_path(path)
|
||||
except CustomComponentPathValueError as err:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={"error": type(err).__name__, "traceback": str(err)},
|
||||
) from err
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@router.get("/load_custom_component_from_path_TEST")
|
||||
def get_load_custom_component_from_path_test(path: str):
|
||||
from langflow.interface.custom.directory_reader import (
|
||||
DirectoryReader,
|
||||
)
|
||||
|
||||
reader = DirectoryReader(path, False)
|
||||
file_list = reader.get_files()
|
||||
data = reader.build_component_menu_list(file_list)
|
||||
|
||||
return reader.filter_loaded_components(data, True)
|
||||
|
||||
|
||||
# For backwards compatibility we will keep the old endpoint
|
||||
|
|
@ -31,6 +88,7 @@ async def process_flow(
|
|||
flow_id: str,
|
||||
inputs: Optional[dict] = None,
|
||||
tweaks: Optional[dict] = None,
|
||||
clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821
|
||||
session: Session = Depends(get_session),
|
||||
):
|
||||
"""
|
||||
|
|
@ -50,7 +108,7 @@ async def process_flow(
|
|||
graph_data = process_tweaks(graph_data, tweaks)
|
||||
except Exception as exc:
|
||||
logger.error(f"Error processing tweaks: {exc}")
|
||||
response = process_graph_cached(graph_data, inputs)
|
||||
response = process_graph_cached(graph_data, inputs, clear_cache)
|
||||
return ProcessResponse(
|
||||
result=response,
|
||||
)
|
||||
|
|
@ -60,7 +118,11 @@ async def process_flow(
|
|||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
|
||||
|
||||
@router.post("/upload/{flow_id}", response_model=UploadFileResponse, status_code=201)
|
||||
@router.post(
|
||||
"/upload/{flow_id}",
|
||||
response_model=UploadFileResponse,
|
||||
status_code=HTTPStatus.CREATED,
|
||||
)
|
||||
async def create_upload_file(file: UploadFile, flow_id: str):
|
||||
# Cache file
|
||||
try:
|
||||
|
|
@ -81,3 +143,13 @@ def get_version():
|
|||
from langflow import __version__
|
||||
|
||||
return {"version": __version__}
|
||||
|
||||
|
||||
@router.post("/custom_component", status_code=HTTPStatus.OK)
|
||||
async def custom_component(
|
||||
raw_code: CustomComponentCode,
|
||||
):
|
||||
extractor = CustomComponent(code=raw_code.code)
|
||||
extractor.is_check_valid()
|
||||
|
||||
return build_langchain_template_custom_component(extractor)
|
||||
|
|
|
|||
|
|
@ -116,3 +116,20 @@ class StreamData(BaseModel):
|
|||
|
||||
def __str__(self) -> str:
|
||||
return f"event: {self.event}\ndata: {json.dumps(self.data)}\n\n"
|
||||
|
||||
|
||||
class CustomComponentCode(BaseModel):
|
||||
code: str
|
||||
|
||||
|
||||
class CustomComponentResponseError(BaseModel):
|
||||
detail: str
|
||||
traceback: str
|
||||
|
||||
|
||||
class ComponentListCreate(BaseModel):
|
||||
flows: List[FlowCreate]
|
||||
|
||||
|
||||
class ComponentListRead(BaseModel):
|
||||
flows: List[FlowRead]
|
||||
|
|
|
|||
|
|
@ -111,7 +111,7 @@ class ChatManager:
|
|||
# This is to catch the following error:
|
||||
# Unexpected ASGI message 'websocket.close', after sending 'websocket.close'
|
||||
if "after sending" in str(exc):
|
||||
logger.error(exc)
|
||||
logger.error(f"Error closing connection: {exc}")
|
||||
|
||||
async def process_message(
|
||||
self, client_id: str, payload: Dict, langchain_object: Any
|
||||
|
|
@ -197,13 +197,13 @@ class ChatManager:
|
|||
langchain_object = self.in_memory_cache.get(client_id)
|
||||
await self.process_message(client_id, payload, langchain_object)
|
||||
|
||||
except Exception as e:
|
||||
except Exception as exc:
|
||||
# Handle any exceptions that might occur
|
||||
logger.error(e)
|
||||
logger.error(f"Error handling websocket: {exc}")
|
||||
await self.close_connection(
|
||||
client_id=client_id,
|
||||
code=status.WS_1011_INTERNAL_ERROR,
|
||||
reason=str(e)[:120],
|
||||
reason=str(exc)[:120],
|
||||
)
|
||||
finally:
|
||||
try:
|
||||
|
|
@ -212,6 +212,6 @@ class ChatManager:
|
|||
code=status.WS_1000_NORMAL_CLOSURE,
|
||||
reason="Client disconnected",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
except Exception as exc:
|
||||
logger.error(f"Error closing connection: {exc}")
|
||||
self.disconnect(client_id)
|
||||
|
|
|
|||
|
|
@ -290,3 +290,6 @@ output_parsers:
|
|||
documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured"
|
||||
ResponseSchema:
|
||||
documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured"
|
||||
custom_components:
|
||||
CustomComponent:
|
||||
documentation: ""
|
||||
|
|
|
|||
|
|
@ -31,6 +31,9 @@ CUSTOM_NODES = {
|
|||
"MidJourneyPromptChain": frontend_node.chains.MidJourneyPromptChainNode(),
|
||||
"load_qa_chain": frontend_node.chains.CombineDocsChainNode(),
|
||||
},
|
||||
"custom_components": {
|
||||
"CustomComponent": frontend_node.custom_components.CustomComponentFrontendNode(),
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
from contextlib import contextmanager
|
||||
from langflow.settings import settings
|
||||
from sqlmodel import SQLModel, Session, create_engine
|
||||
from langflow.utils.logger import logger
|
||||
|
|
@ -32,6 +33,19 @@ def create_db_and_tables():
|
|||
logger.debug("Database and tables created successfully")
|
||||
|
||||
|
||||
def get_session():
|
||||
with Session(engine) as session:
|
||||
@contextmanager
|
||||
def session_getter():
|
||||
try:
|
||||
session = Session(engine)
|
||||
yield session
|
||||
except Exception as e:
|
||||
print("Session rollback because of exception:", e)
|
||||
session.rollback()
|
||||
raise
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
def get_session():
|
||||
with session_getter() as session:
|
||||
yield session
|
||||
|
|
|
|||
29
src/backend/langflow/database/models/component.py
Normal file
29
src/backend/langflow/database/models/component.py
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
from langflow.database.models.base import SQLModelSerializable, SQLModel
|
||||
from sqlmodel import Field
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
import uuid
|
||||
|
||||
|
||||
class Component(SQLModelSerializable, table=True):
|
||||
id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
|
||||
frontend_node_id: uuid.UUID = Field(index=True)
|
||||
name: str = Field(index=True)
|
||||
description: Optional[str] = Field(default=None)
|
||||
python_code: Optional[str] = Field(default=None)
|
||||
return_type: Optional[str] = Field(default=None)
|
||||
is_disabled: bool = Field(default=False)
|
||||
is_read_only: bool = Field(default=False)
|
||||
create_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
update_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
|
||||
|
||||
class ComponentModel(SQLModel):
|
||||
id: uuid.UUID = Field(default_factory=uuid.uuid4)
|
||||
frontend_node_id: uuid.UUID = Field(default=uuid.uuid4())
|
||||
name: str = Field(default="")
|
||||
description: Optional[str] = None
|
||||
python_code: Optional[str] = None
|
||||
return_type: Optional[str] = None
|
||||
is_disabled: bool = False
|
||||
is_read_only: bool = False
|
||||
|
|
@ -77,6 +77,8 @@ class Graph:
|
|||
|
||||
def _validate_nodes(self) -> None:
|
||||
"""Check that all nodes have edges"""
|
||||
if len(self.nodes) == 1:
|
||||
return
|
||||
for node in self.nodes:
|
||||
if not self._validate_node(node):
|
||||
raise ValueError(
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ from langflow.interface.vector_store.base import vectorstore_creator
|
|||
from langflow.interface.wrappers.base import wrapper_creator
|
||||
from langflow.interface.output_parsers.base import output_parser_creator
|
||||
from langflow.interface.retrievers.base import retriever_creator
|
||||
|
||||
from langflow.interface.custom.base import custom_component_creator
|
||||
from typing import Dict, Type
|
||||
|
||||
|
||||
|
|
@ -32,5 +32,6 @@ VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = {
|
|||
**{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()},
|
||||
**{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()},
|
||||
**{t: types.OutputParserVertex for t in output_parser_creator.to_list()},
|
||||
**{t: types.CustomComponentVertex for t in custom_component_creator.to_list()},
|
||||
**{t: types.RetrieverVertex for t in retriever_creator.to_list()},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -239,3 +239,12 @@ class PromptVertex(Vertex):
|
|||
class OutputParserVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="output_parsers")
|
||||
|
||||
|
||||
class CustomComponentVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="custom_components")
|
||||
|
||||
def _built_object_repr(self):
|
||||
if self.artifacts and "repr" in self.artifacts:
|
||||
return self.artifacts["repr"] or super()._built_object_repr()
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ class LangChainTypeCreator(BaseModel, ABC):
|
|||
for name, value_dict in type_settings.items()
|
||||
}
|
||||
except AttributeError as exc:
|
||||
logger.error(exc)
|
||||
logger.error(f"Error getting settings for {self.type_name}: {exc}")
|
||||
|
||||
self.name_docs_dict = {}
|
||||
return self.name_docs_dict
|
||||
|
|
|
|||
4
src/backend/langflow/interface/custom/__init__.py
Normal file
4
src/backend/langflow/interface/custom/__init__.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
from langflow.interface.custom.base import CustomComponentCreator
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
__all__ = ["CustomComponentCreator", "CustomComponent"]
|
||||
48
src/backend/langflow/interface/custom/base.py
Normal file
48
src/backend/langflow/interface/custom/base.py
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
from typing import Any, Dict, List, Optional, Type
|
||||
|
||||
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
|
||||
# from langflow.interface.custom.custom import CustomComponent
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
from langflow.template.frontend_node.custom_components import (
|
||||
CustomComponentFrontendNode,
|
||||
)
|
||||
from langflow.utils.logger import logger
|
||||
|
||||
# Assuming necessary imports for Field, Template, and FrontendNode classes
|
||||
|
||||
|
||||
class CustomComponentCreator(LangChainTypeCreator):
|
||||
type_name: str = "custom_components"
|
||||
|
||||
@property
|
||||
def frontend_node_class(self) -> Type[CustomComponentFrontendNode]:
|
||||
return CustomComponentFrontendNode
|
||||
|
||||
@property
|
||||
def type_to_loader_dict(self) -> Dict:
|
||||
if self.type_dict is None:
|
||||
self.type_dict: dict[str, Any] = {
|
||||
"CustomComponent": CustomComponent,
|
||||
}
|
||||
return self.type_dict
|
||||
|
||||
def get_signature(self, name: str) -> Optional[Dict]:
|
||||
from langflow.custom.customs import get_custom_nodes
|
||||
|
||||
try:
|
||||
if name in get_custom_nodes(self.type_name).keys():
|
||||
return get_custom_nodes(self.type_name)[name]
|
||||
except ValueError as exc:
|
||||
raise ValueError(f"CustomComponent {name} not found: {exc}") from exc
|
||||
except AttributeError as exc:
|
||||
logger.error(f"CustomComponent {name} not loaded: {exc}")
|
||||
return None
|
||||
return None
|
||||
|
||||
def to_list(self) -> List[str]:
|
||||
return list(self.type_to_loader_dict.keys())
|
||||
|
||||
|
||||
custom_component_creator = CustomComponentCreator()
|
||||
272
src/backend/langflow/interface/custom/code_parser.py
Normal file
272
src/backend/langflow/interface/custom/code_parser.py
Normal file
|
|
@ -0,0 +1,272 @@
|
|||
import ast
|
||||
import inspect
|
||||
import traceback
|
||||
|
||||
from typing import Dict, Any, List, Type, Union
|
||||
from fastapi import HTTPException
|
||||
from langflow.interface.custom.schema import CallableCodeDetails, ClassCodeDetails
|
||||
|
||||
|
||||
class CodeSyntaxError(HTTPException):
|
||||
pass
|
||||
|
||||
|
||||
class CodeParser:
|
||||
"""
|
||||
A parser for Python source code, extracting code details.
|
||||
"""
|
||||
|
||||
def __init__(self, code: Union[str, Type]) -> None:
|
||||
"""
|
||||
Initializes the parser with the provided code.
|
||||
"""
|
||||
if isinstance(code, type):
|
||||
if not inspect.isclass(code):
|
||||
raise ValueError("The provided code must be a class.")
|
||||
# If the code is a class, get its source code
|
||||
code = inspect.getsource(code)
|
||||
self.code = code
|
||||
self.data: Dict[str, Any] = {
|
||||
"imports": [],
|
||||
"functions": [],
|
||||
"classes": [],
|
||||
"global_vars": [],
|
||||
}
|
||||
self.handlers = {
|
||||
ast.Import: self.parse_imports,
|
||||
ast.ImportFrom: self.parse_imports,
|
||||
ast.FunctionDef: self.parse_functions,
|
||||
ast.ClassDef: self.parse_classes,
|
||||
ast.Assign: self.parse_global_vars,
|
||||
}
|
||||
|
||||
def __get_tree(self):
|
||||
"""
|
||||
Parses the provided code to validate its syntax.
|
||||
It tries to parse the code into an abstract syntax tree (AST).
|
||||
"""
|
||||
try:
|
||||
tree = ast.parse(self.code)
|
||||
except SyntaxError as err:
|
||||
raise CodeSyntaxError(
|
||||
status_code=400,
|
||||
detail={"error": err.msg, "traceback": traceback.format_exc()},
|
||||
) from err
|
||||
|
||||
return tree
|
||||
|
||||
def parse_node(self, node: Union[ast.stmt, ast.AST]) -> None:
|
||||
"""
|
||||
Parses an AST node and updates the data
|
||||
dictionary with the relevant information.
|
||||
"""
|
||||
if handler := self.handlers.get(type(node)): # type: ignore
|
||||
handler(node) # type: ignore
|
||||
|
||||
def parse_imports(self, node: Union[ast.Import, ast.ImportFrom]) -> None:
|
||||
"""
|
||||
Extracts "imports" from the code.
|
||||
"""
|
||||
if isinstance(node, ast.Import):
|
||||
for alias in node.names:
|
||||
self.data["imports"].append(alias.name)
|
||||
elif isinstance(node, ast.ImportFrom):
|
||||
for alias in node.names:
|
||||
self.data["imports"].append((node.module, alias.name))
|
||||
|
||||
def parse_functions(self, node: ast.FunctionDef) -> None:
|
||||
"""
|
||||
Extracts "functions" from the code.
|
||||
"""
|
||||
self.data["functions"].append(self.parse_callable_details(node))
|
||||
|
||||
def parse_arg(self, arg, default):
|
||||
"""
|
||||
Parses an argument and its default value.
|
||||
"""
|
||||
arg_dict = {"name": arg.arg, "default": default}
|
||||
if arg.annotation:
|
||||
arg_dict["type"] = ast.unparse(arg.annotation)
|
||||
return arg_dict
|
||||
|
||||
def parse_callable_details(self, node: ast.FunctionDef) -> Dict[str, Any]:
|
||||
"""
|
||||
Extracts details from a single function or method node.
|
||||
"""
|
||||
func = CallableCodeDetails(
|
||||
name=node.name,
|
||||
doc=ast.get_docstring(node),
|
||||
args=[],
|
||||
body=[],
|
||||
return_type=ast.unparse(node.returns) if node.returns else None,
|
||||
)
|
||||
|
||||
func.args = self.parse_function_args(node)
|
||||
func.body = self.parse_function_body(node)
|
||||
|
||||
return func.dict()
|
||||
|
||||
def parse_function_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Parses the arguments of a function or method node.
|
||||
"""
|
||||
args = []
|
||||
|
||||
args += self.parse_positional_args(node)
|
||||
args += self.parse_varargs(node)
|
||||
args += self.parse_keyword_args(node)
|
||||
args += self.parse_kwargs(node)
|
||||
|
||||
return args
|
||||
|
||||
def parse_positional_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Parses the positional arguments of a function or method node.
|
||||
"""
|
||||
num_args = len(node.args.args)
|
||||
num_defaults = len(node.args.defaults)
|
||||
num_missing_defaults = num_args - num_defaults
|
||||
missing_defaults = [None] * num_missing_defaults
|
||||
default_values = [
|
||||
ast.unparse(default).strip("'") if default else None
|
||||
for default in node.args.defaults
|
||||
]
|
||||
# Now check all default values to see if there
|
||||
# are any "None" values in the middle
|
||||
default_values = [
|
||||
None if value == "None" else value for value in default_values
|
||||
]
|
||||
|
||||
defaults = missing_defaults + default_values
|
||||
|
||||
args = [
|
||||
self.parse_arg(arg, default)
|
||||
for arg, default in zip(node.args.args, defaults)
|
||||
]
|
||||
return args
|
||||
|
||||
def parse_varargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Parses the *args argument of a function or method node.
|
||||
"""
|
||||
args = []
|
||||
|
||||
if node.args.vararg:
|
||||
args.append(self.parse_arg(node.args.vararg, None))
|
||||
|
||||
return args
|
||||
|
||||
def parse_keyword_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Parses the keyword-only arguments of a function or method node.
|
||||
"""
|
||||
kw_defaults = [None] * (
|
||||
len(node.args.kwonlyargs) - len(node.args.kw_defaults)
|
||||
) + [
|
||||
ast.unparse(default) if default else None
|
||||
for default in node.args.kw_defaults
|
||||
]
|
||||
|
||||
args = [
|
||||
self.parse_arg(arg, default)
|
||||
for arg, default in zip(node.args.kwonlyargs, kw_defaults)
|
||||
]
|
||||
return args
|
||||
|
||||
def parse_kwargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Parses the **kwargs argument of a function or method node.
|
||||
"""
|
||||
args = []
|
||||
|
||||
if node.args.kwarg:
|
||||
args.append(self.parse_arg(node.args.kwarg, None))
|
||||
|
||||
return args
|
||||
|
||||
def parse_function_body(self, node: ast.FunctionDef) -> List[str]:
|
||||
"""
|
||||
Parses the body of a function or method node.
|
||||
"""
|
||||
return [ast.unparse(line) for line in node.body]
|
||||
|
||||
def parse_assign(self, stmt):
|
||||
"""
|
||||
Parses an Assign statement and returns a dictionary
|
||||
with the target's name and value.
|
||||
"""
|
||||
for target in stmt.targets:
|
||||
if isinstance(target, ast.Name):
|
||||
return {"name": target.id, "value": ast.unparse(stmt.value)}
|
||||
|
||||
def parse_ann_assign(self, stmt):
|
||||
"""
|
||||
Parses an AnnAssign statement and returns a dictionary
|
||||
with the target's name, value, and annotation.
|
||||
"""
|
||||
if isinstance(stmt.target, ast.Name):
|
||||
return {
|
||||
"name": stmt.target.id,
|
||||
"value": ast.unparse(stmt.value) if stmt.value else None,
|
||||
"annotation": ast.unparse(stmt.annotation),
|
||||
}
|
||||
|
||||
def parse_function_def(self, stmt):
|
||||
"""
|
||||
Parses a FunctionDef statement and returns the parsed
|
||||
method and a boolean indicating if it's an __init__ method.
|
||||
"""
|
||||
method = self.parse_callable_details(stmt)
|
||||
return (method, True) if stmt.name == "__init__" else (method, False)
|
||||
|
||||
def parse_classes(self, node: ast.ClassDef) -> None:
|
||||
"""
|
||||
Extracts "classes" from the code, including inheritance and init methods.
|
||||
"""
|
||||
|
||||
class_details = ClassCodeDetails(
|
||||
name=node.name,
|
||||
doc=ast.get_docstring(node),
|
||||
bases=[ast.unparse(base) for base in node.bases],
|
||||
attributes=[],
|
||||
methods=[],
|
||||
init=None,
|
||||
)
|
||||
|
||||
for stmt in node.body:
|
||||
if isinstance(stmt, ast.Assign):
|
||||
if attr := self.parse_assign(stmt):
|
||||
class_details.attributes.append(attr)
|
||||
elif isinstance(stmt, ast.AnnAssign):
|
||||
if attr := self.parse_ann_assign(stmt):
|
||||
class_details.attributes.append(attr)
|
||||
elif isinstance(stmt, ast.FunctionDef):
|
||||
method, is_init = self.parse_function_def(stmt)
|
||||
if is_init:
|
||||
class_details.init = method
|
||||
else:
|
||||
class_details.methods.append(method)
|
||||
|
||||
self.data["classes"].append(class_details.dict())
|
||||
|
||||
def parse_global_vars(self, node: ast.Assign) -> None:
|
||||
"""
|
||||
Extracts global variables from the code.
|
||||
"""
|
||||
global_var = {
|
||||
"targets": [
|
||||
t.id if hasattr(t, "id") else ast.dump(t) for t in node.targets
|
||||
],
|
||||
"value": ast.unparse(node.value),
|
||||
}
|
||||
self.data["global_vars"].append(global_var)
|
||||
|
||||
def parse_code(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Runs all parsing operations and returns the resulting data.
|
||||
"""
|
||||
tree = self.__get_tree()
|
||||
|
||||
for node in ast.walk(tree):
|
||||
self.parse_node(node)
|
||||
return self.data
|
||||
72
src/backend/langflow/interface/custom/component.py
Normal file
72
src/backend/langflow/interface/custom/component.py
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
import ast
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel
|
||||
from fastapi import HTTPException
|
||||
|
||||
from langflow.utils import validate
|
||||
from langflow.interface.custom.code_parser import CodeParser
|
||||
|
||||
|
||||
class ComponentCodeNullError(HTTPException):
|
||||
pass
|
||||
|
||||
|
||||
class ComponentFunctionEntrypointNameNullError(HTTPException):
|
||||
pass
|
||||
|
||||
|
||||
class Component(BaseModel):
    """
    Base model for a code-backed component.

    Holds the raw Python source (``code``), the name of the method that
    acts as the build entrypoint, and per-field UI configuration.
    """

    # Error messages surfaced in the HTTP 400 detail by get_function().
    ERROR_CODE_NULL = "Python code must be provided."
    ERROR_FUNCTION_ENTRYPOINT_NAME_NULL = (
        "The name of the entrypoint function must be provided."
    )

    # Raw Python source of the component; may be absent.
    code: Optional[str]
    # Name of the function/method executed when the component is built.
    function_entrypoint_name = "build"
    # Per-field UI configuration.
    field_config: dict = {}

    def __init__(self, **data):
        super().__init__(**data)

    def get_code_tree(self, code: str):
        """Parse ``code`` and return its structural description."""
        parser = CodeParser(code)
        return parser.parse_code()

    def get_function(self):
        """
        Compile and return the entrypoint function from ``self.code``.

        Raises:
            ComponentCodeNullError: when ``code`` is empty/None (HTTP 400).
            ComponentFunctionEntrypointNameNullError: when the entrypoint
                name is empty (HTTP 400).
        """
        if not self.code:
            raise ComponentCodeNullError(
                status_code=400,
                detail={"error": self.ERROR_CODE_NULL, "traceback": ""},
            )

        if not self.function_entrypoint_name:
            raise ComponentFunctionEntrypointNameNullError(
                status_code=400,
                detail={
                    "error": self.ERROR_FUNCTION_ENTRYPOINT_NAME_NULL,
                    "traceback": "",
                },
            )

        return validate.create_function(self.code, self.function_entrypoint_name)

    def build_template_config(self, attributes) -> dict:
        """
        Extract display_name/description/field_config entries from parsed
        class attributes.

        Each attribute is a dict with "name" and "value" keys, where
        "value" is the attribute's source text; values are converted via
        ``ast.literal_eval``. Matching is by substring on the name.
        """
        template_config = {}

        for item in attributes:
            item_name = item.get("name")

            if item_value := item.get("value"):
                if "display_name" in item_name:
                    template_config["display_name"] = ast.literal_eval(item_value)

                elif "description" in item_name:
                    template_config["description"] = ast.literal_eval(item_value)

                elif "field_config" in item_name:
                    template_config["field_config"] = ast.literal_eval(item_value)

        return template_config

    def build(self):
        # Subclasses must implement the actual build behavior.
        raise NotImplementedError
|
||||
59
src/backend/langflow/interface/custom/constants.py
Normal file
59
src/backend/langflow/interface/custom/constants.py
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
from langchain import PromptTemplate
|
||||
from langchain.chains.base import Chain
|
||||
from langchain.document_loaders.base import BaseLoader
|
||||
from langchain.embeddings.base import Embeddings
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain.schema import BaseRetriever, Document
|
||||
from langchain.text_splitter import TextSplitter
|
||||
from langchain.tools import Tool
|
||||
from langchain.vectorstores.base import VectorStore
|
||||
|
||||
|
||||
# Mapping from annotation names (as written in user component code) to the
# LangChain base classes they refer to.
LANGCHAIN_BASE_TYPES = {
    "Chain": Chain,
    "Tool": Tool,
    "BaseLLM": BaseLLM,
    "PromptTemplate": PromptTemplate,
    "BaseLoader": BaseLoader,
    "Document": Document,
    "TextSplitter": TextSplitter,
    "VectorStore": VectorStore,
    "Embeddings": Embeddings,
    "BaseRetriever": BaseRetriever,
}

# Langchain base types plus Python base types: the full set of types a
# custom component's build() is allowed to accept/return.
CUSTOM_COMPONENT_SUPPORTED_TYPES = {
    **LANGCHAIN_BASE_TYPES,
    "str": str,
    "int": int,
    "float": float,
    "bool": bool,
    "list": list,
    "dict": dict,
}
|
||||
|
||||
|
||||
DEFAULT_CUSTOM_COMPONENT_CODE = """
|
||||
from langflow import CustomComponent
|
||||
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain.chains import LLMChain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.schema import Document
|
||||
|
||||
import requests
|
||||
|
||||
class YourComponent(CustomComponent):
|
||||
display_name: str = "Your Component"
|
||||
description: str = "Your description"
|
||||
|
||||
def build_config(self):
|
||||
return { "url": { "multiline": True, "required": True } }
|
||||
|
||||
def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:
|
||||
response = requests.get(url)
|
||||
chain = LLMChain(llm=llm, prompt=prompt)
|
||||
result = chain.run(response.text[:300])
|
||||
return Document(page_content=str(result))
|
||||
"""
|
||||
162
src/backend/langflow/interface/custom/custom_component.py
Normal file
162
src/backend/langflow/interface/custom/custom_component.py
Normal file
|
|
@ -0,0 +1,162 @@
|
|||
from typing import Callable, Optional
|
||||
from fastapi import HTTPException
|
||||
from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
|
||||
from langflow.interface.custom.component import Component
|
||||
|
||||
from langflow.utils import validate
|
||||
|
||||
from langflow.database.base import session_getter
|
||||
from langflow.database.models.flow import Flow
|
||||
from pydantic import Extra
|
||||
|
||||
|
||||
class CustomComponent(Component, extra=Extra.allow):
    """
    A component whose behavior is defined by a user-supplied Python class
    that inherits from ``CustomComponent`` and exposes a ``build`` method.
    """

    # Raw Python source of the user's component class.
    code: Optional[str]
    # Per-field UI configuration returned by build_config().
    field_config: dict = {}
    # Name of the base class the user's class must inherit from.
    code_class_base_inheritance = "CustomComponent"
    # Name of the method used as the build entrypoint.
    function_entrypoint_name = "build"
    function: Optional[Callable] = None
    # Type names accepted in the entrypoint's signature/return annotation.
    return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())
    # Human-readable representation of the last build result for the UI.
    repr_value: Optional[str] = ""

    def __init__(self, **data):
        super().__init__(**data)

    def custom_repr(self):
        """Return the string representation shown for built results."""
        return str(self.repr_value)

    def build_config(self):
        """Return the per-field UI configuration; user classes may override."""
        return self.field_config

    def _class_template_validation(self, code: str) -> bool:
        """Validate the user code template; raises HTTP 400 when empty."""
        if not code:
            raise HTTPException(
                status_code=400,
                detail={
                    "error": self.ERROR_CODE_NULL,
                    "traceback": "",
                },
            )

        # TODO: Create the logic to validate what the Custom Component
        # should have as a prerequisite to be able to execute
        return True

    def is_check_valid(self) -> bool:
        """True when code is present and passes template validation."""
        return self._class_template_validation(self.code) if self.code else False

    def get_code_tree(self, code: str):
        return super().get_code_tree(code)

    def _find_entrypoint_method(self) -> Optional[dict]:
        """
        Locate the parsed description of the entrypoint method.

        Returns the method dict from the code tree, or None when the code
        is missing, no class inherits from ``CustomComponent``, or the
        class defines no entrypoint method. Shared by the two entrypoint
        properties below (previously duplicated code).
        """
        if not self.code:
            return None
        tree = self.get_code_tree(self.code)

        component_classes = [
            cls
            for cls in tree["classes"]
            if self.code_class_base_inheritance in cls["bases"]
        ]
        if not component_classes:
            return None

        # Assume the first Component class is the one we're interested in
        component_class = component_classes[0]
        build_methods = [
            method
            for method in component_class["methods"]
            if method["name"] == self.function_entrypoint_name
        ]
        return build_methods[0] if build_methods else None

    @property
    def get_function_entrypoint_args(self) -> str:
        """Parsed args of the entrypoint method, or "" when unavailable."""
        build_method = self._find_entrypoint_method()
        return build_method["args"] if build_method else ""

    @property
    def get_function_entrypoint_return_type(self) -> str:
        """Return annotation of the entrypoint, or "" when unavailable."""
        build_method = self._find_entrypoint_method()
        return build_method["return_type"] if build_method else ""

    @property
    def get_main_class_name(self):
        """
        Name of the first class that inherits the expected base AND defines
        the entrypoint method; "" when none is found.
        """
        tree = self.get_code_tree(self.code)

        base_name = self.code_class_base_inheritance
        method_name = self.function_entrypoint_name

        classes = []
        for item in tree.get("classes"):
            if base_name in item["bases"]:
                method_names = [method["name"] for method in item["methods"]]
                if method_name in method_names:
                    classes.append(item["name"])

        # Get just the first item
        return next(iter(classes), "")

    @property
    def build_template_config(self):
        """Template config extracted from the main class's attributes."""
        tree = self.get_code_tree(self.code)

        attributes = [
            main_class["attributes"]
            for main_class in tree.get("classes")
            if main_class["name"] == self.get_main_class_name
        ]
        # Get just the first item
        attributes = next(iter(attributes), [])

        return super().build_template_config(attributes)

    @property
    def get_function(self):
        """Compile and return the entrypoint function from ``self.code``."""
        return validate.create_function(self.code, self.function_entrypoint_name)

    def load_flow(self, flow_id: str, tweaks: Optional[dict] = None):
        """
        Load the flow stored under ``flow_id``, optionally apply tweaks,
        and build its vertices. Raises ValueError when the flow is absent.
        """
        # Imported lazily to avoid a circular import at module load time.
        from langflow.processing.process import build_sorted_vertices_with_caching
        from langflow.processing.process import process_tweaks

        with session_getter() as session:
            graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None
            if not graph_data:
                raise ValueError(f"Flow {flow_id} not found")
            if tweaks:
                graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks)
            return build_sorted_vertices_with_caching(graph_data)

    def list_flows(self):
        """Return all flows stored in the database."""
        with session_getter() as session:
            flows = session.query(Flow).all()
            return flows

    def build(self):
        # Must be implemented by the user's subclass.
        raise NotImplementedError
|
||||
181
src/backend/langflow/interface/custom/directory_reader.py
Normal file
181
src/backend/langflow/interface/custom/directory_reader.py
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
import os
|
||||
import ast
|
||||
import zlib
|
||||
|
||||
|
||||
class CustomComponentPathValueError(ValueError):
    """Raised when a component directory path is outside the allowed base path."""

    pass
|
||||
|
||||
|
||||
class StringCompressor:
    """zlib round-trip helper for a single UTF-8 string."""

    def __init__(self, input_string):
        """Initialize StringCompressor with a string to compress."""
        self.input_string = input_string
        # Populated by compress_string(); None until first compression.
        # (Previously unset, so decompress-before-compress raised
        # AttributeError instead of working.)
        self.compressed_data = None

    def compress_string(self):
        """
        Compress the initial string and return the compressed data.
        """
        # Convert string to bytes, then compress the bytes.
        byte_data = self.input_string.encode("utf-8")
        self.compressed_data = zlib.compress(byte_data)

        return self.compressed_data

    def decompress_string(self):
        """
        Decompress the compressed data and return the original string.

        Compresses on demand if compress_string() was not called first.
        """
        if self.compressed_data is None:
            self.compress_string()
        # Decompress the bytes and convert back to a string.
        decompressed_data = zlib.decompress(self.compressed_data)
        return decompressed_data.decode("utf-8")
|
||||
|
||||
|
||||
class DirectoryReader:
    """
    Scans a directory tree for .py files containing custom components and
    builds a menu structure describing them.
    """

    # Ensure the base path to read the files that contain
    # the custom components from this directory.
    base_path = ""

    def __init__(self, directory_path, compress_code_field=False):
        """
        Initialize DirectoryReader with a directory path
        and a flag indicating whether to compress the code.
        """
        self.directory_path = directory_path
        self.compress_code_field = compress_code_field

    def get_safe_path(self):
        """Check if the path is valid and return it, or None if it's not."""
        return self.directory_path if self.is_valid_path() else None

    def is_valid_path(self) -> bool:
        """Check if the directory path is valid by comparing it to the base path."""
        fullpath = os.path.normpath(os.path.join(self.directory_path))
        return fullpath.startswith(self.base_path)

    def is_empty_file(self, file_content):
        """Check if the file content is empty (whitespace only)."""
        return len(file_content.strip()) == 0

    def filter_loaded_components(self, data: dict, with_errors: bool) -> dict:
        """
        Keep only components with errors (or only those without, per the
        flag), dropping any menu left without components.
        """
        items = [
            {
                "name": menu["name"],
                "path": menu["path"],
                "components": [
                    component
                    for component in menu["components"]
                    if (component["error"] if with_errors else not component["error"])
                ],
            }
            for menu in data["menu"]
        ]
        # Fixed local-name typo: "filtred" -> "filtered".
        filtered = [menu for menu in items if menu["components"]]
        return {"menu": filtered}

    def validate_code(self, file_content):
        """
        Validate the Python code by trying to parse it with ast.parse.
        """
        try:
            ast.parse(file_content)
            return True
        except SyntaxError:
            return False

    def validate_build(self, file_content):
        """
        Check if the file content contains a function named 'build'.
        """
        return "def build" in file_content

    def read_file_content(self, file_path):
        """
        Read and return the content of a file, or None if it is not a file.
        """
        if not os.path.isfile(file_path):
            return None
        # Explicit UTF-8: the previous locale-dependent default encoding
        # could fail on non-ASCII component sources.
        with open(file_path, "r", encoding="utf-8") as file:
            return file.read()

    def get_files(self):
        """
        Walk through the directory path and return a list of all .py files.

        Raises CustomComponentPathValueError when the path does not start
        with the allowed base path.
        """
        if not (safe_path := self.get_safe_path()):
            raise CustomComponentPathValueError(
                f"The path needs to start with '{self.base_path}'."
            )

        file_list = []
        for root, _, files in os.walk(safe_path):
            file_list.extend(
                os.path.join(root, filename)
                for filename in files
                if filename.endswith(".py")
            )
        return file_list

    def find_menu(self, response, menu_name):
        """
        Find and return a menu by its name in the response, or None.
        """
        return next(
            (menu for menu in response["menu"] if menu["name"] == menu_name),
            None,
        )

    def process_file(self, file_path):
        """
        Process a file by validating its content and returning
        (ok, content_or_error_message).
        """
        file_content = self.read_file_content(file_path)
        if file_content is None:
            return False, f"Could not read {file_path}"

        if self.is_empty_file(file_content):
            return False, "Empty file"
        elif not self.validate_code(file_content):
            return False, "Syntax error"
        elif not self.validate_build(file_content):
            return False, "Missing build function"
        else:
            if self.compress_code_field:
                file_content = str(StringCompressor(file_content).compress_string())
            return True, file_content

    def build_component_menu_list(self, file_paths):
        """
        Build a list of menus with their components
        from the .py files in the directory.
        """
        response = {"menu": []}

        for file_path in file_paths:
            # The menu name is the containing directory's basename.
            menu_name = os.path.basename(os.path.dirname(file_path))
            filename = os.path.basename(file_path)
            validation_result, result_content = self.process_file(file_path)

            menu_result = self.find_menu(response, menu_name) or {
                "name": menu_name,
                "path": os.path.dirname(file_path),
                "components": [],
            }

            # Exactly one of "code"/"error" carries the processed content.
            component_info = {
                "name": filename.split(".")[0],
                "file": filename,
                "code": result_content if validation_result else "",
                "error": "" if validation_result else result_content,
            }
            menu_result["components"].append(component_info)

            if menu_result not in response["menu"]:
                response["menu"].append(menu_result)

        return response
|
||||
29
src/backend/langflow/interface/custom/schema.py
Normal file
29
src/backend/langflow/interface/custom/schema.py
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class ClassCodeDetails(BaseModel):
    """
    A dataclass for storing details about a class.
    """

    # Class name as written in the source.
    name: str
    # Docstring, if any.
    doc: Optional[str]
    # Base-class expressions.
    bases: list
    # Class-level attribute descriptions.
    attributes: list
    # Method descriptions; __init__ is kept separately below.
    methods: list
    # Description of __init__, when present.
    init: Optional[dict] = Field(default_factory=dict)
|
||||
|
||||
|
||||
class CallableCodeDetails(BaseModel):
    """
    A dataclass for storing details about a callable.
    """

    # Callable name as written in the source.
    name: str
    # Docstring, if any.
    doc: Optional[str]
    # Parameter descriptions.
    args: list
    # Body statements.
    body: list
    # Return annotation, when present.
    return_type: Optional[str]
|
||||
|
|
@ -9,6 +9,7 @@ from langchain.base_language import BaseLanguageModel
|
|||
from langchain.chains.base import Chain
|
||||
from langchain.chat_models.base import BaseChatModel
|
||||
from langchain.tools import BaseTool
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
from langflow.utils import validate
|
||||
from langflow.interface.wrappers.base import wrapper_creator
|
||||
|
||||
|
|
@ -47,6 +48,7 @@ def import_by_type(_type: str, name: str) -> Any:
|
|||
"utilities": import_utility,
|
||||
"output_parsers": import_output_parser,
|
||||
"retrievers": import_retriever,
|
||||
"custom_components": import_custom_component,
|
||||
}
|
||||
if _type == "llms":
|
||||
key = "chat" if "chat" in name.lower() else "llm"
|
||||
|
|
@ -57,6 +59,13 @@ def import_by_type(_type: str, name: str) -> Any:
|
|||
return loaded_func(name)
|
||||
|
||||
|
||||
def import_custom_component(custom_component: str) -> CustomComponent:
|
||||
"""Import custom component from custom component name"""
|
||||
return import_class(
|
||||
f"langflow.interface.custom.custom_component.{custom_component}"
|
||||
)
|
||||
|
||||
|
||||
def import_output_parser(output_parser: str) -> Any:
|
||||
"""Import output parser from output parser name"""
|
||||
return import_module(f"from langchain.output_parsers import {output_parser}")
|
||||
|
|
@ -172,3 +181,8 @@ def get_function(code):
|
|||
function_name = validate.extract_function_name(code)
|
||||
|
||||
return validate.create_function(code, function_name)
|
||||
|
||||
|
||||
def get_function_custom(code):
|
||||
class_name = validate.extract_class_name(code)
|
||||
return validate.create_class(code, class_name)
|
||||
|
|
|
|||
|
|
@ -1,21 +1,23 @@
|
|||
import contextlib
|
||||
import json
|
||||
from typing import Any, Callable, Dict, List, Sequence, Type
|
||||
from typing import Any, Callable, Dict, Sequence, Type
|
||||
|
||||
from langchain.agents import ZeroShotAgent
|
||||
from langchain.agents import agent as agent_module
|
||||
from langchain.agents.agent import AgentExecutor
|
||||
from langchain.agents.agent_toolkits.base import BaseToolkit
|
||||
from langchain.agents.tools import BaseTool
|
||||
from langflow.interface.initialize.llm import initialize_vertexai
|
||||
from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type
|
||||
|
||||
from langflow.interface.initialize.vector_store import vecstore_initializer
|
||||
|
||||
from langchain.schema import Document, BaseOutputParser
|
||||
from pydantic import ValidationError
|
||||
|
||||
from langflow.interface.importing.utils import (
|
||||
get_function,
|
||||
get_function_custom,
|
||||
import_by_type,
|
||||
)
|
||||
from langflow.interface.custom_lists import CUSTOM_NODES
|
||||
from langflow.interface.importing.utils import get_function, import_by_type
|
||||
from langflow.interface.agents.base import agent_creator
|
||||
from langflow.interface.toolkits.base import toolkits_creator
|
||||
from langflow.interface.chains.base import chain_creator
|
||||
|
|
@ -95,12 +97,21 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
|
|||
return instantiate_retriever(node_type, class_object, params)
|
||||
elif base_type == "memory":
|
||||
return instantiate_memory(node_type, class_object, params)
|
||||
elif base_type == "custom_components":
|
||||
return instantiate_custom_component(node_type, class_object, params)
|
||||
elif base_type == "wrappers":
|
||||
return instantiate_wrapper(node_type, class_object, params)
|
||||
else:
|
||||
return class_object(**params)
|
||||
|
||||
|
||||
def instantiate_custom_component(node_type, class_object, params):
    """
    Build a custom component from its source code.

    The "code" param is compiled into a class, instantiated, and its
    build() called with the remaining params. Returns the built object
    plus extra UI info (the component's repr).
    """
    # The incoming class_object is replaced: the real class comes from
    # the user-supplied code string.
    class_object = get_function_custom(params.pop("code"))
    custom_component = class_object()
    built_object = custom_component.build(**params)
    return built_object, {"repr": custom_component.custom_repr()}
|
||||
|
||||
|
||||
def instantiate_wrapper(node_type, class_object, params):
|
||||
if node_type in wrapper_creator.from_method_nodes:
|
||||
method = wrapper_creator.from_method_nodes[node_type]
|
||||
|
|
@ -199,68 +210,8 @@ def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params:
|
|||
|
||||
|
||||
def instantiate_prompt(node_type, class_object, params: Dict):
|
||||
if node_type == "ZeroShotPrompt":
|
||||
if "tools" not in params:
|
||||
params["tools"] = []
|
||||
return ZeroShotAgent.create_prompt(**params)
|
||||
elif "MessagePromptTemplate" in node_type:
|
||||
# Then we only need the template
|
||||
from_template_params = {
|
||||
"template": params.pop("prompt", params.pop("template", ""))
|
||||
}
|
||||
|
||||
if not from_template_params.get("template"):
|
||||
raise ValueError("Prompt template is required")
|
||||
prompt = class_object.from_template(**from_template_params)
|
||||
|
||||
elif node_type == "ChatPromptTemplate":
|
||||
prompt = class_object.from_messages(**params)
|
||||
else:
|
||||
prompt = class_object(**params)
|
||||
|
||||
format_kwargs: Dict[str, Any] = {}
|
||||
for input_variable in prompt.input_variables:
|
||||
if input_variable in params:
|
||||
variable = params[input_variable]
|
||||
if isinstance(variable, str):
|
||||
format_kwargs[input_variable] = variable
|
||||
elif isinstance(variable, BaseOutputParser) and hasattr(
|
||||
variable, "get_format_instructions"
|
||||
):
|
||||
format_kwargs[input_variable] = variable.get_format_instructions()
|
||||
elif isinstance(variable, List) and all(
|
||||
isinstance(item, Document) for item in variable
|
||||
):
|
||||
# Format document to contain page_content and metadata
|
||||
# as one string separated by a newline
|
||||
if len(variable) > 1:
|
||||
content = "\n".join(
|
||||
[item.page_content for item in variable if item.page_content]
|
||||
)
|
||||
else:
|
||||
content = variable[0].page_content
|
||||
# content could be a json list of strings
|
||||
with contextlib.suppress(json.JSONDecodeError):
|
||||
content = json.loads(content)
|
||||
if isinstance(content, list):
|
||||
content = ",".join([str(item) for item in content])
|
||||
format_kwargs[input_variable] = content
|
||||
# handle_keys will be a list but it does not exist yet
|
||||
# so we need to create it
|
||||
|
||||
if (
|
||||
isinstance(variable, List)
|
||||
and all(isinstance(item, Document) for item in variable)
|
||||
) or (
|
||||
isinstance(variable, BaseOutputParser)
|
||||
and hasattr(variable, "get_format_instructions")
|
||||
):
|
||||
if "handle_keys" not in format_kwargs:
|
||||
format_kwargs["handle_keys"] = []
|
||||
|
||||
# Add the handle_keys to the list
|
||||
format_kwargs["handle_keys"].append(input_variable)
|
||||
|
||||
params, prompt = handle_node_type(node_type, class_object, params)
|
||||
format_kwargs = handle_format_kwargs(prompt, params)
|
||||
return prompt, format_kwargs
|
||||
|
||||
|
||||
|
|
@ -363,6 +314,8 @@ def instantiate_textsplitter(
|
|||
):
|
||||
try:
|
||||
documents = params.pop("documents")
|
||||
if not isinstance(documents, list):
|
||||
documents = [documents]
|
||||
except KeyError as exc:
|
||||
raise ValueError(
|
||||
"The source you provided did not load correctly or was empty."
|
||||
|
|
|
|||
103
src/backend/langflow/interface/initialize/utils.py
Normal file
103
src/backend/langflow/interface/initialize/utils.py
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
import contextlib
|
||||
import json
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from langchain.agents import ZeroShotAgent
|
||||
|
||||
|
||||
from langchain.schema import Document, BaseOutputParser
|
||||
|
||||
|
||||
def handle_node_type(node_type, class_object, params: Dict):
    """
    Instantiate a prompt object appropriate to ``node_type``.

    Dispatch:
    - "ZeroShotPrompt": ensure a "tools" key, then ZeroShotAgent.create_prompt
    - names containing "MessagePromptTemplate": build from a template string
    - "ChatPromptTemplate": build via ``from_messages``
    - anything else: call the class constructor with ``params``

    Returns the (possibly mutated) params together with the built prompt.
    """
    if node_type == "ZeroShotPrompt":
        params = check_tools_in_params(params)
        prompt = ZeroShotAgent.create_prompt(**params)
    elif "MessagePromptTemplate" in node_type:
        prompt = instantiate_from_template(class_object, params)
    elif node_type == "ChatPromptTemplate":
        prompt = class_object.from_messages(**params)
    else:
        prompt = class_object(**params)
    return params, prompt
|
||||
|
||||
|
||||
def check_tools_in_params(params: Dict):
    """Ensure ``params`` has a "tools" entry (empty list by default)."""
    params.setdefault("tools", [])
    return params
|
||||
|
||||
|
||||
def instantiate_from_template(class_object, params: Dict):
    """
    Build a prompt via ``class_object.from_template``.

    Both "prompt" and "template" are consumed from ``params``; when both
    are present the "prompt" value wins. Raises ValueError when neither
    yields a non-empty template.
    """
    # Pop "template" unconditionally (the original's default argument was
    # evaluated eagerly), then prefer an explicit "prompt" value.
    fallback = params.pop("template", "")
    template_text = params.pop("prompt", fallback)
    if not template_text:
        raise ValueError("Prompt template is required")
    return class_object.from_template(template=template_text)
|
||||
|
||||
|
||||
def handle_format_kwargs(prompt, params: Dict):
    """
    Collect format kwargs for every prompt input variable that has a
    corresponding entry in ``params``.
    """
    format_kwargs: Dict[str, Any] = {}
    for input_variable in prompt.input_variables:
        if input_variable in params:
            format_kwargs = handle_variable(params, input_variable, format_kwargs)
    return format_kwargs
|
||||
|
||||
|
||||
def handle_variable(params: Dict, input_variable: str, format_kwargs: Dict):
    """
    Convert one param value into a prompt format kwarg.

    Strings pass through unchanged; output parsers contribute their format
    instructions; Documents (single or list) are flattened to text. Values
    of the latter two kinds are also registered under "handle_keys".
    """
    variable = params[input_variable]
    if isinstance(variable, str):
        format_kwargs[input_variable] = variable
    elif isinstance(variable, BaseOutputParser) and hasattr(
        variable, "get_format_instructions"
    ):
        format_kwargs[input_variable] = variable.get_format_instructions()
    elif is_instance_of_list_or_document(variable):
        format_kwargs = format_document(variable, input_variable, format_kwargs)
    if needs_handle_keys(variable):
        format_kwargs = add_handle_keys(input_variable, format_kwargs)
    return format_kwargs
|
||||
|
||||
|
||||
def is_instance_of_list_or_document(variable):
    """
    True for a single Document or a list made up entirely of Documents.

    Note: an empty list also matches, since all() over it is vacuously True.
    """
    return (
        isinstance(variable, List)
        and all(isinstance(item, Document) for item in variable)
        or isinstance(variable, Document)
    )
|
||||
|
||||
|
||||
def format_document(variable, input_variable: str, format_kwargs: Dict):
    """
    Flatten Document(s) to a single string stored under ``input_variable``.
    """
    # Normalize a single Document into a one-element list.
    variable = variable if isinstance(variable, List) else [variable]
    content = format_content(variable)
    format_kwargs[input_variable] = content
    return format_kwargs
|
||||
|
||||
|
||||
def format_content(variable):
    """
    Join multiple Documents' page_content with newlines; for a single
    Document, additionally try to collapse JSON-list content to CSV text.
    """
    # NOTE(review): assumes a non-empty list — an empty one would raise
    # IndexError below; callers appear to guarantee at least one Document.
    if len(variable) > 1:
        return "\n".join([item.page_content for item in variable if item.page_content])
    content = variable[0].page_content
    return try_to_load_json(content)
|
||||
|
||||
|
||||
def try_to_load_json(content):
    """
    Best-effort JSON decode of ``content``.

    A decoded list is flattened to a comma-separated string; other decoded
    values are returned as-is; undecodable input is returned unchanged.
    """
    try:
        parsed = json.loads(content)
    except json.JSONDecodeError:
        return content
    if isinstance(parsed, list):
        return ",".join(str(item) for item in parsed)
    return parsed
|
||||
|
||||
|
||||
def needs_handle_keys(variable):
    """
    True when the variable requires registration under "handle_keys":
    Document(s), or an output parser exposing get_format_instructions.
    """
    return is_instance_of_list_or_document(variable) or (
        isinstance(variable, BaseOutputParser)
        and hasattr(variable, "get_format_instructions")
    )
|
||||
|
||||
|
||||
def add_handle_keys(input_variable: str, format_kwargs: Dict):
    """Append ``input_variable`` to "handle_keys", creating the list if absent."""
    format_kwargs.setdefault("handle_keys", []).append(input_variable)
    return format_kwargs
|
||||
|
|
@ -13,6 +13,7 @@ from langflow.interface.vector_store.base import vectorstore_creator
|
|||
from langflow.interface.wrappers.base import wrapper_creator
|
||||
from langflow.interface.output_parsers.base import output_parser_creator
|
||||
from langflow.interface.retrievers.base import retriever_creator
|
||||
from langflow.interface.custom.base import custom_component_creator
|
||||
|
||||
|
||||
def get_type_dict():
|
||||
|
|
@ -32,6 +33,7 @@ def get_type_dict():
|
|||
"utilities": utility_creator.to_list(),
|
||||
"outputParsers": output_parser_creator.to_list(),
|
||||
"retrievers": retriever_creator.to_list(),
|
||||
"custom_components": custom_component_creator.to_list(),
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -55,7 +55,7 @@ TOOL_INPUTS = {
|
|||
show=True,
|
||||
value="",
|
||||
suffixes=[".json", ".yaml", ".yml"],
|
||||
fileTypes=["json", "yaml", "yml"],
|
||||
file_types=["json", "yaml", "yml"],
|
||||
),
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -9,7 +9,10 @@ from langchain.agents.load_tools import (
|
|||
from langchain.tools.json.tool import JsonSpec
|
||||
|
||||
from langflow.interface.importing.utils import import_class
|
||||
from langflow.interface.tools.custom import PythonFunctionTool, PythonFunction
|
||||
from langflow.interface.tools.custom import (
|
||||
PythonFunctionTool,
|
||||
PythonFunction,
|
||||
)
|
||||
|
||||
FILE_TOOLS = {"JsonSpec": JsonSpec}
|
||||
CUSTOM_TOOLS = {
|
||||
|
|
|
|||
|
|
@ -34,8 +34,6 @@ class Function(BaseModel):
|
|||
|
||||
|
||||
class PythonFunctionTool(Function, Tool):
|
||||
"""Python function"""
|
||||
|
||||
name: str = "Custom Tool"
|
||||
description: str
|
||||
code: str
|
||||
|
|
@ -49,6 +47,4 @@ class PythonFunctionTool(Function, Tool):
|
|||
|
||||
|
||||
class PythonFunction(Function):
|
||||
"""Python function"""
|
||||
|
||||
code: str
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
from langflow.interface.agents.base import agent_creator
|
||||
from langflow.interface.chains.base import chain_creator
|
||||
from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
|
||||
from langflow.interface.document_loaders.base import documentloader_creator
|
||||
from langflow.interface.embeddings.base import embedding_creator
|
||||
from langflow.interface.importing.utils import get_function_custom
|
||||
from langflow.interface.llms.base import llm_creator
|
||||
from langflow.interface.memories.base import memory_creator
|
||||
from langflow.interface.prompts.base import prompt_creator
|
||||
|
|
@ -12,9 +14,28 @@ from langflow.interface.utilities.base import utility_creator
|
|||
from langflow.interface.vector_store.base import vectorstore_creator
|
||||
from langflow.interface.wrappers.base import wrapper_creator
|
||||
from langflow.interface.output_parsers.base import output_parser_creator
|
||||
from langflow.interface.custom.base import custom_component_creator
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
from langflow.template.field.base import TemplateField
|
||||
from langflow.template.frontend_node.constants import CLASSES_TO_REMOVE
|
||||
from langflow.template.frontend_node.custom_components import (
|
||||
CustomComponentFrontendNode,
|
||||
)
|
||||
from langflow.interface.retrievers.base import retriever_creator
|
||||
|
||||
from langflow.interface.custom.directory_reader import DirectoryReader
|
||||
from langflow.utils.logger import logger
|
||||
from langflow.utils.util import get_base_classes
|
||||
from langflow.api.utils import merge_nested_dicts
|
||||
|
||||
import re
|
||||
import warnings
|
||||
import traceback
|
||||
from fastapi import HTTPException
|
||||
|
||||
|
||||
# Used to get the base_classes list
|
||||
def get_type_list():
|
||||
"""Get a list of all langchain types"""
|
||||
all_types = build_langchain_types_dict()
|
||||
|
|
@ -29,7 +50,6 @@ def get_type_list():
|
|||
|
||||
def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
|
||||
"""Build a dictionary of all langchain types"""
|
||||
|
||||
all_types = {}
|
||||
|
||||
creators = [
|
||||
|
|
@ -48,6 +68,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
|
|||
utility_creator,
|
||||
output_parser_creator,
|
||||
retriever_creator,
|
||||
custom_component_creator,
|
||||
]
|
||||
|
||||
all_types = {}
|
||||
|
|
@ -55,7 +76,298 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
|
|||
created_types = creator.to_dict()
|
||||
if created_types[creator.type_name].values():
|
||||
all_types.update(created_types)
|
||||
|
||||
return all_types
|
||||
|
||||
|
||||
langchain_types_dict = build_langchain_types_dict()
|
||||
def process_type(field_type: str):
    """Map the UI type name "Prompt" to internal "prompt"; pass others through."""
    if field_type == "Prompt":
        return "prompt"
    return field_type
|
||||
|
||||
|
||||
# TODO: Move to correct place
|
||||
def add_new_custom_field(
    template,
    field_name: str,
    field_type: str,
    field_value: str,
    field_required: bool,
    field_config: dict,
):
    """
    Insert a user-defined field into a frontend node template.

    Values present in ``field_config`` override the positional defaults;
    the consumed keys are popped from it and any remaining keys are
    forwarded verbatim to TemplateField. Mutates and returns ``template``
    (both its "template" and "custom_fields" sections).
    """
    # Check field_config if any of the keys are in it
    # if it is, update the value
    display_name = field_config.pop("display_name", field_name)
    field_type = field_config.pop("field_type", field_type)
    field_type = process_type(field_type)
    field_value = field_config.pop("value", field_value)
    field_advanced = field_config.pop("advanced", False)

    # "name" is reserved: it is how the field is wired into the object.
    if "name" in field_config:
        warnings.warn(
            "The 'name' key in field_config is used to build the object and can't be changed."
        )
        field_config.pop("name", None)

    required = field_config.pop("required", field_required)
    placeholder = field_config.pop("placeholder", "")

    new_field = TemplateField(
        name=field_name,
        field_type=field_type,
        value=field_value,
        show=True,
        required=required,
        advanced=field_advanced,
        placeholder=placeholder,
        display_name=display_name,
        **field_config,
    )
    template.get("template")[field_name] = new_field.to_dict()
    # Marked so the frontend knows this field is user-defined.
    template.get("custom_fields")[field_name] = None

    return template
|
||||
|
||||
|
||||
# TODO: Move to correct place
|
||||
def add_code_field(template, raw_code, field_config):
    """Attach an editable "code" field holding ``raw_code`` to ``template``.

    Pops "advanced" from ``field_config`` (mutating it); every other attribute
    of the code field is fixed. Returns the same ``template`` object.
    """
    template.get("template")["code"] = {
        "dynamic": True,
        "required": True,
        "placeholder": "",
        "show": True,
        "multiline": True,
        "value": raw_code,
        "password": False,
        "name": "code",
        "advanced": field_config.pop("advanced", False),
        "type": "code",
        "list": False,
    }
    return template
|
||||
|
||||
|
||||
def extract_type_from_optional(field_type):
    """
    Extract the inner type from a string formatted as "Optional[<type>]".

    Parameters:
        field_type (str): The string from which to extract the type.

    Returns:
        str: The extracted type, or None if no bracketed type was found.
    """
    # Anchor on "Optional[" and match greedily so nested generics such as
    # "Optional[List[str]]" yield "List[str]" instead of the truncated
    # "List[str" the old non-greedy pattern produced.
    match = re.search(r"Optional\[(.*)\]", field_type)
    if match:
        return match[1]
    # Fallback: preserve the previous behavior for any other bracketed string.
    match = re.search(r"\[(.*?)\]", field_type)
    return match[1] if match else None
|
||||
|
||||
|
||||
def build_frontend_node(custom_component: CustomComponent):
    """Return the base frontend-node dict for ``custom_component``, or None on failure."""
    node_name = type(custom_component).__name__
    try:
        return CustomComponentFrontendNode().to_dict().get(node_name)
    except Exception as exc:
        logger.error(f"Error while building base frontend node: {exc}")
        return None
|
||||
|
||||
|
||||
def update_display_name_and_description(frontend_node, template_config):
    """Copy display_name/description overrides from ``template_config`` onto the node."""
    for attribute in ("display_name", "description"):
        if attribute in template_config:
            frontend_node[attribute] = template_config[attribute]
|
||||
|
||||
|
||||
def build_field_config(custom_component):
    """Instantiate the component class from its source and return its build_config().

    Returns an empty dict when the code cannot be loaded or executed.
    """
    try:
        component_class = get_function_custom(custom_component.code)
        return component_class().build_config()
    except Exception as exc:
        logger.error(f"Error while building field config: {exc}")
        return {}
|
||||
|
||||
|
||||
def add_extra_fields(frontend_node, field_config, function_args):
    """Register each build-method argument as a custom field on the frontend node."""
    if function_args is None:
        return

    # Deterministic field order: sort the argument dicts by name (in place).
    function_args.sort(key=lambda arg: arg["name"])

    for arg in function_args:
        # Skip nameless entries and the implicit "self" parameter.
        if "name" not in arg or arg["name"] == "self":
            continue

        name, arg_type, default, required = get_field_properties(arg)
        frontend_node = add_new_custom_field(
            frontend_node,
            name,
            arg_type,
            default,
            required,
            field_config.get(name, {}),
        )
|
||||
|
||||
|
||||
def get_field_properties(extra_field):
    """Derive (name, type, default, required) for one build-method argument.

    A type string mentioning "optional" marks the field as not required, and
    the inner type is unwrapped from the Optional[...] annotation.
    """
    name = extra_field["name"]
    declared_type = extra_field.get("type", "str")
    default = extra_field.get("default", "")

    required = "optional" not in declared_type.lower()
    if not required:
        declared_type = extract_type_from_optional(declared_type)

    return name, declared_type, default, required
|
||||
|
||||
|
||||
def add_base_classes(frontend_node, return_type):
    """Append the base classes of the build method's return type to the node.

    Raises HTTPException(400) when ``return_type`` is None or unsupported.
    """
    if return_type is None or return_type not in CUSTOM_COMPONENT_SUPPORTED_TYPES:
        raise HTTPException(
            status_code=400,
            detail={
                "error": (
                    "Invalid return type should be one of: "
                    f"{list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())}"
                ),
                # NOTE(review): format_exc() runs outside an except block here,
                # so this is typically "NoneType: None" — kept for compatibility.
                "traceback": traceback.format_exc(),
            },
        )

    supported_instance = CUSTOM_COMPONENT_SUPPORTED_TYPES.get(return_type)
    for base_class in get_base_classes(supported_instance):
        if base_class not in CLASSES_TO_REMOVE:
            frontend_node.get("base_classes").append(base_class)
|
||||
|
||||
|
||||
def build_langchain_template_custom_component(custom_component: CustomComponent):
    """Assemble the full frontend-node template for a custom component.

    Returns None when the base frontend node cannot be built.
    """
    frontend_node = build_frontend_node(custom_component)
    if frontend_node is None:
        return None

    # Apply display_name/description overrides declared by the component.
    update_display_name_and_description(
        frontend_node, custom_component.build_template_config
    )

    field_config = build_field_config(custom_component)
    add_extra_fields(
        frontend_node, field_config, custom_component.get_function_entrypoint_args
    )
    frontend_node = add_code_field(
        frontend_node, custom_component.code, field_config.get("code", {})
    )
    add_base_classes(
        frontend_node, custom_component.get_function_entrypoint_return_type
    )
    return frontend_node
|
||||
|
||||
|
||||
def load_files_from_path(path: str):
    """List every component file found under ``path`` via a DirectoryReader."""
    return DirectoryReader(path, False).get_files()
|
||||
|
||||
|
||||
def build_and_validate_all_files(reader, file_list):
    """Split ``file_list`` into components that load cleanly and ones that error.

    Returns a (valid_components, invalid_components) pair.
    """
    menu_data = reader.build_component_menu_list(file_list)
    valid = reader.filter_loaded_components(data=menu_data, with_errors=False)
    invalid = reader.filter_loaded_components(data=menu_data, with_errors=True)
    return valid, invalid
|
||||
|
||||
|
||||
def build_valid_menu(valid_components):
    """Build the nested {menu name: {component name: template}} mapping.

    Each component's code is loaded into a CustomComponent, validated, and
    turned into a frontend template; failures are logged and the entry skipped.
    """
    valid_menu = {}
    for menu_item in valid_components["menu"]:
        components = {}
        valid_menu[menu_item["name"]] = components

        for component in menu_item["components"]:
            try:
                extractor = CustomComponent(code=component["code"])
                extractor.is_check_valid()
                components[component["name"]] = (
                    build_langchain_template_custom_component(extractor)
                )
            except Exception as exc:
                logger.error(f"Error while building custom component: {exc}")

    return valid_menu
|
||||
|
||||
|
||||
def build_invalid_menu(invalid_components):
    """Build the {menu name: {component name: error-template}} mapping.

    Each broken component is rendered as an error placeholder node that still
    carries the original source code so the user can fix it in the editor.
    Failures while building a placeholder are logged and the entry skipped.
    """
    invalid_menu = {}
    for menu_item in invalid_components["menu"]:
        menu_name = menu_item["name"]
        invalid_menu[menu_name] = {}

        for component in menu_item["components"]:
            try:
                component_name = component["name"]
                component_code = component["code"]

                component_template = (
                    CustomComponentFrontendNode(
                        description="ERROR - Check your Python Code",
                        display_name=f"ERROR - {component_name}",
                    )
                    .to_dict()
                    # Fix: read the class name directly instead of instantiating
                    # CustomComponent() just to inspect its type — the no-arg
                    # construction could itself raise and silently drop the entry.
                    .get(CustomComponent.__name__)
                )

                # Keep the broken source in the code field so it can be edited.
                component_template.get("template").get("code")["value"] = component_code

                invalid_menu[menu_name][component_name] = component_template

            except Exception as exc:
                logger.error(f"Error while creating custom component: {exc}")

    return invalid_menu
|
||||
|
||||
|
||||
def build_langchain_custom_component_list_from_path(path: str):
    """Scan ``path`` and return the merged menu of valid and invalid components."""
    files = load_files_from_path(path)
    reader = DirectoryReader(path, False)

    valid, invalid = build_and_validate_all_files(reader, files)

    return merge_nested_dicts(build_valid_menu(valid), build_invalid_menu(invalid))
|
||||
|
|
|
|||
|
|
@ -8,10 +8,12 @@ from fastapi.staticfiles import StaticFiles
|
|||
from langflow.api import router
|
||||
from langflow.database.base import create_db_and_tables
|
||||
from langflow.interface.utils import setup_llm_caching
|
||||
from langflow.utils.logger import configure
|
||||
|
||||
|
||||
def create_app():
|
||||
"""Create the FastAPI app and include the router."""
|
||||
configure()
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
|
@ -78,10 +80,16 @@ def setup_app(static_files_dir: Optional[Path] = None) -> FastAPI:
|
|||
return app
|
||||
|
||||
|
||||
app = create_app()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
from langflow.utils.util import get_number_of_workers
|
||||
|
||||
uvicorn.run(app, host="127.0.0.1", port=7860)
|
||||
configure()
|
||||
uvicorn.run(
|
||||
create_app,
|
||||
host="127.0.0.1",
|
||||
port=7860,
|
||||
workers=get_number_of_workers(),
|
||||
log_level="debug",
|
||||
reload=True,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwa
|
|||
try:
|
||||
fix_memory_inputs(langchain_object)
|
||||
except Exception as exc:
|
||||
logger.error(exc)
|
||||
logger.error(f"Error fixing memory inputs: {exc}")
|
||||
|
||||
try:
|
||||
async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)]
|
||||
|
|
|
|||
|
|
@ -85,12 +85,17 @@ def get_input_str_if_only_one_input(inputs: dict) -> Optional[str]:
|
|||
return list(inputs.values())[0] if len(inputs) == 1 else None
|
||||
|
||||
|
||||
def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = None):
|
||||
def process_graph_cached(
|
||||
data_graph: Dict[str, Any], inputs: Optional[dict] = None, clear_cache=False
|
||||
):
|
||||
"""
|
||||
Process graph by extracting input variables and replacing ZeroShotPrompt
|
||||
with PromptTemplate,then run the graph and return the result and thought.
|
||||
"""
|
||||
# Load langchain object
|
||||
if clear_cache:
|
||||
build_sorted_vertices_with_caching.clear_cache()
|
||||
logger.debug("Cleared cache")
|
||||
langchain_object, artifacts = build_sorted_vertices_with_caching(data_graph)
|
||||
logger.debug("Loaded LangChain object")
|
||||
if inputs is None:
|
||||
|
|
|
|||
|
|
@ -1,10 +1,13 @@
|
|||
import os
|
||||
from typing import Optional
|
||||
from typing import Optional, List
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
from pydantic import BaseSettings, root_validator
|
||||
from langflow.utils.logger import logger
|
||||
|
||||
BASE_COMPONENTS_PATH = Path(__file__).parent / "components"
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
chains: dict = {}
|
||||
|
|
@ -22,13 +25,16 @@ class Settings(BaseSettings):
|
|||
textsplitters: dict = {}
|
||||
utilities: dict = {}
|
||||
output_parsers: dict = {}
|
||||
custom_components: dict = {}
|
||||
|
||||
dev: bool = False
|
||||
database_url: Optional[str] = None
|
||||
cache: str = "InMemoryCache"
|
||||
remove_api_keys: bool = False
|
||||
component_path: List[Path]
|
||||
|
||||
@root_validator(pre=True)
|
||||
def set_database_url(cls, values):
|
||||
def set_env_variables(cls, values):
|
||||
if "database_url" not in values:
|
||||
logger.debug(
|
||||
"No database_url provided, trying LANGFLOW_DATABASE_URL env variable"
|
||||
|
|
@ -38,6 +44,19 @@ class Settings(BaseSettings):
|
|||
else:
|
||||
logger.debug("No DATABASE_URL env variable, using sqlite database")
|
||||
values["database_url"] = "sqlite:///./langflow.db"
|
||||
|
||||
if not values.get("component_path"):
|
||||
values["component_path"] = [BASE_COMPONENTS_PATH]
|
||||
elif BASE_COMPONENTS_PATH not in values["component_path"]:
|
||||
values["component_path"].append(BASE_COMPONENTS_PATH)
|
||||
|
||||
if os.getenv("LANGFLOW_COMPONENT_PATH"):
|
||||
langflow_component_path = Path(os.getenv("LANGFLOW_COMPONENT_PATH"))
|
||||
if (
|
||||
langflow_component_path.exists()
|
||||
and langflow_component_path not in values["component_path"]
|
||||
):
|
||||
values["component_path"].append(langflow_component_path)
|
||||
return values
|
||||
|
||||
class Config:
|
||||
|
|
@ -68,12 +87,20 @@ class Settings(BaseSettings):
|
|||
self.documentloaders = new_settings.documentloaders or {}
|
||||
self.retrievers = new_settings.retrievers or {}
|
||||
self.output_parsers = new_settings.output_parsers or {}
|
||||
self.custom_components = new_settings.custom_components or {}
|
||||
self.component_path = new_settings.component_path or []
|
||||
self.dev = dev
|
||||
|
||||
def update_settings(self, **kwargs):
|
||||
for key, value in kwargs.items():
|
||||
if hasattr(self, key):
|
||||
setattr(self, key, value)
|
||||
if isinstance(getattr(self, key), list):
|
||||
if isinstance(value, list):
|
||||
getattr(self, key).extend(value)
|
||||
else:
|
||||
getattr(self, key).append(value)
|
||||
else:
|
||||
setattr(self, key, value)
|
||||
|
||||
|
||||
def save_settings_to_yaml(settings: Settings, file_path: str):
|
||||
|
|
|
|||
|
|
@ -6,23 +6,58 @@ from pydantic import BaseModel
|
|||
|
||||
class TemplateFieldCreator(BaseModel, ABC):
|
||||
field_type: str = "str"
|
||||
"""The type of field this is. Default is a string."""
|
||||
|
||||
required: bool = False
|
||||
"""Specifies if the field is required. Defaults to False."""
|
||||
|
||||
placeholder: str = ""
|
||||
"""A placeholder string for the field. Default is an empty string."""
|
||||
|
||||
is_list: bool = False
|
||||
"""Defines if the field is a list. Default is False."""
|
||||
|
||||
show: bool = True
|
||||
"""Should the field be shown. Defaults to True."""
|
||||
|
||||
multiline: bool = False
|
||||
"""Defines if the field will allow the user to open a text editor. Default is False."""
|
||||
|
||||
value: Any = None
|
||||
"""The value of the field. Default is None."""
|
||||
|
||||
suffixes: list[str] = []
|
||||
fileTypes: list[str] = []
|
||||
"""List of suffixes for a file field. Default is an empty list."""
|
||||
|
||||
file_types: list[str] = []
|
||||
"""List of file types associated with the field. Default is an empty list. (duplicate)"""
|
||||
|
||||
file_path: Union[str, None] = None
|
||||
"""The file path of the field if it is a file. Defaults to None."""
|
||||
|
||||
password: bool = False
|
||||
"""Specifies if the field is a password. Defaults to False."""
|
||||
|
||||
options: list[str] = []
|
||||
"""List of options for the field. Only used when is_list=True. Default is an empty list."""
|
||||
|
||||
name: str = ""
|
||||
"""Name of the field. Default is an empty string."""
|
||||
|
||||
display_name: Optional[str] = None
|
||||
"""Display name of the field. Defaults to None."""
|
||||
|
||||
advanced: bool = False
|
||||
"""Specifies if the field will an advanced parameter (hidden). Defaults to False."""
|
||||
|
||||
input_types: list[str] = []
|
||||
"""List of input types for the handle when the field has more than one type. Default is an empty list."""
|
||||
|
||||
dynamic: bool = False
|
||||
"""Specifies if the field is dynamic. Defaults to False."""
|
||||
|
||||
info: Optional[str] = ""
|
||||
"""Additional information about the field to be shown in the tooltip. Defaults to an empty string."""
|
||||
|
||||
def to_dict(self):
|
||||
result = self.dict()
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ from langflow.template.frontend_node import (
|
|||
vectorstores,
|
||||
documentloaders,
|
||||
textsplitters,
|
||||
custom_components,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
|
|
@ -22,4 +23,5 @@ __all__ = [
|
|||
"vectorstores",
|
||||
"documentloaders",
|
||||
"textsplitters",
|
||||
"custom_components",
|
||||
]
|
||||
|
|
|
|||
|
|
@ -145,7 +145,7 @@ class CSVAgentNode(FrontendNode):
|
|||
name="path",
|
||||
value="",
|
||||
suffixes=[".csv"],
|
||||
fileTypes=["csv"],
|
||||
file_types=["csv"],
|
||||
),
|
||||
TemplateField(
|
||||
field_type="BaseLanguageModel",
|
||||
|
|
|
|||
|
|
@ -5,13 +5,14 @@ from typing import List, Optional
|
|||
from pydantic import BaseModel, Field
|
||||
|
||||
from langflow.template.frontend_node.formatter import field_formatters
|
||||
from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS
|
||||
from langflow.template.frontend_node.constants import (
|
||||
CLASSES_TO_REMOVE,
|
||||
FORCE_SHOW_FIELDS,
|
||||
)
|
||||
from langflow.template.field.base import TemplateField
|
||||
from langflow.template.template.base import Template
|
||||
from langflow.utils import constants
|
||||
|
||||
CLASSES_TO_REMOVE = ["Serializable", "BaseModel", "object"]
|
||||
|
||||
|
||||
class FieldFormatters(BaseModel):
|
||||
formatters = {
|
||||
|
|
@ -51,14 +52,7 @@ class FrontendNode(BaseModel):
|
|||
custom_fields: defaultdict = defaultdict(list)
|
||||
output_types: List[str] = []
|
||||
field_formatters: FieldFormatters = Field(default_factory=FieldFormatters)
|
||||
|
||||
def process_base_classes(self) -> None:
|
||||
"""Removes unwanted base classes from the list of base classes."""
|
||||
self.base_classes = [
|
||||
base_class
|
||||
for base_class in self.base_classes
|
||||
if base_class not in CLASSES_TO_REMOVE
|
||||
]
|
||||
beta: bool = False
|
||||
|
||||
# field formatters is an instance attribute but it is not used in the class
|
||||
# so we need to create a method to get it
|
||||
|
|
@ -70,6 +64,14 @@ class FrontendNode(BaseModel):
|
|||
"""Sets the documentation of the frontend node."""
|
||||
self.documentation = documentation
|
||||
|
||||
def process_base_classes(self) -> None:
|
||||
"""Removes unwanted base classes from the list of base classes."""
|
||||
self.base_classes = [
|
||||
base_class
|
||||
for base_class in self.base_classes
|
||||
if base_class not in CLASSES_TO_REMOVE
|
||||
]
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
"""Returns a dict representation of the frontend node."""
|
||||
self.process_base_classes()
|
||||
|
|
@ -82,6 +84,7 @@ class FrontendNode(BaseModel):
|
|||
"custom_fields": self.custom_fields,
|
||||
"output_types": self.output_types,
|
||||
"documentation": self.documentation,
|
||||
"beta": self.beta,
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -63,3 +63,6 @@ You can change this to use other APIs like JinaChat, LocalAI and Prem.
|
|||
|
||||
INPUT_KEY_INFO = """The variable to be used as Chat Input when more than one variable is available."""
|
||||
OUTPUT_KEY_INFO = """The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)"""
|
||||
|
||||
|
||||
CLASSES_TO_REMOVE = ["Serializable", "BaseModel", "object"]
|
||||
|
|
|
|||
|
|
@ -0,0 +1,31 @@
|
|||
from langflow.template.field.base import TemplateField
|
||||
from langflow.template.frontend_node.base import FrontendNode
|
||||
from langflow.template.template.base import Template
|
||||
from langflow.interface.custom.constants import DEFAULT_CUSTOM_COMPONENT_CODE
|
||||
|
||||
|
||||
class CustomComponentFrontendNode(FrontendNode):
    """Frontend node describing the generic "Custom Component" editor entry.

    Ships a single dynamic, required "code" field pre-filled with the default
    component source so the user can write their own component in the UI.
    """

    name: str = "CustomComponent"
    display_name: str = "Custom Component"
    beta: bool = True  # surfaced to the UI as an experimental feature
    template: Template = Template(
        type_name="CustomComponent",
        fields=[
            TemplateField(
                field_type="code",
                required=True,
                placeholder="",
                is_list=False,
                show=True,
                value=DEFAULT_CUSTOM_COMPONENT_CODE,
                name="code",
                advanced=False,
                dynamic=True,
            )
        ],
    )
    description: str = "Create any custom component you want!"
    base_classes: list[str] = []

    # NOTE: the redundant to_dict() override that only delegated to super()
    # was removed; the inherited FrontendNode.to_dict() is used directly.
|
||||
|
|
@ -14,7 +14,7 @@ def build_file_field(
|
|||
name=name,
|
||||
value="",
|
||||
suffixes=suffixes,
|
||||
fileTypes=fileTypes,
|
||||
file_types=fileTypes,
|
||||
)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ class LLMFrontendNode(FrontendNode):
|
|||
name="credentials",
|
||||
value="",
|
||||
suffixes=[".json"],
|
||||
fileTypes=["json"],
|
||||
file_types=["json"],
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
from langflow.template.field.base import TemplateField
|
||||
from langflow.template.frontend_node.base import FrontendNode
|
||||
from langflow.template.template.base import Template
|
||||
from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION
|
||||
from langflow.utils.constants import (
|
||||
DEFAULT_PYTHON_FUNCTION,
|
||||
)
|
||||
|
||||
|
||||
class ToolNode(FrontendNode):
|
||||
|
|
|
|||
|
|
@ -17,18 +17,29 @@ CHAT_OPENAI_MODELS = [
|
|||
]
|
||||
|
||||
ANTHROPIC_MODELS = [
|
||||
"claude-v1", # largest model, ideal for a wide range of more complex tasks.
|
||||
"claude-v1-100k", # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window.
|
||||
"claude-instant-v1", # A smaller model with far lower latency, sampling at roughly 40 words/sec!
|
||||
"claude-instant-v1-100k", # Like claude-instant-v1 with a 100,000 token context window but retains its performance.
|
||||
# largest model, ideal for a wide range of more complex tasks.
|
||||
"claude-v1",
|
||||
# An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window.
|
||||
"claude-v1-100k",
|
||||
# A smaller model with far lower latency, sampling at roughly 40 words/sec!
|
||||
"claude-instant-v1",
|
||||
# Like claude-instant-v1 with a 100,000 token context window but retains its performance.
|
||||
"claude-instant-v1-100k",
|
||||
# Specific sub-versions of the above models:
|
||||
"claude-v1.3", # Vs claude-v1.2: better instruction-following, code, and non-English dialogue and writing.
|
||||
"claude-v1.3-100k", # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window.
|
||||
"claude-v1.2", # Vs claude-v1.1: small adv in general helpfulness, instruction following, coding, and other tasks.
|
||||
"claude-v1.0", # An earlier version of claude-v1.
|
||||
"claude-instant-v1.1", # Latest version of claude-instant-v1. Better than claude-instant-v1.0 at most tasks.
|
||||
"claude-instant-v1.1-100k", # Version of claude-instant-v1.1 with a 100K token context window.
|
||||
"claude-instant-v1.0", # An earlier version of claude-instant-v1.
|
||||
# Vs claude-v1.2: better instruction-following, code, and non-English dialogue and writing.
|
||||
"claude-v1.3",
|
||||
# An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window.
|
||||
"claude-v1.3-100k",
|
||||
# Vs claude-v1.1: small adv in general helpfulness, instruction following, coding, and other tasks.
|
||||
"claude-v1.2",
|
||||
# An earlier version of claude-v1.
|
||||
"claude-v1.0",
|
||||
# Latest version of claude-instant-v1. Better than claude-instant-v1.0 at most tasks.
|
||||
"claude-instant-v1.1",
|
||||
# Version of claude-instant-v1.1 with a 100K token context window.
|
||||
"claude-instant-v1.1-100k",
|
||||
# An earlier version of claude-instant-v1.
|
||||
"claude-instant-v1.0",
|
||||
]
|
||||
|
||||
DEFAULT_PYTHON_FUNCTION = """
|
||||
|
|
@ -36,4 +47,5 @@ def python_function(text: str) -> str:
|
|||
\"\"\"This is a default python function that returns the input text\"\"\"
|
||||
return text
|
||||
"""
|
||||
|
||||
DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"]
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from rich.logging import RichHandler
|
|||
logger = logging.getLogger("langflow")
|
||||
|
||||
|
||||
def configure(log_level: str = "INFO", log_file: Path = None): # type: ignore
|
||||
def configure(log_level: str = "DEBUG", log_file: Path = None): # type: ignore
|
||||
log_format = "%(asctime)s - %(levelname)s - %(message)s"
|
||||
log_level_value = getattr(logging, log_level.upper(), logging.INFO)
|
||||
|
||||
|
|
|
|||
2
src/backend/langflow/utils/types.py
Normal file
2
src/backend/langflow/utils/types.py
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
class Prompt:
    # Empty marker class; presumably used as a sentinel type for prompt-typed
    # fields (cf. the "Prompt" -> "prompt" field-type mapping) — TODO confirm.
    pass
|
||||
|
|
@ -1,13 +1,15 @@
|
|||
import importlib
|
||||
import inspect
|
||||
import re
|
||||
import inspect
|
||||
import importlib
|
||||
from functools import wraps
|
||||
from typing import Dict, Optional
|
||||
from typing import Optional, Dict, Any, Union
|
||||
|
||||
from docstring_parser import parse # type: ignore
|
||||
|
||||
from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS
|
||||
from langflow.utils import constants
|
||||
from langflow.utils.logger import logger
|
||||
from multiprocess import cpu_count # type: ignore
|
||||
|
||||
|
||||
def build_template_from_function(
|
||||
|
|
@ -214,111 +216,6 @@ def get_default_factory(module: str, function: str):
|
|||
return None
|
||||
|
||||
|
||||
def format_dict(d, name: Optional[str] = None):
|
||||
"""
|
||||
Formats a dictionary by removing certain keys and modifying the
|
||||
values of other keys.
|
||||
|
||||
Args:
|
||||
d: the dictionary to format
|
||||
name: the name of the class to format
|
||||
|
||||
Returns:
|
||||
A new dictionary with the desired modifications applied.
|
||||
"""
|
||||
|
||||
# Process remaining keys
|
||||
for key, value in d.items():
|
||||
if key == "_type":
|
||||
continue
|
||||
|
||||
_type = value["type"]
|
||||
|
||||
if not isinstance(_type, str):
|
||||
_type = _type.__name__
|
||||
|
||||
# Remove 'Optional' wrapper
|
||||
if "Optional" in _type:
|
||||
_type = _type.replace("Optional[", "")[:-1]
|
||||
|
||||
# Check for list type
|
||||
if "List" in _type or "Sequence" in _type or "Set" in _type:
|
||||
_type = (
|
||||
_type.replace("List[", "")
|
||||
.replace("Sequence[", "")
|
||||
.replace("Set[", "")[:-1]
|
||||
)
|
||||
value["list"] = True
|
||||
else:
|
||||
value["list"] = False
|
||||
|
||||
# Replace 'Mapping' with 'dict'
|
||||
if "Mapping" in _type:
|
||||
_type = _type.replace("Mapping", "dict")
|
||||
|
||||
# Change type from str to Tool
|
||||
value["type"] = "Tool" if key in ["allowed_tools"] else _type
|
||||
|
||||
value["type"] = "int" if key in ["max_value_length"] else value["type"]
|
||||
|
||||
# Show or not field
|
||||
value["show"] = bool(
|
||||
(value["required"] and key not in ["input_variables"])
|
||||
or key in FORCE_SHOW_FIELDS
|
||||
or "api_key" in key
|
||||
)
|
||||
|
||||
# Add password field
|
||||
value["password"] = any(
|
||||
text in key.lower() for text in ["password", "token", "api", "key"]
|
||||
)
|
||||
|
||||
# Add multline
|
||||
value["multiline"] = key in [
|
||||
"suffix",
|
||||
"prefix",
|
||||
"template",
|
||||
"examples",
|
||||
"code",
|
||||
"headers",
|
||||
"format_instructions",
|
||||
]
|
||||
|
||||
# Replace dict type with str
|
||||
if "dict" in value["type"].lower():
|
||||
value["type"] = "code"
|
||||
|
||||
if key == "dict_":
|
||||
value["type"] = "file"
|
||||
value["suffixes"] = [".json", ".yaml", ".yml"]
|
||||
value["fileTypes"] = ["json", "yaml", "yml"]
|
||||
|
||||
# Replace default value with actual value
|
||||
if "default" in value:
|
||||
value["value"] = value["default"]
|
||||
value.pop("default")
|
||||
|
||||
if key == "headers":
|
||||
value[
|
||||
"value"
|
||||
] = """{'Authorization':
|
||||
'Bearer <token>'}"""
|
||||
# Add options to openai
|
||||
if name == "OpenAI" and key == "model_name":
|
||||
value["options"] = constants.OPENAI_MODELS
|
||||
value["list"] = True
|
||||
value["value"] = constants.OPENAI_MODELS[0]
|
||||
elif name == "ChatOpenAI" and key == "model_name":
|
||||
value["options"] = constants.CHAT_OPENAI_MODELS
|
||||
value["list"] = True
|
||||
value["value"] = constants.CHAT_OPENAI_MODELS[0]
|
||||
elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name":
|
||||
value["options"] = constants.ANTHROPIC_MODELS
|
||||
value["list"] = True
|
||||
value["value"] = constants.ANTHROPIC_MODELS[0]
|
||||
return d
|
||||
|
||||
|
||||
def update_verbose(d: dict, new_value: bool) -> dict:
|
||||
"""
|
||||
Recursively updates the value of the 'verbose' key in a dictionary.
|
||||
|
|
@ -349,3 +246,219 @@ def sync_to_async(func):
|
|||
return func(*args, **kwargs)
|
||||
|
||||
return async_wrapper
|
||||
|
||||
|
||||
def format_dict(
    dictionary: Dict[str, Any], class_name: Optional[str] = None
) -> Dict[str, Any]:
    """
    Formats a dictionary by removing certain keys and modifying the
    values of other keys.

    Note: the dictionary is modified in place and is also returned; it is
    not a copy.

    Returns:
        The same dictionary with the desired modifications applied.
    """

    for key, value in dictionary.items():
        if key == "_type":
            continue

        # Resolve the declared type to a string, then strip wrappers in order:
        # Optional[...] -> List/Sequence/Set[...] (records list-ness) -> Mapping.
        _type: Union[str, type] = get_type(value)

        _type = remove_optional_wrapper(_type)
        _type = check_list_type(_type, value)
        _type = replace_mapping_with_dict(_type)

        value["type"] = get_formatted_type(key, _type)
        value["show"] = should_show_field(value, key)
        value["password"] = is_password_field(key)
        value["multiline"] = is_multiline_field(key)

        # Dict-typed fields are edited as code in the UI.
        replace_dict_type_with_code(value)

        if key == "dict_":
            set_dict_file_attributes(value)

        replace_default_value_with_actual(value)

        if key == "headers":
            set_headers_value(value)

        add_options_to_field(value, class_name, key)

    return dictionary
|
||||
|
||||
|
||||
def get_type(value: Any) -> Union[str, type]:
    """Return the field's declared type as a string (class objects -> __name__)."""
    declared = value["type"]
    if isinstance(declared, str):
        return declared
    return declared.__name__
|
||||
|
||||
|
||||
def remove_optional_wrapper(_type: Union[str, type]) -> str:
    """Strip an "Optional[...]" wrapper from the type string, if present.

    A class object is stringified first; strings without "Optional" pass
    through unchanged.
    """
    if isinstance(_type, type):
        _type = str(_type)

    if "Optional" not in _type:
        return _type

    # Drop the "Optional[" prefix and the matching trailing "]".
    return _type.replace("Optional[", "")[:-1]
|
||||
|
||||
|
||||
def check_list_type(_type: str, value: Dict[str, Any]) -> str:
    """Unwrap List/Sequence/Set containers and record list-ness on ``value``.

    Sets value["list"] to True when a container wrapper was present, False
    otherwise, and returns the (possibly unwrapped) type string.
    """
    is_container = any(
        container in _type for container in ("List", "Sequence", "Set")
    )
    value["list"] = is_container

    if is_container:
        for wrapper in ("List[", "Sequence[", "Set["):
            _type = _type.replace(wrapper, "")
        # Drop the trailing "]" left over from the unwrapped container.
        _type = _type[:-1]

    return _type
|
||||
|
||||
|
||||
def replace_mapping_with_dict(_type: str) -> str:
    """Rewrite ``Mapping`` to ``dict`` inside a type string.

    Returns:
        The type string with every occurrence of ``"Mapping"`` replaced by
        ``"dict"`` (a no-op when ``"Mapping"`` is absent).
    """
    return _type.replace("Mapping", "dict")
|
||||
|
||||
|
||||
def get_formatted_type(key: str, _type: str) -> str:
    """Apply per-key overrides to a type string.

    Certain field names are always presented with a fixed type regardless of
    the annotation found on the class (e.g. ``allowed_tools`` is a ``Tool``).

    Returns:
        The overridden type for special keys, otherwise ``_type`` unchanged.
    """
    overrides = {
        "allowed_tools": "Tool",
        "max_value_length": "int",
    }
    return overrides.get(key, _type)
|
||||
|
||||
|
||||
def should_show_field(value: Dict[str, Any], key: str) -> bool:
    """
    Determines if the field should be shown or not.

    A field is shown when it is required (except ``input_variables``),
    when it is explicitly force-shown, or when it looks like a secret
    (delegated to :func:`is_password_field` so the marker list lives in
    exactly one place).

    Returns:
        True if the field should be shown, False otherwise.
    """
    return (
        (value["required"] and key != "input_variables")
        or key in FORCE_SHOW_FIELDS
        # Reuse is_password_field instead of duplicating its marker list.
        or is_password_field(key)
    )
|
||||
|
||||
|
||||
def is_password_field(key: str) -> bool:
    """Report whether a field name looks like it holds a secret.

    Returns:
        True when the lower-cased key contains any secret marker
        ("password", "token", "api" or "key"), False otherwise.
    """
    lowered = key.lower()
    return any(marker in lowered for marker in ("password", "token", "api", "key"))
|
||||
|
||||
|
||||
def is_multiline_field(key: str) -> bool:
    """Report whether a field should be rendered as a multi-line editor.

    Returns:
        True for the fixed set of long-text field names, False otherwise.
    """
    multiline_keys = frozenset(
        (
            "suffix",
            "prefix",
            "template",
            "examples",
            "code",
            "headers",
            "format_instructions",
        )
    )
    return key in multiline_keys
|
||||
|
||||
|
||||
def replace_dict_type_with_code(value: Dict[str, Any]) -> None:
    """Rewrite dict-like field types to the ``code`` editor type, in place."""
    current_type = value["type"]
    if "dict" in current_type.lower():
        value["type"] = "code"
|
||||
|
||||
|
||||
def set_dict_file_attributes(value: Dict[str, Any]) -> None:
    """Configure the ``dict_`` field as a JSON/YAML file picker, in place."""
    value.update(
        type="file",
        suffixes=[".json", ".yaml", ".yml"],
        fileTypes=["json", "yaml", "yml"],
    )
|
||||
|
||||
|
||||
def replace_default_value_with_actual(value: Dict[str, Any]) -> None:
    """Move a ``default`` entry to ``value`` on the field dict, in place."""
    if "default" in value:
        # pop() both reads and removes the "default" key in one step.
        value["value"] = value.pop("default")
|
||||
|
||||
|
||||
def set_headers_value(value: Dict[str, Any]) -> None:
    """Seed the ``headers`` field with a placeholder auth header, in place."""
    value["value"] = "{'Authorization': 'Bearer <token>'}"
|
||||
|
||||
|
||||
def add_options_to_field(
    value: Dict[str, Any], class_name: Optional[str], key: str
) -> None:
    """Attach model-name choices for known LLM classes, in place.

    When the field is ``model_name`` and the owning class is one of the
    recognized OpenAI/Anthropic wrappers, the field gets an ``options``
    list, is marked as a list field, and defaults to the first option.
    """
    model_options = {
        "OpenAI": constants.OPENAI_MODELS,
        "ChatOpenAI": constants.CHAT_OPENAI_MODELS,
        "Anthropic": constants.ANTHROPIC_MODELS,
        "ChatAnthropic": constants.ANTHROPIC_MODELS,
    }

    if key == "model_name" and class_name in model_options:
        options = model_options[class_name]
        value["options"] = options
        value["list"] = True
        value["value"] = options[0]
|
||||
|
||||
|
||||
def get_number_of_workers(workers=None):
    """Resolve the worker count for the server process pool.

    ``None`` or ``-1`` means "auto": (2 * cpu_count) + 1, the common
    gunicorn-style heuristic. Any other value is used as-is.

    Returns:
        The resolved number of workers (also logged at debug level).
    """
    if workers is None or workers == -1:
        workers = cpu_count() * 2 + 1
    logger.debug(f"Number of workers: {workers}")
    return workers
|
||||
|
|
|
|||
|
|
@ -163,9 +163,77 @@ def create_function(code, function_name):
|
|||
return wrapped_function
|
||||
|
||||
|
||||
def create_class(code, class_name):
    """Compile a class definition from a source string and return a factory.

    Parses *code*, imports every module mentioned in its top-level
    ``import``/``from ... import`` statements into a copy of this module's
    globals, then executes only the ``ClassDef`` named *class_name* and
    returns a zero-setup factory function that instantiates it.

    Args:
        code: Python source text containing the class definition.
        class_name: Name of the class to extract and compile.

    Returns:
        A callable ``build_my_class(*args, **kwargs)`` that constructs an
        instance of the target class.

    Raises:
        ModuleNotFoundError: If an import in *code* cannot be resolved.
        StopIteration: If no class named *class_name* exists in *code*
            (raised by the ``next()`` lookup below).
    """
    # Older Python versions lack ast.TypeIgnore, which compile() expects to
    # exist when building an ast.Module; patch in a stub if it is missing.
    if not hasattr(ast, "TypeIgnore"):

        class TypeIgnore(ast.AST):
            _fields = ()

        ast.TypeIgnore = TypeIgnore

    module = ast.parse(code)
    # Copy (not alias) globals so exec below cannot pollute this module.
    exec_globals = globals().copy()

    # Resolve the user code's imports eagerly so the class body can see them.
    for node in module.body:
        if isinstance(node, ast.Import):
            for alias in node.names:
                try:
                    # Honor "import x as y": bind under the alias when given.
                    exec_globals[alias.asname or alias.name] = importlib.import_module(
                        alias.name
                    )
                except ModuleNotFoundError as e:
                    raise ModuleNotFoundError(
                        f"Module {alias.name} not found. Please install it and try again."
                    ) from e
        elif isinstance(node, ast.ImportFrom):
            try:
                imported_module = importlib.import_module(node.module)
                for alias in node.names:
                    exec_globals[alias.name] = getattr(imported_module, alias.name)
            except ModuleNotFoundError as e:
                raise ModuleNotFoundError(
                    f"Module {node.module} not found. Please install it and try again."
                ) from e

    # Locate the requested class; only that node is compiled and executed.
    class_code = next(
        node
        for node in module.body
        if isinstance(node, ast.ClassDef) and node.name == class_name
    )
    class_code.parent = None
    code_obj = compile(
        ast.Module(body=[class_code], type_ignores=[]), "<string>", "exec"
    )
    # This suppresses import errors
    # with contextlib.suppress(Exception):
    # NOTE(review): exec writes the class into the dict returned by locals();
    # reading it back via locals()[class_name] relies on CPython keeping
    # exec-added keys in that snapshot dict — confirm if targeting other
    # interpreters.
    exec(code_obj, exec_globals, locals())
    exec_globals[class_name] = locals()[class_name]

    # Return a function that imports necessary modules and creates an instance of the target class
    def build_my_class(*args, **kwargs):
        # Re-expose any modules captured above so methods resolving names at
        # call time through module globals can still find them.
        for module_name, module in exec_globals.items():
            if isinstance(module, type(importlib)):
                globals()[module_name] = module

        instance = exec_globals[class_name](*args, **kwargs)
        return instance

    # Give the factory's global namespace access to everything the class saw.
    build_my_class.__globals__.update(exec_globals)

    return build_my_class
|
||||
|
||||
|
||||
def extract_function_name(code):
    """Return the name of the first top-level function defined in *code*.

    Args:
        code: Python source text to inspect.

    Returns:
        The name of the first ``def`` statement at module level.

    Raises:
        ValueError: If *code* contains no function definition.
    """
    first_def = next(
        (stmt for stmt in ast.parse(code).body if isinstance(stmt, ast.FunctionDef)),
        None,
    )
    if first_def is None:
        raise ValueError("No function definition found in the code string")
    return first_def.name
|
||||
|
||||
|
||||
def extract_class_name(code):
    """Return the name of the first top-level class defined in *code*.

    Args:
        code: Python source text to inspect.

    Returns:
        The name of the first ``class`` statement at module level.

    Raises:
        ValueError: If *code* contains no class definition.
    """
    first_class = next(
        (stmt for stmt in ast.parse(code).body if isinstance(stmt, ast.ClassDef)),
        None,
    )
    if first_class is None:
        raise ValueError("No class definition found in the code string")
    return first_class.name
|
||||
|
|
|
|||
|
|
@ -27,6 +27,7 @@ import {
|
|||
classNames,
|
||||
getRandomKeyByssmm,
|
||||
groupByFamily,
|
||||
groupByFamilyCustom,
|
||||
} from "../../../../utils/utils";
|
||||
|
||||
export default function ParameterComponent({
|
||||
|
|
@ -49,7 +50,9 @@ export default function ParameterComponent({
|
|||
const infoHtml = useRef(null);
|
||||
const updateNodeInternals = useUpdateNodeInternals();
|
||||
const [position, setPosition] = useState(0);
|
||||
const { setTabsState, tabId, save } = useContext(TabsContext);
|
||||
const { setTabsState, tabId, save, flows } = useContext(TabsContext);
|
||||
|
||||
const flow = flows.find((f) => f.id === tabId).data?.nodes ?? null;
|
||||
|
||||
// Update component position
|
||||
useEffect(() => {
|
||||
|
|
@ -80,9 +83,11 @@ export default function ParameterComponent({
|
|||
[tabId]: {
|
||||
...prev[tabId],
|
||||
isPending: true,
|
||||
formKeysData: prev[tabId].formKeysData,
|
||||
},
|
||||
};
|
||||
});
|
||||
renderTooltips();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
|
|
@ -98,57 +103,76 @@ export default function ParameterComponent({
|
|||
);
|
||||
}, [info]);
|
||||
|
||||
useEffect(() => {
|
||||
const groupedObj = groupByFamily(myData, tooltipTitle, left, data.type);
|
||||
function renderTooltips() {
|
||||
let groupedObj = groupByFamily(myData, tooltipTitle, left, data.type, flow);
|
||||
|
||||
refNumberComponents.current = groupedObj[0]?.type?.length;
|
||||
if (groupedObj?.length === 0 && flow && flow.length > 0) {
|
||||
groupedObj = groupByFamilyCustom(
|
||||
myData,
|
||||
tooltipTitle,
|
||||
left,
|
||||
data.type,
|
||||
flow
|
||||
);
|
||||
}
|
||||
|
||||
refHtml.current = groupedObj.map((item, i) => {
|
||||
const Icon: any = nodeIconsLucide[item.family];
|
||||
if (groupedObj) {
|
||||
refNumberComponents.current = groupedObj[0]?.type?.length;
|
||||
|
||||
return (
|
||||
<span
|
||||
key={getRandomKeyByssmm() + item.family + i}
|
||||
className={classNames(
|
||||
i > 0 ? "mt-2 flex items-center" : "flex items-center"
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className="h-5 w-5"
|
||||
style={{
|
||||
color: nodeColors[item.family],
|
||||
}}
|
||||
refHtml.current = groupedObj.map((item, i) => {
|
||||
const Icon: any = nodeIconsLucide[item.family];
|
||||
|
||||
return (
|
||||
<span
|
||||
key={getRandomKeyByssmm() + item.family + i}
|
||||
className={classNames(
|
||||
i > 0 ? "mt-2 flex items-center" : "flex items-center"
|
||||
)}
|
||||
>
|
||||
<Icon
|
||||
<div
|
||||
className="h-5 w-5"
|
||||
strokeWidth={1.5}
|
||||
style={{
|
||||
color: nodeColors[item.family] ?? nodeColors.unknown,
|
||||
color: nodeColors[item.family],
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<span className="ps-2 text-xs text-foreground">
|
||||
{nodeNames[item.family] ?? ""}{" "}
|
||||
<span className="text-xs">
|
||||
{" "}
|
||||
{item.type === "" ? "" : " - "}
|
||||
{item.type.split(", ").length > 2
|
||||
? item.type.split(", ").map((el, i) => (
|
||||
<React.Fragment key={el + i}>
|
||||
<span>
|
||||
{i === item.type.split(", ").length - 1
|
||||
? el
|
||||
: (el += `, `)}
|
||||
</span>
|
||||
</React.Fragment>
|
||||
))
|
||||
: item.type}
|
||||
>
|
||||
<Icon
|
||||
className="h-5 w-5"
|
||||
strokeWidth={1.5}
|
||||
style={{
|
||||
color: nodeColors[item.family] ?? nodeColors.unknown,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<span className="ps-2 text-xs text-foreground">
|
||||
{item.family !== "custom_components"
|
||||
? nodeNames[item.family]
|
||||
: item.component ?? ""}{" "}
|
||||
<span className="text-xs">
|
||||
{" "}
|
||||
{item.type === "" ? "" : " - "}
|
||||
{item.type.split(", ").length > 2
|
||||
? item.type.split(", ").map((el, i) => (
|
||||
<React.Fragment key={el + i}>
|
||||
<span>
|
||||
{i === item.type.split(", ").length - 1
|
||||
? el
|
||||
: (el += `, `)}
|
||||
</span>
|
||||
</React.Fragment>
|
||||
))
|
||||
: item.type}
|
||||
</span>
|
||||
</span>
|
||||
</span>
|
||||
</span>
|
||||
);
|
||||
});
|
||||
}, [tooltipTitle]);
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
renderTooltips();
|
||||
}, [tooltipTitle, flow]);
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={ref}
|
||||
|
|
@ -280,6 +304,7 @@ export default function ParameterComponent({
|
|||
) : left === true && type === "code" ? (
|
||||
<div className="mt-2 w-full">
|
||||
<CodeAreaComponent
|
||||
dynamic={data.node.template[name].dynamic ?? false}
|
||||
setNodeClass={(nodeClass) => {
|
||||
data.node = nodeClass;
|
||||
}}
|
||||
|
|
|
|||
|
|
@ -95,6 +95,11 @@ export default function GenericNode({
|
|||
"generic-node-div"
|
||||
)}
|
||||
>
|
||||
{data.node.beta && (
|
||||
<div className="beta-badge-wrapper">
|
||||
<div className="beta-badge-content">BETA</div>
|
||||
</div>
|
||||
)}
|
||||
<div className="generic-node-div-title">
|
||||
<div className="generic-node-title-arrangement">
|
||||
<IconComponent
|
||||
|
|
@ -127,7 +132,7 @@ export default function GenericNode({
|
|||
</span>
|
||||
) : (
|
||||
<div className="max-h-96 overflow-auto">
|
||||
{validationStatus.params
|
||||
{typeof validationStatus.params === "string"
|
||||
? validationStatus.params
|
||||
.split("\n")
|
||||
.map((line, index) => <div key={index}>{line}</div>)
|
||||
|
|
@ -178,6 +183,14 @@ export default function GenericNode({
|
|||
{data.node.template[t].show &&
|
||||
!data.node.template[t].advanced ? (
|
||||
<ParameterComponent
|
||||
key={
|
||||
(data.node.template[t].input_types?.join(";") ??
|
||||
data.node.template[t].type) +
|
||||
"|" +
|
||||
t +
|
||||
"|" +
|
||||
data.id
|
||||
}
|
||||
data={data}
|
||||
setData={setData}
|
||||
color={
|
||||
|
|
@ -225,6 +238,7 @@ export default function GenericNode({
|
|||
{" "}
|
||||
</div>
|
||||
<ParameterComponent
|
||||
key={[data.type, data.id, ...data.node.base_classes].join("|")}
|
||||
data={data}
|
||||
setData={setData}
|
||||
color={nodeColors[types[data.type]] ?? nodeColors.unknown}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
import React, { ChangeEvent, useState } from "react";
|
||||
import React, { ChangeEvent, useEffect, useRef, useState } from "react";
|
||||
import { Input } from "../../components/ui/input";
|
||||
import { Label } from "../../components/ui/label";
|
||||
import { Textarea } from "../../components/ui/textarea";
|
||||
import { readFlowsFromDatabase } from "../../controllers/API";
|
||||
|
||||
type InputProps = {
|
||||
name: string | null;
|
||||
|
|
@ -9,6 +10,8 @@ type InputProps = {
|
|||
maxLength?: number;
|
||||
flows: Array<{ id: string; name: string; description: string }>;
|
||||
tabId: string;
|
||||
invalidName: boolean;
|
||||
setInvalidName: (invalidName: boolean) => void;
|
||||
setName: (name: string) => void;
|
||||
setDescription: (description: string) => void;
|
||||
updateFlow: (flow: { id: string; name: string }) => void;
|
||||
|
|
@ -16,6 +19,8 @@ type InputProps = {
|
|||
|
||||
export const EditFlowSettings: React.FC<InputProps> = ({
|
||||
name,
|
||||
invalidName,
|
||||
setInvalidName,
|
||||
description,
|
||||
maxLength = 50,
|
||||
flows,
|
||||
|
|
@ -25,6 +30,14 @@ export const EditFlowSettings: React.FC<InputProps> = ({
|
|||
updateFlow,
|
||||
}) => {
|
||||
const [isMaxLength, setIsMaxLength] = useState(false);
|
||||
const nameLists = useRef([]);
|
||||
useEffect(() => {
|
||||
readFlowsFromDatabase().then((flows) => {
|
||||
flows.forEach((flow) => {
|
||||
nameLists.current.push(flow.name);
|
||||
});
|
||||
});
|
||||
}, []);
|
||||
|
||||
const handleNameChange = (event: ChangeEvent<HTMLInputElement>) => {
|
||||
const { value } = event.target;
|
||||
|
|
@ -33,7 +46,11 @@ export const EditFlowSettings: React.FC<InputProps> = ({
|
|||
} else {
|
||||
setIsMaxLength(false);
|
||||
}
|
||||
|
||||
if (!nameLists.current.includes(value)) {
|
||||
setInvalidName(false);
|
||||
} else {
|
||||
setInvalidName(true);
|
||||
}
|
||||
setName(value);
|
||||
};
|
||||
|
||||
|
|
@ -55,6 +72,9 @@ export const EditFlowSettings: React.FC<InputProps> = ({
|
|||
{isMaxLength && (
|
||||
<span className="edit-flow-span">Character limit reached</span>
|
||||
)}
|
||||
{invalidName && (
|
||||
<span className="edit-flow-span">Name already in use</span>
|
||||
)}
|
||||
</div>
|
||||
<Input
|
||||
className="nopan nodrag noundo nocopy mt-2 font-normal"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { useEffect, useState } from "react";
|
||||
import CodeAreaModal from "../../modals/codeAreaModal";
|
||||
import { TextAreaComponentType } from "../../types/components";
|
||||
import { CodeAreaComponentType } from "../../types/components";
|
||||
|
||||
import IconComponent from "../genericIconComponent";
|
||||
|
||||
|
|
@ -10,8 +10,9 @@ export default function CodeAreaComponent({
|
|||
disabled,
|
||||
editNode = false,
|
||||
nodeClass,
|
||||
dynamic,
|
||||
setNodeClass,
|
||||
}: TextAreaComponentType) {
|
||||
}: CodeAreaComponentType) {
|
||||
const [myValue, setMyValue] = useState(
|
||||
typeof value == "string" ? value : JSON.stringify(value)
|
||||
);
|
||||
|
|
@ -29,6 +30,7 @@ export default function CodeAreaComponent({
|
|||
return (
|
||||
<div className={disabled ? "pointer-events-none w-full " : " w-full"}>
|
||||
<CodeAreaModal
|
||||
dynamic={dynamic}
|
||||
value={myValue}
|
||||
nodeClass={nodeClass}
|
||||
setNodeClass={setNodeClass}
|
||||
|
|
|
|||
|
|
@ -491,3 +491,14 @@ export const NOUNS: string[] = [
|
|||
*
|
||||
*/
|
||||
export const USER_PROJECTS_HEADER = "My Collection";
|
||||
|
||||
/**
|
||||
* URLs excluded from error retries.
|
||||
* @constant
|
||||
*
|
||||
*/
|
||||
export const URL_EXCLUDED_FROM_ERROR_RETRIES = [
|
||||
"/api/v1/validate/code",
|
||||
"/api/v1/custom_component",
|
||||
"/api/v1/validate/prompt",
|
||||
];
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import axios, { AxiosError, AxiosInstance } from "axios";
|
||||
import { useContext, useEffect, useRef } from "react";
|
||||
import { URL_EXCLUDED_FROM_ERROR_RETRIES } from "../../constants/constants";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
|
||||
// Create a new Axios instance
|
||||
|
|
@ -15,6 +16,9 @@ function ApiInterceptor() {
|
|||
const interceptor = api.interceptors.response.use(
|
||||
(response) => response,
|
||||
async (error: AxiosError) => {
|
||||
if (URL_EXCLUDED_FROM_ERROR_RETRIES.includes(error.config?.url)) {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
let retryCount = 0;
|
||||
|
||||
while (retryCount < 4) {
|
||||
|
|
@ -31,7 +35,7 @@ function ApiInterceptor() {
|
|||
"Refresh the page",
|
||||
"Use a new flow tab",
|
||||
"Check if the backend is up",
|
||||
"Endpoint: " + error.config.url,
|
||||
"Endpoint: " + error.config?.url,
|
||||
],
|
||||
});
|
||||
return Promise.reject(error);
|
||||
|
|
|
|||
|
|
@ -339,3 +339,10 @@ export async function uploadFile(
|
|||
formData.append("file", file);
|
||||
return await api.post(`/api/v1/upload/${id}`, formData);
|
||||
}
|
||||
|
||||
export async function postCustomComponent(
|
||||
code: string,
|
||||
apiClass: APIClassType
|
||||
): Promise<AxiosResponse<APIClassType>> {
|
||||
return await api.post(`/api/v1/custom_component`, { code });
|
||||
}
|
||||
|
|
|
|||
22
src/frontend/src/icons/GradientSparkles/index.tsx
Normal file
22
src/frontend/src/icons/GradientSparkles/index.tsx
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
import { Infinity } from "lucide-react";
|
||||
import { forwardRef } from "react";
|
||||
|
||||
const GradientSparkles = forwardRef<SVGSVGElement, React.PropsWithChildren<{}>>(
|
||||
(props, ref) => {
|
||||
return (
|
||||
<>
|
||||
<svg width="0" height="0" style={{ position: "absolute" }}>
|
||||
<defs>
|
||||
<linearGradient id="grad1" x1="0%" y1="0%" x2="100%" y2="0%">
|
||||
<stop className="gradient-start" offset="0%" />
|
||||
<stop className="gradient-end" offset="100%" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
<Infinity stroke="url(#grad1)" ref={ref} {...props} />
|
||||
</>
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
export default GradientSparkles;
|
||||
|
|
@ -261,6 +261,13 @@ const EditNodeModal = forwardRef(
|
|||
) : myData.node.template[n].type === "code" ? (
|
||||
<div className="mx-auto">
|
||||
<CodeAreaComponent
|
||||
dynamic={
|
||||
data.node.template[n].dynamic ?? false
|
||||
}
|
||||
setNodeClass={(nodeClass) => {
|
||||
data.node = nodeClass;
|
||||
}}
|
||||
nodeClass={data.node}
|
||||
disabled={disabled}
|
||||
editNode={true}
|
||||
value={myData.node.template[n].value ?? ""}
|
||||
|
|
|
|||
|
|
@ -160,6 +160,11 @@ export default function ModalField({
|
|||
) : type === "code" ? (
|
||||
<div className="w-1/2">
|
||||
<CodeAreaComponent
|
||||
dynamic={data.node.template[name].dynamic ?? false}
|
||||
setNodeClass={(nodeClass) => {
|
||||
data.node = nodeClass;
|
||||
}}
|
||||
nodeClass={data.node}
|
||||
disabled={false}
|
||||
value={data.node.template[name].value ?? ""}
|
||||
onChange={(t: string) => {
|
||||
|
|
|
|||
|
|
@ -112,8 +112,9 @@ function BaseModal({
|
|||
<div className={`mt-2 flex flex-col ${height} w-full `}>
|
||||
{ContentChild}
|
||||
</div>
|
||||
|
||||
<div className="flex flex-row-reverse">{ContentFooter}</div>
|
||||
{ContentFooter && (
|
||||
<div className="flex flex-row-reverse">{ContentFooter}</div>
|
||||
)}
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
|
|
|
|||
|
|
@ -3,14 +3,16 @@ import "ace-builds/src-noconflict/ext-language_tools";
|
|||
import "ace-builds/src-noconflict/mode-python";
|
||||
import "ace-builds/src-noconflict/theme-github";
|
||||
import "ace-builds/src-noconflict/theme-twilight";
|
||||
import { ReactNode, useContext, useState } from "react";
|
||||
// import "ace-builds/webpack-resolver";
|
||||
import { ReactNode, useContext, useEffect, useState } from "react";
|
||||
import AceEditor from "react-ace";
|
||||
import IconComponent from "../../components/genericIconComponent";
|
||||
import { Button } from "../../components/ui/button";
|
||||
import { CODE_PROMPT_DIALOG_SUBTITLE } from "../../constants/constants";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { darkContext } from "../../contexts/darkContext";
|
||||
import { postValidateCode } from "../../controllers/API";
|
||||
import { typesContext } from "../../contexts/typesContext";
|
||||
import { postCustomComponent, postValidateCode } from "../../controllers/API";
|
||||
import { APIClassType } from "../../types/api";
|
||||
import BaseModal from "../baseModal";
|
||||
|
||||
|
|
@ -20,18 +22,34 @@ export default function CodeAreaModal({
|
|||
nodeClass,
|
||||
setNodeClass,
|
||||
children,
|
||||
dynamic,
|
||||
}: {
|
||||
setValue: (value: string) => void;
|
||||
value: string;
|
||||
nodeClass: APIClassType;
|
||||
nodeClass?: APIClassType;
|
||||
children: ReactNode;
|
||||
setNodeClass: (Class: APIClassType) => void;
|
||||
setNodeClass?: (Class: APIClassType) => void;
|
||||
dynamic?: boolean;
|
||||
}) {
|
||||
const [code, setCode] = useState(value);
|
||||
const { dark } = useContext(darkContext);
|
||||
const { reactFlowInstance } = useContext(typesContext);
|
||||
const [height, setHeight] = useState(null);
|
||||
const { setErrorData, setSuccessData } = useContext(alertContext);
|
||||
const [error, setError] = useState<{
|
||||
detail: { error: string; traceback: string };
|
||||
}>(null);
|
||||
|
||||
function handleClick() {
|
||||
useEffect(() => {
|
||||
// if nodeClass.template has more fields other than code and dynamic is true
|
||||
// do not run handleClick
|
||||
if (dynamic && Object.keys(nodeClass.template).length > 2) {
|
||||
return;
|
||||
}
|
||||
processCode();
|
||||
}, []);
|
||||
|
||||
function processNonDynamicField() {
|
||||
postValidateCode(code)
|
||||
.then((apiReturn) => {
|
||||
if (apiReturn.data) {
|
||||
|
|
@ -41,8 +59,9 @@ export default function CodeAreaModal({
|
|||
setSuccessData({
|
||||
title: "Code is ready to run",
|
||||
});
|
||||
setValue(code);
|
||||
setOpen(false);
|
||||
setValue(code);
|
||||
// setValue(code);
|
||||
} else {
|
||||
if (funcErrors.length !== 0) {
|
||||
setErrorData({
|
||||
|
|
@ -70,6 +89,50 @@ export default function CodeAreaModal({
|
|||
});
|
||||
}
|
||||
|
||||
function processDynamicField() {
|
||||
postCustomComponent(code, nodeClass)
|
||||
.then((apiReturn) => {
|
||||
const { data } = apiReturn;
|
||||
if (data) {
|
||||
setNodeClass(data);
|
||||
setValue(code);
|
||||
setError({ detail: { error: undefined, traceback: undefined } });
|
||||
setOpen(false);
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
setError(err.response.data);
|
||||
});
|
||||
}
|
||||
|
||||
function processCode() {
|
||||
if (!dynamic) {
|
||||
processNonDynamicField();
|
||||
} else {
|
||||
processDynamicField();
|
||||
}
|
||||
}
|
||||
|
||||
function handleClick() {
|
||||
processCode();
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
// Function to be executed after the state changes
|
||||
const delayedFunction = setTimeout(() => {
|
||||
if (error?.detail.error !== undefined) {
|
||||
//trigger to update the height, does not really apply any height
|
||||
setHeight("90%");
|
||||
}
|
||||
//600 to happen after the transition of 500ms
|
||||
}, 600);
|
||||
|
||||
// Cleanup function to clear the timeout if the component unmounts or the state changes again
|
||||
return () => {
|
||||
clearTimeout(delayedFunction);
|
||||
};
|
||||
}, [error, setHeight]);
|
||||
|
||||
const [open, setOpen] = useState(false);
|
||||
|
||||
return (
|
||||
|
|
@ -89,6 +152,7 @@ export default function CodeAreaModal({
|
|||
<AceEditor
|
||||
value={code}
|
||||
mode="python"
|
||||
height={height ?? "100%"}
|
||||
highlightActiveLine={true}
|
||||
showPrintMargin={false}
|
||||
fontSize={14}
|
||||
|
|
@ -99,9 +163,26 @@ export default function CodeAreaModal({
|
|||
onChange={(value) => {
|
||||
setCode(value);
|
||||
}}
|
||||
className="h-full w-full rounded-lg border-[1px] border-border custom-scroll"
|
||||
className="h-full w-full rounded-lg border-[1px] border-gray-300 custom-scroll dark:border-gray-600"
|
||||
/>
|
||||
</div>
|
||||
<div
|
||||
className={
|
||||
"w-full transition-all delay-500 " +
|
||||
(error?.detail.error !== undefined ? "h-2/6" : "h-0")
|
||||
}
|
||||
>
|
||||
<div className="mt-1 h-full w-full overflow-y-auto overflow-x-clip text-left custom-scroll">
|
||||
<h1 className="text-lg text-destructive">
|
||||
{error?.detail?.error}
|
||||
</h1>
|
||||
<div className="ml-2 w-full break-all text-sm text-status-red">
|
||||
<pre className="w-full whitespace-pre-wrap break-all">
|
||||
{error?.detail?.traceback}
|
||||
</pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex h-fit w-full justify-end">
|
||||
<Button className="mt-3" onClick={handleClick} type="submit">
|
||||
Check & Save
|
||||
|
|
|
|||
|
|
@ -23,6 +23,8 @@ export default function FlowSettingsModal({
|
|||
const [description, setDescription] = useState(
|
||||
flows.find((f) => f.id === tabId).description
|
||||
);
|
||||
const [invalidName, setInvalidName] = useState(false);
|
||||
|
||||
function handleClick() {
|
||||
let savedFlow = flows.find((f) => f.id === tabId);
|
||||
savedFlow.name = name;
|
||||
|
|
@ -39,6 +41,8 @@ export default function FlowSettingsModal({
|
|||
</BaseModal.Header>
|
||||
<BaseModal.Content>
|
||||
<EditFlowSettings
|
||||
invalidName={invalidName}
|
||||
setInvalidName={setInvalidName}
|
||||
name={name}
|
||||
description={description}
|
||||
flows={flows}
|
||||
|
|
@ -50,7 +54,7 @@ export default function FlowSettingsModal({
|
|||
</BaseModal.Content>
|
||||
|
||||
<BaseModal.Footer>
|
||||
<Button onClick={handleClick} type="submit">
|
||||
<Button disabled={invalidName} onClick={handleClick} type="submit">
|
||||
Save
|
||||
</Button>
|
||||
</BaseModal.Footer>
|
||||
|
|
|
|||
|
|
@ -31,6 +31,36 @@
|
|||
}
|
||||
}
|
||||
|
||||
@keyframes gradient-motion-start {
|
||||
0% {
|
||||
stop-color: rgb(156, 138, 236);
|
||||
}
|
||||
50% {
|
||||
stop-color: rgb(255, 130, 184);
|
||||
}
|
||||
80% {
|
||||
stop-color: rgb(255, 165, 100);
|
||||
}
|
||||
100% {
|
||||
stop-color: rgb(156, 138, 236);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes gradient-motion-end {
|
||||
0% {
|
||||
stop-color: rgb(156, 138, 236);
|
||||
}
|
||||
50% {
|
||||
stop-color: rgb(255, 165, 100);
|
||||
}
|
||||
80% {
|
||||
stop-color: rgb(255, 130, 184);
|
||||
}
|
||||
100% {
|
||||
stop-color: rgb(156, 138, 236);
|
||||
}
|
||||
}
|
||||
|
||||
@layer components {
|
||||
.round-buttons-position {
|
||||
@apply fixed right-4;
|
||||
|
|
@ -979,6 +1009,13 @@
|
|||
@apply font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70;
|
||||
}
|
||||
|
||||
.beta-badge-wrapper {
|
||||
@apply absolute right-0 top-0 h-16 w-16 overflow-hidden rounded-tr-lg;
|
||||
}
|
||||
.beta-badge-content {
|
||||
@apply mt-2 w-24 rotate-45 bg-beta-background text-center text-xs font-semibold text-beta-foreground;
|
||||
}
|
||||
|
||||
.chat-message-highlight {
|
||||
@apply rounded-md bg-indigo-100 px-0.5 dark:bg-indigo-900;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,30 +1,38 @@
|
|||
body {
|
||||
margin: 0;
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen",
|
||||
"Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue",
|
||||
sans-serif;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
margin: 0;
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen",
|
||||
"Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue",
|
||||
sans-serif;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
|
||||
code {
|
||||
font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New",
|
||||
monospace;
|
||||
font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New",
|
||||
monospace;
|
||||
}
|
||||
pre {
|
||||
font-family: inherit;
|
||||
font-family: inherit;
|
||||
}
|
||||
|
||||
.react-flow__pane {
|
||||
cursor: default;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
|
||||
.AccordionContent {
|
||||
overflow: hidden;
|
||||
overflow: hidden;
|
||||
}
|
||||
.AccordionContent[data-state='open'] {
|
||||
animation: slideDown 300ms ease-out;
|
||||
.AccordionContent[data-state="open"] {
|
||||
animation: slideDown 300ms ease-out;
|
||||
}
|
||||
.AccordionContent[data-state="closed"] {
|
||||
animation: slideUp 300ms ease-out;
|
||||
}
|
||||
|
||||
|
||||
.gradient-end {
|
||||
animation: gradient-motion-end 3s infinite forwards;
|
||||
}
|
||||
.gradient-start {
|
||||
animation: gradient-motion-start 4s infinite forwards;
|
||||
}
|
||||
.AccordionContent[data-state='closed'] {
|
||||
animation: slideUp 300ms ease-out;
|
||||
}
|
||||
|
|
@ -2,125 +2,127 @@
|
|||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
|
||||
/* TODO: Confirm that all colors here are found in tailwind config */
|
||||
|
||||
@layer base {
|
||||
|
||||
:root {
|
||||
--background: 0 0% 100%; /* hsl(0 0% 100%) */
|
||||
--foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--muted: 210 40% 98%; /* hsl(210 40% 98%) */
|
||||
--muted-foreground: 215.4 16.3% 46.9%; /* hsl(215 16% 46%) */
|
||||
--popover: 0 0% 100%; /* hsl(0 0% 100%) */
|
||||
--popover-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--card: 0 0% 100%; /* hsl(0 0% 100%) */
|
||||
--card-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--border: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */
|
||||
--input: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */
|
||||
--primary: 222.2 27% 11.2%; /* hsl(222 27% 18%) */
|
||||
--primary-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
|
||||
--secondary: 210 40% 96.1%; /* hsl(210 40% 96%) */
|
||||
--secondary-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--accent: 210 30% 96.1%; /* hsl(210 30% 96%) */
|
||||
--accent-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--destructive: 0 100% 50%; /* hsl(0 100% 50%) */
|
||||
--destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
|
||||
--radius: 0.5rem;
|
||||
--ring: 215 20.2% 65.1%; /* hsl(215 20% 65%) */
|
||||
--round-btn-shadow: #00000063;
|
||||
|
||||
--error-background: #fef2f2;
|
||||
--error-foreground: #991b1b;
|
||||
|
||||
--success-background: #f0fdf4;
|
||||
--success-foreground: #14532d;
|
||||
--background: 0 0% 100%; /* hsl(0 0% 100%) */
|
||||
--foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--muted: 210 40% 98%; /* hsl(210 40% 98%) */
|
||||
--muted-foreground: 215.4 16.3% 46.9%; /* hsl(215 16% 46%) */
|
||||
--popover: 0 0% 100%; /* hsl(0 0% 100%) */
|
||||
--popover-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--card: 0 0% 100%; /* hsl(0 0% 100%) */
|
||||
--card-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--border: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */
|
||||
--input: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */
|
||||
--primary: 222.2 27% 11.2%; /* hsl(222 27% 18%) */
|
||||
--primary-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
|
||||
--secondary: 210 40% 96.1%; /* hsl(210 40% 96%) */
|
||||
--secondary-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--accent: 210 30% 96.1%; /* hsl(210 30% 96%) */
|
||||
--accent-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
|
||||
--destructive: 0 100% 50%; /* hsl(0 100% 50%) */
|
||||
--destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
|
||||
--radius: 0.5rem;
|
||||
--ring: 215 20.2% 65.1%; /* hsl(215 20% 65%) */
|
||||
--round-btn-shadow: #00000063;
|
||||
|
||||
--info-background: #f0f4fd;
|
||||
--info-foreground: #141653;
|
||||
--error-background: #fef2f2;
|
||||
--error-foreground: #991b1b;
|
||||
|
||||
--high-indigo: #4338ca;
|
||||
--medium-indigo: #6366f1;
|
||||
--low-indigo: #e0e7ff;
|
||||
--success-background: #f0fdf4;
|
||||
--success-foreground: #14532d;
|
||||
|
||||
--chat-bot-icon: #afe6ef;
|
||||
--chat-user-icon: #aface9;
|
||||
|
||||
/* Colors that are shared in dark and light mode */
|
||||
--blur-shared: #151923de;
|
||||
--build-trigger: #dc735b;
|
||||
--chat-trigger: #5c8be1;
|
||||
--chat-trigger-disabled: #b4c3da;
|
||||
--status-red: #ef4444;
|
||||
--status-yellow: #eab308;
|
||||
--chat-send: #059669;
|
||||
--status-green: #4ade80;
|
||||
--status-blue:#2563eb;
|
||||
--connection: #555;
|
||||
--info-background: #f0f4fd;
|
||||
--info-foreground: #141653;
|
||||
|
||||
--high-indigo: #4338ca;
|
||||
--medium-indigo: #6366f1;
|
||||
--low-indigo: #e0e7ff;
|
||||
|
||||
--beta-background: rgb(219 234 254);
|
||||
--beta-foreground: rgb(37 99 235);
|
||||
|
||||
--chat-bot-icon: #afe6ef;
|
||||
--chat-user-icon: #aface9;
|
||||
|
||||
/* Colors that are shared in dark and light mode */
|
||||
--blur-shared: #151923de;
|
||||
--build-trigger: #dc735b;
|
||||
--chat-trigger: #5c8be1;
|
||||
--chat-trigger-disabled: #b4c3da;
|
||||
--status-red: #ef4444;
|
||||
--status-yellow: #eab308;
|
||||
--chat-send: #059669;
|
||||
--status-green: #4ade80;
|
||||
--status-blue: #2563eb;
|
||||
--connection: #555;
|
||||
}
|
||||
|
||||
.dark {
|
||||
--background: 224 35% 7.5%; /* hsl(224 40% 10%) */
|
||||
--foreground: 213 31% 80%; /* hsl(213 31% 91%) */
|
||||
|
||||
--muted: 223 27% 11%; /* hsl(223 27% 11%) */
|
||||
--muted-foreground: 215.4 16.3% 56.9%; /* hsl(215 16% 56%) */
|
||||
|
||||
--popover: 224 71% 4%; /* hsl(224 71% 4%) */
|
||||
--popover-foreground: 215 20.2% 65.1%; /* hsl(215 20% 65%) */
|
||||
|
||||
--card: 224 25% 15.5%; /* hsl(224 71% 4%) */
|
||||
--card-foreground: 213 31% 80%; /* hsl(213 31% 91%) */
|
||||
|
||||
--border: 216 24% 17%; /* hsl(216 34% 17%) */
|
||||
--input: 216 24% 17%; /* hsl(216 34% 17%) */
|
||||
|
||||
--primary: 210 20% 80%; /* hsl(210 20% 80%) */
|
||||
--primary-foreground: 222.2 27.4% 1.2%; /* hsl(222 47% 1%) */
|
||||
|
||||
--secondary: 222.2 37.4% 7.2%; /* hsl(222 47% 11%) */
|
||||
--secondary-foreground: 210 40% 80%; /* hsl(210 40% 80%) */
|
||||
|
||||
--accent: 216 24% 20%; /* hsl(216 34% 17%) */
|
||||
--accent-foreground: 210 30% 98%; /* hsl(210 40% 98%) */
|
||||
|
||||
--destructive: 0 63% 31%; /* hsl(0 63% 31%) */
|
||||
--destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
|
||||
|
||||
--ring: 216 24% 30%; /* hsl(216 24% 30%) */
|
||||
|
||||
--radius: 0.5rem;
|
||||
|
||||
--round-btn-shadow: #00000063;
|
||||
|
||||
--success-background: #022c22;
|
||||
--success-foreground: #ecfdf5;
|
||||
|
||||
--error-foreground: #fef2f2;
|
||||
--error-background: #450a0a;
|
||||
|
||||
--info-foreground: #eff6ff;
|
||||
--info-background: #172554;
|
||||
|
||||
--high-indigo: #4338ca;
|
||||
--medium-indigo: #6366f1;
|
||||
--low-indigo: #e0e7ff;
|
||||
|
||||
/* Colors that are shared in dark and light mode */
|
||||
--blur-shared: #151923d2;
|
||||
--build-trigger: #dc735b;
|
||||
--chat-trigger: #5c8be1;
|
||||
--chat-trigger-disabled: #2d3b54;
|
||||
--status-red: #ef4444;
|
||||
--status-yellow: #eab308;
|
||||
--chat-send: #059669;
|
||||
--status-green: #4ade80;
|
||||
--status-blue: #2563eb;
|
||||
--connection: #555;
|
||||
|
||||
--beta-background: rgb(37 99 235);
|
||||
--beta-foreground: rgb(219 234 254);
|
||||
|
||||
--chat-bot-icon: #235d70;
|
||||
--chat-user-icon: #4f3d6e;
|
||||
}
|
||||
}
|
||||
|
||||
.dark {
|
||||
--background: 224 35% 7.5%; /* hsl(224 40% 10%) */
|
||||
--foreground: 213 31% 80%; /* hsl(213 31% 91%) */
|
||||
|
||||
--muted: 223 27% 11%; /* hsl(223 27% 11%) */
|
||||
--muted-foreground: 215.4 16.3% 56.9%; /* hsl(215 16% 56%) */
|
||||
|
||||
--popover: 224 71% 4%; /* hsl(224 71% 4%) */
|
||||
--popover-foreground: 215 20.2% 65.1%; /* hsl(215 20% 65%) */
|
||||
|
||||
--card: 224 25% 15.5%; /* hsl(224 71% 4%) */
|
||||
--card-foreground: 213 31% 80%; /* hsl(213 31% 91%) */
|
||||
|
||||
--border: 216 24% 17%; /* hsl(216 34% 17%) */
|
||||
--input: 216 24% 17%; /* hsl(216 34% 17%) */
|
||||
|
||||
--primary: 210 20% 80%; /* hsl(210 20% 80%) */
|
||||
--primary-foreground: 222.2 27.4% 1.2%; /* hsl(222 47% 1%) */
|
||||
|
||||
--secondary: 222.2 37.4% 7.2%; /* hsl(222 47% 11%) */
|
||||
--secondary-foreground: 210 40% 80%; /* hsl(210 40% 80%) */
|
||||
|
||||
--accent: 216 24% 20%; /* hsl(216 34% 17%) */
|
||||
--accent-foreground: 210 30% 98%; /* hsl(210 40% 98%) */
|
||||
|
||||
--destructive: 0 63% 31%; /* hsl(0 63% 31%) */
|
||||
--destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
|
||||
|
||||
--ring: 216 24% 30%; /* hsl(216 24% 30%) */
|
||||
|
||||
--radius: 0.5rem;
|
||||
|
||||
--round-btn-shadow: #00000063;
|
||||
|
||||
--success-background: #022c22;
|
||||
--success-foreground: #ecfdf5;
|
||||
|
||||
--error-foreground: #fef2f2;
|
||||
--error-background: #450a0a;
|
||||
|
||||
--info-foreground: #eff6ff;
|
||||
--info-background: #172554;
|
||||
|
||||
|
||||
--high-indigo: #4338ca;
|
||||
--medium-indigo: #6366f1;
|
||||
--low-indigo: #e0e7ff;
|
||||
|
||||
/* Colors that are shared in dark and light mode */
|
||||
--blur-shared: #151923d2;
|
||||
--build-trigger: #dc735b;
|
||||
--chat-trigger: #5c8be1;
|
||||
--chat-trigger-disabled: #2d3b54;
|
||||
--status-red: #ef4444;
|
||||
--status-yellow: #eab308;
|
||||
--chat-send: #059669;
|
||||
--status-green: #4ade80;
|
||||
--status-blue: #2563eb;
|
||||
--connection: #555;
|
||||
|
||||
--chat-bot-icon: #235d70;
|
||||
--chat-user-icon: #4f3d6e;
|
||||
|
||||
}}
|
||||
|
|
|
|||
|
|
@ -14,8 +14,9 @@ export type APIClassType = {
|
|||
display_name: string;
|
||||
input_types?: Array<string>;
|
||||
output_types?: Array<string>;
|
||||
beta?: boolean;
|
||||
documentation: string;
|
||||
[key: string]: Array<string> | string | APITemplateType;
|
||||
[key: string]: Array<string> | string | APITemplateType | boolean;
|
||||
};
|
||||
|
||||
export type TemplateVariableType = {
|
||||
|
|
|
|||
|
|
@ -62,8 +62,9 @@ export type CodeAreaComponentType = {
|
|||
onChange: (value: string[] | string) => void;
|
||||
value: string;
|
||||
editNode?: boolean;
|
||||
nodeClass: APIClassType;
|
||||
setNodeClass: (value: APIClassType) => void;
|
||||
nodeClass?: APIClassType;
|
||||
setNodeClass?: (value: APIClassType) => void;
|
||||
dynamic?: boolean;
|
||||
};
|
||||
|
||||
export type FileComponentType = {
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ import {
|
|||
Cpu,
|
||||
Download,
|
||||
DownloadCloud,
|
||||
Edit,
|
||||
Eraser,
|
||||
ExternalLink,
|
||||
File,
|
||||
|
|
@ -74,6 +75,7 @@ import { EvernoteIcon } from "../icons/Evernote";
|
|||
import { FBIcon } from "../icons/FacebookMessenger";
|
||||
import { GitBookIcon } from "../icons/GitBook";
|
||||
import { GoogleIcon } from "../icons/Google";
|
||||
import GradientSparkles from "../icons/GradientSparkles";
|
||||
import { HuggingFaceIcon } from "../icons/HuggingFace";
|
||||
import { IFixIcon } from "../icons/IFixIt";
|
||||
import { MetaIcon } from "../icons/Meta";
|
||||
|
|
@ -146,6 +148,7 @@ export const nodeColors: { [char: string]: string } = {
|
|||
str: "#049524",
|
||||
retrievers: "#e6b25a",
|
||||
unknown: "#9CA3AF",
|
||||
custom_components: "#ab11ab",
|
||||
};
|
||||
|
||||
export const nodeNames: { [char: string]: string } = {
|
||||
|
|
@ -166,6 +169,7 @@ export const nodeNames: { [char: string]: string } = {
|
|||
retrievers: "Retrievers",
|
||||
utilities: "Utilities",
|
||||
output_parsers: "Output Parsers",
|
||||
custom_components: "Custom",
|
||||
unknown: "Unknown",
|
||||
};
|
||||
|
||||
|
|
@ -224,6 +228,8 @@ export const nodeIconsLucide = {
|
|||
unknown: HelpCircle,
|
||||
WikipediaQueryRun: SvgWikipedia,
|
||||
WolframAlphaQueryRun: SvgWolfram,
|
||||
custom_components: GradientSparkles,
|
||||
custom: Edit,
|
||||
Trash2,
|
||||
X,
|
||||
XCircle,
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import { IVarHighlightType } from "../types/components";
|
|||
import { FlowType } from "../types/flow";
|
||||
import { TabsState } from "../types/tabs";
|
||||
import { buildTweaks } from "./reactflowUtils";
|
||||
import { nodeNames } from "./styleUtils";
|
||||
|
||||
export function classNames(...classes: Array<string>) {
|
||||
return classes.filter(Boolean).join(" ");
|
||||
|
|
@ -88,12 +89,13 @@ export function checkUpperWords(str: string) {
|
|||
export const isWrappedWithClass = (event: any, className: string | undefined) =>
|
||||
event.target.closest(`.${className}`);
|
||||
|
||||
export function groupByFamily(data, baseClasses, left, type) {
|
||||
export function groupByFamily(data, baseClasses, left, type, flow) {
|
||||
let parentOutput: string;
|
||||
let arrOfParent: string[] = [];
|
||||
let arrOfType: { family: string; type: string; component: string }[] = [];
|
||||
let arrOfLength: { length: number; type: string }[] = [];
|
||||
let lastType = "";
|
||||
|
||||
Object.keys(data).forEach((d) => {
|
||||
Object.keys(data[d]).forEach((n) => {
|
||||
try {
|
||||
|
|
@ -165,7 +167,7 @@ export function groupByFamily(data, baseClasses, left, type) {
|
|||
});
|
||||
}
|
||||
|
||||
if (left === false) {
|
||||
if (parentOutput !== "custom_components") {
|
||||
let resFil = result.filter((group) => group.family === parentOutput);
|
||||
result = resFil;
|
||||
}
|
||||
|
|
@ -203,6 +205,107 @@ export function groupByFamily(data, baseClasses, left, type) {
|
|||
}
|
||||
}
|
||||
|
||||
export function groupByFamilyCustom(data, baseClasses, left, type, flow) {
|
||||
let arrOfParentCustom: string[] = [];
|
||||
let arrOfType: { family: string; type: string; component: string }[] = [];
|
||||
|
||||
if (type === "CustomComponent") {
|
||||
const uniqueValuesSet = new Set();
|
||||
flow.forEach((element) => {
|
||||
element["data"]["node"]["base_classes"].forEach((el) => {
|
||||
if (!uniqueValuesSet.has(el)) {
|
||||
arrOfParentCustom.push(el);
|
||||
uniqueValuesSet.add(el);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (left === false) {
|
||||
arrOfParentCustom.map((n) => {
|
||||
try {
|
||||
arrOfType.push({
|
||||
family: "custom_components",
|
||||
type: n,
|
||||
component: nodeNames["custom_components"],
|
||||
});
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
flow.forEach((element) => {
|
||||
Object.keys(element["data"]["node"]["template"]).map((el) => {
|
||||
if (
|
||||
element["data"]["node"]["template"][el].input_types &&
|
||||
element["data"]["node"]["template"][el].input_types.length > 0
|
||||
) {
|
||||
element["data"]["node"]["template"][el].input_types.map((n) => {
|
||||
try {
|
||||
arrOfType.push({
|
||||
family: "custom_components",
|
||||
type: n,
|
||||
component: nodeNames["custom_components"],
|
||||
});
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const groupedResult = {};
|
||||
|
||||
arrOfType.forEach((item) => {
|
||||
const { family, type, component } = item;
|
||||
if (groupedResult.hasOwnProperty(family)) {
|
||||
if (!groupedResult[family].type.includes(type)) {
|
||||
groupedResult[family].type += `, ${type}`;
|
||||
}
|
||||
} else {
|
||||
groupedResult[family] = { family, type, component };
|
||||
}
|
||||
});
|
||||
|
||||
const result = Object.values(groupedResult);
|
||||
|
||||
if (left === false) {
|
||||
let resultFiltered = [];
|
||||
flow.forEach((element) => {
|
||||
Object.keys(element["data"]["node"]["template"]).map((el) => {
|
||||
if (
|
||||
element["data"]["node"]["template"][el].input_types &&
|
||||
element["data"]["node"]["template"][el].input_types.length > 0
|
||||
) {
|
||||
element["data"]["node"]["template"][el].input_types.map((n) => {
|
||||
resultFiltered.push({
|
||||
family: "custom_components",
|
||||
type: n,
|
||||
component: element["data"]["node"]["display_name"],
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
if (resultFiltered.length === 0) {
|
||||
Object.keys(groupedResult).forEach((el) => {
|
||||
resultFiltered.push({
|
||||
family: "custom_components",
|
||||
type: groupedResult[el].type,
|
||||
component: nodeNames["custom_components"],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return resultFiltered;
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
export function buildInputs(tabsState, id) {
|
||||
return tabsState &&
|
||||
tabsState[id] &&
|
||||
|
|
|
|||
|
|
@ -71,6 +71,8 @@ module.exports = {
|
|||
"status-yellow": "var(--status-yellow)",
|
||||
"success-background": "var(--success-background)",
|
||||
"success-foreground": "var(--success-foreground)",
|
||||
"beta-background": "var(--beta-background)",
|
||||
"beta-foreground": "var(--beta-foreground)",
|
||||
"chat-bot-icon": "var(--chat-bot-icon)",
|
||||
"chat-user-icon": "var(--chat-user-icon)",
|
||||
|
||||
|
|
|
|||
|
|
@ -93,8 +93,8 @@ def json_flow():
|
|||
return f.read()
|
||||
|
||||
|
||||
@pytest.fixture(name="session") #
|
||||
def session_fixture(): #
|
||||
@pytest.fixture(name="session")
|
||||
def session_fixture():
|
||||
engine = create_engine(
|
||||
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
|
||||
)
|
||||
|
|
@ -103,16 +103,16 @@ def session_fixture(): #
|
|||
yield session
|
||||
|
||||
|
||||
@pytest.fixture(name="client") #
|
||||
def client_fixture(session: Session): #
|
||||
def get_session_override(): #
|
||||
@pytest.fixture(name="client")
|
||||
def client_fixture(session: Session):
|
||||
def get_session_override():
|
||||
return session
|
||||
|
||||
from langflow.main import create_app
|
||||
|
||||
app = create_app()
|
||||
|
||||
app.dependency_overrides[get_session] = get_session_override #
|
||||
app.dependency_overrides[get_session] = get_session_override
|
||||
|
||||
yield TestClient(app)
|
||||
app.dependency_overrides.clear() #
|
||||
app.dependency_overrides.clear()
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ def test_zero_shot_agent(client: TestClient):
|
|||
|
||||
assert template["tools"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -32,6 +33,7 @@ def test_zero_shot_agent(client: TestClient):
|
|||
# Additional assertions for other template variables
|
||||
assert template["callback_manager"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": False,
|
||||
"multiline": False,
|
||||
|
|
@ -44,6 +46,7 @@ def test_zero_shot_agent(client: TestClient):
|
|||
}
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -56,6 +59,7 @@ def test_zero_shot_agent(client: TestClient):
|
|||
}
|
||||
assert template["output_parser"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": False,
|
||||
"multiline": False,
|
||||
|
|
@ -68,6 +72,7 @@ def test_zero_shot_agent(client: TestClient):
|
|||
}
|
||||
assert template["input_variables"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": False,
|
||||
"multiline": False,
|
||||
|
|
@ -80,6 +85,7 @@ def test_zero_shot_agent(client: TestClient):
|
|||
}
|
||||
assert template["prefix"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": True,
|
||||
|
|
@ -93,6 +99,7 @@ def test_zero_shot_agent(client: TestClient):
|
|||
}
|
||||
assert template["suffix"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": True,
|
||||
|
|
@ -118,6 +125,7 @@ def test_json_agent(client: TestClient):
|
|||
|
||||
assert template["toolkit"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -130,6 +138,7 @@ def test_json_agent(client: TestClient):
|
|||
}
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -155,6 +164,7 @@ def test_csv_agent(client: TestClient):
|
|||
|
||||
assert template["path"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -171,6 +181,7 @@ def test_csv_agent(client: TestClient):
|
|||
}
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -196,6 +207,7 @@ def test_initialize_agent(client: TestClient):
|
|||
|
||||
assert template["agent"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -217,6 +229,7 @@ def test_initialize_agent(client: TestClient):
|
|||
}
|
||||
assert template["memory"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -229,6 +242,7 @@ def test_initialize_agent(client: TestClient):
|
|||
}
|
||||
assert template["tools"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -241,6 +255,7 @@ def test_initialize_agent(client: TestClient):
|
|||
}
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
|
|||
|
|
@ -29,6 +29,7 @@ def test_conversation_chain(client: TestClient):
|
|||
template = chain["template"]
|
||||
assert template["memory"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -41,6 +42,7 @@ def test_conversation_chain(client: TestClient):
|
|||
}
|
||||
assert template["verbose"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": False,
|
||||
"multiline": False,
|
||||
|
|
@ -53,6 +55,7 @@ def test_conversation_chain(client: TestClient):
|
|||
}
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -65,6 +68,7 @@ def test_conversation_chain(client: TestClient):
|
|||
}
|
||||
assert template["input_key"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -78,6 +82,7 @@ def test_conversation_chain(client: TestClient):
|
|||
}
|
||||
assert template["output_key"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -115,6 +120,7 @@ def test_llm_chain(client: TestClient):
|
|||
template = chain["template"]
|
||||
assert template["memory"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -127,6 +133,7 @@ def test_llm_chain(client: TestClient):
|
|||
}
|
||||
assert template["verbose"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": False,
|
||||
"multiline": False,
|
||||
|
|
@ -140,6 +147,7 @@ def test_llm_chain(client: TestClient):
|
|||
}
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -152,6 +160,7 @@ def test_llm_chain(client: TestClient):
|
|||
}
|
||||
assert template["output_key"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -182,6 +191,7 @@ def test_llm_checker_chain(client: TestClient):
|
|||
template = chain["template"]
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -215,6 +225,7 @@ def test_llm_math_chain(client: TestClient):
|
|||
template = chain["template"]
|
||||
assert template["memory"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -227,6 +238,7 @@ def test_llm_math_chain(client: TestClient):
|
|||
}
|
||||
assert template["verbose"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": False,
|
||||
"multiline": False,
|
||||
|
|
@ -240,6 +252,7 @@ def test_llm_math_chain(client: TestClient):
|
|||
}
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -252,6 +265,7 @@ def test_llm_math_chain(client: TestClient):
|
|||
}
|
||||
assert template["input_key"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -265,6 +279,7 @@ def test_llm_math_chain(client: TestClient):
|
|||
}
|
||||
assert template["output_key"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -306,6 +321,7 @@ def test_series_character_chain(client: TestClient):
|
|||
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"display_name": "LLM",
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
|
|
@ -319,6 +335,7 @@ def test_series_character_chain(client: TestClient):
|
|||
}
|
||||
assert template["character"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -331,6 +348,7 @@ def test_series_character_chain(client: TestClient):
|
|||
}
|
||||
assert template["series"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
@ -372,6 +390,7 @@ def test_mid_journey_prompt_chain(client: TestClient):
|
|||
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"display_name": "LLM",
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
|
|
@ -412,6 +431,7 @@ def test_time_travel_guide_chain(client: TestClient):
|
|||
|
||||
assert template["llm"] == {
|
||||
"required": True,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"display_name": "LLM",
|
||||
"show": True,
|
||||
|
|
@ -425,6 +445,7 @@ def test_time_travel_guide_chain(client: TestClient):
|
|||
}
|
||||
assert template["memory"] == {
|
||||
"required": False,
|
||||
"dynamic": False,
|
||||
"placeholder": "",
|
||||
"show": True,
|
||||
"multiline": False,
|
||||
|
|
|
|||
|
|
@ -35,6 +35,7 @@ def test_lang_chain_type_creator_to_dict(
|
|||
sample_lang_chain_type_creator: LangChainTypeCreator,
|
||||
):
|
||||
type_dict = sample_lang_chain_type_creator.to_dict()
|
||||
|
||||
assert len(type_dict) == 1
|
||||
assert "test_type" in type_dict
|
||||
assert "node1" in type_dict["test_type"]
|
||||
|
|
|
|||
558
tests/test_custom_component.py
Normal file
558
tests/test_custom_component.py
Normal file
|
|
@ -0,0 +1,558 @@
|
|||
import ast
|
||||
import pytest
|
||||
import types
|
||||
from uuid import uuid4
|
||||
|
||||
|
||||
from fastapi import HTTPException
|
||||
from langflow.database.models.flow import Flow, FlowCreate
|
||||
from langflow.interface.custom.base import CustomComponent
|
||||
from langflow.interface.custom.component import (
|
||||
Component,
|
||||
ComponentCodeNullError,
|
||||
ComponentFunctionEntrypointNameNullError,
|
||||
)
|
||||
from langflow.interface.custom.code_parser import CodeParser, CodeSyntaxError
|
||||
|
||||
|
||||
code_default = """
|
||||
from langflow import Prompt
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain.chains import LLMChain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.schema import Document
|
||||
|
||||
import requests
|
||||
|
||||
class YourComponent(CustomComponent):
|
||||
display_name: str = "Your Component"
|
||||
description: str = "Your description"
|
||||
field_config = { "url": { "multiline": True, "required": True } }
|
||||
|
||||
def build(self, url: str, llm: BaseLLM, template: Prompt) -> Document:
|
||||
response = requests.get(url)
|
||||
prompt = PromptTemplate.from_template(template)
|
||||
chain = LLMChain(llm=llm, prompt=prompt)
|
||||
result = chain.run(response.text[:300])
|
||||
return Document(page_content=str(result))
|
||||
"""
|
||||
|
||||
|
||||
def test_code_parser_init():
|
||||
"""
|
||||
Test the initialization of the CodeParser class.
|
||||
"""
|
||||
parser = CodeParser(code_default)
|
||||
assert parser.code == code_default
|
||||
|
||||
|
||||
def test_code_parser_get_tree():
|
||||
"""
|
||||
Test the __get_tree method of the CodeParser class.
|
||||
"""
|
||||
parser = CodeParser(code_default)
|
||||
tree = parser._CodeParser__get_tree()
|
||||
assert isinstance(tree, ast.AST)
|
||||
|
||||
|
||||
def test_code_parser_syntax_error():
|
||||
"""
|
||||
Test the __get_tree method raises the
|
||||
CodeSyntaxError when given incorrect syntax.
|
||||
"""
|
||||
code_syntax_error = "zzz import os"
|
||||
|
||||
parser = CodeParser(code_syntax_error)
|
||||
with pytest.raises(CodeSyntaxError):
|
||||
parser._CodeParser__get_tree()
|
||||
|
||||
|
||||
def test_component_init():
|
||||
"""
|
||||
Test the initialization of the Component class.
|
||||
"""
|
||||
component = Component(code=code_default, function_entrypoint_name="build")
|
||||
assert component.code == code_default
|
||||
assert component.function_entrypoint_name == "build"
|
||||
|
||||
|
||||
def test_component_get_code_tree():
|
||||
"""
|
||||
Test the get_code_tree method of the Component class.
|
||||
"""
|
||||
component = Component(code=code_default, function_entrypoint_name="build")
|
||||
tree = component.get_code_tree(component.code)
|
||||
assert "imports" in tree
|
||||
|
||||
|
||||
def test_component_code_null_error():
|
||||
"""
|
||||
Test the get_function method raises the
|
||||
ComponentCodeNullError when the code is empty.
|
||||
"""
|
||||
component = Component(code="", function_entrypoint_name="")
|
||||
with pytest.raises(ComponentCodeNullError):
|
||||
component.get_function()
|
||||
|
||||
|
||||
def test_component_function_entrypoint_name_null_error():
|
||||
"""
|
||||
Test the get_function method raises the ComponentFunctionEntrypointNameNullError
|
||||
when the function_entrypoint_name is empty.
|
||||
"""
|
||||
component = Component(code=code_default, function_entrypoint_name="")
|
||||
with pytest.raises(ComponentFunctionEntrypointNameNullError):
|
||||
component.get_function()
|
||||
|
||||
|
||||
def test_custom_component_init():
|
||||
"""
|
||||
Test the initialization of the CustomComponent class.
|
||||
"""
|
||||
function_entrypoint_name = "build"
|
||||
|
||||
custom_component = CustomComponent(
|
||||
code=code_default, function_entrypoint_name=function_entrypoint_name
|
||||
)
|
||||
assert custom_component.code == code_default
|
||||
assert custom_component.function_entrypoint_name == function_entrypoint_name
|
||||
|
||||
|
||||
def test_custom_component_build_template_config():
|
||||
"""
|
||||
Test the build_template_config property of the CustomComponent class.
|
||||
"""
|
||||
custom_component = CustomComponent(
|
||||
code=code_default, function_entrypoint_name="build"
|
||||
)
|
||||
config = custom_component.build_template_config
|
||||
assert isinstance(config, dict)
|
||||
|
||||
|
||||
def test_custom_component_get_function():
|
||||
"""
|
||||
Test the get_function property of the CustomComponent class.
|
||||
"""
|
||||
custom_component = CustomComponent(
|
||||
code="def build(): pass", function_entrypoint_name="build"
|
||||
)
|
||||
my_function = custom_component.get_function
|
||||
assert isinstance(my_function, types.FunctionType)
|
||||
|
||||
|
||||
def test_code_parser_parse_imports_import():
|
||||
"""
|
||||
Test the parse_imports method of the CodeParser
|
||||
class with an import statement.
|
||||
"""
|
||||
parser = CodeParser(code_default)
|
||||
tree = parser._CodeParser__get_tree()
|
||||
for node in ast.walk(tree):
|
||||
if isinstance(node, ast.Import):
|
||||
parser.parse_imports(node)
|
||||
assert "requests" in parser.data["imports"]
|
||||
|
||||
|
||||
def test_code_parser_parse_imports_importfrom():
|
||||
"""
|
||||
Test the parse_imports method of the CodeParser
|
||||
class with an import from statement.
|
||||
"""
|
||||
parser = CodeParser("from os import path")
|
||||
tree = parser._CodeParser__get_tree()
|
||||
for node in ast.walk(tree):
|
||||
if isinstance(node, ast.ImportFrom):
|
||||
parser.parse_imports(node)
|
||||
assert ("os", "path") in parser.data["imports"]
|
||||
|
||||
|
||||
def test_code_parser_parse_functions():
    """parse_functions should collect a single top-level function definition."""
    parser = CodeParser("def test(): pass")
    tree = parser._CodeParser__get_tree()
    for node in ast.walk(tree):
        if isinstance(node, ast.FunctionDef):
            parser.parse_functions(node)
    functions = parser.data["functions"]
    assert len(functions) == 1
    assert functions[0]["name"] == "test"
|
||||
def test_code_parser_parse_classes():
    """parse_classes should collect a single top-level class definition."""
    parser = CodeParser("class Test: pass")
    tree = parser._CodeParser__get_tree()
    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            parser.parse_classes(node)
    classes = parser.data["classes"]
    assert len(classes) == 1
    assert classes[0]["name"] == "Test"
||||
def test_code_parser_parse_global_vars():
    """parse_global_vars should collect a module-level assignment target."""
    parser = CodeParser("x = 1")
    tree = parser._CodeParser__get_tree()
    for node in ast.walk(tree):
        if isinstance(node, ast.Assign):
            parser.parse_global_vars(node)
    global_vars = parser.data["global_vars"]
    assert len(global_vars) == 1
    assert global_vars[0]["targets"] == ["x"]
||||
def test_component_get_function_valid():
    """Component.get_function should return a callable for valid code."""
    component = Component(code="def build(): pass", function_entrypoint_name="build")
    built = component.get_function()
    assert callable(built)
||||
def test_custom_component_get_function_entrypoint_args():
    """The build entrypoint of code_default should expose four arguments."""
    component = CustomComponent(code=code_default, function_entrypoint_name="build")
    args = component.get_function_entrypoint_args
    assert len(args) == 4
    # NOTE(review): only the first three argument names are pinned; the
    # fourth was left unchecked by the original test as well.
    assert [arg["name"] for arg in args[:3]] == ["self", "url", "llm"]
|
||||
def test_custom_component_get_function_entrypoint_return_type():
    """The build entrypoint of code_default should be annotated to return Document."""
    component = CustomComponent(code=code_default, function_entrypoint_name="build")
    assert component.get_function_entrypoint_return_type == "Document"
|
||||
def test_custom_component_get_main_class_name():
    """get_main_class_name should find the class declared in code_default."""
    component = CustomComponent(code=code_default, function_entrypoint_name="build")
    assert component.get_main_class_name == "YourComponent"
|
||||
def test_custom_component_get_function_valid():
    """get_function should yield a callable for valid code and entrypoint name."""
    component = CustomComponent(
        code="def build(): pass", function_entrypoint_name="build"
    )
    assert callable(component.get_function)
|
||||
def test_code_parser_parse_arg_no_annotation():
    """parse_arg should omit the "type" key when the argument is unannotated."""
    parser = CodeParser("")
    plain_arg = ast.arg(arg="x", annotation=None)
    parsed = parser.parse_arg(plain_arg, None)
    assert parsed["name"] == "x"
    assert "type" not in parsed
|
||||
def test_code_parser_parse_arg_with_annotation():
    """parse_arg should surface an annotation under the "type" key."""
    parser = CodeParser("")
    annotated_arg = ast.arg(arg="x", annotation=ast.Name(id="int", ctx=ast.Load()))
    parsed = parser.parse_arg(annotated_arg, None)
    assert parsed["name"] == "x"
    assert parsed["type"] == "int"
||||
def test_code_parser_parse_callable_details_no_args():
    """parse_callable_details should handle a function with an empty signature."""
    parser = CodeParser("")
    empty_signature = ast.arguments(
        args=[],
        vararg=None,
        kwonlyargs=[],
        kw_defaults=[],
        kwarg=None,
        defaults=[],
    )
    node = ast.FunctionDef(
        name="test",
        args=empty_signature,
        body=[],
        decorator_list=[],
        returns=None,
    )
    details = parser.parse_callable_details(node)
    assert details["name"] == "test"
    assert len(details["args"]) == 0
|
||||
def test_code_parser_parse_assign():
    """
    Test the parse_assign method of the CodeParser class.
    """
    parser = CodeParser("")
    # ast.Num is deprecated since Python 3.8 (scheduled for removal);
    # ast.Constant is the canonical literal node and behaves identically here.
    stmt = ast.Assign(
        targets=[ast.Name(id="x", ctx=ast.Store())],
        value=ast.Constant(value=1),
    )
    result = parser.parse_assign(stmt)
    assert result["name"] == "x"
    assert result["value"] == "1"
|
||||
def test_code_parser_parse_ann_assign():
    """
    Test the parse_ann_assign method of the CodeParser class.
    """
    parser = CodeParser("")
    # ast.Num is deprecated since Python 3.8 (scheduled for removal);
    # ast.Constant is the canonical literal node and behaves identically here.
    stmt = ast.AnnAssign(
        target=ast.Name(id="x", ctx=ast.Store()),
        annotation=ast.Name(id="int", ctx=ast.Load()),
        value=ast.Constant(value=1),
        simple=1,
    )
    result = parser.parse_ann_assign(stmt)
    assert result["name"] == "x"
    assert result["value"] == "1"
    assert result["annotation"] == "int"
|
||||
def test_code_parser_parse_function_def_not_init():
    """parse_function_def should flag a regular function as not being __init__."""
    parser = CodeParser("")
    stmt = ast.FunctionDef(
        name="test",
        args=ast.arguments(
            args=[],
            vararg=None,
            kwonlyargs=[],
            kw_defaults=[],
            kwarg=None,
            defaults=[],
        ),
        body=[],
        decorator_list=[],
        returns=None,
    )
    parsed, is_init = parser.parse_function_def(stmt)
    assert parsed["name"] == "test"
    assert not is_init
|
||||
def test_code_parser_parse_function_def_init():
    """parse_function_def should flag a function named __init__ as an initializer."""
    parser = CodeParser("")
    stmt = ast.FunctionDef(
        name="__init__",
        args=ast.arguments(
            args=[],
            vararg=None,
            kwonlyargs=[],
            kw_defaults=[],
            kwarg=None,
            defaults=[],
        ),
        body=[],
        decorator_list=[],
        returns=None,
    )
    parsed, is_init = parser.parse_function_def(stmt)
    assert parsed["name"] == "__init__"
    assert is_init
|
||||
def test_component_get_code_tree_syntax_error():
    """get_code_tree should raise CodeSyntaxError on syntactically invalid code."""
    component = Component(code="import os as", function_entrypoint_name="build")
    with pytest.raises(CodeSyntaxError):
        component.get_code_tree(component.code)
|
||||
def test_custom_component_class_template_validation_no_code():
    """_class_template_validation should raise HTTPException when code is None."""
    component = CustomComponent(code=None, function_entrypoint_name="build")
    with pytest.raises(HTTPException):
        component._class_template_validation(component.code)
|
||||
def test_custom_component_get_code_tree_syntax_error():
    """CustomComponent.get_code_tree should raise CodeSyntaxError on bad syntax."""
    component = CustomComponent(
        code="import os as", function_entrypoint_name="build"
    )
    with pytest.raises(CodeSyntaxError):
        component.get_code_tree(component.code)
|
||||
def test_custom_component_get_function_entrypoint_args_no_args():
    """A build method declared with no parameters should yield an empty arg list."""
    source = """
class MyMainClass(CustomComponent):
    def build():
        pass"""

    component = CustomComponent(code=source, function_entrypoint_name="build")
    assert len(component.get_function_entrypoint_args) == 0
||||
|
||||
def test_custom_component_get_function_entrypoint_return_type_no_return_type():
    """A build method with no return annotation should yield a None return type."""
    source = """
class MyClass(CustomComponent):
    def build():
        pass"""

    component = CustomComponent(code=source, function_entrypoint_name="build")
    assert component.get_function_entrypoint_return_type is None
||||
|
||||
def test_custom_component_get_main_class_name_no_main_class():
    """When the code declares no class, get_main_class_name should be empty."""
    source = """
def build():
    pass"""

    component = CustomComponent(code=source, function_entrypoint_name="build")
    assert component.get_main_class_name == ""
||||
|
||||
def test_custom_component_build_not_implemented():
    """The base CustomComponent.build method should raise NotImplementedError."""
    component = CustomComponent(
        code="def build(): pass", function_entrypoint_name="build"
    )
    with pytest.raises(NotImplementedError):
        component.build()
||||
|
||||
def test_build_config_no_code():
    """With no code, entrypoint args and return type fall back to empty strings."""
    component = CustomComponent(code=None)

    assert component.get_function_entrypoint_args == ""
    assert component.get_function_entrypoint_return_type == ""
|
||||
@pytest.fixture
def component():
    """Provide a CustomComponent preloaded with a three-field config."""
    field_config = {
        "fields": {
            "llm": {"type": "str"},
            "url": {"type": "str"},
            "year": {"type": "int"},
        }
    }
    return CustomComponent(field_config=field_config)
||||
|
||||
@pytest.fixture(scope="session")
def test_flow(db):
    """Persist a two-node flow for the test session and remove it on teardown."""
    graph = {
        "nodes": [{"id": "1"}, {"id": "2"}],
        "edges": [{"source": "1", "target": "2"}],
    }

    # NOTE(review): a FlowCreate (create-schema) instance is added to the DB
    # session directly — confirm the session accepts it rather than a Flow model.
    flow = FlowCreate(
        id=uuid4(), name="Test Flow", description="Fixture flow", data=graph
    )

    db.add(flow)
    db.commit()

    yield flow

    # Teardown: remove the fixture row so the session DB stays clean.
    db.delete(flow)
    db.commit()
|
||||
@pytest.fixture(scope="session")
def db(app):
    """Yield the app's database handle, dropping all tables on teardown."""
    yield app.db

    app.db.drop_all()
|
||||
def test_list_flows_return_type(component):
    """list_flows should return a list."""
    assert isinstance(component.list_flows(), list)
|
||||
def test_list_flows_flow_objects(component):
    """Every element returned by list_flows should be a Flow instance."""
    for flow in component.list_flows():
        assert isinstance(flow, Flow)
||||
def test_build_config_return_type(component):
    """build_config should return a dict."""
    assert isinstance(component.build_config(), dict)
|
||||
def test_build_config_has_fields(component):
    """build_config output should contain a "fields" entry."""
    assert "fields" in component.build_config()
|
||||
def test_build_config_fields_dict(component):
    """The "fields" entry of build_config should itself be a dict."""
    fields = component.build_config()["fields"]
    assert isinstance(fields, dict)
|
||||
def test_build_config_field_keys(component):
    """Every key under "fields" should be a string."""
    field_names = component.build_config()["fields"]
    assert all(isinstance(name, str) for name in field_names)
|
||||
def test_build_config_field_values_dict(component):
    """Every value under "fields" should be a dict of field settings."""
    field_settings = component.build_config()["fields"].values()
    assert all(isinstance(settings, dict) for settings in field_settings)
|
||||
def test_build_config_field_value_keys(component):
    """Every field settings dict should declare a "type" key."""
    field_settings = component.build_config()["fields"].values()
    assert all("type" in settings for settings in field_settings)
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue