diff --git a/docs/docs/Agents/_category_.json b/docs/docs/Agents/_category_.json deleted file mode 100644 index 279ca75ee..000000000 --- a/docs/docs/Agents/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":5, "label":"Agents"} \ No newline at end of file diff --git a/docs/docs/Agents/agent-tool-calling-agent-component.md b/docs/docs/Agents/agent-tool-calling-agent-component.md index 8e2557e7a..92f9919dd 100644 --- a/docs/docs/Agents/agent-tool-calling-agent-component.md +++ b/docs/docs/Agents/agent-tool-calling-agent-component.md @@ -1,6 +1,5 @@ --- title: Create a problem-solving agent -sidebar_position: 2 slug: /agents-tool-calling-agent-component --- diff --git a/docs/docs/Agents/agents-overview.md b/docs/docs/Agents/agents-overview.md index d8901eb76..90104920e 100644 --- a/docs/docs/Agents/agents-overview.md +++ b/docs/docs/Agents/agents-overview.md @@ -1,6 +1,5 @@ --- title: Agents overview -sidebar_position: 1 slug: /agents-overview --- diff --git a/docs/docs/Components/components-agents.md b/docs/docs/Components/components-agents.md index e1f782e9c..1602b677f 100644 --- a/docs/docs/Components/components-agents.md +++ b/docs/docs/Components/components-agents.md @@ -1,6 +1,5 @@ --- title: Agents -sidebar_position: 12 slug: /components-agents --- @@ -18,7 +17,7 @@ The agent then uses a connected LLM to reason through the problem to decide whic ## Use an agent in a flow -The [simple agent starter project](/starter-projects-simple-agent) uses an [agent component](#agent-component-agent-component) connected to URL and Calculator tools to answer a user's questions. The OpenAI LLM acts as a brain for the agent to decide which tool to use. Tools are connected to agent components at the **Tools** port. +The [simple agent starter project](/starter-projects-simple-agent) uses an [agent component](#agent-component) connected to URL and Calculator tools to answer a user's questions. The OpenAI LLM acts as a brain for the agent to decide which tool to use. 
Tools are connected to agent components at the **Tools** port. ![Simple agent starter flow](/img/starter-flow-simple-agent.png) diff --git a/docs/docs/Components/components-custom-components.md b/docs/docs/Components/components-custom-components.md index 385abae02..7a34df476 100644 --- a/docs/docs/Components/components-custom-components.md +++ b/docs/docs/Components/components-custom-components.md @@ -488,4 +488,4 @@ Advanced methods and attributes offer additional control and functionality. Unde ## Contribute Custom Components to Langflow -See [How to Contribute](/contributing-how-to-contribute#submitting-components) to contribute your custom component to Langflow. +See [How to Contribute](/contributing-components) to contribute your custom component to Langflow. diff --git a/docs/docs/Components/components-data.md b/docs/docs/Components/components-data.md index 21566653f..c04c05faa 100644 --- a/docs/docs/Components/components-data.md +++ b/docs/docs/Components/components-data.md @@ -1,6 +1,5 @@ --- title: Data -sidebar_position: 3 slug: /components-data --- diff --git a/docs/docs/Components/components-embedding-models.md b/docs/docs/Components/components-embedding-models.md index 556651294..7507a72a2 100644 --- a/docs/docs/Components/components-embedding-models.md +++ b/docs/docs/Components/components-embedding-models.md @@ -1,6 +1,5 @@ --- title: Embeddings -sidebar_position: 6 slug: /components-embedding-models --- @@ -60,7 +59,7 @@ This component is used to load embedding models from [Amazon Bedrock](https://aw ## Astra DB vectorize -Connect this component to the **Embeddings** port of the [Astra DB vector store component](components-vector-stores#astra-db-serverless) to generate embeddings. +Connect this component to the **Embeddings** port of the [Astra DB vector store component](/components-vector-stores#astra-db-vector-store) to generate embeddings. 
This component requires that your Astra DB database has a collection that uses a vectorize embedding provider integration. For more information and instructions, see [Embedding Generation](https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html). @@ -221,7 +220,7 @@ This component generates embeddings using MistralAI models. | max_concurrent_requests | Integer | Maximum number of concurrent API requests (default: 64) | | max_retries | Integer | Maximum number of retry attempts for failed requests (default: 5) | | timeout | Integer | Request timeout in seconds (default: 120) | -| endpoint | String | Custom API endpoint URL (default: "https://api.mistral.ai/v1/") | +| endpoint | String | Custom API endpoint URL (default: `https://api.mistral.ai/v1/`) | #### Outputs @@ -239,10 +238,10 @@ This component generates embeddings using NVIDIA models. | Name | Type | Description | |------|------|-------------| -| model | String | The NVIDIA model to use for embeddings (e.g., nvidia/nv-embed-v1) | -| base_url | String | Base URL for the NVIDIA API (default: https://integrate.api.nvidia.com/v1) | +| model | String | The NVIDIA model to use for embeddings (e.g., `nvidia/nv-embed-v1`) | +| base_url | String | Base URL for the NVIDIA API (default: `https://integrate.api.nvidia.com/v1`) | | nvidia_api_key | SecretString | API key for authenticating with NVIDIA's service | -| temperature | Float | Model temperature for embedding generation (default: 0.1) | +| temperature | Float | Model temperature for embedding generation (default: `0.1`) | #### Outputs diff --git a/docs/docs/Components/components-helpers.md b/docs/docs/Components/components-helpers.md index d2a67fe4e..c183e5a7e 100644 --- a/docs/docs/Components/components-helpers.md +++ b/docs/docs/Components/components-helpers.md @@ -1,6 +1,5 @@ --- title: Helpers -sidebar_position: 4 slug: /components-helpers --- diff --git a/docs/docs/Components/components-io.md 
b/docs/docs/Components/components-io.md index 1d12b7963..cee9fe348 100644 --- a/docs/docs/Components/components-io.md +++ b/docs/docs/Components/components-io.md @@ -1,6 +1,5 @@ --- title: Inputs and outputs -sidebar_position: 1 slug: /components-io --- diff --git a/docs/docs/Components/components-loaders.md b/docs/docs/Components/components-loaders.md index 982bdf93e..2348c82f3 100644 --- a/docs/docs/Components/components-loaders.md +++ b/docs/docs/Components/components-loaders.md @@ -1,6 +1,5 @@ --- title: Loaders -sidebar_position: 10 slug: /components-loaders --- @@ -27,9 +26,9 @@ The Confluence component integrates with the Confluence wiki collaboration platf | Name | Display Name | Info | | --- | --- | --- | -| url | Site URL | The base URL of the Confluence Space (e.g., https://company.atlassian.net/wiki) | -| username | Username | Atlassian User E-mail (e.g., email@example.com) | -| api_key | API Key | Atlassian API Key (Create at: https://id.atlassian.com/manage-profile/security/api-tokens) | +| url | Site URL | The base URL of the Confluence Space (e.g., `https://company.atlassian.net/wiki`) | +| username | Username | Atlassian User E-mail (e.g., `email@example.com`) | +| api_key | API Key | Atlassian API Key (Create an API key at: [Atlassian](https://id.atlassian.com/manage-profile/security/api-tokens)) | | space_key | Space Key | The key of the Confluence space to access | | cloud | Use Cloud? 
| Whether to use Confluence Cloud (default: true) | | content_format | Content Format | Specify content format (default: STORAGE) | diff --git a/docs/docs/Components/components-logic.md b/docs/docs/Components/components-logic.md index 6641d1659..578873a31 100644 --- a/docs/docs/Components/components-logic.md +++ b/docs/docs/Components/components-logic.md @@ -1,6 +1,5 @@ --- title: Logic -sidebar_position: 13 slug: /components-logic --- diff --git a/docs/docs/Components/components-models.md b/docs/docs/Components/components-models.md index 8d9c84350..c091f7a88 100644 --- a/docs/docs/Components/components-models.md +++ b/docs/docs/Components/components-models.md @@ -1,6 +1,5 @@ --- title: Models -sidebar_position: 5 slug: /components-models --- @@ -32,10 +31,10 @@ For more information, see [AIML documentation](https://docs.aimlapi.com/). |--------------|-------------|---------------------------------------------------------------------------------------------| | max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. Range: 0-128000. | | model_kwargs | Dictionary | Additional keyword arguments for the model. | -| model_name | String | The name of the AIML model to use. Options are predefined in AIML_CHAT_MODELS. | -| aiml_api_base| String | The base URL of the AIML API. Defaults to https://api.aimlapi.com. | +| model_name | String | The name of the AIML model to use. Options are predefined in `AIML_CHAT_MODELS`. | +| aiml_api_base| String | The base URL of the AIML API. Defaults to `https://api.aimlapi.com`. | | api_key | SecretString| The AIML API Key to use for the model. | -| temperature | Float | Controls randomness in the output. Default: 0.1. | +| temperature | Float | Controls randomness in the output. Default: `0.1`. | | seed | Integer | Controls reproducibility of the job. 
| ### Outputs @@ -58,7 +57,7 @@ For more information, see [Amazon Bedrock documentation](https://docs.aws.amazon | aws_access_key | SecretString | AWS Access Key for authentication. | | aws_secret_key | SecretString | AWS Secret Key for authentication. | | credentials_profile_name | String | Name of the AWS credentials profile to use (advanced). | -| region_name | String | AWS region name. Default: "us-east-1". | +| region_name | String | AWS region name. Default: `us-east-1`. | | model_kwargs | Dictionary | Additional keyword arguments for the model (advanced). | | endpoint_url | String | Custom endpoint URL for the Bedrock service (advanced). | @@ -78,14 +77,14 @@ For more information, see the [Anthropic documentation](https://docs.anthropic.c | Name | Type | Description | |---------------------|-------------|----------------------------------------------------------------------------------------| -| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. Default: 4096.| +| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. Default: `4096`.| | model | String | The name of the Anthropic model to use. Options include various Claude 3 models. | | anthropic_api_key | SecretString| Your Anthropic API key for authentication. | -| temperature | Float | Controls randomness in the output. Default: 0.1. | -| anthropic_api_url | String | Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified (advanced). | +| temperature | Float | Controls randomness in the output. Default: `0.1`. | +| anthropic_api_url | String | Endpoint of the Anthropic API. Defaults to `https://api.anthropic.com` if not specified (advanced). | | prefill | String | Prefill text to guide the model's response (advanced). 
| -#### Outputs +### Outputs | Name | Type | Description | |-------|---------------|------------------------------------------------------------------| @@ -97,9 +96,7 @@ This component generates text using Azure OpenAI LLM. For more information, see the [Azure OpenAI documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/). -### Parameters - -#### Inputs +### Inputs | Name | Display Name | Info | |---------------------|---------------------|---------------------------------------------------------------------------------| @@ -119,9 +116,7 @@ This component generates text using Cohere's language models. For more information, see the [Cohere documentation](https://cohere.ai/). -### Parameters - -#### Inputs +### Inputs | Name | Display Name | Info | |---------------------|--------------------|----------------------------------------------------------| @@ -134,11 +129,9 @@ For more information, see the [Cohere documentation](https://cohere.ai/). This component generates text using Google's Generative AI models. -For more information, see the [Google Generative AI documentation](https://cloud.google.com/ai-platform/training/docs/algorithms/gpt-3). +For more information, see the [Google Generative AI documentation](https://cloud.google.com/vertex-ai/docs/). -### Parameters - -#### Inputs +### Inputs | Name | Display Name | Info | |---------------------|--------------------|-----------------------------------------------------------------------| @@ -156,20 +149,18 @@ This component generates text using Groq's language models. For more information, see the [Groq documentation](https://groq.com/). -### Parameters - -#### Inputs +### Inputs | Name | Type | Description | |----------------|---------------|-----------------------------------------------------------------| | groq_api_key | SecretString | API key for the Groq API. | -| groq_api_base | String | Base URL path for API requests. Default: "https://api.groq.com" (advanced). 
| +| groq_api_base | String | Base URL path for API requests. Default: `https://api.groq.com` (advanced). | | max_tokens | Integer | The maximum number of tokens to generate (advanced). | -| temperature | Float | Controls randomness in the output. Range: [0.0, 1.0]. Default: 0.1. | +| temperature | Float | Controls randomness in the output. Range: `[0.0, 1.0]`. Default: `0.1`. | | n | Integer | Number of chat completions to generate for each prompt (advanced). | | model_name | String | The name of the Groq model to use. Options are dynamically fetched from the Groq API. | -#### Outputs +### Outputs | Name | Type | Description | |-------|---------------|------------------------------------------------------------------| @@ -181,9 +172,7 @@ This component generates text using Hugging Face's language models. For more information, see the [Hugging Face documentation](https://huggingface.co/). -### Parameters - -#### Inputs +### Inputs | Name | Display Name | Info | |---------------------|-------------------|-------------------------------------------| @@ -199,19 +188,17 @@ This component generates text using Maritalk LLMs. For more information, see [Maritalk documentation](https://www.maritalk.com/). -### Parameters - -#### Inputs +### Inputs | Name | Type | Description | |----------------|---------------|-----------------------------------------------------------------| -| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. Default: 512. | -| model_name | String | The name of the Maritalk model to use. Options: "sabia-2-small", "sabia-2-medium". Default: "sabia-2-small". | +| max_tokens | Integer | The maximum number of tokens to generate. Set to `0` for unlimited tokens. Default: `512`. | +| model_name | String | The name of the Maritalk model to use. Options: `sabia-2-small`, `sabia-2-medium`. Default: `sabia-2-small`. | | api_key | SecretString | The Maritalk API Key to use for authentication. 
| -| temperature | Float | Controls randomness in the output. Range: [0.0, 1.0]. Default: 0.5. | -| endpoint_url | String | The Maritalk API endpoint. Default: https://api.maritalk.com. | +| temperature | Float | Controls randomness in the output. Range: `[0.0, 1.0]`. Default: `0.5`. | +| endpoint_url | String | The Maritalk API endpoint. Default: `https://api.maritalk.com`. | -#### Outputs +### Outputs | Name | Type | Description | |-------|---------------|------------------------------------------------------------------| @@ -223,14 +210,13 @@ This component generates text using MistralAI LLMs. For more information, see [Mistral AI documentation](https://docs.mistral.ai/). -### Parameters +### Inputs -#### Inputs | Name | Type | Description | |---------------------|--------------|-----------------------------------------------------------------------------------------------| | max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens (advanced). | -| model_name | String | The name of the Mistral AI model to use. Options include "open-mixtral-8x7b", "open-mixtral-8x22b", "mistral-small-latest", "mistral-medium-latest", "mistral-large-latest", and "codestral-latest". Default: "codestral-latest". | -| mistral_api_base | String | The base URL of the Mistral API. Defaults to https://api.mistral.ai/v1 (advanced). | +| model_name | String | The name of the Mistral AI model to use. Options include `open-mixtral-8x7b`, `open-mixtral-8x22b`, `mistral-small-latest`, `mistral-medium-latest`, `mistral-large-latest`, and `codestral-latest`. Default: `codestral-latest`. | +| mistral_api_base | String | The base URL of the Mistral API. Defaults to `https://api.mistral.ai/v1` (advanced). | | api_key | SecretString | The Mistral API Key to use for authentication. | | temperature | Float | Controls randomness in the output. Default: 0.5. | | max_retries | Integer | Maximum number of retries for API calls. Default: 5 (advanced). 
| @@ -240,7 +226,8 @@ For more information, see [Mistral AI documentation](https://docs.mistral.ai/). | random_seed | Integer | Seed for random number generation. Default: 1 (advanced). | | safe_mode | Boolean | Enables safe mode for content generation (advanced). | -#### Outputs +### Outputs + | Name | Type | Description | |--------|---------------|-----------------------------------------------------| | model | LanguageModel | An instance of ChatMistralAI configured with the specified parameters. | @@ -249,21 +236,21 @@ For more information, see [Mistral AI documentation](https://docs.mistral.ai/). This component generates text using NVIDIA LLMs. -For more information, see [NVIDIA AI Foundation Models documentation](https://developer.nvidia.com/ai-foundation-models). +For more information, see [NVIDIA AI documentation](https://developer.nvidia.com/generative-ai). -### Parameters +### Inputs -#### Inputs | Name | Type | Description | |---------------------|--------------|-----------------------------------------------------------------------------------------------| -| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens (advanced). | -| model_name | String | The name of the NVIDIA model to use. Default: "mistralai/mixtral-8x7b-instruct-v0.1". | -| base_url | String | The base URL of the NVIDIA API. Default: "https://integrate.api.nvidia.com/v1". | +| max_tokens | Integer | The maximum number of tokens to generate. Set to `0` for unlimited tokens (advanced). | +| model_name | String | The name of the NVIDIA model to use. Default: `mistralai/mixtral-8x7b-instruct-v0.1`. | +| base_url | String | The base URL of the NVIDIA API. Default: `https://integrate.api.nvidia.com/v1`. | | nvidia_api_key | SecretString | The NVIDIA API Key for authentication. | -| temperature | Float | Controls randomness in the output. Default: 0.1. | -| seed | Integer | The seed controls the reproducibility of the job (advanced). Default: 1. 
| +| temperature | Float | Controls randomness in the output. Default: `0.1`. | +| seed | Integer | The seed controls the reproducibility of the job (advanced). Default: `1`. | + +### Outputs -#### Outputs | Name | Type | Description | |--------|---------------|-----------------------------------------------------| | model | LanguageModel | An instance of ChatNVIDIA configured with the specified parameters. | @@ -274,9 +261,8 @@ This component generates text using Ollama's language models. For more information, see [Ollama documentation](https://ollama.com/). -### Parameters +### Inputs -#### Inputs | Name | Display Name | Info | |---------------------|---------------|---------------------------------------------| | Base URL | Base URL | Endpoint of the Ollama API. | @@ -289,9 +275,7 @@ This component generates text using OpenAI's language models. For more information, see [OpenAI documentation](https://beta.openai.com/docs/). -### Parameters - -#### Inputs +### Inputs | Name | Type | Description | |---------------------|---------------|------------------------------------------------------------------| @@ -303,7 +287,7 @@ For more information, see [OpenAI documentation](https://beta.openai.com/docs/). | frequency_penalty | Float | Controls the frequency penalty. Range: [0.0, 2.0]. Default: 0.0. | | presence_penalty | Float | Controls the presence penalty. Range: [0.0, 2.0]. Default: 0.0. | -#### Outputs +### Outputs | Name | Type | Description | |-------|---------------|------------------------------------------------------------------| @@ -321,9 +305,8 @@ This component generates text using Perplexity's language models. For more information, see [Perplexity documentation](https://perplexity.ai/). -### Parameters +### Inputs -#### Inputs | Name | Type | Description | |---------------------|--------------|-----------------------------------------------------------------------------------------------| | model_name | String | The name of the Perplexity model to use. 
Options include various Llama 3.1 models. | @@ -334,7 +317,8 @@ For more information, see [Perplexity documentation](https://perplexity.ai/). | n | Integer | Number of chat completions to generate for each prompt (advanced). | | top_k | Integer | Number of top tokens to consider for top-k sampling. Must be positive (advanced). | -#### Outputs +### Outputs + | Name | Type | Description | |--------|---------------|-----------------------------------------------------| | model | LanguageModel | An instance of ChatPerplexity configured with the specified parameters. | @@ -345,18 +329,17 @@ This component generates text using SambaNova LLMs. For more information, see [Sambanova Cloud documentation](https://cloud.sambanova.ai/). -### Parameters - -#### Inputs +### Inputs | Name | Type | Description | |---------------------|---------------|------------------------------------------------------------------| -| sambanova_url | String | Base URL path for API requests. Default: "https://api.sambanova.ai/v1/chat/completions". | +| sambanova_url | String | Base URL path for API requests. Default: `https://api.sambanova.ai/v1/chat/completions`. | | sambanova_api_key | SecretString | Your SambaNova API Key. | | model_name | String | The name of the Sambanova model to use. Options include various Llama models. | | max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. | | temperature | Float | Controls randomness in the output. Range: [0.0, 1.0]. Default: 0.07. | -#### Outputs + +### Outputs | Name | Type | Description | |-------|---------------|------------------------------------------------------------------| @@ -368,9 +351,8 @@ This component generates text using Vertex AI LLMs. For more information, see [Google Vertex AI documentation](https://cloud.google.com/vertex-ai). 
-### Parameters +### Inputs -#### Inputs | Name | Type | Description | |---------------------|--------------|-----------------------------------------------------------------------------------------------| | credentials | File | JSON credentials file. Leave empty to fallback to environment variables. File type: JSON. | @@ -384,7 +366,8 @@ For more information, see [Google Vertex AI documentation](https://cloud.google. | top_p | Float | The cumulative probability of parameter highest probability vocabulary tokens to keep for nucleus sampling. Default: 0.95 (advanced). | | verbose | Boolean | Whether to print verbose output. Default: False (advanced). | -#### Outputs +### Outputs + | Name | Type | Description | |--------|---------------|-----------------------------------------------------| | model | LanguageModel | An instance of ChatVertexAI configured with the specified parameters. | diff --git a/docs/docs/Components/components-overview.md b/docs/docs/Components/components-overview.md index 0460515e4..9f694d018 100644 --- a/docs/docs/Components/components-overview.md +++ b/docs/docs/Components/components-overview.md @@ -1,6 +1,5 @@ --- title: Components overview -sidebar_position: 0 slug: /components-overview --- diff --git a/docs/docs/Components/components-processing.md b/docs/docs/Components/components-processing.md index efe51b9e8..3505347bc 100644 --- a/docs/docs/Components/components-processing.md +++ b/docs/docs/Components/components-processing.md @@ -82,7 +82,7 @@ The component iterates through the input list of data objects, merging them into | merged_data | Merged Data | A single data object containing the combined information from all input data objects | -## Parse Data component +## Parse Data The ParseData component converts data objects into plain text using a specified template. This component transforms structured data into human-readable text formats, allowing for customizable output through the use of templates. 
@@ -102,7 +102,7 @@ This component transforms structured data into human-readable text formats, allo | text | Text | The resulting formatted text string as a message object. | -## Split Text component +## Split Text This component splits text into chunks of a specified length. diff --git a/docs/docs/Components/components-prompts.md b/docs/docs/Components/components-prompts.md index f2a2f1c01..3884ccb2b 100644 --- a/docs/docs/Components/components-prompts.md +++ b/docs/docs/Components/components-prompts.md @@ -1,6 +1,5 @@ --- title: Prompts -sidebar_position: 2 slug: /components-prompts --- diff --git a/docs/docs/Components/components-tools.md b/docs/docs/Components/components-tools.md index e0527b1c2..2e7b6d212 100644 --- a/docs/docs/Components/components-tools.md +++ b/docs/docs/Components/components-tools.md @@ -17,7 +17,7 @@ The agent then uses a connected LLM to reason through the problem to decide whic Tools are typically connected to agent components at the **Tools** port. -The [simple agent starter project](/starter-projects-simple-agent) uses URL and Calculator tools connected to an [agent component](#agent-component-agent-component) to answer a user's questions. The OpenAI LLM acts as a brain for the agent to decide which tool to use. +The [simple agent starter project](/starter-projects-simple-agent) uses URL and Calculator tools connected to an [agent component](/components-agents#agent-component) to answer a user's questions. The OpenAI LLM acts as a brain for the agent to decide which tool to use. 
![Simple agent starter flow](/img/starter-flow-simple-agent.png) diff --git a/docs/docs/Configuration/_category_.json b/docs/docs/Configuration/_category_.json deleted file mode 100644 index 27519db0b..000000000 --- a/docs/docs/Configuration/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":8, "label":"Configuration"} \ No newline at end of file diff --git a/docs/docs/Configuration/configuration-api-keys.md b/docs/docs/Configuration/configuration-api-keys.md index 72c1fb634..7569c7876 100644 --- a/docs/docs/Configuration/configuration-api-keys.md +++ b/docs/docs/Configuration/configuration-api-keys.md @@ -1,6 +1,5 @@ --- title: API keys -sidebar_position: 1 slug: /configuration-api-keys --- diff --git a/docs/docs/Configuration/configuration-authentication.md b/docs/docs/Configuration/configuration-authentication.md index f697cd11d..c15e40b84 100644 --- a/docs/docs/Configuration/configuration-authentication.md +++ b/docs/docs/Configuration/configuration-authentication.md @@ -1,6 +1,5 @@ --- title: Authentication -sidebar_position: 0 slug: /configuration-authentication --- diff --git a/docs/docs/Configuration/configuration-auto-saving.md b/docs/docs/Configuration/configuration-auto-saving.md index b7283641a..ded36e839 100644 --- a/docs/docs/Configuration/configuration-auto-saving.md +++ b/docs/docs/Configuration/configuration-auto-saving.md @@ -1,6 +1,5 @@ --- title: Auto-saving -sidebar_position: 6 slug: /configuration-auto-save --- diff --git a/docs/docs/Configuration/configuration-backend-only.md b/docs/docs/Configuration/configuration-backend-only.md index 1abac1cf2..0ffc99325 100644 --- a/docs/docs/Configuration/configuration-backend-only.md +++ b/docs/docs/Configuration/configuration-backend-only.md @@ -1,6 +1,5 @@ --- title: Run Langflow in backend-only mode -sidebar_position: 4 slug: /configuration-backend-only --- diff --git a/docs/docs/Configuration/configuration-cli.md b/docs/docs/Configuration/configuration-cli.md index 205604103..355f2e305 100644 
--- a/docs/docs/Configuration/configuration-cli.md +++ b/docs/docs/Configuration/configuration-cli.md @@ -1,9 +1,10 @@ --- title: Langflow CLI -sidebar_position: 2 slug: /configuration-cli --- +import Link from '@docusaurus/Link'; + # Langflow CLI The Langflow command line interface (Langflow CLI) is the main interface for managing and running the Langflow server. @@ -25,10 +26,10 @@ python -m langflow [OPTIONS] #### Options | Option | Default | Values | Description | -|--------|------|-----------|-------------| -| `--install-completion` | *Not applicable* | *Not applicable* | Install auto-completion for the current shell. | -| `--show-completion` | *Not applicable* | *Not applicable* | Show the location of the auto-completion config file (if installed). | -| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | +|--------|---------|--------|-------------| +| `--install-completion` | *Not applicable* | *Not applicable* | Install auto-completion for the current shell. | +| `--show-completion` | *Not applicable* | *Not applicable* | Show the location of the auto-completion config file (if installed). | +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | ### langflow api-key @@ -44,8 +45,9 @@ python -m langflow api-key [OPTIONS] | Option | Default | Values | Description | |--------|---------|--------|-------------| -| `--log-level` | `critical` | `debug`
`info`
`warning`
`error`
`critical` | Set the logging level. | -| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | +| `--install-completion` | *Not applicable* | *Not applicable* | Install auto-completion for the current shell. | +| `--show-completion` | *Not applicable* | *Not applicable* | Show the location of the auto-completion config file (if installed). | +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | ### langflow copy-db @@ -67,7 +69,7 @@ python -m langflow copy-db | Option | Default | Values | Description | |--------|---------|--------|-------------| -| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | ### langflow migration @@ -83,10 +85,9 @@ python -m langflow migration [OPTIONS] | Option | Default | Values | Description | |--------|---------|--------|-------------| -| `--test` | `true` | [Boolean](#boolean) | Run migrations in test mode. Use `--no-test` to disable test mode. | -| `--fix` | `false` (`--no-fix`) | [Boolean](#boolean) | Fix migrations. This is a destructive operation, and all affected data will be deleted. Only use this option if you know what you are doing. | -| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | - +| `--test` | `true` | [Boolean](#boolean) | Run migrations in test mode. Use `--no-test` to disable test mode. | +| `--fix` | `false` (`--no-fix`) | [Boolean](#boolean) | Fix migrations. This is a destructive operation, and all affected data will be deleted. Only use this option if you know what you are doing. 
| +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | ### langflow run @@ -102,26 +103,26 @@ python -m langflow run [OPTIONS] | Option | Default | Values | Description | |--------|---------|--------|-------------| -| `--host` | `127.0.0.1` | String | The host on which the Langflow server will run.
See [`LANGFLOW_HOST` variable](./environment-variables.md#LANGFLOW_HOST). | -| `--workers` | `1` | Integer | Number of worker processes.
See [`LANGFLOW_WORKERS` variable](./environment-variables.md#LANGFLOW_WORKERS). | -| `--worker-timeout` | `300` | Integer | Worker timeout in seconds.
See [`LANGFLOW_WORKER_TIMEOUT` variable](./environment-variables.md#LANGFLOW_WORKER_TIMEOUT). | -| `--port` | `7860` | Integer | The port on which the Langflow server will run. The server automatically selects a free port if the specified port is in use.
See [`LANGFLOW_PORT` variable](./environment-variables.md#LANGFLOW_PORT). | -| `--components-path` | `langflow/components` | String | Path to the directory containing custom components.
See [`LANGFLOW_COMPONENTS_PATH` variable](./environment-variables.md#LANGFLOW_COMPONENTS_PATH). | -| `--env-file` | Not set | String | Path to the `.env` file containing environment variables.
See [Import environment variables from a .env file](./environment-variables.md#configure-variables-env-file). | -| `--log-level` | `critical` | `debug`
`info`
`warning`
`error`
`critical` | Set the logging level.
See [`LANGFLOW_LOG_LEVEL` variable](./environment-variables.md#LANGFLOW_LOG_LEVEL). | -| `--log-file` | `logs/langflow.log` | String | Set the path to the log file for Langflow.
See [`LANGFLOW_LOG_FILE` variable](./environment-variables.md#LANGFLOW_LOG_FILE). | -| `--cache` | `InMemoryCache` | `InMemoryCache`
`SQLiteCache` | Type of cache to use.
See [`LANGFLOW_LANGCHAIN_CACHE` variable](./environment-variables.md#LANGFLOW_LANGCHAIN_CACHE). | -| `--dev` | `false` (`--no-dev`) | [Boolean](#boolean) | Run Langflow in development mode (may contain bugs).
See [`LANGFLOW_DEV` variable](./environment-variables.md#LANGFLOW_DEV). | -| `--frontend-path` | `./frontend` | String | Path to the frontend directory containing build files. This is for development purposes only.
See [`LANGFLOW_FRONTEND_PATH` variable](./environment-variables.md#LANGFLOW_FRONTEND_PATH). | -| `--open-browser` | `true` | [Boolean](#boolean) | Open the system web browser on startup. Use `--no-open-browser` to disable opening the system web browser on startup.
See [`LANGFLOW_OPEN_BROWSER` variable](./environment-variables.md#LANGFLOW_OPEN_BROWSER). | -| `--remove-api-keys` | `false` (`--no-remove-api-keys`) | [Boolean](#boolean) | Remove API keys from the projects saved in the database.
See [`LANGFLOW_REMOVE_API_KEYS` variable](./environment-variables.md#LANGFLOW_REMOVE_API_KEYS). | -| `--backend-only` | `false` (`--no-backend-only`) | [Boolean](#boolean) | Only run Langflow's backend server (no frontend).
See [`LANGFLOW_BACKEND_ONLY` variable](./environment-variables.md#LANGFLOW_BACKEND_ONLY). | -| `--store` | `true` | [Boolean](#boolean) | Enable the Langflow Store features. Use `--no-store` to disable the Langflow Store features.
See [`LANGFLOW_STORE` variable](./environment-variables.md#LANGFLOW_STORE). | -| `--auto-saving` | `true` | [Boolean](#boolean) | Enable flow auto-saving. Use `--no-auto-saving` to disable flow auto-saving.
See [`LANGFLOW_AUTO_SAVING` variable](./environment-variables.md#LANGFLOW_AUTO_SAVING). | -| `--auto-saving-interval` | `1000` | Integer | Set the interval for flow auto-saving in milliseconds.
See [`LANGFLOW_AUTO_SAVING_INTERVAL` variable](./environment-variables.md#LANGFLOW_AUTO_SAVING_INTERVAL). | -| `--health-check-max-retries` | `5` | Integer | Set the maximum number of retries for the health check. Use `--no-health-check-max-retries` to disable the maximum number of retries for the health check.
See [`LANGFLOW_HEALTH_CHECK_MAX_RETRIES` variable](./environment-variables.md#LANGFLOW_HEALTH_CHECK_MAX_RETRIES). | -| `--max-file-size-upload` | `100` | Integer | Set the maximum file size for the upload in megabytes.
See [`LANGFLOW_MAX_FILE_SIZE_UPLOAD` variable](./environment-variables.md#LANGFLOW_MAX_FILE_SIZE_UPLOAD). | -| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | +| `--host` | `127.0.0.1` | String | The host on which the Langflow server will run.
See [`LANGFLOW_HOST` variable](./environment-variables.md#LANGFLOW_HOST). | +| `--workers` | `1` | Integer | Number of worker processes.
See [`LANGFLOW_WORKERS` variable](./environment-variables.md#LANGFLOW_WORKERS). | +| `--worker-timeout` | `300` | Integer | Worker timeout in seconds.
See [`LANGFLOW_WORKER_TIMEOUT` variable](./environment-variables.md#LANGFLOW_WORKER_TIMEOUT). | +| `--port` | `7860` | Integer | The port on which the Langflow server will run. The server automatically selects a free port if the specified port is in use.
See [`LANGFLOW_PORT` variable](./environment-variables.md#LANGFLOW_PORT). | +| `--components-path` | `langflow/components` | String | Path to the directory containing custom components.
See [`LANGFLOW_COMPONENTS_PATH` variable](./environment-variables.md#LANGFLOW_COMPONENTS_PATH). | +| `--env-file` | Not set | String | Path to the `.env` file containing environment variables.
See [Import environment variables from a .env file](./environment-variables.md#configure-variables-env-file). | +| `--log-level` | `critical` | `debug`
`info`
`warning`
`error`
`critical` | Set the logging level.
See [`LANGFLOW_LOG_LEVEL` variable](./environment-variables.md#LANGFLOW_LOG_LEVEL). | +| `--log-file` | `logs/langflow.log` | String | Set the path to the log file for Langflow.
See [`LANGFLOW_LOG_FILE` variable](./environment-variables.md#LANGFLOW_LOG_FILE). | +| `--cache` | `InMemoryCache` | `InMemoryCache`
`SQLiteCache` | Type of cache to use.
See [`LANGFLOW_LANGCHAIN_CACHE` variable](./environment-variables.md#LANGFLOW_LANGCHAIN_CACHE). | +| `--dev` | `false` (`--no-dev`) | [Boolean](#boolean) | Run Langflow in development mode (may contain bugs).
See [`LANGFLOW_DEV` variable](./environment-variables.md#LANGFLOW_DEV). | +| `--frontend-path` | `./frontend` | String | Path to the frontend directory containing build files. This is for development purposes only.
See [`LANGFLOW_FRONTEND_PATH` variable](./environment-variables.md#LANGFLOW_FRONTEND_PATH). | +| `--open-browser` | `true` | [Boolean](#boolean) | Open the system web browser on startup. Use `--no-open-browser` to disable opening the system web browser on startup.
See [`LANGFLOW_OPEN_BROWSER` variable](./environment-variables.md#LANGFLOW_OPEN_BROWSER). | +| `--remove-api-keys` | `false` (`--no-remove-api-keys`) | [Boolean](#boolean) | Remove API keys from the projects saved in the database.
See [`LANGFLOW_REMOVE_API_KEYS` variable](./environment-variables.md#LANGFLOW_REMOVE_API_KEYS). | +| `--backend-only` | `false` (`--no-backend-only`) | [Boolean](#boolean) | Only run Langflow's backend server (no frontend).
See [`LANGFLOW_BACKEND_ONLY` variable](./environment-variables.md#LANGFLOW_BACKEND_ONLY). | +| `--store` | `true` | [Boolean](#boolean) | Enable the Langflow Store features. Use `--no-store` to disable the Langflow Store features.
See [`LANGFLOW_STORE` variable](./environment-variables.md#LANGFLOW_STORE). | +| `--auto-saving` | `true` | [Boolean](#boolean) | Enable flow auto-saving. Use `--no-auto-saving` to disable flow auto-saving.
See [`LANGFLOW_AUTO_SAVING` variable](./environment-variables.md#LANGFLOW_AUTO_SAVING). | +| `--auto-saving-interval` | `1000` | Integer | Set the interval for flow auto-saving in milliseconds.
See [`LANGFLOW_AUTO_SAVING_INTERVAL` variable](./environment-variables.md#LANGFLOW_AUTO_SAVING_INTERVAL). | +| `--health-check-max-retries` | `5` | Integer | Set the maximum number of retries for the health check. Use `--no-health-check-max-retries` to disable the maximum number of retries for the health check.
See [`LANGFLOW_HEALTH_CHECK_MAX_RETRIES` variable](./environment-variables.md#LANGFLOW_HEALTH_CHECK_MAX_RETRIES). | +| `--max-file-size-upload` | `100` | Integer | Set the maximum file size for the upload in megabytes.
See [`LANGFLOW_MAX_FILE_SIZE_UPLOAD` variable](./environment-variables.md#LANGFLOW_MAX_FILE_SIZE_UPLOAD). | +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | ### langflow superuser @@ -137,9 +138,9 @@ python -m langflow superuser [OPTIONS] | Option | Default | Values | Description | |--------|---------|--------|-------------| -| `--username` | Required | String | Specify the name for the superuser.
See [`LANGFLOW_SUPERUSER` variable](./environment-variables.md#LANGFLOW_SUPERUSER). | -| `--password` | Required | String | Specify the password for the superuser.
See [`LANGFLOW_SUPERUSER_PASSWORD` variable](./environment-variables.md#LANGFLOW_SUPERUSER_PASSWORD). | -| `--log-level` | `critical` | `debug`
`info`
`warning`
`error`
`critical` | Set the logging level. | +| `--username` | Required | String | Specify the name for the superuser.
See [`LANGFLOW_SUPERUSER` variable](./environment-variables.md#LANGFLOW_SUPERUSER). | +| `--password` | Required | String | Specify the password for the superuser.
See [`LANGFLOW_SUPERUSER_PASSWORD` variable](./environment-variables.md#LANGFLOW_SUPERUSER_PASSWORD). | +| `--log-level` | `critical` | `debug`
`info`
`warning`
`error`
`critical` | Set the logging level. | ## Precedence diff --git a/docs/docs/Configuration/configuration-custom-database.md b/docs/docs/Configuration/configuration-custom-database.md index a81a7cf11..f4715327c 100644 --- a/docs/docs/Configuration/configuration-custom-database.md +++ b/docs/docs/Configuration/configuration-custom-database.md @@ -1,6 +1,6 @@ --- title: Configure an external PostgreSQL database -sidebar_position: 8 +slug: /configuration-custom-database --- Langflow's default database is [SQLite](https://www.sqlite.org/docs.html), but you can configure Langflow to use PostgreSQL instead. diff --git a/docs/docs/Configuration/configuration-global-variables.md b/docs/docs/Configuration/configuration-global-variables.md index 2900ab786..8331afb7c 100644 --- a/docs/docs/Configuration/configuration-global-variables.md +++ b/docs/docs/Configuration/configuration-global-variables.md @@ -1,6 +1,5 @@ --- title: Global variables -sidebar_position: 5 slug: /configuration-global-variables --- diff --git a/docs/docs/Configuration/configuration-security-best-practices.md b/docs/docs/Configuration/configuration-security-best-practices.md index 053ff5662..15600b73d 100644 --- a/docs/docs/Configuration/configuration-security-best-practices.md +++ b/docs/docs/Configuration/configuration-security-best-practices.md @@ -1,6 +1,5 @@ --- title: Security best practices -sidebar_position: 1 slug: /configuration-security-best-practices --- diff --git a/docs/docs/Configuration/environment-variables.md b/docs/docs/Configuration/environment-variables.md index 40215504a..7033f0910 100644 --- a/docs/docs/Configuration/environment-variables.md +++ b/docs/docs/Configuration/environment-variables.md @@ -1,11 +1,12 @@ --- title: Environment variables -sidebar_position: 7 slug: /environment-variables --- import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; +import Link from '@docusaurus/Link'; + Langflow lets you configure a number of settings using environment variables. 
@@ -104,44 +105,43 @@ That means, if you happen to set the same environment variable in both your term ## Supported environment variables {#supported-variables} The following table lists the environment variables supported by Langflow. | Variable | Format / Values | Default | Description | |----------|---------------|---------|-------------| -| `DO_NOT_TRACK` | Boolean | `false` | If enabled, Langflow will not track telemetry. | -| `LANGFLOW_AUTO_LOGIN` | Boolean | `true` | Enable automatic login for Langflow. Set to `false` to disable automatic login and require the login form to log into the Langflow UI. Setting to `false` requires [`LANGFLOW_SUPERUSER`](#LANGFLOW_SUPERUSER) and [`LANGFLOW_SUPERUSER_PASSWORD`](#LANGFLOW_SUPERUSER_PASSWORD) to be set. | -| `LANGFLOW_AUTO_SAVING` | Boolean | `true` | Enable flow auto-saving.
See [`--auto-saving` option](./configuration-cli.md#run-auto-saving). | -| `LANGFLOW_AUTO_SAVING_INTERVAL` | Integer | `1000` | Set the interval for flow auto-saving in milliseconds.
See [`--auto-saving-interval` option](./configuration-cli.md#run-auto-saving-interval). | -| `LANGFLOW_BACKEND_ONLY` | Boolean | `false` | Only run Langflow's backend server (no frontend).
See [`--backend-only` option](./configuration-cli.md#run-backend-only). | -| `LANGFLOW_CACHE_TYPE` | `async`
`redis`
`memory`
`disk`
`critical` | `async` | Set the cache type for Langflow.
If you set the type to `redis`, then you must also set the following environment variables: [`LANGFLOW_REDIS_HOST`](#LANGFLOW_REDIS_HOST), [`LANGFLOW_REDIS_PORT`](#LANGFLOW_REDIS_PORT), [`LANGFLOW_REDIS_DB`](#LANGFLOW_REDIS_DB), and [`LANGFLOW_REDIS_CACHE_EXPIRE`](#LANGFLOW_REDIS_CACHE_EXPIRE). | -| `LANGFLOW_COMPONENTS_PATH` | String | `langflow/components` | Path to the directory containing custom components.
See [`--components-path` option](./configuration-cli.md#run-components-path). | -| `LANGFLOW_CONFIG_DIR` | String | | Set the Langflow configuration directory where files, logs, and the Langflow database are stored. | -| `LANGFLOW_DATABASE_URL` | String | | Set the database URL for Langflow. If you don't provide one, Langflow uses an SQLite database. | -| `LANGFLOW_DEV` | Boolean | `false` | Run Langflow in development mode (may contain bugs).
See [`--dev` option](./configuration-cli.md#run-dev). | -| `LANGFLOW_FALLBACK_TO_ENV_VAR` | Boolean | `true` | If enabled, [global variables](../Configuration/configuration-global-variables.md) set in the Langflow UI fall back to an environment variable with the same name when Langflow fails to retrieve the variable value. | -| `LANGFLOW_FRONTEND_PATH` | String | `./frontend` | Path to the frontend directory containing build files. This is for development purposes only.
See [`--frontend-path` option](./configuration-cli.md#run-frontend-path). | -| `LANGFLOW_HEALTH_CHECK_MAX_RETRIES` | Integer | `5` | Set the maximum number of retries for the health check.
See [`--health-check-max-retries` option](./configuration-cli.md#run-health-check-max-retries). | -| `LANGFLOW_HOST` | String | `127.0.0.1` | The host on which the Langflow server will run.
See [`--host` option](./configuration-cli.md#run-host). | -| `LANGFLOW_LANGCHAIN_CACHE` | `InMemoryCache`
`SQLiteCache` | `InMemoryCache` | Type of cache to use.
See [`--cache` option](./configuration-cli.md#run-cache). | -| `LANGFLOW_MAX_FILE_SIZE_UPLOAD` | Integer | `100` | Set the maximum file size for the upload in megabytes.
See [`--max-file-size-upload` option](./configuration-cli.md#run-max-file-size-upload). | -| `LANGFLOW_LOG_ENV` | `container_json`
`container_csv`
| Not set | Set the log environment. Default (Not set) is json with color. If not set a format string can be provided.
See [`LANGFLOW_LOG_FORMAT`](#LANGFLOW_CACHE_TYPE) | -| `LANGFLOW_LOG_FILE` | String | `logs/langflow.log` | Set the path to the log file for Langflow.
See [`--log-file` option](./configuration-cli.md#run-log-file). | -| `LANGFLOW_LOG_FORMAT` | String | `{time:YYYY-MM-DD HH:mm:ss} - {level: <8} - {module} - {message}` | Configure the logformat.
For example without colors: `{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {file} {line} {function} {message}`
If [`LANGFLOW_LOG_ENV`](#LANGFLOW_LOG_ENV) is set this configuration will be ignored. -| `LANGFLOW_LOG_LEVEL` | `debug`
`info`
`warning`
`error`
`critical` | `critical` | Set the logging level.
See [`--log-level` option](./configuration-cli.md#run-log-level). | -| `LANGFLOW_MAX_FILE_SIZE_UPLOAD` | Integer | `100` | Set the maximum file size for the upload in megabytes.
See [`--max-file-size-upload` option](./configuration-cli.md#run-max-file-size-upload). | -| `LANGFLOW_OPEN_BROWSER` | Boolean | `true` | Open the system web browser on startup.
See [`--open-browser` option](./configuration-cli.md#run-open-browser). | -| `LANGFLOW_PORT` | Integer | `7860` | The port on which the Langflow server will run. The server automatically selects a free port if the specified port is in use.
See [`--port` option](./configuration-cli.md#run-port). | -| `LANGFLOW_PROMETHEUS_ENABLED` | Boolean | `false` | Expose Prometheus metrics. | -| `LANGFLOW_PROMETHEUS_PORT` | Integer | `9090` | Set the port on which Langflow exposes Prometheus metrics. | -| `LANGFLOW_REDIS_CACHE_EXPIRE` | Integer | `3600` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | -| `LANGFLOW_REDIS_DB` | Integer | `0` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | -| `LANGFLOW_REDIS_HOST` | String | `localhost` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | -| `LANGFLOW_REDIS_PORT` | String | `6379` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | -| `LANGFLOW_REMOVE_API_KEYS` | Boolean | `false` | Remove API keys from the projects saved in the database.
See [`--remove-api-keys` option](./configuration-cli.md#run-remove-api-keys). | -| `LANGFLOW_SAVE_DB_IN_CONFIG_DIR` | Boolean | `false` | Save the Langflow database in [`LANGFLOW_CONFIG_DIR`](#LANGFLOW_CONFIG_DIR) instead of in the Langflow package directory. Note, when this variable is set to default (`false`), the database isn't shared between different virtual environments and the database is deleted when you uninstall Langflow. | -| `LANGFLOW_STORE` | Boolean | `true` | Enable the Langflow Store.
See [`--store` option](./configuration-cli.md#run-store). | -| `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` | Boolean | `true` | Store environment variables as [global variables](../Configuration/configuration-global-variables.md) in the database. | -| `LANGFLOW_SUPERUSER` | String | Not set | Set the name for the superuser. Required if [`LANGFLOW_AUTO_LOGIN`](#LANGFLOW_AUTO_LOGIN) is set to `false`.
See [`superuser --username` option](./configuration-cli.md#superuser-username). | -| `LANGFLOW_SUPERUSER_PASSWORD` | String | Not set | Set the password for the superuser. Required if [`LANGFLOW_AUTO_LOGIN`](#LANGFLOW_AUTO_LOGIN) is set to `false`.
See [`superuser --password` option](./configuration-cli.md#superuser-password).| -| `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` | String | Not set | Comma-separated list of environment variables to get from the environment and store as [global variables](../Configuration/configuration-global-variables.md). | -| `LANGFLOW_WORKER_TIMEOUT` | Integer | `300` | Worker timeout in seconds.
See [`--worker-timeout` option](./configuration-cli.md#run-worker-timeout). | -| `LANGFLOW_WORKERS` | Integer | `1` | Number of worker processes.
See [`--workers` option](./configuration-cli.md#run-workers). | +| `DO_NOT_TRACK` | Boolean | `false` | If enabled, Langflow will not track telemetry. | +| `LANGFLOW_AUTO_LOGIN` | Boolean | `true` | Enable automatic login for Langflow. Set to `false` to disable automatic login and require the login form to log into the Langflow UI. Setting to `false` requires [`LANGFLOW_SUPERUSER`](#LANGFLOW_SUPERUSER) and [`LANGFLOW_SUPERUSER_PASSWORD`](#LANGFLOW_SUPERUSER_PASSWORD) to be set. | +| `LANGFLOW_AUTO_SAVING` | Boolean | `true` | Enable flow auto-saving.
See [`--auto-saving` option](./configuration-cli.md#run-auto-saving). | +| `LANGFLOW_AUTO_SAVING_INTERVAL` | Integer | `1000` | Set the interval for flow auto-saving in milliseconds.
See [`--auto-saving-interval` option](./configuration-cli.md#run-auto-saving-interval). | +| `LANGFLOW_BACKEND_ONLY` | Boolean | `false` | Only run Langflow's backend server (no frontend).
See [`--backend-only` option](./configuration-cli.md#run-backend-only). | +| `LANGFLOW_CACHE_TYPE` | `async`
`redis`
`memory`
`disk`
`critical` | `async` | Set the cache type for Langflow.
If you set the type to `redis`, then you must also set the following environment variables: [`LANGFLOW_REDIS_HOST`](#LANGFLOW_REDIS_HOST), [`LANGFLOW_REDIS_PORT`](#LANGFLOW_REDIS_PORT), [`LANGFLOW_REDIS_DB`](#LANGFLOW_REDIS_DB), and [`LANGFLOW_REDIS_CACHE_EXPIRE`](#LANGFLOW_REDIS_CACHE_EXPIRE). | +| `LANGFLOW_COMPONENTS_PATH` | String | `langflow/components` | Path to the directory containing custom components.
See [`--components-path` option](./configuration-cli.md#run-components-path). | +| `LANGFLOW_CONFIG_DIR` | String | | Set the Langflow configuration directory where files, logs, and the Langflow database are stored. | +| `LANGFLOW_DATABASE_URL` | String | | Set the database URL for Langflow. If you don't provide one, Langflow uses an SQLite database. | +| `LANGFLOW_DEV` | Boolean | `false` | Run Langflow in development mode (may contain bugs).
See [`--dev` option](./configuration-cli.md#run-dev). | +| `LANGFLOW_FALLBACK_TO_ENV_VAR` | Boolean | `true` | If enabled, [global variables](../Configuration/configuration-global-variables.md) set in the Langflow UI fall back to an environment variable with the same name when Langflow fails to retrieve the variable value. | +| `LANGFLOW_FRONTEND_PATH` | String | `./frontend` | Path to the frontend directory containing build files. This is for development purposes only.
See [`--frontend-path` option](./configuration-cli.md#run-frontend-path). | +| `LANGFLOW_HEALTH_CHECK_MAX_RETRIES` | Integer | `5` | Set the maximum number of retries for the health check.
See [`--health-check-max-retries` option](./configuration-cli.md#run-health-check-max-retries). | +| `LANGFLOW_HOST` | String | `127.0.0.1` | The host on which the Langflow server will run.
See [`--host` option](./configuration-cli.md#run-host). | +| `LANGFLOW_LANGCHAIN_CACHE` | `InMemoryCache`
`SQLiteCache` | `InMemoryCache` | Type of cache to use.
See [`--cache` option](./configuration-cli.md#run-cache). | +| `LANGFLOW_MAX_FILE_SIZE_UPLOAD` | Integer | `100` | Set the maximum file size for the upload in megabytes.
See [`--max-file-size-upload` option](./configuration-cli.md#run-max-file-size-upload). | +| `LANGFLOW_LOG_ENV` | `container_json`
`container_csv`
| Not set | Set the log environment. Default (Not set) is JSON with color. If not set, a format string can be provided.
See [`LANGFLOW_LOG_FORMAT`](#LANGFLOW_LOG_FORMAT) | +| `LANGFLOW_LOG_FILE` | String | `logs/langflow.log` | Set the path to the log file for Langflow.
See [`--log-file` option](./configuration-cli.md#run-log-file). | +| `LANGFLOW_LOG_FORMAT` | String | `{time:YYYY-MM-DD HH:mm:ss} - {level: <8} - {module} - {message}` | Configure the log format.
For example without colors: `{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {file} {line} {function} {message}`
If [`LANGFLOW_LOG_ENV`](#LANGFLOW_LOG_ENV) is set, this configuration will be ignored. | +| `LANGFLOW_LOG_LEVEL` | `debug`
`info`
`warning`
`error`
`critical` | `critical` | Set the logging level.
See [`--log-level` option](./configuration-cli.md#run-log-level). | +| `LANGFLOW_OPEN_BROWSER` | Boolean | `true` | Open the system web browser on startup.
See [`--open-browser` option](./configuration-cli.md#run-open-browser). | +| `LANGFLOW_PORT` | Integer | `7860` | The port on which the Langflow server will run. The server automatically selects a free port if the specified port is in use.
See [`--port` option](./configuration-cli.md#run-port). | +| `LANGFLOW_PROMETHEUS_ENABLED` | Boolean | `false` | Expose Prometheus metrics. | +| `LANGFLOW_PROMETHEUS_PORT` | Integer | `9090` | Set the port on which Langflow exposes Prometheus metrics. | +| `LANGFLOW_REDIS_CACHE_EXPIRE` | Integer | `3600` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | +| `LANGFLOW_REDIS_DB` | Integer | `0` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | +| `LANGFLOW_REDIS_HOST` | String | `localhost` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | +| `LANGFLOW_REDIS_PORT` | String | `6379` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | +| `LANGFLOW_REMOVE_API_KEYS` | Boolean | `false` | Remove API keys from the projects saved in the database.
See [`--remove-api-keys` option](./configuration-cli.md#run-remove-api-keys). | +| `LANGFLOW_SAVE_DB_IN_CONFIG_DIR` | Boolean | `false` | Save the Langflow database in [`LANGFLOW_CONFIG_DIR`](#LANGFLOW_CONFIG_DIR) instead of in the Langflow package directory. Note, when this variable is set to default (`false`), the database isn't shared between different virtual environments and the database is deleted when you uninstall Langflow. | +| `LANGFLOW_STORE` | Boolean | `true` | Enable the Langflow Store.
See [`--store` option](./configuration-cli.md#run-store). | +| `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` | Boolean | `true` | Store environment variables as [global variables](../Configuration/configuration-global-variables.md) in the database. | +| `LANGFLOW_SUPERUSER` | String | Not set | Set the name for the superuser. Required if [`LANGFLOW_AUTO_LOGIN`](#LANGFLOW_AUTO_LOGIN) is set to `false`.
See [`superuser --username` option](./configuration-cli.md#superuser-username). | +| `LANGFLOW_SUPERUSER_PASSWORD` | String | Not set | Set the password for the superuser. Required if [`LANGFLOW_AUTO_LOGIN`](#LANGFLOW_AUTO_LOGIN) is set to `false`.
See [`superuser --password` option](./configuration-cli.md#superuser-password).| +| `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` | String | Not set | Comma-separated list of environment variables to get from the environment and store as [global variables](../Configuration/configuration-global-variables.md). | +| `LANGFLOW_WORKER_TIMEOUT` | Integer | `300` | Worker timeout in seconds.
See [`--worker-timeout` option](./configuration-cli.md#run-worker-timeout). | +| `LANGFLOW_WORKERS` | Integer | `1` | Number of worker processes.
See [`--workers` option](./configuration-cli.md#run-workers). | diff --git a/docs/docs/Contributing/_category_.json b/docs/docs/Contributing/_category_.json deleted file mode 100644 index 32da1de7c..000000000 --- a/docs/docs/Contributing/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":10, "label":"Contributing"} \ No newline at end of file diff --git a/docs/docs/Contributing/contributing-community.md b/docs/docs/Contributing/contributing-community.md index f37ae5d7e..4cdf32ea8 100644 --- a/docs/docs/Contributing/contributing-community.md +++ b/docs/docs/Contributing/contributing-community.md @@ -1,6 +1,5 @@ --- title: Join the Langflow community -sidebar_position: 5 slug: /contributing-community --- diff --git a/docs/docs/Contributing/contributing-components.md b/docs/docs/Contributing/contributing-components.md index 0cac371e4..138d52b94 100644 --- a/docs/docs/Contributing/contributing-components.md +++ b/docs/docs/Contributing/contributing-components.md @@ -1,6 +1,5 @@ --- title: Contribute components -sidebar_position: 4 slug: /contributing-components --- diff --git a/docs/docs/Contributing/contributing-github-discussion-board.md b/docs/docs/Contributing/contributing-github-discussion-board.md index e17d5366d..0a64f8722 100644 --- a/docs/docs/Contributing/contributing-github-discussion-board.md +++ b/docs/docs/Contributing/contributing-github-discussion-board.md @@ -1,6 +1,5 @@ --- title: Ask for help on the Discussions board -sidebar_position: 3 slug: /contributing-github-discussions --- diff --git a/docs/docs/Contributing/contributing-github-issues.md b/docs/docs/Contributing/contributing-github-issues.md index 11105a7e3..e49adddb2 100644 --- a/docs/docs/Contributing/contributing-github-issues.md +++ b/docs/docs/Contributing/contributing-github-issues.md @@ -1,6 +1,5 @@ --- title: Request an enhancement or report a bug -sidebar_position: 2 slug: /contributing-github-issues --- diff --git a/docs/docs/Contributing/contributing-how-to-contribute.md 
b/docs/docs/Contributing/contributing-how-to-contribute.md index 8a87bcd5d..857e1b97b 100644 --- a/docs/docs/Contributing/contributing-how-to-contribute.md +++ b/docs/docs/Contributing/contributing-how-to-contribute.md @@ -1,6 +1,5 @@ --- title: Contribute to Langflow -sidebar_position: 1 slug: /contributing-how-to-contribute --- diff --git a/docs/docs/Contributing/contributing-telemetry.md b/docs/docs/Contributing/contributing-telemetry.md index c374565ca..0d7d24b8d 100644 --- a/docs/docs/Contributing/contributing-telemetry.md +++ b/docs/docs/Contributing/contributing-telemetry.md @@ -1,6 +1,5 @@ --- title: Telemetry -sidebar_position: 0 slug: /contributing-telemetry --- diff --git a/docs/docs/Deployment/_category_.json b/docs/docs/Deployment/_category_.json deleted file mode 100644 index 41f76d47b..000000000 --- a/docs/docs/Deployment/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":7, "label":"Deployment"} \ No newline at end of file diff --git a/docs/docs/Deployment/deployment-docker.md b/docs/docs/Deployment/deployment-docker.md index 2f91fe8a4..71c2c2df1 100644 --- a/docs/docs/Deployment/deployment-docker.md +++ b/docs/docs/Deployment/deployment-docker.md @@ -1,7 +1,6 @@ --- -title: Docker -sidebar_position: 2 -slug: /deployment-docker +title: Docker +slug: /deployment-docker --- @@ -38,7 +37,7 @@ This guide will help you get LangFlow up and running using Docker and Docker Com `docker compose up` -LangFlow will now be accessible at [http://localhost:7860/](http://localhost:7860/). +LangFlow will now be accessible at `http://localhost:7860/`. 
### Docker Compose configuration {#02226209cad24185a6ec5b69bd820d0f} diff --git a/docs/docs/Deployment/deployment-gcp.md b/docs/docs/Deployment/deployment-gcp.md index b8c30b38c..43dabf992 100644 --- a/docs/docs/Deployment/deployment-gcp.md +++ b/docs/docs/Deployment/deployment-gcp.md @@ -1,6 +1,5 @@ --- title: GCP -sidebar_position: 3 slug: /deployment-gcp --- diff --git a/docs/docs/Deployment/deployment-hugging-face-spaces.md b/docs/docs/Deployment/deployment-hugging-face-spaces.md index c9bb552d6..187945383 100644 --- a/docs/docs/Deployment/deployment-hugging-face-spaces.md +++ b/docs/docs/Deployment/deployment-hugging-face-spaces.md @@ -1,6 +1,5 @@ --- title: HuggingFace Spaces -sidebar_position: 0 slug: /deployment-hugging-face-spaces --- diff --git a/docs/docs/Deployment/deployment-kubernetes.md b/docs/docs/Deployment/deployment-kubernetes.md index 98593b4f7..99980e82b 100644 --- a/docs/docs/Deployment/deployment-kubernetes.md +++ b/docs/docs/Deployment/deployment-kubernetes.md @@ -1,6 +1,5 @@ --- title: Kubernetes -sidebar_position: 1 slug: /deployment-kubernetes --- @@ -84,7 +83,7 @@ kubectl port-forward -n langflow svc/langflow-langflow-runtime 7860:7860 ``` -Now you can access LangFlow at [http://localhost:7860/](http://localhost:7860/). +Now you can access LangFlow at `http://localhost:7860/`. 
### LangFlow version {#645c6ef7984d4da0bcc4170bab0ff415} @@ -258,7 +257,7 @@ kubectl port-forward -n langflow svc/langflow-my-langflow-app 7860:7860 ``` -Now you can access the API at [http://localhost:7860/api/v1/flows](http://localhost:7860/api/v1/flows) and execute the flow: +Now you can access the API at `http://localhost:7860/api/v1/flows` and execute the flow: ```shell diff --git a/docs/docs/Deployment/deployment-railway.md b/docs/docs/Deployment/deployment-railway.md index 69cf2b30d..0c41280f7 100644 --- a/docs/docs/Deployment/deployment-railway.md +++ b/docs/docs/Deployment/deployment-railway.md @@ -1,6 +1,5 @@ --- title: Railway -sidebar_position: 5 slug: /deployment-railway --- diff --git a/docs/docs/Deployment/deployment-render.md b/docs/docs/Deployment/deployment-render.md index 0e057efe5..2e0d1b26f 100644 --- a/docs/docs/Deployment/deployment-render.md +++ b/docs/docs/Deployment/deployment-render.md @@ -1,6 +1,5 @@ --- title: Render -sidebar_position: 4 slug: /deployment-render --- diff --git a/docs/docs/Get-Started/_category_.json b/docs/docs/Get-Started/_category_.json deleted file mode 100644 index dfddedbe3..000000000 --- a/docs/docs/Get-Started/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":1, "label":"Get Started"} \ No newline at end of file diff --git a/docs/docs/Get-Started/get-started-installation.md b/docs/docs/Get-Started/get-started-installation.md index cb227b73b..95c3fcd7f 100644 --- a/docs/docs/Get-Started/get-started-installation.md +++ b/docs/docs/Get-Started/get-started-installation.md @@ -1,6 +1,5 @@ --- title: Install Langflow -sidebar_position: 1 slug: /get-started-installation --- diff --git a/docs/docs/Get-Started/get-started-quickstart.md b/docs/docs/Get-Started/get-started-quickstart.md index 16b83a07c..6b86f6df9 100644 --- a/docs/docs/Get-Started/get-started-quickstart.md +++ b/docs/docs/Get-Started/get-started-quickstart.md @@ -1,6 +1,5 @@ --- title: Quickstart -sidebar_position: 2 slug: /get-started-quickstart --- 
@@ -121,9 +120,9 @@ The [Astra DB vector store](/components-vector-stores#astra-db-vector-store) com 3. Click **Data**, select the **File** component, and then drag it to the canvas. The [File](/components-data#file) component loads files from your local machine. 3. Click **Processing**, select the **Split Text** component, and then drag it to the canvas. -The [Split Text](/components-helpers#split-text) component splits the loaded text into smaller chunks. +The [Split Text](/components-processing#split-text) component splits the loaded text into smaller chunks. 4. Click **Processing**, select the **Parse Data** component, and then drag it to the canvas. -The [Parse Data](/components-helpers#parse-data) component converts the data from the **Astra DB** component into plain text. +The [Parse Data](/components-processing#parse-data) component converts the data from the **Astra DB** component into plain text. 5. Click **Embeddings**, select the **OpenAI Embeddings** component, and then drag it to the canvas. The [OpenAI Embeddings](/components-embedding-models#openai-embeddings) component generates embeddings for the user's input, which are compared to the vector data in the database. 6. 
Connect the new components into the existing flow, so your flow looks like this: diff --git a/docs/docs/Get-Started/welcome-to-langflow.md b/docs/docs/Get-Started/welcome-to-langflow.md index c5a2018ec..f54028907 100644 --- a/docs/docs/Get-Started/welcome-to-langflow.md +++ b/docs/docs/Get-Started/welcome-to-langflow.md @@ -1,6 +1,5 @@ --- title: Welcome to Langflow -sidebar_position: 0 slug: / --- diff --git a/docs/docs/Guides/_category_.json b/docs/docs/Guides/_category_.json deleted file mode 100644 index 4b98c2824..000000000 --- a/docs/docs/Guides/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":3, "label":"Guides"} \ No newline at end of file diff --git a/docs/docs/Guides/guides-chat-memory.md b/docs/docs/Guides/guides-chat-memory.md index 71fa1ab6f..8236a8b87 100644 --- a/docs/docs/Guides/guides-chat-memory.md +++ b/docs/docs/Guides/guides-chat-memory.md @@ -1,6 +1,5 @@ --- title: Chat Memory -sidebar_position: 1 slug: /guides-chat-memory --- @@ -12,7 +11,7 @@ Langflow allows every chat message to be stored, and a single flow can have mult In any project, as long as there are [**Chat**](/components-io) being used, memories are always being stored by default. These are messages from a user to the AI or vice-versa. -To see and access this history of messages, Langflow features a component called [Message history](/components-helpers#memory-history). It retrieves previous messages and outputs them in structured format or parsed. +To see and access this history of messages, Langflow features a component called [Message history](/components-helpers#message-history). It retrieves previous messages and outputs them in structured format or parsed. To learn the basics about memory in Langflow, check out the [Memory Chatbot](/tutorials-memory-chatbot) starter example. 
@@ -55,7 +54,7 @@ You can also display all messages stored across every flow and session by going ## Store chat memory in an external database -Chat memory is retrieved from an external database or vector store using the [Chat Memory](/components-helpers#chat-memory) component. +Chat memory is retrieved from an external database or vector store using the [Chat Memory](/components-helpers#message-history) component. Chat memory is stored to an external database or vector store using the [Store Message](/components-helpers#store-message) component. @@ -81,7 +80,7 @@ The **Astra DB Chat Memory** component stores and retrieves messages from **Astr 4. Configure the **AstraDBChatMemory** component with your AstraDB instance details. 1. In the **Astra DB Application Token** field, add your Astra token. (`AstraCS:...`) 2. In the **API Endpoint** field, add your Astra database's endpoint. (for example, `https://12adb-bc-5378c845f05a6-e0a12-bd889b4-us-east-2.apps.astra.datastax.com`) -5. Connect the **AstraDBChatMemory** component output to the external memory inputs of the [Chat Memory](/components-helpers#chat-memory) and [Store Message](/components-helpers#store-message) components. +5. Connect the **AstraDBChatMemory** component output to the external memory inputs of the [Message history](/components-helpers#message-history) and [Store Message](/components-helpers#store-message) components. 6. Link the [Chat Output](/components-io#chat-output) component to the input of the [Store Message](/components-helpers#store-message) component. 
Your completed flow should look like this: diff --git a/docs/docs/Guides/guides-data-message.md b/docs/docs/Guides/guides-data-message.md index a8e48ec11..cb2692feb 100644 --- a/docs/docs/Guides/guides-data-message.md +++ b/docs/docs/Guides/guides-data-message.md @@ -1,6 +1,5 @@ --- title: Data & Message -sidebar_position: 2 slug: /guides-data-message --- diff --git a/docs/docs/Guides/guides-new-to-llms.md b/docs/docs/Guides/guides-new-to-llms.md index b7117b707..12c36c9be 100644 --- a/docs/docs/Guides/guides-new-to-llms.md +++ b/docs/docs/Guides/guides-new-to-llms.md @@ -1,6 +1,5 @@ --- title: 📚 New to LLMs? -sidebar_position: 0 slug: /guides-new-to-llms --- diff --git a/docs/docs/Integrations/Google/_category_.json b/docs/docs/Integrations/Google/_category_.json deleted file mode 100644 index 8fd84656c..000000000 --- a/docs/docs/Integrations/Google/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{ "position": 2, "label": "Google" } diff --git a/docs/docs/Integrations/Google/integrations-setup-google-cloud-vertex-ai-langflow.md b/docs/docs/Integrations/Google/integrations-setup-google-cloud-vertex-ai-langflow.md index 0f5e0cf4e..e71c552f4 100644 --- a/docs/docs/Integrations/Google/integrations-setup-google-cloud-vertex-ai-langflow.md +++ b/docs/docs/Integrations/Google/integrations-setup-google-cloud-vertex-ai-langflow.md @@ -1,7 +1,6 @@ --- -title: 'Integrate Google Cloud Vertex AI with Langflow' +title: Integrate Google Cloud Vertex AI with Langflow slug: /integrations-setup-google-cloud-vertex-ai-langflow -sidebar_position: 2 description: "A comprehensive guide on creating a Google OAuth app, obtaining tokens, and integrating them with Langflow's Google components." 
--- diff --git a/docs/docs/Integrations/Google/integrations-setup-google-oauth-langflow.md b/docs/docs/Integrations/Google/integrations-setup-google-oauth-langflow.md index 0bfcd3a63..172de6f8d 100644 --- a/docs/docs/Integrations/Google/integrations-setup-google-oauth-langflow.md +++ b/docs/docs/Integrations/Google/integrations-setup-google-oauth-langflow.md @@ -1,7 +1,6 @@ --- title: Integrate Google OAuth with Langflow slug: /integrations-setup-google-oauth-langflow -sidebar_position: 3 description: "A comprehensive guide on creating a Google OAuth app, obtaining tokens, and integrating them with Langflow's Google components." --- diff --git a/docs/docs/Integrations/Notion/_category_.json b/docs/docs/Integrations/Notion/_category_.json deleted file mode 100644 index c245462ef..000000000 --- a/docs/docs/Integrations/Notion/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":5, "label":"Notion"} \ No newline at end of file diff --git a/docs/docs/Integrations/Notion/integrations-notion.md b/docs/docs/Integrations/Notion/integrations-notion.md index 6664991a0..c9b504cf6 100644 --- a/docs/docs/Integrations/Notion/integrations-notion.md +++ b/docs/docs/Integrations/Notion/integrations-notion.md @@ -1,6 +1,5 @@ --- title: Setup -sidebar_position: 0 slug: /integrations/notion/setup --- diff --git a/docs/docs/Integrations/Notion/notion-agent-conversational.md b/docs/docs/Integrations/Notion/notion-agent-conversational.md index 150c6e53c..1d5fd161f 100644 --- a/docs/docs/Integrations/Notion/notion-agent-conversational.md +++ b/docs/docs/Integrations/Notion/notion-agent-conversational.md @@ -1,6 +1,5 @@ --- title: Notion Conversational Agent -sidebar_position: 2 slug: /integrations/notion/notion-agent-conversational --- diff --git a/docs/docs/Integrations/Notion/notion-agent-meeting-notes.md b/docs/docs/Integrations/Notion/notion-agent-meeting-notes.md index 7980bc15f..dfd1cde63 100644 --- a/docs/docs/Integrations/Notion/notion-agent-meeting-notes.md +++ 
b/docs/docs/Integrations/Notion/notion-agent-meeting-notes.md @@ -1,6 +1,5 @@ --- title: Notion Meeting Notes Agent -sidebar_position: 1 slug: /integrations/notion/notion-agent-meeting-notes --- diff --git a/docs/docs/Integrations/_category_.json b/docs/docs/Integrations/_category_.json deleted file mode 100644 index 6aed9c635..000000000 --- a/docs/docs/Integrations/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":9, "label":"Integrations"} \ No newline at end of file diff --git a/docs/docs/Integrations/integrations-assemblyai.md b/docs/docs/Integrations/integrations-assemblyai.md index e68d3203b..a2609e539 100644 --- a/docs/docs/Integrations/integrations-assemblyai.md +++ b/docs/docs/Integrations/integrations-assemblyai.md @@ -1,6 +1,5 @@ --- title: AssemblyAI -sidebar_position: 1 slug: /integrations-assemblyai --- diff --git a/docs/docs/Integrations/integrations-langfuse.md b/docs/docs/Integrations/integrations-langfuse.md index 2e64536f0..6b19748fb 100644 --- a/docs/docs/Integrations/integrations-langfuse.md +++ b/docs/docs/Integrations/integrations-langfuse.md @@ -1,6 +1,5 @@ --- title: Langfuse -sidebar_position: 2 slug: /integrations-langfuse --- diff --git a/docs/docs/Integrations/integrations-langsmith.md b/docs/docs/Integrations/integrations-langsmith.md index 957ee9d6b..b65662496 100644 --- a/docs/docs/Integrations/integrations-langsmith.md +++ b/docs/docs/Integrations/integrations-langsmith.md @@ -1,6 +1,5 @@ --- title: LangSmith -sidebar_position: 3 slug: /integrations-langsmith --- diff --git a/docs/docs/Integrations/integrations-langwatch.md b/docs/docs/Integrations/integrations-langwatch.md index 626c1b8b5..85a735e7f 100644 --- a/docs/docs/Integrations/integrations-langwatch.md +++ b/docs/docs/Integrations/integrations-langwatch.md @@ -1,6 +1,5 @@ --- title: LangWatch -sidebar_position: 4 slug: /integrations-langwatch --- diff --git a/docs/docs/Starter-Projects/starter-projects-basic-prompting.md 
b/docs/docs/Starter-Projects/starter-projects-basic-prompting.md index 947fac524..04fad824a 100644 --- a/docs/docs/Starter-Projects/starter-projects-basic-prompting.md +++ b/docs/docs/Starter-Projects/starter-projects-basic-prompting.md @@ -1,6 +1,5 @@ --- title: Basic Prompting -sidebar_position: 0 slug: /starter-projects-basic-prompting --- diff --git a/docs/docs/Starter-Projects/starter-projects-simple-agent.md b/docs/docs/Starter-Projects/starter-projects-simple-agent.md index 52879c4ad..882446cad 100644 --- a/docs/docs/Starter-Projects/starter-projects-simple-agent.md +++ b/docs/docs/Starter-Projects/starter-projects-simple-agent.md @@ -1,6 +1,5 @@ --- title: Simple agent -sidebar_position: 6 slug: /starter-projects-simple-agent --- diff --git a/docs/docs/Starter-Projects/starter-projects-vector-store-rag.md b/docs/docs/Starter-Projects/starter-projects-vector-store-rag.md index 3f3c770fe..e3c5d2aed 100644 --- a/docs/docs/Starter-Projects/starter-projects-vector-store-rag.md +++ b/docs/docs/Starter-Projects/starter-projects-vector-store-rag.md @@ -1,6 +1,5 @@ --- title: Vector Store RAG -sidebar_position: 4 slug: /starter-projects-vector-store-rag --- diff --git a/docs/docs/Tutorials/tutorials-blog-writer.md b/docs/docs/Tutorials/tutorials-blog-writer.md index ba72c4a15..a9015b1d3 100644 --- a/docs/docs/Tutorials/tutorials-blog-writer.md +++ b/docs/docs/Tutorials/tutorials-blog-writer.md @@ -1,6 +1,5 @@ --- title: Blog Writer -sidebar_position: 1 slug: /tutorials-blog-writer --- diff --git a/docs/docs/Tutorials/tutorials-document-qa.md b/docs/docs/Tutorials/tutorials-document-qa.md index 9182c2e0c..614ecef35 100644 --- a/docs/docs/Tutorials/tutorials-document-qa.md +++ b/docs/docs/Tutorials/tutorials-document-qa.md @@ -1,6 +1,5 @@ --- title: Document QA -sidebar_position: 2 slug: /tutorials-document-qa --- diff --git a/docs/docs/Tutorials/tutorials-memory-chatbot.md b/docs/docs/Tutorials/tutorials-memory-chatbot.md index 4ff0113b0..634f18b33 100644 --- 
a/docs/docs/Tutorials/tutorials-memory-chatbot.md +++ b/docs/docs/Tutorials/tutorials-memory-chatbot.md @@ -1,6 +1,5 @@ --- title: Memory Chatbot -sidebar_position: 3 slug: /tutorials-memory-chatbot --- diff --git a/docs/docs/Tutorials/tutorials-sequential-agent.md b/docs/docs/Tutorials/tutorials-sequential-agent.md index daafc0008..24b092a9c 100644 --- a/docs/docs/Tutorials/tutorials-sequential-agent.md +++ b/docs/docs/Tutorials/tutorials-sequential-agent.md @@ -1,6 +1,5 @@ --- title: Sequential tasks agent -sidebar_position: 4 slug: /tutorials-sequential-agent --- diff --git a/docs/docs/Tutorials/tutorials-travel-planning-agent.md b/docs/docs/Tutorials/tutorials-travel-planning-agent.md index 75045eb40..87b48a541 100644 --- a/docs/docs/Tutorials/tutorials-travel-planning-agent.md +++ b/docs/docs/Tutorials/tutorials-travel-planning-agent.md @@ -1,6 +1,5 @@ --- title: Travel planning agent -sidebar_position: 8 slug: /tutorials-travel-planning-agent --- diff --git a/docs/docs/Workspace/_category_.json b/docs/docs/Workspace/_category_.json deleted file mode 100644 index 8e39087ba..000000000 --- a/docs/docs/Workspace/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":4, "label":"Workspace"} \ No newline at end of file diff --git a/docs/docs/Workspace/workspace-api.md b/docs/docs/Workspace/workspace-api.md index b85283c47..91ef420db 100644 --- a/docs/docs/Workspace/workspace-api.md +++ b/docs/docs/Workspace/workspace-api.md @@ -1,6 +1,5 @@ --- title: API -sidebar_position: 2 slug: /workspace-api --- diff --git a/docs/docs/Workspace/workspace-logs.md b/docs/docs/Workspace/workspace-logs.md index 25313f091..b881ef17b 100644 --- a/docs/docs/Workspace/workspace-logs.md +++ b/docs/docs/Workspace/workspace-logs.md @@ -1,6 +1,5 @@ --- title: Logs -sidebar_position: 4 slug: /workspace-logs --- diff --git a/docs/docs/Workspace/workspace-overview.md b/docs/docs/Workspace/workspace-overview.md index d72859ae5..56f9a99e9 100644 --- a/docs/docs/Workspace/workspace-overview.md 
+++ b/docs/docs/Workspace/workspace-overview.md @@ -1,6 +1,5 @@ --- title: Workspace concepts -sidebar_position: 1 slug: /workspace-overview --- diff --git a/docs/docs/Workspace/workspace-playground.md b/docs/docs/Workspace/workspace-playground.md index cfd914bce..59a3a516e 100644 --- a/docs/docs/Workspace/workspace-playground.md +++ b/docs/docs/Workspace/workspace-playground.md @@ -1,6 +1,5 @@ --- title: Playground -sidebar_position: 2 slug: /workspace-playground --- diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 75ee52bc6..2765c01b3 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -15,6 +15,7 @@ const config = { baseUrl: "/", onBrokenLinks: "throw", onBrokenMarkdownLinks: "warn", + onBrokenAnchors: "warn", organizationName: "langflow-ai", projectName: "langflow", trailingSlash: false,