---
.dockerignore | 2 +-
.env.example | 7 +
.github/workflows/lint.yml | 8 +-
.../{pre-release.yml => pre-release-base.yml} | 13 +-
.github/workflows/pre-release-langflow.yml | 70 +
.../workflows/{test.yml => python_test.yml} | 10 +-
.github/workflows/release.yml | 2 +-
.github/workflows/typescript_test.yml | 149 +
.gitignore | 5 +
.vscode/launch.json | 26 +-
Makefile | 162 +-
README.md | 7 +-
base.Dockerfile | 2 +-
build_and_push.Dockerfile | 10 +-
build_and_push_base.Dockerfile | 91 +
deploy/base.Dockerfile | 2 +-
docs/docs/components/custom.mdx | 15 +-
docs/docs/components/data.mdx | 87 +
docs/docs/components/embeddings.mdx | 130 +-
docs/docs/components/experimental.mdx | 250 +
docs/docs/components/helpers.mdx | 127 +
docs/docs/components/inputs.mdx | 164 +
docs/docs/components/memories.mdx | 24 +-
.../components/{llms.mdx => model_specs.mdx} | 0
docs/docs/components/models.mdx | 346 +
docs/docs/components/outputs.mdx | 37 +
docs/docs/components/prompts.mdx | 2 +-
docs/docs/components/utilities.mdx | 20 +
docs/docs/components/vector-stores.mdx | 634 +-
docs/docs/components/wrappers.mdx | 20 -
docs/docs/examples/buffer-memory.mdx | 6 +
docs/docs/examples/conversation-chain.mdx | 7 +
docs/docs/examples/csv-loader.mdx | 6 +
docs/docs/examples/flow-runner.mdx | 28 +-
docs/docs/examples/how-upload-examples.mdx | 28 -
.../docs/examples/midjourney-prompt-chain.mdx | 46 -
docs/docs/examples/multiple-vectorstores.mdx | 58 -
docs/docs/examples/python-function.mdx | 6 +
docs/docs/examples/serp-api-tool.mdx | 6 +
.../docs/getting-started/basic-prompting.mdx | 0
.../docs/getting-started/blog-writer.mdx | 0
docs/docs/getting-started/cli.mdx | 44 +
docs/docs/getting-started/creating-flows.mdx | 38 -
.../docs/getting-started/document-qa.mdx | 0
.../getting-started/hugging-face-spaces.mdx | 20 -
docs/docs/getting-started/installation.md | 15 -
.../docs/getting-started/memory-chatbot.mdx | 0
.../docs/getting-started/rag-with-astradb.mdx | 195 +
docs/docs/guidelines/api.mdx | 12 +-
docs/docs/guidelines/async-api.mdx | 73 -
docs/docs/guidelines/components.mdx | 3 +-
docs/docs/guidelines/custom-component.mdx | 22 +-
docs/docs/guidelines/features.mdx | 25 +-
docs/docs/guidelines/login.mdx | 7 +-
docs/docs/guides/async-tasks.mdx | 44 -
docs/docs/guides/superuser.mdx | 7 -
docs/docs/index.mdx | 85 +-
.../docs/migration/api.mdx | 0
docs/docs/migration/compatibility.mdx | 44 +
.../component-status-and-data-passing.mdx | 0
.../connecting-output-components.mdx | 0
.../docs/migration/custom-component.mdx | 0
.../migration/experimental-components.mdx | 0
.../docs/migration/flow-of-data.mdx | 0
docs/docs/migration/global-variables.mdx | 65 +
docs/docs/migration/inputs-and-outputs.mdx | 36 +
.../docs/migration/multiple-flows.mdx | 0
.../new-categories-and-components.mdx | 0
.../migration/passing-tweaks-and-inputs.mdx | 0
.../renaming-and-editing-components.mdx | 0
.../sidebar-and-interaction-panel.mdx | 0
.../docs/migration/state-management.mdx | 0
.../docs/migration/supported-frameworks.mdx | 0
docs/docs/migration/text-and-record.mdx | 45 +
.../docs/whats-new/a-new-chapter-langflow.mdx | 96 +
docs/docs/whats-new/customization-control.mdx | 1 +
docs/docs/whats-new/debugging-reimagined.mdx | 1 +
.../whats-new/migrating-to-one-point-zero.mdx | 125 +
.../simplification-standardization.mdx | 1 +
docs/docusaurus.config.js | 5 +
docs/package-lock.json | 940 ++-
docs/package.json | 15 +-
docs/sidebars.js | 103 +-
docs/src/theme/DownloadableJsonFile.js | 29 +
docs/static/data/AstraDB-RAG-Flows.json | 3403 +++++++++
docs/static/img/add-new-variable.png | Bin 0 -> 49435 bytes
.../static/img/astra-configure-deployment.png | Bin 0 -> 207197 bytes
docs/static/img/astra-create-database.png | Bin 0 -> 37699 bytes
docs/static/img/astra-generate-token.png | Bin 0 -> 75966 bytes
docs/static/img/astra-ingestion-fields.png | Bin 0 -> 225568 bytes
docs/static/img/astra-ingestion-flow-dark.png | Bin 0 -> 87389 bytes
docs/static/img/astra-ingestion-flow.png | Bin 0 -> 81772 bytes
docs/static/img/astra-ingestion-run.png | Bin 0 -> 64198 bytes
docs/static/img/astra-rag-flow-dark.png | Bin 0 -> 164630 bytes
...rag-flow-interaction-panel-interaction.png | Bin 0 -> 361975 bytes
.../img/astra-rag-flow-interaction-panel.png | Bin 0 -> 168580 bytes
docs/static/img/astra-rag-flow-run.png | Bin 0 -> 194688 bytes
docs/static/img/astra-rag-flow.png | Bin 0 -> 152709 bytes
docs/static/img/chat-input-expanded.png | Bin 0 -> 92523 bytes
docs/static/img/chat-input.png | Bin 0 -> 54171 bytes
docs/static/img/create-variable-window.png | Bin 0 -> 82320 bytes
docs/static/img/drag-and-drop-canvas.png | Bin 0 -> 199514 bytes
docs/static/img/drag-and-drop-flow.png | Bin 0 -> 187938 bytes
docs/static/img/duplicate-space.png | Bin 0 -> 271831 bytes
.../img/interaction-panel-text-input.png | Bin 0 -> 120285 bytes
.../img/interaction-panel-with-chat-input.png | Bin 0 -> 127949 bytes
docs/static/img/ollama-gv.png | Bin 0 -> 50068 bytes
docs/static/img/prompt-with-template.png | Bin 0 -> 55738 bytes
docs/static/img/prompt.png | Bin 0 -> 41403 bytes
docs/static/img/text-input-expanded.png | Bin 0 -> 55994 bytes
docs/static/img/text-input.png | Bin 0 -> 39820 bytes
docs/static/json_files/SearchApi_Tool.json | 1748 ++++-
lcserve.Dockerfile | 16 -
poetry.lock | 2870 +++++---
pyproject.toml | 63 +-
scripts/setup/setup_env.sh | 9 +
scripts/setup/update_poetry.sh | 148 +
scripts/update_dependencies.py | 51 +
.../__init__.py => base/README.md} | 0
src/backend/{ => base}/langflow/__main__.py | 99 +-
src/backend/{ => base}/langflow/alembic.ini | 0
.../{ => base}/langflow/alembic/README | 0
.../{ => base}/langflow/alembic/env.py | 25 +-
.../langflow/alembic/script.py.mako | 2 +
.../006b3990db50_add_unique_constraints.py | 8 +-
.../alembic/versions/0b8757876a7c_.py | 0
..._replace_credential_table_with_variable.py | 65 +
.../alembic/versions/1ef9c4f3765d_.py | 8 +-
.../versions/260dbcc8b680_adds_tables.py | 49 +-
.../2ac71eb9c3ae_adds_credential_table.py | 4 +-
...fd30_add_icon_and_icon_bg_color_to_flow.py | 50 +
.../67cc006d50bf_add_profile_image_column.py | 0
.../versions/7843803a87b5_store_updates.py | 12 +-
...2acc8b2_adds_updated_at_and_folder_cols.py | 15 +-
.../b2fa308044b5_add_unique_constraints.py | 26 +-
.../versions/bc2f01c40e4a_new_fixes.py | 16 +-
...866d51fd2_change_columns_to_be_nullable.py | 2 +-
...5ee9749d1a6_user_id_can_be_null_in_flow.py | 8 +-
.../fd531f8868b1_fix_credential_table.py | 4 +-
.../{ => base}/langflow/api/__init__.py | 0
src/backend/{ => base}/langflow/api/router.py | 8 +-
src/backend/{ => base}/langflow/api/utils.py | 153 +-
.../{ => base}/langflow/api/v1/__init__.py | 8 +-
.../{ => base}/langflow/api/v1/api_key.py | 16 +-
src/backend/base/langflow/api/v1/base.py | 165 +
.../{ => base}/langflow/api/v1/callback.py | 56 +-
src/backend/base/langflow/api/v1/chat.py | 329 +
src/backend/base/langflow/api/v1/endpoints.py | 455 ++
src/backend/base/langflow/api/v1/files.py | 116 +
.../{ => base}/langflow/api/v1/flows.py | 64 +-
.../{ => base}/langflow/api/v1/login.py | 8 +-
src/backend/base/langflow/api/v1/monitor.py | 73 +
src/backend/base/langflow/api/v1/schemas.py | 325 +
.../{ => base}/langflow/api/v1/store.py | 0
.../{ => base}/langflow/api/v1/users.py | 0
src/backend/base/langflow/api/v1/validate.py | 81 +
src/backend/base/langflow/api/v1/variable.py | 113 +
.../langflow/base}/__init__.py | 0
.../langflow/base/agents}/__init__.py | 0
.../base/langflow/base/agents/agent.py | 75 +
src/backend/base/langflow/base/constants.py | 27 +
.../langflow/base/data}/__init__.py | 0
src/backend/base/langflow/base/data/utils.py | 141 +
.../langflow/base/io}/__init__.py | 0
src/backend/base/langflow/base/io/chat.py | 118 +
src/backend/base/langflow/base/io/text.py | 42 +
.../base/langflow/base/models/__init__.py | 3 +
.../base/langflow/base/models/model.py | 50 +
.../langflow/base/prompts}/__init__.py | 0
.../base/langflow/base/prompts/utils.py | 137 +
.../base/langflow/components/__init__.py | 17 +
.../components/agents/AgentInitializer.py | 3 +-
.../langflow/components/agents/CSVAgent.py | 34 +
.../langflow/components/agents/JsonAgent.py | 5 +-
.../agents/OpenAIConversationalAgent.py | 6 +-
.../langflow/components/agents/SQLAgent.py | 10 +-
.../components/agents/VectorStoreAgent.py | 6 +-
.../agents/VectorStoreRouterAgent.py | 10 +-
.../langflow/components/agents/XMLAgent.py | 89 +
.../langflow/components/agents/__init__.py | 19 +
.../components/chains/ConversationChain.py | 46 +
.../langflow/components/chains/LLMChain.py | 21 +-
.../components/chains/LLMCheckerChain.py | 31 +
.../components/chains/LLMMathChain.py | 26 +-
.../langflow/components/chains/RetrievalQA.py | 68 +
.../chains/RetrievalQAWithSourcesChain.py | 63 +
.../components/chains/SQLGenerator.py | 61 +
.../langflow/components/chains/__init__.py | 17 +
.../langflow/components/data/APIRequest.py | 121 +
.../langflow/components/data/Directory.py | 63 +
.../base/langflow/components/data/File.py | 48 +
.../base/langflow/components/data/URL.py | 27 +
.../base/langflow/components/data/__init__.py | 7 +
.../components/documentloaders}/__init__.py | 0
.../embeddings/AmazonBedrockEmbeddings.py | 12 +-
.../embeddings/AzureOpenAIEmbeddings.py | 7 +-
.../components/embeddings/CohereEmbeddings.py | 7 +-
.../embeddings/HuggingFaceEmbeddings.py | 11 +-
.../HuggingFaceInferenceAPIEmbeddings.py | 8 +-
.../components/embeddings/OllamaEmbeddings.py | 10 +-
.../components/embeddings/OpenAIEmbeddings.py | 49 +-
.../embeddings/VertexAIEmbeddings.py | 54 +-
.../components/embeddings/__init__.py | 19 +
.../experimental/ClearMessageHistory.py | 26 +
.../experimental/ExtractDataFromRecord.py | 45 +
.../components/experimental/FlowTool.py | 86 +
.../components/experimental/ListFlows.py | 21 +
.../components/experimental/Listen.py | 21 +
.../components/experimental/MergeRecords.py | 36 +
.../components/experimental/Notify.py | 41 +
.../components/experimental/PythonFunction.py | 25 +
.../components/experimental/RunFlow.py | 66 +
.../experimental/RunnableExecutor.py | 122 +
.../components/experimental/SQLExecutor.py | 69 +
.../components/experimental/SubFlow.py | 119 +
.../components/experimental/__init__.py | 28 +
.../components/helpers/CombineText.py | 29 +
.../components/helpers/CreateRecord.py | 81 +
.../components/helpers/CustomComponent.py | 16 +
.../components/helpers/DocumentToRecord.py | 22 +
.../components/helpers/IDGenerator.py | 31 +
.../components/helpers/MemoryComponent.py | 66 +
.../components/helpers/MessageHistory.py | 56 +
.../components/helpers/RecordsToText.py | 35 +
.../langflow/components/helpers/SplitText.py | 85 +
.../components/helpers/UpdateRecord.py | 39 +
.../langflow/components/helpers/__init__.py | 17 +
.../langflow/components/inputs/ChatInput.py | 37 +
.../base/langflow/components/inputs/Prompt.py | 33 +
.../langflow/components/inputs/TextInput.py | 32 +
.../langflow/components/inputs/__init__.py | 5 +
.../BingSearchAPIWrapper.py | 4 +-
.../GoogleSearchAPIWrapper.py | 3 +-
.../GoogleSerperAPIWrapper.py | 12 +-
.../JSONDocumentBuilder.py | 4 +-
.../langchain_utilities/SQLDatabase.py | 22 +
.../langchain_utilities}/SearchApi.py | 18 +-
.../SearxSearchWrapper.py | 6 +-
.../langchain_utilities}/SerpAPIWrapper.py | 3 +-
.../WikipediaAPIWrapper.py | 3 +-
.../WolframAlphaAPIWrapper.py | 3 +-
.../langflow/components/memories}/__init__.py | 0
.../model_specs/AmazonBedrockSpecs.py} | 6 +-
.../model_specs/AnthropicLLMSpecs.py} | 5 +-
.../components/model_specs/AnthropicSpecs.py} | 3 +-
.../model_specs/AzureChatOpenAISpecs.py} | 7 +-
.../BaiduQianfanChatEndpointsSpecs.py} | 4 +-
.../BaiduQianfanLLMEndpointsSpecs.py} | 4 +-
.../model_specs/ChatAnthropicSpecs.py} | 5 +-
.../model_specs/ChatLiteLLMSpecs.py} | 48 +-
.../model_specs/ChatOllamaEndpointSpecs.py} | 3 +-
.../model_specs/ChatOpenAISpecs.py} | 9 +-
.../model_specs/ChatVertexAISpecs.py} | 4 +-
.../components/model_specs/CohereSpecs.py} | 4 +-
.../model_specs/GoogleGenerativeAISpecs.py | 74 +
.../model_specs/HuggingFaceEndpointsSpecs.py} | 4 +-
.../components/model_specs/OllamaLLMSpecs.py} | 2 +-
.../components/model_specs/VertexAISpecs.py} | 7 +-
.../components/model_specs/__init__.py | 35 +
.../components/models/AmazonBedrockModel.py | 99 +
.../components/models/AnthropicModel.py | 106 +
.../components/models/AzureOpenAIModel.py | 121 +
.../models/BaiduQianfanChatModel.py | 123 +
.../components/models/ChatLiteLLMModel.py | 191 +
.../langflow/components/models/CohereModel.py | 73 +
.../models/GoogleGenerativeAIModel.py | 102 +
.../components/models/HuggingFaceModel.py | 74 +
.../langflow/components/models/OllamaModel.py | 300 +
.../langflow/components/models/OpenAIModel.py | 106 +
.../components/models/VertexAiModel.py | 126 +
.../langflow/components/models/__init__.py | 26 +
.../langflow/components/outputs/ChatOutput.py | 29 +
.../langflow/components/outputs/TextOutput.py | 28 +
.../langflow/components/outputs/__init__.py | 4 +
.../components/retrievers/AmazonKendra.py | 7 +-
.../components/retrievers/MetalRetriever.py | 6 +-
.../retrievers/MultiQueryRetriever.py | 3 +-
.../retrievers/VectaraSelfQueryRetriver.py | 14 +-
.../retrievers/VectorStoreRetriever.py | 2 +-
.../components/retrievers/__init__.py | 13 +
.../textsplitters/CharacterTextSplitter.py | 25 +-
.../LanguageRecursiveTextSplitter.py | 25 +-
.../RecursiveCharacterTextSplitter.py | 32 +-
.../components/textsplitters/__init__.py | 9 +
.../components/toolkits/JsonToolkit.py | 5 +-
.../langflow/components/toolkits/Metaphor.py | 2 +-
.../components/toolkits/OpenAPIToolkit.py | 3 +-
.../components/toolkits/VectorStoreInfo.py | 2 +-
.../toolkits/VectorStoreRouterToolkit.py | 7 +-
.../components/toolkits/VectorStoreToolkit.py | 14 +-
.../langflow/components/toolkits/__init__.py | 15 +
.../components/tools/RetrieverTool.py | 2 +-
.../components/tools/SearchAPITool.py | 37 +
.../langflow/components/tools/SearchApi.py | 53 +
.../langflow/components/tools/__init__.py | 5 +
.../components/vectorsearch/AstraDBSearch.py | 148 +
.../components/vectorsearch/ChromaSearch.py | 111 +
.../components/vectorsearch/FAISSSearch.py | 48 +
.../vectorsearch/MongoDBAtlasVectorSearch.py | 57 +
.../components/vectorsearch/PineconeSearch.py | 76 +
.../components/vectorsearch/QdrantSearch.py | 98 +
.../components/vectorsearch/RedisSearch.py | 82 +
.../vectorsearch/SupabaseVectorStoreSearch.py | 54 +
.../components/vectorsearch/VectaraSearch.py | 66 +
.../components/vectorsearch/WeaviateSearch.py | 86 +
.../components/vectorsearch/__init__.py | 25 +
.../components/vectorsearch/pgvectorSearch.py | 74 +
.../components/vectorstores/AstraDB.py | 158 +
.../components/vectorstores/Chroma.py | 45 +-
.../langflow/components/vectorstores/FAISS.py | 46 +
.../vectorstores/MongoDBAtlasVector.py | 67 +
.../components/vectorstores/Pinecone.py | 36 +-
.../components/vectorstores/Qdrant.py | 27 +-
.../langflow/components/vectorstores/Redis.py | 18 +-
.../vectorstores/SupabaseVectorStore.py | 18 +-
.../components/vectorstores/Vectara.py | 25 +-
.../components/vectorstores/Weaviate.py | 33 +-
.../components/vectorstores/__init__.py | 26 +
.../components/vectorstores/base/__init__.py | 3 +
.../components/vectorstores/base/model.py | 47 +
.../components/vectorstores/pgvector.py | 15 +-
src/backend/{ => base}/langflow/config.yaml | 70 +-
.../langflow/core}/__init__.py | 0
.../{ => base}/langflow/core/celery_app.py | 0
.../{ => base}/langflow/core/celeryconfig.py | 0
.../__init__.py => base/langflow/custom.py} | 5 +-
.../langflow/field_typing/__init__.py | 4 +
.../langflow/field_typing/constants.py | 7 +-
.../base/langflow/field_typing/range_spec.py | 30 +
.../{ => base}/langflow/graph/__init__.py | 6 +-
.../langflow/graph/edge}/__init__.py | 0
.../{ => base}/langflow/graph/edge/base.py | 91 +-
.../base/langflow/graph/edge/schema.py | 34 +
src/backend/base/langflow/graph/edge/utils.py | 19 +
.../langflow/graph/graph}/__init__.py | 0
src/backend/base/langflow/graph/graph/base.py | 1220 ++++
.../langflow/graph/graph/constants.py | 14 +-
.../graph/graph/runnable_vertices_manager.py | 111 +
.../langflow/graph/graph/state_manager.py | 36 +
.../{ => base}/langflow/graph/graph/utils.py | 46 +-
src/backend/base/langflow/graph/schema.py | 55 +
.../{ => base}/langflow/graph/utils.py | 21 +
.../langflow/graph/vertex}/__init__.py | 0
.../base/langflow/graph/vertex/base.py | 749 ++
.../langflow/graph/vertex/constants.py | 0
.../base/langflow/graph/vertex/types.py | 491 ++
.../base/langflow/graph/vertex/utils.py | 65 +
src/backend/base/langflow/helpers/__init__.py | 3 +
src/backend/base/langflow/helpers/flow.py | 201 +
src/backend/base/langflow/helpers/record.py | 41 +
.../langflow/initial_setup}/__init__.py | 0
.../base/langflow/initial_setup/setup.py | 239 +
.../Basic Prompting (Hello, world!).json | 888 +++
.../Langflow Blog Writter.json | 989 +++
.../Langflow Document QA.json | 1031 +++
.../Langflow Memory Conversation.json | 1274 ++++
.../Langflow Prompt Chaining.json | 1773 +++++
.../VectorStore-RAG-Flows.json | 3403 +++++++++
.../langflow/interface}/__init__.py | 0
.../langflow/interface/agents/__init__.py | 0
.../langflow/interface/agents/base.py | 5 +-
.../langflow/interface/agents/custom.py | 6 +-
.../langflow/interface/agents/prebuilt.py | 0
.../{ => base}/langflow/interface/base.py | 8 +-
.../langflow/interface/chains/__init__.py | 0
.../langflow/interface/chains/base.py | 16 +-
.../langflow/interface/chains/custom.py | 7 +-
.../langflow/interface/custom/__init__.py | 0
.../langflow/interface/custom/attributes.py | 43 +
.../langflow/interface/custom/base.py | 10 +-
.../interface/custom/code_parser/__init__.py | 0
.../custom/code_parser/code_parser.py | 38 +-
.../interface/custom/code_parser/utils.py | 11 +-
.../custom/custom_component/__init__.py | 0
.../custom/custom_component/component.py | 57 +-
.../custom_component/custom_component.py | 459 ++
.../custom/directory_reader/__init__.py | 0
.../directory_reader/directory_reader.py | 47 +-
.../custom/directory_reader/utils.py | 19 +-
.../base/langflow/interface/custom/eval.py | 12 +
.../langflow/interface/custom/schema.py | 9 +
.../langflow/interface/custom/utils.py | 214 +-
.../langflow/interface/custom_lists.py | 2 +-
.../interface/document_loaders}/__init__.py | 0
.../interface/document_loaders/base.py | 6 +-
.../interface/embeddings}/__init__.py | 0
.../langflow/interface/embeddings/base.py | 4 +-
.../langflow/interface/importing/__init__.py | 2 -
.../langflow/interface/importing/utils.py | 26 +-
.../interface/initialize}/__init__.py | 0
.../langflow/interface/initialize/llm.py | 0
.../langflow/interface/initialize/loading.py | 113 +-
.../langflow/interface/initialize/utils.py | 0
.../interface/initialize/vector_store.py | 0
.../{ => base}/langflow/interface/listing.py | 2 +-
.../langflow/interface/llms/__init__.py | 0
.../langflow/interface/llms/base.py | 6 +-
.../langflow/interface/memories/__init__.py | 0
.../langflow/interface/memories/base.py | 6 +-
.../interface/output_parsers}/__init__.py | 0
.../langflow/interface/output_parsers/base.py | 3 +-
.../langflow/interface/prompts/__init__.py | 0
.../langflow/interface/prompts/base.py | 5 +-
.../langflow/interface/prompts/custom.py | 0
.../interface/retrievers}/__init__.py | 0
.../langflow/interface/retrievers/base.py | 3 +-
.../{ => base}/langflow/interface/run.py | 16 +-
.../interface/text_splitters/__init__.py | 0
.../langflow/interface/text_splitters/base.py | 6 +-
.../langflow/interface/toolkits}/__init__.py | 0
.../langflow/interface/toolkits/base.py | 20 +-
.../langflow/interface/toolkits/custom.py | 0
.../langflow/interface/tools/__init__.py | 0
.../langflow/interface/tools/base.py | 18 +-
.../langflow/interface/tools/constants.py | 4 +-
.../langflow/interface/tools/custom.py | 4 +-
.../langflow/interface/tools/util.py | 0
.../{ => base}/langflow/interface/types.py | 24 +-
.../langflow/interface/utilities}/__init__.py | 0
.../langflow/interface/utilities/base.py | 2 +-
.../{ => base}/langflow/interface/utils.py | 38 +-
.../interface/vector_store}/__init__.py | 0
.../langflow/interface/vector_store/base.py | 0
.../langflow/interface/wrappers}/__init__.py | 0
.../langflow/interface/wrappers/base.py | 20 +-
.../langflow/legacy_custom}/__init__.py | 0
.../langflow/legacy_custom}/customs.py | 3 +-
src/backend/base/langflow/load.py | 1 +
src/backend/{ => base}/langflow/main.py | 37 +-
src/backend/base/langflow/memory.py | 100 +
.../base/langflow/processing/__init__.py | 0
.../{ => base}/langflow/processing/base.py | 9 +-
src/backend/base/langflow/processing/load.py | 67 +
.../base/langflow/processing/process.py | 302 +
src/backend/base/langflow/py.typed | 0
src/backend/base/langflow/schema/__init__.py | 4 +
src/backend/base/langflow/schema/dotdict.py | 71 +
src/backend/base/langflow/schema/schema.py | 150 +
src/backend/{ => base}/langflow/server.py | 10 +-
.../{ => base}/langflow/services/__init__.py | 0
.../base/langflow/services/auth/__init__.py | 0
.../langflow/services/auth/factory.py | 2 +-
.../langflow/services/auth/service.py | 3 +-
.../langflow/services/auth/utils.py | 16 +-
src/backend/base/langflow/services/base.py | 29 +
.../base/langflow/services/cache/__init__.py | 12 +
.../base/langflow/services/cache/base.py | 168 +
.../langflow/services/cache/factory.py | 10 +-
.../langflow/services/cache/service.py | 117 +-
.../langflow/services/cache/utils.py | 0
.../base/langflow/services/chat/__init__.py | 0
.../langflow/services/chat/cache.py | 3 +-
.../langflow/services/chat/config.py | 0
.../langflow/services/chat/factory.py | 0
.../base/langflow/services/chat/service.py | 39 +
.../langflow/services/chat/utils.py | 3 +-
.../langflow/services/database/__init__.py | 0
.../langflow/services/database/factory.py | 2 +
.../services/database/models/__init__.py | 6 +
.../database/models/api_key/__init__.py | 0
.../services/database/models/api_key/crud.py | 0
.../services/database/models/api_key/model.py | 0
.../langflow/services/database/models/base.py | 0
.../services/database/models/flow/__init__.py | 0
.../services/database/models/flow/model.py | 127 +
.../services/database/models/user/__init__.py | 0
.../services/database/models/user/crud.py | 5 +-
.../services/database/models/user/model.py | 4 +-
.../database/models/variable/__init__.py | 3 +
.../database/models/variable/model.py | 49 +
.../langflow/services/database/service.py | 19 +-
.../langflow/services/database/utils.py | 0
src/backend/base/langflow/services/deps.py | 219 +
src/backend/base/langflow/services/factory.py | 83 +
.../{ => base}/langflow/services/manager.py | 59 +-
.../langflow/services/monitor/__init__.py | 0
.../base/langflow/services/monitor/factory.py | 13 +
.../base/langflow/services/monitor/schema.py | 142 +
.../base/langflow/services/monitor/service.py | 167 +
.../base/langflow/services/monitor/utils.py | 159 +
.../langflow/services/plugins/__init__.py | 0
.../langflow/services/plugins/base.py | 0
.../langflow/services/plugins/factory.py | 0
.../services/plugins/langfuse_plugin.py | 3 +-
.../langflow/services/plugins/service.py | 3 +-
.../{ => base}/langflow/services/schema.py | 8 +-
.../langflow/services/session/__init__.py | 0
.../langflow/services/session/factory.py | 7 +-
.../langflow/services/session/service.py | 33 +-
.../langflow/services/session/utils.py | 0
.../langflow/services/settings/__init__.py | 0
.../langflow/services/settings/auth.py | 34 +-
.../langflow/services/settings/base.py | 6 +-
.../langflow/services/settings/constants.py | 0
.../langflow/services/settings/factory.py | 3 +-
.../langflow/services/settings/manager.py} | 8 +-
.../langflow/services/settings/service.py | 44 +
.../langflow/services/settings/utils.py | 0
.../base/langflow/services/socket/__init__.py | 0
.../base/langflow/services/socket/factory.py | 17 +
.../base/langflow/services/socket/service.py | 86 +
.../base/langflow/services/socket/utils.py | 103 +
.../base/langflow/services/state/__init__.py | 0
.../base/langflow/services/state/factory.py | 13 +
.../base/langflow/services/state/service.py | 74 +
.../langflow/services/storage/__init__.py | 0
.../langflow/services/storage/constants.py | 28 +
.../base/langflow/services/storage/factory.py | 29 +
.../base/langflow/services/storage/local.py | 95 +
.../base/langflow/services/storage/s3.py | 89 +
.../base/langflow/services/storage/service.py | 42 +
.../base/langflow/services/storage/utils.py | 5 +
.../base/langflow/services/store/__init__.py | 0
.../langflow/services/store/exceptions.py | 0
.../langflow/services/store/factory.py | 3 +-
.../langflow/services/store/schema.py | 0
.../langflow/services/store/service.py | 0
.../langflow/services/store/utils.py | 0
.../base/langflow/services/task/__init__.py | 0
.../services/task/backends/__init__.py | 0
.../langflow/services/task/backends/anyio.py | 0
.../langflow/services/task/backends/base.py | 2 +
.../langflow/services/task/backends/celery.py | 3 +
.../langflow/services/task/factory.py | 2 +-
.../langflow/services/task/service.py | 38 +-
.../langflow/services/task/utils.py | 0
.../{ => base}/langflow/services/utils.py | 90 +-
.../langflow/services/variable/__init__.py | 0
.../langflow/services/variable}/factory.py | 8 +-
.../langflow/services/variable/service.py | 66 +
src/backend/{ => base}/langflow/settings.py | 0
.../base/langflow/template/__init__.py | 0
.../base/langflow/template/field/__init__.py | 0
.../langflow/template/field/base.py | 48 +-
.../base/langflow/template/field/prompt.py | 14 +
.../template/frontend_node/__init__.py | 8 +-
.../langflow/template/frontend_node/agents.py | 2 +
.../langflow/template/frontend_node/base.py | 32 +-
.../langflow/template/frontend_node/chains.py | 36 +-
.../template/frontend_node/constants.py | 10 -
.../frontend_node/custom_components.py | 5 +-
.../template/frontend_node/documentloaders.py | 0
.../template/frontend_node/embeddings.py | 0
.../frontend_node/formatter/__init__.py | 0
.../template/frontend_node/formatter/base.py | 3 +-
.../formatter/field_formatters.py | 0
.../langflow/template/frontend_node/llms.py | 0
.../template/frontend_node/memories.py | 8 +-
.../template/frontend_node/output_parsers.py | 2 +
.../template/frontend_node/prompts.py | 0
.../template/frontend_node/retrievers.py | 0
.../template/frontend_node/textsplitters.py | 3 +-
.../langflow/template/frontend_node/tools.py | 0
.../template/frontend_node/utilities.py | 2 +-
.../template/frontend_node/vectorstores.py | 0
.../langflow/template/template/__init__.py | 0
.../langflow/template/template/base.py | 5 +-
src/backend/base/langflow/utils/__init__.py | 0
.../{ => base}/langflow/utils/constants.py | 4 +-
.../{ => base}/langflow/utils/lazy_load.py | 0
.../{ => base}/langflow/utils/logger.py | 4 +-
.../{ => base}/langflow/utils/payload.py | 0
src/backend/base/langflow/utils/schemas.py | 55 +
src/backend/{ => base}/langflow/utils/util.py | 40 +-
.../{ => base}/langflow/utils/validate.py | 7 +-
src/backend/base/langflow/worker.py | 37 +
src/backend/base/poetry.lock | 6271 +++++++++++++++++
src/backend/base/pyproject.toml | 120 +
src/backend/langflow/__init__.py | 16 -
src/backend/langflow/api/v1/base.py | 160 -
src/backend/langflow/api/v1/chat.py | 245 -
src/backend/langflow/api/v1/credential.py | 86 -
src/backend/langflow/api/v1/endpoints.py | 394 --
src/backend/langflow/api/v1/schemas.py | 214 -
src/backend/langflow/api/v1/validate.py | 120 -
.../langflow/components/agents/CSVAgent.py | 23 -
.../components/chains/ConversationChain.py | 29 -
.../components/chains/LLMCheckerChain.py | 22 -
.../components/chains/PromptRunner.py | 28 -
.../langflow/components/chains/RetrievalQA.py | 39 -
.../chains/RetrievalQAWithSourcesChain.py | 42 -
.../components/chains/SQLDatabaseChain.py | 25 -
.../custom_components/CustomComponent.py | 12 -
.../documentloaders/DirectoryLoader.py | 42 -
.../components/documentloaders/FileLoader.py | 113 -
.../components/documentloaders/UrlLoader.py | 47 -
.../langflow/components/llms/CTransformers.py | 33 -
.../components/llms/GoogleGenerativeAI.py | 72 -
.../langflow/components/llms/LlamaCpp.py | 129 -
.../components/utilities/GetRequest.py | 74 -
.../components/utilities/PostRequest.py | 77 -
.../components/utilities/UpdateRequest.py | 88 -
.../langflow/components/vectorstores/FAISS.py | 26 -
.../vectorstores/MongoDBAtlasVectorSearch.py | 48 -
.../langflow/field_typing/range_spec.py | 21 -
src/backend/langflow/graph/graph/base.py | 255 -
src/backend/langflow/graph/vertex/base.py | 378 -
src/backend/langflow/graph/vertex/types.py | 302 -
src/backend/langflow/graph/vertex/utils.py | 5 -
.../custom_component/custom_component.py | 238 -
src/backend/langflow/processing/load.py | 52 -
src/backend/langflow/processing/process.py | 299 -
src/backend/langflow/services/base.py | 12 -
.../langflow/services/cache/__init__.py | 9 -
src/backend/langflow/services/cache/base.py | 98 -
src/backend/langflow/services/chat/service.py | 260 -
.../langflow/services/credentials/service.py | 37 -
.../services/database/models/__init__.py | 6 -
.../database/models/component/__init__.py | 3 -
.../database/models/component/model.py | 29 -
.../database/models/credential/__init__.py | 3 -
.../database/models/credential/model.py | 43 -
.../database/models/credential/schema.py | 8 -
.../services/database/models/flow/model.py | 73 -
src/backend/langflow/services/deps.py | 63 -
src/backend/langflow/services/factory.py | 12 -
src/backend/langflow/utils/types.py | 2 -
src/backend/langflow/version/__init__.py | 1 +
src/backend/langflow/version/version.py | 7 +
src/backend/langflow/worker.py | 75 -
src/frontend/.eslintrc.json | 29 +
src/frontend/.github/workflows/playwright.yml | 27 -
src/frontend/.gitignore | 4 +
src/frontend/harFiles/backend_12112023.har | 599 --
src/frontend/harFiles/langflow.har | 944 ++-
src/frontend/index.html | 21 +-
src/frontend/package-lock.json | 4942 ++++++++-----
src/frontend/package.json | 28 +-
src/frontend/playwright-report/index.html | 18 -
src/frontend/playwright.config.ts | 67 +-
src/frontend/run-tests.sh | 112 +-
src/frontend/src/App.css | 9 +
src/frontend/src/App.tsx | 246 +-
.../components/parameterComponent/index.tsx | 350 +-
.../src/CustomNodes/GenericNode/index.tsx | 505 +-
.../src/alerts/alertDropDown/index.tsx | 10 +-
.../src/assets/Gooey Ring-5s-271px.svg | 40 -
src/frontend/src/assets/froze-flow.png | Bin 11568 -> 0 bytes
.../src/assets/undraw_blog_post_re_fy5x.svg | 1 +
.../src/assets/undraw_chat_bot_re_e2gj.svg | 1 +
.../src/assets/undraw_cloud_docs_re_xjht.svg | 1 +
.../assets/undraw_design_components_9vy6.svg | 1 +
.../assets/undraw_mobile_messages_re_yx8w.svg | 1 +
.../undraw_real_time_analytics_re_yliv.svg | 1 +
.../src/assets/undraw_short_bio_re_fmx0.svg | 1 +
.../undraw_team_collaboration_re_ow29.svg | 1 +
.../assets/undraw_transfer_files_re_a2a9.svg | 1 +
.../components/AccordionComponent/index.tsx | 9 +-
.../components/CrashErrorComponent/index.tsx | 4 +-
.../DropdownButtonComponent/index.tsx | 39 +-
.../EditFlowSettingsComponent/index.tsx | 2 +-
.../LightTooltipComponent/index.tsx | 17 -
.../src/components/LoadingSpinner/index.tsx | 3 -
.../src/components/RadialProgress/index.tsx | 18 -
.../ReactTooltipComponent/index.tsx | 45 -
.../components/ShadTooltipComponent/index.tsx | 3 +-
.../src/components/TooltipComponent/index.tsx | 14 -
.../components/ViewTriggers/chat/index.tsx | 31 +
.../addNewVariableButton.tsx | 99 +
.../src/components/cardComponent/index.tsx | 8 +-
.../chatComponent/buildTrigger/index.tsx | 189 -
.../chatComponent/chatTrigger/index.tsx | 71 -
.../src/components/chatComponent/index.tsx | 172 +-
.../components/codeAreaComponent/index.tsx | 4 +
.../components/codeTabsComponent/index.tsx | 114 +-
.../components/dropdownComponent/index.tsx | 211 +-
.../src/components/exampleComponent/index.tsx | 100 +
.../components/fetchErrorComponent/index.tsx | 47 +-
.../components/genericIconComponent/index.tsx | 77 +-
.../components/menuBar/index.tsx | 126 +-
.../src/components/headerComponent/index.tsx | 13 +-
.../src/components/inputComponent/index.tsx | 247 +-
.../components/inputFileComponent/index.tsx | 12 +-
.../components/inputGlobalComponent/index.tsx | 134 +
.../src/components/intComponent/index.tsx | 9 +-
.../components/keypairListComponent/index.tsx | 11 +-
.../src/components/promptComponent/index.tsx | 1 +
.../components/stackedComponents/index.tsx | 30 -
.../components/textAreaComponent/index.tsx | 73 +-
.../components/textInputComponent/index.tsx | 15 +
.../components/textOutputComponent/index.tsx | 15 +
.../src/components/toggleComponent/index.tsx | 59 -
.../components/toggleShadComponent/index.tsx | 2 +
src/frontend/src/components/ui/accordion.tsx | 2 +-
src/frontend/src/components/ui/button.tsx | 21 +-
src/frontend/src/components/ui/checkmark.tsx | 31 +
src/frontend/src/components/ui/command.tsx | 2 +-
.../components/ui/dialog-with-no-close.tsx | 119 +
src/frontend/src/components/ui/dialog.tsx | 2 +-
src/frontend/src/components/ui/popover.tsx | 19 +-
.../src/components/ui/refreshButton.tsx | 58 +
.../src/components/ui/rename-label.tsx | 3 +
.../src/components/ui/select-custom.tsx | 2 +-
src/frontend/src/components/ui/xmark.tsx | 45 +
.../src/constants/alerts_constants.tsx | 60 +
src/frontend/src/constants/constants.ts | 81 +-
src/frontend/src/constants/enums.ts | 13 +
src/frontend/src/contexts/authContext.tsx | 3 +-
src/frontend/src/controllers/API/api.tsx | 100 +-
src/frontend/src/controllers/API/index.ts | 111 +-
src/frontend/src/icons/AstraDB/AstraDB.jsx | 28 +
src/frontend/src/icons/AstraDB/Favicon.svg | 12 +
src/frontend/src/icons/AstraDB/index.tsx | 9 +
src/frontend/src/icons/Azure/Azure.jsx | 61 +
src/frontend/src/icons/Azure/index.tsx | 8 +
.../BotMessageSquare/BotMessageSquare.jsx | 23 +
.../src/icons/BotMessageSquare/index.tsx | 9 +
.../GoogleGenerativeAI/Google Gemini icon.svg | 1 +
.../icons/GoogleGenerativeAI/GoogleGemini.jsx | 28 +
.../src/icons/GoogleGenerativeAI/index.tsx | 9 +
src/frontend/src/icons/Ollama/Ollama.jsx | 64 +
src/frontend/src/icons/Ollama/Ollama.svg | 1 +
src/frontend/src/icons/Ollama/index.tsx | 9 +
src/frontend/src/icons/Postgres/Postgres.jsx | 67 +
src/frontend/src/icons/Postgres/Postgres.svg | 1 +
src/frontend/src/icons/Postgres/index.tsx | 9 +
src/frontend/src/icons/Python/Python.jsx | 158 +
src/frontend/src/icons/Python/Python.svg | 41 +
src/frontend/src/icons/Python/index.tsx | 9 +
.../src/icons/QianFanChat/QianFanChat.jsx | 23 +
.../src/icons/QianFanChat/QianFanChat.svg | 1 +
src/frontend/src/icons/QianFanChat/index.tsx | 9 +
src/frontend/src/icons/Redis/Redis.jsx | 67 +
src/frontend/src/icons/Redis/Redis.svg | 1 +
src/frontend/src/icons/Redis/index.tsx | 8 +
src/frontend/src/index.tsx | 9 +-
src/frontend/src/modals/ApiModal/index.tsx | 27 +-
.../src/modals/ConfirmationModal/index.tsx | 3 +-
.../modals/DeleteConfirmationModal/index.tsx | 28 +-
.../src/modals/EditNodeModal/index.tsx | 72 +-
.../components/FileInput/index.tsx | 151 +
.../IOModal/components/IOFieldView/index.tsx | 117 +
.../components/chatView/chatInput/index.tsx | 140 +
.../chatView}/chatMessage/codeBlock/index.tsx | 6 +-
.../components/chatView/chatMessage/index.tsx | 324 +
.../chatView}/fileComponent/index.tsx | 4 +-
.../IOModal/components/chatView/index.tsx | 216 +
src/frontend/src/modals/IOModal/index.tsx | 405 ++
.../components/NewFlowCardComponent/index.tsx | 31 +
.../components/undrawCards/index.tsx | 129 +
.../src/modals/NewFlowModal/index.tsx | 77 +
.../src/modals/SecretKeyModal/index.tsx | 3 +-
.../src/modals/StoreApiKeyModal/index.tsx | 22 +-
src/frontend/src/modals/baseModal/index.tsx | 72 +-
.../src/modals/codeAreaModal/index.tsx | 174 +-
src/frontend/src/modals/exportModal/index.tsx | 15 +-
.../src/modals/formModal/chatInput/index.tsx | 111 -
.../modals/formModal/chatMessage/index.tsx | 221 -
src/frontend/src/modals/formModal/index.tsx | 635 --
.../src/modals/genericModal/index.tsx | 41 +-
src/frontend/src/modals/shareModal/index.tsx | 16 +-
.../src/pages/AdminPage/LoginPage/index.tsx | 3 +-
src/frontend/src/pages/AdminPage/index.tsx | 28 +-
src/frontend/src/pages/ApiKeysPage/index.tsx | 8 +-
.../components/DisclosureComponent/index.tsx | 11 +-
.../components/PageComponent/index.tsx | 302 +-
.../ParentDisclosureComponent/index.tsx | 41 +
.../extraSidebarComponent/index.tsx | 352 +-
.../sideBarDraggableComponent/index.tsx | 1 +
.../extraSidebarComponent/utils.tsx | 26 +
.../components/nodeToolbarComponent/index.tsx | 573 +-
.../toolbarSelectItem/index.tsx | 51 +
src/frontend/src/pages/FlowPage/index.tsx | 18 +-
.../MainPage/components/components/index.tsx | 18 +-
src/frontend/src/pages/MainPage/index.tsx | 24 +-
.../src/pages/ProfileSettingsPage/index.tsx | 14 +-
src/frontend/src/pages/StorePage/index.tsx | 27 +-
.../src/pages/deleteAccountPage/index.tsx | 1 -
src/frontend/src/pages/loginPage/index.tsx | 3 +-
src/frontend/src/pages/signUpPage/index.tsx | 3 +-
src/frontend/src/stores/alertStore.ts | 138 +-
src/frontend/src/stores/darkStore.tsx | 27 +-
src/frontend/src/stores/flowStore.ts | 390 +-
src/frontend/src/stores/flowsManagerStore.ts | 49 +-
src/frontend/src/stores/globalVariables.ts | 34 +
src/frontend/src/stores/shortcuts.ts | 15 +
src/frontend/src/stores/typesStore.ts | 4 -
src/frontend/src/style/applies.css | 90 +-
src/frontend/src/style/index.css | 8 +
src/frontend/src/types/api/index.ts | 51 +
src/frontend/src/types/chat/index.ts | 24 +-
src/frontend/src/types/components/index.ts | 130 +-
src/frontend/src/types/flow/index.ts | 15 +-
src/frontend/src/types/store/index.ts | 5 +
src/frontend/src/types/zustand/alert/index.ts | 9 +-
src/frontend/src/types/zustand/flow/index.ts | 84 +-
.../src/types/zustand/flowIOStore/index.ts | 21 +
.../src/types/zustand/flowsManager/index.ts | 9 +-
.../types/zustand/globalVariables/index.ts | 10 +
src/frontend/src/utils/buildUtils.ts | 257 +
src/frontend/src/utils/parameterUtils.ts | 45 +
src/frontend/src/utils/reactflowUtils.ts | 151 +-
src/frontend/src/utils/storeUtils.ts | 27 +-
src/frontend/src/utils/styleUtils.ts | 156 +-
src/frontend/src/utils/utils.ts | 183 +-
src/frontend/tailwind.config.js | 14 +-
src/frontend/tests/custom_component_full.ts | 2 +-
.../tests/end-to-end/assets/ChatTest.json | 358 +
.../assets/collection.json | 295 +-
.../assets/flow.json | 0
.../end-to-end/assets/flow_group_test.json | 532 ++
.../assets/flowtest.json | 39 +-
.../auto_login.spec.ts | 30 +-
src/frontend/tests/end-to-end/chat_io.spec.ts | 46 +
.../end-to-end/codeAreaModalComponent.spec.ts | 142 +-
.../end-to-end/curl_api_generation.spec.ts | 17 +
.../dragAndDrop.spec.ts | 23 +-
.../end-to-end/dropdownComponent.spec.ts | 260 +
.../floatComponent.spec.ts | 44 +-
.../flowPage.spec.ts | 23 +-
src/frontend/tests/end-to-end/group.spec.ts | 29 +
.../inputComponent.spec.ts | 74 +-
.../tests/end-to-end/intComponent.spec.ts | 203 +
.../keyPairListComponent.spec.ts | 83 +-
.../end-to-end/langflowShortcuts.spec.ts | 90 +
.../nestedComponent.spec.ts | 195 +-
.../end-to-end/promptModalComponent.spec.ts | 41 +-
.../end-to-end/python_api_generation.spec.ts | 17 +
.../saveComponents.spec.ts | 52 +-
.../toggleComponent.spec.ts | 78 +-
.../tests/end-to-end/tweaks_test.spec.ts | 38 +
src/frontend/tests/globalTeardown.ts | 25 +
.../tests/onlyFront/dropdownComponent.spec.ts | 107 -
src/frontend/tests/onlyFront/group.spec.ts | 84 -
.../tests/onlyFront/intComponent.spec.ts | 124 -
src/frontend/tests/onlyFront/login.spec.ts | 121 -
tests/conftest.py | 131 +-
tests/data/ChatInputTest.json | 918 +++
tests/data/TwoOutputsTest.json | 1024 +++
tests/data/component.py | 2 +-
tests/data/component_with_templatefield.py | 2 +-
tests/test_api_key.py | 2 +-
tests/test_cache.py | 3 +-
tests/test_cli.py | 4 +-
tests/test_custom_component.py | 88 +-
tests/test_custom_types.py | 4 +-
tests/test_data_components.py | 166 +
tests/test_database.py | 43 +-
tests/test_embeddings_template.py | 59 -
tests/test_endpoints.py | 760 +-
tests/test_files.py | 104 +
tests/test_graph.py | 183 +-
tests/test_helper_components.py | 80 +
tests/test_initial_setup.py | 92 +
tests/test_loading.py | 26 +-
tests/test_login.py | 6 +-
tests/test_process.py | 73 +-
tests/test_prompts_template.py | 11 -
tests/test_record.py | 139 +
tests/test_setup_superuser.py | 9 +-
tests/test_user.py | 25 +-
tests/test_websocket.py | 50 -
tests/text_experimental_components.py | 15 +
853 files changed, 59936 insertions(+), 15456 deletions(-)
rename .github/workflows/{pre-release.yml => pre-release-base.yml} (86%)
create mode 100644 .github/workflows/pre-release-langflow.yml
rename .github/workflows/{test.yml => python_test.yml} (76%)
create mode 100644 .github/workflows/typescript_test.yml
create mode 100644 build_and_push_base.Dockerfile
create mode 100644 docs/docs/components/data.mdx
create mode 100644 docs/docs/components/experimental.mdx
create mode 100644 docs/docs/components/helpers.mdx
create mode 100644 docs/docs/components/inputs.mdx
rename docs/docs/components/{llms.mdx => model_specs.mdx} (100%)
create mode 100644 docs/docs/components/models.mdx
create mode 100644 docs/docs/components/outputs.mdx
delete mode 100644 docs/docs/components/wrappers.mdx
delete mode 100644 docs/docs/examples/how-upload-examples.mdx
delete mode 100644 docs/docs/examples/midjourney-prompt-chain.mdx
delete mode 100644 docs/docs/examples/multiple-vectorstores.mdx
rename src/backend/langflow/components/agents/__init__.py => docs/docs/getting-started/basic-prompting.mdx (100%)
rename src/backend/langflow/components/chains/__init__.py => docs/docs/getting-started/blog-writer.mdx (100%)
create mode 100644 docs/docs/getting-started/cli.mdx
delete mode 100644 docs/docs/getting-started/creating-flows.mdx
rename src/backend/langflow/components/custom_components/__init__.py => docs/docs/getting-started/document-qa.mdx (100%)
delete mode 100644 docs/docs/getting-started/hugging-face-spaces.mdx
delete mode 100644 docs/docs/getting-started/installation.md
rename src/backend/langflow/components/documentloaders/__init__.py => docs/docs/getting-started/memory-chatbot.mdx (100%)
create mode 100644 docs/docs/getting-started/rag-with-astradb.mdx
delete mode 100644 docs/docs/guidelines/async-api.mdx
delete mode 100644 docs/docs/guides/async-tasks.mdx
delete mode 100644 docs/docs/guides/superuser.mdx
rename src/backend/langflow/components/embeddings/__init__.py => docs/docs/migration/api.mdx (100%)
create mode 100644 docs/docs/migration/compatibility.mdx
rename src/backend/langflow/components/llms/__init__.py => docs/docs/migration/component-status-and-data-passing.mdx (100%)
rename src/backend/langflow/components/retrievers/__init__.py => docs/docs/migration/connecting-output-components.mdx (100%)
rename src/backend/langflow/components/textsplitters/__init__.py => docs/docs/migration/custom-component.mdx (100%)
rename src/backend/langflow/components/toolkits/__init__.py => docs/docs/migration/experimental-components.mdx (100%)
rename src/backend/langflow/components/tools/__init__.py => docs/docs/migration/flow-of-data.mdx (100%)
create mode 100644 docs/docs/migration/global-variables.mdx
create mode 100644 docs/docs/migration/inputs-and-outputs.mdx
rename src/backend/langflow/components/vectorstores/__init__.py => docs/docs/migration/multiple-flows.mdx (100%)
rename src/backend/langflow/core/__init__.py => docs/docs/migration/new-categories-and-components.mdx (100%)
rename src/backend/langflow/custom/__init__.py => docs/docs/migration/passing-tweaks-and-inputs.mdx (100%)
rename src/backend/langflow/graph/edge/__init__.py => docs/docs/migration/renaming-and-editing-components.mdx (100%)
rename src/backend/langflow/graph/graph/__init__.py => docs/docs/migration/sidebar-and-interaction-panel.mdx (100%)
rename src/backend/langflow/graph/vertex/__init__.py => docs/docs/migration/state-management.mdx (100%)
rename src/backend/langflow/interface/__init__.py => docs/docs/migration/supported-frameworks.mdx (100%)
create mode 100644 docs/docs/migration/text-and-record.mdx
create mode 100644 docs/docs/whats-new/a-new-chapter-langflow.mdx
create mode 100644 docs/docs/whats-new/customization-control.mdx
create mode 100644 docs/docs/whats-new/debugging-reimagined.mdx
create mode 100644 docs/docs/whats-new/migrating-to-one-point-zero.mdx
create mode 100644 docs/docs/whats-new/simplification-standardization.mdx
create mode 100644 docs/src/theme/DownloadableJsonFile.js
create mode 100644 docs/static/data/AstraDB-RAG-Flows.json
create mode 100644 docs/static/img/add-new-variable.png
create mode 100644 docs/static/img/astra-configure-deployment.png
create mode 100644 docs/static/img/astra-create-database.png
create mode 100644 docs/static/img/astra-generate-token.png
create mode 100644 docs/static/img/astra-ingestion-fields.png
create mode 100644 docs/static/img/astra-ingestion-flow-dark.png
create mode 100644 docs/static/img/astra-ingestion-flow.png
create mode 100644 docs/static/img/astra-ingestion-run.png
create mode 100644 docs/static/img/astra-rag-flow-dark.png
create mode 100644 docs/static/img/astra-rag-flow-interaction-panel-interaction.png
create mode 100644 docs/static/img/astra-rag-flow-interaction-panel.png
create mode 100644 docs/static/img/astra-rag-flow-run.png
create mode 100644 docs/static/img/astra-rag-flow.png
create mode 100644 docs/static/img/chat-input-expanded.png
create mode 100644 docs/static/img/chat-input.png
create mode 100644 docs/static/img/create-variable-window.png
create mode 100644 docs/static/img/drag-and-drop-canvas.png
create mode 100644 docs/static/img/drag-and-drop-flow.png
create mode 100644 docs/static/img/duplicate-space.png
create mode 100644 docs/static/img/interaction-panel-text-input.png
create mode 100644 docs/static/img/interaction-panel-with-chat-input.png
create mode 100644 docs/static/img/ollama-gv.png
create mode 100644 docs/static/img/prompt-with-template.png
create mode 100644 docs/static/img/prompt.png
create mode 100644 docs/static/img/text-input-expanded.png
create mode 100644 docs/static/img/text-input.png
delete mode 100644 lcserve.Dockerfile
create mode 100644 scripts/setup/setup_env.sh
create mode 100644 scripts/setup/update_poetry.sh
create mode 100644 scripts/update_dependencies.py
rename src/backend/{langflow/interface/document_loaders/__init__.py => base/README.md} (100%)
rename src/backend/{ => base}/langflow/__main__.py (85%)
rename src/backend/{ => base}/langflow/alembic.ini (100%)
rename src/backend/{ => base}/langflow/alembic/README (100%)
rename src/backend/{ => base}/langflow/alembic/env.py (72%)
rename src/backend/{ => base}/langflow/alembic/script.py.mako (90%)
rename src/backend/{ => base}/langflow/alembic/versions/006b3990db50_add_unique_constraints.py (93%)
rename src/backend/{ => base}/langflow/alembic/versions/0b8757876a7c_.py (100%)
create mode 100644 src/backend/base/langflow/alembic/versions/1a110b568907_replace_credential_table_with_variable.py
rename src/backend/{ => base}/langflow/alembic/versions/1ef9c4f3765d_.py (83%)
rename src/backend/{ => base}/langflow/alembic/versions/260dbcc8b680_adds_tables.py (81%)
rename src/backend/{ => base}/langflow/alembic/versions/2ac71eb9c3ae_adds_credential_table.py (92%)
create mode 100644 src/backend/base/langflow/alembic/versions/63b9c451fd30_add_icon_and_icon_bg_color_to_flow.py
rename src/backend/{ => base}/langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py (100%)
rename src/backend/{ => base}/langflow/alembic/versions/7843803a87b5_store_updates.py (82%)
rename src/backend/{ => base}/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py (80%)
rename src/backend/{ => base}/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py (81%)
rename src/backend/{ => base}/langflow/alembic/versions/bc2f01c40e4a_new_fixes.py (82%)
rename src/backend/{ => base}/langflow/alembic/versions/eb5866d51fd2_change_columns_to_be_nullable.py (94%)
rename src/backend/{ => base}/langflow/alembic/versions/f5ee9749d1a6_user_id_can_be_null_in_flow.py (79%)
rename src/backend/{ => base}/langflow/alembic/versions/fd531f8868b1_fix_credential_table.py (93%)
rename src/backend/{ => base}/langflow/api/__init__.py (100%)
rename src/backend/{ => base}/langflow/api/router.py (76%)
rename src/backend/{ => base}/langflow/api/utils.py (55%)
rename src/backend/{ => base}/langflow/api/v1/__init__.py (72%)
rename src/backend/{ => base}/langflow/api/v1/api_key.py (89%)
create mode 100644 src/backend/base/langflow/api/v1/base.py
rename src/backend/{ => base}/langflow/api/v1/callback.py (69%)
create mode 100644 src/backend/base/langflow/api/v1/chat.py
create mode 100644 src/backend/base/langflow/api/v1/endpoints.py
create mode 100644 src/backend/base/langflow/api/v1/files.py
rename src/backend/{ => base}/langflow/api/v1/flows.py (67%)
rename src/backend/{ => base}/langflow/api/v1/login.py (88%)
create mode 100644 src/backend/base/langflow/api/v1/monitor.py
create mode 100644 src/backend/base/langflow/api/v1/schemas.py
rename src/backend/{ => base}/langflow/api/v1/store.py (100%)
rename src/backend/{ => base}/langflow/api/v1/users.py (100%)
create mode 100644 src/backend/base/langflow/api/v1/validate.py
create mode 100644 src/backend/base/langflow/api/v1/variable.py
rename src/backend/{langflow/interface/embeddings => base/langflow/base}/__init__.py (100%)
rename src/backend/{langflow/interface/initialize => base/langflow/base/agents}/__init__.py (100%)
create mode 100644 src/backend/base/langflow/base/agents/agent.py
create mode 100644 src/backend/base/langflow/base/constants.py
rename src/backend/{langflow/interface/output_parsers => base/langflow/base/data}/__init__.py (100%)
create mode 100644 src/backend/base/langflow/base/data/utils.py
rename src/backend/{langflow/interface/retrievers => base/langflow/base/io}/__init__.py (100%)
create mode 100644 src/backend/base/langflow/base/io/chat.py
create mode 100644 src/backend/base/langflow/base/io/text.py
create mode 100644 src/backend/base/langflow/base/models/__init__.py
create mode 100644 src/backend/base/langflow/base/models/model.py
rename src/backend/{langflow/interface/toolkits => base/langflow/base/prompts}/__init__.py (100%)
create mode 100644 src/backend/base/langflow/base/prompts/utils.py
create mode 100644 src/backend/base/langflow/components/__init__.py
rename src/backend/{ => base}/langflow/components/agents/AgentInitializer.py (96%)
create mode 100644 src/backend/base/langflow/components/agents/CSVAgent.py
rename src/backend/{ => base}/langflow/components/agents/JsonAgent.py (90%)
rename src/backend/{ => base}/langflow/components/agents/OpenAIConversationalAgent.py (95%)
rename src/backend/{ => base}/langflow/components/agents/SQLAgent.py (90%)
rename src/backend/{ => base}/langflow/components/agents/VectorStoreAgent.py (87%)
rename src/backend/{ => base}/langflow/components/agents/VectorStoreRouterAgent.py (91%)
create mode 100644 src/backend/base/langflow/components/agents/XMLAgent.py
create mode 100644 src/backend/base/langflow/components/agents/__init__.py
create mode 100644 src/backend/base/langflow/components/chains/ConversationChain.py
rename src/backend/{ => base}/langflow/components/chains/LLMChain.py (53%)
create mode 100644 src/backend/base/langflow/components/chains/LLMCheckerChain.py
rename src/backend/{ => base}/langflow/components/chains/LLMMathChain.py (56%)
create mode 100644 src/backend/base/langflow/components/chains/RetrievalQA.py
create mode 100644 src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py
create mode 100644 src/backend/base/langflow/components/chains/SQLGenerator.py
create mode 100644 src/backend/base/langflow/components/chains/__init__.py
create mode 100644 src/backend/base/langflow/components/data/APIRequest.py
create mode 100644 src/backend/base/langflow/components/data/Directory.py
create mode 100644 src/backend/base/langflow/components/data/File.py
create mode 100644 src/backend/base/langflow/components/data/URL.py
create mode 100644 src/backend/base/langflow/components/data/__init__.py
rename src/backend/{langflow/interface/utilities => base/langflow/components/documentloaders}/__init__.py (100%)
rename src/backend/{ => base}/langflow/components/embeddings/AmazonBedrockEmbeddings.py (84%)
rename src/backend/{ => base}/langflow/components/embeddings/AzureOpenAIEmbeddings.py (90%)
rename src/backend/{ => base}/langflow/components/embeddings/CohereEmbeddings.py (83%)
rename src/backend/{ => base}/langflow/components/embeddings/HuggingFaceEmbeddings.py (84%)
rename src/backend/{ => base}/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py (87%)
rename src/backend/{ => base}/langflow/components/embeddings/OllamaEmbeddings.py (84%)
rename src/backend/{ => base}/langflow/components/embeddings/OpenAIEmbeddings.py (78%)
rename src/backend/{ => base}/langflow/components/embeddings/VertexAIEmbeddings.py (54%)
create mode 100644 src/backend/base/langflow/components/embeddings/__init__.py
create mode 100644 src/backend/base/langflow/components/experimental/ClearMessageHistory.py
create mode 100644 src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py
create mode 100644 src/backend/base/langflow/components/experimental/FlowTool.py
create mode 100644 src/backend/base/langflow/components/experimental/ListFlows.py
create mode 100644 src/backend/base/langflow/components/experimental/Listen.py
create mode 100644 src/backend/base/langflow/components/experimental/MergeRecords.py
create mode 100644 src/backend/base/langflow/components/experimental/Notify.py
create mode 100644 src/backend/base/langflow/components/experimental/PythonFunction.py
create mode 100644 src/backend/base/langflow/components/experimental/RunFlow.py
create mode 100644 src/backend/base/langflow/components/experimental/RunnableExecutor.py
create mode 100644 src/backend/base/langflow/components/experimental/SQLExecutor.py
create mode 100644 src/backend/base/langflow/components/experimental/SubFlow.py
create mode 100644 src/backend/base/langflow/components/experimental/__init__.py
create mode 100644 src/backend/base/langflow/components/helpers/CombineText.py
create mode 100644 src/backend/base/langflow/components/helpers/CreateRecord.py
create mode 100644 src/backend/base/langflow/components/helpers/CustomComponent.py
create mode 100644 src/backend/base/langflow/components/helpers/DocumentToRecord.py
create mode 100644 src/backend/base/langflow/components/helpers/IDGenerator.py
create mode 100644 src/backend/base/langflow/components/helpers/MemoryComponent.py
create mode 100644 src/backend/base/langflow/components/helpers/MessageHistory.py
create mode 100644 src/backend/base/langflow/components/helpers/RecordsToText.py
create mode 100644 src/backend/base/langflow/components/helpers/SplitText.py
create mode 100644 src/backend/base/langflow/components/helpers/UpdateRecord.py
create mode 100644 src/backend/base/langflow/components/helpers/__init__.py
create mode 100644 src/backend/base/langflow/components/inputs/ChatInput.py
create mode 100644 src/backend/base/langflow/components/inputs/Prompt.py
create mode 100644 src/backend/base/langflow/components/inputs/TextInput.py
create mode 100644 src/backend/base/langflow/components/inputs/__init__.py
rename src/backend/{langflow/components/utilities => base/langflow/components/langchain_utilities}/BingSearchAPIWrapper.py (94%)
rename src/backend/{langflow/components/utilities => base/langflow/components/langchain_utilities}/GoogleSearchAPIWrapper.py (91%)
rename src/backend/{langflow/components/utilities => base/langflow/components/langchain_utilities}/GoogleSerperAPIWrapper.py (83%)
rename src/backend/{langflow/components/utilities => base/langflow/components/langchain_utilities}/JSONDocumentBuilder.py (95%)
create mode 100644 src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py
rename src/backend/{langflow/components/tools => base/langflow/components/langchain_utilities}/SearchApi.py (87%)
rename src/backend/{langflow/components/utilities => base/langflow/components/langchain_utilities}/SearxSearchWrapper.py (90%)
rename src/backend/{langflow/components/utilities => base/langflow/components/langchain_utilities}/SerpAPIWrapper.py (93%)
rename src/backend/{langflow/components/utilities => base/langflow/components/langchain_utilities}/WikipediaAPIWrapper.py (93%)
rename src/backend/{langflow/components/utilities => base/langflow/components/langchain_utilities}/WolframAlphaAPIWrapper.py (90%)
rename src/backend/{langflow/interface/vector_store => base/langflow/components/memories}/__init__.py (100%)
rename src/backend/{langflow/components/llms/AmazonBedrock.py => base/langflow/components/model_specs/AmazonBedrockSpecs.py} (93%)
rename src/backend/{langflow/components/llms/AnthropicLLM.py => base/langflow/components/model_specs/AnthropicLLMSpecs.py} (96%)
rename src/backend/{langflow/components/llms/Anthropic.py => base/langflow/components/model_specs/AnthropicSpecs.py} (93%)
rename src/backend/{langflow/components/llms/AzureChatOpenAI.py => base/langflow/components/model_specs/AzureChatOpenAISpecs.py} (95%)
rename src/backend/{langflow/components/llms/BaiduQianfanChatEndpoints.py => base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py} (98%)
rename src/backend/{langflow/components/llms/BaiduQianfanLLMEndpoints.py => base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py} (97%)
rename src/backend/{langflow/components/llms/ChatAnthropic.py => base/langflow/components/model_specs/ChatAnthropicSpecs.py} (94%)
rename src/backend/{langflow/components/llms/ChatLiteLLM.py => base/langflow/components/model_specs/ChatLiteLLMSpecs.py} (72%)
rename src/backend/{langflow/components/llms/ChatOllamaEndpoint.py => base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py} (98%)
rename src/backend/{langflow/components/llms/ChatOpenAI.py => base/langflow/components/model_specs/ChatOpenAISpecs.py} (90%)
rename src/backend/{langflow/components/llms/ChatVertexAI.py => base/langflow/components/model_specs/ChatVertexAISpecs.py} (96%)
rename src/backend/{langflow/components/llms/Cohere.py => base/langflow/components/model_specs/CohereSpecs.py} (91%)
create mode 100644 src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py
rename src/backend/{langflow/components/llms/HuggingFaceEndpoints.py => base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py} (93%)
rename src/backend/{langflow/components/llms/OllamaLLM.py => base/langflow/components/model_specs/OllamaLLMSpecs.py} (98%)
rename src/backend/{langflow/components/llms/VertexAI.py => base/langflow/components/model_specs/VertexAISpecs.py} (96%)
create mode 100644 src/backend/base/langflow/components/model_specs/__init__.py
create mode 100644 src/backend/base/langflow/components/models/AmazonBedrockModel.py
create mode 100644 src/backend/base/langflow/components/models/AnthropicModel.py
create mode 100644 src/backend/base/langflow/components/models/AzureOpenAIModel.py
create mode 100644 src/backend/base/langflow/components/models/BaiduQianfanChatModel.py
create mode 100644 src/backend/base/langflow/components/models/ChatLiteLLMModel.py
create mode 100644 src/backend/base/langflow/components/models/CohereModel.py
create mode 100644 src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py
create mode 100644 src/backend/base/langflow/components/models/HuggingFaceModel.py
create mode 100644 src/backend/base/langflow/components/models/OllamaModel.py
create mode 100644 src/backend/base/langflow/components/models/OpenAIModel.py
create mode 100644 src/backend/base/langflow/components/models/VertexAiModel.py
create mode 100644 src/backend/base/langflow/components/models/__init__.py
create mode 100644 src/backend/base/langflow/components/outputs/ChatOutput.py
create mode 100644 src/backend/base/langflow/components/outputs/TextOutput.py
create mode 100644 src/backend/base/langflow/components/outputs/__init__.py
rename src/backend/{ => base}/langflow/components/retrievers/AmazonKendra.py (91%)
rename src/backend/{ => base}/langflow/components/retrievers/MetalRetriever.py (88%)
rename src/backend/{ => base}/langflow/components/retrievers/MultiQueryRetriever.py (96%)
rename src/backend/{ => base}/langflow/components/retrievers/VectaraSelfQueryRetriver.py (96%)
rename src/backend/{ => base}/langflow/components/retrievers/VectorStoreRetriever.py (87%)
create mode 100644 src/backend/base/langflow/components/retrievers/__init__.py
rename src/backend/{ => base}/langflow/components/textsplitters/CharacterTextSplitter.py (54%)
rename src/backend/{ => base}/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py (81%)
rename src/backend/{ => base}/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py (75%)
create mode 100644 src/backend/base/langflow/components/textsplitters/__init__.py
rename src/backend/{ => base}/langflow/components/toolkits/JsonToolkit.py (87%)
rename src/backend/{ => base}/langflow/components/toolkits/Metaphor.py (96%)
rename src/backend/{ => base}/langflow/components/toolkits/OpenAPIToolkit.py (91%)
rename src/backend/{ => base}/langflow/components/toolkits/VectorStoreInfo.py (91%)
rename src/backend/{ => base}/langflow/components/toolkits/VectorStoreRouterToolkit.py (84%)
rename src/backend/{ => base}/langflow/components/toolkits/VectorStoreToolkit.py (72%)
create mode 100644 src/backend/base/langflow/components/toolkits/__init__.py
rename src/backend/{ => base}/langflow/components/tools/RetrieverTool.py (92%)
create mode 100644 src/backend/base/langflow/components/tools/SearchAPITool.py
create mode 100644 src/backend/base/langflow/components/tools/SearchApi.py
create mode 100644 src/backend/base/langflow/components/tools/__init__.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/AstraDBSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/ChromaSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/FAISSSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/MongoDBAtlasVectorSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/PineconeSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/QdrantSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/RedisSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/SupabaseVectorStoreSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/VectaraSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/WeaviateSearch.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/__init__.py
create mode 100644 src/backend/base/langflow/components/vectorsearch/pgvectorSearch.py
create mode 100644 src/backend/base/langflow/components/vectorstores/AstraDB.py
rename src/backend/{ => base}/langflow/components/vectorstores/Chroma.py (75%)
create mode 100644 src/backend/base/langflow/components/vectorstores/FAISS.py
create mode 100644 src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py
rename src/backend/{ => base}/langflow/components/vectorstores/Pinecone.py (65%)
rename src/backend/{ => base}/langflow/components/vectorstores/Qdrant.py (82%)
rename src/backend/{ => base}/langflow/components/vectorstores/Redis.py (84%)
rename src/backend/{ => base}/langflow/components/vectorstores/SupabaseVectorStore.py (72%)
rename src/backend/{ => base}/langflow/components/vectorstores/Vectara.py (78%)
rename src/backend/{ => base}/langflow/components/vectorstores/Weaviate.py (73%)
create mode 100644 src/backend/base/langflow/components/vectorstores/__init__.py
create mode 100644 src/backend/base/langflow/components/vectorstores/base/__init__.py
create mode 100644 src/backend/base/langflow/components/vectorstores/base/model.py
rename src/backend/{ => base}/langflow/components/vectorstores/pgvector.py (84%)
rename src/backend/{ => base}/langflow/config.yaml (74%)
rename src/backend/{langflow/interface/wrappers => base/langflow/core}/__init__.py (100%)
rename src/backend/{ => base}/langflow/core/celery_app.py (100%)
rename src/backend/{ => base}/langflow/core/celeryconfig.py (100%)
rename src/backend/{langflow/components/__init__.py => base/langflow/custom.py} (62%)
rename src/backend/{ => base}/langflow/field_typing/__init__.py (97%)
rename src/backend/{ => base}/langflow/field_typing/constants.py (95%)
create mode 100644 src/backend/base/langflow/field_typing/range_spec.py
rename src/backend/{ => base}/langflow/graph/__init__.py (93%)
rename src/backend/{langflow/processing => base/langflow/graph/edge}/__init__.py (100%)
rename src/backend/{ => base}/langflow/graph/edge/base.py (54%)
create mode 100644 src/backend/base/langflow/graph/edge/schema.py
create mode 100644 src/backend/base/langflow/graph/edge/utils.py
rename src/backend/{langflow/services/auth => base/langflow/graph/graph}/__init__.py (100%)
create mode 100644 src/backend/base/langflow/graph/graph/base.py
rename src/backend/{ => base}/langflow/graph/graph/constants.py (89%)
create mode 100644 src/backend/base/langflow/graph/graph/runnable_vertices_manager.py
create mode 100644 src/backend/base/langflow/graph/graph/state_manager.py
rename src/backend/{ => base}/langflow/graph/graph/utils.py (82%)
create mode 100644 src/backend/base/langflow/graph/schema.py
rename src/backend/{ => base}/langflow/graph/utils.py (53%)
rename src/backend/{langflow/services/chat => base/langflow/graph/vertex}/__init__.py (100%)
create mode 100644 src/backend/base/langflow/graph/vertex/base.py
rename src/backend/{ => base}/langflow/graph/vertex/constants.py (100%)
create mode 100644 src/backend/base/langflow/graph/vertex/types.py
create mode 100644 src/backend/base/langflow/graph/vertex/utils.py
create mode 100644 src/backend/base/langflow/helpers/__init__.py
create mode 100644 src/backend/base/langflow/helpers/flow.py
create mode 100644 src/backend/base/langflow/helpers/record.py
rename src/backend/{langflow/services/credentials => base/langflow/initial_setup}/__init__.py (100%)
create mode 100644 src/backend/base/langflow/initial_setup/setup.py
create mode 100644 src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json
create mode 100644 src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json
create mode 100644 src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json
create mode 100644 src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json
create mode 100644 src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json
create mode 100644 src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json
rename src/backend/{langflow/services/database => base/langflow/interface}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/agents/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/agents/base.py (97%)
rename src/backend/{ => base}/langflow/interface/agents/custom.py (98%)
rename src/backend/{ => base}/langflow/interface/agents/prebuilt.py (100%)
rename src/backend/{ => base}/langflow/interface/base.py (100%)
rename src/backend/{ => base}/langflow/interface/chains/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/chains/base.py (98%)
rename src/backend/{ => base}/langflow/interface/chains/custom.py (99%)
rename src/backend/{ => base}/langflow/interface/custom/__init__.py (100%)
create mode 100644 src/backend/base/langflow/interface/custom/attributes.py
rename src/backend/{ => base}/langflow/interface/custom/base.py (85%)
rename src/backend/{ => base}/langflow/interface/custom/code_parser/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/custom/code_parser/code_parser.py (90%)
rename src/backend/{ => base}/langflow/interface/custom/code_parser/utils.py (77%)
rename src/backend/{ => base}/langflow/interface/custom/custom_component/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/custom/custom_component/component.py (56%)
create mode 100644 src/backend/base/langflow/interface/custom/custom_component/custom_component.py
rename src/backend/{ => base}/langflow/interface/custom/directory_reader/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/custom/directory_reader/directory_reader.py (86%)
rename src/backend/{ => base}/langflow/interface/custom/directory_reader/utils.py (91%)
create mode 100644 src/backend/base/langflow/interface/custom/eval.py
rename src/backend/{ => base}/langflow/interface/custom/schema.py (80%)
rename src/backend/{ => base}/langflow/interface/custom/utils.py (66%)
rename src/backend/{ => base}/langflow/interface/custom_lists.py (100%)
rename src/backend/{langflow/services/plugins => base/langflow/interface/document_loaders}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/document_loaders/base.py (100%)
rename src/backend/{langflow/services/session => base/langflow/interface/embeddings}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/embeddings/base.py (100%)
rename src/backend/{ => base}/langflow/interface/importing/__init__.py (54%)
rename src/backend/{ => base}/langflow/interface/importing/utils.py (86%)
rename src/backend/{langflow/services/store => base/langflow/interface/initialize}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/initialize/llm.py (100%)
rename src/backend/{ => base}/langflow/interface/initialize/loading.py (84%)
rename src/backend/{ => base}/langflow/interface/initialize/utils.py (100%)
rename src/backend/{ => base}/langflow/interface/initialize/vector_store.py (100%)
rename src/backend/{ => base}/langflow/interface/listing.py (89%)
rename src/backend/{ => base}/langflow/interface/llms/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/llms/base.py (97%)
rename src/backend/{ => base}/langflow/interface/memories/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/memories/base.py (97%)
rename src/backend/{langflow/services/task => base/langflow/interface/output_parsers}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/output_parsers/base.py (99%)
rename src/backend/{ => base}/langflow/interface/prompts/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/prompts/base.py (97%)
rename src/backend/{ => base}/langflow/interface/prompts/custom.py (100%)
rename src/backend/{langflow/services/task/backends => base/langflow/interface/retrievers}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/retrievers/base.py (99%)
rename src/backend/{ => base}/langflow/interface/run.py (79%)
rename src/backend/{ => base}/langflow/interface/text_splitters/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/text_splitters/base.py (100%)
rename src/backend/{langflow/template => base/langflow/interface/toolkits}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/toolkits/base.py (79%)
rename src/backend/{ => base}/langflow/interface/toolkits/custom.py (100%)
rename src/backend/{ => base}/langflow/interface/tools/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/tools/base.py (95%)
rename src/backend/{ => base}/langflow/interface/tools/constants.py (86%)
rename src/backend/{ => base}/langflow/interface/tools/custom.py (95%)
rename src/backend/{ => base}/langflow/interface/tools/util.py (100%)
rename src/backend/{ => base}/langflow/interface/types.py (75%)
rename src/backend/{langflow/template/field => base/langflow/interface/utilities}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/utilities/base.py (97%)
rename src/backend/{ => base}/langflow/interface/utils.py (71%)
rename src/backend/{langflow/template/frontend_node/formatter => base/langflow/interface/vector_store}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/vector_store/base.py (100%)
rename src/backend/{langflow/template/template => base/langflow/interface/wrappers}/__init__.py (100%)
rename src/backend/{ => base}/langflow/interface/wrappers/base.py (51%)
rename src/backend/{langflow/utils => base/langflow/legacy_custom}/__init__.py (100%)
rename src/backend/{langflow/custom => base/langflow/legacy_custom}/customs.py (94%)
create mode 100644 src/backend/base/langflow/load.py
rename src/backend/{ => base}/langflow/main.py (74%)
create mode 100644 src/backend/base/langflow/memory.py
create mode 100644 src/backend/base/langflow/processing/__init__.py
rename src/backend/{ => base}/langflow/processing/base.py (90%)
create mode 100644 src/backend/base/langflow/processing/load.py
create mode 100644 src/backend/base/langflow/processing/process.py
create mode 100644 src/backend/base/langflow/py.typed
create mode 100644 src/backend/base/langflow/schema/__init__.py
create mode 100644 src/backend/base/langflow/schema/dotdict.py
create mode 100644 src/backend/base/langflow/schema/schema.py
rename src/backend/{ => base}/langflow/server.py (63%)
rename src/backend/{ => base}/langflow/services/__init__.py (100%)
create mode 100644 src/backend/base/langflow/services/auth/__init__.py
rename src/backend/{ => base}/langflow/services/auth/factory.py (100%)
rename src/backend/{ => base}/langflow/services/auth/service.py (99%)
rename src/backend/{ => base}/langflow/services/auth/utils.py (94%)
create mode 100644 src/backend/base/langflow/services/base.py
create mode 100644 src/backend/base/langflow/services/cache/__init__.py
create mode 100644 src/backend/base/langflow/services/cache/base.py
rename src/backend/{ => base}/langflow/services/cache/factory.py (78%)
rename src/backend/{ => base}/langflow/services/cache/service.py (73%)
rename src/backend/{ => base}/langflow/services/cache/utils.py (100%)
create mode 100644 src/backend/base/langflow/services/chat/__init__.py
rename src/backend/{ => base}/langflow/services/chat/cache.py (99%)
rename src/backend/{ => base}/langflow/services/chat/config.py (100%)
rename src/backend/{ => base}/langflow/services/chat/factory.py (100%)
create mode 100644 src/backend/base/langflow/services/chat/service.py
rename src/backend/{ => base}/langflow/services/chat/utils.py (99%)
create mode 100644 src/backend/base/langflow/services/database/__init__.py
rename src/backend/{ => base}/langflow/services/database/factory.py (99%)
create mode 100644 src/backend/base/langflow/services/database/models/__init__.py
rename src/backend/{ => base}/langflow/services/database/models/api_key/__init__.py (100%)
rename src/backend/{ => base}/langflow/services/database/models/api_key/crud.py (100%)
rename src/backend/{ => base}/langflow/services/database/models/api_key/model.py (100%)
rename src/backend/{ => base}/langflow/services/database/models/base.py (100%)
rename src/backend/{ => base}/langflow/services/database/models/flow/__init__.py (100%)
create mode 100644 src/backend/base/langflow/services/database/models/flow/model.py
rename src/backend/{ => base}/langflow/services/database/models/user/__init__.py (100%)
rename src/backend/{ => base}/langflow/services/database/models/user/crud.py (99%)
rename src/backend/{ => base}/langflow/services/database/models/user/model.py (93%)
create mode 100644 src/backend/base/langflow/services/database/models/variable/__init__.py
create mode 100644 src/backend/base/langflow/services/database/models/variable/model.py
rename src/backend/{ => base}/langflow/services/database/service.py (94%)
rename src/backend/{ => base}/langflow/services/database/utils.py (100%)
create mode 100644 src/backend/base/langflow/services/deps.py
create mode 100644 src/backend/base/langflow/services/factory.py
rename src/backend/{ => base}/langflow/services/manager.py (61%)
create mode 100644 src/backend/base/langflow/services/monitor/__init__.py
create mode 100644 src/backend/base/langflow/services/monitor/factory.py
create mode 100644 src/backend/base/langflow/services/monitor/schema.py
create mode 100644 src/backend/base/langflow/services/monitor/service.py
create mode 100644 src/backend/base/langflow/services/monitor/utils.py
create mode 100644 src/backend/base/langflow/services/plugins/__init__.py
rename src/backend/{ => base}/langflow/services/plugins/base.py (100%)
rename src/backend/{ => base}/langflow/services/plugins/factory.py (100%)
rename src/backend/{ => base}/langflow/services/plugins/langfuse_plugin.py (99%)
rename src/backend/{ => base}/langflow/services/plugins/service.py (99%)
rename src/backend/{ => base}/langflow/services/schema.py (66%)
create mode 100644 src/backend/base/langflow/services/session/__init__.py
rename src/backend/{ => base}/langflow/services/session/factory.py (72%)
rename src/backend/{ => base}/langflow/services/session/service.py (57%)
rename src/backend/{ => base}/langflow/services/session/utils.py (100%)
rename src/backend/{ => base}/langflow/services/settings/__init__.py (100%)
rename src/backend/{ => base}/langflow/services/settings/auth.py (81%)
rename src/backend/{ => base}/langflow/services/settings/base.py (98%)
rename src/backend/{ => base}/langflow/services/settings/constants.py (100%)
rename src/backend/{ => base}/langflow/services/settings/factory.py (99%)
rename src/backend/{langflow/services/settings/service.py => base/langflow/services/settings/manager.py} (99%)
create mode 100644 src/backend/base/langflow/services/settings/service.py
rename src/backend/{ => base}/langflow/services/settings/utils.py (100%)
create mode 100644 src/backend/base/langflow/services/socket/__init__.py
create mode 100644 src/backend/base/langflow/services/socket/factory.py
create mode 100644 src/backend/base/langflow/services/socket/service.py
create mode 100644 src/backend/base/langflow/services/socket/utils.py
create mode 100644 src/backend/base/langflow/services/state/__init__.py
create mode 100644 src/backend/base/langflow/services/state/factory.py
create mode 100644 src/backend/base/langflow/services/state/service.py
create mode 100644 src/backend/base/langflow/services/storage/__init__.py
create mode 100644 src/backend/base/langflow/services/storage/constants.py
create mode 100644 src/backend/base/langflow/services/storage/factory.py
create mode 100644 src/backend/base/langflow/services/storage/local.py
create mode 100644 src/backend/base/langflow/services/storage/s3.py
create mode 100644 src/backend/base/langflow/services/storage/service.py
create mode 100644 src/backend/base/langflow/services/storage/utils.py
create mode 100644 src/backend/base/langflow/services/store/__init__.py
rename src/backend/{ => base}/langflow/services/store/exceptions.py (100%)
rename src/backend/{ => base}/langflow/services/store/factory.py (99%)
rename src/backend/{ => base}/langflow/services/store/schema.py (100%)
rename src/backend/{ => base}/langflow/services/store/service.py (100%)
rename src/backend/{ => base}/langflow/services/store/utils.py (100%)
create mode 100644 src/backend/base/langflow/services/task/__init__.py
create mode 100644 src/backend/base/langflow/services/task/backends/__init__.py
rename src/backend/{ => base}/langflow/services/task/backends/anyio.py (100%)
rename src/backend/{ => base}/langflow/services/task/backends/base.py (95%)
rename src/backend/{ => base}/langflow/services/task/backends/celery.py (99%)
rename src/backend/{ => base}/langflow/services/task/factory.py (100%)
rename src/backend/{ => base}/langflow/services/task/service.py (76%)
rename src/backend/{ => base}/langflow/services/task/utils.py (100%)
rename src/backend/{ => base}/langflow/services/utils.py (71%)
create mode 100644 src/backend/base/langflow/services/variable/__init__.py
rename src/backend/{langflow/services/credentials => base/langflow/services/variable}/factory.py (55%)
create mode 100644 src/backend/base/langflow/services/variable/service.py
rename src/backend/{ => base}/langflow/settings.py (100%)
create mode 100644 src/backend/base/langflow/template/__init__.py
create mode 100644 src/backend/base/langflow/template/field/__init__.py
rename src/backend/{ => base}/langflow/template/field/base.py (64%)
create mode 100644 src/backend/base/langflow/template/field/prompt.py
rename src/backend/{ => base}/langflow/template/frontend_node/__init__.py (95%)
rename src/backend/{ => base}/langflow/template/frontend_node/agents.py (99%)
rename src/backend/{ => base}/langflow/template/frontend_node/base.py (88%)
rename src/backend/{ => base}/langflow/template/frontend_node/chains.py (90%)
rename src/backend/{ => base}/langflow/template/frontend_node/constants.py (92%)
rename src/backend/{ => base}/langflow/template/frontend_node/custom_components.py (92%)
rename src/backend/{ => base}/langflow/template/frontend_node/documentloaders.py (100%)
rename src/backend/{ => base}/langflow/template/frontend_node/embeddings.py (100%)
create mode 100644 src/backend/base/langflow/template/frontend_node/formatter/__init__.py
rename src/backend/{ => base}/langflow/template/frontend_node/formatter/base.py (99%)
rename src/backend/{ => base}/langflow/template/frontend_node/formatter/field_formatters.py (100%)
rename src/backend/{ => base}/langflow/template/frontend_node/llms.py (100%)
rename src/backend/{ => base}/langflow/template/frontend_node/memories.py (97%)
rename src/backend/{ => base}/langflow/template/frontend_node/output_parsers.py (99%)
rename src/backend/{ => base}/langflow/template/frontend_node/prompts.py (100%)
rename src/backend/{ => base}/langflow/template/frontend_node/retrievers.py (100%)
rename src/backend/{ => base}/langflow/template/frontend_node/textsplitters.py (99%)
rename src/backend/{ => base}/langflow/template/frontend_node/tools.py (100%)
rename src/backend/{ => base}/langflow/template/frontend_node/utilities.py (100%)
rename src/backend/{ => base}/langflow/template/frontend_node/vectorstores.py (100%)
create mode 100644 src/backend/base/langflow/template/template/__init__.py
rename src/backend/{ => base}/langflow/template/template/base.py (97%)
create mode 100644 src/backend/base/langflow/utils/__init__.py
rename src/backend/{ => base}/langflow/utils/constants.py (98%)
rename src/backend/{ => base}/langflow/utils/lazy_load.py (100%)
rename src/backend/{ => base}/langflow/utils/logger.py (93%)
rename src/backend/{ => base}/langflow/utils/payload.py (100%)
create mode 100644 src/backend/base/langflow/utils/schemas.py
rename src/backend/{ => base}/langflow/utils/util.py (92%)
rename src/backend/{ => base}/langflow/utils/validate.py (97%)
create mode 100644 src/backend/base/langflow/worker.py
create mode 100644 src/backend/base/poetry.lock
create mode 100644 src/backend/base/pyproject.toml
delete mode 100644 src/backend/langflow/__init__.py
delete mode 100644 src/backend/langflow/api/v1/base.py
delete mode 100644 src/backend/langflow/api/v1/chat.py
delete mode 100644 src/backend/langflow/api/v1/credential.py
delete mode 100644 src/backend/langflow/api/v1/endpoints.py
delete mode 100644 src/backend/langflow/api/v1/schemas.py
delete mode 100644 src/backend/langflow/api/v1/validate.py
delete mode 100644 src/backend/langflow/components/agents/CSVAgent.py
delete mode 100644 src/backend/langflow/components/chains/ConversationChain.py
delete mode 100644 src/backend/langflow/components/chains/LLMCheckerChain.py
delete mode 100644 src/backend/langflow/components/chains/PromptRunner.py
delete mode 100644 src/backend/langflow/components/chains/RetrievalQA.py
delete mode 100644 src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py
delete mode 100644 src/backend/langflow/components/chains/SQLDatabaseChain.py
delete mode 100644 src/backend/langflow/components/custom_components/CustomComponent.py
delete mode 100644 src/backend/langflow/components/documentloaders/DirectoryLoader.py
delete mode 100644 src/backend/langflow/components/documentloaders/FileLoader.py
delete mode 100644 src/backend/langflow/components/documentloaders/UrlLoader.py
delete mode 100644 src/backend/langflow/components/llms/CTransformers.py
delete mode 100644 src/backend/langflow/components/llms/GoogleGenerativeAI.py
delete mode 100644 src/backend/langflow/components/llms/LlamaCpp.py
delete mode 100644 src/backend/langflow/components/utilities/GetRequest.py
delete mode 100644 src/backend/langflow/components/utilities/PostRequest.py
delete mode 100644 src/backend/langflow/components/utilities/UpdateRequest.py
delete mode 100644 src/backend/langflow/components/vectorstores/FAISS.py
delete mode 100644 src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py
delete mode 100644 src/backend/langflow/field_typing/range_spec.py
delete mode 100644 src/backend/langflow/graph/graph/base.py
delete mode 100644 src/backend/langflow/graph/vertex/base.py
delete mode 100644 src/backend/langflow/graph/vertex/types.py
delete mode 100644 src/backend/langflow/graph/vertex/utils.py
delete mode 100644 src/backend/langflow/interface/custom/custom_component/custom_component.py
delete mode 100644 src/backend/langflow/processing/load.py
delete mode 100644 src/backend/langflow/processing/process.py
delete mode 100644 src/backend/langflow/services/base.py
delete mode 100644 src/backend/langflow/services/cache/__init__.py
delete mode 100644 src/backend/langflow/services/cache/base.py
delete mode 100644 src/backend/langflow/services/chat/service.py
delete mode 100644 src/backend/langflow/services/credentials/service.py
delete mode 100644 src/backend/langflow/services/database/models/__init__.py
delete mode 100644 src/backend/langflow/services/database/models/component/__init__.py
delete mode 100644 src/backend/langflow/services/database/models/component/model.py
delete mode 100644 src/backend/langflow/services/database/models/credential/__init__.py
delete mode 100644 src/backend/langflow/services/database/models/credential/model.py
delete mode 100644 src/backend/langflow/services/database/models/credential/schema.py
delete mode 100644 src/backend/langflow/services/database/models/flow/model.py
delete mode 100644 src/backend/langflow/services/deps.py
delete mode 100644 src/backend/langflow/services/factory.py
delete mode 100644 src/backend/langflow/utils/types.py
create mode 100644 src/backend/langflow/version/__init__.py
create mode 100644 src/backend/langflow/version/version.py
delete mode 100644 src/backend/langflow/worker.py
create mode 100644 src/frontend/.eslintrc.json
delete mode 100644 src/frontend/.github/workflows/playwright.yml
delete mode 100644 src/frontend/harFiles/backend_12112023.har
delete mode 100644 src/frontend/playwright-report/index.html
delete mode 100644 src/frontend/src/assets/Gooey Ring-5s-271px.svg
delete mode 100644 src/frontend/src/assets/froze-flow.png
create mode 100644 src/frontend/src/assets/undraw_blog_post_re_fy5x.svg
create mode 100644 src/frontend/src/assets/undraw_chat_bot_re_e2gj.svg
create mode 100644 src/frontend/src/assets/undraw_cloud_docs_re_xjht.svg
create mode 100644 src/frontend/src/assets/undraw_design_components_9vy6.svg
create mode 100644 src/frontend/src/assets/undraw_mobile_messages_re_yx8w.svg
create mode 100644 src/frontend/src/assets/undraw_real_time_analytics_re_yliv.svg
create mode 100644 src/frontend/src/assets/undraw_short_bio_re_fmx0.svg
create mode 100644 src/frontend/src/assets/undraw_team_collaboration_re_ow29.svg
create mode 100644 src/frontend/src/assets/undraw_transfer_files_re_a2a9.svg
delete mode 100644 src/frontend/src/components/LightTooltipComponent/index.tsx
delete mode 100644 src/frontend/src/components/LoadingSpinner/index.tsx
delete mode 100644 src/frontend/src/components/RadialProgress/index.tsx
delete mode 100644 src/frontend/src/components/ReactTooltipComponent/index.tsx
delete mode 100644 src/frontend/src/components/TooltipComponent/index.tsx
create mode 100644 src/frontend/src/components/ViewTriggers/chat/index.tsx
create mode 100644 src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx
delete mode 100644 src/frontend/src/components/chatComponent/buildTrigger/index.tsx
delete mode 100644 src/frontend/src/components/chatComponent/chatTrigger/index.tsx
create mode 100644 src/frontend/src/components/exampleComponent/index.tsx
create mode 100644 src/frontend/src/components/inputGlobalComponent/index.tsx
delete mode 100644 src/frontend/src/components/stackedComponents/index.tsx
create mode 100644 src/frontend/src/components/textInputComponent/index.tsx
create mode 100644 src/frontend/src/components/textOutputComponent/index.tsx
delete mode 100644 src/frontend/src/components/toggleComponent/index.tsx
create mode 100644 src/frontend/src/components/ui/checkmark.tsx
create mode 100644 src/frontend/src/components/ui/dialog-with-no-close.tsx
create mode 100644 src/frontend/src/components/ui/refreshButton.tsx
create mode 100644 src/frontend/src/components/ui/xmark.tsx
create mode 100644 src/frontend/src/constants/alerts_constants.tsx
create mode 100644 src/frontend/src/icons/AstraDB/AstraDB.jsx
create mode 100644 src/frontend/src/icons/AstraDB/Favicon.svg
create mode 100644 src/frontend/src/icons/AstraDB/index.tsx
create mode 100644 src/frontend/src/icons/Azure/Azure.jsx
create mode 100644 src/frontend/src/icons/Azure/index.tsx
create mode 100644 src/frontend/src/icons/BotMessageSquare/BotMessageSquare.jsx
create mode 100644 src/frontend/src/icons/BotMessageSquare/index.tsx
create mode 100644 src/frontend/src/icons/GoogleGenerativeAI/Google Gemini icon.svg
create mode 100644 src/frontend/src/icons/GoogleGenerativeAI/GoogleGemini.jsx
create mode 100644 src/frontend/src/icons/GoogleGenerativeAI/index.tsx
create mode 100644 src/frontend/src/icons/Ollama/Ollama.jsx
create mode 100644 src/frontend/src/icons/Ollama/Ollama.svg
create mode 100644 src/frontend/src/icons/Ollama/index.tsx
create mode 100644 src/frontend/src/icons/Postgres/Postgres.jsx
create mode 100644 src/frontend/src/icons/Postgres/Postgres.svg
create mode 100644 src/frontend/src/icons/Postgres/index.tsx
create mode 100644 src/frontend/src/icons/Python/Python.jsx
create mode 100644 src/frontend/src/icons/Python/Python.svg
create mode 100644 src/frontend/src/icons/Python/index.tsx
create mode 100644 src/frontend/src/icons/QianFanChat/QianFanChat.jsx
create mode 100644 src/frontend/src/icons/QianFanChat/QianFanChat.svg
create mode 100644 src/frontend/src/icons/QianFanChat/index.tsx
create mode 100644 src/frontend/src/icons/Redis/Redis.jsx
create mode 100644 src/frontend/src/icons/Redis/Redis.svg
create mode 100644 src/frontend/src/icons/Redis/index.tsx
create mode 100644 src/frontend/src/modals/IOModal/components/IOFieldView/components/FileInput/index.tsx
create mode 100644 src/frontend/src/modals/IOModal/components/IOFieldView/index.tsx
create mode 100644 src/frontend/src/modals/IOModal/components/chatView/chatInput/index.tsx
rename src/frontend/src/modals/{formModal => IOModal/components/chatView}/chatMessage/codeBlock/index.tsx (92%)
create mode 100644 src/frontend/src/modals/IOModal/components/chatView/chatMessage/index.tsx
rename src/frontend/src/modals/{formModal => IOModal/components/chatView}/fileComponent/index.tsx (94%)
create mode 100644 src/frontend/src/modals/IOModal/components/chatView/index.tsx
create mode 100644 src/frontend/src/modals/IOModal/index.tsx
create mode 100644 src/frontend/src/modals/NewFlowModal/components/NewFlowCardComponent/index.tsx
create mode 100644 src/frontend/src/modals/NewFlowModal/components/undrawCards/index.tsx
create mode 100644 src/frontend/src/modals/NewFlowModal/index.tsx
delete mode 100644 src/frontend/src/modals/formModal/chatInput/index.tsx
delete mode 100644 src/frontend/src/modals/formModal/chatMessage/index.tsx
delete mode 100644 src/frontend/src/modals/formModal/index.tsx
create mode 100644 src/frontend/src/pages/FlowPage/components/ParentDisclosureComponent/index.tsx
create mode 100644 src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils.tsx
create mode 100644 src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/toolbarSelectItem/index.tsx
create mode 100644 src/frontend/src/stores/globalVariables.ts
create mode 100644 src/frontend/src/stores/shortcuts.ts
create mode 100644 src/frontend/src/types/zustand/flowIOStore/index.ts
create mode 100644 src/frontend/src/types/zustand/globalVariables/index.ts
create mode 100644 src/frontend/src/utils/buildUtils.ts
create mode 100644 src/frontend/src/utils/parameterUtils.ts
create mode 100644 src/frontend/tests/end-to-end/assets/ChatTest.json
rename src/frontend/tests/{onlyFront => end-to-end}/assets/collection.json (88%)
rename src/frontend/tests/{onlyFront => end-to-end}/assets/flow.json (100%)
create mode 100644 src/frontend/tests/end-to-end/assets/flow_group_test.json
rename src/frontend/tests/{onlyFront => end-to-end}/assets/flowtest.json (54%)
rename src/frontend/tests/{onlyFront => end-to-end}/auto_login.spec.ts (52%)
create mode 100644 src/frontend/tests/end-to-end/chat_io.spec.ts
create mode 100644 src/frontend/tests/end-to-end/curl_api_generation.spec.ts
rename src/frontend/tests/{onlyFront => end-to-end}/dragAndDrop.spec.ts (68%)
create mode 100644 src/frontend/tests/end-to-end/dropdownComponent.spec.ts
rename src/frontend/tests/{onlyFront => end-to-end}/floatComponent.spec.ts (91%)
rename src/frontend/tests/{onlyFront => end-to-end}/flowPage.spec.ts (62%)
create mode 100644 src/frontend/tests/end-to-end/group.spec.ts
rename src/frontend/tests/{onlyFront => end-to-end}/inputComponent.spec.ts (62%)
create mode 100644 src/frontend/tests/end-to-end/intComponent.spec.ts
rename src/frontend/tests/{onlyFront => end-to-end}/keyPairListComponent.spec.ts (55%)
create mode 100644 src/frontend/tests/end-to-end/langflowShortcuts.spec.ts
rename src/frontend/tests/{onlyFront => end-to-end}/nestedComponent.spec.ts (80%)
create mode 100644 src/frontend/tests/end-to-end/python_api_generation.spec.ts
rename src/frontend/tests/{onlyFront => end-to-end}/saveComponents.spec.ts (73%)
rename src/frontend/tests/{onlyFront => end-to-end}/toggleComponent.spec.ts (67%)
create mode 100644 src/frontend/tests/end-to-end/tweaks_test.spec.ts
create mode 100644 src/frontend/tests/globalTeardown.ts
delete mode 100644 src/frontend/tests/onlyFront/dropdownComponent.spec.ts
delete mode 100644 src/frontend/tests/onlyFront/group.spec.ts
delete mode 100644 src/frontend/tests/onlyFront/intComponent.spec.ts
delete mode 100644 src/frontend/tests/onlyFront/login.spec.ts
create mode 100644 tests/data/ChatInputTest.json
create mode 100644 tests/data/TwoOutputsTest.json
create mode 100644 tests/test_data_components.py
delete mode 100644 tests/test_embeddings_template.py
create mode 100644 tests/test_files.py
create mode 100644 tests/test_helper_components.py
create mode 100644 tests/test_initial_setup.py
delete mode 100644 tests/test_prompts_template.py
create mode 100644 tests/test_record.py
delete mode 100644 tests/test_websocket.py
create mode 100644 tests/text_experimental_components.py
diff --git a/.dockerignore b/.dockerignore
index 130ca4c2c..7cae75457 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,6 +1,6 @@
.venv/
**/aws
-# node_modules
+node_modules
**/node_modules/
dist/
**/build/
diff --git a/.env.example b/.env.example
index 29a7259b0..26f6e3a29 100644
--- a/.env.example
+++ b/.env.example
@@ -56,6 +56,13 @@ LANGFLOW_REMOVE_API_KEYS=
# LANGFLOW_REDIS_CACHE_EXPIRE (default: 3600)
LANGFLOW_CACHE_TYPE=
+
+# Set AUTO_LOGIN to false if you want to disable auto login
+# and use the login form to login. LANGFLOW_SUPERUSER and LANGFLOW_SUPERUSER_PASSWORD
+# must be set if AUTO_LOGIN is set to false
+# Values: true, false
+LANGFLOW_AUTO_LOGIN=
+
# Superuser username
# Example: LANGFLOW_SUPERUSER=admin
LANGFLOW_SUPERUSER=
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index d4eaafb95..c78ba4c71 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -14,7 +14,7 @@ on:
- "src/backend/**"
env:
- POETRY_VERSION: "1.7.0"
+ POETRY_VERSION: "1.8.2"
jobs:
lint:
@@ -22,7 +22,6 @@ jobs:
strategy:
matrix:
python-version:
- - "3.9"
- "3.10"
- "3.11"
steps:
@@ -32,12 +31,15 @@ jobs:
pipx install poetry==$POETRY_VERSION
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
+ id: setup-python
with:
python-version: ${{ matrix.python-version }}
cache: poetry
- - name: Install dependencies
+ - name: Install Python dependencies
run: |
+ poetry env use ${{ matrix.python-version }}
poetry install
+ if: ${{ steps.setup-python.outputs.cache-hit != 'true' }}
- name: Analysing the code with our lint
run: |
make lint
diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release-base.yml
similarity index 86%
rename from .github/workflows/pre-release.yml
rename to .github/workflows/pre-release-base.yml
index 58bdd219e..60872def3 100644
--- a/.github/workflows/pre-release.yml
+++ b/.github/workflows/pre-release-base.yml
@@ -1,4 +1,4 @@
-name: pre-release
+name: Langflow Base Pre-release
on:
pull_request:
@@ -11,7 +11,7 @@ on:
workflow_dispatch:
env:
- POETRY_VERSION: "1.5.1"
+ POETRY_VERSION: "1.8.2"
jobs:
if_release:
@@ -27,7 +27,7 @@ jobs:
python-version: "3.10"
cache: "poetry"
- name: Build project for distribution
- run: make build
+ run: make build base=true
- name: Check Version
id: check-version
run: |
@@ -46,7 +46,7 @@ jobs:
env:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
run: |
- poetry publish
+ make publish base=true
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
@@ -61,5 +61,6 @@ jobs:
with:
context: .
push: true
- file: ./build_and_push.Dockerfile
- tags: logspace/langflow:${{ steps.check-version.outputs.version }}
+ file: ./build_and_push_base.Dockerfile
+ tags: |
+ logspace/langflow:base-${{ steps.check-version.outputs.version }}
diff --git a/.github/workflows/pre-release-langflow.yml b/.github/workflows/pre-release-langflow.yml
new file mode 100644
index 000000000..57f3b3e1a
--- /dev/null
+++ b/.github/workflows/pre-release-langflow.yml
@@ -0,0 +1,70 @@
+name: Langflow Pre-release
+
+on:
+ pull_request:
+ types:
+ - closed
+ branches:
+ - dev
+ paths:
+ - "pyproject.toml"
+ workflow_dispatch:
+ workflow_run:
+ workflows: ["Langflow Base Pre-release"]
+ types: [completed]
+ branches: [dev]
+
+env:
+ POETRY_VERSION: "1.8.2"
+
+jobs:
+ if_release:
+ if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }}
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Install poetry
+ run: pipx install poetry==$POETRY_VERSION
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ cache: "poetry"
+ - name: Build project for distribution
+ run: make build main=true
+ - name: Check Version
+ id: check-version
+ run: |
+ echo version=$(poetry version --short) >> $GITHUB_OUTPUT
+ - name: Create Release
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "dist/*"
+ token: ${{ secrets.GITHUB_TOKEN }}
+ draft: false
+ generateReleaseNotes: true
+ prerelease: true
+ tag: v${{ steps.check-version.outputs.version }}
+ commit: dev
+ - name: Publish to PyPI
+ env:
+ POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
+ run: |
+ make publish main=true
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ - name: Login to Docker Hub
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Build and push
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ push: true
+ file: ./build_and_push.Dockerfile
+ tags: |
+ logspace/langflow:${{ steps.check-version.outputs.version }}
diff --git a/.github/workflows/test.yml b/.github/workflows/python_test.yml
similarity index 76%
rename from .github/workflows/test.yml
rename to .github/workflows/python_test.yml
index 10ab9b324..a04a208f3 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/python_test.yml
@@ -15,7 +15,7 @@ on:
- "src/backend/**"
env:
- POETRY_VERSION: "1.5.0"
+ POETRY_VERSION: "1.8.2"
jobs:
build:
@@ -33,11 +33,15 @@ jobs:
run: pipx install poetry==$POETRY_VERSION
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
+ id: setup-python
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- - name: Install dependencies
- run: poetry install
+ - name: Install Python dependencies
+ run: |
+ poetry env use ${{ matrix.python-version }}
+ poetry install
+ if: ${{ steps.setup-python.outputs.cache-hit != 'true' }}
- name: Run unit tests
run: |
make tests
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 21d8d27eb..8004618f6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -10,7 +10,7 @@ on:
- "pyproject.toml"
env:
- POETRY_VERSION: "1.5.1"
+ POETRY_VERSION: "1.8.2"
jobs:
if_release:
diff --git a/.github/workflows/typescript_test.yml b/.github/workflows/typescript_test.yml
new file mode 100644
index 000000000..a75327785
--- /dev/null
+++ b/.github/workflows/typescript_test.yml
@@ -0,0 +1,149 @@
+name: Run Frontend Tests
+
+on:
+ pull_request:
+ paths:
+ - "src/frontend/**"
+
+env:
+ POETRY_VERSION: "1.8.2"
+ NODE_VERSION: "21"
+ PYTHON_VERSION: "3.10"
+ # Define the directory where Playwright browsers will be installed.
+ # Adjust if your project uses a different path.
+ PLAYWRIGHT_BROWSERS_PATH: "ms-playwright"
+
+jobs:
+ setup-and-test:
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ shardIndex: [1, 2, 3, 4]
+ shardTotal: [4]
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v3
+ id: setup-node
+ with:
+ node-version: ${{ env.NODE_VERSION }}
+ cache: "npm"
+
+ - name: Install Node.js dependencies
+ run: |
+ cd src/frontend
+ npm ci
+ if: ${{ steps.setup-node.outputs.cache-hit != 'true' }}
+
+ # Attempt to restore the correct Playwright browser binaries based on the
+ # currently installed version of Playwright (The browser binary versions
+ # may change with Playwright versions).
+ # Note: Playwright's cache directory is hard coded because that's what it
+ # says to do in the docs. There doesn't appear to be a command that prints
+ # it out for us.
+ # - uses: actions/cache@v4
+ # id: playwright-cache
+ # with:
+ # path: ${{ env.PLAYWRIGHT_BROWSERS_PATH }}
+ # key: "${{ runner.os }}-playwright-${{ hashFiles('src/frontend/package-lock.json') }}"
+ # # As a fallback, if the Playwright version has changed, try use the
+ # # most recently cached version. There's a good chance that at least one
+ # # of the browser binary versions haven't been updated, so Playwright can
+ # # skip installing that in the next step.
+ # # Note: When falling back to an old cache, `cache-hit` (used below)
+ # # will be `false`. This allows us to restore the potentially out of
+ # # date cache, but still let Playwright decide if it needs to download
+ # # new binaries or not.
+ # restore-keys: |
+ # ${{ runner.os }}-playwright-
+ - name: Cache playwright binaries
+ uses: actions/cache@v4
+ id: playwright-cache
+ with:
+ path: |
+ ~/.cache/ms-playwright
+ key: ${{ runner.os }}-playwright-${{ hashFiles('src/frontend/package-lock.json') }}
+ - name: Install Frontend dependencies
+ run: |
+ cd src/frontend
+ npm ci
+
+ - name: Install Playwright's browser binaries
+ run: |
+ cd src/frontend
+ npx playwright install --with-deps
+ if: steps.playwright-cache.outputs.cache-hit != 'true'
+ - name: Install Playwright's dependencies
+ run: |
+ cd src/frontend
+ npx playwright install-deps
+ if: steps.playwright-cache.outputs.cache-hit != 'true'
+
+ # If the Playwright browser binaries weren't able to be restored, we tell
+ # playwright to install everything for us.
+ # - name: Install Playwright's dependencies
+ # if: steps.playwright-cache.outputs.cache-hit != 'true'
+ # run: npx playwright install --with-deps
+
+ - name: Install Poetry
+ run: pipx install "poetry==${{ env.POETRY_VERSION }}"
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ id: setup-python
+ with:
+ python-version: ${{ env.PYTHON_VERSION }}
+ cache: "poetry"
+
+ - name: Install Python dependencies
+ run: |
+ poetry env use ${{ env.PYTHON_VERSION }}
+ poetry install
+ if: ${{ steps.setup-python.outputs.cache-hit != 'true' }}
+
+ - name: Run Playwright Tests
+ run: |
+ cd src/frontend
+ npx playwright test --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
+
+ - name: Upload blob report to GitHub Actions Artifacts
+ if: always()
+ uses: actions/upload-artifact@v4
+ with:
+ name: blob-report-${{ matrix.shardIndex }}
+ path: src/frontend/blob-report
+ retention-days: 1
+
+ merge-reports:
+ needs: setup-and-test
+ runs-on: ubuntu-latest
+ if: always()
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ env.NODE_VERSION }}
+
+ - name: Download blob reports from GitHub Actions Artifacts
+ uses: actions/download-artifact@v4
+ with:
+ path: all-blob-reports
+ pattern: blob-report-*
+ merge-multiple: true
+
+ - name: Merge into HTML Report
+ run: |
+ npx playwright merge-reports --reporter html ./all-blob-reports
+
+ - name: Upload HTML report
+ uses: actions/upload-artifact@v4
+ with:
+ name: html-report--attempt-${{ github.run_attempt }}
+ path: playwright-report
+ retention-days: 14
diff --git a/.gitignore b/.gitignore
index 744817491..0d74c2208 100644
--- a/.gitignore
+++ b/.gitignore
@@ -258,5 +258,10 @@ langflow.db
/tmp/*
src/backend/langflow/frontend/
+src/backend/base/langflow/frontend/
.docker
scratchpad*
+chroma*/*
+stuff/*
+src/frontend/playwright-report/index.html
+*.bak
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index a8229b155..962599449 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -13,10 +13,32 @@
"7860",
"--reload",
"--log-level",
- "debug"
+ "debug",
+ "--loop",
+ "asyncio"
],
"jinja": true,
- "justMyCode": true,
+ "justMyCode": false,
+ "env": {
+ "LANGFLOW_LOG_LEVEL": "debug"
+ },
+ "envFile": "${workspaceFolder}/.env"
+ },
+ {
+ "name": "Debug CLI",
+ "type": "python",
+ "request": "launch",
+ "module": "langflow",
+ "args": [
+ "run",
+ "--path",
+ "${workspaceFolder}/src/backend/langflow/frontend"
+ ],
+ "jinja": true,
+ "justMyCode": false,
+ "env": {
+ "LANGFLOW_LOG_LEVEL": "debug"
+ },
"envFile": "${workspaceFolder}/.env"
},
{
diff --git a/Makefile b/Makefile
index 7f0f998ae..e5d886652 100644
--- a/Makefile
+++ b/Makefile
@@ -1,12 +1,31 @@
.PHONY: all init format lint build build_frontend install_frontend run_frontend run_backend dev help tests coverage
all: help
+log_level ?= debug
+host ?= 0.0.0.0
+port ?= 7860
+env ?= .env
+open_browser ?= true
+path = src/backend/base/langflow/frontend
+
+setup_poetry:
+ pipx install poetry
+
+add:
+ @echo 'Adding dependencies'
+ifdef devel
+ cd src/backend/base && poetry add --group dev $(devel)
+endif
+
+ifdef main
+ poetry add $(main)
+endif
+
+ifdef base
+ cd src/backend/base && poetry add $(base)
+endif
init:
- @echo 'Installing pre-commit hooks'
- git config core.hooksPath .githooks
- @echo 'Making pre-commit hook executable'
- chmod +x .githooks/pre-commit
@echo 'Installing backend dependencies'
make install_backend
@echo 'Installing frontend dependencies'
@@ -32,12 +51,15 @@ format:
lint:
make install_backend
- poetry run mypy src/backend/langflow
+ poetry run mypy --namespace-packages -p "langflow"
poetry run ruff . --fix
install_frontend:
cd src/frontend && npm install
+install_frontendci:
+ cd src/frontend && npm ci
+
install_frontendc:
cd src/frontend && rm -rf node_modules package-lock.json && npm install
@@ -47,22 +69,57 @@ run_frontend:
tests_frontend:
ifeq ($(UI), true)
- cd src/frontend && ./run-tests.sh --ui
+ cd src/frontend && npx playwright test --ui --project=chromium
else
- cd src/frontend && ./run-tests.sh
+ cd src/frontend && npx playwright test --project=chromium
endif
run_cli:
- poetry run langflow run --path src/frontend/build
+ @echo 'Running the CLI'
+ @make install_frontend > /dev/null
+ @echo 'Install backend dependencies'
+ @make install_backend > /dev/null
+ @echo 'Building the frontend'
+ @make build_frontend > /dev/null
+ifdef env
+ @make start env=$(env) host=$(host) port=$(port) log_level=$(log_level)
+else
+ @make start host=$(host) port=$(port) log_level=$(log_level)
+endif
run_cli_debug:
- poetry run langflow run --path src/frontend/build --log-level debug
+ @echo 'Running the CLI in debug mode'
+ @make install_frontend > /dev/null
+ @echo 'Building the frontend'
+ @make build_frontend > /dev/null
+ @echo 'Install backend dependencies'
+ @make install_backend > /dev/null
+ifdef env
+ @make start env=$(env) host=$(host) port=$(port) log_level=debug
+else
+ @make start host=$(host) port=$(port) log_level=debug
+endif
+
+start:
+ @echo 'Running the CLI'
+
+ifeq ($(open_browser),false)
+ @make install_backend && poetry run langflow run --path $(path) --log-level $(log_level) --host $(host) --port $(port) --env-file $(env) --no-open-browser
+else
+ @make install_backend && poetry run langflow run --path $(path) --log-level $(log_level) --host $(host) --port $(port) --env-file $(env)
+endif
+
+
setup_devcontainer:
make init
make build_frontend
poetry run langflow --path src/frontend/build
+setup_env:
+ @sh ./scripts/setup/update_poetry.sh 1.8.2
+ @sh ./scripts/setup/setup_env.sh
+
frontend:
make install_frontend
make run_frontend
@@ -72,38 +129,67 @@ frontendc:
make run_frontend
install_backend:
- poetry install --extras deploy
+ @echo 'Setting up the environment'
+ @make setup_env
+ @echo 'Installing backend dependencies'
+ @poetry install --extras deploy
backend:
make install_backend
@-kill -9 `lsof -t -i:7860`
-ifeq ($(login),1)
- @echo "Running backend without autologin";
- poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser --env-file .env
+ifdef login
+ @echo "Running backend autologin is $(login)";
+ LANGFLOW_AUTO_LOGIN=$(login) poetry run uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --env-file .env --loop asyncio
else
- @echo "Running backend with autologin";
- LANGFLOW_AUTO_LOGIN=True poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser --env-file .env
+ @echo "Running backend respecting the .env file";
+ poetry run uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --env-file .env --loop asyncio
endif
build_and_run:
- echo 'Removing dist folder'
+ @echo 'Removing dist folder'
+ @make setup_env
rm -rf dist
- make build && poetry run pip install dist/*.tar.gz && poetry run langflow run
+ rm -rf src/backend/base/dist
+ make build
+ poetry run pip install dist/*.tar.gz
+ poetry run langflow run
build_and_install:
- echo 'Removing dist folder'
+ @echo 'Removing dist folder'
rm -rf dist
- make build && poetry run pip install dist/*.tar.gz
+ rm -rf src/backend/base/dist
+ make build && poetry run pip install dist/*.whl && pip install src/backend/base/dist/*.whl --force-reinstall
build_frontend:
cd src/frontend && CI='' npm run build
- cp -r src/frontend/build src/backend/langflow/frontend
+ cp -r src/frontend/build src/backend/base/langflow/frontend
build:
- make install_frontend
+ @echo 'Building the project'
+ @make setup_env
+ifdef base
+ make install_frontendci
make build_frontend
- poetry build --format sdist
- rm -rf src/backend/langflow/frontend
+ make build_langflow_base
+endif
+
+ifdef main
+ make build_langflow
+endif
+
+build_langflow_base:
+ cd src/backend/base && poetry build
+ rm -rf src/backend/base/langflow/frontend
+
+build_langflow_backup:
+ poetry lock && poetry build
+
+build_langflow:
+ cd ./scripts && poetry run python update_dependencies.py
+ poetry lock
+ poetry build
+ mv pyproject.toml.bak pyproject.toml
+ mv poetry.lock.bak poetry.lock
dev:
make install_frontend
@@ -115,10 +201,36 @@ else
docker compose $(if $(debug),-f docker-compose.debug.yml) up
endif
-publish:
- make build
+lock_base:
+ cd src/backend/base && poetry lock
+
+lock_langflow:
+ poetry lock
+
+lock:
+# Lock both projects (runs sequentially)
+ @echo 'Locking dependencies'
+ cd src/backend/base && poetry lock
+ poetry lock
+
+publish_base:
+ make build_langflow_base
+ cd src/backend/base && poetry publish
+
+publish_langflow:
+ make build_langflow
poetry publish
+publish:
+ @echo 'Publishing the project'
+ifdef base
+ -make publish_base
+endif
+
+ifdef main
+ -make publish_langflow
+endif
+
help:
@echo '----'
@echo 'format - run code formatters'
diff --git a/README.md b/README.md
index 1431194e1..b9680aa97 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,7 @@
# βοΈ Langflow
### Discover a simpler & smarter way to build around Foundation Models
+
# [](https://www.langflow.org)
# π¦ Installation
@@ -38,11 +39,9 @@ Once youβre done, you can export your flow as a JSON file.
Load the flow with:
```python
-from langflow import load_flow_from_json
+from langflow.load import run_flow_from_json
-flow = load_flow_from_json("path/to/flow.json")
-# Now you can use it
-flow("Hey, have you heard of Langflow?")
+results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!")
```
# π₯οΈ Command Line Interface (CLI)
diff --git a/base.Dockerfile b/base.Dockerfile
index 2293c35dd..936943cd1 100644
--- a/base.Dockerfile
+++ b/base.Dockerfile
@@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
- POETRY_VERSION=1.7.1 \
+ POETRY_VERSION=1.8.2 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root
diff --git a/build_and_push.Dockerfile b/build_and_push.Dockerfile
index f296d83aa..1c595e180 100644
--- a/build_and_push.Dockerfile
+++ b/build_and_push.Dockerfile
@@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
- POETRY_VERSION=1.7.1 \
+ POETRY_VERSION=1.8.2 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root
@@ -62,10 +62,14 @@ RUN apt-get update \
WORKDIR /app
COPY pyproject.toml poetry.lock ./
COPY src ./src
+COPY scripts ./scripts
COPY Makefile ./
COPY README.md ./
-RUN curl -sSL https://install.python-poetry.org | python3 - && make build
-
+RUN --mount=type=cache,target=/root/.cache \
+ curl -sSL https://install.python-poetry.org | python3 -
+RUN python -m pip install requests && cd ./scripts && python update_dependencies.py
+RUN $POETRY_HOME/bin/poetry lock
+RUN $POETRY_HOME/bin/poetry build
# Final stage for the application
FROM python-base as final
diff --git a/build_and_push_base.Dockerfile b/build_and_push_base.Dockerfile
new file mode 100644
index 000000000..f5092c81c
--- /dev/null
+++ b/build_and_push_base.Dockerfile
@@ -0,0 +1,91 @@
+
+
+# syntax=docker/dockerfile:1
+# Keep this syntax directive! It's used to enable Docker BuildKit
+
+# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865
+# but I try to keep it updated (see history)
+
+################################
+# PYTHON-BASE
+# Sets up all our shared environment variables
+################################
+FROM python:3.10-slim as python-base
+
+# python
+ENV PYTHONUNBUFFERED=1 \
+ # prevents python creating .pyc files
+ PYTHONDONTWRITEBYTECODE=1 \
+ \
+ # pip
+ PIP_DISABLE_PIP_VERSION_CHECK=on \
+ PIP_DEFAULT_TIMEOUT=100 \
+ \
+ # poetry
+ # https://python-poetry.org/docs/configuration/#using-environment-variables
+ POETRY_VERSION=1.8.2 \
+ # make poetry install to this location
+ POETRY_HOME="/opt/poetry" \
+ # make poetry create the virtual environment in the project's root
+ # it gets named `.venv`
+ POETRY_VIRTUALENVS_IN_PROJECT=true \
+ # do not ask any interactive question
+ POETRY_NO_INTERACTION=1 \
+ \
+ # paths
+ # this is where our requirements + virtual environment will live
+ PYSETUP_PATH="/opt/pysetup" \
+ VENV_PATH="/opt/pysetup/.venv"
+
+
+# prepend poetry and venv to path
+ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
+
+
+################################
+# BUILDER-BASE
+# Used to build deps + create our virtual environment
+################################
+FROM python-base as builder-base
+RUN apt-get update \
+ && apt-get install --no-install-recommends -y \
+ # deps for installing poetry
+ curl \
+ # deps for building python deps
+ build-essential \
+ # npm
+ npm \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN --mount=type=cache,target=/root/.cache \
+ curl -sSL https://install.python-poetry.org | python3 -
+
+# Now we need to copy the entire project into the image
+COPY pyproject.toml poetry.lock ./
+COPY src/frontend/package.json /tmp/package.json
+RUN cd /tmp && npm install
+WORKDIR /app
+COPY src/frontend ./src/frontend
+RUN rm -rf src/frontend/node_modules
+RUN cp -a /tmp/node_modules /app/src/frontend
+COPY scripts ./scripts
+COPY Makefile ./
+COPY README.md ./
+RUN cd src/frontend && npm run build
+COPY src/backend ./src/backend
+RUN cp -r src/frontend/build src/backend/base/langflow/frontend
+RUN rm -rf src/backend/base/dist
+RUN cd src/backend/base && $POETRY_HOME/bin/poetry build --format sdist
+
+# Final stage for the application
+FROM python-base as final
+
+# Copy virtual environment and built .tar.gz from builder base
+COPY --from=builder-base /app/src/backend/base/dist/*.tar.gz ./
+
+# Install the package from the .tar.gz
+RUN pip install *.tar.gz
+
+WORKDIR /app
+CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"]
diff --git a/deploy/base.Dockerfile b/deploy/base.Dockerfile
index 323663283..58fae3dab 100644
--- a/deploy/base.Dockerfile
+++ b/deploy/base.Dockerfile
@@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
- POETRY_VERSION=1.5.1 \
+ POETRY_VERSION=1.8.2 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root
diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx
index d8c6ff2f5..b07f953fa 100644
--- a/docs/docs/components/custom.mdx
+++ b/docs/docs/components/custom.mdx
@@ -70,7 +70,6 @@ The CustomComponent class serves as the foundation for creating custom component
| Key | Description |
| -------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
- | _`field_type: str`_ | The type of the field (can be any of the types supported by the _`build`_ method). |
| _`is_list: bool`_ | If the field can be a list of values, meaning that the user can manually add more inputs to the same field. |
| _`options: List[str]`_ | When defined, the field becomes a dropdown menu where a list of strings defines the options to be displayed. If the _`value`_ attribute is set to one of the options, that option becomes default. For this parameter to work, _`field_type`_ should invariably be _`str`_. |
| _`multiline: bool`_ | Defines if a string field opens a text editor. Useful for longer texts. |
@@ -78,20 +77,20 @@ The CustomComponent class serves as the foundation for creating custom component
| _`display_name: str`_ | Defines the name of the field. |
| _`advanced: bool`_ | Hide the field in the canvas view (displayed component settings only). Useful when a field is for advanced users. |
| _`password: bool`_ | To mask the input text. Useful to hide sensitive text (e.g. API keys). |
- | _`required: bool`_ | Makes the field required. |
+ | _`required: bool`_ | This is determined automatically but can be used to override the default behavior. |
| _`info: str`_ | Adds a tooltip to the field. |
| _`file_types: List[str]`_ | This is a requirement if the _`field_type`_ is _file_. Defines which file types will be accepted. For example, _json_, _yaml_ or _yml_. |
| _`range_spec: langflow.field_typing.RangeSpec`_ | This is a requirement if the _`field_type`_ is _`float`_. Defines the range of values accepted and the step size. If none is defined, the default is _`[-1, 1, 0.1]`_. |
| _`title_case: bool`_ | Formats the name of the field when _`display_name`_ is not defined. Set it to False to keep the name as you set it in the _`build`_ method. |
+ | _`refresh_button: bool`_ | If set to True a button will appear to the right of the field, and when clicked, it will call the _`update_build_config`_ method which takes in the _`build_config`_, the name of the field (_`field_name`_) and the latest value of the field (_`field_value`_). This is useful when you want to update the _`build_config`_ based on the value of the field. |
+ | _`real_time_refresh: bool`_ | If set to True, the _`update_build_config`_ method will be called every time the field value changes. |
+ | _`field_type: str`_ | You should never define this key. It is automatically set based on the type hint of the _`build`_ method. |
-
-
- Keys _`options`_ and _`value`_ can receive a method or function that returns a list of strings or a string, respectively. This is useful when you want to dynamically generate the options or the default value of a field. A refresh button will appear next to the field in the component, allowing the user to update the options or the default value.
-
-
-
+
+By using the _`update_build_config`_ method, you can update the _`build_config`_ in whatever way you want based on the value of the field or not.
+
- The CustomComponent class also provides helpful methods for specific tasks (e.g., to load and use other flows from the Langflow platform):
diff --git a/docs/docs/components/data.mdx b/docs/docs/components/data.mdx
new file mode 100644
index 000000000..94620d047
--- /dev/null
+++ b/docs/docs/components/data.mdx
@@ -0,0 +1,87 @@
+import Admonition from '@theme/Admonition';
+
+# Data
+
+### API Request
+
+This component makes HTTP requests to the specified URLs.
+
+**Params**
+
+- **URLs:** URLs to make requests to.
+- **Method:** The HTTP method to use.
+- **Headers:** The headers to send with the request.
+- **Body:** The body to send with the request (for POST, PATCH, PUT).
+- **Timeout:** The timeout to use for the request.
+
+
+
+ Use this component to make HTTP requests to external APIs or services and retrieve data.
+
+
+ Ensure that you provide valid URLs and configure the method, headers, body, and timeout appropriately.
+
+
+
+---
+
+### Directory
+
+This component recursively loads files from a directory.
+
+**Params**
+
+- **Path:** The path to the directory.
+- **Types:** File types to load. Leave empty to load all types.
+- **Depth:** Depth to search for files.
+- **Max Concurrency:** The maximum number of concurrent file loading operations.
+- **Load Hidden:** If true, hidden files will be loaded.
+- **Recursive:** If true, the search will be recursive.
+- **Silent Errors:** If true, errors will not raise an exception.
+- **Use Multithreading:** If true, use multithreading for loading files.
+
+
+
+ Use this component to load files from a directory, such as text files, JSON files, etc.
+
+
+ Ensure that you provide the correct path to the directory and configure other parameters as needed.
+
+
+
+
+---
+
+### File
+
+This component loads a generic file.
+
+**Params**
+
+- **Path:** The path to the file.
+- **Silent Errors:** If true, errors will not raise an exception.
+
+
+
+ Use this component to load a generic file, such as a text file, JSON file, etc.
+
+
+ Ensure that you provide the correct path to the file and configure other parameters as needed.
+
+
+
+---
+
+### URL
+
+This component fetches content from one or more URLs.
+
+**Params**
+
+- **URLs:** The URLs from which content will be fetched.
+
+
+
+ Ensure that you provide valid URLs and configure other parameters as needed.
+
+
diff --git a/docs/docs/components/embeddings.mdx b/docs/docs/components/embeddings.mdx
index d4ad17542..9a20e5821 100644
--- a/docs/docs/components/embeddings.mdx
+++ b/docs/docs/components/embeddings.mdx
@@ -2,19 +2,7 @@ import Admonition from "@theme/Admonition";
# Embeddings
-
-
- We appreciate your understanding as we polish our documentation β it may
- contain some rough edges. Share your feedback or report issues to help us
- improve! π οΈπ
-
-
-
-Embeddings are vector representations of text that capture the semantic meaning of the text. They are created using text embedding models and allow us to think about the text in a vector space, enabling us to perform tasks like semantic search, where we look for pieces of text that are most similar in the vector space.
-
----
-
-### BedrockEmbeddings
+### Amazon Bedrock Embeddings
Used to load [Amazon Bedrocksβs](https://aws.amazon.com/bedrock/) embedding models.
@@ -30,7 +18,7 @@ Used to load [Amazon Bedrocksβs](https://aws.amazon.com/bedrock/) embedding mo
---
-### CohereEmbeddings
+### Cohere Embeddings
Used to load [Cohereβs](https://cohere.com/) embedding models.
@@ -44,57 +32,93 @@ Used to load [Cohereβs](https://cohere.com/) embedding models.
---
-### HuggingFaceEmbeddings
+### Azure OpenAI Embeddings
+
+Generate embeddings using Azure OpenAI models.
+
+**Params**
+
+- **Azure Endpoint:** Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`
+- **Deployment Name:** The name of the deployment.
+- **API Version:** The API version to use. (Options: 2022-12-01, 2023-03-15-preview, 2023-05-15, 2023-06-01-preview, 2023-07-01-preview, 2023-08-01-preview)
+- **API Key:** The API key to access the Azure OpenAI service.
+
+---
+
+### Hugging Face API Embeddings
+
+Generate embeddings using Hugging Face Inference API models.
+
+**Params**
+
+- **API Key:** API key for accessing the Hugging Face Inference API. (Type: str)
+- **API URL:** URL of the Hugging Face Inference API. (Default: http://localhost:8080)
+- **Model Name:** Name of the model to use. (Default: BAAI/bge-large-en-v1.5)
+- **Cache Folder:** Folder path to cache Hugging Face models. (Advanced)
+- **Encode Kwargs:** Additional arguments for the encoding process. (Type: dict, Advanced)
+- **Model Kwargs:** Additional arguments for the model. (Type: dict, Advanced)
+- **Multi Process:** Whether to use multiple processes. (Default: False, Advanced)
+
+---
+
+### Hugging Face Embeddings
Used to load [HuggingFace's](https://huggingface.co) embedding models.
**Params**
-- **cache_folder:** Used to specify the folder where the embeddings will be cached. When embeddings are computed for a text, they can be stored in the cache folder so that they can be reused later without the need to recompute them. This can improve the performance of the application by avoiding redundant computations.
-
-- **encode_kwargs:** Used to pass additional keyword arguments to the encoding method of the underlying HuggingFace model. These keyword arguments can be used to customize the encoding process, such as specifying the maximum length of the input sequence or enabling truncation or padding.
-
-- **model_kwargs:** Used to customize the behavior of the model, such as specifying the model architecture, the tokenizer, or any other model-specific configuration options. By using `model_kwargs`, the user can configure the HuggingFace model according to specific needs and preferences.
-
-- **model_name:** Used to specify the name or identifier of the HuggingFace model that will be used for generating embeddings. It allows users to choose a specific pre-trained model from the Hugging Face model hub — defaults to `sentence-transformers/all-mpnet-base-v2`.
+- **Cache Folder:** Folder path to cache HuggingFace models.
+- **Encode Kwargs:** Additional arguments for the encoding process. (Type: dict)
+- **Model Kwargs:** Additional arguments for the model. (Type: dict)
+- **Model Name:** Name of the HuggingFace model to use. (Default: sentence-transformers/all-mpnet-base-v2)
+- **Multi Process:** Whether to use multiple processes. (Default: False)
---
-### OpenAIEmbeddings
+### Ollama Embeddings
+
+Generate embeddings using Ollama models.
+
+**Params**
+
+- **Ollama Model:** Name of the Ollama model to use. (Default: llama2)
+- **Ollama Base URL:** Base URL of the Ollama API. (Default: http://localhost:11434)
+- **Model Temperature:** Temperature parameter for the model. (Type: float)
+
+---
+
+### OpenAI Embeddings
Used to load [OpenAI's](https://openai.com/) embedding models.
**Params**
-- **chunk_size:** Determines the maximum size of each chunk of text that is processed for embedding. If any of the incoming text chunks exceeds `chunk_size` characters, it will be split into multiple chunks of size `chunk_size` or less before being embedded — defaults to `1000`.
-
-- **deployment:** Used to specify the deployment name or identifier of the text embedding model. It allows the user to choose a specific deployment of the model to use for embedding. When the deployment is provided, this can be useful when the user has multiple deployments of the same model with different configurations or versions — defaults to `text-embedding-ada-002`.
-
-- **embedding_ctx_length:** This parameter determines the maximum context length for the text embedding model. It specifies the number of tokens that the model considers when generating embeddings for a piece of text — defaults to `8191` (this means that the model will consider up to 8191 tokens when generating embeddings).
-
-- **max_retries:** Determines the maximum number of times to retry a request if the model provider returns an error from their API — defaults to `6`.
-
-- **model:** Defines which pre-trained text embedding model to use — defaults to `text-embedding-ada-002`.
-
-- **openai_api_base:** Refers to the base URL for the Azure OpenAI resource. It is used to configure the API to connect to the Azure OpenAI service. The base URL can be found in the Azure portal under the user Azure OpenAI resource.
-
-- **openai_api_key:** Is used to authenticate and authorize access to the OpenAI service.
-
-- **openai_api_type:** Is used to specify the type of OpenAI API being used, either the regular OpenAI API or the Azure OpenAI API. This parameter allows the `OpenAIEmbeddings` class to connect to the appropriate API service.
-
-- **openai_api_version:** Is used to specify the version of the OpenAI API being used. This parameter allows the `OpenAIEmbeddings` class to connect to the appropriate version of the OpenAI API service.
-
-- **openai_organization:** Is used to specify the organization associated with the OpenAI API key. If not provided, the default organization associated with the API key will be used.
-
-- **openai_proxy:** Proxy enables better budgeting and cost management for making OpenAI API calls, including more transparency into pricing.
-
-- **request_timeout:** Used to specify the maximum amount of time, in milliseconds, to wait for a response from the OpenAI API when generating embeddings for a given text.
-
-- **tiktoken_model_name:** Used to count the number of tokens in documents to constrain them to be under a certain limit. By default, when set to None, this will be the same as the embedding model name.
+- **OpenAI API Key:** The API key to use for accessing the OpenAI API. (Type: str)
+- **Default Headers:** Default headers for the HTTP requests. (Type: Dict[str, str], Optional)
+- **Default Query:** Default query parameters for the HTTP requests. (Type: NestedDict, Optional)
+- **Allowed Special:** Special tokens allowed for processing. (Type: List[str], Default: [])
+- **Disallowed Special:** Special tokens disallowed for processing. (Type: List[str], Default: ["all"])
+- **Chunk Size:** Chunk size for processing. (Type: int, Default: 1000)
+- **Client:** HTTP client for making requests. (Type: Any, Optional)
+- **Deployment:** Deployment name for the model. (Type: str, Default: "text-embedding-3-small")
+- **Embedding Context Length:** Length of embedding context. (Type: int, Default: 8191)
+- **Max Retries:** Maximum number of retries for failed requests. (Type: int, Default: 6)
+- **Model:** Name of the model to use. (Type: str, Default: "text-embedding-3-small")
+- **Model Kwargs:** Additional keyword arguments for the model. (Type: NestedDict, Optional)
+- **OpenAI API Base:** Base URL of the OpenAI API. (Type: str, Optional)
+- **OpenAI API Type:** Type of the OpenAI API. (Type: str, Optional)
+- **OpenAI API Version:** Version of the OpenAI API. (Type: str, Optional)
+- **OpenAI Organization:** Organization associated with the API key. (Type: str, Optional)
+- **OpenAI Proxy:** Proxy server for the requests. (Type: str, Optional)
+- **Request Timeout:** Timeout for the HTTP requests. (Type: float, Optional)
+- **Show Progress Bar:** Whether to show a progress bar for processing. (Type: bool, Default: False)
+- **Skip Empty:** Whether to skip empty inputs. (Type: bool, Default: False)
+- **TikToken Enable:** Whether to enable TikToken. (Type: bool, Default: True)
+- **TikToken Model Name:** Name of the TikToken model. (Type: str, Optional)
---
-### VertexAIEmbeddings
+### VertexAI Embeddings
Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) [Embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings).
@@ -113,11 +137,3 @@ Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP).
- **top_p:** Tokens are selected from most probable to least until the sum of their probabilities exceeds this value — defaults to `0.95`.
- **tuned_model_name:** The name of a tuned model. If provided, model_name is ignored.
- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can help debug and understand the chain's behavior. If set to False, it will suppress the verbose output — defaults to `False`.
-
-### OllamaEmbeddings
-
-Used to load [Ollama's](https://ollama.ai/) embedding models. Wrapper around LangChain's [Ollama API](https://python.langchain.com/docs/integrations/text_embedding/ollama).
-
-- **model** The name of the Ollama model to use — defaults to `llama2`.
-- **base_url** The base URL for the Ollama API — defaults to `http://localhost:11434`.
-- **temperature** Tunes the degree of randomness in text generations. Should be a non-negative value — defaults to `0`.
diff --git a/docs/docs/components/experimental.mdx b/docs/docs/components/experimental.mdx
new file mode 100644
index 000000000..d81f28ca3
--- /dev/null
+++ b/docs/docs/components/experimental.mdx
@@ -0,0 +1,250 @@
+import Admonition from '@theme/Admonition';
+
+# Experimental
+
+Experimental components are those currently in a beta phase. This means they have undergone initial development and testing but have not yet reached a stable or fully supported status. Users are encouraged to explore these components, provide feedback, and report any issues encountered during their usage.
+
+### Clear Message History Component
+
+This component is designed to clear the message history associated with a specific session ID.
+
+**Beta:** This component is currently in beta.
+
+**Parameters**
+
+- **Session ID:**
+ - **Display Name:** Session ID
+ - **Info:** The session ID to clear the message history.
+
+**Usage**
+
+To use this component, provide the session ID for which you want to clear the message history.
+
+---
+
+### Extract Key From Record
+
+This component extracts specified keys from a record.
+
+**Parameters**
+
+- **Record:**
+ - **Display Name:** Record
+ - **Info:** The record from which to extract the keys.
+
+- **Keys:**
+ - **Display Name:** Keys
+ - **Info:** The keys to extract from the record.
+
+- **Silent Errors:**
+ - **Display Name:** Silent Errors
+ - **Info:** If True, errors will not be raised.
+ - **Advanced:** True
+
+**Usage**
+
+To use this component, provide the record from which you want to extract keys, specify the keys to extract, and optionally set whether to raise errors for missing keys.
+
+---
+
+### Flow as Tool
+
+This component constructs a Tool from a function that runs the loaded Flow.
+
+**Parameters**
+
+- **Flow Name:**
+ - **Display Name:** Flow Name
+ - **Info:** The name of the flow to run.
+ - **Options:** List of available flow names.
+ - **Real-time Refresh:** True
+ - **Refresh Button:** True
+
+- **Name:**
+ - **Display Name:** Name
+ - **Description:** The name of the tool.
+
+- **Description:**
+ - **Display Name:** Description
+ - **Description:** The description of the tool.
+
+- **Return Direct:**
+ - **Display Name:** Return Direct
+ - **Description:** Return the result directly from the Tool.
+ - **Advanced:** True
+
+**Usage**
+
+To use this component, select the desired flow from the available options, provide a name and description for the tool, and specify whether to return the result directly from the tool.
+
+---
+
+### Listen
+
+This component listens for a notification.
+
+**Parameters**
+
+- **Name:**
+ - **Display Name:** Name
+ - **Info:** The name of the notification to listen for.
+
+**Usage**
+
+To use this component, specify the name of the notification to listen for.
+
+---
+
+### List Flows
+
+This component lists all available flows.
+
+**Usage**
+
+To use this component, simply call it without any parameters.
+
+---
+
+### Merge Records
+
+**Parameters**
+
+- **Records:**
+ - **Display Name:** Records
+
+**Usage**
+
+To use this component, provide a list of records to merge.
+
+---
+
+### Notify
+
+This component generates a notification to the Get Notified component.
+
+**Parameters**
+
+- **Name:**
+ - **Display Name:** Name
+ - **Info:** The name of the notification.
+
+- **Record:**
+ - **Display Name:** Record
+ - **Info:** The record to store.
+
+- **Append:**
+ - **Display Name:** Append
+ - **Info:** If True, the record will be appended to the notification.
+
+**Usage**
+
+To use this component, specify the name of the notification, provide an optional record to store, and indicate whether to append the record to the notification.
+
+---
+
+### Run Flow
+
+This component runs a flow.
+
+**Parameters**
+
+- **Input Value:**
+ - **Display Name:** Input Value
+ - **Multiline:** True
+
+- **Flow Name:**
+ - **Display Name:** Flow Name
+ - **Info:** The name of the flow to run.
+ - **Options:** List of available flow names.
+ - **Refresh Button:** True
+
+- **Tweaks:**
+ - **Display Name:** Tweaks
+ - **Info:** Tweaks to apply to the flow.
+
+**Usage**
+
+To use this component, provide the input value, specify the flow name to run, and optionally provide tweaks to apply to the flow.
+
+---
+
+### Runnable Executor
+
+This component executes a runnable.
+
+**Parameters**
+
+- **Input Key:**
+ - **Display Name:** Input Key
+ - **Info:** The key to use for the input.
+
+- **Inputs:**
+ - **Display Name:** Inputs
+ - **Info:** The inputs to pass to the runnable.
+
+- **Runnable:**
+ - **Display Name:** Runnable
+ - **Info:** The runnable to execute.
+
+- **Output Key:**
+ - **Display Name:** Output Key
+ - **Info:** The key to use for the output.
+
+**Usage**
+
+To use this component, specify the input key, provide the inputs to pass to the runnable, select the runnable to execute, and optionally specify the output key.
+
+---
+
+### SQL Executor
+
+ This component executes an SQL query.
+
+**Parameters**
+
+- **Database URL:**
+ - **Display Name:** Database URL
+ - **Info:** The URL of the database.
+
+- **Include Columns:**
+ - **Display Name:** Include Columns
+ - **Info:** Include columns in the result.
+
+- **Passthrough:**
+ - **Display Name:** Passthrough
+ - **Info:** If an error occurs, return the query instead of raising an exception.
+
+- **Add Error:**
+ - **Display Name:** Add Error
+ - **Info:** Add the error to the result.
+
+**Usage**
+
+To use this component, provide the SQL query, specify the database URL, and optionally configure include columns, passthrough, and add error settings.
+
+---
+
+### SubFlow
+
+This component dynamically generates a component from a flow. The output is a list of records with keys 'result' and 'message'.
+
+**Parameters**
+
+- **Input Value:**
+ - **Display Name:** Input Value
+ - **Multiline:** True
+
+- **Flow Name:**
+ - **Display Name:** Flow Name
+ - **Info:** The name of the flow to run.
+ - **Options:** List of available flow names.
+ - **Real Time Refresh:** True
+ - **Refresh Button:** True
+
+- **Tweaks:**
+ - **Display Name:** Tweaks
+ - **Info:** Tweaks to apply to the flow.
+
+**Usage**
+
+To use this component, specify the flow name and provide any necessary tweaks to apply to the flow.
diff --git a/docs/docs/components/helpers.mdx b/docs/docs/components/helpers.mdx
new file mode 100644
index 000000000..d965761aa
--- /dev/null
+++ b/docs/docs/components/helpers.mdx
@@ -0,0 +1,127 @@
+import Admonition from '@theme/Admonition';
+
+# Helpers
+
+### Chat Memory
+
+This component retrieves stored chat messages given a specific Session ID.
+
+**Params**
+
+- **Sender Type:** Choose the sender type from options like "Machine", "User", or "Machine and User".
+- **Sender Name:** (Optional) The name of the sender.
+- **Number of Messages:** Number of messages to retrieve.
+- **Session ID:** The Session ID of the chat history.
+- **Order:** Choose the order of the messages, either "Ascending" or "Descending".
+- **Record Template:** (Optional) Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.
+
+---
+
+### Combine Text
+
+This component concatenates two text sources into a single text chunk using a specified delimiter.
+
+**Params**
+
+- **First Text:** The first text input to concatenate.
+- **Second Text:** The second text input to concatenate.
+- **Delimiter:** A string used to separate the two text inputs. Defaults to a whitespace.
+
+---
+
+### Create Record
+
+This component dynamically creates a Record with a specified number of fields.
+
+**Params**
+
+- **Number of Fields:** Number of fields to be added to the record.
+- **Text Key:** Key to be used as text.
+
+---
+
+### Custom Component
+
+Use this component as a template to create your own custom component.
+
+**Params**
+
+- **Parameter:** Describe the purpose of this parameter.
+
+
+
+ Customize the build_config and build methods according to your requirements.
+
+
+
+Learn more about [Custom Component](http://docs.langflow.org/components/custom).
+
+---
+
+### Documents to Records
+
+Convert LangChain Documents into Records.
+
+**Parameters**
+
+- **Documents:** Documents to be converted into Records.
+
+---
+
+### ID Generator
+
+Generates a unique ID.
+
+**Parameters**
+
+- **Value:** Unique ID generated.
+
+---
+
+### Message History
+
+Retrieves stored chat messages given a specific Session ID.
+
+**Parameters**
+
+- **Sender Type:** Options for the sender type.
+- **Sender Name:** Sender name.
+- **Number of Messages:** Number of messages to retrieve.
+- **Session ID:** Session ID of the chat history.
+- **Order:** Order of the messages.
+
+---
+
+### Records to Text
+
+Convert Records into plain text following a specified template.
+
+**Parameters**
+
+- **Records:** The records to convert to text.
+- **Template:** The template to use for formatting the records. It can contain the keys `{text}`, `{data}` or any other key in the Record.
+
+---
+
+### Split Text
+
+Split text into chunks of a specified length.
+
+**Parameters**
+
+- **Texts:** Texts to split.
+- **Separators:** The characters to split on. Defaults to [" "].
+- **Max Chunk Size:** The maximum length (in number of characters) of each chunk.
+- **Chunk Overlap:** The amount of character overlap between chunks.
+- **Recursive:** Whether to split recursively.
+
+---
+
+### Update Record
+
+Update Record with text-based key/value pairs, similar to updating a Python dictionary.
+
+**Parameters**
+
+- **Record:** The record to update.
+- **New Data:** The new data to update the record with.
diff --git a/docs/docs/components/inputs.mdx b/docs/docs/components/inputs.mdx
new file mode 100644
index 000000000..fe8804e43
--- /dev/null
+++ b/docs/docs/components/inputs.mdx
@@ -0,0 +1,164 @@
+import Admonition from "@theme/Admonition";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+# Inputs
+
+### Chat Input
+
+This component is designed to get user input from the chat.
+
+**Params**
+
+- **Sender Type:** specifies the sender type. Defaults to _`"User"`_. Options are _`"Machine"`_ and _`"User"`_.
+
+- **Sender Name:** specifies the name of the sender. Defaults to _`"User"`_.
+
+- **Message:** specifies the message text. It is a multiline text input.
+
+- **Session ID:** specifies the session ID of the chat history. If provided, the message will be saved in the Message History.
+
+
+
+ If _`As Record`_ is _`true`_ and the _`Message`_ is a _`Record`_, the data
+ of the _`Record`_ will be updated with the _`Sender`_, _`Sender Name`_, and
+ _`Session ID`_.
+
+
+
+When you get it from the sidebar, it will look like the image below but that is because some fields are in the advanced section.
+
+
+
+If you expose all its fields, it will look like the image below.
+
+
+
+One key capability of the Chat Input component is how it transforms the Interaction Panel into a chat window. This feature is particularly useful for scenarios where user input is required to initiate or influence the flow.
+
+
+
+---
+
+### Prompt
+
+Create a prompt template with dynamic variables. This is a very useful component for structuring prompts and passing dynamic data to a language model.
+
+**Parameters**
+
+- **Template:** the template for the prompt. This field allows you to create other fields dynamically by using curly brackets `{}`. For example, if you have a template like this: _`"Hello {name}, how are you?"`_, a new field called _`name`_ will be created.
+
+
+
+ Prompt variables can be created with any chosen name inside curly brackets,
+ e.g. `{variable_name}`
+
+
+
+Here is how it looks when you get it from the sidebar.
+
+
+
+And here when you add a Template with the value _`Hello {name}, how are you?`_.
+
+
+
+---
+
+### Text Input
+
+This component is designed for simple text input, allowing users to pass textual data to subsequent components in the workflow. It's particularly useful for scenarios where a brief user input is required to initiate or influence the flow.
+
+**Params**
+
+- **Value:** Specifies the text input value. This is where the user can input the text data that will be passed to the next component in the sequence. If no value is provided, it defaults to an empty string.
+- **Record Template:** Specifies how a Record should be converted into Text.
+
+
+
+ The `TextInput` component serves as a straightforward means for setting Text
+ input values in the chat window. It ensures that textual data can be
+ seamlessly passed to subsequent components in the flow.
+
+
+
+It should look like this when dropped directly from the sidebar.
+
+
+
+And when you expose all its fields, it will look like the image below.
+
+The **Record Template** field is used to specify how a Record should be converted into Text. This is particularly useful when you want to extract specific information from a Record and pass it as text to the next component in the sequence.
+
+For example, if you have a Record with the following structure:
+
+```json
+{
+ "name": "John Doe",
+ "age": 30,
+ "email": "johndoe@email.com"
+}
+```
+
+You can use a template like this: _`"Name: {name}, Age: {age}"`_ to convert the Record into a text string like this: _`"Name: John Doe, Age: 30"`_, and if you pass more than one Record, the text will be concatenated with a new line separator.
+
+
+
+The Text Input component gives you the possibility to add an Input field on the Interaction Panel. This is useful because it allows you to define parameters while running and testing your flow.
+
+
diff --git a/docs/docs/components/memories.mdx b/docs/docs/components/memories.mdx
index 3bf9a957c..b92538134 100644
--- a/docs/docs/components/memories.mdx
+++ b/docs/docs/components/memories.mdx
@@ -12,6 +12,26 @@ Memory is a concept in chat-based applications that allows the system to remembe
---
+### MessageHistory
+
+This component is designed to retrieve stored messages based on various filters such as sender type, sender name, session ID, and a specific file path where messages are stored. It allows for a flexible retrieval of chat history, providing insights into past interactions.
+
+**Params**
+
+- **Sender Type:** (Optional) Specifies the type of the sender. Options are _`"Machine"`_, _`"User"`_, or _`"Machine and User"`_. Filters the messages by the type of the sender.
+
+- **Sender Name:** (Optional) Specifies the name of the sender. Filters the messages by the name of the sender.
+
+- **Session ID:** (Optional) Specifies the session ID of the chat history. Filters the messages belonging to a specific session.
+
+- **Number of Messages:** Specifies the number of messages to retrieve. Defaults to _`5`_. Determines how many recent messages from the chat history to fetch.
+
+
+
+ The component retrieves messages based on the provided criteria, including the specific file path for stored messages. If no specific criteria are provided, it will return the most recent messages up to the specified limit. This component can be used to review past interactions and analyze the flow of conversations.
+
+
+
### ConversationBufferMemory
The `ConversationBufferMemory` component is a type of memory system that plainly stores the last few inputs and outputs of a conversation.
@@ -27,7 +47,7 @@ The `ConversationBufferMemory` component is a type of memory system that plainly
### ConversationBufferWindowMemory
-`ConversationBufferWindowMemory` is a variation of the `ConversationBufferMemory` that maintains a list of the recent interactions in a conversation. It only keeps the last K interactions in memory, which can be useful for maintaining a sliding window of the most recent interactions without letting the buffer get too large.
+`ConversationBufferWindowMemory` is a variation of the `ConversationBufferMemory` that maintains a list of the recent interactions in a conversation. It only keeps the last K interactions in memory, which can be useful for maintaining a sliding window of the most recent interactions without letting the buffer get too large.
**Params**
@@ -72,7 +92,7 @@ The `ConversationEntityMemory` component incorporates intricate memory structure
### ConversationSummaryMemory
-The `ConversationSummaryMemory` is a memory component that creates a summary of the conversation over time. It condenses information from the conversation and stores the current summary in memory. It is particularly useful for longer conversations where keeping the entire message history in the prompt would take up too many tokens.
+The `ConversationSummaryMemory` is a memory component that creates a summary of the conversation over time. It condenses information from the conversation and stores the current summary in memory. It is particularly useful for longer conversations where keeping the entire message history in the prompt would take up too many tokens.
**Params**
diff --git a/docs/docs/components/llms.mdx b/docs/docs/components/model_specs.mdx
similarity index 100%
rename from docs/docs/components/llms.mdx
rename to docs/docs/components/model_specs.mdx
diff --git a/docs/docs/components/models.mdx b/docs/docs/components/models.mdx
new file mode 100644
index 000000000..1c3b404b5
--- /dev/null
+++ b/docs/docs/components/models.mdx
@@ -0,0 +1,346 @@
+import Admonition from '@theme/Admonition';
+
+# Models
+
+### Amazon Bedrock
+
+This component facilitates the generation of text using a Large Language Model (LLM) from Amazon Bedrock.
+
+**Params**
+
+- **Input Value:** Specifies the input text for text generation.
+
+- **System Message (Optional):** A system message to pass to the model.
+
+- **Model ID (Optional):** Specifies the model ID to be used for text generation. Defaults to _`"anthropic.claude-instant-v1"`_. Available options include:
+ - _`"ai21.j2-grande-instruct"`_
+ - _`"ai21.j2-jumbo-instruct"`_
+ - _`"ai21.j2-mid"`_
+ - _`"ai21.j2-mid-v1"`_
+ - _`"ai21.j2-ultra"`_
+ - _`"ai21.j2-ultra-v1"`_
+ - _`"anthropic.claude-instant-v1"`_
+ - _`"anthropic.claude-v1"`_
+ - _`"anthropic.claude-v2"`_
+ - _`"cohere.command-text-v14"`_
+
+- **Credentials Profile Name (Optional):** Specifies the name of the credentials profile.
+
+- **Region Name (Optional):** Specifies the region name.
+
+- **Model Kwargs (Optional):** Additional keyword arguments for the model.
+
+- **Endpoint URL (Optional):** Specifies the endpoint URL.
+
+- **Streaming (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **Cache (Optional):** Specifies whether to cache the response.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+
+
+ Ensure that necessary credentials are provided to connect to the Amazon Bedrock API. If connection fails, a ValueError will be raised.
+
+
+
+
+---
+
+### Anthropic
+
+This component allows the generation of text using Anthropic Chat & Completion large language models.
+
+**Params**
+
+- **Model Name:** Specifies the name of the Anthropic model to be used for text generation. Available options include:
+ - _`"claude-2.1"`_
+ - _`"claude-2.0"`_
+ - _`"claude-instant-1.2"`_
+ - _`"claude-instant-1"`_
+
+- **Anthropic API Key:** Your Anthropic API key.
+
+- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to _`256`_.
+
+- **Temperature (Optional):** Specifies the sampling temperature. Defaults to _`0.7`_.
+
+- **API Endpoint (Optional):** Specifies the endpoint of the Anthropic API. Defaults to _`"https://api.anthropic.com"`_ if not specified.
+
+- **Input Value:** Specifies the input text for text generation.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **System Message (Optional):** A system message to pass to the model.
+
+For detailed documentation and integration guides, please refer to the [Anthropic Component Documentation](https://python.langchain.com/docs/integrations/chat/anthropic).
+
+---
+
+### Azure OpenAI
+
+This component allows the generation of text using a Large Language Model (LLM) from Azure OpenAI.
+
+**Params**
+
+- **Model Name:** Specifies the name of the Azure OpenAI model to be used for text generation. Available options include:
+ - _`"gpt-35-turbo"`_
+ - _`"gpt-35-turbo-16k"`_
+ - _`"gpt-35-turbo-instruct"`_
+ - _`"gpt-4"`_
+ - _`"gpt-4-32k"`_
+ - _`"gpt-4-vision"`_
+
+- **Azure Endpoint:** Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`.
+
+- **Deployment Name:** Specifies the name of the deployment.
+
+- **API Version:** Specifies the version of the Azure OpenAI API to be used. Available options include:
+ - _`"2023-03-15-preview"`_
+ - _`"2023-05-15"`_
+ - _`"2023-06-01-preview"`_
+ - _`"2023-07-01-preview"`_
+ - _`"2023-08-01-preview"`_
+ - _`"2023-09-01-preview"`_
+ - _`"2023-12-01-preview"`_
+
+- **API Key:** Your Azure OpenAI API key.
+
+- **Temperature (Optional):** Specifies the sampling temperature. Defaults to _`0.7`_.
+
+- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to _`1000`_.
+
+- **Input Value:** Specifies the input text for text generation.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **System Message (Optional):** A system message to pass to the model.
+
+For detailed documentation and integration guides, please refer to the [Azure OpenAI Component Documentation](https://python.langchain.com/docs/integrations/llms/azure_openai).
+
+
+---
+
+### Cohere
+
+This component enables text generation using Cohere large language models.
+
+**Params**
+
+- **Cohere API Key:** Your Cohere API key.
+
+- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to _`256`_.
+
+- **Temperature (Optional):** Specifies the sampling temperature. Defaults to _`0.75`_.
+
+- **Input Value:** Specifies the input text for text generation.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **System Message (Optional):** A system message to pass to the model.
+
+---
+
+### Google Generative AI
+
+This component enables text generation using Google Generative AI.
+
+**Params**
+
+- **Google API Key:** Your Google API key to use for the Google Generative AI.
+
+- **Model:** The name of the model to use. Supported examples are _`"gemini-pro"`_ and _`"gemini-pro-vision"`_.
+
+- **Max Output Tokens (Optional):** The maximum number of tokens to generate.
+
+- **Temperature:** Run inference with this temperature. Must be in the closed interval [0.0, 1.0].
+
+- **Top K (Optional):** Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.
+
+- **Top P (Optional):** The maximum cumulative probability of tokens to consider when sampling.
+
+- **N (Optional):** Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.
+
+- **Input Value:** The input to the model.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **System Message (Optional):** A system message to pass to the model.
+
+---
+
+### Hugging Face API
+
+This component facilitates text generation using LLM models from the Hugging Face Inference API.
+
+**Params**
+
+- **Endpoint URL:** The URL of the Hugging Face Inference API endpoint. Should be provided along with necessary authentication credentials.
+
+- **Task:** Specifies the task for text generation. Options include _`"text2text-generation"`_, _`"text-generation"`_, and _`"summarization"`_.
+
+- **API Token:** The API token required for authentication with the Hugging Face Hub.
+
+- **Model Keyword Arguments (Optional):** Additional keyword arguments for the model. Should be provided as a Python dictionary.
+
+- **Input Value:** The input text for text generation.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **System Message (Optional):** A system message to pass to the model.
+
+---
+
+### LiteLLM Model
+
+Generates text using the `LiteLLM` collection of large language models.
+
+**Parameters**
+
+- **Model name:** The name of the model to use. For example, `gpt-3.5-turbo`. (Type: str)
+- **API key:** The API key to use for accessing the provider's API. (Type: str, Optional)
+- **Provider:** The provider of the API key. (Type: str, Choices: "OpenAI", "Azure", "Anthropic", "Replicate", "Cohere", "OpenRouter")
+- **Temperature:** Controls the randomness of the text generation. (Type: float, Default: 0.7)
+- **Model kwargs:** Additional keyword arguments for the model. (Type: Dict, Optional)
+- **Top p:** Filter responses to keep the cumulative probability within the top p tokens. (Type: float, Optional)
+- **Top k:** Filter responses to only include the top k tokens. (Type: int, Optional)
+- **N:** Number of chat completions to generate for each prompt. (Type: int, Default: 1)
+- **Max tokens:** The maximum number of tokens to generate for each chat completion. (Type: int, Default: 256)
+- **Max retries:** Maximum number of retries for failed requests. (Type: int, Default: 6)
+- **Verbose:** Whether to print verbose output. (Type: bool, Default: False)
+- **Input:** The input prompt for text generation. (Type: str)
+- **Stream:** Whether to stream the output. (Type: bool, Default: False)
+- **System message:** System message to pass to the model. (Type: str, Optional)
+
+---
+
+### Ollama
+
+Generate text using Ollama Local LLMs.
+
+**Parameters**
+
+- **Base URL:** Endpoint of the Ollama API. Defaults to 'http://localhost:11434' if not specified.
+- **Model Name:** The model name to use. Refer to [Ollama Library](https://ollama.ai/library) for more models.
+- **Temperature:** Controls the creativity of model responses. (Default: 0.8)
+- **Cache:** Enable or disable caching. (Default: False)
+- **Format:** Specify the format of the output (e.g., json). (Advanced)
+- **Metadata:** Metadata to add to the run trace. (Advanced)
+- **Mirostat:** Enable/disable Mirostat sampling for controlling perplexity. (Default: Disabled)
+- **Mirostat Eta:** Learning rate for Mirostat algorithm. (Default: None) (Advanced)
+- **Mirostat Tau:** Controls the balance between coherence and diversity of the output. (Default: None) (Advanced)
+- **Context Window Size:** Size of the context window for generating tokens. (Default: None) (Advanced)
+- **Number of GPUs:** Number of GPUs to use for computation. (Default: None) (Advanced)
+- **Number of Threads:** Number of threads to use during computation. (Default: None) (Advanced)
+- **Repeat Last N:** How far back the model looks to prevent repetition. (Default: None) (Advanced)
+- **Repeat Penalty:** Penalty for repetitions in generated text. (Default: None) (Advanced)
+- **TFS Z:** Tail free sampling value. (Default: None) (Advanced)
+- **Timeout:** Timeout for the request stream. (Default: None) (Advanced)
+- **Top K:** Limits token selection to top K. (Default: None) (Advanced)
+- **Top P:** Works together with top-k. (Default: None) (Advanced)
+- **Verbose:** Whether to print out response text.
+- **Tags:** Tags to add to the run trace. (Advanced)
+- **Stop Tokens:** List of tokens to signal the model to stop generating text. (Advanced)
+- **System:** System to use for generating text. (Advanced)
+- **Template:** Template to use for generating text. (Advanced)
+- **Input:** The input text.
+- **Stream:** Whether to stream the response.
+- **System Message:** System message to pass to the model. (Advanced)
+
+---
+
+### OpenAI
+
+This component facilitates text generation using OpenAI's models.
+
+**Params**
+
+- **Input Value:** The input text for text generation.
+
+- **Max Tokens (Optional):** The maximum number of tokens to generate. Defaults to _`256`_.
+
+- **Model Kwargs (Optional):** Additional keyword arguments for the model. Should be provided as a nested dictionary.
+
+- **Model Name (Optional):** The name of the model to use. Defaults to _`gpt-4-1106-preview`_. Supported options include: _`gpt-4-turbo-preview`_, _`gpt-4-0125-preview`_, _`gpt-4-1106-preview`_, _`gpt-4-vision-preview`_, _`gpt-3.5-turbo-0125`_, _`gpt-3.5-turbo-1106`_.
+
+- **OpenAI API Base (Optional):** The base URL of the OpenAI API. Defaults to _`https://api.openai.com/v1`_.
+
+- **OpenAI API Key (Optional):** The API key for accessing the OpenAI API.
+
+- **Temperature:** Controls the creativity of model responses. Defaults to _`0.7`_.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **System Message (Optional):** System message to pass to the model.
+
+---
+
+### Qianfan
+
+This component facilitates the generation of text using Baidu Qianfan chat models.
+
+**Params**
+
+- **Model Name:** Specifies the name of the Qianfan chat model to be used for text generation. Available options include:
+ - _`"ERNIE-Bot"`_
+ - _`"ERNIE-Bot-turbo"`_
+ - _`"BLOOMZ-7B"`_
+ - _`"Llama-2-7b-chat"`_
+ - _`"Llama-2-13b-chat"`_
+ - _`"Llama-2-70b-chat"`_
+ - _`"Qianfan-BLOOMZ-7B-compressed"`_
+ - _`"Qianfan-Chinese-Llama-2-7B"`_
+ - _`"ChatGLM2-6B-32K"`_
+ - _`"AquilaChat-7B"`_
+
+- **Qianfan Ak:** Your Baidu Qianfan access key, obtainable from [here](https://cloud.baidu.com/product/wenxinworkshop).
+
+- **Qianfan Sk:** Your Baidu Qianfan secret key, obtainable from [here](https://cloud.baidu.com/product/wenxinworkshop).
+
+- **Top p (Optional):** Model parameter. Specifies the top-p value. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to _`0.8`_.
+
+- **Temperature (Optional):** Model parameter. Specifies the sampling temperature. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to _`0.95`_.
+
+- **Penalty Score (Optional):** Model parameter. Specifies the penalty score. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to _`1.0`_.
+
+- **Endpoint (Optional):** Endpoint of the Qianfan LLM, required if custom model is used.
+
+- **Input Value:** Specifies the input text for text generation.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **System Message (Optional):** A system message to pass to the model.
+
+---
+
+### Vertex AI
+
+The `ChatVertexAI` is a component for generating text using the Vertex AI Chat large language models API.
+
+**Params**
+
+- **Credentials:** The JSON file containing the credentials for accessing the Vertex AI Chat API.
+
+- **Project:** The name of the project associated with the Vertex AI Chat API.
+
+- **Examples (Optional):** List of examples to provide context for text generation.
+
+- **Location:** The location of the Vertex AI Chat API service. Defaults to _`us-central1`_.
+
+- **Max Output Tokens:** The maximum number of tokens to generate. Defaults to _`128`_.
+
+- **Model Name:** The name of the model to use. Defaults to _`chat-bison`_.
+
+- **Temperature:** Controls the creativity of model responses. Defaults to _`0.0`_.
+
+- **Input Value:** The input text for text generation.
+
+- **Top K:** Limits token selection to top K. Defaults to _`40`_.
+
+- **Top P:** Works together with top-k. Defaults to _`0.95`_.
+
+- **Verbose:** Whether to print out response text. Defaults to _`False`_.
+
+- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_.
+
+- **System Message (Optional):** System message to pass to the model.
\ No newline at end of file
diff --git a/docs/docs/components/outputs.mdx b/docs/docs/components/outputs.mdx
new file mode 100644
index 000000000..6fe3f9d2a
--- /dev/null
+++ b/docs/docs/components/outputs.mdx
@@ -0,0 +1,37 @@
+import Admonition from '@theme/Admonition';
+
+# Outputs
+
+### Chat Output
+
+This component is designed to send a message to the chat.
+
+**Params**
+
+- **Sender Type:** specifies the sender type. Defaults to _`"Machine"`_. Options are _`"Machine"`_ and _`"User"`_.
+
+- **Sender Name:** specifies the name of the sender. Defaults to _`"AI"`_.
+
+- **Session ID:** specifies the session ID of the chat history. If provided, the message will be saved in the Message History.
+
+- **Message:** specifies the message text.
+
+
+
+ If _`As Record`_ is _`true`_ and the _`Message`_ is a _`Record`_, the data of the _`Record`_ will be updated with the _`Sender`_, _`Sender Name`_, and _`Session ID`_.
+
+
+
+### Text Output
+
+This component is designed to display text data to the user. It's particularly useful for scenarios where you don't want to send the text data to the chat, but still want to display it.
+
+**Params**
+
+- **Value:** Specifies the text data to be displayed. This is where the text data to be displayed is provided. If no value is provided, it defaults to an empty string.
+
+
+
+  The `TextOutput` component serves as a straightforward means for displaying text data. It ensures that textual data can be seamlessly observed at any point throughout your flow without sending it to the chat.
+
+
\ No newline at end of file
diff --git a/docs/docs/components/prompts.mdx b/docs/docs/components/prompts.mdx
index 3aafc9b96..0b1ad705c 100644
--- a/docs/docs/components/prompts.mdx
+++ b/docs/docs/components/prompts.mdx
@@ -21,7 +21,7 @@ The `PromptTemplate` component allows users to create prompts and define variabl
Once a variable is defined in the prompt template, it becomes a component
input of its own. Check out [Prompt
- Customization](../docs/guidelines/prompt-customization.mdx) to learn more.
+ Customization](../guidelines/prompt-customization) to learn more.
- **template:** Template used to format an individual request.
diff --git a/docs/docs/components/utilities.mdx b/docs/docs/components/utilities.mdx
index 593864213..e8c2ba216 100644
--- a/docs/docs/components/utilities.mdx
+++ b/docs/docs/components/utilities.mdx
@@ -74,3 +74,23 @@ Build a Document containing a JSON object using a key and another Document page
**Output**
- **List of Documents:** A list containing the Document with the JSON object.
+
+## Unique ID Generator
+
+Generates a unique identifier (UUID) for each instance it is invoked, providing a distinct and reliable identifier suitable for a variety of applications.
+
+**Params**
+
+- **Value:** This field displays the generated unique identifier (UUID). The UUID is generated dynamically for each instance of the component, ensuring uniqueness across different uses.
+
+**Output**
+
+- Returns a unique identifier (UUID) as a string. This UUID is generated using Python's `uuid` module, ensuring that each identifier is unique and can be used as a reliable reference in your application.
+
+
+
+ The Unique ID Generator is crucial for scenarios requiring distinct identifiers, such as session management, transaction tracking, or any context where different instances or entities must be uniquely identified. The generated UUID is provided as a hexadecimal string, offering a high level of uniqueness and security for identification purposes.
+
+
+
+For additional information and examples, please consult the [Langflow Custom Components Documentation](https://docs.langflow.org/components/custom).
diff --git a/docs/docs/components/vector-stores.mdx b/docs/docs/components/vector-stores.mdx
index 133984cda..103d755b4 100644
--- a/docs/docs/components/vector-stores.mdx
+++ b/docs/docs/components/vector-stores.mdx
@@ -1,9 +1,635 @@
-import Admonition from '@theme/Admonition';
+import Admonition from "@theme/Admonition";
# Vector Stores
-
+### Astra DB
+
+The `Astra DB` is a component for initializing an Astra DB Vector Store from Records. It facilitates the creation of Astra DB-based vector indexes for efficient document storage and retrieval.
+
+**Params**
+
+- **Input:** The input documents or records.
+
+- **Embedding:** The embedding model used by Astra DB.
+
+- **Collection Name:** The name of the collection in Astra DB.
+
+- **Token:** The token for Astra DB.
+
+- **API Endpoint:** The API endpoint for Astra DB.
+
+- **Namespace:** The namespace in Astra DB.
+
+- **Metric:** The metric to use in Astra DB.
+
+- **Batch Size:** The batch size for Astra DB.
+
+- **Bulk Insert Batch Concurrency:** The bulk insert batch concurrency for Astra DB.
+
+- **Bulk Insert Overwrite Concurrency:** The bulk insert overwrite concurrency for Astra DB.
+
+- **Bulk Delete Concurrency:** The bulk delete concurrency for Astra DB.
+
+- **Setup Mode:** The setup mode for the vector store.
+
+- **Pre Delete Collection:** Pre delete collection.
+
+- **Metadata Indexing Include:** Metadata indexing include.
+
+- **Metadata Indexing Exclude:** Metadata indexing exclude.
+
+- **Collection Indexing Policy:** Collection indexing policy.
+
+
- We appreciate your understanding as we polish our documentation β it may contain some rough edges. Share your feedback or report issues to help us improve! π οΈπ
+ Ensure that the required Astra DB token and API endpoint are properly configured.
-
\ No newline at end of file
+
+
+
+---
+
+### Astra DB Search
+
+The `Astra DBSearch` is a component for searching an existing Astra DB Vector Store for similar documents. It extends the functionality of the `Astra DB` component to provide efficient document retrieval based on similarity metrics.
+
+**Params**
+
+- **Search Type:** The type of search to perform (e.g., Similarity, MMR).
+
+- **Input Value:** The input value to search for.
+
+- **Embedding:** The embedding model used by Astra DB.
+
+- **Collection Name:** The name of the collection in Astra DB.
+
+- **Token:** The token for Astra DB.
+
+- **API Endpoint:** The API endpoint for Astra DB.
+
+- **Namespace:** The namespace in Astra DB.
+
+- **Metric:** The metric to use in Astra DB.
+
+- **Batch Size:** The batch size for Astra DB.
+
+- **Bulk Insert Batch Concurrency:** The bulk insert batch concurrency for Astra DB.
+
+- **Bulk Insert Overwrite Concurrency:** The bulk insert overwrite concurrency for Astra DB.
+
+- **Bulk Delete Concurrency:** The bulk delete concurrency for Astra DB.
+
+- **Setup Mode:** The setup mode for the vector store.
+
+- **Pre Delete Collection:** Pre delete collection.
+
+- **Metadata Indexing Include:** Metadata indexing include.
+
+- **Metadata Indexing Exclude:** Metadata indexing exclude.
+
+- **Collection Indexing Policy:** Collection indexing policy.
+
+---
+
+### Chroma
+
+The `Chroma` is a component designed for implementing a Vector Store using Chroma. This component allows users to utilize Chroma for efficient vector storage and retrieval within their language processing workflows.
+
+**Params**
+
+- **Collection Name:** The name of the collection.
+
+- **Persist Directory:** The directory to persist the Vector Store to.
+
+- **Server CORS Allow Origins (Optional):** The CORS allow origins for the Chroma server.
+
+- **Server Host (Optional):** The host for the Chroma server.
+
+- **Server Port (Optional):** The port for the Chroma server.
+
+- **Server gRPC Port (Optional):** The gRPC port for the Chroma server.
+
+- **Server SSL Enabled (Optional):** Whether to enable SSL for the Chroma server.
+
+- **Input:** Input data for creating the Vector Store.
+
+- **Embedding:** The embeddings to use for the Vector Store.
+
+For detailed documentation and integration guides, please refer to the [Chroma Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/chroma).
+
+---
+
+### Chroma Search
+
+The `ChromaSearch` is a component designed for searching a Chroma collection for similar documents. This component integrates with Chroma to facilitate efficient document retrieval based on similarity metrics.
+
+**Params**
+
+- **Input:** The input text to search for similar documents.
+
+- **Search Type:** The type of search to perform ("Similarity" or "MMR").
+
+- **Collection Name:** The name of the Chroma collection.
+
+- **Index Directory:** The directory where the Chroma index is stored.
+
+- **Embedding:** The embedding model used to vectorize inputs (make sure to use the same as the index).
+
+- **Server CORS Allow Origins (Optional):** The CORS allow origins for the Chroma server.
+
+- **Server Host (Optional):** The host for the Chroma server.
+
+- **Server Port (Optional):** The port for the Chroma server.
+
+- **Server gRPC Port (Optional):** The gRPC port for the Chroma server.
+
+- **Server SSL Enabled (Optional):** Whether SSL is enabled for the Chroma server.
+
+---
+
+### FAISS
+
+The `FAISS` is a component designed for ingesting documents into a FAISS Vector Store. It facilitates efficient document indexing and retrieval using the FAISS library.
+
+**Params**
+
+- **Embedding:** The embedding model used to vectorize inputs.
+
+- **Input:** The input documents to ingest into the FAISS Vector Store.
+
+- **Folder Path:** The path to save the FAISS index. It will be relative to where Langflow is running.
+
+- **Index Name:** The name of the FAISS index.
+
+For detailed documentation and integration guides, please refer to the [FAISS Component Documentation](https://faiss.ai/index.html).
+
+---
+
+### FAISS Search
+
+The `FAISSSearch` is a component for searching a FAISS Vector Store for similar documents. It enables efficient document retrieval based on similarity metrics using FAISS.
+
+**Params**
+
+- **Embedding:** The embedding model used by the FAISS Vector Store.
+
+- **Folder Path:** The path from which to load the FAISS index. It will be relative to where Langflow is running.
+
+- **Input:** The input value to search for similar documents.
+
+- **Index Name:** The name of the FAISS index.
+
+---
+
+### MongoDB Atlas
+
+The `MongoDBAtlas` is a component used to construct a MongoDB Atlas Vector Search vector store from Records. It facilitates the creation of MongoDB Atlas-based vector stores for efficient document storage and retrieval.
+
+**Params**
+
+- **Embedding:** The embedding model used by the MongoDB Atlas Vector Search.
+
+- **Input:** The input documents or records.
+
+- **Collection Name:** The name of the collection in the MongoDB Atlas database.
+
+- **Database Name:** The name of the database in MongoDB Atlas.
+
+- **Index Name:** The name of the index in MongoDB Atlas.
+
+- **MongoDB Atlas Cluster URI:** The URI of the MongoDB Atlas cluster.
+
+- **Search Kwargs:** Additional search arguments for MongoDB Atlas.
+
+
+ Ensure that pymongo is installed to use MongoDB Atlas Vector Store.
+
+
+---
+
+### MongoDB Atlas Search
+
+The `MongoDBAtlasSearch` is a component for searching a MongoDB Atlas Vector Store for similar documents. It extends the functionality of the MongoDBAtlasComponent to provide efficient document retrieval based on similarity metrics.
+
+**Params**
+
+- **Search Type:** The type of search to perform. Options: "Similarity", "MMR".
+
+- **Input:** The input value to search for.
+
+- **Embedding:** The embedding model used by the MongoDB Atlas Vector Store.
+
+- **Collection Name:** The name of the collection in the MongoDB Atlas database.
+
+- **Database Name:** The name of the database in MongoDB Atlas.
+
+- **Index Name:** The name of the index in MongoDB Atlas.
+
+- **MongoDB Atlas Cluster URI:** The URI of the MongoDB Atlas cluster.
+
+- **Search Kwargs:** Additional search arguments for MongoDB Atlas.
+
+---
+
+### PGVector
+
+The `PGVector` is a component for implementing a Vector Store using PostgreSQL. It allows users to store and retrieve vectors efficiently within a PostgreSQL database.
+
+**Params**
+
+- **Input:** The input value to use for the Vector Store.
+
+- **Embedding:** The embedding model used by the Vector Store.
+
+- **PostgreSQL Server Connection String:** The URL for the PostgreSQL server.
+
+- **Table:** The name of the table in the PostgreSQL database.
+
+For detailed documentation and integration guides, please refer to the [PGVector Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/pgvector).
+
+
+
+ Ensure that the required PostgreSQL server is accessible and properly
+ configured.
+
+
+
+---
+
+### PGVector Search
+
+The `PGVectorSearch` is a component for searching a PGVector Store for similar documents. It extends the functionality of the PGVectorComponent to provide efficient document retrieval based on similarity metrics.
+
+**Params**
+
+- **Input:** The input value to search for.
+
+- **Embedding:** The embedding model used by the Vector Store.
+
+- **PostgreSQL Server Connection String:** The URL for the PostgreSQL server.
+
+- **Table:** The name of the table in the PostgreSQL database.
+
+- **Search Type:** The type of search to perform (e.g., "Similarity", "MMR").
+
+---
+
+### Pinecone
+
+The `Pinecone` is a component used to construct a Pinecone wrapper from Records. It facilitates the creation of Pinecone-based vector indexes for efficient document storage and retrieval.
+
+**Params**
+
+- **Input:** The input documents or records.
+
+- **Embedding:** The embedding model used by Pinecone.
+
+- **Index Name:** The name of the index in Pinecone.
+
+- **Namespace:** The namespace in Pinecone.
+
+- **Pinecone API Key:** The API key for Pinecone.
+
+- **Pinecone Environment:** The environment for Pinecone.
+
+- **Search Kwargs:** Additional search keyword arguments for Pinecone.
+
+- **Pool Threads:** The number of threads to use for Pinecone.
+
+
+
+ Ensure that the required Pinecone API key and environment are properly
+ configured.
+
+
+
+---
+
+### Pinecone Search
+
+The `PineconeSearch` is a component used to search a Pinecone Vector Store for similar documents. It extends the functionality of the `PineconeComponent` to provide efficient document retrieval based on similarity metrics.
+
+**Params**
+
+- **Search Type:** The type of search to perform (e.g., Similarity, MMR).
+
+- **Input Value:** The input value to search for.
+
+- **Embedding:** The embedding model used by Pinecone.
+
+- **Index Name:** The name of the index in Pinecone.
+
+- **Namespace:** The namespace in Pinecone.
+
+- **Pinecone API Key:** The API key for Pinecone.
+
+- **Pinecone Environment:** The environment for Pinecone.
+
+- **Search Kwargs:** Additional search keyword arguments for Pinecone.
+
+- **Pool Threads:** The number of threads to use for Pinecone.
+
+---
+
+### Qdrant
+
+The `Qdrant` is a component used to construct a Qdrant wrapper from a list of texts. It allows for efficient similarity search and retrieval operations based on the provided embeddings.
+
+**Params**
+
+- **Input:** The input documents or records.
+
+- **Embedding:** The embedding model used by Qdrant.
+
+- **API Key:** The API key for Qdrant (password field).
+
+- **Collection Name:** The name of the collection in Qdrant.
+
+- **Content Payload Key:** The key for the content payload in the documents (advanced).
+
+- **Distance Function:** The distance function to use in Qdrant (advanced).
+
+- **gRPC Port:** The gRPC port for Qdrant (advanced).
+
+- **Host:** The host for Qdrant (advanced).
+
+- **HTTPS:** Enable HTTPS for Qdrant (advanced).
+
+- **Location:** The location for Qdrant (advanced).
+
+- **Metadata Payload Key:** The key for the metadata payload in the documents (advanced).
+
+- **Path:** The path for Qdrant (advanced).
+
+- **Port:** The port for Qdrant (advanced).
+
+- **Prefer gRPC:** Prefer gRPC for Qdrant (advanced).
+
+- **Prefix:** The prefix for Qdrant (advanced).
+
+- **Search Kwargs:** Additional search keyword arguments for Qdrant (advanced).
+
+- **Timeout:** The timeout for Qdrant (advanced).
+
+- **URL:** The URL for Qdrant (advanced).
+
+---
+
+### Qdrant Search
+
+The `QdrantSearch` is a component used to search a Qdrant Vector Store for similar documents. It extends the functionality of the `QdrantComponent` to provide efficient document retrieval based on similarity metrics.
+
+**Params**
+
+- **Search Type:** The type of search to perform (e.g., Similarity, MMR).
+
+- **Input Value:** The input value to search for.
+
+- **Embedding:** The embedding model used by Qdrant.
+
+- **API Key:** The API key for Qdrant (password field).
+
+- **Collection Name:** The name of the collection in Qdrant.
+
+- **Content Payload Key:** The key for the content payload in the documents (advanced).
+
+- **Distance Function:** The distance function to use in Qdrant (advanced).
+
+- **gRPC Port:** The gRPC port for Qdrant (advanced).
+
+- **Host:** The host for Qdrant (advanced).
+
+- **HTTPS:** Enable HTTPS for Qdrant (advanced).
+
+- **Location:** The location for Qdrant (advanced).
+
+- **Metadata Payload Key:** The key for the metadata payload in the documents (advanced).
+
+- **Path:** The path for Qdrant (advanced).
+
+- **Port:** The port for Qdrant (advanced).
+
+- **Prefer gRPC:** Prefer gRPC for Qdrant (advanced).
+
+- **Prefix:** The prefix for Qdrant (advanced).
+
+- **Search Kwargs:** Additional search keyword arguments for Qdrant (advanced).
+
+- **Timeout:** The timeout for Qdrant (advanced).
+
+- **URL:** The URL for Qdrant (advanced).
+
+---
+
+### Redis
+
+The `Redis` is a component for implementing a Vector Store using Redis. It provides functionality to store and retrieve vectors efficiently from a Redis database.
+
+**Params**
+
+- **Index Name:** The name of the index in Redis (default: your_index).
+
+- **Input:** The input data to build the Redis Vector Store (input types: Document, Record).
+
+- **Embedding:** The embedding model used by Redis.
+
+- **Schema:** The schema file (.yaml) to define the structure of the documents (optional).
+
+- **Redis Server Connection String:** The connection string for the Redis server.
+
+- **Redis Index:** The name of the Redis index (optional).
+
+For detailed documentation, please refer to the [Redis Documentation](https://python.langchain.com/docs/integrations/vectorstores/redis).
+
+
+
+ Ensure that the required Redis server connection URL and index name are
+ properly configured. If no documents are provided, a schema must be
+ provided.
+
+
+
+---
+
+### Redis Search
+
+The `RedisSearch` is a component for searching a Redis Vector Store for similar documents.
+
+**Params**
+
+- **Search Type:** The type of search to perform (e.g., Similarity, MMR).
+
+- **Input Value:** The input value to search for.
+
+- **Index Name:** The name of the index in Redis (default: your_index).
+
+- **Embedding:** The embedding model used by Redis.
+
+- **Schema:** The schema file (.yaml) to define the structure of the documents (optional).
+
+- **Redis Server Connection String:** The connection string for the Redis server.
+
+- **Redis Index:** The name of the Redis index (optional).
+
+---
+
+### Supabase
+
+The `Supabase` is a component for initializing a Supabase Vector Store from texts and embeddings.
+
+**Params**
+
+- **Input:** The input documents or records.
+
+- **Embedding:** The embedding model used by Supabase.
+
+- **Query Name:** The name of the query (optional).
+
+- **Search Kwargs:** Additional search keyword arguments for Supabase (advanced).
+
+- **Supabase Service Key:** The service key for Supabase.
+
+- **Supabase URL:** The URL for the Supabase instance.
+
+- **Table Name:** The name of the table in Supabase (advanced).
+
+
+
+ Ensure that the required Supabase service key, Supabase URL, and table name
+ are properly configured.
+
+
+
+---
+
+### Supabase Search
+
+The `SupabaseSearch` is a component for searching a Supabase Vector Store for similar documents.
+
+**Params**
+
+- **Search Type:** The type of search to perform (e.g., Similarity, MMR).
+
+- **Input Value:** The input value to search for.
+
+- **Embedding:** The embedding model used by Supabase.
+
+- **Query Name:** The name of the query (optional).
+
+- **Search Kwargs:** Additional search keyword arguments for Supabase (advanced).
+
+- **Supabase Service Key:** The service key for Supabase.
+
+- **Supabase URL:** The URL for the Supabase instance.
+
+- **Table Name:** The name of the table in Supabase (advanced).
+
+---
+
+### Vectara
+
+The `Vectara` is a component for implementing a Vector Store using Vectara.
+
+**Params**
+
+- **Vectara Customer ID:** The customer ID for Vectara.
+
+- **Vectara Corpus ID:** The corpus ID for Vectara.
+
+- **Vectara API Key:** The API key for Vectara.
+
+- **Files Url:** The URL(s) of the file(s) to be used for initializing the Vectara Vector Store (optional).
+
+- **Input:** The input data to be upserted to the corpus (optional).
+
+For detailed documentation and integration guides, please refer to the [Vectara Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/vectara).
+
+
+
+ If `inputs` are provided, they will be upserted to the corpus. If
+ `files_url` are provided, Vectara will process the files from the URLs.
+
+
+
+---
+
+### Vectara Search
+
+The `VectaraSearch` is a component for searching a Vectara Vector Store for similar documents.
+
+**Params**
+
+- **Search Type:** The type of search to perform (e.g., Similarity, MMR).
+
+- **Input Value:** The input value to search for.
+
+- **Vectara Customer ID:** The customer ID for Vectara.
+
+- **Vectara Corpus ID:** The corpus ID for Vectara.
+
+- **Vectara API Key:** The API key for Vectara.
+
+- **Files Url:** The URL(s) of the file(s) to be used for initializing the Vectara Vector Store (optional).
+
+---
+
+### Weaviate
+
+The `Weaviate` is a component for implementing a Vector Store using Weaviate.
+
+**Params**
+
+- **Weaviate URL:** The URL of the Weaviate instance (default: http://localhost:8080).
+
+- **Search By Text:** Boolean indicating whether to search by text (default: False).
+
+- **API Key:** The API key for authentication (optional).
+
+- **Index name:** The name of the index in Weaviate (optional).
+
+- **Text Key:** The key used to extract text from documents (default: "text").
+
+- **Input:** The input document or record.
+
+- **Embedding:** The embedding model used by Weaviate.
+
+- **Attributes:** Additional attributes to consider during indexing (optional).
+
+For detailed documentation and integration guides, please refer to the [Weaviate Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/weaviate).
+
+
+
+ Before using the Weaviate Vector Store component, ensure that you have a
+ Weaviate instance running and accessible at the specified URL. Additionally,
+ make sure to provide the correct API key for authentication if required.
+ Adjust the index name, text key, and attributes according to your dataset
+ and indexing requirements. Finally, ensure that the provided embeddings are
+ compatible with Weaviate's requirements.
+
+
+
+---
+
+### Weaviate Search
+
+The `WeaviateSearch` component facilitates searching a Weaviate Vector Store for similar documents.
+
+**Params**
+
+- **Search Type:** The type of search to perform (e.g., Similarity, MMR).
+
+- **Input Value:** The input value to search for.
+
+- **Weaviate URL:** The URL of the Weaviate instance (default: http://localhost:8080).
+
+- **Search By Text:** Boolean indicating whether to search by text (default: False).
+
+- **API Key:** The API key for authentication (optional).
+
+- **Index name:** The name of the index in Weaviate (optional).
+
+- **Text Key:** The key used to extract text from documents (default: "text").
+
+- **Embedding:** The embedding model used by Weaviate.
+
+- **Attributes:** Additional attributes to consider during indexing (optional).
diff --git a/docs/docs/components/wrappers.mdx b/docs/docs/components/wrappers.mdx
deleted file mode 100644
index 4b1251b60..000000000
--- a/docs/docs/components/wrappers.mdx
+++ /dev/null
@@ -1,20 +0,0 @@
-import Admonition from '@theme/Admonition';
-
-# Wrappers
-
-
-
- We appreciate your understanding as we polish our documentation β it may contain some rough edges. Share your feedback or report issues to help us improve! π οΈπ
-
-
-
-
-### TextRequestsWrapper
-
-This component is designed to work with the Python Requests module, which is a popular tool for making web requests. Used to fetch data from a particular website.
-
-**Params**
-
-- **header:** specifies the headers to be included in the HTTP request. Defaults to `{'Authorization': 'Bearer '}`.
-
- Headers are key-value pairs that provide additional information about the request or the client making the request. They can be used to send authentication credentials, specify the content type of the request, set cookies, and more. They allow the client and the server to communicate additional information beyond the basic request.
\ No newline at end of file
diff --git a/docs/docs/examples/buffer-memory.mdx b/docs/docs/examples/buffer-memory.mdx
index 3167081a5..b196f9031 100644
--- a/docs/docs/examples/buffer-memory.mdx
+++ b/docs/docs/examples/buffer-memory.mdx
@@ -16,6 +16,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
light: "img/buffer-memory.png",
dark: "img/buffer-memory.png",
}}
+ style={{
+ width: "80%",
+ margin: "20px auto",
+ display: "flex",
+ justifyContent: "center",
+ }}
/>
#### Download Flow
diff --git a/docs/docs/examples/conversation-chain.mdx b/docs/docs/examples/conversation-chain.mdx
index 1cd59ca55..294d1b440 100644
--- a/docs/docs/examples/conversation-chain.mdx
+++ b/docs/docs/examples/conversation-chain.mdx
@@ -22,6 +22,13 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
light: "img/basic-chat.png",
dark: "img/basic-chat.png",
}}
+
+style={{
+ width: "80%",
+ margin: "20px auto",
+ display: "flex",
+ justifyContent: "center",
+ }}
/>
#### Download Flow
diff --git a/docs/docs/examples/csv-loader.mdx b/docs/docs/examples/csv-loader.mdx
index 351e99440..25f3bb444 100644
--- a/docs/docs/examples/csv-loader.mdx
+++ b/docs/docs/examples/csv-loader.mdx
@@ -34,6 +34,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
light: "img/csv-loader.png",
dark: "img/csv-loader.png",
}}
+ style={{
+ width: "80%",
+ margin: "20px auto",
+ display: "flex",
+ justifyContent: "center",
+ }}
/>
#### Download Flow
diff --git a/docs/docs/examples/flow-runner.mdx b/docs/docs/examples/flow-runner.mdx
index e20dc39f7..8a07adb0a 100644
--- a/docs/docs/examples/flow-runner.mdx
+++ b/docs/docs/examples/flow-runner.mdx
@@ -3,9 +3,6 @@ description: Custom Components
hide_table_of_contents: true
---
-import ZoomableImage from "/src/theme/ZoomableImage.js";
-import Admonition from "@theme/Admonition";
-
# FlowRunner Component
The CustomComponent class allows us to create components that interact with Langflow itself. In this example, we will make a component that runs other flows available in "My Collection".
@@ -18,7 +15,7 @@ The CustomComponent class allows us to create components that interact with Lang
}}
style={{
width: "30%",
- margin: "0 auto",
+ margin: "20px auto",
display: "flex",
justifyContent: "center",
}}
@@ -35,7 +32,7 @@ We will cover how to:
Example Code
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class FlowRunner(CustomComponent):
@@ -75,7 +72,7 @@ class FlowRunner(CustomComponent):
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
class MyComponent(CustomComponent):
@@ -95,7 +92,7 @@ The typical structure of a Custom Component is composed of _`display_name`_ and
---
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
# focus
@@ -118,7 +115,7 @@ Let's start by defining our component's _`display_name`_ and _`description`_.
---
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
# focus
from langchain.schema import Document
@@ -140,7 +137,7 @@ Second, we will import _`Document`_ from the [_langchain.schema_](https://docs.l
---
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
# focus
from langchain.schema import Document
@@ -167,7 +164,7 @@ Now, let's add the [parameters](focus://11[20:55]) and the [return type](focus:/
---
```python focus=13:14
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
@@ -189,7 +186,7 @@ We can now start writing the _`build`_ method. Let's list available flows in "My
---
```python focus=15:18
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
@@ -222,7 +219,7 @@ And retrieve a flow that matches the selected name (we'll make a dropdown input
---
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
@@ -250,7 +247,7 @@ You can load this flow using _`get_flow`_ and set a _`tweaks`_ dictionary to cus
---
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
@@ -287,7 +284,7 @@ The content of a document can be extracted using the _`page_content`_ attribute,
---
```python focus=9:16
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
@@ -366,3 +363,6 @@ Done! This is what our script and custom component looks like:
/>
+
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import Admonition from "@theme/Admonition";
diff --git a/docs/docs/examples/how-upload-examples.mdx b/docs/docs/examples/how-upload-examples.mdx
deleted file mode 100644
index 4f54558eb..000000000
--- a/docs/docs/examples/how-upload-examples.mdx
+++ /dev/null
@@ -1,28 +0,0 @@
-import ThemedImage from "@theme/ThemedImage";
-import useBaseUrl from "@docusaurus/useBaseUrl";
-import ZoomableImage from "/src/theme/ZoomableImage.js";
-
-# π How to Upload Examples?
-
-We welcome all examples that can help our community learn and explore Langflow's capabilities.
-Langflow Examples is a repository on [GitHub](https://github.com/logspace-ai/langflow_examples) that contains examples of flows that people can use for inspiration and learning.
-
-{" "}
-
-
-To upload examples, please follow these steps:
-
-1. **Create a Flow:** First, create a flow using Langflow. You can use any of the available templates or create a new flow from scratch.
-
-2. **Export the Flow:** Once you have created a flow, export it as a JSON file. Make sure to give your file a descriptive name and include a brief description of what it does.
-
-3. **Submit a Pull Request:** Finally, submit a pull request (PR) to the examples repo. Make sure to include your JSON file in the PR.
-
-If your example uses any third-party libraries or packages, please include them in your PR and make sure that your example follows the [**βοΈ Langflow Code Of Conduct**](https://github.com/logspace-ai/langflow/blob/dev/CODE_OF_CONDUCT.md).
diff --git a/docs/docs/examples/midjourney-prompt-chain.mdx b/docs/docs/examples/midjourney-prompt-chain.mdx
deleted file mode 100644
index 9df732026..000000000
--- a/docs/docs/examples/midjourney-prompt-chain.mdx
+++ /dev/null
@@ -1,46 +0,0 @@
-import Admonition from "@theme/Admonition";
-
-# MidJourney Prompt Chain
-
-The `MidJourneyPromptChain` can be used to generate imaginative and detailed MidJourney prompts.
-
-For example, type something like:
-
-```bash
-Dragon
-```
-
-And get a response such as:
-
-```text
-Imagine a mysterious forest, the trees are tall and ancient, their branches reaching up to the sky. Through the darkness, a dragon emerges from the shadows, its scales shimmering in the moonlight. Its wingspan is immense, and its eyes glow with a fierce intensity. It is a majestic and powerful creature, one that commands both respect and fear.
-```
-
-
- Notice that the `ConversationSummaryMemory` stores a summary of the
- conversation over time. Try using it to create better prompts as the
- conversation goes on.
-
-
-## βοΈ Langflow Example
-
-import ThemedImage from "@theme/ThemedImage";
-import useBaseUrl from "@docusaurus/useBaseUrl";
-import ZoomableImage from "/src/theme/ZoomableImage.js";
-
-
-
-#### Download Flow
-
-
-
-- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
-- [`ConversationSummaryMemory`](https://python.langchain.com/docs/modules/memory/types/summary)
-
-
diff --git a/docs/docs/examples/multiple-vectorstores.mdx b/docs/docs/examples/multiple-vectorstores.mdx
deleted file mode 100644
index 2e554bbf1..000000000
--- a/docs/docs/examples/multiple-vectorstores.mdx
+++ /dev/null
@@ -1,58 +0,0 @@
-import Admonition from "@theme/Admonition";
-
-# Multiple Vector Stores
-
-The example below shows an agent operating with two vector stores built upon different data sources.
-
-The `TextLoader` loads a TXT file, while the `WebBaseLoader` pulls text from webpages into a document format to accessed downstream. The `Chroma` vector stores are created analogous to what we have demonstrated in our [CSV Loader](/examples/csv-loader.mdx) example. Finally, the `VectorStoreRouterAgent` constructs an agent that routes between the vector stores.
-
-
- Get the TXT file used
- [here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt).
-
-
-URL used by the `WebBaseLoader`:
-
-```text
-https://pt.wikipedia.org/wiki/Harry_Potter
-```
-
-
- When you build the flow, request information about one of the sources. The
- agent should be able to use the correct source to generate a response.
-
-
-
- Learn more about Multiple Vector Stores
- [here](https://python.langchain.com/docs/modules/data_connection/vectorstores/).
-
-
-## βοΈ Langflow Example
-
-import ThemedImage from "@theme/ThemedImage";
-import useBaseUrl from "@docusaurus/useBaseUrl";
-import ZoomableImage from "/src/theme/ZoomableImage.js";
-
-
-
-#### Download Flow
-
-
-
-- [`WebBaseLoader`](https://python.langchain.com/docs/integrations/document_loaders/web_base)
-- [`TextLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/)
-- [`CharacterTextSplitter`](https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter)
-- [`OpenAIEmbedding`](https://python.langchain.com/docs/integrations/text_embedding/openai)
-- [`Chroma`](https://python.langchain.com/docs/integrations/vectorstores/chroma)
-- [`VectorStoreInfo`](https://python.langchain.com/docs/modules/data_connection/vectorstores/)
-- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
-- [`VectorStoreRouterToolkit`](https://js.langchain.com/docs/modules/agents/tools/how_to/agents_with_vectorstores)
-- [`VectorStoreRouterAgent`](https://js.langchain.com/docs/modules/agents/tools/how_to/agents_with_vectorstores)
-
-
diff --git a/docs/docs/examples/python-function.mdx b/docs/docs/examples/python-function.mdx
index 9eadd7273..2bb4b93e1 100644
--- a/docs/docs/examples/python-function.mdx
+++ b/docs/docs/examples/python-function.mdx
@@ -43,6 +43,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
light: "img/python-function.png",
dark: "img/python-function.png",
}}
+ style={{
+ width: "80%",
+ margin: "20px auto",
+ display: "flex",
+ justifyContent: "center",
+ }}
/>
#### Download Flow
diff --git a/docs/docs/examples/serp-api-tool.mdx b/docs/docs/examples/serp-api-tool.mdx
index 7e8d95936..175b6f1be 100644
--- a/docs/docs/examples/serp-api-tool.mdx
+++ b/docs/docs/examples/serp-api-tool.mdx
@@ -37,6 +37,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
light: "img/serp-api-tool.png",
dark: "img/serp-api-tool.png",
}}
+ style={{
+ width: "80%",
+ margin: "20px auto",
+ display: "flex",
+ justifyContent: "center",
+ }}
/>
#### Download Flow
diff --git a/src/backend/langflow/components/agents/__init__.py b/docs/docs/getting-started/basic-prompting.mdx
similarity index 100%
rename from src/backend/langflow/components/agents/__init__.py
rename to docs/docs/getting-started/basic-prompting.mdx
diff --git a/src/backend/langflow/components/chains/__init__.py b/docs/docs/getting-started/blog-writer.mdx
similarity index 100%
rename from src/backend/langflow/components/chains/__init__.py
rename to docs/docs/getting-started/blog-writer.mdx
diff --git a/docs/docs/getting-started/cli.mdx b/docs/docs/getting-started/cli.mdx
new file mode 100644
index 000000000..050c4ab50
--- /dev/null
+++ b/docs/docs/getting-started/cli.mdx
@@ -0,0 +1,44 @@
+# π₯οΈ Command Line Interface (CLI)
+
+
+## Overview
+
+Langflow's Command Line Interface (CLI) is a powerful tool that allows you to interact with the Langflow server from the command line. The CLI provides a wide range of commands to help you shape Langflow to your needs.
+
+Running the CLI without any arguments will display a list of available commands and options.
+
+```bash
+langflow --help
+# or
+langflow
+```
+
+Each option is detailed below:
+
+- `--help`: Displays all available options.
+- `--host`: Defines the host to bind the server to. Can be set using the `LANGFLOW_HOST` environment variable. The default is `127.0.0.1`.
+- `--workers`: Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`.
+- `--timeout`: Sets the worker timeout in seconds. The default is `60`.
+- `--port`: Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`.
+- `--config`: Defines the path to the configuration file. The default is `config.yaml`.
+- `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`.
+- `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`.
+- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`.
+- `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`.
+- `--cache`: Select the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`.
+- `--dev/--no-dev`: Toggles the development mode. The default is `no-dev`.
+- `--path`: Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable.
+- `--open-browser/--no-open-browser`: Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`.
+- `--remove-api-keys/--no-remove-api-keys`: Toggles the option to remove API keys from the projects saved in the database. Can be set using the `LANGFLOW_REMOVE_API_KEYS` environment variable. The default is `no-remove-api-keys`.
+- `--install-completion [bash|zsh|fish|powershell|pwsh]`: Installs completion for the specified shell.
+- `--show-completion [bash|zsh|fish|powershell|pwsh]`: Shows completion for the specified shell, allowing you to copy it or customize the installation.
+- `--backend-only`: This parameter, with a default value of `False`, allows running only the backend server without the frontend. It can also be set using the `LANGFLOW_BACKEND_ONLY` environment variable.
+- `--store`: This parameter, with a default value of `True`, enables the store features; use `--no-store` to deactivate it. It can be configured using the `LANGFLOW_STORE` environment variable.
+
+These parameters are important for users who need to customize the behavior of Langflow, especially in development or specialized deployment scenarios.
+
+### Environment Variables
+
+You can configure many of the CLI options using environment variables. These can be exported in your operating system or added to a `.env` file and loaded using the `--env-file` option.
+
+A sample `.env` file named `.env.example` is included with the project. Copy this file to a new file named `.env` and replace the example values with your actual settings. If you're setting values in both your OS and the `.env` file, the `.env` settings will take precedence.
diff --git a/docs/docs/getting-started/creating-flows.mdx b/docs/docs/getting-started/creating-flows.mdx
deleted file mode 100644
index aecc3ea16..000000000
--- a/docs/docs/getting-started/creating-flows.mdx
+++ /dev/null
@@ -1,38 +0,0 @@
-import ThemedImage from "@theme/ThemedImage";
-import useBaseUrl from "@docusaurus/useBaseUrl";
-import ZoomableImage from "/src/theme/ZoomableImage.js";
-import ReactPlayer from "react-player";
-
-# π¨ Creating Flows
-
-## Compose
-
-Creating flows with Langflow is easy. Drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://python.langchain.com/docs/modules/) to choose from, including LLMs, prompt serializers, agents, and chains.
-
-
-
-## Fork
-
-The easiest way to start with Langflow is by forking a **community example**. Forking an example stores a copy in your project collection, allowing you to edit and save the modified version as a new flow.
-
-
-
-
-
-## Build
-
-Building a flow means validating if the components have prerequisites fulfilled and are properly instantiated. When a chat message is sent, the flow will run for the first time, executing the pipeline.
-
-
-
-
diff --git a/src/backend/langflow/components/custom_components/__init__.py b/docs/docs/getting-started/document-qa.mdx
similarity index 100%
rename from src/backend/langflow/components/custom_components/__init__.py
rename to docs/docs/getting-started/document-qa.mdx
diff --git a/docs/docs/getting-started/hugging-face-spaces.mdx b/docs/docs/getting-started/hugging-face-spaces.mdx
deleted file mode 100644
index 4759ea398..000000000
--- a/docs/docs/getting-started/hugging-face-spaces.mdx
+++ /dev/null
@@ -1,20 +0,0 @@
-# π€ HuggingFace Spaces
-
-A fully featured version of Langflow can be accessed via HuggingFace spaces with no installation required.
-
-import ThemedImage from "@theme/ThemedImage";
-import useBaseUrl from "@docusaurus/useBaseUrl";
-import ZoomableImage from "/src/theme/ZoomableImage.js";
-
-{" "}
-
-
-
-Check out Langflow on [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow).
diff --git a/docs/docs/getting-started/installation.md b/docs/docs/getting-started/installation.md
deleted file mode 100644
index c3ad54239..000000000
--- a/docs/docs/getting-started/installation.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# π¦ How to install?
-
-## Installation
-
-You can install Langflow from pip:
-
-```bash
-pip install langflow
-```
-
-Next, run:
-
-```bash
-langflow
-```
\ No newline at end of file
diff --git a/src/backend/langflow/components/documentloaders/__init__.py b/docs/docs/getting-started/memory-chatbot.mdx
similarity index 100%
rename from src/backend/langflow/components/documentloaders/__init__.py
rename to docs/docs/getting-started/memory-chatbot.mdx
diff --git a/docs/docs/getting-started/rag-with-astradb.mdx b/docs/docs/getting-started/rag-with-astradb.mdx
new file mode 100644
index 000000000..01daa7b6f
--- /dev/null
+++ b/docs/docs/getting-started/rag-with-astradb.mdx
@@ -0,0 +1,195 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import Admonition from "@theme/Admonition";
+
+# π RAG with Astra DB
+
+This guide will walk you through how to build a RAG (Retrieval Augmented Generation) application using **Astra DB** and **Langflow**.
+
+[Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=astradb) is a cloud-native database built on Apache Cassandra that is optimized for the cloud. It is a fully managed database-as-a-service that simplifies operations and reduces costs. Astra DB is built on the same technology that powers the largest Cassandra deployments in the world.
+
+In this guide, we will use Astra DB as a vector store to store and retrieve the documents that will be used by the RAG application to generate responses.
+
+
+ This guide assumes that you have Langflow up and running. If you are new to
+ Langflow, you can check out the [Getting Started](/) guide.
+
+
+TLDR;
+
+- [Create a free Astra DB account](https://astra.datastax.com/signup?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=create-a-free-astra-db-account)
+- Duplicate our [Langflow 1.0 Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true)
+- Create a new database, get a **Token** and the **API Endpoint**
+- Click on the **New Project** button and look for Vector Store RAG. This will create a new project with the necessary components
+- Import the project into Langflow by dropping it on the Canvas or My Collection page
+- Update the **Token** and **API Endpoint** in the **Astra DB** components
+- Update the OpenAI API key in the **OpenAI** components
+- Run the ingestion flow which is the one that uses the **Astra DB** component
+- Click on the ⚡ _Run_ button and start interacting with your RAG application
+
+# First things first
+
+## Create an Astra DB Database
+
+To get started, you will need to [create an Astra DB database](https://astra.datastax.com/signup?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=create-an-astradb-database).
+
+Once you have created an account, you will be taken to the Astra DB dashboard. Click on the **Create Database** button.
+
+
+
+Now you will need to configure your database. Choose the **Serverless (Vector)** deployment type, and pick a Database name, provider and region.
+
+After you have configured your database, click on the **Create Database** button.
+
+
+
+Once your database is initialized, to the right of the page, you will see the _Database Details_ section which contains a button for you to copy the **API Endpoint** and another to generate a **Token**.
+
+
+
+Now we are all set to start building our RAG application using Astra DB and Langflow.
+
+## (Optional) Duplicate the Langflow 1.0 HuggingFace Space
+
+If you haven't already, now is the time to launch Langflow. To make things easier, you can duplicate our [Langflow 1.0 Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) which sets up a Langflow instance just for you.
+
+## Open the Vector Store RAG Project
+
+To get started, click on the **New Project** button and look for the **Vector Store RAG** project. This will open a starter project with the necessary components to run a RAG application using Astra DB.
+
+
+
+This project consists of two flows. The simpler one is the **Ingestion Flow** which is responsible for ingesting the documents into the Astra DB database.
+
+Your first step should be to understand what each flow does and how they interact with each other.
+
+The ingestion flow consists of:
+
+- **Files** component that uploads a text file to Langflow
+- **Recursive Character Text Splitter** component that splits the text into smaller chunks
+- **OpenAIEmbeddings** component that generates embeddings for the text chunks
+- **Astra DB** component that stores the text chunks in the Astra DB database
+
+
+
+Now, let's update the **Astra DB** and **Astra DB Search** components with the **Token** and **API Endpoint** that we generated earlier, and the OpenAI Embeddings components with your OpenAI API key.
+
+
+
+And run it! This will ingest the Text data from your file into the Astra DB database.
+
+
+
+Now, on to the **RAG Flow**. This flow is responsible for generating responses to your queries. It will define all of the steps from getting the User's input to generating a response and displaying it in the Interaction Panel.
+
+The RAG flow is a bit more complex. It consists of:
+
+- **Chat Input** component that defines where to put the user input coming from the Interaction Panel
+- **OpenAI Embeddings** component that generates embeddings from the user input
+- **Astra DB Search** component that retrieves the most relevant Records from the Astra DB database
+- **Text Output** component that turns the Records into Text by concatenating them and also displays it in the Interaction Panel
+ - One interesting point you'll see here is that this component is named `Extracted Chunks`, and that is how it will appear in the Interaction Panel
+- **Prompt** component that takes in the user input and the retrieved Records as text and builds a prompt for the OpenAI model
+- **OpenAI** component that generates a response to the prompt
+- **Chat Output** component that displays the response in the Interaction Panel
+
+
+
+To run it, all we have to do is click on the ⚡ _Run_ button and start interacting with your RAG application.
+
+
+
+This opens the Interaction Panel where you can chat with your data.
+
+Because this flow has a **Chat Input** and a **Text Output** component, the Panel displays a chat input at the bottom and the Extracted Chunks section on the left.
+
+
+
+Once we interact with it we get a response and the Extracted Chunks section is updated with the retrieved records.
+
+
+
+And that's it! You have successfully run a RAG application using Astra DB and Langflow.
+
+# Conclusion
+
+In this guide, we have learned how to run a RAG application using Astra DB and Langflow.
+We have seen how to create an Astra DB database, import the Astra DB RAG Flows project into Langflow, and run the ingestion and RAG flows.
diff --git a/docs/docs/guidelines/api.mdx b/docs/docs/guidelines/api.mdx
index 8bba633fb..25dbeb31e 100644
--- a/docs/docs/guidelines/api.mdx
+++ b/docs/docs/guidelines/api.mdx
@@ -1,5 +1,6 @@
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
+import Admonition from "@theme/Admonition";
# API Keys
@@ -7,12 +8,17 @@ import ZoomableImage from "/src/theme/ZoomableImage.js";
Langflow offers an API Key functionality that allows users to access their individual components and flows without going through traditional login authentication. The API Key is a user-specific token that can be included in the request's header or query parameter to authenticate API calls. The following documentation outlines how to generate, use, and manage these API Keys in Langflow.
+
+ This feature requires the `LANGFLOW_AUTO_LOGIN` environment variable to be set
+ to `False`. The default user and password are set using _`LANGFLOW_SUPERUSER`_
+ and _`LANGFLOW_SUPERUSER_PASSWORD`_ environment variables. Default values are
+ _`langflow`_ and _`langflow`_ respectively.
+
+
## Generating an API Key
### Through Langflow UI
-{/* add image img/api-key.png */}
-
\
+ http://localhost:3000/api/v1/run/ \
-H 'Content-Type: application/json'\
-H 'x-api-key: '\
-d '{"inputs": {"text":""}, "tweaks": {}}'
diff --git a/docs/docs/guidelines/async-api.mdx b/docs/docs/guidelines/async-api.mdx
deleted file mode 100644
index c5473812e..000000000
--- a/docs/docs/guidelines/async-api.mdx
+++ /dev/null
@@ -1,73 +0,0 @@
-import Admonition from "@theme/Admonition";
-
-# Asynchronous Processing
-
-## Introduction
-
-Starting from version 0.5, Langflow introduces a new feature to its API: the _`sync`_ flag. This flag allows users to opt for asynchronous processing of their flows, freeing up resources and enabling better control over long-running tasks.
-This feature supports running tasks in a Celery worker queue and AnyIO task groups for now.
-
-
- This is an experimental feature. The default behavior of the API is still
- synchronous processing. The API may change in the future.
-
-
-## The _`sync`_ Flag
-
-The _`sync`_ flag can be included in the payload of your POST request to the _`/api/v1/process/`_ endpoint.
-When set to _`false`_, the API will initiate an asynchronous task instead of processing the flow synchronously.
-
-### API Request with _`sync`_ flag
-
-```bash
-curl -X POST \
- http://localhost:3000/api/v1/process/ \
- -H 'Content-Type: application/json' \
- -H 'x-api-key: ' \
- -d '{"inputs": {"text": ""}, "tweaks": {}, "sync": false}'
-```
-
-Response:
-
-```json
-{
- "result": {
- "output": "..."
- },
- "task": {
- "id": "...",
- "href": "api/v1/task/"
- },
- "session_id": "...",
- "backend": "..." // celery or anyio
-}
-```
-
-## Checking Task Status
-
-You can check the status of an asynchronous task by making a GET request to the `/task/{task_id}` endpoint.
-
-```bash
-curl -X GET \
- http://localhost:3000/api/v1/task/ \
- -H 'x-api-key: '
-```
-
-### Response
-
-The endpoint will return the current status of the task and, if completed, the result of the task. Possible statuses include:
-
-- _`PENDING`_: The task is waiting for execution.
-- _`SUCCESS`_: The task has completed successfully.
-- _`FAILURE`_: The task has failed.
-
-Example response for a completed task:
-
-```json
-{
- "status": "SUCCESS",
- "result": {
- "output": "..."
- }
-}
-```
diff --git a/docs/docs/guidelines/components.mdx b/docs/docs/guidelines/components.mdx
index 32ec00615..16aa83eff 100644
--- a/docs/docs/guidelines/components.mdx
+++ b/docs/docs/guidelines/components.mdx
@@ -26,13 +26,14 @@ Components are the building blocks of the flows. They are made of inputs, output
{" "}
+
diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx
index 99106d400..daf47987f 100644
--- a/docs/docs/guidelines/custom-component.mdx
+++ b/docs/docs/guidelines/custom-component.mdx
@@ -30,7 +30,7 @@ Here is an example:
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class DocumentProcessor(CustomComponent):
@@ -92,7 +92,7 @@ The Python script for every Custom Component should follow a set of rules. Let's
The script must contain a **single class** that inherits from _`CustomComponent`_.
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@@ -113,7 +113,7 @@ class MyComponent(CustomComponent):
This class requires a _`build`_ method used to run the component and define its fields.
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@@ -134,7 +134,7 @@ class MyComponent(CustomComponent):
The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLLM, or basic Python types). Check out all supported types in the [component reference](../components/custom).
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@@ -153,7 +153,7 @@ class MyComponent(CustomComponent):
---
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@@ -179,7 +179,7 @@ Check out the [component reference](../components/custom) for more details on th
---
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class MyComponent(CustomComponent):
@@ -204,7 +204,7 @@ Let's create a custom component that processes a document (_`langchain.schema.Do
To start, let's choose a name for our component by adding a _`display_name`_ attribute. This name will appear on the canvas. The name of the class is not relevant, but let's call it _`DocumentProcessor`_.
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
# focus
@@ -227,7 +227,7 @@ class DocumentProcessor(CustomComponent):
We can also write a description for it using a _`description`_ attribute.
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class DocumentProcessor(CustomComponent):
@@ -244,7 +244,7 @@ class DocumentProcessor(CustomComponent):
---
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class DocumentProcessor(CustomComponent):
@@ -283,11 +283,11 @@ The return type is _`Document`_.
The _`build_config`_ method is here defined to customize the component fields.
- _`options`_ determines that the field will be a dropdown menu. The list values and field type must be _`str`_.
-- _`value`_ is the default option of the dropdown menu.
+- _`value`_ is the default value of the field.
- _`display_name`_ is the name of the field to be displayed.
```python
-from langflow import CustomComponent
+from langflow.custom import CustomComponent
from langchain.schema import Document
class DocumentProcessor(CustomComponent):
diff --git a/docs/docs/guidelines/features.mdx b/docs/docs/guidelines/features.mdx
index 19837430d..932607c30 100644
--- a/docs/docs/guidelines/features.mdx
+++ b/docs/docs/guidelines/features.mdx
@@ -1,9 +1,3 @@
-import ThemedImage from "@theme/ThemedImage";
-import useBaseUrl from "@docusaurus/useBaseUrl";
-import ZoomableImage from "/src/theme/ZoomableImage.js";
-import ReactPlayer from "react-player";
-import Admonition from "@theme/Admonition";
-
# Features
@@ -14,13 +8,14 @@ import Admonition from "@theme/Admonition";
{" "}
+
@@ -46,14 +41,12 @@ The Code button shows snippets to use your flow as a Python object or an API.
**Python Code**
-Through the Langflow package, you can load a flow from a JSON file and use it as a LangChain object.
+Through the Langflow package, you can run your flow from a JSON file. The example below shows how to run a flow from a JSON file.
-```py
-from langflow import load_flow_from_json
+```python
+from langflow.load import run_flow_from_json
-flow = load_flow_from_json("path/to/flow.json")
-# Now you can use it like any chain
-flow("Hey, have you heard of Langflow?")
+results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!")
```
**API**
@@ -67,3 +60,9 @@ The example below shows a Python script making a POST request to a local API end
>
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+import Admonition from "@theme/Admonition";
diff --git a/docs/docs/guidelines/login.mdx b/docs/docs/guidelines/login.mdx
index fde7cd09a..1d5a1d031 100644
--- a/docs/docs/guidelines/login.mdx
+++ b/docs/docs/guidelines/login.mdx
@@ -105,7 +105,7 @@ Users can change their profile settings by clicking on the profile icon in the t
light: useBaseUrl("img/my-account.png"),
dark: useBaseUrl("img/my-account.png"),
}}
- style={{ width: "50%", maxWidth: "600px", margin: "0 auto" }}
+ style={{ width: "50%", maxWidth: "600px", margin: "20px auto" }}
/>
By clicking on **Profile Settings**, the user is taken to the profile settings page, where they can change their password and their profile picture.
@@ -116,10 +116,11 @@ By clicking on **Profile Settings**, the user is taken to the profile settings p
light: useBaseUrl("img/profile-settings.png"),
dark: useBaseUrl("img/profile-settings.png"),
}}
- style={{ maxWidth: "600px", margin: "0 auto" }}
+ style={{ maxWidth: "600px", margin: "20px auto" }}
/>
-By clicking on **Admin Page**, the superuser is taken to the admin page, where they can manage users and groups.
+By clicking on **Admin Page**, the superuser is taken to the admin page, where they
+can manage users and groups.
- This implementation is still in development. Contributions are welcome!
-
-
-The Async API is an implementation of the Langflow API that uses [Celery](https://docs.celeryproject.org/en/stable/)
-to run the tasks asynchronously, using a message broker to send and receive messages, a result backend to store the results and a cache to store the task states and session data.
-
-### Configuration
-
-The folder _`./deploy`_ in the [Github repository](https://github.com/logspace-ai/langflow) contains a _`.env.example`_ file that can be used to configure a Langflow deployment.
-The file contains the variables required to configure a Celery worker queue, Redis cache and result backend and a RabbitMQ message broker.
-
-To set it up locally you can copy the file to _`.env`_ and run the following command:
-
-```bash
-docker compose up -d
-```
-
-This will set up the following containers:
-
-- Langflow API
-- Celery worker
-- RabbitMQ message broker
-- Redis cache
-- PostgreSQL database
-- PGAdmin
-- Flower
-- Traefik
-- Grafana
-- Prometheus
-
-### Testing
-
-To run the tests for the Async API, you can run the following command:
-
-```bash
-docker compose -f docker-compose.with_tests.yml up --exit-code-from tests tests result_backend broker celeryworker db --build
-```
diff --git a/docs/docs/guides/superuser.mdx b/docs/docs/guides/superuser.mdx
deleted file mode 100644
index 04e0f96af..000000000
--- a/docs/docs/guides/superuser.mdx
+++ /dev/null
@@ -1,7 +0,0 @@
-import ThemedImage from "@theme/ThemedImage";
-import useBaseUrl from "@docusaurus/useBaseUrl";
-import ZoomableImage from "/src/theme/ZoomableImage.js";
-import ReactPlayer from "react-player";
-
-Now, we need to explain what are the permissions the superuser gets. Once logged in, they can activate new users,
-edit them,
diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx
index 840f10f10..9357a8fdf 100644
--- a/docs/docs/index.mdx
+++ b/docs/docs/index.mdx
@@ -1,11 +1,13 @@
-# π Welcome to Langflow
-
-Langflow is an easy way to create flows. The drag-and-drop feature allows quick and effortless experimentation, while the built-in chat interface facilitates real-time interaction. It provides options to edit prompt parameters, create chains and agents, track thought processes, and export flows.
-
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
+# π Welcome to Langflow
+
+Langflow is an easy way to build from simple to complex AI applications. It is a low-code platform that allows you to integrate AI into everything you do.
+
+{" "}
+
{" "}
+
+## π First steps
+
+## Installation
+
+Make sure you have **Python 3.10** installed on your system.
+
+You can install **Langflow** with [pipx](https://pipx.pypa.io/stable/installation/) or with pip.
+
+Pipx can fetch the missing Python version for you, but you can also install it manually.
+
+```bash
+pip install langflow -U
+# or
+pipx install langflow --python python3.10 --fetch-missing-python
+```
+
+Or you can install a pre-release version using:
+
+```bash
+pip install langflow --pre --force-reinstall
+# or
+pipx install langflow --python python3.10 --fetch-missing-python --pip-args="--pre --force-reinstall"
+```
+
+We recommend using `--force-reinstall` to ensure you have the latest version of Langflow and its dependencies.
+
+### βοΈ Running Langflow
+
+Langflow can be run in a variety of ways, including using the command-line interface (CLI) or HuggingFace Spaces.
+
+```bash
+langflow run # or langflow --help
+```
+
+#### π€ HuggingFace Spaces
+
+Hugging Face provides a great alternative for running Langflow in their Spaces environment. This means you can run Langflow without any local installation required.
+
+The first step is to go to the [Langflow Space](https://huggingface.co/spaces/Logspace/Langflow?duplicate=true).
+
+Remember to use a Chromium-based browser for the best experience. You'll be presented with the following screen:
+
+
+
+From here, just name your Space, define the visibility (Public or Private), and click on `Duplicate Space` to start the installation process. When that is done, you'll be redirected to the Space's main page to start using Langflow right away!
+
+Once you get Langflow running, click on New Project in the top right corner of the screen. Langflow provides a range of example flows to help you get started.
+
+To quickly try one of them, open a starter example, set up your API keys and click β‘ Run, on the bottom right corner of the canvas. This will open up Langflow's Interaction Panel with the chat console, text inputs, and outputs.
+
+### π₯οΈ Command Line Interface (CLI)
+
+Langflow provides a command-line interface (CLI) for easy management and configuration.
+
+#### Usage
+
+You can run Langflow using the following command:
+
+```bash
+langflow run [OPTIONS]
+```
+
+Find more information about the available options by running:
+
+```bash
+langflow --help
+```
diff --git a/src/backend/langflow/components/embeddings/__init__.py b/docs/docs/migration/api.mdx
similarity index 100%
rename from src/backend/langflow/components/embeddings/__init__.py
rename to docs/docs/migration/api.mdx
diff --git a/docs/docs/migration/compatibility.mdx b/docs/docs/migration/compatibility.mdx
new file mode 100644
index 000000000..8223bcceb
--- /dev/null
+++ b/docs/docs/migration/compatibility.mdx
@@ -0,0 +1,44 @@
+import Admonition from '@theme/Admonition';
+
+# Compatibility with Previous Versions
+
+
+## TLDR;
+
+- You'll need to add a few components to your flow to make it compatible with the new version of Langflow.
+- Add a Runnable Executor, connect it to the last component (a Chain or an Agent) in your flow, and connect a Chat Input and a Chat Output to the Runnable Executor. This should work *most of the time*.
+- You might also need to update the Chain or Agent component to the latest version.
+- Most Components will work as they are, but you'll need to add an Input and an Output to your flow.
+- You can use the Runnable Executor to run a LangChain runnable (which is the output of many components before 1.0)
+- We need your feedback on this, so please let us know how it goes and what you think.
+
+## Introduction
+
+Langflow now works best with a flow that has an Input and an Output and that is mostly what you'll need to add to your existing flows.
+
+Hopefully, you'll find that even though you can still work with your current flows, updating all your components to the new version of Langflow will be worth it.
+
+We've tried to make it as easy as possible for you to adapt your existing flows to work seamlessly in the new version of Langflow.
+
+## How to Adapt Your Existing Flows
+
+
+The steps to take are few but not always simple. Here's how you can adapt your existing flows to work seamlessly in the new version of Langflow:
+
+
+ **Caution:**
+ While this should work most of the time, it might not work for all flows. You might need to update the Chain or Agent component to the latest version. Please let us know if you encounter any issues.
+
+
+1. **Check if your flow ends with a Chain or Agent component**.
+ - If it does not, it *should* work as it is because it probably was not a chat flow.
+2. **Add a Runnable Executor**.
+ - Add a Runnable Executor to the end of your flow.
+ - Connect the last component (a Chain or an Agent) in your flow to the Runnable Executor.
+3. **Add a Chat Input and a Chat Output**.
+ - Add a Chat Input and a Chat Output to your flow.
+ - Connect the Chat Input to the Runnable Executor.
+ - Connect the Chat Output to the Runnable Executor.
+
+{/* Add picture of the flow */}
+
diff --git a/src/backend/langflow/components/llms/__init__.py b/docs/docs/migration/component-status-and-data-passing.mdx
similarity index 100%
rename from src/backend/langflow/components/llms/__init__.py
rename to docs/docs/migration/component-status-and-data-passing.mdx
diff --git a/src/backend/langflow/components/retrievers/__init__.py b/docs/docs/migration/connecting-output-components.mdx
similarity index 100%
rename from src/backend/langflow/components/retrievers/__init__.py
rename to docs/docs/migration/connecting-output-components.mdx
diff --git a/src/backend/langflow/components/textsplitters/__init__.py b/docs/docs/migration/custom-component.mdx
similarity index 100%
rename from src/backend/langflow/components/textsplitters/__init__.py
rename to docs/docs/migration/custom-component.mdx
diff --git a/src/backend/langflow/components/toolkits/__init__.py b/docs/docs/migration/experimental-components.mdx
similarity index 100%
rename from src/backend/langflow/components/toolkits/__init__.py
rename to docs/docs/migration/experimental-components.mdx
diff --git a/src/backend/langflow/components/tools/__init__.py b/docs/docs/migration/flow-of-data.mdx
similarity index 100%
rename from src/backend/langflow/components/tools/__init__.py
rename to docs/docs/migration/flow-of-data.mdx
diff --git a/docs/docs/migration/global-variables.mdx b/docs/docs/migration/global-variables.mdx
new file mode 100644
index 000000000..ce6d15a5f
--- /dev/null
+++ b/docs/docs/migration/global-variables.mdx
@@ -0,0 +1,65 @@
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import Admonition from "@theme/Admonition";
+
+# Global Variables
+
+Global Variables are a really useful feature of Langflow.
+They allow you to define reusable variables that can be accessed from any Text field in your project.
+
+The first thing you need to do is find a **Text field** in a Component, so let's talk about what a Text field is.
+
+## Text Fields
+
+Text fields are the fields in a Component where you can write text but that do not allow you to open a Text Area.
+
+The easiest way to find fields that are Text fields, though, is to look for fields that have a π button.
+
+
+
+## Creating a Global Variable
+
+To create a Global Variable, you need to click on the π button in a Text field and that will open a dropdown showing your currently available variables and at the end of it **+ Add New Variable**.
+
+
+
+Click on **+ Add New Variable** and a window will open where you can define your new Global Variable.
+
+In it, you can define the **Name** of the variable, the optional **Type** of the variable, and the **Value** of the variable.
+
+The **Name** is the name that you will use to refer to the variable in your Text fields.
+
+The **Type** is optional for now but will be used in the future to allow for more advanced features.
+
+The **Value** is the value that the variable will have.
+{/* say that all variables are encrypted */}
+
+
+ All Global Variables are encrypted and cannot be accessed by anyone but you.
+
+
+
+
+After you have defined your variable, click on **Save Variable** and your variable will be created.
+
+After that, once you click on the π button in a Text field, you will see your new variable in the dropdown.
diff --git a/docs/docs/migration/inputs-and-outputs.mdx b/docs/docs/migration/inputs-and-outputs.mdx
new file mode 100644
index 000000000..5db3f3af2
--- /dev/null
+++ b/docs/docs/migration/inputs-and-outputs.mdx
@@ -0,0 +1,36 @@
+# Inputs and Outputs
+
+TL;DR: Inputs and Outputs are a category of components that are used to define where data comes in and out of your flow. They also
+dynamically change the Interaction Panel and can be renamed to make it easier to build and maintain your flows.
+
+## Introduction
+
+Langflow 1.0 introduces new categories of components called Inputs and Outputs. They are used to make it easier to understand and interact with your flows.
+
+Let's start with what they have in common:
+
+- Components in these categories connect to components that have Text or Record inputs or outputs. Some can connect to both but you have to pick what type of data you want to output or input.
+- They can be renamed to help you identify them more easily in the Interaction Panel and while using the API.
+- They dynamically change the Interaction Panel to make it easier to understand and interact with your flows.
+
+Native Langflow Components were created to be powerful tools that work around Langflow's features. They are designed to be easy to use and understand, and to help you build your flows faster.
+
+Let's dive into Inputs and Outputs.
+
+## Inputs
+
+Inputs are components that are used to define where data comes into your flow. They can be used to receive data from the user, from a database, or from any other source that can be converted to Text or Record.
+
+The difference between Chat Input and other Input components is the format of the output, the number of configurable fields, and the way they are displayed in the Interaction Panel.
+
+Chat Input components can output Text or Record. When you want to pass the sender name, or sender to the next component, you can use the Record output, and when you want to pass the message only you can use the Text output. This is useful when saving the message to a database or a memory system like Zep.
+
+You can find out more about it and the other Inputs [here](../components/inputs).
+
+## Outputs
+
+Outputs are components that are used to define where data comes out of your flow. They can be used to send data to the user, to the Interaction Panel, or to define how the data will be displayed in the Interaction Panel.
+
+The Chat Output works similarly to the Chat Input but does not have a field that allows for written input. It is used as an Output definition and can be used to send data to the user.
+
+You can find out more about it and the other Outputs [here](../components/outputs).
diff --git a/src/backend/langflow/components/vectorstores/__init__.py b/docs/docs/migration/multiple-flows.mdx
similarity index 100%
rename from src/backend/langflow/components/vectorstores/__init__.py
rename to docs/docs/migration/multiple-flows.mdx
diff --git a/src/backend/langflow/core/__init__.py b/docs/docs/migration/new-categories-and-components.mdx
similarity index 100%
rename from src/backend/langflow/core/__init__.py
rename to docs/docs/migration/new-categories-and-components.mdx
diff --git a/src/backend/langflow/custom/__init__.py b/docs/docs/migration/passing-tweaks-and-inputs.mdx
similarity index 100%
rename from src/backend/langflow/custom/__init__.py
rename to docs/docs/migration/passing-tweaks-and-inputs.mdx
diff --git a/src/backend/langflow/graph/edge/__init__.py b/docs/docs/migration/renaming-and-editing-components.mdx
similarity index 100%
rename from src/backend/langflow/graph/edge/__init__.py
rename to docs/docs/migration/renaming-and-editing-components.mdx
diff --git a/src/backend/langflow/graph/graph/__init__.py b/docs/docs/migration/sidebar-and-interaction-panel.mdx
similarity index 100%
rename from src/backend/langflow/graph/graph/__init__.py
rename to docs/docs/migration/sidebar-and-interaction-panel.mdx
diff --git a/src/backend/langflow/graph/vertex/__init__.py b/docs/docs/migration/state-management.mdx
similarity index 100%
rename from src/backend/langflow/graph/vertex/__init__.py
rename to docs/docs/migration/state-management.mdx
diff --git a/src/backend/langflow/interface/__init__.py b/docs/docs/migration/supported-frameworks.mdx
similarity index 100%
rename from src/backend/langflow/interface/__init__.py
rename to docs/docs/migration/supported-frameworks.mdx
diff --git a/docs/docs/migration/text-and-record.mdx b/docs/docs/migration/text-and-record.mdx
new file mode 100644
index 000000000..cdfb26b6c
--- /dev/null
+++ b/docs/docs/migration/text-and-record.mdx
@@ -0,0 +1,45 @@
+# Text and Record
+
+In Langflow 1.0 we added two main input and output types: Text and Record. Text is a simple string input and output type, while Record is a structure very similar to a dictionary in Python. It is a key-value pair data structure.
+
+We've created a few components to help you work with these types. Let's see how a few of them work.
+
+### Records To Text
+
+This is a Component that takes in Records and outputs a Text. It does this using a template string and concatenating the values of the Record, one per line.
+
+If we have the following Records:
+
+```json
+{
+ "sender_name": "Alice",
+ "message": "Hello!"
+}
+{
+ "sender_name": "John",
+ "message": "Hi!"
+}
+```
+
+And if the template string is _`{sender_name}: {message}`_, the output will be:
+
+```
+Alice: Hello!
+John: Hi!
+```
+
+### Create Record
+
+This Component allows you to create a Record from a number of inputs. You can add as many key-value pairs as you want (as long as it is less than 15 π
). Once you've picked that number you'll need to write the name of the Key and can pass Text values from other components to it.
+
+### Documents To Records
+
+This Component takes in a [LangChain](https://langchain.com) Document and outputs a Record. It does this by extracting the _`page_content`_ and the _`metadata`_ from the Document and adding them to the Record as _`text`_ and _`data`_ respectively.
+
+## Why is this useful?
+
+The idea was to create a unified way to work with complex data in Langflow, and to make it easier to work with data that is not just a simple string. This way you can create more complex workflows and use the data in more ways.
+
+## What's next?
+
+We are planning to integrate an array of modalities to Langflow, such as images, audio, and video. This will allow you to create even more complex workflows and use cases. Stay tuned for more updates! π
diff --git a/docs/docs/whats-new/a-new-chapter-langflow.mdx b/docs/docs/whats-new/a-new-chapter-langflow.mdx
new file mode 100644
index 000000000..b312dc198
--- /dev/null
+++ b/docs/docs/whats-new/a-new-chapter-langflow.mdx
@@ -0,0 +1,96 @@
+# A new chapter for Langflow
+
+# First things first
+
+Thank you all for being part of the Langflow community. The journey so far has been amazing and we are happy to have you with us.
+
+We have some exciting news to share with you. Langflow is changing, and we want to tell you all about it.
+
+## Where have we been?
+
+We spent the last few months working on a new version of Langflow. We wanted to make it more powerful, more flexible, and easier to use.
+We're moving from version 0.6 straight to 1.0 (preview). This is a big change, and we want to explain why we're doing it and what it means for you.
+
+## Why?
+
+In the past year, we learned a lot from the community and our users. We saw the potential of Langflow and the need for a more powerful and flexible tool for building conversational AI applications (and beyond).
+We realized that Langflow was hiding things from you that would really help you build better and more complex conversational AI applications. So we decided to make a big change.
+
+## The only way to go is forward
+
+From all the people we talked to, we learned that the most important thing for (most of) them is to have a tool that is easy to use, but also powerful and controllable. They also told us that Langflow's transparency could be improved.
+
+In those points, we saw an opportunity to make Langflow much more powerful and flexible, while also making it easier to use and understand.
+
+One key change you'll notice is that projects now require you to define Inputs and Outputs.
+This is a big change, but it's also a big improvement.
+It allows you to define the structure of your conversation and the data that flows through it.
+This makes it easier to understand and control your conversation.
+
+This change comes with a new way of visualizing your projects. Before 1.0 you would connect Components to ultimately build one final Component that was processed behind the scenes.
+Now, each step of the process is defined by you, is visible on the canvas, and can be monitored and controlled by you. This makes it so that Composition is now just another way of building in Langflow. **Now data flows through your project more transparently**.
+
+The caveat is that existing projects may need some new Components to get them back to their full functionality.
+[We've made this as easy as possible](../migration/compatibility), and there will be improvements to it as we get feedback in our Discord server and on GitHub.
+
+## Custom Interactions
+
+The moment we decided to make this change, we saw the potential to make Langflow even more yours.
+By having a clear definition of Inputs and Outputs, we could build the experience around that which led us to create the **Interaction Panel**.
+
+When building a project testing and debugging is crucial. The Interaction Panel is a tool that changes dynamically based on the Inputs and Outputs you defined in your project.
+
+For example, let's say you are building a simple RAG application. Generally, you have an Input, some references that come from a Vector Store Search, a Prompt and the answer.
+Now, you could plug the output of your Prompt into a [Text Output](../components/outputs#Text-Output), rename that to "Prompt Result" and see the output of your Prompt in the Interaction Panel.
+
+{/* Add image here of the described above */}
+
+This is just one example of how the Interaction Panel can help you build and debug your projects.
+
+We have many planned features for the Interaction Panel, and we're excited to see how you use it and what you think of it.
+
+## An easier start
+
+The experience for the first-time user is also something we wanted to improve.
+
+Meet the new and improved **New Project** screen. It's now easier to start a new project, and you can choose from a list of starter projects to get you started.
+
+{/* Add new project image */}
+
+We wanted to create starter projects that would help you learn about new features and also give you a head start on your projects.
+
+For now, we have:
+
+- **[Basic Prompting (Hello, world!)](/getting-started/basic-prompting)**: A simple flow that shows you how to use the Prompt Component and how to talk like a pirate.
+- **[Vector Store RAG](/getting-started/rag-with-astradb)**: A flow that shows you how to ingest data into a Vector Store and then use it to run a RAG application.
+- **[Memory Chatbot](/getting-started/memory-chatbot)**: This one shows you how to create a simple chatbot that can remember things about the user.
+- **[Document QA](/getting-started/document-qa)**: This flow shows you how to build a simple flow that helps you get answers about a document.
+- **[Blog Writer](/getting-started/blog-writer)**: Shows you how you can expand on the Prompt variables and be creative about what inputs you add to it.
+
+As always, your feedback is invaluable, so please let us know what you think of the new starter projects and what you would like to see in the future.
+
+## Less is more
+
+We added many new Components to Langflow and updated some of the existing ones, and we will deprecate some of them.
+
+The idea is that Langflow has evolved, and we want to make sure that the Components you use are the best they can be.
+Some of them don't work well with the others, and some of them are just not needed anymore.
+
+We are working on a list of Components that will be deprecated.
+In the preview stages of 1.0, we will have a smaller list of Components so that we make sure that the ones we have are the best they can be.
+Regardless, community feedback is very important in this matter, so please let us know what you think of the new Components and which ones you miss.
+
+We are aiming to have a more stable and reliable set of Components that helps you get quickly to useful results.
+This also means that your contributions in the [Langflow Store](https://langflow.store) and throughout the community are more important than ever.
+
+## What's next?
+
+Langflow went through a big change, and we are excited to see how you use it and what you think of it.
+
+We plan to add more types of Input and Output like Image and Audio, and we also plan to add more Components to help you build more complex projects.
+
+We also have some experimental features like a State Management System (so cool!) and a new way of building Grouped Components that we are excited to show you.
+
+## Reach out
+
+One last time, we want to thank you for being part of the Langflow community. Your feedback is invaluable, and we want to hear from you.
diff --git a/docs/docs/whats-new/customization-control.mdx b/docs/docs/whats-new/customization-control.mdx
new file mode 100644
index 000000000..11f23f53c
--- /dev/null
+++ b/docs/docs/whats-new/customization-control.mdx
@@ -0,0 +1 @@
+# A New Customization and Control
\ No newline at end of file
diff --git a/docs/docs/whats-new/debugging-reimagined.mdx b/docs/docs/whats-new/debugging-reimagined.mdx
new file mode 100644
index 000000000..d30234088
--- /dev/null
+++ b/docs/docs/whats-new/debugging-reimagined.mdx
@@ -0,0 +1 @@
+# Debugging Reimagined
\ No newline at end of file
diff --git a/docs/docs/whats-new/migrating-to-one-point-zero.mdx b/docs/docs/whats-new/migrating-to-one-point-zero.mdx
new file mode 100644
index 000000000..45bae6084
--- /dev/null
+++ b/docs/docs/whats-new/migrating-to-one-point-zero.mdx
@@ -0,0 +1,125 @@
+# Migrating to Langflow 1.0: A Guide
+
+Langflow 1.0 is a significant update that brings many exciting changes and improvements to the platform.
+This guide will walk you through the key improvements and help you migrate your existing projects to the new version.
+
+If you have any questions or need assistance during the migration process, please don't hesitate to reach out to us in our [Discord](https://discord.gg/wZSWQaukgJ) or [GitHub](https://github.com/logspace-ai/langflow/issues) community.
+
+We have a special channel in our Discord server dedicated to Langflow 1.0 migration, where you can ask questions, share your experiences, and get help from the community.
+
+## TL;DR
+
+- Inputs and Outputs of Components have changed
+- We've surfaced steps that were previously run in the background
+- Continued support for LangChain and new support for multiple frameworks
+- Redesigned sidebar and customizable interaction panel
+- New Native Categories and Components
+- Improved user experience with Text and Record modes
+- CustomComponent for all components
+- Compatibility with previous versions using Runnable Executor
+- Multiple flows in the canvas
+- Improved component status
+- Ability to connect Output components to any other Component
+- Rename and edit component descriptions
+- Pass tweaks and inputs in the API using Display Name
+- Global Variables for Text Fields
+- Experimental components like SubFlow and Flow as Tool
+- Experimental State Management system with Notify and Listen components
+
+## Inputs and Outputs of Components
+
+Langflow 1.0 introduces the concept of Inputs and Outputs to flows, allowing a clear definition of the data flow between components. Discover how to use Inputs and Outputs to pass data between components and create more dynamic flows.
+
+[Learn more about Inputs and Outputs of Components](../migration/inputs-and-outputs)
+
+## To Compose or Not to Compose: the choice is yours
+
+Even though composition is still possible in Langflow 1.0, the new standard is getting data moving through the flow. This allows for more flexibility and control over the data flow in your projects.
+
+We will create guides on how to interweave LangChain components with our Core components soon.
+
+## Continued Support for LangChain and Multiple Frameworks
+
+Langflow 1.0 continues to support LangChain while also introducing support for multiple frameworks. This is another important boon that adding the paradigm of data flow brings to the table. Find out how to leverage the power of different frameworks in your projects.
+
+[Learn more about Supported Frameworks](../migration/supported-frameworks)
+
+## Sidebar Redesign and Customizable Interaction Panel
+
+We've expanded on the chat experience by creating a customizable interaction panel that allows you to design a panel that fits your needs and interact with it. The sidebar has also been redesigned to provide a more intuitive and user-friendly experience. Explore the new sidebar and interaction panel features to enhance your workflow.
+
+[Learn more about some of the UI updates](../migration/sidebar-and-interaction-panel)
+
+## New Native Categories and Components
+
+Langflow 1.0 introduces many new native categories, including Inputs, Outputs, Helpers, Experimental, Models, and more. Discover the new components available, such as Chat Input, Prompt, Files, API Request, and others.
+
+[Learn more about New Categories and Components](../migration/new-categories-and-components)
+
+## New Way of Using Langflow: Text and Record (and more to come)
+
+With the introduction of Text and Record types, connections between Components are more intuitive and easier to understand. This is the first step in a series of improvements to the way you interact with Langflow. Learn how to use Text and Record and how they help you build better flows.
+
+[Learn more about Text and Record](../migration/text-and-record)
+
+## CustomComponent for All Components
+
+Almost all components in Langflow 1.0 are now CustomComponents, allowing you to check and modify the code of each component. Discover how to leverage this feature to customize your components to your specific needs.
+
+[Learn more about CustomComponent](../migration/custom-component)
+
+## Compatibility with Previous Versions
+
+To use flows built in previous versions of Langflow, you can utilize the experimental component Runnable Executor along with an Input and Output. **We'd love your feedback on this**. Learn how to adapt your existing flows to work seamlessly in the new version of Langflow.
+
+[Learn more about Compatibility with Previous Versions](../migration/compatibility)
+
+## Multiple Flows in the Canvas
+
+Langflow 1.0 allows you to have more than one flow in the canvas and run them separately. Discover how to create and manage multiple flows within a single project.
+
+[Learn more about Multiple Flows](../migration/multiple-flows)
+
+## Improved Component Status
+
+Each component now displays its status more clearly, allowing you to quickly identify any issues or errors. Explore how to use the new component status feature to troubleshoot and optimize your flows.
+
+[Learn more about Component Status](../migration/component-status-and-data-passing)
+
+## Connecting Output Components
+
+You can now connect Output components to any other component (that has a Text output), providing a better understanding of the data flow. Explore the possibilities of connecting Output components and how it enhances your flow's functionality.
+
+[Learn more about Connecting Output Components](../migration/connecting-output-components)
+
+## Renaming and Editing Component Descriptions
+
+Langflow 1.0 allows you to rename and edit the description of each component, making it easier to understand and interact with the flow. Learn how to customize your component names and descriptions for improved clarity.
+
+[Learn more about Renaming and Editing Components](../migration/renaming-and-editing-components)
+
+## Passing Tweaks and Inputs in the API
+
+Things got a whole lot easier. You can now pass tweaks and inputs in the API by referencing the Display Name of the component. Discover how to leverage this feature to dynamically control your flow's behavior.
+
+[Learn more about Passing Tweaks and Inputs](../migration/passing-tweaks-and-inputs)
+
+## Global Variables for Text Fields
+
+Global Variables can be used in any Text Field across your projects. Learn how to define and utilize Global Variables to streamline your workflow.
+
+[Learn more about Global Variables](../migration/global-variables)
+
+## Experimental Components
+
+Explore the experimental components available in Langflow 1.0, such as SubFlow, which allows you to load a flow as a component dynamically, and Flow as Tool, which enables you to use a flow as a tool for an Agent.
+
+[Learn more about Experimental Components](../migration/experimental-components)
+
+## Experimental State Management System
+
+We are experimenting with a State Management system for flows that allows components to trigger other components and pass messages between them using the Notify and Listen components. Discover how to leverage this system to create more dynamic and interactive flows.
+
+[Learn more about State Management](../migration/state-management)
+
+We hope this guide helps you navigate the changes and improvements in Langflow 1.0. If you have any questions or need further assistance, please don't hesitate to reach out to us in our [Discord](https://discord.gg/wZSWQaukgJ).
\ No newline at end of file
diff --git a/docs/docs/whats-new/simplification-standardization.mdx b/docs/docs/whats-new/simplification-standardization.mdx
new file mode 100644
index 000000000..f7e3115bc
--- /dev/null
+++ b/docs/docs/whats-new/simplification-standardization.mdx
@@ -0,0 +1 @@
+# Simplification Through Standardization
\ No newline at end of file
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js
index 430aebcb0..979953918 100644
--- a/docs/docusaurus.config.js
+++ b/docs/docusaurus.config.js
@@ -14,6 +14,7 @@ module.exports = {
organizationName: "logspace-ai",
projectName: "langflow",
trailingSlash: false,
+ staticDirectories: ["static"],
customFields: {
mendableAnonKey: process.env.MENDABLE_ANON_KEY,
},
@@ -42,6 +43,10 @@ module.exports = {
path: "docs",
// sidebarPath: 'sidebars.js',
},
+ gtag: {
+ trackingID: 'G-XHC7G628ZP',
+ anonymizeIP: true,
+ },
theme: {
customCss: [
require.resolve("@code-hike/mdx/styles.css"),
diff --git a/docs/package-lock.json b/docs/package-lock.json
index 6742b89e7..c91a4ec06 100644
--- a/docs/package-lock.json
+++ b/docs/package-lock.json
@@ -10,11 +10,12 @@
"dependencies": {
"@babel/preset-react": "^7.22.3",
"@code-hike/mdx": "^0.9.0",
- "@docusaurus/core": "3.0.1",
- "@docusaurus/plugin-ideal-image": "^3.0.1",
- "@docusaurus/preset-classic": "3.0.1",
- "@docusaurus/theme-classic": "^3.0.1",
- "@docusaurus/theme-search-algolia": "^3.0.1",
+ "@docusaurus/core": "^3.2.0",
+ "@docusaurus/plugin-google-gtag": "^3.2.0",
+ "@docusaurus/plugin-ideal-image": "^3.2.0",
+ "@docusaurus/preset-classic": "^3.2.0",
+ "@docusaurus/theme-classic": "^3.2.0",
+ "@docusaurus/theme-search-algolia": "^3.2.0",
"@mdx-js/react": "^2.3.0",
"@mendable/search": "^0.0.154",
"@pbe/react-yandex-maps": "^1.2.4",
@@ -41,7 +42,7 @@
"tailwindcss": "^3.3.2"
},
"devDependencies": {
- "@docusaurus/module-type-aliases": "2.4.1",
+ "@docusaurus/module-type-aliases": "^3.2.0",
"css-loader": "^6.8.1",
"docusaurus-node-polyfills": "^1.0.0",
"node-sass": "^9.0.0",
@@ -94,74 +95,74 @@
}
},
"node_modules/@algolia/cache-browser-local-storage": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.22.0.tgz",
- "integrity": "sha512-uZ1uZMLDZb4qODLfTSNHxSi4fH9RdrQf7DXEzW01dS8XK7QFtFh29N5NGKa9S+Yudf1vUMIF+/RiL4i/J0pWlQ==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.23.2.tgz",
+ "integrity": "sha512-PvRQdCmtiU22dw9ZcTJkrVKgNBVAxKgD0/cfiqyxhA5+PHzA2WDt6jOmZ9QASkeM2BpyzClJb/Wr1yt2/t78Kw==",
"dependencies": {
- "@algolia/cache-common": "4.22.0"
+ "@algolia/cache-common": "4.23.2"
}
},
"node_modules/@algolia/cache-common": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.22.0.tgz",
- "integrity": "sha512-TPwUMlIGPN16eW67qamNQUmxNiGHg/WBqWcrOoCddhqNTqGDPVqmgfaM85LPbt24t3r1z0zEz/tdsmuq3Q6oaA=="
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.23.2.tgz",
+ "integrity": "sha512-OUK/6mqr6CQWxzl/QY0/mwhlGvS6fMtvEPyn/7AHUx96NjqDA4X4+Ju7aXFQKh+m3jW9VPB0B9xvEQgyAnRPNw=="
},
"node_modules/@algolia/cache-in-memory": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.22.0.tgz",
- "integrity": "sha512-kf4Cio9NpPjzp1+uXQgL4jsMDeck7MP89BYThSvXSjf2A6qV/0KeqQf90TL2ECS02ovLOBXkk98P7qVarM+zGA==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.23.2.tgz",
+ "integrity": "sha512-rfbi/SnhEa3MmlqQvgYz/9NNJ156NkU6xFxjbxBtLWnHbpj+qnlMoKd+amoiacHRITpajg6zYbLM9dnaD3Bczw==",
"dependencies": {
- "@algolia/cache-common": "4.22.0"
+ "@algolia/cache-common": "4.23.2"
}
},
"node_modules/@algolia/client-account": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.22.0.tgz",
- "integrity": "sha512-Bjb5UXpWmJT+yGWiqAJL0prkENyEZTBzdC+N1vBuHjwIJcjLMjPB6j1hNBRbT12Lmwi55uzqeMIKS69w+0aPzA==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.23.2.tgz",
+ "integrity": "sha512-VbrOCLIN/5I7iIdskSoSw3uOUPF516k4SjDD4Qz3BFwa3of7D9A0lzBMAvQEJJEPHWdVraBJlGgdJq/ttmquJQ==",
"dependencies": {
- "@algolia/client-common": "4.22.0",
- "@algolia/client-search": "4.22.0",
- "@algolia/transporter": "4.22.0"
+ "@algolia/client-common": "4.23.2",
+ "@algolia/client-search": "4.23.2",
+ "@algolia/transporter": "4.23.2"
}
},
"node_modules/@algolia/client-analytics": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.22.0.tgz",
- "integrity": "sha512-os2K+kHUcwwRa4ArFl5p/3YbF9lN3TLOPkbXXXxOvDpqFh62n9IRZuzfxpHxMPKAQS3Et1s0BkKavnNP02E9Hg==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.23.2.tgz",
+ "integrity": "sha512-lLj7irsAztGhMoEx/SwKd1cwLY6Daf1Q5f2AOsZacpppSvuFvuBrmkzT7pap1OD/OePjLKxicJS8wNA0+zKtuw==",
"dependencies": {
- "@algolia/client-common": "4.22.0",
- "@algolia/client-search": "4.22.0",
- "@algolia/requester-common": "4.22.0",
- "@algolia/transporter": "4.22.0"
+ "@algolia/client-common": "4.23.2",
+ "@algolia/client-search": "4.23.2",
+ "@algolia/requester-common": "4.23.2",
+ "@algolia/transporter": "4.23.2"
}
},
"node_modules/@algolia/client-common": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.22.0.tgz",
- "integrity": "sha512-BlbkF4qXVWuwTmYxVWvqtatCR3lzXwxx628p1wj1Q7QP2+LsTmGt1DiUYRuy9jG7iMsnlExby6kRMOOlbhv2Ag==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.23.2.tgz",
+ "integrity": "sha512-Q2K1FRJBern8kIfZ0EqPvUr3V29ICxCm/q42zInV+VJRjldAD9oTsMGwqUQ26GFMdFYmqkEfCbY4VGAiQhh22g==",
"dependencies": {
- "@algolia/requester-common": "4.22.0",
- "@algolia/transporter": "4.22.0"
+ "@algolia/requester-common": "4.23.2",
+ "@algolia/transporter": "4.23.2"
}
},
"node_modules/@algolia/client-personalization": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.22.0.tgz",
- "integrity": "sha512-pEOftCxeBdG5pL97WngOBi9w5Vxr5KCV2j2D+xMVZH8MuU/JX7CglDSDDb0ffQWYqcUN+40Ry+xtXEYaGXTGow==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.23.2.tgz",
+ "integrity": "sha512-vwPsgnCGhUcHhhQG5IM27z8q7dWrN9itjdvgA6uKf2e9r7vB+WXt4OocK0CeoYQt3OGEAExryzsB8DWqdMK5wg==",
"dependencies": {
- "@algolia/client-common": "4.22.0",
- "@algolia/requester-common": "4.22.0",
- "@algolia/transporter": "4.22.0"
+ "@algolia/client-common": "4.23.2",
+ "@algolia/requester-common": "4.23.2",
+ "@algolia/transporter": "4.23.2"
}
},
"node_modules/@algolia/client-search": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.22.0.tgz",
- "integrity": "sha512-bn4qQiIdRPBGCwsNuuqB8rdHhGKKWIij9OqidM1UkQxnSG8yzxHdb7CujM30pvp5EnV7jTqDZRbxacbjYVW20Q==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.23.2.tgz",
+ "integrity": "sha512-CxSB29OVGSE7l/iyoHvamMonzq7Ev8lnk/OkzleODZ1iBcCs3JC/XgTIKzN/4RSTrJ9QybsnlrN/bYCGufo7qw==",
"dependencies": {
- "@algolia/client-common": "4.22.0",
- "@algolia/requester-common": "4.22.0",
- "@algolia/transporter": "4.22.0"
+ "@algolia/client-common": "4.23.2",
+ "@algolia/requester-common": "4.23.2",
+ "@algolia/transporter": "4.23.2"
}
},
"node_modules/@algolia/events": {
@@ -170,47 +171,65 @@
"integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ=="
},
"node_modules/@algolia/logger-common": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.22.0.tgz",
- "integrity": "sha512-HMUQTID0ucxNCXs5d1eBJ5q/HuKg8rFVE/vOiLaM4Abfeq1YnTtGV3+rFEhOPWhRQxNDd+YHa4q864IMc0zHpQ=="
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.23.2.tgz",
+ "integrity": "sha512-jGM49Q7626cXZ7qRAWXn0jDlzvoA1FvN4rKTi1g0hxKsTTSReyYk0i1ADWjChDPl3Q+nSDhJuosM2bBUAay7xw=="
},
"node_modules/@algolia/logger-console": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.22.0.tgz",
- "integrity": "sha512-7JKb6hgcY64H7CRm3u6DRAiiEVXMvCJV5gRE672QFOUgDxo4aiDpfU61g6Uzy8NKjlEzHMmgG4e2fklELmPXhQ==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.23.2.tgz",
+ "integrity": "sha512-oo+lnxxEmlhTBTFZ3fGz1O8PJ+G+8FiAoMY2Qo3Q4w23xocQev6KqDTA1JQAGPDxAewNA2VBwWOsVXeXFjrI/Q==",
"dependencies": {
- "@algolia/logger-common": "4.22.0"
+ "@algolia/logger-common": "4.23.2"
+ }
+ },
+ "node_modules/@algolia/recommend": {
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-4.23.2.tgz",
+ "integrity": "sha512-Q75CjnzRCDzgIlgWfPnkLtrfF4t82JCirhalXkSSwe/c1GH5pWh4xUyDOR3KTMo+YxxX3zTlrL/FjHmUJEWEcg==",
+ "dependencies": {
+ "@algolia/cache-browser-local-storage": "4.23.2",
+ "@algolia/cache-common": "4.23.2",
+ "@algolia/cache-in-memory": "4.23.2",
+ "@algolia/client-common": "4.23.2",
+ "@algolia/client-search": "4.23.2",
+ "@algolia/logger-common": "4.23.2",
+ "@algolia/logger-console": "4.23.2",
+ "@algolia/requester-browser-xhr": "4.23.2",
+ "@algolia/requester-common": "4.23.2",
+ "@algolia/requester-node-http": "4.23.2",
+ "@algolia/transporter": "4.23.2"
}
},
"node_modules/@algolia/requester-browser-xhr": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.22.0.tgz",
- "integrity": "sha512-BHfv1h7P9/SyvcDJDaRuIwDu2yrDLlXlYmjvaLZTtPw6Ok/ZVhBR55JqW832XN/Fsl6k3LjdkYHHR7xnsa5Wvg==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.23.2.tgz",
+ "integrity": "sha512-TO9wLlp8+rvW9LnIfyHsu8mNAMYrqNdQ0oLF6eTWFxXfxG3k8F/Bh7nFYGk2rFAYty4Fw4XUtrv/YjeNDtM5og==",
"dependencies": {
- "@algolia/requester-common": "4.22.0"
+ "@algolia/requester-common": "4.23.2"
}
},
"node_modules/@algolia/requester-common": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.22.0.tgz",
- "integrity": "sha512-Y9cEH/cKjIIZgzvI1aI0ARdtR/xRrOR13g5psCxkdhpgRN0Vcorx+zePhmAa4jdQNqexpxtkUdcKYugBzMZJgQ=="
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.23.2.tgz",
+ "integrity": "sha512-3EfpBS0Hri0lGDB5H/BocLt7Vkop0bTTLVUBB844HH6tVycwShmsV6bDR7yXbQvFP1uNpgePRD3cdBCjeHmk6Q=="
},
"node_modules/@algolia/requester-node-http": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.22.0.tgz",
- "integrity": "sha512-8xHoGpxVhz3u2MYIieHIB6MsnX+vfd5PS4REgglejJ6lPigftRhTdBCToe6zbwq4p0anZXjjPDvNWMlgK2+xYA==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.23.2.tgz",
+ "integrity": "sha512-SVzgkZM/malo+2SB0NWDXpnT7nO5IZwuDTaaH6SjLeOHcya1o56LSWXk+3F3rNLz2GVH+I/rpYKiqmHhSOjerw==",
"dependencies": {
- "@algolia/requester-common": "4.22.0"
+ "@algolia/requester-common": "4.23.2"
}
},
"node_modules/@algolia/transporter": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.22.0.tgz",
- "integrity": "sha512-ieO1k8x2o77GNvOoC+vAkFKppydQSVfbjM3YrSjLmgywiBejPTvU1R1nEvG59JIIUvtSLrZsLGPkd6vL14zopA==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.23.2.tgz",
+ "integrity": "sha512-GY3aGKBy+8AK4vZh8sfkatDciDVKad5rTY2S10Aefyjh7e7UGBP4zigf42qVXwU8VOPwi7l/L7OACGMOFcjB0Q==",
"dependencies": {
- "@algolia/cache-common": "4.22.0",
- "@algolia/logger-common": "4.22.0",
- "@algolia/requester-common": "4.22.0"
+ "@algolia/cache-common": "4.23.2",
+ "@algolia/logger-common": "4.23.2",
+ "@algolia/requester-common": "4.23.2"
}
},
"node_modules/@alloc/quick-lru": {
@@ -2033,18 +2052,18 @@
}
},
"node_modules/@docsearch/css": {
- "version": "3.5.2",
- "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.5.2.tgz",
- "integrity": "sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA=="
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.6.0.tgz",
+ "integrity": "sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ=="
},
"node_modules/@docsearch/react": {
- "version": "3.5.2",
- "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.5.2.tgz",
- "integrity": "sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng==",
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.6.0.tgz",
+ "integrity": "sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w==",
"dependencies": {
"@algolia/autocomplete-core": "1.9.3",
"@algolia/autocomplete-preset-algolia": "1.9.3",
- "@docsearch/css": "3.5.2",
+ "@docsearch/css": "3.6.0",
"algoliasearch": "^4.19.1"
},
"peerDependencies": {
@@ -2069,9 +2088,9 @@
}
},
"node_modules/@docusaurus/core": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.0.1.tgz",
- "integrity": "sha512-CXrLpOnW+dJdSv8M5FAJ3JBwXtL6mhUWxFA8aS0ozK6jBG/wgxERk5uvH28fCeFxOGbAT9v1e9dOMo1X2IEVhQ==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.2.0.tgz",
+ "integrity": "sha512-WTO6vW4404nhTmK9NL+95nd13I1JveFwZ8iOBYxb4xt+N2S3KzY+mm+1YtWw2vV37FbYfH+w+KrlrRaWuy5Hzw==",
"dependencies": {
"@babel/core": "^7.23.3",
"@babel/generator": "^7.23.3",
@@ -2083,14 +2102,13 @@
"@babel/runtime": "^7.22.6",
"@babel/runtime-corejs3": "^7.22.6",
"@babel/traverse": "^7.22.8",
- "@docusaurus/cssnano-preset": "3.0.1",
- "@docusaurus/logger": "3.0.1",
- "@docusaurus/mdx-loader": "3.0.1",
+ "@docusaurus/cssnano-preset": "3.2.0",
+ "@docusaurus/logger": "3.2.0",
+ "@docusaurus/mdx-loader": "3.2.0",
"@docusaurus/react-loadable": "5.5.2",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-common": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
- "@slorber/static-site-generator-webpack-plugin": "^4.0.7",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-common": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"@svgr/webpack": "^6.5.1",
"autoprefixer": "^10.4.14",
"babel-loader": "^9.1.3",
@@ -2111,6 +2129,7 @@
"detect-port": "^1.5.1",
"escape-html": "^1.0.3",
"eta": "^2.2.0",
+ "eval": "^0.1.8",
"file-loader": "^6.2.0",
"fs-extra": "^11.1.1",
"html-minifier-terser": "^7.2.0",
@@ -2119,6 +2138,7 @@
"leven": "^3.1.0",
"lodash": "^4.17.21",
"mini-css-extract-plugin": "^2.7.6",
+ "p-map": "^4.0.0",
"postcss": "^8.4.26",
"postcss-loader": "^7.3.3",
"prompts": "^2.4.2",
@@ -2249,9 +2269,9 @@
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/@docusaurus/cssnano-preset": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.0.1.tgz",
- "integrity": "sha512-wjuXzkHMW+ig4BD6Ya1Yevx9UJadO4smNZCEljqBoQfIQrQskTswBs7lZ8InHP7mCt273a/y/rm36EZhqJhknQ==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.2.0.tgz",
+ "integrity": "sha512-H88RXGUia7r/VF3XfyoA4kbwgpUZcKsObF6VvwBOP91EdArTf6lnHbJ/x8Ca79KS/zf98qaWyBGzW+5ez58Iyw==",
"dependencies": {
"cssnano-preset-advanced": "^5.3.10",
"postcss": "^8.4.26",
@@ -2263,9 +2283,9 @@
}
},
"node_modules/@docusaurus/logger": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.0.1.tgz",
- "integrity": "sha512-I5L6Nk8OJzkVA91O2uftmo71LBSxe1vmOn9AMR6JRCzYeEBrqneWMH02AqMvjJ2NpMiviO+t0CyPjyYV7nxCWQ==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.2.0.tgz",
+ "integrity": "sha512-Z1R1NcOGXZ8CkIJSvjvyxnuDDSlx/+1xlh20iVTw1DZRjonFmI3T3tTgk40YpXyWUYQpIgAoqqPMpuseMMdgRQ==",
"dependencies": {
"chalk": "^4.1.2",
"tslib": "^2.6.0"
@@ -2339,11 +2359,11 @@
}
},
"node_modules/@docusaurus/lqip-loader": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/lqip-loader/-/lqip-loader-3.0.1.tgz",
- "integrity": "sha512-hFSu8ltYo0ZnWBWmjMhSprOr6nNKG01YdMDxH/hahBfyaNDCkZU4o7mQNgUW845lvYdp6bhjyW31WJwBjOnLqw==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/lqip-loader/-/lqip-loader-3.2.0.tgz",
+ "integrity": "sha512-lmYT3fslGH3ibdXySSUgtd4E3B8sQ7xizlNhmTj5eiqSQdiRiD15rVH73RohK8h+yrbu2QUDHTDAH6j7O5e2Gg==",
"dependencies": {
- "@docusaurus/logger": "3.0.1",
+ "@docusaurus/logger": "3.2.0",
"file-loader": "^6.2.0",
"lodash": "^4.17.21",
"sharp": "^0.32.3",
@@ -2354,15 +2374,13 @@
}
},
"node_modules/@docusaurus/mdx-loader": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.0.1.tgz",
- "integrity": "sha512-ldnTmvnvlrONUq45oKESrpy+lXtbnTcTsFkOTIDswe5xx5iWJjt6eSa0f99ZaWlnm24mlojcIGoUWNCS53qVlQ==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.2.0.tgz",
+ "integrity": "sha512-JtkI5o6R/rJSr1Y23cHKz085aBJCvJw3AYHihJ7r+mBX+O8EuQIynG0e6/XpbSCpr7Ino0U50UtxaXcEbFwg9Q==",
"dependencies": {
- "@babel/parser": "^7.22.7",
- "@babel/traverse": "^7.22.8",
- "@docusaurus/logger": "3.0.1",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/logger": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"@mdx-js/mdx": "^3.0.0",
"@slorber/remark-comment": "^1.0.0",
"escape-html": "^1.0.3",
@@ -2394,13 +2412,12 @@
}
},
"node_modules/@docusaurus/module-type-aliases": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.4.1.tgz",
- "integrity": "sha512-gLBuIFM8Dp2XOCWffUDSjtxY7jQgKvYujt7Mx5s4FCTfoL5dN1EVbnrn+O2Wvh8b0a77D57qoIDY7ghgmatR1A==",
- "dev": true,
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.2.0.tgz",
+ "integrity": "sha512-jRSp9YkvBwwNz6Xgy0RJPsnie+Ebb//gy7GdbkJ2pW2gvvlYKGib2+jSF0pfIzvyZLulfCynS1KQdvDKdSl8zQ==",
"dependencies": {
"@docusaurus/react-loadable": "5.5.2",
- "@docusaurus/types": "2.4.1",
+ "@docusaurus/types": "3.2.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -2413,38 +2430,18 @@
"react-dom": "*"
}
},
- "node_modules/@docusaurus/module-type-aliases/node_modules/@docusaurus/types": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-2.4.1.tgz",
- "integrity": "sha512-0R+cbhpMkhbRXX138UOc/2XZFF8hiZa6ooZAEEJFp5scytzCw4tC1gChMFXrpa3d2tYE6AX8IrOEpSonLmfQuQ==",
- "dev": true,
- "dependencies": {
- "@types/history": "^4.7.11",
- "@types/react": "*",
- "commander": "^5.1.0",
- "joi": "^17.6.0",
- "react-helmet-async": "^1.3.0",
- "utility-types": "^3.10.0",
- "webpack": "^5.73.0",
- "webpack-merge": "^5.8.0"
- },
- "peerDependencies": {
- "react": "^16.8.4 || ^17.0.0",
- "react-dom": "^16.8.4 || ^17.0.0"
- }
- },
"node_modules/@docusaurus/plugin-content-blog": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.0.1.tgz",
- "integrity": "sha512-cLOvtvAyaMQFLI8vm4j26svg3ktxMPSXpuUJ7EERKoGbfpJSsgtowNHcRsaBVmfuCsRSk1HZ/yHBsUkTmHFEsg==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.2.0.tgz",
+ "integrity": "sha512-MABqwjSicyHmYEfQueMthPCz18JkVxhK3EGhXTSRWwReAZ0UTuw9pG6+Wo+uXAugDaIcJH28rVZSwTDINPm2bw==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/logger": "3.0.1",
- "@docusaurus/mdx-loader": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-common": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/logger": "3.2.0",
+ "@docusaurus/mdx-loader": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-common": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"cheerio": "^1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",
@@ -2465,17 +2462,18 @@
}
},
"node_modules/@docusaurus/plugin-content-docs": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.0.1.tgz",
- "integrity": "sha512-dRfAOA5Ivo+sdzzJGXEu33yAtvGg8dlZkvt/NEJ7nwi1F2j4LEdsxtfX2GKeETB2fP6XoGNSQnFXqa2NYGrHFg==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.2.0.tgz",
+ "integrity": "sha512-uuqhahmsBnirxOz+SXksnWt7+wc+iN4ntxNRH48BUgo7QRNLATWjHCgI8t6zrMJxK4o+QL9DhLaPDlFHs91B3Q==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/logger": "3.0.1",
- "@docusaurus/mdx-loader": "3.0.1",
- "@docusaurus/module-type-aliases": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/logger": "3.2.0",
+ "@docusaurus/mdx-loader": "3.2.0",
+ "@docusaurus/module-type-aliases": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-common": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"@types/react-router-config": "^5.0.7",
"combine-promises": "^1.1.0",
"fs-extra": "^11.1.1",
@@ -2493,35 +2491,16 @@
"react-dom": "^18.0.0"
}
},
- "node_modules/@docusaurus/plugin-content-docs/node_modules/@docusaurus/module-type-aliases": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.0.1.tgz",
- "integrity": "sha512-DEHpeqUDsLynl3AhQQiO7AbC7/z/lBra34jTcdYuvp9eGm01pfH1wTVq8YqWZq6Jyx0BgcVl/VJqtE9StRd9Ag==",
- "dependencies": {
- "@docusaurus/react-loadable": "5.5.2",
- "@docusaurus/types": "3.0.1",
- "@types/history": "^4.7.11",
- "@types/react": "*",
- "@types/react-router-config": "*",
- "@types/react-router-dom": "*",
- "react-helmet-async": "*",
- "react-loadable": "npm:@docusaurus/react-loadable@5.5.2"
- },
- "peerDependencies": {
- "react": "*",
- "react-dom": "*"
- }
- },
"node_modules/@docusaurus/plugin-content-pages": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.0.1.tgz",
- "integrity": "sha512-oP7PoYizKAXyEttcvVzfX3OoBIXEmXTMzCdfmC4oSwjG4SPcJsRge3mmI6O8jcZBgUPjIzXD21bVGWEE1iu8gg==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.2.0.tgz",
+ "integrity": "sha512-4ofAN7JDsdb4tODO9OIrizWY5DmEJXr0eu+UDIkLqGP+gXXTahJZv8h2mlxO+lPXGXRCVBOfA14OG1hOYJVPwA==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/mdx-loader": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/mdx-loader": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0",
"webpack": "^5.88.1"
@@ -2535,13 +2514,13 @@
}
},
"node_modules/@docusaurus/plugin-debug": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.0.1.tgz",
- "integrity": "sha512-09dxZMdATky4qdsZGzhzlUvvC+ilQ2hKbYF+wez+cM2mGo4qHbv8+qKXqxq0CQZyimwlAOWQLoSozIXU0g0i7g==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.2.0.tgz",
+ "integrity": "sha512-p6WxtO5XZGz66y6QNQtCJwBefq4S6/w75XaXVvH1/2P9uaijvF7R+Cm2EWQZ5WsvA5wl//DFWblyDHRyVC207Q==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
"fs-extra": "^11.1.1",
"react-json-view-lite": "^1.2.0",
"tslib": "^2.6.0"
@@ -2555,13 +2534,13 @@
}
},
"node_modules/@docusaurus/plugin-google-analytics": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.0.1.tgz",
- "integrity": "sha512-jwseSz1E+g9rXQwDdr0ZdYNjn8leZBnKPjjQhMBEiwDoenL3JYFcNW0+p0sWoVF/f2z5t7HkKA+cYObrUh18gg==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.2.0.tgz",
+ "integrity": "sha512-//TepJTEyAZSvBwHKEbXHu9xT/VkK3wUil2ZakKvQZYfUC01uWn6A1E3toa8R7WhCy1xPUeIukqmJy1Clg8njQ==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"tslib": "^2.6.0"
},
"engines": {
@@ -2573,13 +2552,13 @@
}
},
"node_modules/@docusaurus/plugin-google-gtag": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.0.1.tgz",
- "integrity": "sha512-UFTDvXniAWrajsulKUJ1DB6qplui1BlKLQZjX4F7qS/qfJ+qkKqSkhJ/F4VuGQ2JYeZstYb+KaUzUzvaPK1aRQ==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.2.0.tgz",
+ "integrity": "sha512-3s6zxlaMMb87MW2Rxy6EnSRDs0WDEQPuHilZZH402C8kOrUnIwlhlfjWZ4ZyLDziGl/Eec/DvD0PVqj0qHRomA==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"@types/gtag.js": "^0.0.12",
"tslib": "^2.6.0"
},
@@ -2592,13 +2571,13 @@
}
},
"node_modules/@docusaurus/plugin-google-tag-manager": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.0.1.tgz",
- "integrity": "sha512-IPFvuz83aFuheZcWpTlAdiiX1RqWIHM+OH8wS66JgwAKOiQMR3+nLywGjkLV4bp52x7nCnwhNk1rE85Cpy/CIw==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.2.0.tgz",
+ "integrity": "sha512-rAKtsJ11vPHA7dTAqWCgyIy7AyFRF/lpI77Zd/4HKgqcIvIayVBvL3QtelhUazfYTLTH6ls6kQ9wjMcIFxRiGg==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"tslib": "^2.6.0"
},
"engines": {
@@ -2610,16 +2589,16 @@
}
},
"node_modules/@docusaurus/plugin-ideal-image": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-ideal-image/-/plugin-ideal-image-3.0.1.tgz",
- "integrity": "sha512-IvAUpEIz6v1/fVz6UTdQY12pYIE5geNFtsuKpsULpMaotwYf3Gs7acXjQog4qquKkc65yV5zuvMj8BZMHEwLyQ==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-ideal-image/-/plugin-ideal-image-3.2.0.tgz",
+ "integrity": "sha512-FPvNyOmSRBnSUQkiti7098N9W950EC4z7hXRn5ZwaG7Q+JGLdXC7sYWyTsjR5KZbEGolJWG1uyi+bG2vd1zDsw==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/lqip-loader": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/lqip-loader": "3.2.0",
"@docusaurus/responsive-loader": "^1.7.0",
- "@docusaurus/theme-translations": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/theme-translations": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"@slorber/react-ideal-image": "^0.0.12",
"react-waypoint": "^10.3.0",
"sharp": "^0.32.3",
@@ -2641,16 +2620,16 @@
}
},
"node_modules/@docusaurus/plugin-sitemap": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.0.1.tgz",
- "integrity": "sha512-xARiWnjtVvoEniZudlCq5T9ifnhCu/GAZ5nA7XgyLfPcNpHQa241HZdsTlLtVcecEVVdllevBKOp7qknBBaMGw==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.2.0.tgz",
+ "integrity": "sha512-gnWDFt6MStjLkdtt63Lzc+14EPSd8B6mzJGJp9GQMvWDUoMAUijUqpVIHYQq+DPMcI4PJZ5I2nsl5XFf1vOldA==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/logger": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-common": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/logger": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-common": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"fs-extra": "^11.1.1",
"sitemap": "^7.1.1",
"tslib": "^2.6.0"
@@ -2664,23 +2643,23 @@
}
},
"node_modules/@docusaurus/preset-classic": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.0.1.tgz",
- "integrity": "sha512-il9m9xZKKjoXn6h0cRcdnt6wce0Pv1y5t4xk2Wx7zBGhKG1idu4IFHtikHlD0QPuZ9fizpXspXcTzjL5FXc1Gw==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.2.0.tgz",
+ "integrity": "sha512-t7tXyk8kUgT7hUqEOgSJnPs+Foem9ucuan/a9QVYaVFCDjp92Sb2FpCY8bVasAokYCjodYe2LfpAoSCj5YDYWg==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/plugin-content-blog": "3.0.1",
- "@docusaurus/plugin-content-docs": "3.0.1",
- "@docusaurus/plugin-content-pages": "3.0.1",
- "@docusaurus/plugin-debug": "3.0.1",
- "@docusaurus/plugin-google-analytics": "3.0.1",
- "@docusaurus/plugin-google-gtag": "3.0.1",
- "@docusaurus/plugin-google-tag-manager": "3.0.1",
- "@docusaurus/plugin-sitemap": "3.0.1",
- "@docusaurus/theme-classic": "3.0.1",
- "@docusaurus/theme-common": "3.0.1",
- "@docusaurus/theme-search-algolia": "3.0.1",
- "@docusaurus/types": "3.0.1"
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/plugin-content-blog": "3.2.0",
+ "@docusaurus/plugin-content-docs": "3.2.0",
+ "@docusaurus/plugin-content-pages": "3.2.0",
+ "@docusaurus/plugin-debug": "3.2.0",
+ "@docusaurus/plugin-google-analytics": "3.2.0",
+ "@docusaurus/plugin-google-gtag": "3.2.0",
+ "@docusaurus/plugin-google-tag-manager": "3.2.0",
+ "@docusaurus/plugin-sitemap": "3.2.0",
+ "@docusaurus/theme-classic": "3.2.0",
+ "@docusaurus/theme-common": "3.2.0",
+ "@docusaurus/theme-search-algolia": "3.2.0",
+ "@docusaurus/types": "3.2.0"
},
"engines": {
"node": ">=18.0"
@@ -2726,22 +2705,22 @@
}
},
"node_modules/@docusaurus/theme-classic": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.0.1.tgz",
- "integrity": "sha512-XD1FRXaJiDlmYaiHHdm27PNhhPboUah9rqIH0lMpBt5kYtsGjJzhqa27KuZvHLzOP2OEpqd2+GZ5b6YPq7Q05Q==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.2.0.tgz",
+ "integrity": "sha512-4oSO5BQOJ5ja7WYdL6jK1n4J96tp+VJHamdwao6Ea252sA3W3vvR0otTflG4p4XVjNZH6hlPQoi5lKW0HeRgfQ==",
"dependencies": {
- "@docusaurus/core": "3.0.1",
- "@docusaurus/mdx-loader": "3.0.1",
- "@docusaurus/module-type-aliases": "3.0.1",
- "@docusaurus/plugin-content-blog": "3.0.1",
- "@docusaurus/plugin-content-docs": "3.0.1",
- "@docusaurus/plugin-content-pages": "3.0.1",
- "@docusaurus/theme-common": "3.0.1",
- "@docusaurus/theme-translations": "3.0.1",
- "@docusaurus/types": "3.0.1",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-common": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/mdx-loader": "3.2.0",
+ "@docusaurus/module-type-aliases": "3.2.0",
+ "@docusaurus/plugin-content-blog": "3.2.0",
+ "@docusaurus/plugin-content-docs": "3.2.0",
+ "@docusaurus/plugin-content-pages": "3.2.0",
+ "@docusaurus/theme-common": "3.2.0",
+ "@docusaurus/theme-translations": "3.2.0",
+ "@docusaurus/types": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-common": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"copy-text-to-clipboard": "^3.2.0",
@@ -2764,29 +2743,10 @@
"react-dom": "^18.0.0"
}
},
- "node_modules/@docusaurus/theme-classic/node_modules/@docusaurus/module-type-aliases": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.0.1.tgz",
- "integrity": "sha512-DEHpeqUDsLynl3AhQQiO7AbC7/z/lBra34jTcdYuvp9eGm01pfH1wTVq8YqWZq6Jyx0BgcVl/VJqtE9StRd9Ag==",
- "dependencies": {
- "@docusaurus/react-loadable": "5.5.2",
- "@docusaurus/types": "3.0.1",
- "@types/history": "^4.7.11",
- "@types/react": "*",
- "@types/react-router-config": "*",
- "@types/react-router-dom": "*",
- "react-helmet-async": "*",
- "react-loadable": "npm:@docusaurus/react-loadable@5.5.2"
- },
- "peerDependencies": {
- "react": "*",
- "react-dom": "*"
- }
- },
"node_modules/@docusaurus/theme-classic/node_modules/@mdx-js/react": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.0.0.tgz",
- "integrity": "sha512-nDctevR9KyYFyV+m+/+S4cpzCWHqj+iHDHq3QrsWezcC+B17uZdIWgCguESUkwFhM3n/56KxWVE3V6EokrmONQ==",
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.0.1.tgz",
+ "integrity": "sha512-9ZrPIU4MGf6et1m1ov3zKf+q9+deetI51zprKB1D/z3NOb+rUxxtEl3mCjW5wTGh6VhRdwPueh1oRzi6ezkA8A==",
"dependencies": {
"@types/mdx": "^2.0.0"
},
@@ -2800,9 +2760,9 @@
}
},
"node_modules/@docusaurus/theme-classic/node_modules/clsx": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz",
- "integrity": "sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz",
+ "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==",
"engines": {
"node": ">=6"
}
@@ -2820,17 +2780,17 @@
}
},
"node_modules/@docusaurus/theme-common": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.0.1.tgz",
- "integrity": "sha512-cr9TOWXuIOL0PUfuXv6L5lPlTgaphKP+22NdVBOYah5jSq5XAAulJTjfe+IfLsEG4L7lJttLbhW7LXDFSAI7Ag==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.2.0.tgz",
+ "integrity": "sha512-sFbw9XviNJJ+760kAcZCQMQ3jkNIznGqa6MQ70E5BnbP+ja36kGgPOfjcsvAcNey1H1Rkhh3p2Mhf4HVLdKVVw==",
"dependencies": {
- "@docusaurus/mdx-loader": "3.0.1",
- "@docusaurus/module-type-aliases": "3.0.1",
- "@docusaurus/plugin-content-blog": "3.0.1",
- "@docusaurus/plugin-content-docs": "3.0.1",
- "@docusaurus/plugin-content-pages": "3.0.1",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-common": "3.0.1",
+ "@docusaurus/mdx-loader": "3.2.0",
+ "@docusaurus/module-type-aliases": "3.2.0",
+ "@docusaurus/plugin-content-blog": "3.2.0",
+ "@docusaurus/plugin-content-docs": "3.2.0",
+ "@docusaurus/plugin-content-pages": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-common": "3.2.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -2848,29 +2808,10 @@
"react-dom": "^18.0.0"
}
},
- "node_modules/@docusaurus/theme-common/node_modules/@docusaurus/module-type-aliases": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.0.1.tgz",
- "integrity": "sha512-DEHpeqUDsLynl3AhQQiO7AbC7/z/lBra34jTcdYuvp9eGm01pfH1wTVq8YqWZq6Jyx0BgcVl/VJqtE9StRd9Ag==",
- "dependencies": {
- "@docusaurus/react-loadable": "5.5.2",
- "@docusaurus/types": "3.0.1",
- "@types/history": "^4.7.11",
- "@types/react": "*",
- "@types/react-router-config": "*",
- "@types/react-router-dom": "*",
- "react-helmet-async": "*",
- "react-loadable": "npm:@docusaurus/react-loadable@5.5.2"
- },
- "peerDependencies": {
- "react": "*",
- "react-dom": "*"
- }
- },
"node_modules/@docusaurus/theme-common/node_modules/clsx": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz",
- "integrity": "sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz",
+ "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==",
"engines": {
"node": ">=6"
}
@@ -2888,18 +2829,18 @@
}
},
"node_modules/@docusaurus/theme-search-algolia": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.0.1.tgz",
- "integrity": "sha512-DDiPc0/xmKSEdwFkXNf1/vH1SzJPzuJBar8kMcBbDAZk/SAmo/4lf6GU2drou4Ae60lN2waix+jYWTWcJRahSA==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.2.0.tgz",
+ "integrity": "sha512-PgvF4qHoqJp8+GfqClUbTF/zYNOsz4De251IuzXon7+7FAXwvb2qmYtA2nEwyMbB7faKOz33Pxzv+y+153KS/g==",
"dependencies": {
"@docsearch/react": "^3.5.2",
- "@docusaurus/core": "3.0.1",
- "@docusaurus/logger": "3.0.1",
- "@docusaurus/plugin-content-docs": "3.0.1",
- "@docusaurus/theme-common": "3.0.1",
- "@docusaurus/theme-translations": "3.0.1",
- "@docusaurus/utils": "3.0.1",
- "@docusaurus/utils-validation": "3.0.1",
+ "@docusaurus/core": "3.2.0",
+ "@docusaurus/logger": "3.2.0",
+ "@docusaurus/plugin-content-docs": "3.2.0",
+ "@docusaurus/theme-common": "3.2.0",
+ "@docusaurus/theme-translations": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-validation": "3.2.0",
"algoliasearch": "^4.18.0",
"algoliasearch-helper": "^3.13.3",
"clsx": "^2.0.0",
@@ -2918,17 +2859,17 @@
}
},
"node_modules/@docusaurus/theme-search-algolia/node_modules/clsx": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz",
- "integrity": "sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz",
+ "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==",
"engines": {
"node": ">=6"
}
},
"node_modules/@docusaurus/theme-translations": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.0.1.tgz",
- "integrity": "sha512-6UrbpzCTN6NIJnAtZ6Ne9492vmPVX+7Fsz4kmp+yor3KQwA1+MCzQP7ItDNkP38UmVLnvB/cYk/IvehCUqS3dg==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.2.0.tgz",
+ "integrity": "sha512-VXzZJBuyVEmwUYyud+7IgJQEBRM6R2u/s10Rp3DOP19CBQxeKgHYTKkKhFtDeKMHDassb665kjgOi0YlJfUT6w==",
"dependencies": {
"fs-extra": "^11.1.1",
"tslib": "^2.6.0"
@@ -2938,10 +2879,11 @@
}
},
"node_modules/@docusaurus/types": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.0.1.tgz",
- "integrity": "sha512-plyX2iU1tcUsF46uQ01pAd4JhexR7n0iiQ5MSnBFX6M6NSJgDYdru/i1/YNPKOnQHBoXGLHv0dNT6OAlDWNjrg==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.2.0.tgz",
+ "integrity": "sha512-uG3FfTkkkbZIPPNYx6xRfZHKeGyRd/inIT1cqvYt1FobFLd+7WhRXrSBqwJ9JajJjEAjNioRMVFgGofGf/Wdww==",
"dependencies": {
+ "@mdx-js/mdx": "^3.0.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"commander": "^5.1.0",
@@ -2957,11 +2899,12 @@
}
},
"node_modules/@docusaurus/utils": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.0.1.tgz",
- "integrity": "sha512-TwZ33Am0q4IIbvjhUOs+zpjtD/mXNmLmEgeTGuRq01QzulLHuPhaBTTAC/DHu6kFx3wDgmgpAlaRuCHfTcXv8g==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.2.0.tgz",
+ "integrity": "sha512-3rgrE7iL60yV2JQivlcoxUNNTK2APmn+OHLUmTvX2pueIM8DEOCEFHpJO4MiWjFO7V/Wq3iA/W1M03JnjdugVw==",
"dependencies": {
- "@docusaurus/logger": "3.0.1",
+ "@docusaurus/logger": "3.2.0",
+ "@docusaurus/utils-common": "3.2.0",
"@svgr/webpack": "^6.5.1",
"escape-string-regexp": "^4.0.0",
"file-loader": "^6.2.0",
@@ -2973,6 +2916,7 @@
"js-yaml": "^4.1.0",
"lodash": "^4.17.21",
"micromatch": "^4.0.5",
+ "prompts": "^2.4.2",
"resolve-pathname": "^3.0.0",
"shelljs": "^0.8.5",
"tslib": "^2.6.0",
@@ -2992,9 +2936,9 @@
}
},
"node_modules/@docusaurus/utils-common": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.0.1.tgz",
- "integrity": "sha512-W0AxD6w6T8g6bNro8nBRWf7PeZ/nn7geEWM335qHU2DDDjHuV4UZjgUGP1AQsdcSikPrlIqTJJbKzer1lRSlIg==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.2.0.tgz",
+ "integrity": "sha512-WEQT5L2lT/tBQgDRgeZQAIi9YJBrwEILb1BuObQn1St3T/4K1gx5fWwOT8qdLOov296XLd1FQg9Ywu27aE9svw==",
"dependencies": {
"tslib": "^2.6.0"
},
@@ -3011,12 +2955,13 @@
}
},
"node_modules/@docusaurus/utils-validation": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.0.1.tgz",
- "integrity": "sha512-ujTnqSfyGQ7/4iZdB4RRuHKY/Nwm58IIb+41s5tCXOv/MBU2wGAjOHq3U+AEyJ8aKQcHbxvTKJaRchNHYUVUQg==",
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.2.0.tgz",
+ "integrity": "sha512-rCzMTqwNrBrEOyU8EaD1fYWdig4TDhfj+YLqB8DY68VUAqSIgbY+yshpqFKB0bznFYNBJbn0bGpvVuImQOa/vA==",
"dependencies": {
- "@docusaurus/logger": "3.0.1",
- "@docusaurus/utils": "3.0.1",
+ "@docusaurus/logger": "3.2.0",
+ "@docusaurus/utils": "3.2.0",
+ "@docusaurus/utils-common": "3.2.0",
"joi": "^17.9.2",
"js-yaml": "^4.1.0",
"tslib": "^2.6.0"
@@ -3234,9 +3179,9 @@
"integrity": "sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A=="
},
"node_modules/@mdx-js/mdx": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-3.0.0.tgz",
- "integrity": "sha512-Icm0TBKBLYqroYbNW3BPnzMGn+7mwpQOK310aZ7+fkCtiU3aqv2cdcX+nd0Ydo3wI5Rx8bX2Z2QmGb/XcAClCw==",
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-3.0.1.tgz",
+ "integrity": "sha512-eIQ4QTrOWyL3LWEe/bu6Taqzq2HQvHcyTMaOrI95P2/LmJE7AsfPfgJGuFLPVqBUE1BC1rik3VIhU+s9u72arA==",
"dependencies": {
"@types/estree": "^1.0.0",
"@types/estree-jsx": "^1.0.0",
@@ -3480,9 +3425,9 @@
}
},
"node_modules/@sideway/address": {
- "version": "4.1.4",
- "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz",
- "integrity": "sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==",
+ "version": "4.1.5",
+ "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz",
+ "integrity": "sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==",
"dependencies": {
"@hapi/hoek": "^9.0.0"
}
@@ -3537,19 +3482,6 @@
"micromark-util-symbol": "^1.0.1"
}
},
- "node_modules/@slorber/static-site-generator-webpack-plugin": {
- "version": "4.0.7",
- "resolved": "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz",
- "integrity": "sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA==",
- "dependencies": {
- "eval": "^0.1.8",
- "p-map": "^4.0.0",
- "webpack-sources": "^3.2.2"
- },
- "engines": {
- "node": ">=14"
- }
- },
"node_modules/@svgr/babel-plugin-add-jsx-attribute": {
"version": "6.5.1",
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.5.1.tgz",
@@ -3930,9 +3862,9 @@
"integrity": "sha512-YQV9bUsemkzG81Ea295/nF/5GijnD2Af7QhEofh7xu+kvCN6RdodgNwwGWXB5GMI3NoyvQo0odNctoH/qLMIpg=="
},
"node_modules/@types/hast": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.3.tgz",
- "integrity": "sha512-2fYGlaDy/qyLlhidX42wAH0KBi2TCjKMH8CHmBXgRlJ3Y+OXTiqsPQ6IWarZKwF1JoUcAJdPogv1d4b0COTpmQ==",
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
"dependencies": {
"@types/unist": "*"
}
@@ -4505,30 +4437,31 @@
}
},
"node_modules/algoliasearch": {
- "version": "4.22.0",
- "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.22.0.tgz",
- "integrity": "sha512-gfceltjkwh7PxXwtkS8KVvdfK+TSNQAWUeNSxf4dA29qW5tf2EGwa8jkJujlT9jLm17cixMVoGNc+GJFO1Mxhg==",
+ "version": "4.23.2",
+ "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.23.2.tgz",
+ "integrity": "sha512-8aCl055IsokLuPU8BzLjwzXjb7ty9TPcUFFOk0pYOwsE5DMVhE3kwCMFtsCFKcnoPZK7oObm+H5mbnSO/9ioxQ==",
"dependencies": {
- "@algolia/cache-browser-local-storage": "4.22.0",
- "@algolia/cache-common": "4.22.0",
- "@algolia/cache-in-memory": "4.22.0",
- "@algolia/client-account": "4.22.0",
- "@algolia/client-analytics": "4.22.0",
- "@algolia/client-common": "4.22.0",
- "@algolia/client-personalization": "4.22.0",
- "@algolia/client-search": "4.22.0",
- "@algolia/logger-common": "4.22.0",
- "@algolia/logger-console": "4.22.0",
- "@algolia/requester-browser-xhr": "4.22.0",
- "@algolia/requester-common": "4.22.0",
- "@algolia/requester-node-http": "4.22.0",
- "@algolia/transporter": "4.22.0"
+ "@algolia/cache-browser-local-storage": "4.23.2",
+ "@algolia/cache-common": "4.23.2",
+ "@algolia/cache-in-memory": "4.23.2",
+ "@algolia/client-account": "4.23.2",
+ "@algolia/client-analytics": "4.23.2",
+ "@algolia/client-common": "4.23.2",
+ "@algolia/client-personalization": "4.23.2",
+ "@algolia/client-search": "4.23.2",
+ "@algolia/logger-common": "4.23.2",
+ "@algolia/logger-console": "4.23.2",
+ "@algolia/recommend": "4.23.2",
+ "@algolia/requester-browser-xhr": "4.23.2",
+ "@algolia/requester-common": "4.23.2",
+ "@algolia/requester-node-http": "4.23.2",
+ "@algolia/transporter": "4.23.2"
}
},
"node_modules/algoliasearch-helper": {
- "version": "3.16.1",
- "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.16.1.tgz",
- "integrity": "sha512-qxAHVjjmT7USVvrM8q6gZGaJlCK1fl4APfdAA7o8O6iXEc68G0xMNrzRkxoB/HmhhvyHnoteS/iMTiHiTcQQcg==",
+ "version": "3.17.0",
+ "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.17.0.tgz",
+ "integrity": "sha512-R5422OiQjvjlK3VdpNQ/Qk7KsTIGeM5ACm8civGifOVWdRRV/3SgXuKmeNxe94Dz6fwj/IgpVmXbHutU4mHubg==",
"dependencies": {
"@algolia/events": "^4.0.1"
},
@@ -4928,12 +4861,12 @@
"dev": true
},
"node_modules/body-parser": {
- "version": "1.20.1",
- "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz",
- "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==",
+ "version": "1.20.2",
+ "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz",
+ "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==",
"dependencies": {
"bytes": "3.1.2",
- "content-type": "~1.0.4",
+ "content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
@@ -4941,7 +4874,7 @@
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.11.0",
- "raw-body": "2.5.1",
+ "raw-body": "2.5.2",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
},
@@ -6124,9 +6057,9 @@
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="
},
"node_modules/cookie": {
- "version": "0.5.0",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz",
- "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==",
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
+ "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
"engines": {
"node": ">= 0.6"
}
@@ -9620,16 +9553,16 @@
}
},
"node_modules/express": {
- "version": "4.18.2",
- "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz",
- "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==",
+ "version": "4.19.2",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz",
+ "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==",
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
- "body-parser": "1.20.1",
+ "body-parser": "1.20.2",
"content-disposition": "0.5.4",
"content-type": "~1.0.4",
- "cookie": "0.5.0",
+ "cookie": "0.6.0",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
@@ -10005,9 +9938,9 @@
}
},
"node_modules/follow-redirects": {
- "version": "1.15.3",
- "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz",
- "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==",
+ "version": "1.15.6",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
+ "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"funding": [
{
"type": "individual",
@@ -10923,9 +10856,9 @@
}
},
"node_modules/hast-util-raw": {
- "version": "9.0.1",
- "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.0.1.tgz",
- "integrity": "sha512-5m1gmba658Q+lO5uqL5YNGQWeh1MYWZbZmWrM5lncdcuiXuo5E2HT/CIOp0rLF8ksfSwiCVJ3twlgVRyTGThGA==",
+ "version": "9.0.2",
+ "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.0.2.tgz",
+ "integrity": "sha512-PldBy71wO9Uq1kyaMch9AHIghtQvIwxBUkv823pKmkTM3oV1JxtsTNYdevMxvUHqcnOAuO65JKU2+0NOxc2ksA==",
"dependencies": {
"@types/hast": "^3.0.0",
"@types/unist": "^3.0.0",
@@ -11000,16 +10933,16 @@
}
},
"node_modules/hast-util-to-jsx-runtime/node_modules/inline-style-parser": {
- "version": "0.2.2",
- "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.2.tgz",
- "integrity": "sha512-EcKzdTHVe8wFVOGEYXiW9WmJXPjqi1T+234YpJr98RiFYKHV3cdy1+3mkTE+KHTHxFFLH51SfaGOoUdW+v7ViQ=="
+ "version": "0.2.3",
+ "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.3.tgz",
+ "integrity": "sha512-qlD8YNDqyTKTyuITrDOffsl6Tdhv+UC4hcdAVuQsK4IMQ99nSgd1MIA/Q+jQYoh9r3hVUXhYh7urSRmXPkW04g=="
},
"node_modules/hast-util-to-jsx-runtime/node_modules/style-to-object": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.5.tgz",
- "integrity": "sha512-rDRwHtoDD3UMMrmZ6BzOW0naTjMsVZLIjsGleSKS/0Oz+cgCfAPRspaqJuE8rDzpKha/nEvnM0IF4seEAZUTKQ==",
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.6.tgz",
+ "integrity": "sha512-khxq+Qm3xEyZfKd/y9L3oIWQimxuc4STrQKtQn8aSDRHb8mFgpukgX1hdzfrMEW6JCjyJ8p89x+IUMVnCBI1PA==",
"dependencies": {
- "inline-style-parser": "0.2.2"
+ "inline-style-parser": "0.2.3"
}
},
"node_modules/hast-util-to-parse5": {
@@ -11688,9 +11621,9 @@
}
},
"node_modules/ip": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz",
- "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.1.tgz",
+ "integrity": "sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==",
"dev": true
},
"node_modules/ipaddr.js": {
@@ -12226,13 +12159,13 @@
}
},
"node_modules/joi": {
- "version": "17.11.0",
- "resolved": "https://registry.npmjs.org/joi/-/joi-17.11.0.tgz",
- "integrity": "sha512-NgB+lZLNoqISVy1rZocE9PZI36bL/77ie924Ri43yEvi9GUUMPeyVIr8KdFTMUlby1p0PBYMk9spIxEUQYqrJQ==",
+ "version": "17.12.2",
+ "resolved": "https://registry.npmjs.org/joi/-/joi-17.12.2.tgz",
+ "integrity": "sha512-RonXAIzCiHLc8ss3Ibuz45u28GOsWE1UpfDXLbN/9NKbL4tCJf8TWYVKsoYuuh+sAUt7fsSNpA+r2+TBA6Wjmw==",
"dependencies": {
- "@hapi/hoek": "^9.0.0",
- "@hapi/topo": "^5.0.0",
- "@sideway/address": "^4.1.3",
+ "@hapi/hoek": "^9.3.0",
+ "@hapi/topo": "^5.1.0",
+ "@sideway/address": "^4.1.5",
"@sideway/formula": "^3.0.1",
"@sideway/pinpoint": "^2.0.0"
}
@@ -12820,9 +12753,9 @@
}
},
"node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -12948,9 +12881,9 @@
}
},
"node_modules/mdast-util-mdx-jsx": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.0.0.tgz",
- "integrity": "sha512-XZuPPzQNBPAlaqsTTgRrcJnyFbSOBovSadFgbFu8SnuNgm+6Bdx1K+IWoitsmj6Lq6MNtI+ytOqwN70n//NaBA==",
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.2.tgz",
+ "integrity": "sha512-eKMQDeywY2wlHc97k5eD8VC+9ASMjN8ItEZQNGwJ6E0XWKiW/Z0V5/H8pvoXUf+y+Mj0VIgeRRbujBmFn4FTyA==",
"dependencies": {
"@types/estree-jsx": "^1.0.0",
"@types/hast": "^3.0.0",
@@ -12989,9 +12922,9 @@
}
},
"node_modules/mdast-util-phrasing": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.0.0.tgz",
- "integrity": "sha512-xadSsJayQIucJ9n053dfQwVu1kuXg7jCTdYsMK8rqzKZh52nLfSH/k0sAxE0u+pj/zKZX+o5wB+ML5mRayOxFA==",
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz",
+ "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==",
"dependencies": {
"@types/mdast": "^4.0.0",
"unist-util-is": "^6.0.0"
@@ -13002,9 +12935,9 @@
}
},
"node_modules/mdast-util-to-hast": {
- "version": "13.0.2",
- "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.0.2.tgz",
- "integrity": "sha512-U5I+500EOOw9e3ZrclN3Is3fRpw8c19SMyNZlZ2IS+7vLsNzb2Om11VpIVOR+/0137GhZsFEF6YiKD5+0Hr2Og==",
+ "version": "13.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.1.0.tgz",
+ "integrity": "sha512-/e2l/6+OdGp/FB+ctrJ9Avz71AN/GRH3oi/3KAx/kMnoUsD6q0woXlDT8lLEeViVKE7oZxE7RXzvO3T8kF2/sA==",
"dependencies": {
"@types/hast": "^3.0.0",
"@types/mdast": "^4.0.0",
@@ -13013,7 +12946,8 @@
"micromark-util-sanitize-uri": "^2.0.0",
"trim-lines": "^3.0.0",
"unist-util-position": "^5.0.0",
- "unist-util-visit": "^5.0.0"
+ "unist-util-visit": "^5.0.0",
+ "vfile": "^6.0.0"
},
"funding": {
"type": "opencollective",
@@ -13236,9 +13170,9 @@
}
},
"node_modules/micromark-core-commonmark/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13307,9 +13241,9 @@
}
},
"node_modules/micromark-extension-directive/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13356,9 +13290,9 @@
}
},
"node_modules/micromark-extension-frontmatter/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13424,9 +13358,9 @@
}
},
"node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13496,9 +13430,9 @@
}
},
"node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13597,9 +13531,9 @@
}
},
"node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13678,9 +13612,9 @@
}
},
"node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13756,9 +13690,9 @@
}
},
"node_modules/micromark-extension-mdx-expression/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13830,9 +13764,9 @@
}
},
"node_modules/micromark-extension-mdx-jsx/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13915,9 +13849,9 @@
}
},
"node_modules/micromark-extension-mdxjs-esm/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -13969,9 +13903,9 @@
}
},
"node_modules/micromark-factory-destination/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -14024,9 +13958,9 @@
}
},
"node_modules/micromark-factory-label/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -14083,9 +14017,9 @@
}
},
"node_modules/micromark-factory-mdx-expression/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -14191,9 +14125,9 @@
}
},
"node_modules/micromark-factory-title/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -14265,9 +14199,9 @@
}
},
"node_modules/micromark-factory-whitespace/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -14386,9 +14320,9 @@
}
},
"node_modules/micromark-util-classify-character/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -14493,9 +14427,9 @@
}
},
"node_modules/micromark-util-decode-string/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -14668,9 +14602,9 @@
}
},
"node_modules/micromark-util-sanitize-uri/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -14787,9 +14721,9 @@
}
},
"node_modules/micromark/node_modules/micromark-util-character": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz",
- "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==",
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
"funding": [
{
"type": "GitHub Sponsors",
@@ -17476,9 +17410,9 @@
}
},
"node_modules/raw-body": {
- "version": "2.5.1",
- "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz",
- "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==",
+ "version": "2.5.2",
+ "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
+ "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
@@ -17780,9 +17714,9 @@
"integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="
},
"node_modules/react-json-view-lite": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-1.2.1.tgz",
- "integrity": "sha512-Itc0g86fytOmKZoIoJyGgvNqohWSbh3NXIKNgH6W6FT9PC1ck4xas1tT3Rr/b3UlFXyA9Jjaw9QSXdZy2JwGMQ==",
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-1.3.0.tgz",
+ "integrity": "sha512-aN1biKC5v4DQkmQBlZjuMFR09MKZGMPtIg+cut8zEeg2HXd6gl2gRy0n4HMacHf0dznQgo0SVXN7eT8zV3hEuQ==",
"engines": {
"node": ">=14"
},
@@ -18587,9 +18521,9 @@
}
},
"node_modules/remark-mdx": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-3.0.0.tgz",
- "integrity": "sha512-O7yfjuC6ra3NHPbRVxfflafAj3LTwx3b73aBvkEFU5z4PsD6FD4vrqJAkE5iNGLz71GdjXfgRqm3SQ0h0VuE7g==",
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-3.0.1.tgz",
+ "integrity": "sha512-3Pz3yPQ5Rht2pM5R+0J2MrGoBSrzf+tJG94N+t/ilfdh8YLyyKYtidAYwTveB20BoHAcwIopOUqhcmh2F7hGYA==",
"dependencies": {
"mdast-util-mdx": "^3.0.0",
"micromark-extension-mdxjs": "^3.0.0"
@@ -19088,9 +19022,9 @@
}
},
"node_modules/remark-rehype": {
- "version": "11.0.0",
- "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.0.0.tgz",
- "integrity": "sha512-vx8x2MDMcxuE4lBmQ46zYUDfcFMmvg80WYX+UNLeG6ixjdCCLcw1lrgAukwBTuOFsS78eoAedHGn9sNM0w7TPw==",
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.0.tgz",
+ "integrity": "sha512-z3tJrAs2kIs1AqIIy6pzHmAHlF1hWQ+OdY4/hv+Wxe35EhyLKcajL33iUEn3ScxtFox9nUvRufR/Zre8Q08H/g==",
"dependencies": {
"@types/hast": "^3.0.0",
"@types/mdast": "^4.0.0",
@@ -21778,9 +21712,9 @@
"integrity": "sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA=="
},
"node_modules/utility-types": {
- "version": "3.10.0",
- "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.10.0.tgz",
- "integrity": "sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==",
+ "version": "3.11.0",
+ "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.11.0.tgz",
+ "integrity": "sha512-6Z7Ma2aVEWisaL6TvBCy7P8rm2LQoPv6dJ7ecIaIixHcwfbJ0x7mWdbcwlIM5IGQxPZSFYeqRCqlOOeKoJYMkw==",
"engines": {
"node": ">= 4"
}
@@ -22018,9 +21952,9 @@
}
},
"node_modules/webpack-dev-middleware": {
- "version": "5.3.3",
- "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz",
- "integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==",
+ "version": "5.3.4",
+ "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz",
+ "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==",
"dependencies": {
"colorette": "^2.0.10",
"memfs": "^3.4.3",
diff --git a/docs/package.json b/docs/package.json
index 35f38de59..87f3d3d71 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -16,11 +16,12 @@
"dependencies": {
"@babel/preset-react": "^7.22.3",
"@code-hike/mdx": "^0.9.0",
- "@docusaurus/core": "3.0.1",
- "@docusaurus/plugin-ideal-image": "^3.0.1",
- "@docusaurus/preset-classic": "3.0.1",
- "@docusaurus/theme-classic": "^3.0.1",
- "@docusaurus/theme-search-algolia": "^3.0.1",
+ "@docusaurus/core": "^3.2.0",
+ "@docusaurus/plugin-google-gtag": "^3.2.0",
+ "@docusaurus/plugin-ideal-image": "^3.2.0",
+ "@docusaurus/preset-classic": "^3.2.0",
+ "@docusaurus/theme-classic": "^3.2.0",
+ "@docusaurus/theme-search-algolia": "^3.2.0",
"@mdx-js/react": "^2.3.0",
"@mendable/search": "^0.0.154",
"@pbe/react-yandex-maps": "^1.2.4",
@@ -47,7 +48,7 @@
"tailwindcss": "^3.3.2"
},
"devDependencies": {
- "@docusaurus/module-type-aliases": "2.4.1",
+ "@docusaurus/module-type-aliases": "^3.2.0",
"css-loader": "^6.8.1",
"docusaurus-node-polyfills": "^1.0.0",
"node-sass": "^9.0.0",
@@ -69,4 +70,4 @@
"engines": {
"node": ">=16.14"
}
-}
\ No newline at end of file
+}
diff --git a/docs/sidebars.js b/docs/sidebars.js
index e238e1860..ddc9af202 100644
--- a/docs/sidebars.js
+++ b/docs/sidebars.js
@@ -2,13 +2,49 @@ module.exports = {
docs: [
{
type: "category",
- label: "Getting Started",
+ label: " Getting Started",
collapsed: false,
items: [
"index",
- "getting-started/installation",
- "getting-started/hugging-face-spaces",
- "getting-started/creating-flows",
+ "getting-started/cli",
+ "getting-started/basic-prompting",
+ "getting-started/document-qa",
+ "getting-started/blog-writer",
+ "getting-started/memory-chatbot",
+ "getting-started/rag-with-astradb",
+ ],
+ },
+ {
+ type: "category",
+ label: " What's New",
+ collapsed: false,
+ items: [
+ "whats-new/a-new-chapter-langflow",
+ "whats-new/migrating-to-one-point-zero",
+ ],
+ },
+
+ {
+ type: "category",
+ label: " Migration Guides",
+ collapsed: false,
+ items: [
+ // "migration/flow-of-data",
+ "migration/inputs-and-outputs",
+ // "migration/supported-frameworks",
+ // "migration/sidebar-and-interaction-panel",
+ // "migration/new-categories-and-components",
+ "migration/text-and-record",
+ // "migration/custom-component",
+ "migration/compatibility",
+ // "migration/multiple-flows",
+ // "migration/component-status-and-data-passing",
+ // "migration/connecting-output-components",
+ // "migration/renaming-and-editing-components",
+ // "migration/passing-tweaks-and-inputs",
+ "migration/global-variables",
+ // "migration/experimental-components",
+ // "migration/state-management",
],
},
{
@@ -18,7 +54,6 @@ module.exports = {
items: [
"guidelines/login",
"guidelines/api",
- "guidelines/async-api",
"guidelines/components",
"guidelines/features",
"guidelines/collection",
@@ -30,47 +65,42 @@ module.exports = {
},
{
type: "category",
- label: "Component Reference",
+ label: "Step-by-Step Guides",
+ collapsed: false,
+ items: ["guides/langfuse_integration"],
+ },
+ {
+ type: "category",
+ label: "Core Components",
collapsed: false,
items: [
- "components/agents",
- "components/chains",
- "components/custom",
- "components/embeddings",
- "components/llms",
- "components/loaders",
- "components/memories",
- "components/prompts",
- "components/retrievers",
- "components/text-splitters",
- "components/toolkits",
- "components/tools",
- "components/utilities",
+ "components/inputs",
+ "components/outputs",
+ "components/data",
+ "components/models",
+ "components/helpers",
"components/vector-stores",
- "components/wrappers",
+ "components/embeddings",
],
},
{
type: "category",
- label: "Step-by-Step Guides",
+ label: "Extended Components",
collapsed: false,
items: [
- "guides/async-tasks",
- "guides/loading_document",
- "guides/chatprompttemplate_guide",
- "guides/langfuse_integration",
+ "components/agents",
+ "components/chains",
+ "components/loaders",
+ "components/experimental",
+ "components/utilities",
+ "components/memories",
+ "components/model_specs",
+ "components/retrievers",
+ "components/text-splitters",
+ "components/toolkits",
+ "components/tools",
],
},
- // {
- // type: 'category',
- // label: 'Components',
- // collapsed: false,
- // items: [
- // 'components/agents', 'components/chains', 'components/loaders', 'components/embeddings', 'components/llms',
- // 'components/memories', 'components/prompts','components/text-splitters', 'components/toolkits', 'components/tools',
- // 'components/utilities', 'components/vector-stores', 'components/wrappers',
- // ],
- // },
{
type: "category",
label: "Examples",
@@ -79,13 +109,10 @@ module.exports = {
"examples/flow-runner",
"examples/conversation-chain",
"examples/buffer-memory",
- "examples/midjourney-prompt-chain",
"examples/csv-loader",
"examples/searchapi-tool",
"examples/serp-api-tool",
- "examples/multiple-vectorstores",
"examples/python-function",
- "examples/how-upload-examples",
],
},
{
diff --git a/docs/src/theme/DownloadableJsonFile.js b/docs/src/theme/DownloadableJsonFile.js
new file mode 100644
index 000000000..7b5466eac
--- /dev/null
+++ b/docs/src/theme/DownloadableJsonFile.js
@@ -0,0 +1,29 @@
+const DownloadableJsonFile = ({ source, title }) => {
+ const handleDownload = (event) => {
+ event.preventDefault();
+ fetch(source)
+ .then((response) => response.blob())
+ .then((blob) => {
+ const url = window.URL.createObjectURL(
+ new Blob([blob], { type: "application/json" })
+ );
+ const link = document.createElement("a");
+ link.href = url;
+ link.setAttribute("download", title);
+ document.body.appendChild(link);
+ link.click();
+ link.parentNode.removeChild(link);
+ })
+ .catch((error) => {
+ console.error("Error downloading file:", error);
+ });
+ };
+
+ return (
+
+ {title}
+
+ );
+};
+
+export default DownloadableJsonFile;
diff --git a/docs/static/data/AstraDB-RAG-Flows.json b/docs/static/data/AstraDB-RAG-Flows.json
new file mode 100644
index 000000000..5706a0fbf
--- /dev/null
+++ b/docs/static/data/AstraDB-RAG-Flows.json
@@ -0,0 +1,3403 @@
+{
+ "id": "51e2b78a-199b-4054-9f32-e288eef6924c",
+ "data": {
+ "nodes": [
+ {
+ "id": "ChatInput-yxMKE",
+ "type": "genericNode",
+ "position": {
+ "x": 1195.5276981160775,
+ "y": 209.421875
+ },
+ "data": {
+ "type": "ChatInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "value": "what is a line"
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Machine",
+ "User"
+ ],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Get chat inputs from the Interaction Panel.",
+ "icon": "ChatInput",
+ "base_classes": [
+ "Text",
+ "str",
+ "object",
+ "Record"
+ ],
+ "display_name": "Chat Input",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
+ },
+ "output_types": [
+ "Text",
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatInput-yxMKE"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383
+ },
+ {
+ "id": "TextOutput-BDknO",
+ "type": "genericNode",
+ "position": {
+ "x": 2322.600672827879,
+ "y": 604.9467307442569
+ },
+ "data": {
+ "type": "TextOutput",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": [
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "Text or Record to be passed as output.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Display a text output in the Interaction Panel.",
+ "icon": "type",
+ "base_classes": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "display_name": "Extracted Chunks",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
+ },
+ "output_types": [
+ "Text"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextOutput-BDknO"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 289,
+ "positionAbsolute": {
+ "x": 2322.600672827879,
+ "y": 604.9467307442569
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIEmbeddings-ZlOk1",
+ "type": "genericNode",
+ "position": {
+ "x": 1183.667250865064,
+ "y": 687.3171828430261
+ },
+ "data": {
+ "type": "OpenAIEmbeddings",
+ "node": {
+ "template": {
+ "allowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "allowed_special",
+ "display_name": "Allowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "client": {
+ "type": "Any",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "client",
+ "display_name": "Client",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_headers": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_headers",
+ "display_name": "Default Headers",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_query": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_query",
+ "display_name": "Default Query",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "deployment": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "deployment",
+ "display_name": "Deployment",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "disallowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [
+ "all"
+ ],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "disallowed_special",
+ "display_name": "Disallowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "embedding_ctx_length": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 8191,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding_ctx_length",
+ "display_name": "Embedding Context Length",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_retries": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 6,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_retries",
+ "display_name": "Max Retries",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "text-embedding-3-small",
+ "text-embedding-3-large",
+ "text-embedding-ada-002"
+ ],
+ "name": "model",
+ "display_name": "Model",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "openai_api_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_type",
+ "display_name": "OpenAI API Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_version": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_version",
+ "display_name": "OpenAI API Version",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_organization": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_organization",
+ "display_name": "OpenAI Organization",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_proxy": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_proxy",
+ "display_name": "OpenAI Proxy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "request_timeout": {
+ "type": "float",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "request_timeout",
+ "display_name": "Request Timeout",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "show_progress_bar": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "show_progress_bar",
+ "display_name": "Show Progress Bar",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "skip_empty": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "skip_empty",
+ "display_name": "Skip Empty",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_enable": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_enable",
+ "display_name": "TikToken Enable",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_model_name",
+ "display_name": "TikToken Model Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generate embeddings using OpenAI models.",
+ "base_classes": [
+ "Embeddings"
+ ],
+ "display_name": "OpenAI Embeddings",
+ "documentation": "",
+ "custom_fields": {
+ "openai_api_key": null,
+ "default_headers": null,
+ "default_query": null,
+ "allowed_special": null,
+ "disallowed_special": null,
+ "chunk_size": null,
+ "client": null,
+ "deployment": null,
+ "embedding_ctx_length": null,
+ "max_retries": null,
+ "model": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "openai_api_type": null,
+ "openai_api_version": null,
+ "openai_organization": null,
+ "openai_proxy": null,
+ "request_timeout": null,
+ "show_progress_bar": null,
+ "skip_empty": null,
+ "tiktoken_enable": null,
+ "tiktoken_model_name": null
+ },
+ "output_types": [
+ "Embeddings"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "OpenAIEmbeddings-ZlOk1"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "dragging": false
+ },
+ {
+ "id": "OpenAIModel-EjXlN",
+ "type": "genericNode",
+ "position": {
+ "x": 3410.117202077183,
+ "y": 431.2038048137648
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-3.5-turbo",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-4-0125-preview",
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
+ "gpt-3.5-turbo-0125",
+ "gpt-3.5-turbo-1106"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 0.1,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
+ },
+ "output_types": [
+ "Text"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-EjXlN"
+ },
+ "selected": true,
+ "width": 384,
+ "height": 563,
+ "positionAbsolute": {
+ "x": 3410.117202077183,
+ "y": 431.2038048137648
+ },
+ "dragging": false
+ },
+ {
+ "id": "Prompt-xeI6K",
+ "type": "genericNode",
+ "position": {
+ "x": 2969.0261961391298,
+ "y": 442.1613649809069
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": [
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "context": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "context",
+ "display_name": "context",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ },
+ "question": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "question",
+ "display_name": "question",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
+ },
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": [
+ "context",
+ "question"
+ ]
+ },
+ "output_types": [
+ "Text"
+ ],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-xeI6K",
+ "description": "Create a prompt template with dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 477,
+ "positionAbsolute": {
+ "x": 2969.0261961391298,
+ "y": 442.1613649809069
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-Q39I8",
+ "type": "genericNode",
+ "position": {
+ "x": 3887.2073667611485,
+ "y": 588.4801225794856
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "In case of Message being a Record, this template will be used to convert it to text.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Machine",
+ "User"
+ ],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "AI",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Display a chat message in the Interaction Panel.",
+ "icon": "ChatOutput",
+ "base_classes": [
+ "object",
+ "Text",
+ "Record",
+ "str"
+ ],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null,
+ "record_template": null
+ },
+ "output_types": [
+ "Text",
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-Q39I8"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 3887.2073667611485,
+ "y": 588.4801225794856
+ },
+ "dragging": false
+ },
+ {
+ "id": "File-t0a6a",
+ "type": "genericNode",
+ "position": {
+ "x": 2257.233450682836,
+ "y": 1747.5389618367233
+ },
+ "data": {
+ "type": "File",
+ "node": {
+ "template": {
+ "path": {
+ "type": "file",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [
+ ".txt",
+ ".md",
+ ".mdx",
+ ".csv",
+ ".json",
+ ".yaml",
+ ".yml",
+ ".xml",
+ ".html",
+ ".htm",
+ ".pdf",
+ ".docx"
+ ],
+ "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf",
+ "password": false,
+ "name": "path",
+ "display_name": "Path",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx",
+ "load_from_db": false,
+ "title_case": false,
+ "value": ""
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "silent_errors": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "silent_errors",
+ "display_name": "Silent Errors",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If true, errors will not raise an exception.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "A generic file loader.",
+ "icon": "file-text",
+ "base_classes": [
+ "Record"
+ ],
+ "display_name": "File",
+ "documentation": "",
+ "custom_fields": {
+ "path": null,
+ "silent_errors": null
+ },
+ "output_types": [
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "File-t0a6a"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 281,
+ "positionAbsolute": {
+ "x": 2257.233450682836,
+ "y": 1747.5389618367233
+ },
+ "dragging": false
+ },
+ {
+ "id": "RecursiveCharacterTextSplitter-tR9QM",
+ "type": "genericNode",
+ "position": {
+ "x": 2791.013514133929,
+ "y": 1462.9588953494142
+ },
+ "data": {
+ "type": "RecursiveCharacterTextSplitter",
+ "node": {
+ "template": {
+ "inputs": {
+ "type": "Document",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "inputs",
+ "display_name": "Input",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "Record"
+ ],
+ "dynamic": false,
+ "info": "The texts to split.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "chunk_overlap": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 200,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_overlap",
+ "display_name": "Chunk Overlap",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The amount of overlap between chunks.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The maximum length of each chunk.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = 
None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "separators": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "separators",
+ "display_name": "Separators",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": [
+ ""
+ ]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Split text into chunks of a specified length.",
+ "base_classes": [
+ "Record"
+ ],
+ "display_name": "Recursive Character Text Splitter",
+ "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter",
+ "custom_fields": {
+ "inputs": null,
+ "separators": null,
+ "chunk_size": null,
+ "chunk_overlap": null
+ },
+ "output_types": [
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "RecursiveCharacterTextSplitter-tR9QM"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 501,
+ "positionAbsolute": {
+ "x": 2791.013514133929,
+ "y": 1462.9588953494142
+ },
+ "dragging": false
+ },
+ {
+ "id": "AstraDBSearch-41nRz",
+ "type": "genericNode",
+ "position": {
+ "x": 1723.976434815103,
+ "y": 277.03317407245913
+ },
+ "data": {
+ "type": "AstraDBSearch",
+ "node": {
+ "template": {
+ "embedding": {
+ "type": "Embeddings",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding",
+ "display_name": "Embedding",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Embedding to use",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input Value",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Input value to search",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "api_endpoint": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "api_endpoint",
+ "display_name": "API Endpoint",
+ "advanced": false,
+ "dynamic": false,
+ "info": "API endpoint URL for the Astra DB service.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "batch_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "batch_size",
+ "display_name": "Batch Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional number of records to process in a single batch.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_delete_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_delete_concurrency",
+ "display_name": "Bulk Delete Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk delete operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_batch_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_batch_concurrency",
+ "display_name": "Bulk Insert Batch Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_overwrite_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_overwrite_concurrency",
+ "display_name": "Bulk Insert Overwrite Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": 
True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like βSyncβ, βAsyncβ, or βOffβ.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: 
Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_indexing_policy": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_indexing_policy",
+ "display_name": "Collection Indexing Policy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional dictionary defining the indexing policy for the collection.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_name",
+ "display_name": "Collection Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The name of the collection within Astra DB where the vectors will be stored.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": "langflow"
+ },
+ "metadata_indexing_exclude": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_exclude",
+ "display_name": "Metadata Indexing Exclude",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to exclude from the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "metadata_indexing_include": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_include",
+ "display_name": "Metadata Indexing Include",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to include in the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "metric": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metric",
+ "display_name": "Metric",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional distance metric for vector comparisons in the vector store.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "namespace": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "namespace",
+ "display_name": "Namespace",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional namespace within Astra DB to use for the collection.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "number_of_results": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 4,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "number_of_results",
+ "display_name": "Number of Results",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Number of results to return.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "pre_delete_collection": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "pre_delete_collection",
+ "display_name": "Pre Delete Collection",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "search_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Similarity",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Similarity",
+ "MMR"
+ ],
+ "name": "search_type",
+ "display_name": "Search Type",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "setup_mode": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Sync",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Sync",
+ "Async",
+ "Off"
+ ],
+ "name": "setup_mode",
+ "display_name": "Setup Mode",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "token": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "token",
+ "display_name": "Token",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Authentication token for accessing Astra DB.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Searches an existing Astra DB Vector Store.",
+ "icon": "AstraDB",
+ "base_classes": [
+ "Record"
+ ],
+ "display_name": "Astra DB Search",
+ "documentation": "",
+ "custom_fields": {
+ "embedding": null,
+ "collection_name": null,
+ "input_value": null,
+ "token": null,
+ "api_endpoint": null,
+ "search_type": null,
+ "number_of_results": null,
+ "namespace": null,
+ "metric": null,
+ "batch_size": null,
+ "bulk_insert_batch_concurrency": null,
+ "bulk_insert_overwrite_concurrency": null,
+ "bulk_delete_concurrency": null,
+ "setup_mode": null,
+ "pre_delete_collection": null,
+ "metadata_indexing_include": null,
+ "metadata_indexing_exclude": null,
+ "collection_indexing_policy": null
+ },
+ "output_types": [
+ "Record"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "token",
+ "api_endpoint",
+ "collection_name",
+ "input_value",
+ "embedding"
+ ],
+ "beta": false
+ },
+ "id": "AstraDBSearch-41nRz"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 713,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 1723.976434815103,
+ "y": 277.03317407245913
+ }
+ },
+ {
+ "id": "AstraDB-eUCSS",
+ "type": "genericNode",
+ "position": {
+ "x": 3372.04958055989,
+ "y": 1611.0742035495277
+ },
+ "data": {
+ "type": "AstraDB",
+ "node": {
+ "template": {
+ "embedding": {
+ "type": "Embeddings",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding",
+ "display_name": "Embedding",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Embedding to use",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "inputs": {
+ "type": "Record",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "inputs",
+ "display_name": "Inputs",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Optional list of records to be processed and stored in the vector store.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "api_endpoint": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "api_endpoint",
+ "display_name": "API Endpoint",
+ "advanced": false,
+ "dynamic": false,
+ "info": "API endpoint URL for the Astra DB service.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "batch_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "batch_size",
+ "display_name": "Batch Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional number of records to process in a single batch.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_delete_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_delete_concurrency",
+ "display_name": "Bulk Delete Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk delete operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_batch_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_batch_concurrency",
+ "display_name": "Bulk Insert Batch Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_overwrite_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_overwrite_concurrency",
+ "display_name": "Bulk Insert Overwrite Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert 
Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like βSyncβ, βAsyncβ, or βOffβ.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n 
collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_indexing_policy": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_indexing_policy",
+ "display_name": "Collection Indexing Policy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional dictionary defining the indexing policy for the collection.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_name",
+ "display_name": "Collection Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The name of the collection within Astra DB where the vectors will be stored.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": "langflow"
+ },
+ "metadata_indexing_exclude": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_exclude",
+ "display_name": "Metadata Indexing Exclude",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to exclude from the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "metadata_indexing_include": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_include",
+ "display_name": "Metadata Indexing Include",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to include in the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "metric": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metric",
+ "display_name": "Metric",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional distance metric for vector comparisons in the vector store.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "namespace": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "namespace",
+ "display_name": "Namespace",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional namespace within Astra DB to use for the collection.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "pre_delete_collection": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "pre_delete_collection",
+ "display_name": "Pre Delete Collection",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "setup_mode": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Async",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "Sync",
+ "Async",
+ "Off"
+ ],
+ "name": "setup_mode",
+ "display_name": "Setup Mode",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "token": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "token",
+ "display_name": "Token",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Authentication token for accessing Astra DB.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Builds or loads an Astra DB Vector Store.",
+ "icon": "AstraDB",
+ "base_classes": [
+ "VectorStore"
+ ],
+ "display_name": "Astra DB",
+ "documentation": "",
+ "custom_fields": {
+ "embedding": null,
+ "token": null,
+ "api_endpoint": null,
+ "collection_name": null,
+ "inputs": null,
+ "namespace": null,
+ "metric": null,
+ "batch_size": null,
+ "bulk_insert_batch_concurrency": null,
+ "bulk_insert_overwrite_concurrency": null,
+ "bulk_delete_concurrency": null,
+ "setup_mode": null,
+ "pre_delete_collection": null,
+ "metadata_indexing_include": null,
+ "metadata_indexing_exclude": null,
+ "collection_indexing_policy": null
+ },
+ "output_types": [
+ "VectorStore"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "token",
+ "api_endpoint",
+ "collection_name",
+ "inputs",
+ "embedding"
+ ],
+ "beta": false
+ },
+ "id": "AstraDB-eUCSS"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 573,
+ "positionAbsolute": {
+ "x": 3372.04958055989,
+ "y": 1611.0742035495277
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIEmbeddings-9TPjc",
+ "type": "genericNode",
+ "position": {
+ "x": 2814.0402191223047,
+ "y": 1955.9268168273086
+ },
+ "data": {
+ "type": "OpenAIEmbeddings",
+ "node": {
+ "template": {
+ "allowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "allowed_special",
+ "display_name": "Allowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "client": {
+ "type": "Any",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "client",
+ "display_name": "Client",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_headers": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_headers",
+ "display_name": "Default Headers",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_query": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_query",
+ "display_name": "Default Query",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "deployment": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "deployment",
+ "display_name": "Deployment",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "disallowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [
+ "all"
+ ],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "disallowed_special",
+ "display_name": "Disallowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "embedding_ctx_length": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 8191,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding_ctx_length",
+ "display_name": "Embedding Context Length",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_retries": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 6,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_retries",
+ "display_name": "Max Retries",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "text-embedding-3-small",
+ "text-embedding-3-large",
+ "text-embedding-ada-002"
+ ],
+ "name": "model",
+ "display_name": "Model",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ],
+ "value": ""
+ },
+ "openai_api_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_type",
+ "display_name": "OpenAI API Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_api_version": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_version",
+ "display_name": "OpenAI API Version",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_organization": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_organization",
+ "display_name": "OpenAI Organization",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "openai_proxy": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_proxy",
+ "display_name": "OpenAI Proxy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "request_timeout": {
+ "type": "float",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "request_timeout",
+ "display_name": "Request Timeout",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "show_progress_bar": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "show_progress_bar",
+ "display_name": "Show Progress Bar",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "skip_empty": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "skip_empty",
+ "display_name": "Skip Empty",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_enable": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_enable",
+ "display_name": "TikToken Enable",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_model_name",
+ "display_name": "TikToken Model Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": [
+ "Text"
+ ]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generate embeddings using OpenAI models.",
+ "base_classes": [
+ "Embeddings"
+ ],
+ "display_name": "OpenAI Embeddings",
+ "documentation": "",
+ "custom_fields": {
+ "openai_api_key": null,
+ "default_headers": null,
+ "default_query": null,
+ "allowed_special": null,
+ "disallowed_special": null,
+ "chunk_size": null,
+ "client": null,
+ "deployment": null,
+ "embedding_ctx_length": null,
+ "max_retries": null,
+ "model": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "openai_api_type": null,
+ "openai_api_version": null,
+ "openai_organization": null,
+ "openai_proxy": null,
+ "request_timeout": null,
+ "show_progress_bar": null,
+ "skip_empty": null,
+ "tiktoken_enable": null,
+ "tiktoken_model_name": null
+ },
+ "output_types": [
+ "Embeddings"
+ ],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "OpenAIEmbeddings-9TPjc"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 2814.0402191223047,
+ "y": 1955.9268168273086
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "TextOutput-BDknO",
+ "target": "Prompt-xeI6K",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕobjectΕ,ΕTextΕ,ΕstrΕ],ΕdataTypeΕ:ΕTextOutputΕ,ΕidΕ:ΕTextOutput-BDknOΕ}",
+ "targetHandle": "{ΕfieldNameΕ:ΕcontextΕ,ΕidΕ:ΕPrompt-xeI6KΕ,ΕinputTypesΕ:[ΕDocumentΕ,ΕBaseOutputParserΕ,ΕRecordΕ,ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "id": "reactflow__edge-TextOutput-BDknO{ΕbaseClassesΕ:[ΕobjectΕ,ΕTextΕ,ΕstrΕ],ΕdataTypeΕ:ΕTextOutputΕ,ΕidΕ:ΕTextOutput-BDknOΕ}-Prompt-xeI6K{ΕfieldNameΕ:ΕcontextΕ,ΕidΕ:ΕPrompt-xeI6KΕ,ΕinputTypesΕ:[ΕDocumentΕ,ΕBaseOutputParserΕ,ΕRecordΕ,ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "context",
+ "id": "Prompt-xeI6K",
+ "inputTypes": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "dataType": "TextOutput",
+ "id": "TextOutput-BDknO"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "ChatInput-yxMKE",
+ "target": "Prompt-xeI6K",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕTextΕ,ΕstrΕ,ΕobjectΕ,ΕRecordΕ],ΕdataTypeΕ:ΕChatInputΕ,ΕidΕ:ΕChatInput-yxMKEΕ}",
+ "targetHandle": "{ΕfieldNameΕ:ΕquestionΕ,ΕidΕ:ΕPrompt-xeI6KΕ,ΕinputTypesΕ:[ΕDocumentΕ,ΕBaseOutputParserΕ,ΕRecordΕ,ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "id": "reactflow__edge-ChatInput-yxMKE{ΕbaseClassesΕ:[ΕTextΕ,ΕstrΕ,ΕobjectΕ,ΕRecordΕ],ΕdataTypeΕ:ΕChatInputΕ,ΕidΕ:ΕChatInput-yxMKEΕ}-Prompt-xeI6K{ΕfieldNameΕ:ΕquestionΕ,ΕidΕ:ΕPrompt-xeI6KΕ,ΕinputTypesΕ:[ΕDocumentΕ,ΕBaseOutputParserΕ,ΕRecordΕ,ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "question",
+ "id": "Prompt-xeI6K",
+ "inputTypes": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Text",
+ "str",
+ "object",
+ "Record"
+ ],
+ "dataType": "ChatInput",
+ "id": "ChatInput-yxMKE"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "Prompt-xeI6K",
+ "target": "OpenAIModel-EjXlN",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕobjectΕ,ΕTextΕ,ΕstrΕ],ΕdataTypeΕ:ΕPromptΕ,ΕidΕ:ΕPrompt-xeI6KΕ}",
+ "targetHandle": "{ΕfieldNameΕ:Εinput_valueΕ,ΕidΕ:ΕOpenAIModel-EjXlNΕ,ΕinputTypesΕ:[ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "id": "reactflow__edge-Prompt-xeI6K{ΕbaseClassesΕ:[ΕobjectΕ,ΕTextΕ,ΕstrΕ],ΕdataTypeΕ:ΕPromptΕ,ΕidΕ:ΕPrompt-xeI6KΕ}-OpenAIModel-EjXlN{ΕfieldNameΕ:Εinput_valueΕ,ΕidΕ:ΕOpenAIModel-EjXlNΕ,ΕinputTypesΕ:[ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-EjXlN",
+ "inputTypes": [
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "dataType": "Prompt",
+ "id": "Prompt-xeI6K"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIModel-EjXlN",
+ "target": "ChatOutput-Q39I8",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕobjectΕ,ΕTextΕ,ΕstrΕ],ΕdataTypeΕ:ΕOpenAIModelΕ,ΕidΕ:ΕOpenAIModel-EjXlNΕ}",
+ "targetHandle": "{ΕfieldNameΕ:Εinput_valueΕ,ΕidΕ:ΕChatOutput-Q39I8Ε,ΕinputTypesΕ:[ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "id": "reactflow__edge-OpenAIModel-EjXlN{ΕbaseClassesΕ:[ΕobjectΕ,ΕTextΕ,ΕstrΕ],ΕdataTypeΕ:ΕOpenAIModelΕ,ΕidΕ:ΕOpenAIModel-EjXlNΕ}-ChatOutput-Q39I8{ΕfieldNameΕ:Εinput_valueΕ,ΕidΕ:ΕChatOutput-Q39I8Ε,ΕinputTypesΕ:[ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-Q39I8",
+ "inputTypes": [
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "object",
+ "Text",
+ "str"
+ ],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-EjXlN"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "File-t0a6a",
+ "target": "RecursiveCharacterTextSplitter-tR9QM",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕRecordΕ],ΕdataTypeΕ:ΕFileΕ,ΕidΕ:ΕFile-t0a6aΕ}",
+ "targetHandle": "{ΕfieldNameΕ:ΕinputsΕ,ΕidΕ:ΕRecursiveCharacterTextSplitter-tR9QMΕ,ΕinputTypesΕ:[ΕDocumentΕ,ΕRecordΕ],ΕtypeΕ:ΕDocumentΕ}",
+ "id": "reactflow__edge-File-t0a6a{ΕbaseClassesΕ:[ΕRecordΕ],ΕdataTypeΕ:ΕFileΕ,ΕidΕ:ΕFile-t0a6aΕ}-RecursiveCharacterTextSplitter-tR9QM{ΕfieldNameΕ:ΕinputsΕ,ΕidΕ:ΕRecursiveCharacterTextSplitter-tR9QMΕ,ΕinputTypesΕ:[ΕDocumentΕ,ΕRecordΕ],ΕtypeΕ:ΕDocumentΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "inputs",
+ "id": "RecursiveCharacterTextSplitter-tR9QM",
+ "inputTypes": [
+ "Document",
+ "Record"
+ ],
+ "type": "Document"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Record"
+ ],
+ "dataType": "File",
+ "id": "File-t0a6a"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIEmbeddings-ZlOk1",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕEmbeddingsΕ],ΕdataTypeΕ:ΕOpenAIEmbeddingsΕ,ΕidΕ:ΕOpenAIEmbeddings-ZlOk1Ε}",
+ "target": "AstraDBSearch-41nRz",
+ "targetHandle": "{ΕfieldNameΕ:ΕembeddingΕ,ΕidΕ:ΕAstraDBSearch-41nRzΕ,ΕinputTypesΕ:null,ΕtypeΕ:ΕEmbeddingsΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "embedding",
+ "id": "AstraDBSearch-41nRz",
+ "inputTypes": null,
+ "type": "Embeddings"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Embeddings"
+ ],
+ "dataType": "OpenAIEmbeddings",
+ "id": "OpenAIEmbeddings-ZlOk1"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{ΕbaseClassesΕ:[ΕEmbeddingsΕ],ΕdataTypeΕ:ΕOpenAIEmbeddingsΕ,ΕidΕ:ΕOpenAIEmbeddings-ZlOk1Ε}-AstraDBSearch-41nRz{ΕfieldNameΕ:ΕembeddingΕ,ΕidΕ:ΕAstraDBSearch-41nRzΕ,ΕinputTypesΕ:null,ΕtypeΕ:ΕEmbeddingsΕ}"
+ },
+ {
+ "source": "ChatInput-yxMKE",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕTextΕ,ΕstrΕ,ΕobjectΕ,ΕRecordΕ],ΕdataTypeΕ:ΕChatInputΕ,ΕidΕ:ΕChatInput-yxMKEΕ}",
+ "target": "AstraDBSearch-41nRz",
+ "targetHandle": "{ΕfieldNameΕ:Εinput_valueΕ,ΕidΕ:ΕAstraDBSearch-41nRzΕ,ΕinputTypesΕ:[ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "AstraDBSearch-41nRz",
+ "inputTypes": [
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Text",
+ "str",
+ "object",
+ "Record"
+ ],
+ "dataType": "ChatInput",
+ "id": "ChatInput-yxMKE"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-ChatInput-yxMKE{ΕbaseClassesΕ:[ΕTextΕ,ΕstrΕ,ΕobjectΕ,ΕRecordΕ],ΕdataTypeΕ:ΕChatInputΕ,ΕidΕ:ΕChatInput-yxMKEΕ}-AstraDBSearch-41nRz{ΕfieldNameΕ:Εinput_valueΕ,ΕidΕ:ΕAstraDBSearch-41nRzΕ,ΕinputTypesΕ:[ΕTextΕ],ΕtypeΕ:ΕstrΕ}"
+ },
+ {
+ "source": "RecursiveCharacterTextSplitter-tR9QM",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕRecordΕ],ΕdataTypeΕ:ΕRecursiveCharacterTextSplitterΕ,ΕidΕ:ΕRecursiveCharacterTextSplitter-tR9QMΕ}",
+ "target": "AstraDB-eUCSS",
+ "targetHandle": "{ΕfieldNameΕ:ΕinputsΕ,ΕidΕ:ΕAstraDB-eUCSSΕ,ΕinputTypesΕ:null,ΕtypeΕ:ΕRecordΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "inputs",
+ "id": "AstraDB-eUCSS",
+ "inputTypes": null,
+ "type": "Record"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Record"
+ ],
+ "dataType": "RecursiveCharacterTextSplitter",
+ "id": "RecursiveCharacterTextSplitter-tR9QM"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{ΕbaseClassesΕ:[ΕRecordΕ],ΕdataTypeΕ:ΕRecursiveCharacterTextSplitterΕ,ΕidΕ:ΕRecursiveCharacterTextSplitter-tR9QMΕ}-AstraDB-eUCSS{ΕfieldNameΕ:ΕinputsΕ,ΕidΕ:ΕAstraDB-eUCSSΕ,ΕinputTypesΕ:null,ΕtypeΕ:ΕRecordΕ}",
+ "selected": false
+ },
+ {
+ "source": "OpenAIEmbeddings-9TPjc",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕEmbeddingsΕ],ΕdataTypeΕ:ΕOpenAIEmbeddingsΕ,ΕidΕ:ΕOpenAIEmbeddings-9TPjcΕ}",
+ "target": "AstraDB-eUCSS",
+ "targetHandle": "{ΕfieldNameΕ:ΕembeddingΕ,ΕidΕ:ΕAstraDB-eUCSSΕ,ΕinputTypesΕ:null,ΕtypeΕ:ΕEmbeddingsΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "embedding",
+ "id": "AstraDB-eUCSS",
+ "inputTypes": null,
+ "type": "Embeddings"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Embeddings"
+ ],
+ "dataType": "OpenAIEmbeddings",
+ "id": "OpenAIEmbeddings-9TPjc"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{ΕbaseClassesΕ:[ΕEmbeddingsΕ],ΕdataTypeΕ:ΕOpenAIEmbeddingsΕ,ΕidΕ:ΕOpenAIEmbeddings-9TPjcΕ}-AstraDB-eUCSS{ΕfieldNameΕ:ΕembeddingΕ,ΕidΕ:ΕAstraDB-eUCSSΕ,ΕinputTypesΕ:null,ΕtypeΕ:ΕEmbeddingsΕ}",
+ "selected": false
+ },
+ {
+ "source": "AstraDBSearch-41nRz",
+ "sourceHandle": "{ΕbaseClassesΕ:[ΕRecordΕ],ΕdataTypeΕ:ΕAstraDBSearchΕ,ΕidΕ:ΕAstraDBSearch-41nRzΕ}",
+ "target": "TextOutput-BDknO",
+ "targetHandle": "{ΕfieldNameΕ:Εinput_valueΕ,ΕidΕ:ΕTextOutput-BDknOΕ,ΕinputTypesΕ:[ΕRecordΕ,ΕTextΕ],ΕtypeΕ:ΕstrΕ}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "TextOutput-BDknO",
+ "inputTypes": [
+ "Record",
+ "Text"
+ ],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": [
+ "Record"
+ ],
+ "dataType": "AstraDBSearch",
+ "id": "AstraDBSearch-41nRz"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-AstraDBSearch-41nRz{ΕbaseClassesΕ:[ΕRecordΕ],ΕdataTypeΕ:ΕAstraDBSearchΕ,ΕidΕ:ΕAstraDBSearch-41nRzΕ}-TextOutput-BDknO{ΕfieldNameΕ:Εinput_valueΕ,ΕidΕ:ΕTextOutput-BDknOΕ,ΕinputTypesΕ:[ΕRecordΕ,ΕTextΕ],ΕtypeΕ:ΕstrΕ}"
+ }
+ ],
+ "viewport": {
+ "x": -259.6782520315529,
+ "y": 90.3428735006047,
+ "zoom": 0.2687057134854984
+ }
+ },
+ "description": "Visit https://pre-release.langflow.org/guides/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.",
+ "name": "Vector Store RAG",
+ "last_tested_version": "1.0.0a0",
+ "is_component": false
+}
\ No newline at end of file
diff --git a/docs/static/img/add-new-variable.png b/docs/static/img/add-new-variable.png
new file mode 100644
index 0000000000000000000000000000000000000000..178cab67c0ec12e7c9b2ae6700488f18b852fc00
GIT binary patch
literal 49435
zcmdq}XH=7Iw>AnZf>NX?3erIkq~CM_X(C0biV#{rq<18QUPJ{%q)2Z8M0)Qf5Q-?B
z(0d4=KnMXMHBz(Vv%dEmYu#k+KYQ#yn=wKHxyn4x+2=gw6{)YQK~2d_dFj$6>L;3y
zpI^E}x^d~!}=`@=K(bnJvKY7mc`_d)lOHUrFyzsrexo|De9FT_F(SVt3
z!*Fgr3f1F~88q3=!kgc&-sG$js`}C@1h(?AqM&;jN*Aw8iC%aO{qBuiigC%CIz3;5IKvSbL2WH`9^wYN(VR5
zuL(EAUbxqP?*ISbhBXF9*X!%+i(4N~t6$s$I%Q6`&`+N}H4Qs{qK{{tn53LFw+?hg
zT)D7Ygd?9__CeJ^2P^zYeV$NkAVlr2kiC=#V^-CL+fXNCd5&`@}CQn7gV=sRk>BgU#~Cs?*lg9w$HG0Jv!;WoCm
zPj!uwZS1Gd){_>NGZQSEG4UYWOhx=N3loU)TZT=36hhtd@w?{!eu3hm-5Z0l{{-H>
z$UbsO`ODPXo}k)$N-`Sa5)$Pdx*SvUdlh09oZC6@Ck&8?@t`d!fQ1FWA#cuCqO6M3
zCGqp)tr{3k5K4gi1&sUQ42~>blYzJ6;IkrvmH%)74ZqtD_KTzQ(7LI%(A!SWXCD@<
zMg3`FB_?bRDkO3&8I=Xy+CxV4;9EskJ;}&q3nG9O_^pM@kO284NwI5S-G2&czs^st
zkqz|s4|z;E5y&kJeDDe|k>CC38rK@_?7FND?Ek-aGPq&%l6Qjysbk!qp%Z
z8zSzbj6&*8TKC+V2z&2kCjw2#U**~xSzWbxl93xoKQehQw%FgHB8r$qDyxz`)#0nV
znef^$fTYD`oT9v`s6*-c6x&F1Ba1)Q12xSu;ioDG9IDSNyUUa33^1qDWa(P?AV*vg
zGv3%)@;%i(+7rRmq9xsasQGbG>Ewx*MKS+}w9yaoH(U6CF(y+DK`nRthWfnQspg64
zC3#--n~E-vec!Z$*64*=k#T5?a}UcHI=H!za5~e*T(Pwn7i1t>hX@&=`}FrEF~VNk
zBi*J4hSPTU6%1u+CNh%pe^PUkf;%JkGb8Rx&~F;1O^RjaM!qUMbmbA0_;xQdoT!9x
z#$6@xYnJ833e;rP)zr+DF_>*tf;?-WS36G!<-S~&w=$3hHW)>L_WGUnly4h?ld{j<
zBO27+dnJ?06I&WuB*zLE(GWJ}oS0!%t*gs$T9?ja%g0j$a$7S8;=p~TK924TF)wxH
z8p(<7jj&oy6{p@uO3y2m2G&>8HXO!1noV+^-IRXxRt}X8k1j0@%Kh~k5}%QAPc7^S
zE|N`*3i7%Kp+BX`rUJooq*L;gQnRA2WMJI~D8%>0jEXV#T~l-8CDt{1qH@T)B)A>_fH&_)DrFt2ZsX7+vP0{lZJ~KfU8nUqI^}uSO0Br|3izgVNUPu3ADUi
z)YH@?wnyQeyj>l)CuRi=9RO`5dVo3)w-<>VtQnYzok*fGt97ea61G9P_YN`@*k-zO
zQWrUNSd2xr&=5=)@mU(q8=keb
z@gDLbp+spuCST9JX=1_@daa`P3H+bw4l*Fic@7+1P)NTl`@<5o(U_8?rjMHWO>D^{
z{-lUzV+VaeN+H=^{dZs)JGj%l!pO;K^9U_2@vhJ6XX7tzBC2qibWE!Q66F|d#Mm!;
z8Mur5LAkMCJI
z%KD7-^wGq;GM#4mB^97kP_rD-bHykgU*CwYey*mir_4>9c1=#hW|QwZBGE3sFNhyu
z@g_%PtNoD$kwqxy+#{!(sDJPN*@u{=)(p7(7Fvg=^dll2mqL>{mG!4@wy7a66Zx8R
zi;R3@>|Ah#jM%bG-x5}>^@qXl8WEC1>litUv`#AH7>Uv3KtVzZ7G5@6VIy|ovH#C4
zc2;=#o-fgDTA|uh@_l@`sdP6YmOX|3o^6fWFPh@=@+3Ih{8KDDFfXX(6?(4u-M-lT
z(NS|>$QvVXwYN~C!>~Q*MQf+b6B<8eP~$bJZc^`7>M~QK^|W6%%oXZ7t$1Yf<0{5@
zF>AqQC9X}}ZiSOcuBs9I2{J<}-Y;0HLuZO{FVBYSfx-1h;sRFyj+W;4)x!KjgQSTu
z$TYkIsx!a35GJ_#%N7&A`$z%ml_bz#!iz+tKPbwf-x>B67`1yz`StD@o%N75DxXUn`*^Qx^0Q{E^Tiw?=?z?{6wOuHH
zc<%q{tJ?BnZ*dk;(BS>Vbo0;`RLN81yuC;_9C6W}<$dW$kDgy0wO>qp7ZDrw@T{Xv
z!Di$8Gx67(`uuJN_iGpb=v(;jbt$)4y*uvzM*^SOqDzZBNW$38B7_xM^uj)-fU0Q;?rYNtsX>c=6H(32G+xMCGSv-yi7SI+Kx
zM~9fZX5qC=Wf4Sm``K|tM{~a2jgK-&piB5|i6a
zV9~(wa%}MFxCet2??GAFE*Z@#YE{#XChGfD3o+
zgmA58@VFcXBm?B$n!9ZVj@%BcTCz52Z~-svH@HTvCTCPqY}OL-Ig9%lTBw;Rw4cl+Y22P~
zv=oGVLo7PYT=!-!)c}^R-)(dpH#&;Vo^5}SV*XIUQDvdjeJ}-oGN44#E`4^q&r?L@
zP<4?G9sV3DbzfB|2!2Pf(Og2}r?np{hl+6w-N(GtJnz)OSh00vACuw+lsf8h1v{CIs}cR}7gZgaEj!|+%-jZH&A$|~@pKMtMZsS4iH_1l*XNL&uH*Zagg
z<~Xw^!rZMUfYgVx&G`*8$>wDT?!`Fgmz(5$<2Uo!PHI;=Dyf~{P@>o{J*y<-(ErX1
zMsAV?wlUL>%~EM(1|8pNKJVZA+EyXsvKRgunE+GCl352eVi#Ll0uQE2?+HrWc_M7u
zpu5)A_MR=sWhUayxIypJ7stvzl41HJb@Ghs~cF=6Azy-rm
zM-vKI6Rxw^pBorXB1pI?7|8}1b~{^R3*E3PxMopyZ%TPM2!^S6FznCEM$%$6AmWo?
z)TpCBz0u}rap!{Kn$~nAvjy0oOLRg3tRsaH+A^gT-DVZ3Vp0_bh)#>3E=vfNrR=yx
z(a$X$qkM$vDxV6l^mNY8@q#IIC?>%Qn>)hh)KEGNhum9(!JtsE1Hqz91qaD@T|I0_
zcc6tGf20Xzo2azUnf;l>VvFzO4n3P{_`EP@(bU|AtylciB4>;UTIJ-s@
z88f^+R{YfqGo=PkmsYdWR?_N@VH#Zm{5}d4*T5biNoL!u8;@r1@x^4vSttZ(X1Bp?
zes`REjvoN}msGC&gr-=a&?t*)crzx$hx9O(Ly13DVW)Kvf)CxCs|OrnmG1kI9qon<
z55;2_!S*?9EEmKrE>#&^ug4%_!t$UQ&JgfIA{lt%(
zhY)#irVFx-&*NuqK8EM>m%31@yq&8mK>85B7Tjh6$LSxy?$3dWMVn~}Hc9^7bZ~|Z
z@|0|M4}S#HmJfP+ii*Vpd37ERO$sy&^_K`7opk99z;FseW{nXsM^lrA$rZ-G+Gnum
zzU@kPPT`GhLJ54)QRu5?joWdSr@OR9^C9@g`Fq^ls_X$9YR#AvrIeZ)PfSzlZ(Zv9
z7nU{RahYw`)DXzlG%^v%ePw=Bx)rwglkKo%`Y_xRGZY8{JZ?c3_Utod2kyA?D;TCQ
z35mfrOr7<6MyBrsTRop#Nt#~k%aag|x-%Bmf{ma`>HRdI#1|5@RP7PLJq3C2`nlc0
zAKN2j$f?gv96aDvIeG}!jlivpl`4ti15f?1%T0#|CfL*NkoVzUuYqCR7f964?%1F<
zE9a$uwW(7GTa15(C~b!ZtWWxE?O2DCrf<&q=+mUbAD36tO-PM4D&n?iS1h;xG-S#d
z-qpDv!j6nss7EC|0)BJonI3%hKHOXoWwi8*8Vm_}HpA%+r26WceuO$MY|+)!^N|#qVQ@(icb^QND~TDGu`Y5;JhB
zk41l+I~fAD!hRuoo>wdQrz)IHb(gmoyw0!-(Ft#p?gYGe_KeH&liD{Fs!bjS&d9M#80SvC(S1x^8#
zaxC4Oe`#eWI9IkkhI*k_lwJ@DJOhBT9A&d^iA$FFf7USfr^t
za69c9eA4)G`FZK_u2#Xo;`wR&^Op>s9x1H!sc3}HG3ZPxWuUw<#4INJK857bluz(o
zj==zn-My_n66$X*fptaONJd>H19rb992f37h|=%^)8gKAAg>%c6$b@)>nsUS%l~M}
zd0Yl?HZ$j8o4Y;mOdXTrY&GoUI+pU-`Mrrd9#ynoaKa2s5LM&+~!zsZdy?;P=O;WYAWl0_l!zj_~N)h<>ERJ0sYqQ_eb
z4FQYr6!-R7$_=9mB&S*9R^sKelL?;^{GXI|UkNUt_H)U9C&f8dBuKJ5$rW>nBVBDMp`P1n}=Yrv&<3x(71YwJsN9;jA$x17}
z$jfblJHP#yvaNu+gx7Nnz#XmCKZ=kni&VlChhZ#Wvk1)di;OxTS-qW9-2Q|cslwRn
z;u^*d*7+TsE#{XA23YEp2cBgpCDJOL8lSK3)!OXxoHR{7=!PT-PHWbSHaR&P*@b7y
z3b)-hgP5WBHoXtv^U$CaP!V$20tTI)55IarXbJWjC=1>ySn@KPoi&NKIu}iTPO#n%
z$!SUaCQOmIt*85@ZKl~1e>T3!28DmRKORKJ-Vo+WFpZszC8Z@l}7+RUz(ae#^}TLp-uwb(pbcYlL1iu7eB1N_~aHcl}AJ$P|RpoC}gwi+=-qcm|H(@+lIly`8srufgH|AuVA$u$B_@LjwL;VmR}zUvu+;;=yG-{X)J#tkQW4a_^O+$5IX(
zNw+5r5zVS6VVh$#lX(!hVmzp_P^A)d@3vq&4co1GR^0K@HDKwK_kL^C!7E{3zMbGt
z!gun;EXtjy;kz4~8Y=<#b3?~PxNa*9x7g&Dfxsx^FYS3_#yb5|rfS9o
z2o-zlbZ4i)Cnqt^gQ5m^)dITD+OdH;M#5QwA;$~Ds|2H{Wn~pSp~dzv@bNhY5RzCc
z+N7Q=65IOS(=FA)XVxyz=dRMR^7h_0KlLC7nhS+_<$f}#_cmwPkzeSE%WSnw6>zb;
zDin4_hk2~l?H$u`y5?-_LhzKL!d93teY2fh*0?};oA>6e^l#hqA^KNond1_%F75WI
zj)gH_a8cf0rCmR+tg$Hl!WWm6B;U5<`as4ZEC!khy33K4WERV=#N>J0z}B+5VPYBI
zXc_eAYTB1--0yLj(uy#>QiS2k0O$Tb>P
zZ)^<_s`Ouek82BUERmwp5&^>a{|fh}jN+$4zjyN@WwA{rnG0y!K0|;vU)<){T(XqoVQnR
z0SE-sW=xU^yyDMj*!m0!pe0Sul6KO6*;4OX_mLfZoKM5?3ZJWZa>`mmgUB{pK$fFQi=_?
zrgJCEiM)5~V|L$l{!$Sn8yo5jb;4ewJ1{ViR9Xq`Ezw$o&)VyK&U86(miOE@?iJqM
z-Cg;~gfcnMQ#^^UAEvpOBs~lz6m}XcHp-8p%Fi_A0q|5iwlQ|koy1vpk*f3_aV5xM
zo>`Tz&QhzQ-TS4YFP%AJ9yv)aGm&~!!DtqHAd~p%fqna*av`&7x#h<5NZF91e(=;$
zcJuCC$Hj_rLln~Q)MO{qKeDdNvj{E`WVZg$KH~pnZct6;*N9qq1gb-kixd{2N!cu9UhP4_cz9BoeMh_l+0UA
zrF|Lv$XUHyuOA4V(qSnpBCiKP_SQ5i_~>NugRiH8o#!2;RDdnsKRn-2U=W1Z@~PWD
zDzh5A9Ulw-1S%e_E`%g2;Sa1Zb!CIsn}0QAl*wUr8{Hs7P8CsClq%bHVotY0L;N$;
z5Fwis@(u&Ny>ZK3S0h|QMh8K*D;_5^w88rWsf&QcJuS<#3L%p=Q)$=PWxTA(bAj>U
zM*71lnZld@iWx7HrfV=2j4<~_mFCmh#(vS@qieJ6_#Chh0?rqe
zGB8=&wnDqn+S7i5_30;-kTI|GJWMOpk^z*IyN2v%`w;Rb4O)zsA`JfrUAcPRuH*(h
zPcIb}qtc$C(s)4a3G!Ug!<~LJ6*dBVggH&Q+VyU8=Tccc7QnW(Yr-L7TFBrY_Js3E
zc|}ozlliRsmHpxhLpvlTPQgviVi7~~hnhlGkbpm>hjCHh0|2&a`k0)CU6D7o-C;N%
z+vXF5YMl7Rw3;+oU+`y9PYIH}vEjteUwXaO=j7)jet|)%%|cA~+R*&UX_Px1kZjVd
zrdE8l6;#;IWWameRhdaSw+GqcZB9J`@)&Z9P}W=W+8lN_vfa}qwVr2wN@pZ33{3=4eqThU)%ni
znYkI~?D#`|Pbsmqvb{^r+Y8&Ds$G^aqw?=cf9O>zmxX4)`3bFePmZX$Pqi)plHj;BB;En
zDcX?3RU_c{LeOx~X!>J?sR~yXzwO8qo1gEGTi!eUIWAp1z2_p@Q6%%4EmQ1F=m@16
za%(G}v6yZ`E&j{Dc^+DD!}k{?b`>qVSDvxxhnW-)u2V2fZV8ns8_#kB3}&vmy>eX)
z!W#U{%HFQ1G#5o$)os&n2mMi#SOHt-lxP(m?tXPg-X%L4YJ6vGO$IJ^S})yW@gRhw
zyPd5l+bMfdP|3|>7lRj^e_j0)Sov)Uc*R
z@Bd6+3&vN-uXYB62)q6wr#~>?0vDnbk8z6{)aW|li-?T$ln!kPz`~1hvmxO5=?K;C
zPR+q~Nzwc^yEig?E(6Xs=I9R9)y=iQLd)dRSG7*!hvM
zO3UjYKk(izGP&AXy_b;rijt8Xd2Y^#$@;Eqe%C_(17Nc_spSYV3qF14{uBjg<8Fxd
zdinh~*IKiCyduoM7Y=OsimXgAp*g}>p1q~OZ6{N~m9%PVG5GC9rY^p$G}i?S)wXc&
zlRA;!i!R~&y?=uD>1Jz!D%V>*bVt=#cRfKi9(W5~!zF`v(nC+D+-I4J#?L%1Bb2iI
zfixTn><&25X-gr9e7N`1xscxAh0&}wlTtm9-H%TRCVxjM
zeT~(R&~>Z=@SS)l(g#Ht$?i+B8u|G(x9&^@)YvVkOYkkI%r;`*guIS?IKEXA&?qw2
zB{Y*0CGFteaLn+D+~>@AgNF41PpMI5g6bMA{0p~6e#iSrNQ+lIyInOk*Ai
zJF%VKhOOCWZ%W{{C%g7;EZC=EAQU0;?7{ZGqu=Qz{@3Jd@Wmz2@e_GF&W@zzMwBHdj
z@?Jh#YCkEbf}-8V?P6#|E}8r$ChHW#U2EG{vUKX<@4jW?lb`m^ZLRsRsMA3u6PIey
zBQLYiz(%O@HB;Ffr3?Gd2sAEwrD-!MeJgQ>%aekKj^>wVFT^;PLM3neb_|;}=()&kI8O>Lj$b9x=ek6a
zyEg^Q
zlljy6C)b6-@}NH6{NEKg+Jj%WA#{j?~Nv6CCbY}j8n
z#)&4meLJbf18r(jo@{O91Zpw`svpjauunSQ?IcHpzvV~jBC+Vms>$Lehq~a`CBLu|--}@gK
zxl2LRO8@8JQ1x4!Mb3|1+=$)djWQ3Ry{sud!A%6T#5wYT(C3Z*FjRg~5DAz5@VBS_
zB;5a^_z-*_^D_cq?M8L;?F%94-yX(i0xIoppMETnRliZ@rX*zQccmNuUEKJON9Bws
zbgg-qHZIBmfg*>;gvF?TUo*PMCc>Q_-{dTc;!#x~N>TKELL>dd41$u_28U`A_TjTl
z5iy{ca?TAxm!0`Wh8Y*JEu?!w=={fDKOjnxWG-Pb+^;}_M7Qw&J*GZ;3%x#UJ>h>i
ztM>0O<-bJ6<}c>KNkJBQcSMJSJ6uq4fNO00$v#1TTV77
zj#t~K`@LqBHd~C=DP;D(iQ4Jw>#uRrO#+ynZ2r69_8%6H#FO=#`=!$O4Z-X7YHZp9
zH1z>8@P=)Mv@|9H9!HyEpuaS4uE!kQcS_R4Kt$Y>_il~oIxAXw2sCYOzDz6_pSTxL
z0bQDm*6?6&UCQ&Fo%Mcd{M;1zaMf3UsA$CB_ox|!cX}!f^=*(qp>WsEhvmyMhu6TJ
z`=T=ex}xO=$Co;P-OVo)$gdg8dR)02+j*O)4m&84+~dtF6^|;FQwtbJuN(O28u<)T
zY;Kwl$%aGy-B(~
z9_91^b?RW~=G`C#fD}ab=qB=_Z!}#evLlyBhE#JJ8jNE6Ju5R=w{!rEUuDzMf_Ym=
z35cyZWX1!T;eoz}?Q+$ZvDta{kg#t=C322(z2=5SOp>vxg;e&q`Cwl!LKL;)pj)w~
znB42lStKDbW`e>{XJ(*7%gVI-h7W{)#q+A
ze)I{OQRXHJ!=Q60Ae2oFJvVKN`TLfi
z7+P8U^{_%h)we5|wbrFvx1lhbdCoRHMZ%r2K{$!1Y2*PaO9;_h7-q1R_OBSPCMhO3
z9(_ad3JP}FnDBOq3u->5TF>-=A6*ZqCKXWipO^%y28;$lS1Gr?g@H{0CRW+
z%eROymf)QXm9I2rvhLBVipue2mSUok5j2igj0r_%mm5Ti$1|Mo6#np|t4!pLmbH7O
zK)op$=QS5FUrpR!fo_bH3Cho32OkWp73$p%E-Kw?^_EUX#OY#ha21tOt!8!%`H?U9
zG(fJEolHDAaMGDp-g_f^capcNRGLa}J>Y4OTt5RZOO#BuKX}|QJ8~OX?|@vj
zv4A)1p%u0_G4?fD-*a@`^JrH{UEy8+U2@~!>#8}l0N;yRs=~qGE{B7MkbaS_#fI(o
z;p^TE2m3sfMpdomuoBB<0gbN|6v9Jcwvb~i?C88RMwd|8G}7Kmh>O19L+R9&nY6NE+iDe1Q`
zH(f|DwDI6_W)RlX9zAhbU@$$e&VHd$ZuP#y{)&Tx|FLwc(NF&+-x9X&x
z0l&iNWQd1KnQtH_ll33!MGyz^z?7Hz_U)QY9TgR7z~L4T(acT8Wq+coH^H~GWr%WH
zoM19~N3gzXZ$)pg-Zb+OdLRlJPu3%=ACbkD?K{~xZ3?l9y
zQK#MdPo4A`x5K*H9+!V00OmuAwnMraaxa$o2>Kg(pM=+DH}TV)>F6a9LBVklkJofx
zO}f_0l*AmkU-8Ivd1>*^q^6%?(!`6CJSbE>@aq2(#R!mz>lmdZCEfFrm5qmF2%GSB
zPysSsN{g?weA?~bcyAzqy7vv1({@m<=sN(&RzU$KwiI&7bY(a1rSf^Csm+E}57A<_p`TOo38KY=+0xJ3}8Yw6p#b`|t?GIr`7uPU{enY4juWetiu*
z`Uv`T98@t#>ZO=1DwJ68Syt90-nyF{^I+NKU3^;FGKb72*7wer6RVl1XGGo78_Lue
z>p5UqLxGXgAe4O4j#(PW@nuZA%g9zeKwTi+;P&m?k6CY)qNMDw(A0NO&C3vjkdklv
zx$=fL4q;K0p7)gWLUIcet@H|rvw@V%N!u6TrXG@<0_ZLNID(!f;23p`Q~2bBq)qQL
zZW+IgW$-kGsX*wzhh;hnWLoF*=9K=U0%^RJks>58m?8vejwK#l(o+%u23k){niG>E
z){N&yg@(N(OGV>KvA(JnqObH(?>#Xw59|02X(A7a!PYm&kwEvlt92d){;C{|M^sN=c4xb=V}9?h$5G
zfJ=4OC$IPihwHPpY#hVTj(}g4AbTsLuX;Wk^6cB04qzjF#;u>*){3?9>X9Z3lfb$3
zy6WhoFGSj{pY|0G*kZjbnvMy{rcdgVnwQY9iUPnYl{3H_{4@Q0j5=Inc;6V(V>#ga
zohbP@?pn(U0++XdBLp}jK@?Lgc~gBVAeHxL-H7e@0KDPIW*ECITJAQbI5
zq>J`q8Yfv^WK3Et+#4nZ6OYhL)8BaOgPwxH+$Y;eZF!WiNnaSX^Q
zIRA0*F{s+Z;0HRuzqT&CwIv8JofPj!KW#fpbnA0WsBG+fxg}bzn3Xk~lx?wo>HseA
z$;}_5k>0vLiO;ws3&efCaXy5AZDbB=n?W19vYE_WIg6;K5zUc%Ocm6`eL|hkW%*)|
zz4hk?ZdiLuP@c3dOsS|8`~Emap79s)8+kESSwo?X#iASpV<*IG6e|aNnHQyA61&P<
zM_frF@W8
z_OjL=zG=(~y6E%?qH7xUX1sXlq>Y*yg_s^VoFq+2DL?X>r$8aH+g?iGj$DWtInI$>
zGD-3A>pJ-1vK@i6v^iY4k+>p|y`iElhI8y=_2-QRkJ-t#^p3WK>vESNX;p9~dmz{~
z`sjKCRc6QkGA3l6(~nBfPN+*O8nny|cGuNUL8F(}CR$9sO*Y43ykD+6=ehZR-uS;+
zH61@+)C+9GhkPB*B9QL&%$3LQ*OYVMj^ByrUFDh`D2_u@+&5jLwLsn|E}o)sD!vom
zK7Zp1;@X%%)g;$+Z_ni0RWO`7Qx@Zu{Tu#lnHZ;Yq$nK}8MM?Higoh*d&W2>!Vo-a
z#W^4YMWQ7)P5a9}>kRSb_x`*hhy;#06Vu$YCVxpxc|Q8RE8a?-J8Th
z7_^8oVGWC%!^G0!k-LNvuj%*cUM*swL0$`?@CglhM|AzlQFMfY^$kmI(TmUuLLqUR
zM2rkfm@h`3XAwR95=kW0M4lo?6n~M{DfI(lGEOiauWk^E!D^8gVXXgg%EpAE?T-%g
ze|J6qi-X35mA!hFOHGt9{0zOiI|pX_t6EQ=u8u5!_1s-~!Y^7$mp#m9Vqy}Xoz3Su
z&!ANp!=cnb(MBQ
zRP`!tQKJGiu^7j+P+1*aOo^YXA@nDML+4CuorCvlp$*-56HMu60m4vj!VUe6NkTEL
zCEwk_y;(0=C?$VjTGP?d^%DGMz)b%)QsU|ZXy
zR)vsI7et+oow-MY*VdfuPSk5&B0M386TtwdR=z$+r`{D1j*k34&jQ#j1)X&Tcd^pE
zI9Q)Dg!*8G*yQU{a&sTy2_5;I_cXBzp8Khr^FI2A=^dh`qi&y5EX9H9F?X2DnRC5csf*Ui<0qv*Vr{_t@$tN5+
zMMcWnQ0wIV?b&)q_FnL`)41wU=^`Fds=iWDiRAn{2C?zHI)Ns{C+{b$`F}8Of@0)r
zMPvxQv@4QXERRLTJJH;?(wE0
z7+trwvUwC5r<>hUx1ad%Y&Y;InF5n;yK~B(zrm#HM*sYCi987S-(}iih(w6js_iU3
z{<+`kfzP*dc5@G;WZFYV^BL>N7j_v3fuhap5786VkPsCLtdxacP!{6w2y+TT`o-pB
z+OA3t7`Z1mBF%(Y8aB&tQgw_&1U&tzk{Zk2~Tpg{4x?66&+{0^2O<03n&9+D!7}
zFbN%hFQ&Run%yo?%sC8Cb3dTmB+McXKJnt=xu&lgsJ))BR+1jB6|7n^tJSX*Vg5QuyJFcH#zM0;BaR8znI^TVip5(b0R-rWcrao;?%2j?
z{pSt)G(T*s?ZF&%&FgU^cktX6$HPGF^OM-vzT^9_b@4&>mxLz&*jF>YzJa00KQV0^
z%zoDAe7h0XvQzp?NXazY|)tbJGY6$De)~fZArwZq_pk5mBkL&?~-0U+!9$JOmUuFd)Fn+
zNSIn2=e<`rJ%4wY)mQZdcZ0fq47UDzA$#F~%%ssV0&f^jyDfNhI{Oq?nS}c9p#uS5
zK+enKcuYd==K@rIg+-omr98kRx`C0;u_)>d@2y$B3?1K;7%HvA4390OhQ)OX^e&a>
z>hZvEiiEako+cHjP7D-{9)6>!u(C^4c+8mPi!P_l?`GWW&)Y*Nod2BP4a11g9Bmgl
zERe|EG6Z0}`saq`36FqCyOnV}jOF>^raVszj=fGqOzO$y9PlXb+~`X;L{X=Xmyg@O
zo(!=>p4w?v-dJV~<62oAP8C;?^LF8Qp56aZ7b4e-PSB%j{?TuED3HuWo$mOf7KBXt
z+M{c#2QYte-zVtEU)H%l^#d0K{C1XKs5fCi=R!A%`nJbeLc^DorRgzxm#|-Wn8Wl_jx-e%ASvC#&r!@Rf+l5swa4z$f
zH!`cwj84j9zCk}JQ$YkbvxnRGp}9h2mo=18rYzDql?1SlxkGrnbe(SmEQUiOb@>am
z+G^l`H#h`OLKkYpGU2+=AVF&VXGG<>If6H@O-x{}K{(k;^=df63DdyF1s-btxonTD
z;zdl`!bAVcjQ_;)yW{jXuBnD8>u_5FEMCoyJU3u<=$@cVFJTV&Cd_6K^%s@QA!H#e2blf*
zm2dTTHT`qoR)TK9H9!x|kFk7%09or(a`fa^t-{ediFMVrcuV7?_s;LlS&yEvPAX;-W~|MoTxJB!hTd}W4W7E)%eRe(Ky26i1mh12
zd{&muVK#5k&!%9`e7NEvyMSB@Fh{_-aIwdM`Xu}76m2DiFZSkjUeRESmIhaw6}pz`
ztjUgg1MiYYQ!WUR(rNdkg|z@du`Juw)%ffTzO9c`g!v|Eai{f55=scp(X7I1hOH;Z
z8}n42eAouRoS=++Erf%%{{5avL6c3SM|J=pxRO
ziFKH+%FG=+2>v(dwav)oq&B_miE9Kz%+EGhKsYPX-9I{C%cS+L7rdRN
zGApaD2RmAx@QFj}JBRWEM&PUSXZ{3RE8p@2Kk<8~-Y?1qy`eMpmH{5En<J+P(y-e$0
z+3pEddIO$QJDx!K6XL7#t}8>>tcpi13-6^u&U>PAFMn=y=)ef`=_QAmV+Q-aT2m6B
zZ*l^nZ9Ab#bY9oi;R^c-;M>S(GoQYQ>@o&wG+`js&@M~9@4X{6cBH&iSxGjaZF3IP
z(QLb95TH`AaDTUgf=lW}+veMGwf2^70qCDzR{8=FX}nk`>`n~+d~JAJ#kx=pAE=~Y
z-=>`{jl{+$(BtuDQ&~S^VgCRmi>LrG>`G`n%oysFkk%!pTEHApDk`TXS0_p{o%uf=
zr{nM_^bcNbleZX{Jr(uK7i&AQhKR*2R#@e1r=F{?+XvtwrI6QQ#FPj`pyjqO%dWtHwjjs#>|j4
z-@Qzf%>7SAgqe!P(*yp|FB#Q|QY;O9$HMX}4H%Im{$CM2o*&zX`1ei@Qz9(IzjPm_
z4v&T(o45a2;NER^w)=2ZeyDhT#YK|+|Z&HZOAQ!%~7t|kS2U{
zl}g%)-1e3!eXH<4oj(b{xibai%`LX#&>;n!68CMp)zue_XM5)zzbZ8i!QjEhlCqL`
zdrLpSKEi!LY@q$6_8?yXv*OO!{5j`@0h>1<;CQ3fZN7P!^km#%7Z-NY&AhF6CnEYh
z#_>-3t1Y{uW7q5(z(BsK+)~zdIrlG<;}7PVk6Q}96rcA*xFfuyk7B95;6v}v9kKp*
z^BOuqDcP!}>Xa>d4=|0qZ);mv=8R!|J@lmx_fX?WB->5N2>a`}>vIXemaODYCzb>I
zMCQz%281?+Jijrf@HbHHF{o6#j{Z2LFCG*=SoZrDdM&8xMQ{kdHn;O}?&@E39|gLm
zMj{`iLBGzh5)Lb?gsN|c{IpGLf9W^#3;C4V#!_=9ZGFamD3Cz8~_H0Mn
zqv-hw^VK(Qj-j2}^Bx8V1B
zdPrcxUG=Z0VfzHwVLxIdPIut|=M|sk{c`hz%2SP`__s
z`yaL8ktUMz8V#VWt%G|4;`BG(!wFHTI;ga?`^c?$I3r{JVV3C1vSRtnt2K$eVkX_k5sNk(BnR?>T$Bb_U!z1M^
zr~ig2ER}5?#UGCh0@!3rlQ#65Y70MoMi-=5r0BAp?zyAGp6bk$DI{0~psWRXCb>Su
z!X8X3`B(-i^jb*bM{qPOn=WtjnC$iThlST^6uh!phj=wpUBl&|0Y~U6#jW!F0S?3s
z--c96Ni}M)LkSHw;K#nC=SR2!ZdMEK?TtB`pabb7%Zuav9faw}yJm#c8C-hnO;
zbbxO4kdra_f-Vl|->(rAuFWn-Muj`(sOE44ESJZzn;Q&0jI`Hb!f>3IgT6bCa&E8h
z(C==4OUmU+Ybnfz&lh$n4$E?T{lNUwNc}=sLZK6RROM#tcn0#D`@nV1_4UfJ7j8Je
zh#DOHjoQrn2(>eA)wo^b(Pzpem($pcb==;_bEyn7Vx->Vi)Q0R>1A-9op@0V
zsuhB+1iw13NIN-yj{p4<#d=1|POG?+{x|pidU!gn)F|2x>yRD77l#1MeUALr0`AaP
zc`&cP6{sA>f5-bI85F|2Is6WVk2+=CDtsO^f@uE7L#*OVLyedw!nn)b;l&=Pyj&3k
zXnh2}X3^SprCuXK*F4)z4Zl-Hb)zZj@YTuBgOvW2*I&Ax)D8w5O2G`>AmGnRpPEtb
z!^WV#A+xH9eJ3V%+*?MuMM!dtUG>Kh{H{>#%g5Jn+((U3lvJlyU1s5I}o%3SV7YizotGA(Voy$Of%FSC0`}N3FgU%E_Naq1_Ec{|ENppEnl!Ve+~
zKDIxjp%>6)m6u%cW}X2P(6)-89TU$6zv=9=bE^Tb^S8EyHnSYZR|eqHiePrjb6u)&
zkc_B|M)!3YD!x<>SNz1lUfIi?S-_&io+eF_v>F-LaHd1pVOG%heGzapl%66F^{&d4UOn0`*wva*$&^ueGK_h10Uf8Ze%N*U
zjNMHfKbljRR*B87X#Z8`^Q+`w{?uq0{*~sxm%*3;IfgVrLo^a5K8DP
zx3(76&U#uvWo>%Yo^AMg7Q)xtASs~YpXaX_k#U4QP`
z(Xb%@8nKzN)~&QkVHjzcSr0$pCJjIr(|8Uf>DP9~3f@lr8G`Snp3h(6KXv>`+J;>r
zA2_*!ppYp^mgPGYl+|+R&)wn?0k633ht5*1Qvz3gM}qF&4GrhKRwV$JXiMVQ??gGC`hxa%qU>g}!K#fAup>ea`Y+fwJV+vV)7lb%AP
zW?q_%kI;)u6Mp0P+!ES${f3-)9fA&ye>c@D#oz>!x!1^Y!@LeAX(Pt!FyH-0E8=eZ0d%E_jdVC!d)_h5Vh3PSC>>HkaW3#ETW$zF@qFKcy9
z%{kNy`385JXK29K=ddA;GG}XQJlw}!=(KQ3R&EB+YYFA5i8Pm?gx@O*15*=>M`bhB
zK~}v07ZAM9vK%I?PVUnT1K*a{9fo`Id<92IoybE$XB<8rG>MPyR4l9bQ5oMiJF3;*
zE5$!h`6GL@c>M~OGN7!(SWU|{GOyo=t1|RA5GQT_9G8-(ZVd?r3?@Ct3=oWex#%ek$T%tb+o_}FJM
zDLF%`@V5Vb=gvhdQ}q$_mtk~Zq%mWa0ENDz`^-g{zJr!w>HKMJpRnrL%B|Q7*u{zK%s5
zZ9vEW5mbp_eNTzmHVXA<@{gqCeX&gGFKTueD&OF*DnW5B+~C=r*&{#}ltc!Imi6fP
zILFPJ2u{5E2Tg$z}%X-eP_i
z7zttI`CDemMfm)>YdmL6Mswu)h^${uc%P;JH9P~8)s1N~MCAM>5$`pItlumitZDC$
zFlFl#0pmf7gY8F_N89Tw!8Fsf)%kRg3I|qrtZq&z@a}D?o_+j~AawE@u|O
zg-ej}ZuvG5heOC)|6odGq%21hIgibPaHeESP3>KD1VdRnBdPXX{GOBjS~+!Q_S7~w{Fu#hrw04x3@xypmMKvhb$WN`*5YbJ}
zDsU_qN0QH)ui!wOYqh*hGpQF&_dZ!XC0p9;>DR=X)a~=wvjHt@oW;)(d>U
z(t_1z0|w4NXkSYZL3}J=vhDta0JBmN+anx-p#GYOOV&pDH=FjJ?u69e4OaYTx?~J!;P{aN_;Yvo+#&|LtY9MvsgEsF~aO%RR9GSa^wE
zDm1mJLeQgW{LQn?+>>@84Gq{}Vfwf5Heqs$FS1Nl
z)L%h&*IF$jtK)A4jNia4bnwc^(Uj4~PXBC@Dc`$T4!bAoCm(l!%haj4%`DiU^B(tu
zqs)0=XK@|uE7$*nIoG=kAs1bvDJ-Sq(1wJNEb1
z+im0@jVhhO_L4g-Lh2jAa=T!;)F8+H;DgRm@lwq29|7qJr+c=W@YW23XN
zhbTmGV)|@p+y_=DxvO4H8r{58ep3ecV)XuHn~FV4|MflU%D}kIjB?Kptic8ASNXzF
z+A>M~)e|J-6IRGCk+csKTk5@?r}e1GpV+Kl;J7pvFl!x~xYiML0hPrX226=|tQ{N7
z8xs4>JFV__k7(4J;CDrLWq1^$^K;|!ntwx*goGYj&;2N3|86p^V(Dc(PA=|_Am`1g
zP%ZTJ7mw6Ps}NuORp}p>hH!kgJk6r^*PX|?4D6O@0Qh~=A`BeWbD?%?mg#1c`3AKU+0~A>4tx8G%pLW==;?>x(nbP;-Q2CE9miMp3oq
zIznth@Cd+KOJIA!j`|!m-U>3IM41x-s9OxeW7$-$qx_}*s%8q+o?@Yz
zKBfT-OQ!1fJ9oh3vVgLqo91;ELAxuW_77R=R}H^ZzZkJFM+WP478qp7NadF+nVyp&
z2;-RgCY7}iWqavQXb=)u4pWRH1ANtjusBc}GHQ(=H%QppF
zy=6pZ;u84j3xzTESx<6KwUaaLX}SH4iWbzs1WWMhw8_H5b28sdv~Z)1C};FNSokfm
z4$9g+(^?chOPaCYZIf5|jd8d(!kO(F4BKPf+-`%==UGzY104w(w2H1N;43FrZY^);
zF&wqCv)pO&cXbLy?-blpw`~S?19sLkb~d$o$_N=#(F_BvjdwV}@A1#i0GhQJ~AN~@5m-=-#F
zIux@c;}E<`rA7kP$-~CQ(#l>RXgOBubaO}r4Q@76@R;!k8roo*TJ!4rxk2$hfY>(O
zUPRX)P5#AK(pX>`r|x(*A1P$0d=gZ&7%iQ-t`dN+g}d~Zcx8aw&N{rpJxwUC8hYbb
z(X1f7xT72}>tb)ucIKYc03FAJWx*vtlON|s(R?C_t1Pic>W+0yhgMa$8v6b&SX5{L
zB9>Rce$aKdT5VTfw+)kNQsE{5?=$cx0AgxzQZ;qK29M;mX+>Y1Ba&2p+)_
zFzZ|U@p3$p*6tngsX0&6EDK5wVHX{fy!55NbbS@B+O}!9p87uz{rx9=Uke`_7mQwkC|Zk2?a|h%-k^G)#Dr
zCK6d1^p_||whNuE;PviZ0{UFk)-IufMy0T97jSc4nxshp=%>c&S3oFWL*AeRQ6=ck
zohHWjtlu)(3p-45*$^dftJH
zx>FvjubCH-c%SON0#J1SLba;%n%WWo1;^7SfPWtv}Z^jtPa;&l=9kQfk*zti?7?eWdERW6WFhG`EoVBS9we0**f&5@s)lHM_
zEY#Rg^lpmS%$^IxymD_DVpDA)h8c@fA?{G
z=cu~$E6$!4hH`o!^lKo+QgZFKwJXJ~v<{c-**g7@2J~r0kEpwj{Xg~jC*Qj9eAyhB
zU$_t>pK_W}6<{G2&3^@1>{oZwG`X
zm`769G4MM0*U=S3>q%^E{p8C4&
z6m_1?<&qKIeck&fMe^zIUO2ygZ@0G6rm5-2*Rf5h^SId9^K=g`S>C{H+9=EtdVM$h
zduN+Svl|c+w8aai^k1Bs)l$LW&hsq>yj#hLV9k(b@TkZ$jLk
ztaZ2hd|I=8(eym4K5|eZV^Jsi}@uPo7yvLjXx$wN`iLgWq{C(
ztgEqnn|#!5568_;rE+*IWg%zBFr<}N8R3|a$MdFGz_^j&=8QI^j?(zNsOj_ljkQWM
z=wwT{25jz)>K{S(58)@P*w4Vc0OzsCew*b=Kp^XBi-y4WE9gNjh4_VNz%y3BHLN&d
zvu^rGCs`^5+?sk~+E&Swh6Pj~U43Za=N}_8_Mvt-52$l6SXzkNe6O(A1b$ZU&*CyC
z9w;O?E$jMZWdr?ld4&a&V{GWeG31Ta3Tj5Cv2PA2*WSmvSGhE#b(B58T+K^o#AD{?
z0$*R4?AhAS%UvrgY>!}!Nf$S@4K73_Ra(NX>YX;&^t<=Y#y3ekF!H!eUUP4^8gKjS
z>r65P`jf47IZ$wOK?GzQS+yE#V*O=U)Y|ApbsGBs%uh{)dgaXK0wsiM4&&pjO^?$D-YPvNj&q37kp~7>tY_|6P4WE|!ynVA
zI{nzOMtfctOmSL;oXSxXZi9&9YG0~VR7?D_&>EM8C5!5$l*O{}Ds(OPS!R|oEQi8L
zO~7LgLp(8a)3Ngvp=Z<>O06x)m>87ihs_o0{
z4{QjH=vY1NVUO9K#?Y~UJ=oOpLU)<82v@<`U%zxs6?nf{(@CdGZofIxvC%@D3|_-L
zb=>^1{xt_+GE~P@*zT}bJPUt)$UVaGpkDrt5F-J)hDb*Cg@ST+I6sKaw--?vw8Tl&$HydVx-h(*S8#G*x^rnG#+
z_DBP+pAYJk&cRJ$tGZXvpZ9reSmQG^o+5y#yJW2)Y^J>YE-071w+Vgld~2*f_e2N3
zx%&7G(av-`xDPTIAdhvDUttoddBP?1G(|6?dRc#$?En)H{JhNd=}btq=%)ve=*j$V
zC3o?`B8o)3bBM}uh7(tJlfp03YK6Pl!2#)%BUnA^;c{a7Z}jMiuY
zsQ(&l&Z)MQmq0NSl5SSI-;W;|#PxBO&Kx-2O=XJ+mMj@_B&lJ7Iuy#K_4;nDyw;UW
zS5T0kQbdAm75XSR1KIqb607m1gkkxT6O1~_rO?Ii_?J8NK)s5`>>G0_;zrk{JuLX#
zs^K28)wjOC!2wSJ0zkqE*iS!R=|Hse4nY9W3^zGrhw^J14u=Lc>r4$$?<}ch{Y8zx
z9(?i?NcdxndvF7v6-g*fCZz6|2zd$kbWe_>X1~FnN2ke9Kl!Yqjbg^{8)g9`ZvmNb
z=hoC$FcnGJ_Q#AZhiUBB2M?B9GlU|_9~T4e*QL$^Gy0}obWF>WY$;~fl#gZ2lqZU`Y#ebvQa)EfxRKKuymsqnotWY_-OR8hL|Wm_t`Ph8J65i#vvCf+!Jdcu$!P((
z68V$aMGRS!u;9$#+rA%Y+!1%sqY_q+5drwOXFZNuj1v8|r9R=|OO@elm&lppHT}CE
zkzT)=d~?63P6vAyo0hqsl1QuSqC248Mj>`(^P(i2hrUL!N=V`WYs%^K%j|{y+r7pJ
zlr=su_S7pw+KE$OFmBC?_uvwrBDK3PEMU%#H<*b7o?8oe>O;eho|xh-0`
zSC4yIsLqDkYT*mnw|j<YTuJ4xeBsAR6WR1YNdx=Xj}8-bGBoxZ<~O(`vAj)~EYZQfB#+
z?LWi$T{$-ok2XyEg9p`Q$7UrPYo8YEP<;DD080~NY5@`bF6TW)r#mCGR#hR26okR}
z!Ez96)Phsm+TnG451>Bx+o$IzSuM38ht39Hpeau1C8wAl-E|}HznunYcfD8Qy#IbT
zt-N{54}4r;aO#^bz~D3xsEgMnGbORgh7D4%pagX9bc6^1hyMwcx}#}x;GK#frSxkn
zfqxn_&W0ik_0N+Je$v4S3GuzbuBO&Q?MyG-Zask{kcp%hSTU=nekamSb*7L>0}`xM
zPF=}{s@TY(Zh-cSRb0OItKXarnciH>^dH+>ll;Uh;2QPXv({kP=383UbU!Dn%LCAu#cm2$=MB+p|L0}DG*j=%7wj3h
z%vhB-AD_a39LMR=ChbhF#f;XWDKP(_Au2V)?Z1lEBcEpct9gdTVDz_?yD4MDa4_pVJZ%LtDyd4U(0$A
zF8);-AC1L8_i;SAx3Y~)st_6}HcmS2HYR85e3klJ&2i6X8jkELoo!y|lns%$B$1tO-NT|*2Uh8vR?IyK
z_(n8yeFfo%%U6R)-e^UMd#hV@&$TN;EFw9dl75vue=RwdG3%pTy2p#T>iR8yDa`J}
z=9=@?>PLM^*ejiUiFPDf`u&k)2LBRwaWQFpXt$=vV94$b+cD`PjVlOhqdZ}Lyuue4
zV&LWUI{zE?bT1lk=||(CO30fuW{>=|3Vkean4b5w2-%9Llvg%fWDFRe4^DE?*!5=*
z%&=E$((DM4P#(TxHuM#&NN5^t+xi3gNO)@
z%LA6+*}lBde{AW3bI~{W=IUto+SQSA8%8Rt#E8n;PIt@mI+U3|eFl*mxm6-RDkl-W
zT+5Y84Fy#lIEdaVvJ7cSAJMC9e8?e-cwS(|Mct~`RMUC>BYkh#+9O%gvR(x^;Nn#>
zDPMAbc;U9mIgkkUM~_K{U8*H5W+2~Ob$Xa(j18A4&L=38%K^XOXoSzNLrN#z(rx@0
zQ(ML8Iqsm>s#ktr;Z^XIhpN;mK~-vC=_3+4l7u7U0TP1QsdT3D2ls1K&&_Vbmga-V
ztFkZ6srtAlrhm;kx-Xi&U76&j$S9rxwNH6T?tseP6HyW#3`ReSeG>vZ>1Ug;!S|eC
zu{=B0tS8*VG;E$5U(do(N>j@JAoHtO=kY8HQ0sqh^U>ftbICNj-1+CpAK#e8qqWY+
zJ6T)28+j77An-S7|34R4W
zdn2vNzrF5CD_NCsH9jHAsnj!$+2`)RJX6>+ktbGB+P8LVjY4_@&Hn8Z@#3_{$#3D_
zWd5(+{6`B@whbOBig9!
ze_4gElCg=2*SnU*M3lO$`3OfM;!jeLih9pTg&nHmZ2uua_`m#l5_k%SaTs@AFP2?3}C&(7-jFy+*=RL1a0rVw@{sU}IHl%abaN6d
zPFlzMk>E9{p%mJuao(oMQSWeQHpRaiTL0r_iI>B6Kc16CS*$ax#e>W(#Wto~fAun;
z<7L@1$$r*{CKs>eotOX}-zq_J3SHOxx0n9N!UCBul5T95z+4Wsh<_g?BI@{~h&XPw@P#OlW&w`#ckWtz8)i~g;F{*?xtt|r5*RPv?8B4E`|nCbuUOOof^
zd_G@c8)ouBwo%Rvsr^ed{_5;^v&b*GY{T2P|5aHb`wZQSvewIC$SeC#yqW&R8l9wT
zqZu4#vF|?r`p*vQjN$#$IEM}WC{2nquby%>#(Cw&RhmA_0awEkF>;Y0
zeDxFy461JcUhX_4=HFz)v
zuFC!AQMqnK%U9{wA>>c>UBg8fn2DnM^tJlw;EA=9i7Z0J{$ap)3{wQ3P!A
zu2j>aYLR)h#k#uHIn{6J@@p^0JGbE~8zXro!^Ot!hRI{SLj@8q*T?1p_3;7QO3&)t
zxed6}j+s9SD4Hir+j2X9rj?9*yWWPJK6pK@ZGesqn57!g)YVS+@eh6utMQtWiH)g+
z;T_R?bhurLI{8IICeYu&CG7&z33!nG5!S|TR`()Fb!VRdXV!%&7^;O*
z9kd@k+DgQV5J<;wl_u5ez1U8Hz~9aR`>Pxz76-*_wuW5?n~UTW!fIYWmTft@D-g7+
zWB`;_qka|>bW5XW=43=I(KAyeFN7hp6}5!_v~`JR{{bpEDjFA67iUorhwm#D!0CY$
zO|7NtdD$y;g&``Iv*ce7
z>lvd|=reGnM*MvBb+X~#f-IN*37>M~mzATxzUjg^5SGi78YGEG_kK1dcTJK8y@<^8d>+3-{@DL85%mQ?{~q82KZ5Jzjq!t5rulu$tO)o
z1l6{^F=EP-ld1*}Wd3ksG#O3dv$R<6zEyhvja!R7aZ1djq;uRnhi07#cb+^y)f9l}
z1}V;6b#ZZ_9~EWcXwk^34hod2a-Zlv#T;aCylgpv^k_nm)tq4ST^+8=?Y+j;8j
z{6CNOQqfD%mZ+%f-&cV?bAR6R{1h#Kd~rnnF+AfnUSr;pCUbQW5F?}cqLz`ZXrC_h
zXhRxId^;9}XK3&pK1JKFHsU7JXyEL-ID5cBv*U5Q&Dv;@q@;nea7ABI10h;%$6Kky
zNl~fTr~P7p*Yx?nmQ9~_G71PNt{y`+!P_>(!UD$fnYuphu#lOFW`_*j)Wq5s<=7Kx
zS2zOq_5$=`N^L*vrd!>_pF*D3iP=O1@my94dcERWcAz~~kB@zyfB>hjV2(bXGWY(D
zx&GG~q=6-`Xlr+ZUgfPM?M+^Q>sa~O*@73pO#vLjf!sW6MUM&x=!S-5mm)IoNgFBw
zoRjj-4bSP1+il0n=C056AFJ7NajQ^xC#sz)5T63MiL2o2|I1o2U8Eh5*ESDT=emy%
zqL#R1ONT`Hi^qyWc$EmdhJX?iTFbkpg8R60$rnk{umT;^PE9tpn}sF731@nJ*U)CZ
z;H|rGOavDA^u68H=60tHn0M)LML;E%=j9)<$UkTDtcg(L^Nc)sy4TF-h?18E$u;`_
z7y*Kz@nnsfd7+8v|K=A#Q~dt`$!=8(f5-ni)^m>;0lI=l39KyrnZr0qEYVP-B#
zPU~xSX<#Ua#JU)0mG3&^7~qk%&4J|DjxJ_
zeJ2helzZINy+n;q#m20$QBo`Hosm=A$4l4EYlHN%ElQI63MQC3=ywh#Xr?8$1!a(e
zwGEaX=uP!WSbVRw*^hD34PTOV>}RbhPzecwaFuQG?K=3Cx8K0DCYV=46KmbZx*|Sh
zT$?!ush|zAGb%B85UpGlmpNJ+E~ozG{WZ~t6#N5OC-QY*=&_P_tuiCO^JWP|sN&=@
zn>zOuLg<0@Zgn-N$E2tM`hYg6tEs^L$RsQXDH}BX(!9ajCwqM*k2eFV>da4j?1`p;
z3c`_0pDNZsow};J@RKFu+K*%F5|gsx;o_%y>56XR|A<3Gu&ljxlUiLkEY16=p9t;6
z?qoqZyFVDaEqy)h_86tP`Z%whxAQG_FWI8j_5O_K-OTLv@{SPlC)|jV${NXk;_O5-
zsA>2%3DH^~$?yMpcLSrzGpb%AB&Egs$1(r@k0omef{769ETi!dbShdT1cO!0>byv-^H%kqdB=O`Q`nu;Sx~+H)e*hTh7Dz<
z&;F7Sor6hzn3@h;a5rcGJWJz33ek-CHy0|sAvSu-i#F~S$$8XltF5iQ)Am&>A4meK
z+5}ARRc4OwGP6G~N|sJ=HlQq2(C_S&c=e>-W`7V~znR~z%$4yKA<81EcDP6w!v6*?
z6-!8Q!YU+Ep
z*3)dN#~+>ON6>0PlG83v`N!rfHZ@?Oo1l?VjKSJvV-*YV8>U-p=bbaUleK+L4~%rYnaj
zvz!C$cWNB`%9FkZxO&u5o7*4K=mH!Cofb=nT`26xW>|9{`f74542hzVzydoTald?>
zOB*v;hk(?Tm{ugr>2)pMkX@`2x6ZY?%OTBVW~iqGr_k~
zJx;kWZ_IzLchkF7if%ORl&EOMGbz)%J%X5q9E@Cc87r#S&5CPPLe-FIp@x;zYJl~C
z5R7`*V_F`!=7rRfLLEm08yw@kRHP4(Ahtv;!DbAwn8CyoMYv#KZs+tc?Uz-|ELYc7
zoGtdu5Pee}gH0BA(^v`nTEYQ^YfQ3S9T=S|aUCV8FPqE&-MeeJ;mr)^5fAp$4i;jd
zs&|oOqJ)@ay8L!c%A79kjNAbXPj5Ki5|?&qz&H29CLb?#C*4yhXw`}VC_U;&ZLP-M
zMLrjE%PyfNQIk|QL*>Tj-}RKED6C*oI2uTYyW%O-_JQYG)Bey&-y`doH|J=|uCD#l
zwgr|k%u58}S+H{WNMD)z{O^LHLVGXp-X}I+I@~d~AYBN~P=#I(d4GHmFsiP*zjo=R
zjR);*pr)w&!G8NxrO^t2wK@%A3xd!B-6ZRPe&H(
zULuFdLx{OGkPK7eygjfcA|jfa6FgGzJM@Sibv&H~O|WsY7C+=ZlQem73N%N(H70vW
zVHm9Z(U?iAF&R+7w7?|U0Ed}~iN$<6r9221E&vA}S{%Q6
z8aU{JR)SIS*0RS25blZH3U1E;Oc|-`m%-dQo=Zc*?~t+Y`B_mkZ6=!Az2f4&EYT~e
zH=`Jt+uEO4WDb-#>>-0;C4I2T3}NWoIJq;S)OG{LQ!u3K+GW{hLS_r#O3U=t(**zs
z)KW`q?442QI#)qnNstC$od=3r)10adDlmO`E28SlqM{-h==??S#vbYAo5pBJYBBMH
zD!HZ~eh7aU{xVt0`bLZ@oSZgzAPQ>04mz@SPRhF}-yN@AQ|uIWw5d5R3`4hyw^Ehn
zFdrF~%Ny43;arxrjl?qKJ`)!VTGNHMD!{MdJc&r$^3h@wE-&|Rs;(oE_
zbh#DK(h^sXIiM-6)zrLwXixBrNvWBnAXLF($cDqLRJ2*EeTJ
z>m=*`aJY%0Ve6iH1-K&c=oR+MD*jc@Uh)|=*pBylF<&OW))_{QKI@VnOCg?0Q)^dA
z3(59<2g_NG%4)6u5DFI99*Nu;ULYxdyUd%95%s%1*BF9M?6PHxWA5KrswuYpzR|`>
zlu7#yBXlQEm4zNOW5dMU6)I8tZ8SZYbuQ6Ifyi+on7}aPPO*xuy*-xG5<m+J_QQ7W4`wZ;hU;;CAE3Ygx?pkfrBcD3He8b;l#cz?(mP!A4bqPr
z=%c?IG}$;cpb;T;^S3?)91g{ABk=LqGUnbV?9h?A)~QF!&}}8ZgG5k9j<)
z(=c;GOr>=$#oD(VtBP{kDB#f>Qj!OyelD!@(2vI@M5HNkOP{5~)mU*|8k}tlm{!&(
z)V(Zz7(=(24-lCl95u!)IZ?0DW~PyVp+q?!ixgu2oEpq+7|>ckq2T7ITnuVA@vTSk
zSg#fZZv^)#6eNI1MQzDF{$qN1*t07%^dhl&<#$l8aLb4dB<<~08V}^uT$!5|I)|ns
zwR&xN6D~g)OI$tJA=GDneTA^IeaOW{^tbD{X?Qq5s1=q5fabJzpcPvh*Hjf>$UrHm0bH3&Pc7mpIf!w}
zJLLv9WTYYJ#IK_ceF88m#An1e%PIY
zCVQnb@XZB#WMfVI;!^ANq|N^XR13_rUw*8W5cYcBlPtOORWqiiJ1Ga-a5kWXW~!Nt
z4(1~Tmof=bOpAFlE<=lV8nNY;d4RI-!?s^uBn2x%%CVtAd*=i`V;-Y=Z%L6}t}TcL
zFZ5OvpbXnYHnb5@XB#lD+@upC_NpUi8eh39inzn
zBOFg#xAdx@ML5Z^+==ZAwPpCtt`2;HVH51C@<+1-);$b7ZnC-~OL;#6)=!x|;@%sh
zcAc}Eb`|Ze$JJRfbkc+%Q|5Y4T}UrXo%N^UpgDj{`!zPYL#~=mpL=S|lj$1orN(nh
z7F>r$EVVwuS9Xujj_qz;n4R_e#4CS^U!O0NJx3m|e-XC*MARLIE?!{iBfNn#myUGx^*)he
ze?HFTJ%v&+e)tJ?9SfV0U6&2@SWX9uu_WWpH#o-=qq)63py2&4*sE%K5OEg+*#PA7
zYR^mP*ex5F)cU|XI6
z3+H8uFS>x!`TKeU7ExkR5TEINJSb{5Axg{YlI%eZ@F04f>JVn_OSIXK5ZdtQGN9UO
zbEE=`;B8TNTGe8ORrc=1fK~el4{UPHn2}tV64a8?${55(%Tqy|4qIa^$z!vAo(;1^w*x~gV?fkh8nPJmC=|)PKqntJ3_M(_@8fANaJUCj{
zr`bJrGWmljPSu~5fa$Ae1NBzLe_s0oEq$DKLetjT3oz0vZ!#3$59KUz`OGxjm_SVJ
zk;JuK0O;d?7hS|{jWMxTpO%|^#h_Uurc+Y{8Pk6X+u6&oB1J>>Gb-f8UG5lsl4@bYi-$<-v~>LL6+u}TiV7hRNQKb8Gr
zOAb+lL<17-RUBMLr=JI9dt@Z*d*wuz2NW_eiFylER2kM5ajc#IUM9eL-hlQtIk
zX#}Z7ZwVeZe$t0VWNf4q!_(9v-&l@k0qc_-+afgd@+xbwwZ!kkAI4vN3m(ID@Z+_Z
zgqUkUi$rLKdj_RdYcAvKm;T0-x%_q;(8$YxRZq^-?-g)!m#He5w|pMg^ga1Qi#@F2
z0!>zS&3M+Yy|&*@@lk?ye?WwUNXX*!zrLW<+Yu3M7EHH(7i_L^XWYh~s
z+t%GC(}iK1XS4~%ED-7>`wEPt#ypIfHQ`Ym&>qq(@NhKCh0YfEx6FUa>EBCP15KmK
zjyuQ)G^}^`l8ba~bNR=~9BVfN^hJaAAE}MY-8wI;((-yxLTyKaEXn%(Q;$1nBK|19
zS?N{?yc|@r3=hsvTNabsHkzpdHKp--W*FT@{W>lRnHt?8Hf1q<00pi-;M!$c+=woh
zTHE~eGQ3EP4_eCi{I#y~cN5GoPBy$Bfne5afM_ew&=^_OfjV0=OY6$JTsIGBKtY%e
zO%{1gFJ|zG%l;Ys`Avl~gMP!Waz;mQpN0{tQy#w(VBk72dv3-GpBd=eld!mF%J^XJ5ntx*mn&z`hBQoL{9VL-T#6RV
zeJ=DVCdWHGGTPU~Z&z-myL3j8E2X59ZmL;A;2*6Gui?tO&&^&oAlJsgVmPnZm^FdW
zK(cp=MDl|2Tw?i9;3^Iqw48@rie`Md(;ywr(3aacDDPGsYP+tK*XNBaHZGZT|33AJ
zS8@4slRp`mO+rK&th_!k$*YeEXqhjn%fZd{v+j0dG~VoUBL^x
zO07G6WUdW>V6Zn#b?^M>8Xd1x32J^PWArf83nMGD)E*PDSTym}v0NpPA
z{L#1;VjEauT=F|K@iVc+xa+9#sr7ZXBsR5uA@E|D%3^Ch;QSp+?8WAVO|8XGrddph
zOAbyj>2amG-Fr>c90Xy=C>ppcKqh3nh`BZzKqAss|NTAHdZrW9J@Iw-f
zD$>c(%5~}@Gz)vM`48v`Vk<7|qaP~8W9f(>1PnK0WQ4mZJABMkeFY&UnxkzB95J>A
zl|O(xip}KmwUyo7ORHY)9QR3Z7)tbl!7D8|y8-{5enG2fRJdP%x|}yp&?^3dKqdx0{)z_XI}Q|AJvIP5vwrp`{{-0Bev42
z)SFIoln&HyH%VDMoK;)0*eLIzyi@I9m!Ttrl#=_tnbHn%?KHSawpVd|@1rLo!iujQ
zzA#JijJP{X_{zRa+BFUEncj#IP|+3$-BmLG;tGmWvrIa@Lt$eJ%A*l@BY8g7x#V#V
z!RCxoQL_qZ9OK%5=4K{g--ix=sRqn5(OI6j7tYP|Dr!+|9L7TNeOdVeQ5xL1qZEx3
z&jjlYW|oxLAPwcPNHm6goVn&zQn_QG=AY&KwF|ZhS-F9}Lbqu@PwYu#YWWo@^)fa^
z5CuPKo-UvJ)@z8>uMTE@e7xPE4j*w=sUeM4(4b#uX_buWl6KdLTxd#HdhG-td@|kG
zB}#b@H`5O6n$P>m7_l>X8bZuhMzy=!)Kh+3pYAw5O6Wa)>$Ho!o+|`v;k8yW#rnGY
zuE!PD+jsG8Za^RQsUP&}_>R1qx_WlVg{_Wfq~vk?VPrQg)nppAGID?r7s>cEn@4kB
zRFWikgWtXgfO#J|gdcArp`_f2F}KDw+MuF_9w6TzaZ>{ajhjE+GWi>P3lU2be`7TKQ2l(J^a{tpZ#qUfpYxS^#dyvb(!Uajy7i(D1RHYq2)~Gg_
zyDDA&J$W6E4N6a!0W{UaSPMqK#;b#z=N^Wpr_^5lS;JlZ(klp=e@#SbWEzF5(cr4Q
zzUoIy!k)QMKjZ@2T!!A7haTfh4e&<%KXJq{dShC-CHQ`Qag?!@{4WKgY#=i;>Nv$yic(*eBKMzO8qf4Lh4XcKgu`BBTm4Vu@cW9%VkBE7`*`3=Wyt1
ze#P{Izveq>2+*AwUOM^7@E-FyK{ct46#otLD(C||*Y1GiOa8%wmS@&LaokzBtwE-!
zlOH~FVfZ