Merge remote-tracking branch 'origin/dev' into celery

@@ -1,32 +1,33 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/universal
{
-  "name": "LangChain Demo Container",
+  "name": "Langflow Demo Container",
  // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
  "image": "mcr.microsoft.com/devcontainers/python:3.10",
  "features": {
    "ghcr.io/devcontainers/features/aws-cli:1": {},
    "ghcr.io/devcontainers/features/docker-in-docker": {},
    "ghcr.io/devcontainers/features/node": {}
  },
  "customizations": {
    "vscode": {
      "extensions": [
        "actboy168.tasks",
        "GitHub.copilot",
        "ms-python.python",
-        "eamodio.gitlens"
+        "eamodio.gitlens",
+        "GitHub.vscode-pull-request-github"
      ]
    }
  },
  // Features to add to the dev container. More info: https://containers.dev/features.
  // "features": {},
  // Use 'forwardPorts' to make a list of ports inside the container available locally.
  // "forwardPorts": [],
  // Use 'postCreateCommand' to run commands after the container is created.
  "postCreateCommand": "pipx install 'langflow>=0.0.33' && langflow --host 0.0.0.0"
  // Configure tool-specific properties.
  // "customizations": {},
  // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
  // "remoteUser": "root"
}

@@ -1,40 +1,41 @@
|
|||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
|
||||
// README at: https://github.com/devcontainers/templates/tree/main/src/universal
|
||||
{
|
||||
"name": "LangChain Dev Container",
|
||||
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
|
||||
"image": "mcr.microsoft.com/devcontainers/universal:2-linux",
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/aws-cli:1": {},
|
||||
"ghcr.io/devcontainers/features/docker-in-docker": {},
|
||||
"ghcr.io/devcontainers-contrib/features/poetry": {}
|
||||
},
|
||||
"customizations": {
|
||||
"vscode": {"extensions": [
|
||||
"actboy168.tasks",
|
||||
"GitHub.copilot",
|
||||
"ms-python.python",
|
||||
"sourcery.sourcery",
|
||||
"eamodio.gitlens"
|
||||
]}
|
||||
},
|
||||
"name": "Langflow Dev Container",
|
||||
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
|
||||
"image": "mcr.microsoft.com/devcontainers/python:1-3.10-bullseye",
|
||||
|
||||
"containerEnv": {
|
||||
"POETRY_VIRTUALENVS_IN_PROJECT": "true"
|
||||
},
|
||||
// Features to add to the dev container. More info: https://containers.dev/features.
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/node": {},
|
||||
"ghcr.io/devcontainers-contrib/features/poetry": {}
|
||||
},
|
||||
|
||||
// Features to add to the dev container. More info: https://containers.dev/features.
|
||||
// "features": {},
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
"postCreateCommand": "make install_frontend && make install_backend",
|
||||
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
"postCreateCommand": "poetry install"
|
||||
"containerEnv": {
|
||||
"POETRY_VIRTUALENVS_IN_PROJECT": "true"
|
||||
},
|
||||
|
||||
// Configure tool-specific properties.
|
||||
// "customizations": {},
|
||||
// Configure tool-specific properties.
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"actboy168.tasks",
|
||||
"GitHub.copilot",
|
||||
"ms-python.python",
|
||||
"sourcery.sourcery",
|
||||
"eamodio.gitlens",
|
||||
"ms-vscode.makefile-tools",
|
||||
"GitHub.vscode-pull-request-github"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
||||
// "remoteUser": "root"
|
||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
||||
// "remoteUser": "root"
|
||||
}
|
||||
|
|
|
|||
.gitattributes  (vendored, new file, 34 lines)

@@ -0,0 +1,34 @@
# Set the default behavior, in case people don't have core.autocrlf set.
* text eol=lf

# Explicitly declare text files you want to always be normalized and converted
# to native line endings on checkout.
*.c text
*.h text
*.py text
*.js text
*.jsx text
*.ts text
*.tsx text
*.md text
*.mdx text
*.yml text
*.yaml text
*.xml text
*.csv text
*.json text
*.sh text
*.Dockerfile text
Dockerfile text

# Declare files that will always have CRLF line endings on checkout.
*.sln text eol=crlf

# Denote all files that are truly binary and should not be modified.
*.png binary
*.jpg binary
*.ico binary
*.gif binary
*.mp4 binary
*.svg binary
*.csv binary
.github/workflows/pre-release.yml  (vendored, 4 lines changed)

@@ -14,9 +14,7 @@ env:
jobs:
  if_release:
-    if: |
-      ${{ github.event.pull_request.merged == true }}
-      && ${{ contains(github.event.pull_request.labels.*.name, 'pre-release') }}
+    if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

.gitignore  (vendored, 1 line changed)

@@ -253,3 +253,4 @@ langflow.db
.docusaurus/

/tmp/*
+src/backend/langflow/frontend/

.vscode/tasks.json  (vendored, new file, 48 lines)

@@ -0,0 +1,48 @@
{
    // See https://go.microsoft.com/fwlink/?LinkId=733558
    // for the documentation about the tasks.json format
    "version": "2.0.0",
    "tasks": [
        {
            "label": "Init",
            "type": "shell",
            "command": "make init"
        },
        // make backend
        {
            "label": "Backend",
            "type": "shell",
            "command": "make backend"
        },
        // make frontend
        {
            "label": "Frontend",
            "type": "shell",
            "command": "make frontend"
        },
        // make test
        {
            "label": "Test",
            "type": "shell",
            "command": "make tests"
        },
        // make lint
        {
            "label": "Lint",
            "type": "shell",
            "command": "make lint"
        },
        // make format
        {
            "label": "Format",
            "type": "shell",
            "command": "make format"
        },
        // make install
        {
            "label": "Install",
            "type": "shell",
            "command": "make install_backend && make install_frontend"
        }
    ]
}
Makefile  (21 lines changed)

@@ -19,7 +19,7 @@ coverage:
	--cov-report term-missing:skip-covered

tests:
-	poetry run pytest tests
+	poetry run pytest tests -n auto

format:
	poetry run black .

@@ -27,23 +27,34 @@
	cd src/frontend && npm run format

lint:
-	poetry run mypy .
+	poetry run mypy --exclude .venv .
	poetry run black . --check
	poetry run ruff . --fix

install_frontend:
-	cd src/frontend && npm install;
+	cd src/frontend && npm install

install_frontendc:
-	cd src/frontend && npm ci;
+	cd src/frontend && rm -rf node_modules package-lock.json && npm install

run_frontend:
	cd src/frontend && npm start

run_cli:
	poetry run langflow --path src/frontend/build

run_cli_debug:
	poetry run langflow --path src/frontend/build --log-level debug

setup_devcontainer:
	make init
	make build_frontend
	poetry run langflow --path src/frontend/build

frontend:
	make install_frontend
	make run_frontend

frontendc:
	make install_frontendc
	make run_frontend

@@ -1,33 +1,33 @@
version: "3.4"

services:
  backend:
    volumes:
      - ./:/app
    build:
      context: ./
      dockerfile: ./dev.Dockerfile
    command:
      [
        "sh",
        "-c",
        "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn --factory src.backend.langflow.main:create_app --host 0.0.0.0 --port 7860 --reload",
      ]
    ports:
      - 7860:7860
      - 5678:5678
    restart: on-failure

  frontend:
    build:
      context: ./src/frontend
      dockerfile: ./dev.Dockerfile
      args:
        - BACKEND_URL=http://backend:7860
    ports:
      - "3000:3000"
    volumes:
      - ./src/frontend/public:/home/node/app/public
      - ./src/frontend/src:/home/node/app/src
      - ./src/frontend/package.json:/home/node/app/package.json
    restart: on-failure

docker_example/README.md  (new file, 9 lines)

@@ -0,0 +1,9 @@
# LangFlow Docker Running

```sh
git clone git@github.com:logspace-ai/langflow.git
cd langflow/docker_example
docker compose up
```

The web UI will be accessible on port [7860](http://localhost:7860/)

@@ -1,10 +1,76 @@
-import Admonition from '@theme/Admonition';
+import Admonition from "@theme/Admonition";

# Utilities

<Admonition type="caution" icon="🚧" title="ZONE UNDER CONSTRUCTION">
  <p>
    We appreciate your understanding as we polish our documentation – it may
    contain some rough edges. Share your feedback or report issues to help us
    improve! 🛠️📝
  </p>
</Admonition>

Utilities are a set of actions that can be used to perform common tasks in a flow. They are available in the **Utilities** section in the sidebar.

---

### GET Request

Make a GET request to the given URL.

**Params**

- **URL:** The URL to make the request to. There can be more than one URL, in which case the request will be made to each URL in order.
- **Headers:** A dictionary of headers to send with the request.

**Output**

- **List of Documents:** A list of Documents containing the JSON response from each request.
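
Not part of the diff — a minimal, illustrative sketch of roughly what the **GET Request** utility does. The helper name `get_request`, its exact signature, and the use of the `requests` library are assumptions for this example; only the one-Document-per-URL behaviour comes from the description above. `Document` is the LangChain type the project already depends on.

```python
from typing import Dict, List, Optional

import requests
from langchain.schema import Document  # langchain is already a Langflow dependency


def get_request(urls: List[str], headers: Optional[Dict[str, str]] = None) -> List[Document]:
    """Call each URL in order and wrap each JSON response in a Document."""
    documents = []
    for url in urls:
        response = requests.get(url, headers=headers)
        # Keep the raw response body as page_content; record the source URL in metadata.
        documents.append(Document(page_content=response.text, metadata={"source": url}))
    return documents
```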

---

### POST Request

Make a POST request to the given URL.

**Params**

- **URL:** The URL to make the request to.
- **Headers:** A dictionary of headers to send with the request.
- **Document:** The Document containing a JSON object to send with the request.

**Output**

- **Document:** The JSON response from the request as a Document.

---

### Update Request

Make a PATCH or PUT request to the given URL.

**Params**

- **URL:** The URL to make the request to.
- **Headers:** A dictionary of headers to send with the request.
- **Document:** The Document containing a JSON object to send with the request.
- **Method:** The HTTP method to use for the request. Can be either `PATCH` or `PUT`.

**Output**

- **Document:** The JSON response from the request as a Document.

---

### JSON Document Builder

Build a Document containing a JSON object using a key and another Document page content.

**Params**

- **Key:** The key to use for the JSON object.
- **Document:** The Document page to use for the JSON object.

**Output**

- **List of Documents:** A list containing the Document with the JSON object.
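
Likewise illustrative only, not part of the diff: a sketch of what the **JSON Document Builder** produces — a Document whose page content is a JSON object built from a key and another Document's page content. The helper name `build_json_document` is invented for this example, and `Document` is again the LangChain type.

```python
import json

from langchain.schema import Document


def build_json_document(key: str, document: Document) -> Document:
    """Wrap a Document's page content in a JSON object under the given key."""
    payload = json.dumps({key: document.page_content})
    return Document(page_content=payload, metadata=dict(document.metadata))


doc = build_json_document("answer", Document(page_content="LangChain flows, visualized."))
print(doc.page_content)  # {"answer": "LangChain flows, visualized."}
```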
71
docs/package-lock.json
generated
|
|
@ -16,7 +16,7 @@
|
|||
"@docusaurus/theme-classic": "^2.4.1",
|
||||
"@docusaurus/theme-search-algolia": "^2.4.1",
|
||||
"@mdx-js/react": "^2.3.0",
|
||||
"@mendable/search": "^0.0.114",
|
||||
"@mendable/search": "^0.0.154",
|
||||
"@pbe/react-yandex-maps": "^1.2.4",
|
||||
"@prismicio/client": "^7.0.1",
|
||||
"@uiball/loaders": "^1.2.6",
|
||||
|
|
@ -3250,10 +3250,11 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@mendable/search": {
|
||||
"version": "0.0.114",
|
||||
"resolved": "https://registry.npmjs.org/@mendable/search/-/search-0.0.114.tgz",
|
||||
"integrity": "sha512-0uR+zxONuu/16bpLli49Jocr5fee1WIjs06KzU1AnHsR+fdFBmfrlpgTDWctgGuXPzS5Dorlw4VMlR5dPW5qVQ==",
|
||||
"version": "0.0.154",
|
||||
"resolved": "https://registry.npmjs.org/@mendable/search/-/search-0.0.154.tgz",
|
||||
"integrity": "sha512-adNwXlIaMXVMCkPU2uUdghfn05Dmxb0BnE95SRLQJ6evHajsNFQdRl5Ltj3WijG+qo4ozTIJcPOBYrDPKMTPVw==",
|
||||
"dependencies": {
|
||||
"html-react-parser": "^4.2.0",
|
||||
"posthog-js": "^1.45.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
|
@ -9351,6 +9352,33 @@
|
|||
"safe-buffer": "~5.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/html-dom-parser": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/html-dom-parser/-/html-dom-parser-4.0.0.tgz",
|
||||
"integrity": "sha512-TUa3wIwi80f5NF8CVWzkopBVqVAtlawUzJoLwVLHns0XSJGynss4jiY0mTWpiDOsuyw+afP+ujjMgRh9CoZcXw==",
|
||||
"dependencies": {
|
||||
"domhandler": "5.0.3",
|
||||
"htmlparser2": "9.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/html-dom-parser/node_modules/htmlparser2": {
|
||||
"version": "9.0.0",
|
||||
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-9.0.0.tgz",
|
||||
"integrity": "sha512-uxbSI98wmFT/G4P2zXx4OVx04qWUmyFPrD2/CNepa2Zo3GPNaCaaxElDgwUrwYWkK1nr9fft0Ya8dws8coDLLQ==",
|
||||
"funding": [
|
||||
"https://github.com/fb55/htmlparser2?sponsor=1",
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.3",
|
||||
"domutils": "^3.1.0",
|
||||
"entities": "^4.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/html-entities": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.4.0.tgz",
|
||||
|
|
@ -9394,6 +9422,20 @@
|
|||
"node": ">= 12"
|
||||
}
|
||||
},
|
||||
"node_modules/html-react-parser": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/html-react-parser/-/html-react-parser-4.2.1.tgz",
|
||||
"integrity": "sha512-Dxzdowj5Zu/+7mr8X8PzCFbPXGuwCwGB2u4cB6oxZGES9inw85qlvnlfPD75VGKUGjcgsXs+9Dpj+THWNQyOBw==",
|
||||
"dependencies": {
|
||||
"domhandler": "5.0.3",
|
||||
"html-dom-parser": "4.0.0",
|
||||
"react-property": "2.0.0",
|
||||
"style-to-js": "1.1.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "0.14 || 15 || 16 || 17 || 18"
|
||||
}
|
||||
},
|
||||
"node_modules/html-tags": {
|
||||
"version": "3.3.1",
|
||||
"resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.3.1.tgz",
|
||||
|
|
@ -15324,6 +15366,11 @@
|
|||
"react": ">=16.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/react-property": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/react-property/-/react-property-2.0.0.tgz",
|
||||
"integrity": "sha512-kzmNjIgU32mO4mmH5+iUyrqlpFQhF8K2k7eZ4fdLSOPFrD1XgEuSBv9LDEgxRXTMBqMd8ppT0x6TIzqE5pdGdw=="
|
||||
},
|
||||
"node_modules/react-router": {
|
||||
"version": "5.3.4",
|
||||
"resolved": "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz",
|
||||
|
|
@ -17510,6 +17557,22 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/style-to-js": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.3.tgz",
|
||||
"integrity": "sha512-zKI5gN/zb7LS/Vm0eUwjmjrXWw8IMtyA8aPBJZdYiQTXj4+wQ3IucOLIOnF7zCHxvW8UhIGh/uZh/t9zEHXNTQ==",
|
||||
"dependencies": {
|
||||
"style-to-object": "0.4.1"
|
||||
}
|
||||
},
|
||||
"node_modules/style-to-js/node_modules/style-to-object": {
|
||||
"version": "0.4.1",
|
||||
"resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.1.tgz",
|
||||
"integrity": "sha512-HFpbb5gr2ypci7Qw+IOhnP2zOU7e77b+rzM+wTzXzfi1PrtBCX0E7Pk4wL4iTLnhzZ+JgEGAhX81ebTg/aYjQw==",
|
||||
"dependencies": {
|
||||
"inline-style-parser": "0.1.1"
|
||||
}
|
||||
},
|
||||
"node_modules/style-to-object": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz",
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@
|
|||
"@docusaurus/theme-classic": "^2.4.1",
|
||||
"@docusaurus/theme-search-algolia": "^2.4.1",
|
||||
"@mdx-js/react": "^2.3.0",
|
||||
"@mendable/search": "^0.0.114",
|
||||
"@mendable/search": "^0.0.154",
|
||||
"@pbe/react-yandex-maps": "^1.2.4",
|
||||
"@prismicio/client": "^7.0.1",
|
||||
"@uiball/loaders": "^1.2.6",
|
||||
|
|
@ -69,4 +69,4 @@
|
|||
"engines": {
|
||||
"node": ">=16.14"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -42,6 +42,7 @@ module.exports = {
      "components/text-splitters",
      "components/toolkits",
      "components/tools",
+      "components/utilities",
      "components/vector-stores",
      "components/wrappers",
    ],

@@ -37,7 +37,7 @@ export default function FooterWrapper(props) {

  const mendableFloatingButton = React.createElement(MendableFloatingButton, {
    floatingButtonStyle: { color: "#000000", backgroundColor: "#f6f6f6" },
-    anon_key: customFields.mendableAnonKey, // Mendable Search Public ANON key, ok to be public
+    anon_key: 'b7f52734-297c-41dc-8737-edbd13196394', // Mendable Search Public ANON key, ok to be public
    showSimpleSearch: true,
    icon: icon,
  });

docs/static/data/organizations-100.csv  (vendored, 202 lines changed)

@@ -1,101 +1,101 @@
Index,Organization Id,Name,Website,Country,Description,Founded,Industry,Number of employees
1,FAB0d41d5b5d22c,Ferrell LLC,https://price.net/,Papua New Guinea,Horizontal empowering knowledgebase,1990,Plastics,3498
[rows 2–100 of this sample-data CSV omitted; the old and new versions shown in this hunk are textually identical, so the recorded change is evidently whitespace or line-ending only]

BIN  docs/static/img/logo.svg  (vendored)  Before: 31 KiB  After: 31 KiB
BIN  docs/static/img/new_langflow.gif  (vendored)  Before: 3.2 MiB  After: 3.2 MiB
BIN  docs/static/img/new_langflow2.gif  (vendored)  Before: 3.2 MiB  After: 3.2 MiB
BIN  docs/static/videos/langflow_api.mp4  (vendored)
BIN  docs/static/videos/langflow_build.mp4  (vendored)
BIN  docs/static/videos/langflow_collection.mp4  (vendored)
BIN  docs/static/videos/langflow_collection_example.mp4  (vendored)
BIN  docs/static/videos/langflow_fork.mp4  (vendored)
BIN  docs/static/videos/langflow_parameters.mp4  (vendored)
BIN  docs/static/videos/langflow_widget.mp4  (vendored)  Before: 2 MiB  After: 2 MiB
package-lock.json  (generated, 1551 lines)

@@ -1,5 +0,0 @@
-{
-  "devDependencies": {
-    "@svgr/cli": "^8.0.1"
-  }
-}
poetry.lock  (generated, 1186 lines)

@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
-version = "0.4.11"
+version = "0.5.0a0"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [

@@ -33,7 +33,7 @@ google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^21.1.0"
-langchain = "^0.0.256"
+langchain = "^0.0.274"
openai = "^0.27.8"
pandas = "^2.0.0"
chromadb = "^0.3.21"

@@ -85,6 +85,8 @@ passlib = "^1.7.4"
bcrypt = "^4.0.1"
python-jose = "^3.3.0"
metaphor-python = "^0.1.11"
+markupsafe = "^2.1.3"
+pywin32 = { version = "^306", markers = "sys_platform == 'win32'" }


[tool.poetry.group.dev.dependencies]

@@ -104,6 +106,9 @@ types-pyyaml = "^6.0.12.8"
types-python-jose = "^3.3.4.8"
types-passlib = "^1.7.7.13"
locust = "^2.16.1"
+pytest-mock = "^3.11.1"
+pytest-xdist = "^3.3.1"
+types-pywin32 = "^306.0.0.4"


[tool.poetry.extras]

@@ -1,10 +1,11 @@
import sys
import time
import httpx
-from langflow.services.manager import initialize_settings_manager
-from langflow.services.utils import get_settings_manager
-from langflow.utils.util import get_number_of_workers
-from multiprocess import Process  # type: ignore
+from langflow.services.database.utils import session_getter
+from langflow.services.manager import initialize_services, initialize_settings_manager
+from langflow.services.utils import get_db_manager, get_settings_manager
+
+from multiprocess import Process, cpu_count  # type: ignore
import platform
from pathlib import Path
from typing import Optional

@@ -12,15 +13,46 @@ import socket
from rich.panel import Panel
from rich import box
from rich import print as rprint
from rich.table import Table
import typer
from langflow.main import setup_app
from langflow.utils.logger import configure, logger
import webbrowser
from dotenv import load_dotenv

from rich.console import Console

console = Console()

app = typer.Typer()


def get_number_of_workers(workers=None):
    if workers == -1 or workers is None:
        workers = (cpu_count() * 2) + 1
    logger.debug(f"Number of workers: {workers}")
    return workers


def display_results(results):
    """
    Display the results of the migration.
    """
    for table_results in results:
        table = Table(title=f"Migration {table_results.table_name}")
        table.add_column("Name")
        table.add_column("Type")
        table.add_column("Status")

        for result in table_results.results:
            status = "Success" if result.success else "Failure"
            color = "green" if result.success else "red"
            table.add_row(result.name, result.type, f"[{color}]{status}[/{color}]")

        console.print(table)
        console.print()  # Print a new line


def update_settings(
    config: str,
    cache: str,

@@ -94,7 +126,7 @@ def serve_on_jcloud():


@app.command()
-def serve(
+def run(
    host: str = typer.Option(
        "127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
    ),

@@ -312,6 +344,43 @@ def run_langflow(host, port, log_level, options, app):
        sys.exit(1)


@app.command()
def superuser(
    username: str = typer.Option(..., prompt=True, help="Username for the superuser."),
    password: str = typer.Option(
        ..., prompt=True, hide_input=True, help="Password for the superuser."
    ),
):
    initialize_services()
    db_manager = get_db_manager()
    with session_getter(db_manager) as session:
        from langflow.services.auth.utils import create_super_user

        if create_super_user(db=session, username=username, password=password):
            # Verify that the superuser was created
            from langflow.services.database.models.user.user import User

            user = session.query(User).filter(User.username == username).first()
            if user is None:
                typer.echo("Superuser creation failed.")
                return

            typer.echo("Superuser created successfully.")

        else:
            typer.echo("Superuser creation failed.")


@app.command()
def migration(test: bool = typer.Option(False, help="Run migrations in test mode.")):
    initialize_services()
    db_manager = get_db_manager()
    if not test:
        db_manager.run_migrations()
    results = db_manager.run_migrations_test()
    display_results(results)


def main():
    app()

@@ -46,6 +46,7 @@ def run_migrations_offline() -> None:
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
+        render_as_batch=True,
    )

    with context.begin_transaction():

@@ -66,7 +67,9 @@ def run_migrations_online() -> None:
    )

    with connectable.connect() as connection:
-        context.configure(connection=connection, target_metadata=target_metadata)
+        context.configure(
+            connection=connection, target_metadata=target_metadata, render_as_batch=True
+        )

        with context.begin_transaction():
            context.run_migrations()

@ -1,42 +0,0 @@
|
|||
"""Remove FlowStyles table
|
||||
|
||||
Revision ID: 0a534bdfd84b
|
||||
Revises: 4814b6f4abfd
|
||||
Create Date: 2023-08-07 14:09:06.844104
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "0a534bdfd84b"
|
||||
down_revision: Union[str, None] = "4814b6f4abfd"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table("flowstyle")
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
"flowstyle",
|
||||
sa.Column("color", sa.VARCHAR(), nullable=False),
|
||||
sa.Column("emoji", sa.VARCHAR(), nullable=False),
|
||||
sa.Column("flow_id", sa.CHAR(length=32), nullable=True),
|
||||
sa.Column("id", sa.CHAR(length=32), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
["flow_id"],
|
||||
["flow.id"],
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
|
@ -0,0 +1,177 @@
|
|||
"""Adds tables
|
||||
|
||||
Revision ID: 260dbcc8b680
|
||||
Revises:
|
||||
Create Date: 2023-08-27 19:49:02.681355
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "260dbcc8b680"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn)
|
||||
# List existing tables
|
||||
existing_tables = inspector.get_table_names()
|
||||
# Drop 'flowstyle' table if it exists
|
||||
# and other related indices
|
||||
if "flowstyle" in existing_tables:
|
||||
op.drop_table("flowstyle")
|
||||
if "ix_flowstyle_flow_id" in [
|
||||
index["name"] for index in inspector.get_indexes("flowstyle")
|
||||
]:
|
||||
op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle")
|
||||
|
||||
existing_indices_flow = []
|
||||
existing_fks_flow = []
|
||||
if "flow" in existing_tables:
|
||||
existing_indices_flow = [
|
||||
index["name"] for index in inspector.get_indexes("flow")
|
||||
]
|
||||
# Existing foreign keys for the 'flow' table, if it exists
|
||||
existing_fks_flow = [
|
||||
fk["referred_table"] + "." + fk["referred_columns"][0]
|
||||
for fk in inspector.get_foreign_keys("flow")
|
||||
]
|
||||
# Now check whether the user_id column exists in the 'flow' table
|
||||
# If it does not exist, we need to create the foreign key
|
||||
|
||||
if "user" not in existing_tables:
|
||||
op.create_table(
|
||||
"user",
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("username", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("password", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("is_active", sa.Boolean(), nullable=False),
|
||||
sa.Column("is_superuser", sa.Boolean(), nullable=False),
|
||||
sa.Column("create_at", sa.DateTime(), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||
sa.Column("last_login_at", sa.DateTime(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
with op.batch_alter_table("user", schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f("ix_user_username"), ["username"], unique=True
|
||||
)
|
||||
|
||||
if "apikey" not in existing_tables:
|
||||
op.create_table(
|
||||
"apikey",
|
||||
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||
sa.Column("last_used_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("total_uses", sa.Integer(), nullable=False, default=0),
|
||||
sa.Column("is_active", sa.Boolean(), nullable=False, default=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
["user_id"],
|
||||
["user.id"],
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
with op.batch_alter_table("apikey", schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True
|
||||
)
|
||||
batch_op.create_index(batch_op.f("ix_apikey_name"), ["name"], unique=False)
|
||||
batch_op.create_index(
|
||||
batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False
|
||||
)
|
||||
if "flow" not in existing_tables:
|
||||
op.create_table(
|
||||
"flow",
|
||||
sa.Column("data", sa.JSON(), nullable=True),
|
||||
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
["user_id"],
|
||||
["user.id"],
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
# Conditionally create indices for 'flow' table
|
||||
# if _alembic_tmp_flow exists, then we need to drop it first
|
||||
# This is to deal with SQLite not being able to ROLLBACK
|
||||
# for some unknown reason
|
||||
if "_alembic_tmp_flow" in existing_tables:
|
||||
op.drop_table("_alembic_tmp_flow")
|
||||
with op.batch_alter_table("flow", schema=None) as batch_op:
|
||||
flow_columns = [col["name"] for col in inspector.get_columns("flow")]
|
||||
if "user_id" not in flow_columns:
|
||||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"user_id",
|
||||
sqlmodel.sql.sqltypes.GUID(),
|
||||
nullable=True, # This should be False, but we need to allow NULL values for now
|
||||
)
|
||||
)
|
||||
if "user.id" not in existing_fks_flow:
|
||||
batch_op.create_foreign_key("fk_flow_user_id", "user", ["user_id"], ["id"])
|
||||
if "ix_flow_description" not in existing_indices_flow:
|
||||
batch_op.create_index(
|
||||
batch_op.f("ix_flow_description"), ["description"], unique=False
|
||||
)
|
||||
if "ix_flow_name" not in existing_indices_flow:
|
||||
batch_op.create_index(batch_op.f("ix_flow_name"), ["name"], unique=False)
|
||||
with op.batch_alter_table("flow", schema=None) as batch_op:
|
||||
if "ix_flow_user_id" not in existing_indices_flow:
|
||||
batch_op.create_index(
|
||||
batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn)
|
||||
# List existing tables
|
||||
existing_tables = inspector.get_table_names()
|
||||
if "flow" in existing_tables:
|
||||
with op.batch_alter_table("flow", schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f("ix_flow_user_id"))
|
||||
batch_op.drop_index(batch_op.f("ix_flow_name"))
|
||||
batch_op.drop_index(batch_op.f("ix_flow_description"))
|
||||
|
||||
op.drop_table("flow")
|
||||
if "apikey" in existing_tables:
|
||||
with op.batch_alter_table("apikey", schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f("ix_apikey_user_id"))
|
||||
batch_op.drop_index(batch_op.f("ix_apikey_name"))
|
||||
batch_op.drop_index(batch_op.f("ix_apikey_api_key"))
|
||||
|
||||
op.drop_table("apikey")
|
||||
if "user" in existing_tables:
|
||||
with op.batch_alter_table("user", schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f("ix_user_username"))
|
||||
|
||||
op.drop_table("user")
|
||||
|
||||
if "flowstyle" in existing_tables:
|
||||
op.drop_table("flowstyle")
|
||||
|
||||
if "component" in existing_tables:
|
||||
op.drop_table("component")
|
||||
# ### end Alembic commands ###
|
||||
|
|
@ -1,65 +0,0 @@
|
|||
"""Add Flow table
|
||||
|
||||
Revision ID: 4814b6f4abfd
|
||||
Revises:
|
||||
Create Date: 2023-08-05 17:47:42.879824
|
||||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "4814b6f4abfd"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
||||
# This suppress is used to not break the migration if the table already exists.
|
||||
with contextlib.suppress(sa.exc.OperationalError):
|
||||
op.create_table(
|
||||
"flow",
|
||||
sa.Column("data", sa.JSON(), nullable=True),
|
||||
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_flow_description"), "flow", ["description"], unique=False
|
||||
)
|
||||
op.create_index(op.f("ix_flow_name"), "flow", ["name"], unique=False)
|
||||
with contextlib.suppress(sa.exc.OperationalError):
|
||||
op.create_table(
|
||||
"flowstyle",
|
||||
sa.Column("color", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("emoji", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
["flow_id"],
|
||||
["flow.id"],
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table("flowstyle")
|
||||
op.drop_index(op.f("ix_flow_name"), table_name="flow")
|
||||
op.drop_index(op.f("ix_flow_description"), table_name="flow")
|
||||
op.drop_table("flow")
|
||||
# ### end Alembic commands ###
|
||||
|
|
@ -6,6 +6,9 @@ from langflow.api.v1 import (
|
|||
validate_router,
|
||||
flows_router,
|
||||
component_router,
|
||||
users_router,
|
||||
api_key_router,
|
||||
login_router,
|
||||
)
|
||||
|
||||
router = APIRouter(
|
||||
|
|
@ -16,3 +19,6 @@ router.include_router(endpoints_router)
|
|||
router.include_router(validate_router)
|
||||
router.include_router(component_router)
|
||||
router.include_router(flows_router)
|
||||
router.include_router(users_router)
|
||||
router.include_router(api_key_router)
|
||||
router.include_router(login_router)
|
||||
|
|
|
|||
|
|
@ -3,6 +3,9 @@ from langflow.api.v1.validate import router as validate_router
|
|||
from langflow.api.v1.chat import router as chat_router
|
||||
from langflow.api.v1.flows import router as flows_router
|
||||
from langflow.api.v1.components import router as component_router
|
||||
from langflow.api.v1.users import router as users_router
|
||||
from langflow.api.v1.api_key import router as api_key_router
|
||||
from langflow.api.v1.login import router as login_router
|
||||
|
||||
__all__ = [
|
||||
"chat_router",
|
||||
|
|
@ -10,4 +13,7 @@ __all__ = [
|
|||
"component_router",
|
||||
"validate_router",
|
||||
"flows_router",
|
||||
"users_router",
|
||||
"api_key_router",
|
||||
"login_router",
|
||||
]
|
||||
|
|
|
|||
src/backend/langflow/api/v1/api_key.py (new file, 61 lines)
|
|
@ -0,0 +1,61 @@
|
|||
from uuid import UUID
|
||||
from fastapi import APIRouter, HTTPException, Depends
|
||||
from langflow.api.v1.schemas import ApiKeysResponse
|
||||
from langflow.services.auth.utils import get_current_active_user
|
||||
from langflow.services.database.models.api_key.api_key import (
|
||||
ApiKeyCreate,
|
||||
UnmaskedApiKeyRead,
|
||||
)
|
||||
|
||||
# Assuming you have these methods in your service layer
|
||||
from langflow.services.database.models.api_key.crud import (
|
||||
get_api_keys,
|
||||
create_api_key,
|
||||
delete_api_key,
|
||||
)
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_session
|
||||
from sqlmodel import Session
|
||||
|
||||
|
||||
router = APIRouter(tags=["APIKey"], prefix="/api_key")
|
||||
|
||||
|
||||
@router.get("/", response_model=ApiKeysResponse)
|
||||
def get_api_keys_route(
|
||||
db: Session = Depends(get_session),
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
try:
|
||||
user_id = current_user.id
|
||||
keys = get_api_keys(db, user_id)
|
||||
|
||||
return ApiKeysResponse(total_count=len(keys), user_id=user_id, api_keys=keys)
|
||||
except Exception as exc:
|
||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||
|
||||
|
||||
@router.post("/", response_model=UnmaskedApiKeyRead)
|
||||
def create_api_key_route(
|
||||
req: ApiKeyCreate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
db: Session = Depends(get_session),
|
||||
):
|
||||
try:
|
||||
user_id = current_user.id
|
||||
return create_api_key(db, req, user_id=user_id)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e)) from e
|
||||
|
||||
|
||||
@router.delete("/{api_key_id}")
|
||||
def delete_api_key_route(
|
||||
api_key_id: UUID,
|
||||
current_user=Depends(get_current_active_user),
|
||||
db: Session = Depends(get_session),
|
||||
):
|
||||
try:
|
||||
delete_api_key(db, api_key_id)
|
||||
return {"detail": "API Key deleted"}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e)) from e
|
||||
|
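A rough client-side sketch of the new API-key routes. The host, port, /api/v1 prefix, the Bearer header and the body/response field names are assumptions (the body mirrors the CreateApiKeyRequest schema that appears later in this changeset); only the route paths come from the code above.

import requests

BASE = "http://127.0.0.1:7860/api/v1"                 # assumed host/port/prefix
HEADERS = {"Authorization": "Bearer <access-token>"}  # token obtained from /login

# Create a key; the response model is UnmaskedApiKeyRead, so the raw key is visible here.
created = requests.post(f"{BASE}/api_key/", json={"name": "my-key"}, headers=HEADERS).json()

# List keys for the current user, then delete the one just created
# ("id" is assumed to be present in UnmaskedApiKeyRead).
listing = requests.get(f"{BASE}/api_key/", headers=HEADERS).json()
requests.delete(f"{BASE}/api_key/{created['id']}", headers=HEADERS)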
|
@ -1,3 +1,4 @@
|
|||
from typing import Optional
|
||||
from langflow.template.frontend_node.base import FrontendNode
|
||||
from pydantic import BaseModel, validator
|
||||
|
||||
|
|
@ -20,7 +21,8 @@ class FrontendNodeRequest(FrontendNode):
|
|||
class ValidatePromptRequest(BaseModel):
|
||||
name: str
|
||||
template: str
|
||||
frontend_node: FrontendNodeRequest
|
||||
# optional for tweak call
|
||||
frontend_node: Optional[FrontendNodeRequest]
|
||||
|
||||
|
||||
# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}}
|
||||
|
|
@ -39,7 +41,8 @@ class CodeValidationResponse(BaseModel):
|
|||
|
||||
class PromptValidationResponse(BaseModel):
|
||||
input_variables: list
|
||||
frontend_node: FrontendNodeRequest
|
||||
# object return for tweak call
|
||||
frontend_node: FrontendNodeRequest | object
|
||||
|
||||
|
||||
INVALID_CHARACTERS = {
|
||||
|
|
|
|||
|
|
@ -1,35 +1,76 @@
|
|||
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketException, status
|
||||
from fastapi import (
|
||||
APIRouter,
|
||||
Depends,
|
||||
HTTPException,
|
||||
Query,
|
||||
WebSocket,
|
||||
WebSocketException,
|
||||
status,
|
||||
)
|
||||
from fastapi.responses import StreamingResponse
|
||||
from langflow.api.utils import build_input_keys_response
|
||||
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
|
||||
|
||||
from langflow.services import service_manager, ServiceType
|
||||
from langflow.graph.graph.base import Graph
|
||||
from langflow.services.auth.utils import get_current_active_user, get_current_user
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.utils.logger import logger
|
||||
from sqlmodel import Session
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.chat.manager import ChatManager
|
||||
|
||||
router = APIRouter(tags=["Chat"])
|
||||
|
||||
|
||||
@router.websocket("/chat/{client_id}")
|
||||
async def chat(client_id: str, websocket: WebSocket):
|
||||
async def chat(
|
||||
client_id: str,
|
||||
websocket: WebSocket,
|
||||
token: str = Query(...),
|
||||
db: Session = Depends(get_session),
|
||||
):
|
||||
"""Websocket endpoint for chat."""
|
||||
try:
|
||||
chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
|
||||
if client_id in chat_manager.cache_manager:
|
||||
await websocket.accept()
|
||||
user = await get_current_user(token, db)
|
||||
if not user:
|
||||
await websocket.close(
|
||||
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
|
||||
)
|
||||
if not user.is_active:
|
||||
await websocket.close(
|
||||
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
|
||||
)
|
||||
|
||||
chat_manager: "ChatManager" = service_manager.get(ServiceType.CHAT_MANAGER)
|
||||
if client_id in chat_manager.in_memory_cache:
|
||||
await chat_manager.handle_websocket(client_id, websocket)
|
||||
else:
|
||||
# We accept the connection but close it immediately
|
||||
# if the flow is not built yet
|
||||
await websocket.accept()
|
||||
message = "Please, build the flow before sending messages"
|
||||
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message)
|
||||
except WebSocketException as exc:
|
||||
logger.error(f"Websocket error: {exc}")
|
||||
logger.error(f"Websocket exrror: {exc}")
|
||||
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc))
|
||||
except Exception as exc:
|
||||
logger.error(f"Error in chat websocket: {exc}")
|
||||
message = exc.detail if isinstance(exc, HTTPException) else str(exc)
|
||||
if "Could not validate credentials" in str(exc):
|
||||
await websocket.close(
|
||||
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
|
||||
)
|
||||
else:
|
||||
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message)
|
||||
|
||||
|
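A rough client sketch for the secured websocket above: the JWT now travels as a token query parameter. Host, port, URL prefix and the payload shape are assumptions.

import asyncio
import websockets  # third-party client, used only for illustration


async def chat_once(client_id: str, token: str) -> None:
    uri = f"ws://127.0.0.1:7860/api/v1/chat/{client_id}?token={token}"  # prefix assumed
    async with websockets.connect(uri) as ws:
        await ws.send('{"message": "hello"}')  # payload shape is an assumption
        print(await ws.recv())


# asyncio.run(chat_once("<client-id>", "<jwt>"))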
||||
@router.post("/build/init/{flow_id}", response_model=InitResponse, status_code=201)
|
||||
async def init_build(graph_data: dict, flow_id: str):
|
||||
async def init_build(
|
||||
graph_data: dict, flow_id: str, current_user=Depends(get_current_active_user)
|
||||
):
|
||||
"""Initialize the build by storing graph data and returning a unique session ID."""
|
||||
cache_manager = service_manager.get(ServiceType.CACHE_MANAGER)
|
||||
try:
|
||||
|
|
@ -51,6 +92,7 @@ async def init_build(graph_data: dict, flow_id: str):
|
|||
cache_manager[flow_id] = {
|
||||
"graph_data": graph_data,
|
||||
"status": BuildStatus.STARTED,
|
||||
"user_id": current_user.id,
|
||||
}
|
||||
|
||||
return InitResponse(flowId=flow_id)
|
||||
|
|
@ -98,6 +140,7 @@ async def stream_build(flow_id: str):
|
|||
return
|
||||
|
||||
graph_data = cache_manager[flow_id].get("graph_data")
|
||||
cache_manager[flow_id]["user_id"]
|
||||
|
||||
if not graph_data:
|
||||
error_message = "No data provided"
|
||||
|
|
@ -105,14 +148,9 @@ async def stream_build(flow_id: str):
|
|||
return
|
||||
|
||||
logger.debug("Building langchain object")
|
||||
try:
|
||||
# Some error could happen when building the graph
|
||||
graph = Graph.from_payload(graph_data)
|
||||
except Exception as exc:
|
||||
logger.exception(exc)
|
||||
error_message = str(exc)
|
||||
yield str(StreamData(event="error", data={"error": error_message}))
|
||||
return
|
||||
|
||||
# Some error could happen when building the graph
|
||||
graph = Graph.from_payload(graph_data)
|
||||
|
||||
number_of_nodes = len(graph.nodes)
|
||||
cache_manager[flow_id]["status"] = BuildStatus.IN_PROGRESS
|
||||
|
|
@ -130,7 +168,9 @@ async def stream_build(flow_id: str):
|
|||
params = vertex._built_object_repr()
|
||||
valid = True
|
||||
logger.debug(f"Building node {str(vertex.vertex_type)}")
|
||||
logger.debug(f"Output: {params}")
|
||||
logger.debug(
|
||||
f"Output: {params[:100]}{'...' if len(params) > 100 else ''}"
|
||||
)
|
||||
if vertex.artifacts:
|
||||
# The artifacts will be prompt variables
|
||||
# passed to build_input_keys_response
|
||||
|
|
|
|||
|
|
@ -1,14 +1,16 @@
|
|||
from http import HTTPStatus
|
||||
from typing import Annotated, Optional, Union
|
||||
from typing import Annotated, Any, Optional, Union
|
||||
from langflow.services.auth.utils import api_key_security, get_current_active_user
|
||||
|
||||
|
||||
from langflow.services.cache.utils import save_uploaded_file
|
||||
from langflow.services.database.models.flow import Flow
|
||||
from langflow.processing.process import process_tweaks
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.utils.logger import logger
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body, status
|
||||
import sqlalchemy as sa
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
|
||||
|
|
@ -38,21 +40,29 @@ router = APIRouter(tags=["Base"])
|
|||
|
||||
|
||||
@router.get("/all")
|
||||
def get_all():
|
||||
def get_all(current_user: User = Depends(get_current_active_user)):
|
||||
logger.debug("Building langchain types dict")
|
||||
native_components = build_langchain_types_dict()
|
||||
# custom_components is a list of dicts
|
||||
# need to merge all the keys into one dict
|
||||
custom_components_from_file = {}
|
||||
custom_components_from_file: dict[str, Any] = {}
|
||||
settings_manager = get_settings_manager()
|
||||
if settings_manager.settings.COMPONENTS_PATH:
|
||||
logger.info(
|
||||
f"Building custom components from {settings_manager.settings.COMPONENTS_PATH}"
|
||||
)
|
||||
custom_component_dicts = [
|
||||
build_langchain_custom_component_list_from_path(str(path))
|
||||
for path in settings_manager.settings.COMPONENTS_PATH
|
||||
]
|
||||
|
||||
custom_component_dicts = []
|
||||
processed_paths = []
|
||||
for path in settings_manager.settings.COMPONENTS_PATH:
|
||||
if str(path) in processed_paths:
|
||||
continue
|
||||
custom_component_dict = build_langchain_custom_component_list_from_path(
|
||||
str(path)
|
||||
)
|
||||
custom_component_dicts.append(custom_component_dict)
|
||||
processed_paths.append(str(path))
|
||||
|
||||
logger.info(f"Loading {len(custom_component_dicts)} category(ies)")
|
||||
for custom_component_dict in custom_component_dicts:
|
||||
# custom_component_dict is a dict of dicts
|
||||
|
|
@ -62,7 +72,6 @@ def get_all():
|
|||
logger.info(
|
||||
f"Loading {len(custom_component_dict[category])} component(s) from category {category}"
|
||||
)
|
||||
logger.debug(custom_component_dict)
|
||||
custom_components_from_file = merge_nested_dicts_with_renaming(
|
||||
custom_components_from_file, custom_component_dict
|
||||
)
|
||||
|
|
@ -73,22 +82,42 @@ def get_all():
|
|||
|
||||
|
||||
# For backwards compatibility we will keep the old endpoint
|
||||
@router.post("/predict/{flow_id}", response_model=ProcessResponse)
|
||||
@router.post("/process/{flow_id}", response_model=ProcessResponse)
|
||||
@router.post(
|
||||
"/predict/{flow_id}",
|
||||
response_model=ProcessResponse,
|
||||
dependencies=[Depends(api_key_security)],
|
||||
)
|
||||
@router.post(
|
||||
"/process/{flow_id}",
|
||||
response_model=ProcessResponse,
|
||||
)
|
||||
async def process_flow(
|
||||
session: Annotated[Session, Depends(get_session)],
|
||||
flow_id: str,
|
||||
inputs: Optional[dict] = None,
|
||||
tweaks: Optional[dict] = None,
|
||||
clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821
|
||||
session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821
|
||||
session: Session = Depends(get_session),
|
||||
api_key_user: User = Depends(api_key_security),
|
||||
):
|
||||
"""
|
||||
Endpoint to process an input with a given flow_id.
|
||||
"""
|
||||
|
||||
try:
|
||||
flow = session.get(Flow, flow_id)
|
||||
if api_key_user is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Invalid API Key",
|
||||
)
|
||||
|
||||
# Get the flow that matches the flow_id and belongs to the user
|
||||
flow = (
|
||||
session.query(Flow)
|
||||
.filter(Flow.id == flow_id)
|
||||
.filter(Flow.user_id == api_key_user.id)
|
||||
.first()
|
||||
)
|
||||
if flow is None:
|
||||
raise ValueError(f"Flow {flow_id} not found")
|
||||
|
||||
|
|
@ -105,6 +134,22 @@ async def process_flow(
|
|||
graph_data, inputs, clear_cache, session_id
|
||||
)
|
||||
return ProcessResponse(result=task.state, id=task.id)
|
||||
except sa.exc.StatementError as exc:
|
||||
# StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
|
||||
if "badly formed hexadecimal UUID string" in str(exc):
|
||||
# The flow ID is not a valid UUID, so the flow cannot be found
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||
) from exc
|
||||
except ValueError as exc:
|
||||
if f"Flow {flow_id} not found" in str(exc):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||
) from exc
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)
|
||||
) from exc
|
||||
except Exception as e:
|
||||
# Log stack trace
|
||||
logger.exception(e)
|
||||
|
|
|
|||
|
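A rough client sketch for the API-key-protected /process endpoint defined above. The body keys mirror the endpoint signature (inputs, tweaks, clear_cache, session_id); the host, port, /api/v1 prefix and the API-key header name are assumptions.

import requests

resp = requests.post(
    "http://127.0.0.1:7860/api/v1/process/<flow-id>",  # placeholder flow id
    headers={"x-api-key": "<api-key>"},                # header name assumed, checked by api_key_security
    json={"inputs": {"input": "Hello"}, "clear_cache": False},
)
print(resp.json())  # shaped like ProcessResponse: {"result": ..., "id": ...}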
|
@ -1,30 +1,42 @@
|
|||
from typing import List
|
||||
from uuid import UUID
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
|
||||
from langflow.api.utils import remove_api_keys
|
||||
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
|
||||
from langflow.services.auth.utils import get_current_active_user
|
||||
from langflow.services.database.models.flow import (
|
||||
Flow,
|
||||
FlowCreate,
|
||||
FlowRead,
|
||||
FlowUpdate,
|
||||
)
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from sqlmodel import Session, select
|
||||
import orjson
|
||||
from sqlmodel import Session
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
|
||||
from fastapi import File, UploadFile
|
||||
import json
|
||||
|
||||
# build router
|
||||
router = APIRouter(prefix="/flows", tags=["Flows"])
|
||||
|
||||
|
||||
@router.post("/", response_model=FlowRead, status_code=201)
|
||||
def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate):
|
||||
def create_flow(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow: FlowCreate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Create a new flow."""
|
||||
if flow.user_id is None:
|
||||
flow.user_id = current_user.id
|
||||
|
||||
db_flow = Flow.from_orm(flow)
|
||||
|
||||
session.add(db_flow)
|
||||
session.commit()
|
||||
session.refresh(db_flow)
|
||||
|
|
@ -32,31 +44,49 @@ def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate):
|
|||
|
||||
|
||||
@router.get("/", response_model=list[FlowRead], status_code=200)
|
||||
def read_flows(*, session: Session = Depends(get_session)):
|
||||
def read_flows(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Read all flows."""
|
||||
try:
|
||||
flows = session.exec(select(Flow)).all()
|
||||
flows = current_user.flows
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
return [jsonable_encoder(flow) for flow in flows]
|
||||
|
||||
|
||||
@router.get("/{flow_id}", response_model=FlowRead, status_code=200)
|
||||
def read_flow(*, session: Session = Depends(get_session), flow_id: UUID):
|
||||
def read_flow(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow_id: UUID,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Read a flow."""
|
||||
if flow := session.get(Flow, flow_id):
|
||||
return flow
|
||||
if user_flow := (
|
||||
session.query(Flow)
|
||||
.filter(Flow.id == flow_id)
|
||||
.filter(Flow.user_id == current_user.id)
|
||||
.first()
|
||||
):
|
||||
return user_flow
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Flow not found")
|
||||
|
||||
|
||||
@router.patch("/{flow_id}", response_model=FlowRead, status_code=200)
|
||||
def update_flow(
|
||||
*, session: Session = Depends(get_session), flow_id: UUID, flow: FlowUpdate
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow_id: UUID,
|
||||
flow: FlowUpdate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Update a flow."""
|
||||
|
||||
db_flow = session.get(Flow, flow_id)
|
||||
db_flow = read_flow(session=session, flow_id=flow_id, current_user=current_user)
|
||||
if not db_flow:
|
||||
raise HTTPException(status_code=404, detail="Flow not found")
|
||||
flow_data = flow.dict(exclude_unset=True)
|
||||
|
|
@ -64,7 +94,8 @@ def update_flow(
|
|||
if settings_manager.settings.REMOVE_API_KEYS:
|
||||
flow_data = remove_api_keys(flow_data)
|
||||
for key, value in flow_data.items():
|
||||
setattr(db_flow, key, value)
|
||||
if value is not None:
|
||||
setattr(db_flow, key, value)
|
||||
session.add(db_flow)
|
||||
session.commit()
|
||||
session.refresh(db_flow)
|
||||
|
|
@ -72,9 +103,14 @@ def update_flow(
|
|||
|
||||
|
||||
@router.delete("/{flow_id}", status_code=200)
|
||||
def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID):
|
||||
def delete_flow(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow_id: UUID,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Delete a flow."""
|
||||
flow = session.get(Flow, flow_id)
|
||||
flow = read_flow(session=session, flow_id=flow_id, current_user=current_user)
|
||||
if not flow:
|
||||
raise HTTPException(status_code=404, detail="Flow not found")
|
||||
session.delete(flow)
|
||||
|
|
@ -86,10 +122,16 @@ def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID):
|
|||
|
||||
|
||||
@router.post("/batch/", response_model=List[FlowRead], status_code=201)
|
||||
def create_flows(*, session: Session = Depends(get_session), flow_list: FlowListCreate):
|
||||
def create_flows(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow_list: FlowListCreate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Create multiple new flows."""
|
||||
db_flows = []
|
||||
for flow in flow_list.flows:
|
||||
flow.user_id = current_user.id
|
||||
db_flow = Flow.from_orm(flow)
|
||||
session.add(db_flow)
|
||||
db_flows.append(db_flow)
|
||||
|
|
@ -101,20 +143,31 @@ def create_flows(*, session: Session = Depends(get_session), flow_list: FlowList
|
|||
|
||||
@router.post("/upload/", response_model=List[FlowRead], status_code=201)
|
||||
async def upload_file(
|
||||
*, session: Session = Depends(get_session), file: UploadFile = File(...)
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
file: UploadFile = File(...),
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Upload flows from a file."""
|
||||
contents = await file.read()
|
||||
data = json.loads(contents)
|
||||
data = orjson.loads(contents)
|
||||
if "flows" in data:
|
||||
flow_list = FlowListCreate(**data)
|
||||
else:
|
||||
flow_list = FlowListCreate(flows=[FlowCreate(**flow) for flow in data])
|
||||
return create_flows(session=session, flow_list=flow_list)
|
||||
# Now we set the user_id for all flows
|
||||
for flow in flow_list.flows:
|
||||
flow.user_id = current_user.id
|
||||
|
||||
return create_flows(session=session, flow_list=flow_list, current_user=current_user)
|
||||
|
||||
|
||||
@router.get("/download/", response_model=FlowListRead, status_code=200)
|
||||
async def download_file(*, session: Session = Depends(get_session)):
|
||||
async def download_file(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Download all flows as a file."""
|
||||
flows = read_flows(session=session)
|
||||
flows = read_flows(session=session, current_user=current_user)
|
||||
return FlowListRead(flows=flows)
|
||||
|
|
|
|||
|
|
@ -1,20 +1,20 @@
|
|||
from uuid import UUID
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlmodel import Session
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordRequestForm
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.database.models.token import Token
|
||||
from langflow.auth.auth import (
|
||||
from langflow.api.v1.schemas import Token
|
||||
from langflow.services.auth.utils import (
|
||||
authenticate_user,
|
||||
create_user_tokens,
|
||||
create_refresh_token,
|
||||
create_user_longterm_token,
|
||||
get_current_active_user,
|
||||
)
|
||||
|
||||
from langflow.services.utils import get_settings_manager
|
||||
|
||||
router = APIRouter()
|
||||
router = APIRouter(tags=["Login"])
|
||||
|
||||
|
||||
@router.post("/login", response_model=Token)
|
||||
|
|
@ -37,9 +37,8 @@ async def login_to_get_access_token(
|
|||
async def auto_login(db: Session = Depends(get_session)):
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
if settings_manager.settings.AUTO_LOGIN:
|
||||
user_id = UUID("3fa85f64-5717-4562-b3fc-2c963f66afa6")
|
||||
return create_user_longterm_token(user_id, db)
|
||||
if settings_manager.auth_settings.AUTO_LOGIN:
|
||||
return create_user_longterm_token(db)
|
||||
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
|
|
@ -51,7 +50,9 @@ async def auto_login(db: Session = Depends(get_session)):
|
|||
|
||||
|
||||
@router.post("/refresh")
|
||||
async def refresh_token(token: str):
|
||||
async def refresh_token(
|
||||
token: str, current_user: Session = Depends(get_current_active_user)
|
||||
):
|
||||
if token:
|
||||
return create_refresh_token(token)
|
||||
else:
|
||||
|
|
@ -1,9 +1,13 @@
|
|||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
from uuid import UUID
|
||||
from langflow.services.database.models.api_key.api_key import ApiKeyRead
|
||||
from langflow.services.database.models.flow import FlowCreate, FlowRead
|
||||
from langflow.services.database.models.user import UserRead
|
||||
from langflow.services.database.models.base import orjson_dumps
|
||||
|
||||
from pydantic import BaseModel, Field, validator
|
||||
import json
|
||||
|
||||
|
||||
class BuildStatus(Enum):
|
||||
|
|
@ -126,7 +130,9 @@ class StreamData(BaseModel):
|
|||
data: dict
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"event: {self.event}\ndata: {json.dumps(self.data)}\n\n"
|
||||
return (
|
||||
f"event: {self.event}\ndata: {orjson_dumps(self.data, indent_2=False)}\n\n"
|
||||
)
|
||||
|
||||
|
||||
class CustomComponentCode(BaseModel):
|
||||
|
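For reference, the reworked __str__ above produces a standard Server-Sent-Events frame whose data line is compact JSON (assuming indent_2=False disables indentation in orjson_dumps):

from langflow.api.v1.schemas import StreamData

frame = StreamData(event="message", data={"token": "hi"})
print(str(frame), end="")
# event: message
# data: {"token":"hi"}
# (a blank line terminates the SSE frame)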
|
@ -144,3 +150,32 @@ class ComponentListCreate(BaseModel):
|
|||
|
||||
class ComponentListRead(BaseModel):
|
||||
flows: List[FlowRead]
|
||||
|
||||
|
||||
class UsersResponse(BaseModel):
|
||||
total_count: int
|
||||
users: List[UserRead]
|
||||
|
||||
|
||||
class ApiKeyResponse(BaseModel):
|
||||
id: str
|
||||
api_key: str
|
||||
name: str
|
||||
created_at: str
|
||||
last_used_at: str
|
||||
|
||||
|
||||
class ApiKeysResponse(BaseModel):
|
||||
total_count: int
|
||||
user_id: UUID
|
||||
api_keys: List[ApiKeyRead]
|
||||
|
||||
|
||||
class CreateApiKeyRequest(BaseModel):
|
||||
name: str
|
||||
|
||||
|
||||
class Token(BaseModel):
|
||||
access_token: str
|
||||
refresh_token: str
|
||||
token_type: str
|
||||
|
|
|
|||
|
|
@ -1,4 +1,11 @@
|
|||
from uuid import UUID
|
||||
from langflow.api.v1.schemas import UsersResponse
|
||||
from langflow.services.database.models.user import (
|
||||
User,
|
||||
UserCreate,
|
||||
UserRead,
|
||||
UserUpdate,
|
||||
)
|
||||
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
|
@ -7,28 +14,27 @@ from sqlmodel import Session, select
|
|||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.auth.auth import get_current_active_user, get_password_hash
|
||||
from langflow.database.models.user import (
|
||||
User,
|
||||
UserAddModel,
|
||||
UserListModel,
|
||||
UserPatchModel,
|
||||
UsersResponse,
|
||||
from langflow.services.auth.utils import (
|
||||
get_current_active_superuser,
|
||||
get_current_active_user,
|
||||
get_password_hash,
|
||||
)
|
||||
from langflow.services.database.models.user.crud import (
|
||||
update_user,
|
||||
)
|
||||
|
||||
router = APIRouter(tags=["Login"])
|
||||
router = APIRouter(tags=["Users"])
|
||||
|
||||
|
||||
@router.post("/user", response_model=UserListModel)
|
||||
@router.post("/user", response_model=UserRead, status_code=201)
|
||||
def add_user(
|
||||
user: UserAddModel,
|
||||
user: UserCreate,
|
||||
db: Session = Depends(get_session),
|
||||
) -> User:
|
||||
"""
|
||||
Add a new user to the database.
|
||||
"""
|
||||
new_user = User(**user.dict())
|
||||
new_user = User.from_orm(user)
|
||||
try:
|
||||
new_user.password = get_password_hash(user.password)
|
||||
|
||||
|
|
@ -37,13 +43,17 @@ def add_user(
|
|||
db.refresh(new_user)
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=400, detail="User exists") from e
|
||||
raise HTTPException(
|
||||
status_code=400, detail="This username is unavailable."
|
||||
) from e
|
||||
|
||||
return new_user
|
||||
|
||||
|
||||
@router.get("/user", response_model=UserListModel)
|
||||
def read_current_user(current_user: User = Depends(get_current_active_user)) -> User:
|
||||
@router.get("/user", response_model=UserRead)
|
||||
def read_current_user(
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
) -> User:
|
||||
"""
|
||||
Retrieve the current user's data.
|
||||
"""
|
||||
|
|
@ -54,7 +64,7 @@ def read_current_user(current_user: User = Depends(get_current_active_user)) ->
|
|||
def read_all_users(
|
||||
skip: int = 0,
|
||||
limit: int = 10,
|
||||
_: Session = Depends(get_current_active_user),
|
||||
current_user: Session = Depends(get_current_active_superuser),
|
||||
db: Session = Depends(get_session),
|
||||
) -> UsersResponse:
|
||||
"""
|
||||
|
|
@ -68,14 +78,14 @@ def read_all_users(
|
|||
|
||||
return UsersResponse(
|
||||
total_count=total_count, # type: ignore
|
||||
users=[UserListModel(**dict(user.User)) for user in users],
|
||||
users=[UserRead(**dict(user.User)) for user in users],
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/user/{user_id}", response_model=UserListModel)
|
||||
@router.patch("/user/{user_id}", response_model=UserRead)
|
||||
def patch_user(
|
||||
user_id: UUID,
|
||||
user: UserPatchModel,
|
||||
user: UserUpdate,
|
||||
_: Session = Depends(get_current_active_user),
|
||||
db: Session = Depends(get_session),
|
||||
) -> User:
|
||||
|
|
@ -88,12 +98,21 @@ def patch_user(
|
|||
@router.delete("/user/{user_id}")
|
||||
def delete_user(
|
||||
user_id: UUID,
|
||||
_: Session = Depends(get_current_active_user),
|
||||
current_user: User = Depends(get_current_active_superuser),
|
||||
db: Session = Depends(get_session),
|
||||
) -> dict:
|
||||
"""
|
||||
Delete a user from the database.
|
||||
"""
|
||||
if current_user.id == user_id:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="You can't delete your own user account"
|
||||
)
|
||||
elif not current_user.is_superuser:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="You don't have the permission to delete this user"
|
||||
)
|
||||
|
||||
user_db = db.query(User).filter(User.id == user_id).first()
|
||||
if not user_db:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
|
@ -115,14 +134,13 @@ def add_super_user_for_testing_purposes_delete_me_before_merge_into_dev(
|
|||
"""
|
||||
new_user = User(
|
||||
username="superuser",
|
||||
password="12345",
|
||||
password=get_password_hash("12345"),
|
||||
is_active=True,
|
||||
is_superuser=True,
|
||||
last_login_at=None,
|
||||
)
|
||||
|
||||
try:
|
||||
new_user.password = get_password_hash(new_user.password)
|
||||
db.add(new_user)
|
||||
db.commit()
|
||||
db.refresh(new_user)
|
||||
|
|
@ -31,7 +31,12 @@ def post_validate_code(code: Code):
|
|||
def post_validate_prompt(prompt_request: ValidatePromptRequest):
|
||||
try:
|
||||
input_variables = validate_prompt(prompt_request.template)
|
||||
|
||||
# Check if frontend_node is None before proceeding to avoid attempting to update a non-existent node.
|
||||
if prompt_request.frontend_node is None:
|
||||
return PromptValidationResponse(
|
||||
input_variables=input_variables,
|
||||
frontend_node={},
|
||||
)
|
||||
old_custom_fields = get_old_custom_fields(prompt_request)
|
||||
|
||||
add_new_variables_to_template(input_variables, prompt_request)
|
||||
|
|
|
|||
|
|
@ -1,177 +0,0 @@
|
|||
from uuid import UUID
|
||||
from typing import Annotated
|
||||
from jose import JWTError, jwt
|
||||
from sqlalchemy.orm import Session
|
||||
from passlib.context import CryptContext
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from langflow.services.utils import get_settings_manager
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.database.models.user import (
|
||||
User,
|
||||
get_user_by_id,
|
||||
get_user_by_username,
|
||||
update_user_last_login_at,
|
||||
)
|
||||
|
||||
|
||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="login")
|
||||
|
||||
|
||||
async def get_current_user(
|
||||
token: Annotated[str, Depends(oauth2_scheme)], db: Session = Depends(get_session)
|
||||
) -> User:
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
credentials_exception = HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Could not validate credentials",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
|
||||
try:
|
||||
payload = jwt.decode(
|
||||
token,
|
||||
settings_manager.settings.SECRET_KEY,
|
||||
algorithms=[settings_manager.settings.ALGORITHM],
|
||||
)
|
||||
user_id: UUID = payload.get("sub") # type: ignore
|
||||
token_type: str = payload.get("type") # type: ignore
|
||||
|
||||
if user_id is None or token_type:
|
||||
raise credentials_exception
|
||||
except JWTError as e:
|
||||
raise credentials_exception from e
|
||||
|
||||
user = get_user_by_id(db, user_id) # type: ignore
|
||||
if user is None:
|
||||
raise credentials_exception
|
||||
return user
|
||||
|
||||
|
||||
async def get_current_active_user(
|
||||
current_user: Annotated[User, Depends(get_current_user)]
|
||||
):
|
||||
if not current_user.is_active:
|
||||
raise HTTPException(status_code=400, detail="Inactive user")
|
||||
return current_user
|
||||
|
||||
|
||||
def verify_password(plain_password, hashed_password):
|
||||
return pwd_context.verify(plain_password, hashed_password)
|
||||
|
||||
|
||||
def get_password_hash(password):
|
||||
return pwd_context.hash(password)
|
||||
|
||||
|
||||
def create_token(data: dict, expires_delta: timedelta):
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
to_encode = data.copy()
|
||||
expire = datetime.now(timezone.utc) + expires_delta
|
||||
to_encode["exp"] = expire
|
||||
|
||||
return jwt.encode(
|
||||
to_encode,
|
||||
settings_manager.settings.SECRET_KEY,
|
||||
algorithm=settings_manager.settings.ALGORITHM,
|
||||
)
|
||||
|
||||
|
||||
def create_user_longterm_token(
|
||||
user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False
|
||||
) -> dict:
|
||||
access_token_expires_longterm = timedelta(days=365)
|
||||
access_token = create_token(
|
||||
data={"sub": str(user_id)},
|
||||
expires_delta=access_token_expires_longterm,
|
||||
)
|
||||
|
||||
# Update: last_login_at
|
||||
if update_last_login:
|
||||
update_user_last_login_at(user_id, db)
|
||||
|
||||
return {
|
||||
"access_token": access_token,
|
||||
"refresh_token": None,
|
||||
"token_type": "bearer",
|
||||
}
|
||||
|
||||
|
||||
def create_user_tokens(
|
||||
user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False
|
||||
) -> dict:
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
access_token_expires = timedelta(
|
||||
minutes=settings_manager.settings.ACCESS_TOKEN_EXPIRE_MINUTES
|
||||
)
|
||||
access_token = create_token(
|
||||
data={"sub": str(user_id)},
|
||||
expires_delta=access_token_expires,
|
||||
)
|
||||
|
||||
refresh_token_expires = timedelta(
|
||||
minutes=settings_manager.settings.REFRESH_TOKEN_EXPIRE_MINUTES
|
||||
)
|
||||
refresh_token = create_token(
|
||||
data={"sub": str(user_id), "type": "rf"},
|
||||
expires_delta=refresh_token_expires,
|
||||
)
|
||||
|
||||
# Update: last_login_at
|
||||
if update_last_login:
|
||||
update_user_last_login_at(user_id, db)
|
||||
|
||||
return {
|
||||
"access_token": access_token,
|
||||
"refresh_token": refresh_token,
|
||||
"token_type": "bearer",
|
||||
}
|
||||
|
||||
|
||||
def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)):
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
try:
|
||||
payload = jwt.decode(
|
||||
refresh_token,
|
||||
settings_manager.settings.SECRET_KEY,
|
||||
algorithms=[settings_manager.settings.ALGORITHM],
|
||||
)
|
||||
user_id: UUID = payload.get("sub") # type: ignore
|
||||
token_type: str = payload.get("type") # type: ignore
|
||||
|
||||
if user_id is None or token_type is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token"
|
||||
)
|
||||
|
||||
return create_user_tokens(user_id, db)
|
||||
|
||||
except JWTError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Invalid refresh token",
|
||||
) from e
|
||||
|
||||
|
||||
def authenticate_user(
|
||||
username: str, password: str, db: Session = Depends(get_session)
|
||||
) -> User | None:
|
||||
user = get_user_by_username(db, username)
|
||||
|
||||
if not user:
|
||||
return None
|
||||
|
||||
if not user.is_active:
|
||||
if not user.last_login_at:
|
||||
raise HTTPException(status_code=400, detail="Waiting for approval")
|
||||
raise HTTPException(status_code=400, detail="Inactive user")
|
||||
|
||||
return user if verify_password(password, user.password) else None
|
||||
src/backend/langflow/components/utilities/GetRequest.py (new file, 76 lines)
|
|
@ -0,0 +1,76 @@
|
|||
from langflow import CustomComponent
|
||||
from langchain.schema import Document
|
||||
from langflow.services.database.models.base import orjson_dumps
|
||||
import requests
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class GetRequest(CustomComponent):
|
||||
display_name: str = "GET Request"
|
||||
description: str = "Make a GET request to the given URL."
|
||||
output_types: list[str] = ["Document"]
|
||||
documentation: str = "https://docs.langflow.org/components/utilities#get-request"
|
||||
beta = True
|
||||
field_config = {
|
||||
"url": {
|
||||
"display_name": "URL",
|
||||
"info": "The URL to make the request to",
|
||||
"is_list": True,
|
||||
},
|
||||
"headers": {
|
||||
"display_name": "Headers",
|
||||
"field_type": "code",
|
||||
"info": "The headers to send with the request.",
|
||||
},
|
||||
"code": {"show": False},
|
||||
"timeout": {
|
||||
"display_name": "Timeout",
|
||||
"field_type": "int",
|
||||
"info": "The timeout to use for the request.",
|
||||
"value": 5,
|
||||
},
|
||||
}
|
||||
|
||||
def get_document(
|
||||
self, session: requests.Session, url: str, headers: Optional[dict], timeout: int
|
||||
) -> Document:
|
||||
try:
|
||||
response = session.get(url, headers=headers, timeout=int(timeout))
|
||||
try:
|
||||
response_json = response.json()
|
||||
result = orjson_dumps(response_json, indent_2=False)
|
||||
except Exception:
|
||||
result = response.text
|
||||
self.repr_value = result
|
||||
return Document(
|
||||
page_content=result,
|
||||
metadata={
|
||||
"source": url,
|
||||
"headers": headers,
|
||||
"status_code": response.status_code,
|
||||
},
|
||||
)
|
||||
except requests.Timeout:
|
||||
return Document(
|
||||
page_content="Request Timed Out",
|
||||
metadata={"source": url, "headers": headers, "status_code": 408},
|
||||
)
|
||||
except Exception as exc:
|
||||
return Document(
|
||||
page_content=str(exc),
|
||||
metadata={"source": url, "headers": headers, "status_code": 500},
|
||||
)
|
||||
|
||||
def build(
|
||||
self,
|
||||
url: str,
|
||||
headers: Optional[dict] = None,
|
||||
timeout: int = 5,
|
||||
) -> list[Document]:
|
||||
if headers is None:
|
||||
headers = {}
|
||||
urls = url if isinstance(url, list) else [url]
|
||||
with requests.Session() as session:
|
||||
documents = [self.get_document(session, u, headers, timeout) for u in urls]
|
||||
self.repr_value = documents
|
||||
return documents
|
||||
|
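A minimal usage sketch for the component above. Instantiating a CustomComponent with no arguments is an assumption; inside Langflow the component is normally wired through the UI rather than called directly.

component = GetRequest()
docs = component.build(url="https://httpbin.org/get", timeout=5)  # url may also be a list
for doc in docs:
    print(doc.metadata["status_code"], doc.page_content[:80])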
|
@ -0,0 +1,55 @@
|
|||
### JSON Document Builder
|
||||
|
||||
# Build a Document containing a JSON object using a key and another Document page content.
|
||||
|
||||
# **Params**
|
||||
|
||||
# - **Key:** The key to use for the JSON object.
|
||||
# - **Document:** The Document page to use for the JSON object.
|
||||
|
||||
# **Output**
|
||||
|
||||
# - **Document:** The Document containing the JSON object.
|
||||
|
||||
from langflow import CustomComponent
|
||||
from langchain.schema import Document
|
||||
from langflow.services.database.models.base import orjson_dumps
|
||||
|
||||
|
||||
class JSONDocumentBuilder(CustomComponent):
|
||||
display_name: str = "JSON Document Builder"
|
||||
description: str = "Build a Document containing a JSON object using a key and another Document page content."
|
||||
output_types: list[str] = ["Document"]
|
||||
beta = True
|
||||
documentation: str = (
|
||||
"https://docs.langflow.org/components/utilities#json-document-builder"
|
||||
)
|
||||
|
||||
field_config = {
|
||||
"key": {"display_name": "Key"},
|
||||
"document": {"display_name": "Document"},
|
||||
}
|
||||
|
||||
def build(
|
||||
self,
|
||||
key: str,
|
||||
document: Document,
|
||||
) -> Document:
|
||||
documents = None
|
||||
if isinstance(document, list):
|
||||
documents = [
|
||||
Document(
|
||||
page_content=orjson_dumps({key: doc.page_content}, indent_2=False)
|
||||
)
|
||||
for doc in document
|
||||
]
|
||||
elif isinstance(document, Document):
|
||||
documents = Document(
|
||||
page_content=orjson_dumps({key: document.page_content}, indent_2=False)
|
||||
)
|
||||
else:
|
||||
raise TypeError(
|
||||
f"Expected Document or list of Documents, got {type(document)}"
|
||||
)
|
||||
self.repr_value = documents
|
||||
return documents
|
||||
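A minimal usage sketch for the builder above on a single Document. Zero-argument instantiation is an assumption, and the exact page_content shown relies on orjson's compact output.

from langchain.schema import Document

builder = JSONDocumentBuilder()
wrapped = builder.build(key="answer", document=Document(page_content="42"))
print(wrapped.page_content)  # -> {"answer":"42"}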
src/backend/langflow/components/utilities/PostRequest.py (new file, 81 lines)
|
|
@ -0,0 +1,81 @@
|
|||
from langflow import CustomComponent
|
||||
from langchain.schema import Document
|
||||
from langflow.services.database.models.base import orjson_dumps
|
||||
import requests
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class PostRequest(CustomComponent):
|
||||
display_name: str = "POST Request"
|
||||
description: str = "Make a POST request to the given URL."
|
||||
output_types: list[str] = ["Document"]
|
||||
documentation: str = "https://docs.langflow.org/components/utilities#post-request"
|
||||
beta = True
|
||||
field_config = {
|
||||
"url": {"display_name": "URL", "info": "The URL to make the request to."},
|
||||
"headers": {
|
||||
"display_name": "Headers",
|
||||
"field_type": "code",
|
||||
"info": "The headers to send with the request.",
|
||||
},
|
||||
"code": {"show": False},
|
||||
"document": {"display_name": "Document"},
|
||||
}
|
||||
|
||||
def post_document(
|
||||
self,
|
||||
session: requests.Session,
|
||||
document: Document,
|
||||
url: str,
|
||||
headers: Optional[dict] = None,
|
||||
) -> Document:
|
||||
try:
|
||||
response = session.post(url, headers=headers, data=document.page_content)
|
||||
try:
|
||||
response_json = response.json()
|
||||
result = orjson_dumps(response_json, indent_2=False)
|
||||
except Exception:
|
||||
result = response.text
|
||||
self.repr_value = result
|
||||
return Document(
|
||||
page_content=result,
|
||||
metadata={
|
||||
"source": url,
|
||||
"headers": headers,
|
||||
"status_code": response,
|
||||
},
|
||||
)
|
||||
except Exception as exc:
|
||||
return Document(
|
||||
page_content=str(exc),
|
||||
metadata={
|
||||
"source": url,
|
||||
"headers": headers,
|
||||
"status_code": 500,
|
||||
},
|
||||
)
|
||||
|
||||
def build(
|
||||
self,
|
||||
document: Document,
|
||||
url: str,
|
||||
headers: Optional[dict] = None,
|
||||
) -> list[Document]:
|
||||
if headers is None:
|
||||
headers = {}
|
||||
|
||||
if not isinstance(document, list) and isinstance(document, Document):
|
||||
documents: list[Document] = [document]
|
||||
elif isinstance(document, list) and all(
|
||||
isinstance(doc, Document) for doc in document
|
||||
):
|
||||
documents = document
|
||||
else:
|
||||
raise ValueError("document must be a Document or a list of Documents")
|
||||
|
||||
with requests.Session() as session:
|
||||
documents = [
|
||||
self.post_document(session, doc, url, headers) for doc in documents
|
||||
]
|
||||
self.repr_value = documents
|
||||
return documents
|
||||
src/backend/langflow/components/utilities/UpdateRequest.py (new file, 94 lines)
|
|
@ -0,0 +1,94 @@
|
|||
from typing import List, Optional
|
||||
import requests
|
||||
from langflow import CustomComponent
|
||||
from langchain.schema import Document
|
||||
from langflow.services.database.models.base import orjson_dumps
|
||||
|
||||
|
||||
class UpdateRequest(CustomComponent):
|
||||
display_name: str = "Update Request"
|
||||
description: str = "Make a PATCH request to the given URL."
|
||||
output_types: list[str] = ["Document"]
|
||||
documentation: str = "https://docs.langflow.org/components/utilities#update-request"
|
||||
beta = True
|
||||
field_config = {
|
||||
"url": {"display_name": "URL", "info": "The URL to make the request to."},
|
||||
"headers": {
|
||||
"display_name": "Headers",
|
||||
"field_type": "code",
|
||||
"info": "The headers to send with the request.",
|
||||
},
|
||||
"code": {"show": False},
|
||||
"document": {"display_name": "Document"},
|
||||
"method": {
|
||||
"display_name": "Method",
|
||||
"field_type": "str",
|
||||
"info": "The HTTP method to use.",
|
||||
"options": ["PATCH", "PUT"],
|
||||
"value": "PATCH",
|
||||
},
|
||||
}
|
||||
|
||||
def update_document(
|
||||
self,
|
||||
session: requests.Session,
|
||||
document: Document,
|
||||
url: str,
|
||||
headers: Optional[dict] = None,
|
||||
method: str = "PATCH",
|
||||
) -> Document:
|
||||
try:
|
||||
if method == "PATCH":
|
||||
response = session.patch(
|
||||
url, headers=headers, data=document.page_content
|
||||
)
|
||||
elif method == "PUT":
|
||||
response = session.put(url, headers=headers, data=document.page_content)
|
||||
else:
|
||||
raise ValueError(f"Unsupported method: {method}")
|
||||
try:
|
||||
response_json = response.json()
|
||||
result = orjson_dumps(response_json, indent_2=False)
|
||||
except Exception:
|
||||
result = response.text
|
||||
self.repr_value = result
|
||||
return Document(
|
||||
page_content=result,
|
||||
metadata={
|
||||
"source": url,
|
||||
"headers": headers,
|
||||
"status_code": response.status_code,
|
||||
},
|
||||
)
|
||||
except Exception as exc:
|
||||
return Document(
|
||||
page_content=str(exc),
|
||||
metadata={"source": url, "headers": headers, "status_code": 500},
|
||||
)
|
||||
|
||||
def build(
|
||||
self,
|
||||
method: str,
|
||||
document: Document,
|
||||
url: str,
|
||||
headers: Optional[dict] = None,
|
||||
) -> List[Document]:
|
||||
if headers is None:
|
||||
headers = {}
|
||||
|
||||
if not isinstance(document, list) and isinstance(document, Document):
|
||||
documents: list[Document] = [document]
|
||||
elif isinstance(document, list) and all(
|
||||
isinstance(doc, Document) for doc in document
|
||||
):
|
||||
documents = document
|
||||
else:
|
||||
raise ValueError("document must be a Document or a list of Documents")
|
||||
|
||||
with requests.Session() as session:
|
||||
documents = [
|
||||
self.update_document(session, doc, url, headers, method)
|
||||
for doc in documents
|
||||
]
|
||||
self.repr_value = documents
|
||||
return documents
|
||||
src/backend/langflow/components/vectorstores/Chroma.py (new file, 109 lines)
|
|
@ -0,0 +1,109 @@
|
|||
from typing import Optional, Union
|
||||
from langflow import CustomComponent
|
||||
|
||||
from langchain.vectorstores import Chroma
|
||||
from langchain.schema import Document
|
||||
from langchain.vectorstores.base import VectorStore
|
||||
from langchain.schema import BaseRetriever
|
||||
from langchain.embeddings.base import Embeddings
|
||||
import chromadb # type: ignore
|
||||
|
||||
|
||||
class ChromaComponent(CustomComponent):
|
||||
"""
|
||||
A custom component for implementing a Vector Store using Chroma.
|
||||
"""
|
||||
|
||||
display_name: str = "Chroma (Custom Component)"
|
||||
description: str = "Implementation of Vector Store using Chroma"
|
||||
documentation = "https://python.langchain.com/docs/integrations/vectorstores/chroma"
|
||||
beta = True
|
||||
|
||||
def build_config(self):
|
||||
"""
|
||||
Builds the configuration for the component.
|
||||
|
||||
Returns:
|
||||
- dict: A dictionary containing the configuration options for the component.
|
||||
"""
|
||||
return {
|
||||
"collection_name": {"display_name": "Collection Name", "value": "langflow"},
|
||||
"persist": {"display_name": "Persist"},
|
||||
"persist_directory": {"display_name": "Persist Directory"},
|
||||
"code": {"show": False, "display_name": "Code"},
|
||||
"documents": {"display_name": "Documents", "is_list": True},
|
||||
"embedding": {"display_name": "Embedding"},
|
||||
"chroma_server_cors_allow_origins": {
|
||||
"display_name": "Server CORS Allow Origins",
|
||||
"advanced": True,
|
||||
},
|
||||
"chroma_server_host": {"display_name": "Server Host", "advanced": True},
|
||||
"chroma_server_port": {"display_name": "Server Port", "advanced": True},
|
||||
"chroma_server_grpc_port": {
|
||||
"display_name": "Server gRPC Port",
|
||||
"advanced": True,
|
||||
},
|
||||
"chroma_server_ssl_enabled": {
|
||||
"display_name": "Server SSL Enabled",
|
||||
"advanced": True,
|
||||
},
|
||||
}
|
||||
|
||||
def build(
|
||||
self,
|
||||
collection_name: str,
|
||||
persist: bool,
|
||||
chroma_server_ssl_enabled: bool,
|
||||
persist_directory: Optional[str] = None,
|
||||
embedding: Optional[Embeddings] = None,
|
||||
documents: Optional[Document] = None,
|
||||
chroma_server_cors_allow_origins: Optional[str] = None,
|
||||
chroma_server_host: Optional[str] = None,
|
||||
chroma_server_port: Optional[int] = None,
|
||||
chroma_server_grpc_port: Optional[int] = None,
|
||||
) -> Union[VectorStore, BaseRetriever]:
|
||||
"""
|
||||
Builds the Vector Store or BaseRetriever object.
|
||||
|
||||
Args:
|
||||
- collection_name (str): The name of the collection.
|
||||
- persist_directory (Optional[str]): The directory to persist the Vector Store to.
|
||||
- chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.
|
||||
- persist (bool): Whether to persist the Vector Store or not.
|
||||
- embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.
|
||||
- documents (Optional[Document]): The documents to use for the Vector Store.
|
||||
- chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.
|
||||
- chroma_server_host (Optional[str]): The host for the Chroma server.
|
||||
- chroma_server_port (Optional[int]): The port for the Chroma server.
|
||||
- chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.
|
||||
|
||||
Returns:
|
||||
- Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.
|
||||
"""
|
||||
|
||||
# Chroma settings
|
||||
chroma_settings = None
|
||||
|
||||
if chroma_server_host is not None:
|
||||
chroma_settings = chromadb.config.Settings(
|
||||
chroma_server_cors_allow_origins=chroma_server_cors_allow_origins
|
||||
or None,
|
||||
chroma_server_host=chroma_server_host,
|
||||
chroma_server_port=chroma_server_port or None,
|
||||
chroma_server_grpc_port=chroma_server_grpc_port or None,
|
||||
chroma_server_ssl_enabled=chroma_server_ssl_enabled,
|
||||
)
|
||||
|
||||
# If documents, then we need to create a Chroma instance using .from_documents
|
||||
if documents is not None and embedding is not None:
|
||||
return Chroma.from_documents(
|
||||
documents=documents, # type: ignore
|
||||
persist_directory=persist_directory if persist else None,
|
||||
collection_name=collection_name,
|
||||
embedding=embedding,
|
||||
client_settings=chroma_settings,
|
||||
)
|
||||
|
||||
return Chroma(
|
||||
persist_directory=persist_directory, client_settings=chroma_settings
|
||||
)
|
||||
@ -1,7 +0,0 @@
from pydantic import BaseModel


class Token(BaseModel):
    access_token: str
    refresh_token: str
    token_type: str
|
@ -1,94 +0,0 @@
|
|||
from sqlmodel import Field
|
||||
from uuid import UUID, uuid4
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
from sqlalchemy.orm import Session
|
||||
from datetime import timezone, datetime
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from fastapi import HTTPException, Depends
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.database.models.base import SQLModelSerializable, SQLModel
|
||||
|
||||
|
||||
class User(SQLModelSerializable, table=True):
|
||||
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
|
||||
username: str = Field(index=True, unique=True)
|
||||
password: str = Field()
|
||||
is_active: bool = Field(default=False)
|
||||
is_superuser: bool = Field(default=False)
|
||||
create_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
updated_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
last_login_at: Optional[datetime] = Field()
|
||||
|
||||
|
||||
class UserAddModel(SQLModel):
|
||||
username: str = Field()
|
||||
password: str = Field()
|
||||
|
||||
|
||||
class UserListModel(SQLModel):
|
||||
id: UUID = Field(default_factory=uuid4)
|
||||
username: str = Field()
|
||||
is_active: bool = Field()
|
||||
is_superuser: bool = Field()
|
||||
create_at: datetime = Field()
|
||||
updated_at: datetime = Field()
|
||||
last_login_at: Optional[datetime] = Field()
|
||||
|
||||
|
||||
class UserPatchModel(SQLModel):
|
||||
username: Optional[str] = Field()
|
||||
is_active: Optional[bool] = Field()
|
||||
is_superuser: Optional[bool] = Field()
|
||||
last_login_at: Optional[datetime] = Field()
|
||||
|
||||
|
||||
class UsersResponse(BaseModel):
|
||||
total_count: int
|
||||
users: List[UserListModel]
|
||||
|
||||
|
||||
def get_user_by_username(db: Session, username: str) -> User:
|
||||
db_user = db.query(User).filter(User.username == username).first()
|
||||
return User.from_orm(db_user) if db_user else None # type: ignore
|
||||
|
||||
|
||||
def get_user_by_id(db: Session, id: UUID) -> User:
|
||||
db_user = db.query(User).filter(User.id == id).first()
|
||||
return User.from_orm(db_user) if db_user else None # type: ignore
|
||||
|
||||
|
||||
def update_user(
|
||||
user_id: UUID, user: UserPatchModel, db: Session = Depends(get_session)
|
||||
) -> User:
|
||||
user_db = get_user_by_username(db, user.username) # type: ignore
|
||||
if user_db and user_db.id != user_id:
|
||||
raise HTTPException(status_code=409, detail="Username already exists")
|
||||
|
||||
user_db = get_user_by_id(db, user_id)
|
||||
if not user_db:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
try:
|
||||
user_data = user.dict(exclude_unset=True)
|
||||
for key, value in user_data.items():
|
||||
setattr(user_db, key, value)
|
||||
|
||||
user_db.updated_at = datetime.now(timezone.utc)
|
||||
user_db = db.merge(user_db)
|
||||
db.commit()
|
||||
if db.identity_key(instance=user_db) is not None:
|
||||
db.refresh(user_db)
|
||||
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=400, detail=str(e)) from e
|
||||
|
||||
return user_db
|
||||
|
||||
|
||||
def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)):
|
||||
user_data = UserPatchModel(last_login_at=datetime.now(timezone.utc)) # type: ignore
|
||||
|
||||
return update_user(user_id, user_data, db)
|
||||
|
|
@ -40,7 +40,6 @@ class Edge:
        if no_matched_type:
            logger.debug(self.source_types)
            logger.debug(self.target_reqs)
        if no_matched_type:
            raise ValueError(
                f"Edge between {self.source.vertex_type} and {self.target.vertex_type} "
                f"has no matched type"
@ -144,7 +144,7 @@ class Graph:

        return list(reversed(sorted_vertices))

    def generator_build(self) -> Generator:
    def generator_build(self) -> Generator[Vertex, None, None]:
        """Builds each vertex in the graph and yields it."""
        sorted_vertices = self.topological_sort()
        logger.debug("There are %s vertices in the graph", len(sorted_vertices))
@ -145,18 +145,18 @@ class Vertex:
|
|||
# Add _type to params
|
||||
self.params = params
|
||||
|
||||
def _build(self):
|
||||
def _build(self, user_id=None):
|
||||
"""
|
||||
Initiate the build process.
|
||||
"""
|
||||
logger.debug(f"Building {self.vertex_type}")
|
||||
self._build_each_node_in_params_dict()
|
||||
self._get_and_instantiate_class()
|
||||
self._build_each_node_in_params_dict(user_id)
|
||||
self._get_and_instantiate_class(user_id)
|
||||
self._validate_built_object()
|
||||
|
||||
self._built = True
|
||||
|
||||
def _build_each_node_in_params_dict(self):
|
||||
def _build_each_node_in_params_dict(self, user_id=None):
|
||||
"""
|
||||
Iterates over each node in the params dictionary and builds it.
|
||||
"""
|
||||
|
|
@ -165,9 +165,9 @@ class Vertex:
|
|||
if value == self:
|
||||
del self.params[key]
|
||||
continue
|
||||
self._build_node_and_update_params(key, value)
|
||||
self._build_node_and_update_params(key, value, user_id)
|
||||
elif isinstance(value, list) and self._is_list_of_nodes(value):
|
||||
self._build_list_of_nodes_and_update_params(key, value)
|
||||
self._build_list_of_nodes_and_update_params(key, value, user_id)
|
||||
|
||||
def _is_node(self, value):
|
||||
"""
|
||||
|
|
@ -181,7 +181,7 @@ class Vertex:
|
|||
"""
|
||||
return all(self._is_node(node) for node in value)
|
||||
|
||||
def get_result(self, timeout=None) -> Any:
|
||||
def get_result(self, user_id=None, timeout=None) -> Any:
|
||||
# Check if the Vertex was built already
|
||||
if self._built:
|
||||
return self._built_object
|
||||
|
|
@ -197,27 +197,29 @@ class Vertex:
|
|||
pass
|
||||
|
||||
# If there's no task_id, build the vertex locally
|
||||
self.build()
|
||||
self.build(user_id)
|
||||
return self._built_object
|
||||
|
||||
def _build_node_and_update_params(self, key, node):
|
||||
def _build_node_and_update_params(self, key, node, user_id=None):
|
||||
"""
|
||||
Builds a given node and updates the params dictionary accordingly.
|
||||
"""
|
||||
|
||||
result = node.get_result()
|
||||
result = node.get_result(user_id)
|
||||
self._handle_func(key, result)
|
||||
if isinstance(result, list):
|
||||
self._extend_params_list_with_result(key, result)
|
||||
self.params[key] = result
|
||||
|
||||
def _build_list_of_nodes_and_update_params(self, key, nodes):
|
||||
def _build_list_of_nodes_and_update_params(
|
||||
self, key, nodes: List["Vertex"], user_id=None
|
||||
):
|
||||
"""
|
||||
Iterates over a list of nodes, builds each and updates the params dictionary.
|
||||
"""
|
||||
self.params[key] = []
|
||||
for node in nodes:
|
||||
built = node.get_result()
|
||||
built = node.get_result(user_id)
|
||||
if isinstance(built, list):
|
||||
if key not in self.params:
|
||||
self.params[key] = []
|
||||
|
|
@ -247,7 +249,7 @@ class Vertex:
|
|||
if isinstance(self.params[key], list):
|
||||
self.params[key].extend(result)
|
||||
|
||||
def _get_and_instantiate_class(self):
|
||||
def _get_and_instantiate_class(self, user_id=None):
|
||||
"""
|
||||
Gets the class from a dictionary and instantiates it with the params.
|
||||
"""
|
||||
|
|
@ -258,6 +260,7 @@ class Vertex:
|
|||
node_type=self.vertex_type,
|
||||
base_type=self.base_type,
|
||||
params=self.params,
|
||||
user_id=user_id,
|
||||
)
|
||||
self._update_built_object_and_artifacts(result)
|
||||
except Exception as exc:
|
||||
|
|
@ -287,9 +290,9 @@ class Vertex:
|
|||
|
||||
raise ValueError(message)
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
if not self._built or force:
|
||||
self._build()
|
||||
self._build(user_id, *args, **kwargs)
|
||||
|
||||
return self._built_object
|
||||
|
||||
|
|
|
|||
|
|
@ -21,18 +21,18 @@ class AgentVertex(Vertex):
|
|||
elif isinstance(source_node, ChainVertex):
|
||||
self.chains.append(source_node)
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
if not self._built or force:
|
||||
self._set_tools_and_chains()
|
||||
# First, build the tools
|
||||
for tool_node in self.tools:
|
||||
tool_node.build()
|
||||
tool_node.build(user_id=user_id)
|
||||
|
||||
# Next, build the chains and the rest
|
||||
for chain_node in self.chains:
|
||||
chain_node.build(tools=self.tools)
|
||||
chain_node.build(tools=self.tools, user_id=user_id)
|
||||
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
|
||||
return self._built_object
|
||||
|
||||
|
|
@ -49,13 +49,13 @@ class LLMVertex(Vertex):
|
|||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="llms")
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
# LLM is different because some models might take up too much memory
|
||||
# or time to load. So we only load them when we need them.
|
||||
if self.vertex_type == self.built_node_type:
|
||||
return self.class_built_object
|
||||
if not self._built or force:
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
self.built_node_type = self.vertex_type
|
||||
self.class_built_object = self._built_object
|
||||
# Avoid deepcopying the LLM
|
||||
|
|
@ -77,11 +77,11 @@ class WrapperVertex(Vertex):
|
|||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="wrappers")
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
if not self._built or force:
|
||||
if "headers" in self.params:
|
||||
self.params["headers"] = ast.literal_eval(self.params["headers"])
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
return self._built_object
|
||||
|
||||
|
||||
|
|
@ -148,16 +148,19 @@ class ChainVertex(Vertex):
|
|||
def build(
|
||||
self,
|
||||
force: bool = False,
|
||||
tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None,
|
||||
user_id=None,
|
||||
*args,
|
||||
**kwargs,
|
||||
) -> Any:
|
||||
if not self._built or force:
|
||||
# Check if the chain requires a PromptVertex
|
||||
for key, value in self.params.items():
|
||||
if isinstance(value, PromptVertex):
|
||||
# Build the PromptVertex, passing the tools if available
|
||||
tools = kwargs.get("tools", None)
|
||||
self.params[key] = value.build(tools=tools, force=force)
|
||||
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
|
||||
return self._built_object
|
||||
|
||||
|
|
@ -169,7 +172,10 @@ class PromptVertex(Vertex):
|
|||
def build(
|
||||
self,
|
||||
force: bool = False,
|
||||
user_id=None,
|
||||
tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None,
|
||||
*args,
|
||||
**kwargs,
|
||||
) -> Any:
|
||||
if not self._built or force:
|
||||
if (
|
||||
|
|
@ -180,7 +186,7 @@ class PromptVertex(Vertex):
|
|||
# Check if it is a ZeroShotPrompt and needs a tool
|
||||
if "ShotPrompt" in self.vertex_type:
|
||||
tools = (
|
||||
[tool_node.build() for tool_node in tools]
|
||||
[tool_node.build(user_id=user_id) for tool_node in tools]
|
||||
if tools is not None
|
||||
else []
|
||||
)
|
||||
|
|
@ -208,7 +214,7 @@ class PromptVertex(Vertex):
|
|||
else:
|
||||
self.params.pop("input_variables", None)
|
||||
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
return self._built_object
|
||||
|
||||
def _built_object_repr(self):
|
||||
|
|
|
|||
|
|
@ -1,9 +1,11 @@
|
|||
from typing import Any, Callable, List, Optional
|
||||
from typing import Any, Callable, List, Optional, Union
|
||||
from uuid import UUID
|
||||
from fastapi import HTTPException
|
||||
from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
|
||||
from langflow.interface.custom.component import Component
|
||||
from langflow.interface.custom.directory_reader import DirectoryReader
|
||||
from langflow.services.utils import get_db_manager
|
||||
from langflow.interface.custom.utils import extract_inner_type
|
||||
|
||||
from langflow.utils import validate
|
||||
|
||||
|
|
@ -20,7 +22,8 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
function_entrypoint_name = "build"
|
||||
function: Optional[Callable] = None
|
||||
return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())
|
||||
repr_value: Optional[str] = ""
|
||||
repr_value: Optional[Any] = ""
|
||||
user_id: Optional[Union[UUID, str]] = None
|
||||
|
||||
def __init__(self, **data):
|
||||
super().__init__(**data)
|
||||
|
|
@ -123,6 +126,10 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
return_type = build_method["return_type"]
|
||||
if not return_type:
|
||||
return []
|
||||
# If list or List is in the return type, then we remove it and return the inner type
|
||||
if return_type.startswith("list") or return_type.startswith("List"):
|
||||
return_type = extract_inner_type(return_type)
|
||||
|
||||
# If the return type is not a Union, then we just return it as a list
|
||||
if "Union" not in return_type:
|
||||
return [return_type] if return_type in self.return_type_valid_list else []
|
||||
|
|
@ -182,11 +189,16 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
return build_sorted_vertices(graph_data)
|
||||
|
||||
def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]:
|
||||
get_session = get_session or session_getter
|
||||
db_manager = get_db_manager()
|
||||
with get_session(db_manager) as session:
|
||||
flows = session.query(Flow).all()
|
||||
return flows
|
||||
if not self.user_id:
|
||||
raise ValueError("Session is invalid")
|
||||
try:
|
||||
get_session = get_session or session_getter
|
||||
db_manager = get_db_manager()
|
||||
with get_session(db_manager) as session:
|
||||
flows = session.query(Flow).filter(Flow.user_id == self.user_id).all()
|
||||
return flows
|
||||
except Exception as e:
|
||||
raise ValueError("Session is invalid") from e
|
||||
|
||||
def get_flow(
|
||||
self,
|
||||
|
|
@ -202,7 +214,11 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
if flow_id:
|
||||
flow = session.query(Flow).get(flow_id)
|
||||
elif flow_name:
|
||||
flow = session.query(Flow).filter(Flow.name == flow_name).first()
|
||||
flow = (
|
||||
session.query(Flow)
|
||||
.filter(Flow.name == flow_name)
|
||||
.filter(Flow.user_id == self.user_id)
|
||||
).first()
|
||||
else:
|
||||
raise ValueError("Either flow_name or flow_id must be provided")
|
||||
|
||||
|
|
|
|||
|
|
@ -77,7 +77,7 @@ class DirectoryReader:
|
|||
]
|
||||
filtered = [menu for menu in items if menu["components"]]
|
||||
logger.debug(
|
||||
f'Filtered components {"with errors" if with_errors else ""}: {filtered}'
|
||||
f'Filtered components {"with errors" if with_errors else ""}: {len(filtered)}'
|
||||
)
|
||||
return {"menu": filtered}
|
||||
src/backend/langflow/interface/custom/utils.py (new file, 10 lines)
@ -0,0 +1,10 @@
import re


def extract_inner_type(return_type: str) -> str:
    """
    Extracts the inner type from a type hint that is a list.
    """
    if match := re.match(r"list\[(.*)\]", return_type, re.IGNORECASE):
        return match[1]
    return return_type
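Aside (not part of the diff): the helper above is case-insensitive, so both typing-style and builtin-style list hints are unwrapped, while non-list hints pass through unchanged. Illustrative calls:

# Illustrative only; mirrors the function defined in the new utils.py above.
assert extract_inner_type("List[Document]") == "Document"
assert extract_inner_type("list[str]") == "str"
assert extract_inner_type("Document") == "Document"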
||||
|
|
@ -1,5 +1,6 @@
|
|||
import json
|
||||
from typing import Any, Callable, Dict, Sequence, Type
|
||||
import orjson
|
||||
from typing import Any, Callable, Dict, Sequence, Type, TYPE_CHECKING
|
||||
|
||||
from langchain.agents import agent as agent_module
|
||||
from langchain.agents.agent import AgentExecutor
|
||||
|
|
@ -35,8 +36,13 @@ from langchain.vectorstores.base import VectorStore
|
|||
from langchain.document_loaders.base import BaseLoader
|
||||
from langflow.utils.logger import logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow import CustomComponent
|
||||
|
||||
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
|
||||
|
||||
def instantiate_class(
|
||||
node_type: str, base_type: str, params: Dict, user_id=None
|
||||
) -> Any:
|
||||
"""Instantiate class from module type and key, and params"""
|
||||
params = convert_params_to_sets(params)
|
||||
params = convert_kwargs(params)
|
||||
|
|
@ -47,7 +53,9 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
|
|||
return custom_node(**params)
|
||||
logger.debug(f"Instantiating {node_type} of type {base_type}")
|
||||
class_object = import_by_type(_type=base_type, name=node_type)
|
||||
return instantiate_based_on_type(class_object, base_type, node_type, params)
|
||||
return instantiate_based_on_type(
|
||||
class_object, base_type, node_type, params, user_id=user_id
|
||||
)
|
||||
|
||||
|
||||
def convert_params_to_sets(params):
|
||||
|
|
@ -66,7 +74,7 @@ def convert_kwargs(params):
|
|||
for key in kwargs_keys:
|
||||
if isinstance(params[key], str):
|
||||
try:
|
||||
params[key] = json.loads(params[key])
|
||||
params[key] = orjson.loads(params[key])
|
||||
except json.JSONDecodeError:
|
||||
# if the string is not a valid json string, we will
|
||||
# remove the key from the params
|
||||
|
|
@ -74,7 +82,7 @@ def convert_kwargs(params):
|
|||
return params
|
||||
|
||||
|
||||
def instantiate_based_on_type(class_object, base_type, node_type, params):
|
||||
def instantiate_based_on_type(class_object, base_type, node_type, params, user_id):
|
||||
if base_type == "agents":
|
||||
return instantiate_agent(node_type, class_object, params)
|
||||
elif base_type == "prompts":
|
||||
|
|
@ -108,19 +116,19 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
|
|||
elif base_type == "memory":
|
||||
return instantiate_memory(node_type, class_object, params)
|
||||
elif base_type == "custom_components":
|
||||
return instantiate_custom_component(node_type, class_object, params)
|
||||
return instantiate_custom_component(node_type, class_object, params, user_id)
|
||||
elif base_type == "wrappers":
|
||||
return instantiate_wrapper(node_type, class_object, params)
|
||||
else:
|
||||
return class_object(**params)
|
||||
|
||||
|
||||
def instantiate_custom_component(node_type, class_object, params):
|
||||
def instantiate_custom_component(node_type, class_object, params, user_id):
|
||||
# we need to make a copy of the params because we will be
|
||||
# modifying it
|
||||
params_copy = params.copy()
|
||||
class_object = get_function_custom(params_copy.pop("code"))
|
||||
custom_component = class_object()
|
||||
class_object: "CustomComponent" = get_function_custom(params_copy.pop("code"))
|
||||
custom_component = class_object(user_id=user_id)
|
||||
built_object = custom_component.build(**params_copy)
|
||||
return built_object, {"repr": custom_component.custom_repr()}
|
||||
|
||||
|
|
@ -310,7 +318,7 @@ def instantiate_documentloader(class_object: Type[BaseLoader], params: Dict):
|
|||
metadata = params.pop("metadata", None)
|
||||
if metadata and isinstance(metadata, str):
|
||||
try:
|
||||
metadata = json.loads(metadata)
|
||||
metadata = orjson.loads(metadata)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise ValueError(
|
||||
"The metadata you provided is not a valid JSON string."
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
import contextlib
|
||||
import json
|
||||
from langflow.services.database.models.base import orjson_dumps
|
||||
import orjson
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from langchain.agents import ZeroShotAgent
|
||||
|
|
@ -95,9 +97,11 @@ def format_content(variable):
|
|||
|
||||
def try_to_load_json(content):
|
||||
with contextlib.suppress(json.JSONDecodeError):
|
||||
content = json.loads(content)
|
||||
content = orjson.loads(content)
|
||||
if isinstance(content, list):
|
||||
content = ",".join([str(item) for item in content])
|
||||
else:
|
||||
content = orjson_dumps(content)
|
||||
return content
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
import json
|
||||
from typing import Any, Callable, Dict, Type
|
||||
from langchain.vectorstores import (
|
||||
Pinecone,
|
||||
|
|
@ -12,6 +11,8 @@ from langchain.vectorstores import (
|
|||
|
||||
import os
|
||||
|
||||
import orjson
|
||||
|
||||
|
||||
def docs_in_params(params: dict) -> bool:
|
||||
"""Check if params has documents OR texts and one of them is not an empty list,
|
||||
|
|
@ -92,7 +93,7 @@ def initialize_weaviate(class_object: Type[Weaviate], params: dict):
|
|||
import weaviate # type: ignore
|
||||
|
||||
client_kwargs_json = params.get("client_kwargs", "{}")
|
||||
client_kwargs = json.loads(client_kwargs_json)
|
||||
client_kwargs = orjson.loads(client_kwargs_json)
|
||||
client_params = {
|
||||
"url": params.get("weaviate_url"),
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import inspect
|
|||
from typing import Dict, Union
|
||||
|
||||
from langchain.agents.tools import Tool
|
||||
from langflow.utils.logger import logger
|
||||
|
||||
|
||||
def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
|
||||
|
|
@ -57,7 +58,13 @@ def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
|
|||
|
||||
|
||||
def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
|
||||
tree = ast.parse(inspect.getsource(cls))
|
||||
try:
|
||||
tree = ast.parse(inspect.getsource(cls))
|
||||
except IndentationError:
|
||||
logger.error(
|
||||
f"Error parsing class {cls.__name__}. Make sure there are no tabs in the code."
|
||||
)
|
||||
return None
|
||||
|
||||
tool_params = {}
|
||||
|
||||
|
|
|
|||
|
|
@ -190,17 +190,16 @@ def build_frontend_node(custom_component: CustomComponent):
|
|||
|
||||
def update_attributes(frontend_node, template_config):
|
||||
"""Update the display name and description of a frontend node"""
|
||||
if "display_name" in template_config:
|
||||
frontend_node["display_name"] = template_config["display_name"]
|
||||
|
||||
if "description" in template_config:
|
||||
frontend_node["description"] = template_config["description"]
|
||||
|
||||
if "beta" in template_config:
|
||||
frontend_node["beta"] = template_config["beta"]
|
||||
|
||||
if "documentation" in template_config:
|
||||
frontend_node["documentation"] = template_config["documentation"]
|
||||
attributes = [
|
||||
"display_name",
|
||||
"description",
|
||||
"beta",
|
||||
"documentation",
|
||||
"output_types",
|
||||
]
|
||||
for attribute in attributes:
|
||||
if attribute in template_config:
|
||||
frontend_node[attribute] = template_config[attribute]
|
||||
|
||||
|
||||
def build_field_config(custom_component: CustomComponent):
|
||||
|
|
@ -338,7 +337,9 @@ def build_valid_menu(valid_components):
|
|||
valid_menu[menu_name] = {}
|
||||
|
||||
for component in menu_item["components"]:
|
||||
logger.debug(f"Building component: {component}")
|
||||
logger.debug(
|
||||
f"Building component: {component.get('name'), component.get('output_types')}"
|
||||
)
|
||||
try:
|
||||
component_name = component["name"]
|
||||
component_code = component["code"]
|
||||
|
|
|
|||
|
|
@ -6,11 +6,11 @@ from fastapi.responses import FileResponse
|
|||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from langflow.api import router
|
||||
from langflow.routers import login, users, health
|
||||
|
||||
|
||||
from langflow.interface.utils import setup_llm_caching
|
||||
from langflow.services.database.utils import initialize_database
|
||||
from langflow.services.manager import initialize_services
|
||||
from langflow.services.manager import initialize_services, teardown_services
|
||||
from langflow.utils.logger import configure
|
||||
|
||||
|
||||
|
|
@ -31,15 +31,16 @@ def create_app():
|
|||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
app.include_router(login.router)
|
||||
app.include_router(users.router)
|
||||
app.include_router(health.router)
|
||||
@app.get("/health")
|
||||
def health():
|
||||
return {"status": "ok"}
|
||||
|
||||
app.include_router(router)
|
||||
|
||||
app.on_event("startup")(initialize_services)
|
||||
app.on_event("startup")(initialize_database)
|
||||
app.on_event("startup")(setup_llm_caching)
|
||||
app.on_event("shutdown")(teardown_services)
|
||||
return app
|
||||
|
||||
|
||||
|
|
@ -89,7 +90,7 @@ def setup_app(
|
|||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
from langflow.utils.util import get_number_of_workers
|
||||
from langflow.__main__ import get_number_of_workers
|
||||
|
||||
configure()
|
||||
uvicorn.run(
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import json
|
||||
from pathlib import Path
|
||||
from langchain.schema import AgentAction
|
||||
import json
|
||||
from langflow.interface.run import (
|
||||
build_sorted_vertices,
|
||||
get_memory_key,
|
||||
|
|
|
|||
|
|
@ -1,8 +0,0 @@
from fastapi import APIRouter

router = APIRouter()


@router.get("/health")
def get_health():
    return {"status": "OK"}
src/backend/langflow/services/auth/factory.py (new file, 12 lines)
@ -0,0 +1,12 @@
from langflow.services.factory import ServiceFactory
from langflow.services.auth.service import AuthManager


class AuthManagerFactory(ServiceFactory):
    name = "auth_manager"

    def __init__(self):
        super().__init__(AuthManager)

    def create(self, settings_manager):
        return AuthManager(settings_manager)
src/backend/langflow/services/auth/service.py (new file, 12 lines)
@ -0,0 +1,12 @@
from langflow.services.base import Service
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langflow.services.settings.manager import SettingsManager


class AuthManager(Service):
    name = "auth_manager"

    def __init__(self, settings_manager: "SettingsManager"):
        self.settings_manager = settings_manager
298
src/backend/langflow/services/auth/utils.py
Normal file
|
|
@ -0,0 +1,298 @@
|
|||
from datetime import datetime, timedelta, timezone
|
||||
from fastapi import Depends, HTTPException, Security, status
|
||||
from fastapi.security import APIKeyHeader, APIKeyQuery, OAuth2PasswordBearer
|
||||
from jose import JWTError, jwt
|
||||
from typing import Annotated, Coroutine, Optional, Union
|
||||
from uuid import UUID
|
||||
from langflow.services.database.models.api_key.api_key import ApiKey
|
||||
from langflow.services.database.models.api_key.crud import check_key
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.database.models.user.crud import (
|
||||
get_user_by_id,
|
||||
get_user_by_username,
|
||||
update_user_last_login_at,
|
||||
)
|
||||
from langflow.services.utils import get_session, get_settings_manager
|
||||
from sqlmodel import Session
|
||||
|
||||
oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login")
|
||||
|
||||
API_KEY_NAME = "api-key"
|
||||
|
||||
api_key_query = APIKeyQuery(
|
||||
name=API_KEY_NAME, scheme_name="API key query", auto_error=False
|
||||
)
|
||||
api_key_header = APIKeyHeader(
|
||||
name=API_KEY_NAME, scheme_name="API key header", auto_error=False
|
||||
)
|
||||
|
||||
|
||||
# Source: https://github.com/mrtolkien/fastapi_simple_security/blob/master/fastapi_simple_security/security_api_key.py
|
||||
async def api_key_security(
|
||||
query_param: str = Security(api_key_query),
|
||||
header_param: str = Security(api_key_header),
|
||||
db: Session = Depends(get_session),
|
||||
) -> Optional[User]:
|
||||
settings_manager = get_settings_manager()
|
||||
result: Optional[Union[ApiKey, User]] = None
|
||||
if settings_manager.auth_settings.AUTO_LOGIN:
|
||||
# Get the first user
|
||||
if not settings_manager.auth_settings.FIRST_SUPERUSER:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Missing first superuser credentials",
|
||||
)
|
||||
|
||||
result = get_user_by_username(
|
||||
db, settings_manager.auth_settings.FIRST_SUPERUSER
|
||||
)
|
||||
|
||||
elif not query_param and not header_param:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="An API key must be passed as query or header",
|
||||
)
|
||||
|
||||
elif query_param:
|
||||
result = check_key(db, query_param)
|
||||
|
||||
else:
|
||||
result = check_key(db, header_param)
|
||||
|
||||
if not result:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="Invalid or missing API key",
|
||||
)
|
||||
if isinstance(result, ApiKey):
|
||||
return result.user
|
||||
elif isinstance(result, User):
|
||||
return result
|
||||
|
||||
|
||||
async def get_current_user(
|
||||
token: Annotated[str, Depends(oauth2_login)],
|
||||
db: Session = Depends(get_session),
|
||||
) -> User:
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
credentials_exception = HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Could not validate credentials",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
|
||||
if isinstance(token, Coroutine):
|
||||
token = await token
|
||||
|
||||
if settings_manager.auth_settings.SECRET_KEY is None:
|
||||
raise credentials_exception
|
||||
|
||||
try:
|
||||
payload = jwt.decode(
|
||||
token,
|
||||
settings_manager.auth_settings.SECRET_KEY,
|
||||
algorithms=[settings_manager.auth_settings.ALGORITHM],
|
||||
)
|
||||
user_id: UUID = payload.get("sub") # type: ignore
|
||||
token_type: str = payload.get("type") # type: ignore
|
||||
if expires := payload.get("exp", None):
|
||||
expires_datetime = datetime.fromtimestamp(expires, timezone.utc)
|
||||
# TypeError: can't compare offset-naive and offset-aware datetimes
|
||||
if datetime.now(timezone.utc) > expires_datetime:
|
||||
raise credentials_exception
|
||||
|
||||
if user_id is None or token_type:
|
||||
raise credentials_exception
|
||||
except JWTError as e:
|
||||
raise credentials_exception from e
|
||||
|
||||
user = get_user_by_id(db, user_id) # type: ignore
|
||||
if user is None or not user.is_active:
|
||||
raise credentials_exception
|
||||
return user
|
||||
|
||||
|
||||
def get_current_active_user(current_user: Annotated[User, Depends(get_current_user)]):
|
||||
if not current_user.is_active:
|
||||
raise HTTPException(status_code=400, detail="Inactive user")
|
||||
return current_user
|
||||
|
||||
|
||||
def get_current_active_superuser(
|
||||
current_user: Annotated[User, Depends(get_current_user)]
|
||||
) -> User:
|
||||
if not current_user.is_active:
|
||||
raise HTTPException(status_code=401, detail="Inactive user")
|
||||
if not current_user.is_superuser:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="The user doesn't have enough privileges"
|
||||
)
|
||||
return current_user
|
||||
|
||||
|
||||
def verify_password(plain_password, hashed_password):
|
||||
settings_manager = get_settings_manager()
|
||||
return settings_manager.auth_settings.pwd_context.verify(
|
||||
plain_password, hashed_password
|
||||
)
|
||||
|
||||
|
||||
def get_password_hash(password):
|
||||
settings_manager = get_settings_manager()
|
||||
return settings_manager.auth_settings.pwd_context.hash(password)
|
||||
|
||||
|
||||
def create_token(data: dict, expires_delta: timedelta):
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
to_encode = data.copy()
|
||||
expire = datetime.now(timezone.utc) + expires_delta
|
||||
to_encode["exp"] = expire
|
||||
|
||||
return jwt.encode(
|
||||
to_encode,
|
||||
settings_manager.auth_settings.SECRET_KEY,
|
||||
algorithm=settings_manager.auth_settings.ALGORITHM,
|
||||
)
|
||||
|
||||
|
||||
def create_super_user(
|
||||
username: str,
|
||||
password: str,
|
||||
db: Session = Depends(get_session),
|
||||
) -> User:
|
||||
super_user = get_user_by_username(db, username)
|
||||
|
||||
if not super_user:
|
||||
super_user = User(
|
||||
username=username,
|
||||
password=get_password_hash(password),
|
||||
is_superuser=True,
|
||||
is_active=True,
|
||||
last_login_at=None,
|
||||
)
|
||||
|
||||
db.add(super_user)
|
||||
db.commit()
|
||||
db.refresh(super_user)
|
||||
|
||||
return super_user
|
||||
|
||||
|
||||
def create_user_longterm_token(db: Session = Depends(get_session)) -> dict:
|
||||
settings_manager = get_settings_manager()
|
||||
username = settings_manager.auth_settings.FIRST_SUPERUSER
|
||||
password = settings_manager.auth_settings.FIRST_SUPERUSER_PASSWORD
|
||||
if not username or not password:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Missing first superuser credentials",
|
||||
)
|
||||
super_user = create_super_user(db=db, username=username, password=password)
|
||||
|
||||
access_token_expires_longterm = timedelta(days=365)
|
||||
access_token = create_token(
|
||||
data={"sub": str(super_user.id)},
|
||||
expires_delta=access_token_expires_longterm,
|
||||
)
|
||||
|
||||
# Update: last_login_at
|
||||
update_user_last_login_at(super_user.id, db)
|
||||
|
||||
return {
|
||||
"access_token": access_token,
|
||||
"refresh_token": None,
|
||||
"token_type": "bearer",
|
||||
}
|
||||
|
||||
|
||||
def create_user_api_key(user_id: UUID) -> dict:
|
||||
access_token = create_token(
|
||||
data={"sub": str(user_id), "role": "api_key"},
|
||||
expires_delta=timedelta(days=365 * 2),
|
||||
)
|
||||
|
||||
return {"api_key": access_token}
|
||||
|
||||
|
||||
def get_user_id_from_token(token: str) -> UUID:
|
||||
try:
|
||||
user_id = jwt.get_unverified_claims(token)["sub"]
|
||||
return UUID(user_id)
|
||||
except (KeyError, JWTError, ValueError):
|
||||
return UUID(int=0)
|
||||
|
||||
|
||||
def create_user_tokens(
|
||||
user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False
|
||||
) -> dict:
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
access_token_expires = timedelta(
|
||||
minutes=settings_manager.auth_settings.ACCESS_TOKEN_EXPIRE_MINUTES
|
||||
)
|
||||
access_token = create_token(
|
||||
data={"sub": str(user_id)},
|
||||
expires_delta=access_token_expires,
|
||||
)
|
||||
|
||||
refresh_token_expires = timedelta(
|
||||
minutes=settings_manager.auth_settings.REFRESH_TOKEN_EXPIRE_MINUTES
|
||||
)
|
||||
refresh_token = create_token(
|
||||
data={"sub": str(user_id), "type": "rf"},
|
||||
expires_delta=refresh_token_expires,
|
||||
)
|
||||
|
||||
# Update: last_login_at
|
||||
if update_last_login:
|
||||
update_user_last_login_at(user_id, db)
|
||||
|
||||
return {
|
||||
"access_token": access_token,
|
||||
"refresh_token": refresh_token,
|
||||
"token_type": "bearer",
|
||||
}
|
||||
|
||||
|
||||
def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)):
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
try:
|
||||
payload = jwt.decode(
|
||||
refresh_token,
|
||||
settings_manager.auth_settings.SECRET_KEY,
|
||||
algorithms=[settings_manager.auth_settings.ALGORITHM],
|
||||
)
|
||||
user_id: UUID = payload.get("sub") # type: ignore
|
||||
token_type: str = payload.get("type") # type: ignore
|
||||
|
||||
if user_id is None or token_type is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token"
|
||||
)
|
||||
|
||||
return create_user_tokens(user_id, db)
|
||||
|
||||
except JWTError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Invalid refresh token",
|
||||
) from e
|
||||
|
||||
|
||||
def authenticate_user(
|
||||
username: str, password: str, db: Session = Depends(get_session)
|
||||
) -> Optional[User]:
|
||||
user = get_user_by_username(db, username)
|
||||
|
||||
if not user:
|
||||
return None
|
||||
|
||||
if not user.is_active:
|
||||
if not user.last_login_at:
|
||||
raise HTTPException(status_code=400, detail="Waiting for approval")
|
||||
raise HTTPException(status_code=400, detail="Inactive user")
|
||||
|
||||
return user if verify_password(password, user.password) else None
|
||||
|
|
@ -1,2 +1,8 @@
class Service:
from abc import ABC


class Service(ABC):
    name: str

    def teardown(self):
        pass
|
|
|
|||
5
src/backend/langflow/services/cache/utils.py
vendored
|
|
@ -2,13 +2,13 @@ import base64
|
|||
import contextlib
|
||||
import functools
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
from collections import OrderedDict
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict
|
||||
from appdirs import user_cache_dir
|
||||
from langflow.services.database.models.base import orjson_dumps
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.cache.base import BaseCacheManager
|
||||
|
|
@ -93,7 +93,8 @@ def clear_old_cache_files(max_cache_size: int = 3):
|
|||
def compute_dict_hash(graph_data):
|
||||
graph_data = filter_json(graph_data)
|
||||
|
||||
cleaned_graph_json = json.dumps(graph_data, sort_keys=True)
|
||||
cleaned_graph_json = orjson_dumps(graph_data, sort_keys=True)
|
||||
|
||||
return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -9,10 +9,10 @@ from langflow.utils.logger import logger
|
|||
|
||||
from .cache import cache_manager
|
||||
import asyncio
|
||||
import json
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from langflow.services import service_manager, ServiceType
|
||||
import orjson
|
||||
|
||||
|
||||
class ChatHistory(Subject):
|
||||
|
|
@ -90,7 +90,6 @@ class ChatManager(Service):
|
|||
)
|
||||
|
||||
async def connect(self, client_id: str, websocket: WebSocket):
|
||||
await websocket.accept()
|
||||
self.active_connections[client_id] = websocket
|
||||
|
||||
def disconnect(self, client_id: str):
|
||||
|
|
@ -195,8 +194,8 @@ class ChatManager(Service):
|
|||
while True:
|
||||
json_payload = await websocket.receive_json()
|
||||
try:
|
||||
payload = json.loads(json_payload)
|
||||
except TypeError:
|
||||
payload = orjson.loads(json_payload)
|
||||
except Exception:
|
||||
payload = json_payload
|
||||
if "clear_history" in payload:
|
||||
self.chat_history.history[client_id] = []
|
||||
|
|
|
|||
|
|
@ -1,7 +1,11 @@
|
|||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from langflow.services.base import Service
|
||||
from langflow.services.database.models.user.crud import get_user_by_username
|
||||
from langflow.services.database.utils import Result, TableResults
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from sqlalchemy import inspect
|
||||
import sqlalchemy as sa
|
||||
from sqlmodel import SQLModel, Session, create_engine
|
||||
from langflow.utils.logger import logger
|
||||
from alembic.config import Config
|
||||
|
|
@ -54,6 +58,41 @@ class DatabaseManager(Service):
|
|||
with Session(self.engine) as session:
|
||||
yield session
|
||||
|
||||
def check_schema_health(self) -> bool:
|
||||
inspector = inspect(self.engine)
|
||||
|
||||
model_mapping = {
|
||||
"flow": models.Flow,
|
||||
"user": models.User,
|
||||
"apikey": models.ApiKey,
|
||||
# Add other SQLModel classes here
|
||||
}
|
||||
|
||||
# To account for tables that existed in older versions
|
||||
legacy_tables = ["flowstyle"]
|
||||
|
||||
for table, model in model_mapping.items():
|
||||
expected_columns = list(model.__fields__.keys())
|
||||
|
||||
try:
|
||||
available_columns = [
|
||||
col["name"] for col in inspector.get_columns(table)
|
||||
]
|
||||
except sa.exc.NoSuchTableError:
|
||||
logger.error(f"Missing table: {table}")
|
||||
return False
|
||||
|
||||
for column in expected_columns:
|
||||
if column not in available_columns:
|
||||
logger.error(f"Missing column: {column} in table {table}")
|
||||
return False
|
||||
|
||||
for table in legacy_tables:
|
||||
if table in inspector.get_table_names():
|
||||
logger.warn(f"Legacy table exists: {table}")
|
||||
|
||||
return True
|
||||
|
||||
def run_migrations(self):
|
||||
logger.info(
|
||||
f"Running DB migrations in {self.script_location} on {self.database_url}"
|
||||
|
|
@ -63,6 +102,40 @@ class DatabaseManager(Service):
|
|||
alembic_cfg.set_main_option("sqlalchemy.url", self.database_url)
|
||||
command.upgrade(alembic_cfg, "head")
|
||||
|
||||
def run_migrations_test(self):
|
||||
# This method is used for testing purposes only
|
||||
# We will check that all models are in the database
|
||||
# and that the database is up to date with all columns
|
||||
sql_models = [models.Flow, models.User, models.ApiKey]
|
||||
results = []
|
||||
for sql_model in sql_models:
|
||||
results.append(
|
||||
TableResults(sql_model.__tablename__, self.check_table(sql_model))
|
||||
)
|
||||
return results
|
||||
|
||||
def check_table(self, model):
|
||||
results = []
|
||||
inspector = inspect(self.engine)
|
||||
table_name = model.__tablename__
|
||||
expected_columns = list(model.__fields__.keys())
|
||||
try:
|
||||
available_columns = [
|
||||
col["name"] for col in inspector.get_columns(table_name)
|
||||
]
|
||||
results.append(Result(name=table_name, type="table", success=True))
|
||||
except sa.exc.NoSuchTableError:
|
||||
logger.error(f"Missing table: {table_name}")
|
||||
results.append(Result(name=table_name, type="table", success=False))
|
||||
|
||||
for column in expected_columns:
|
||||
if column not in available_columns:
|
||||
logger.error(f"Missing column: {column} in table {table_name}")
|
||||
results.append(Result(name=column, type="column", success=False))
|
||||
else:
|
||||
results.append(Result(name=column, type="column", success=True))
|
||||
return results
|
||||
|
||||
def create_db_and_tables(self):
|
||||
logger.debug("Creating database and tables")
|
||||
try:
|
||||
|
|
@ -76,9 +149,34 @@ class DatabaseManager(Service):
|
|||
from sqlalchemy import inspect
|
||||
|
||||
inspector = inspect(self.engine)
|
||||
if "flow" not in inspector.get_table_names():
|
||||
logger.error("Something went wrong creating the database and tables.")
|
||||
logger.error("Please check your database settings.")
|
||||
raise RuntimeError("Something went wrong creating the database and tables.")
|
||||
else:
|
||||
logger.debug("Database and tables created successfully")
|
||||
current_tables = ["flow", "user", "apikey"]
|
||||
table_names = inspector.get_table_names()
|
||||
for table in current_tables:
|
||||
if table not in table_names:
|
||||
logger.error("Something went wrong creating the database and tables.")
|
||||
logger.error("Please check your database settings.")
|
||||
raise RuntimeError(
|
||||
"Something went wrong creating the database and tables."
|
||||
)
|
||||
|
||||
logger.debug("Database and tables created successfully")
|
||||
|
||||
def teardown(self):
|
||||
logger.debug("Tearing down database")
|
||||
try:
|
||||
settings_manager = get_settings_manager()
|
||||
# remove the default superuser if auto_login is enabled
|
||||
# using the FIRST_SUPERUSER to get the user
|
||||
if settings_manager.auth_settings.AUTO_LOGIN:
|
||||
logger.debug("Removing default superuser")
|
||||
username = settings_manager.auth_settings.FIRST_SUPERUSER
|
||||
with Session(self.engine) as session:
|
||||
user = get_user_by_username(session, username)
|
||||
session.delete(user)
|
||||
session.commit()
|
||||
logger.debug("Default superuser removed")
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error tearing down database: {exc}")
|
||||
|
||||
self.engine.dispose()
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
from .flow import Flow
|
||||
from .user import User
|
||||
from .api_key import ApiKey
|
||||
|
||||
|
||||
__all__ = ["Flow"]
|
||||
__all__ = ["Flow", "User", "ApiKey"]
|
||||
|
|
|
|||
|
|
@ -0,0 +1,3 @@
|
|||
from .api_key import ApiKey, ApiKeyCreate, UnmaskedApiKeyRead, ApiKeyRead
|
||||
|
||||
__all__ = ["ApiKey", "ApiKeyCreate", "UnmaskedApiKeyRead", "ApiKeyRead"]
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
from pydantic import validator
|
||||
from sqlmodel import Field, Relationship
|
||||
from uuid import UUID, uuid4
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
from datetime import datetime
|
||||
from langflow.services.database.models.base import SQLModelSerializable
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.models.user import User
|
||||
|
||||
|
||||
class ApiKeyBase(SQLModelSerializable):
|
||||
name: Optional[str] = Field(index=True)
|
||||
created_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
last_used_at: Optional[datetime] = Field(default=None)
|
||||
total_uses: int = Field(default=0)
|
||||
is_active: bool = Field(default=True)
|
||||
|
||||
|
||||
class ApiKey(ApiKeyBase, table=True):
|
||||
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
|
||||
|
||||
api_key: str = Field(index=True, unique=True)
|
||||
# User relationship
|
||||
user_id: UUID = Field(index=True, foreign_key="user.id")
|
||||
user: "User" = Relationship(back_populates="api_keys")
|
||||
|
||||
|
||||
class ApiKeyCreate(ApiKeyBase):
|
||||
api_key: Optional[str] = None
|
||||
user_id: Optional[UUID] = None
|
||||
|
||||
|
||||
class UnmaskedApiKeyRead(ApiKeyBase):
|
||||
id: UUID
|
||||
api_key: str = Field()
|
||||
user_id: UUID = Field()
|
||||
|
||||
|
||||
class ApiKeyRead(ApiKeyBase):
|
||||
id: UUID
|
||||
api_key: str = Field()
|
||||
user_id: UUID = Field()
|
||||
|
||||
@validator("api_key", always=True)
|
||||
def mask_api_key(cls, v):
|
||||
# This validator will always run, and will mask the API key
|
||||
return f"{v[:8]}{'*' * (len(v) - 8)}"
|
||||
|
|
@ -0,0 +1,71 @@
|
|||
import datetime
|
||||
import secrets
|
||||
import threading
|
||||
from uuid import UUID
|
||||
from typing import List, Optional
|
||||
from sqlmodel import Session, select
|
||||
from langflow.services.database.models.api_key import (
|
||||
ApiKey,
|
||||
ApiKeyCreate,
|
||||
UnmaskedApiKeyRead,
|
||||
ApiKeyRead,
|
||||
)
|
||||
|
||||
|
||||
def get_api_keys(session: Session, user_id: UUID) -> List[ApiKeyRead]:
|
||||
query = select(ApiKey).where(ApiKey.user_id == user_id)
|
||||
api_keys = session.exec(query).all()
|
||||
return [ApiKeyRead.from_orm(api_key) for api_key in api_keys]
|
||||
|
||||
|
||||
def create_api_key(
|
||||
session: Session, api_key_create: ApiKeyCreate, user_id: UUID
|
||||
) -> UnmaskedApiKeyRead:
|
||||
# Generate a random API key with 32 bytes of randomness
|
||||
generated_api_key = f"lf-{secrets.token_urlsafe(32)}"
|
||||
|
||||
api_key = ApiKey(
|
||||
api_key=generated_api_key,
|
||||
name=api_key_create.name,
|
||||
user_id=user_id,
|
||||
)
|
||||
|
||||
session.add(api_key)
|
||||
session.commit()
|
||||
session.refresh(api_key)
|
||||
unmasked = UnmaskedApiKeyRead.from_orm(api_key)
|
||||
unmasked.api_key = generated_api_key
|
||||
return unmasked
|
||||
|
||||
|
||||
def delete_api_key(session: Session, api_key_id: UUID) -> None:
|
||||
api_key = session.get(ApiKey, api_key_id)
|
||||
if api_key is None:
|
||||
raise ValueError("API Key not found")
|
||||
session.delete(api_key)
|
||||
session.commit()
|
||||
|
||||
|
||||
def check_key(session: Session, api_key: str) -> Optional[ApiKey]:
|
||||
"""Check if the API key is valid."""
|
||||
query = select(ApiKey).where(ApiKey.api_key == api_key)
|
||||
api_key_object: Optional[ApiKey] = session.exec(query).first()
|
||||
if api_key_object is not None:
|
||||
threading.Thread(
|
||||
target=update_total_uses,
|
||||
args=(
|
||||
session,
|
||||
api_key_object,
|
||||
),
|
||||
).start()
|
||||
return api_key_object
|
||||
|
||||
|
||||
def update_total_uses(session, api_key: ApiKey):
|
||||
"""Update the total uses and last used at."""
|
||||
api_key.total_uses += 1
|
||||
api_key.last_used_at = datetime.datetime.now(datetime.timezone.utc)
|
||||
session.add(api_key)
|
||||
session.commit()
|
||||
session.refresh(api_key)
|
||||
return api_key
|
||||
|
|
@ -2,9 +2,20 @@ from sqlmodel import SQLModel
import orjson


def orjson_dumps(v, *, default):
    # orjson.dumps returns bytes, to match standard json.dumps we need to decode
    return orjson.dumps(v, default=default).decode()
def orjson_dumps(v, *, default=None, sort_keys=False, indent_2=True):
    option = orjson.OPT_SORT_KEYS if sort_keys else None
    if indent_2:
        # orjson.dumps returns bytes, to match standard json.dumps we need to decode
        # option
        # To modify how data is serialized, specify option. Each option is an integer constant in orjson.
        # To specify multiple options, mask them together, e.g., option=orjson.OPT_STRICT_INTEGER | orjson.OPT_NAIVE_UTC
        if option is None:
            option = orjson.OPT_INDENT_2
        else:
            option |= orjson.OPT_INDENT_2
    if default is None:
        return orjson.dumps(v, option=option).decode()
    return orjson.dumps(v, default=default, option=option).decode()


class SQLModelSerializable(SQLModel):
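Aside (not part of the diff): with the reworked signature above, sort_keys folds in orjson.OPT_SORT_KEYS and indent_2 controls orjson.OPT_INDENT_2. Example values for illustration:

# Illustrative only; assumes orjson is installed, as the module above requires.
orjson_dumps({"b": 1, "a": 2}, sort_keys=True, indent_2=False)  # -> '{"a":2,"b":1}'
orjson_dumps({"a": 1})                                          # -> '{\n  "a": 1\n}' (indent_2 defaults to True)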
|
|
|
|||
|
|
@ -0,0 +1,3 @@
|
|||
from .component import Component, ComponentModel
|
||||
|
||||
__all__ = ["Component", "ComponentModel"]
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
from .flow import Flow, FlowCreate, FlowRead, FlowUpdate
|
||||
|
||||
__all__ = ["Flow", "FlowCreate", "FlowRead", "FlowUpdate"]
|
||||
|
|
@ -2,11 +2,12 @@
|
|||
|
||||
from langflow.services.database.models.base import SQLModelSerializable
|
||||
from pydantic import validator
|
||||
from sqlmodel import Field, JSON, Column
|
||||
from sqlmodel import Field, JSON, Column, Relationship
|
||||
from uuid import UUID, uuid4
|
||||
from typing import Dict, Optional
|
||||
from typing import Dict, Optional, TYPE_CHECKING
|
||||
|
||||
# if TYPE_CHECKING:
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.models.user import User
|
||||
|
||||
|
||||
class FlowBase(SQLModelSerializable):
|
||||
|
|
@ -16,7 +17,6 @@ class FlowBase(SQLModelSerializable):
|
|||
|
||||
@validator("data")
|
||||
def validate_json(v):
|
||||
# dict_keys(['description', 'name', 'id', 'data'])
|
||||
if not v:
|
||||
return v
|
||||
if not isinstance(v, dict):
|
||||
|
|
@ -34,14 +34,17 @@ class FlowBase(SQLModelSerializable):
|
|||
class Flow(FlowBase, table=True):
|
||||
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
|
||||
data: Optional[Dict] = Field(default=None, sa_column=Column(JSON))
|
||||
user_id: UUID = Field(index=True, foreign_key="user.id")
|
||||
user: "User" = Relationship(back_populates="flows")
|
||||
|
||||
|
||||
class FlowCreate(FlowBase):
|
||||
pass
|
||||
user_id: Optional[UUID] = None
|
||||
|
||||
|
||||
class FlowRead(FlowBase):
|
||||
id: UUID
|
||||
user_id: UUID = Field()
|
||||
|
||||
|
||||
class FlowUpdate(SQLModelSerializable):
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
from .user import User, UserCreate, UserRead, UserUpdate
|
||||
|
||||
__all__ = [
|
||||
"User",
|
||||
"UserCreate",
|
||||
"UserRead",
|
||||
"UserUpdate",
|
||||
]
|
||||
53  src/backend/langflow/services/database/models/user/crud.py  Normal file

@ -0,0 +1,53 @@

from datetime import datetime, timezone
from typing import Union
from uuid import UUID
from fastapi import Depends, HTTPException
from langflow.services.database.models.user.user import User, UserUpdate
from langflow.services.utils import get_session
from sqlalchemy.exc import IntegrityError
from sqlmodel import Session


from sqlalchemy.orm.attributes import flag_modified


def get_user_by_username(db: Session, username: str) -> Union[User, None]:
    return db.query(User).filter(User.username == username).first()


def get_user_by_id(db: Session, id: UUID) -> Union[User, None]:
    return db.query(User).filter(User.id == id).first()


def update_user(
    user_id: UUID, user: UserUpdate, db: Session = Depends(get_session)
) -> User:
    user_db = get_user_by_id(db, user_id)
    if not user_db:
        raise HTTPException(status_code=404, detail="User not found")

    user_db_by_username = get_user_by_username(db, user.username)  # type: ignore
    if user_db_by_username and user_db_by_username.id != user_id:
        raise HTTPException(status_code=409, detail="Username already exists")

    user_data = user.dict(exclude_unset=True)
    for attr, value in user_data.items():
        if hasattr(user_db, attr) and value is not None:
            setattr(user_db, attr, value)

    user_db.updated_at = datetime.now(timezone.utc)
    flag_modified(user_db, "updated_at")

    try:
        db.commit()
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(status_code=400, detail=str(e)) from e

    return user_db


def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)):
    user_data = UserUpdate(last_login_at=datetime.now(timezone.utc))  # type: ignore

    return update_user(user_id, user_data, db)

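For context, a minimal sketch of calling these helpers outside of FastAPI's dependency injection (the engine URL and the field being updated are illustrative, not part of this change):

from uuid import UUID
from sqlmodel import Session, create_engine

engine = create_engine("sqlite:///langflow.db")  # hypothetical local database

def deactivate_user(user_id: UUID) -> None:
    with Session(engine) as db:
        # Only fields explicitly set on UserUpdate are applied; update_user raises
        # a 404 HTTPException if the id does not exist.
        updated = update_user(user_id, UserUpdate(is_active=False), db)
        print(updated.updated_at)
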
46  src/backend/langflow/services/database/models/user/user.py  Normal file

@ -0,0 +1,46 @@

from langflow.services.database.models.base import SQLModel, SQLModelSerializable
from sqlmodel import Field, Relationship


from datetime import datetime
from typing import Optional, TYPE_CHECKING
from uuid import UUID, uuid4

if TYPE_CHECKING:
    from langflow.services.database.models.api_key import ApiKey
    from langflow.services.database.models.flow import Flow


class User(SQLModelSerializable, table=True):
    id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
    username: str = Field(index=True, unique=True)
    password: str = Field()
    is_active: bool = Field(default=False)
    is_superuser: bool = Field(default=False)
    create_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)
    last_login_at: Optional[datetime] = Field()
    api_keys: list["ApiKey"] = Relationship(back_populates="user")
    flows: list["Flow"] = Relationship(back_populates="user")


class UserCreate(SQLModel):
    username: str = Field()
    password: str = Field()


class UserRead(SQLModel):
    id: UUID = Field(default_factory=uuid4)
    username: str = Field()
    is_active: bool = Field()
    is_superuser: bool = Field()
    create_at: datetime = Field()
    updated_at: datetime = Field()
    last_login_at: Optional[datetime] = Field()


class UserUpdate(SQLModel):
    username: Optional[str] = Field()
    is_active: Optional[bool] = Field()
    is_superuser: Optional[bool] = Field()
    last_login_at: Optional[datetime] = Field()

@ -1,3 +1,4 @@

from dataclasses import dataclass
from typing import TYPE_CHECKING
from langflow.utils.logger import logger
from contextlib import contextmanager

@ -13,6 +14,11 @@ def initialize_database():

    from langflow.services import service_manager, ServiceType

    database_manager = service_manager.get(ServiceType.DATABASE_MANAGER)
    try:
        database_manager.check_schema_health()
    except Exception as exc:
        logger.error(f"Error checking schema health: {exc}")
        raise RuntimeError("Error checking schema health") from exc
    try:
        database_manager.run_migrations()
    except CommandError as exc:

@ -28,8 +34,11 @@ def initialize_database():

            session.execute("DROP TABLE alembic_version")
        database_manager.run_migrations()
    except Exception as exc:
        logger.error(f"Error running migrations: {exc}")
        raise RuntimeError("Error running migrations") from exc
        # if the exception involves tables already existing
        # we can ignore it
        if "already exists" not in str(exc):
            logger.error(f"Error running migrations: {exc}")
            raise RuntimeError("Error running migrations") from exc
    database_manager.create_db_and_tables()
    logger.debug("Database initialized")

@ -45,3 +54,16 @@ def session_getter(db_manager: "DatabaseManager"):

        raise
    finally:
        session.close()


@dataclass
class Result:
    name: str
    type: str
    success: bool


@dataclass
class TableResults:
    table_name: str
    results: list[Result]

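A rough sketch of how `session_getter` and the new result dataclasses could be combined; the table names and the SELECT probe are assumptions, not part of this diff:

def probe_tables(db_manager: "DatabaseManager") -> list[TableResults]:
    reports = []
    with session_getter(db_manager) as session:
        for table_name in ["flow", "user"]:  # assumed table names
            try:
                session.execute(f"SELECT 1 FROM {table_name} LIMIT 1")
                result = Result(name=table_name, type="table", success=True)
            except Exception:
                result = Result(name=table_name, type="table", success=False)
            reports.append(TableResults(table_name=table_name, results=[result]))
    return reports
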
@ -1,5 +1,6 @@

from langflow.services.schema import ServiceType
from typing import TYPE_CHECKING, List
from typing import TYPE_CHECKING, List, Optional
from langflow.utils.logger import logger

if TYPE_CHECKING:
    from langflow.services.factory import ServiceFactory

@ -16,7 +17,9 @@ class ServiceManager:

        self.dependencies = {}

    def register_factory(
        self, service_factory: "ServiceFactory", dependencies: List[ServiceType] = None
        self,
        service_factory: "ServiceFactory",
        dependencies: Optional[List[ServiceType]] = None,
    ):
        """
        Registers a new factory with dependencies.

@ -40,6 +43,7 @@ class ServiceManager:

        """
        Create a new service given its name, handling dependencies.
        """
        logger.debug(f"Create service {service_name}")
        self._validate_service_creation(service_name)

        # Create dependencies first

@ -72,9 +76,21 @@ class ServiceManager:

        Update a service by its name.
        """
        if service_name in self.services:
            logger.debug(f"Update service {service_name}")
            self.services.pop(service_name, None)
            self.get(service_name)

    def teardown(self):
        """
        Teardown all the services.
        """
        for service in self.services.values():
            logger.debug(f"Teardown service {service.name}")
            service.teardown()
        self.services = {}
        self.factories = {}
        self.dependencies = {}


service_manager = ServiceManager()

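With the dependency bookkeeping above, consumers only ever call `get` and `teardown` on the module-level `service_manager`; a hedged sketch of that flow:

from langflow.services import service_manager, ServiceType

# First access builds the service and, recursively, anything it depends on.
settings_manager = service_manager.get(ServiceType.SETTINGS_MANAGER)
print(settings_manager.settings.COMPONENTS_PATH)

# Drop every cached service, e.g. between tests.
service_manager.teardown()
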
@ -88,6 +104,7 @@ def initialize_services():

    from langflow.services.chat import factory as chat_factory
    from langflow.services.settings import factory as settings_factory
    from langflow.services.session import factory as session_manager_factory
    from langflow.services.auth import factory as auth_factory

    service_manager.register_factory(settings_factory.SettingsManagerFactory())
    service_manager.register_factory(

@ -97,6 +114,11 @@ def initialize_services():

    service_manager.register_factory(
        cache_factory.CacheManagerFactory(), dependencies=[ServiceType.SETTINGS_MANAGER]
    )

    service_manager.register_factory(
        auth_factory.AuthManagerFactory(), dependencies=[ServiceType.SETTINGS_MANAGER]
    )

    service_manager.register_factory(chat_factory.ChatManagerFactory())
    service_manager.register_factory(
        session_manager_factory.SessionManagerFactory(),

@ -108,6 +130,11 @@ def initialize_services():

    # Test database connection
    service_manager.get(ServiceType.DATABASE_MANAGER)

    # Test cache connection
    service_manager.get(ServiceType.CACHE_MANAGER)
    # Test database connection
    service_manager.get(ServiceType.DATABASE_MANAGER)


def initialize_settings_manager():
    """

@ -122,7 +149,7 @@ def initialize_session_manager():

    """
    Initialize the session manager.
    """
    from langflow.services.session import factory as session_manager_factory
    from langflow.services.session import factory as session_manager_factory  # type: ignore
    from langflow.services.cache import factory as cache_factory

    initialize_settings_manager()

@ -135,3 +162,10 @@ def initialize_session_manager():

        session_manager_factory.SessionManagerFactory(),
        dependencies=[ServiceType.CACHE_MANAGER],
    )


def teardown_services():
    """
    Teardown all the services.
    """
    service_manager.teardown()

@ -7,6 +7,7 @@ class ServiceType(str, Enum):

    registered with the service manager.
    """

    AUTH_MANAGER = "auth_manager"
    CACHE_MANAGER = "cache_manager"
    SETTINGS_MANAGER = "settings_manager"
    DATABASE_MANAGER = "database_manager"

72  src/backend/langflow/services/settings/auth.py  Normal file

@ -0,0 +1,72 @@

from pathlib import Path
from typing import Optional
import secrets
from langflow.services.settings.utils import read_secret_from_file, write_secret_to_file

from pydantic import BaseSettings, Field, validator
from passlib.context import CryptContext
from langflow.utils.logger import logger


class AuthSettings(BaseSettings):
    # Login settings
    CONFIG_DIR: str
    SECRET_KEY: str = Field(
        default="",
        description="Secret key for JWT. If not provided, a random one will be generated.",
        env="LANGFLOW_SECRET_KEY",
        allow_mutation=False,
    )
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60
    REFRESH_TOKEN_EXPIRE_MINUTES: int = 70

    # API Key to execute /process endpoint
    API_KEY_SECRET_KEY: Optional[
        str
    ] = "b82818e0ad4ff76615c5721ee21004b07d84cd9b87ba4d9cb42374da134b841a"
    API_KEY_ALGORITHM: str = "HS256"
    API_V1_STR: str = "/api/v1"

    # If AUTO_LOGIN = True
    # > The application does not request login and logs in automatically as a super user.
    AUTO_LOGIN: bool = False
    FIRST_SUPERUSER: str = "langflow"
    FIRST_SUPERUSER_PASSWORD: str = "langflow"

    pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

    class Config:
        validate_assignment = True
        extra = "ignore"
        env_prefix = "LANGFLOW_"

    @validator("SECRET_KEY", pre=True)
    def get_secret_key(cls, value, values):
        config_dir = values.get("CONFIG_DIR")

        if not config_dir:
            logger.debug("No CONFIG_DIR provided, not saving secret key")
            return value or secrets.token_urlsafe(32)

        secret_key_path = Path(config_dir) / "secret_key"

        if value:
            logger.debug("Secret key provided")
            write_secret_to_file(secret_key_path, value)
        else:
            logger.debug("No secret key provided, generating a random one")

            if secret_key_path.exists():
                value = read_secret_from_file(secret_key_path)
                logger.debug("Loaded secret key")
                if not value:
                    value = secrets.token_urlsafe(32)
                    write_secret_to_file(secret_key_path, value)
                    logger.debug("Saved secret key")
            else:
                value = secrets.token_urlsafe(32)
                write_secret_to_file(secret_key_path, value)
                logger.debug("Saved secret key")

        return value

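A hedged sketch of how the SECRET_KEY resolution above behaves with a writable config directory and no LANGFLOW_SECRET_KEY in the environment (the temporary directory is only for illustration):

import tempfile

config_dir = tempfile.mkdtemp()
auth_settings = AuthSettings(CONFIG_DIR=config_dir)

# A key was generated and written to <config_dir>/secret_key,
# so a second instantiation reads the same value back.
assert AuthSettings(CONFIG_DIR=config_dir).SECRET_KEY == auth_settings.SECRET_KEY
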
@ -1,8 +1,8 @@

import contextlib
import json
import orjson
import os
from shutil import copy2
import secrets
from typing import Optional, List
from pathlib import Path

@ -10,7 +10,8 @@ import yaml

from pydantic import BaseSettings, root_validator, validator
from langflow.utils.logger import logger

BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
# BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
BASE_COMPONENTS_PATH = str(Path(__file__).parent.parent.parent / "components")


class Settings(BaseSettings):

@ -40,23 +41,6 @@ class Settings(BaseSettings):

    REMOVE_API_KEYS: bool = False
    COMPONENTS_PATH: List[str] = []

    # cache settings
    # if CACHE_TYPE is set to "redis", the following settings are used
    CACHE_TYPE: str = "redis"

    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379
    REDIS_DB: int = 0
    REDIS_CACHE_EXPIRE: int = 3600
    # Login settings
    SECRET_KEY: str = secrets.token_hex(32)
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60
    REFRESH_TOKEN_EXPIRE_MINUTES: int = 70
    # If AUTO_LOGIN = True
    # > The application does not request login and logs in automatically as a super user.
    AUTO_LOGIN: bool = True

    @validator("CONFIG_DIR", pre=True, allow_reuse=True)
    def set_langflow_dir(cls, value):
        if not value:

@ -192,7 +176,7 @@ class Settings(BaseSettings):

        if isinstance(getattr(self, key), list):
            # value might be a '[something]' string
            with contextlib.suppress(json.decoder.JSONDecodeError):
                value = json.loads(str(value))
                value = orjson.loads(str(value))
            if isinstance(value, list):
                for item in value:
                    if isinstance(item, Path):

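Worth noting for the swap above: as long as `json` remains importable in that module, the existing `contextlib.suppress(json.decoder.JSONDecodeError)` block should still catch parse failures, because `orjson.JSONDecodeError` is documented as a subclass of `json.JSONDecodeError`. A small hedged check:

import json
import orjson

try:
    orjson.loads("not json")
except json.JSONDecodeError:
    # orjson.JSONDecodeError subclasses json.JSONDecodeError (and ValueError),
    # so the pre-existing suppress() pattern keeps working after the swap.
    print("still caught")
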
@ -1,4 +1,5 @@

from langflow.services.base import Service
from langflow.services.settings.auth import AuthSettings
from langflow.services.settings.base import Settings
from langflow.utils.logger import logger
import os

@ -8,9 +9,10 @@ import yaml

class SettingsManager(Service):
    name = "settings_manager"

    def __init__(self, settings: Settings):
    def __init__(self, settings: Settings, auth_settings: AuthSettings):
        super().__init__()
        self.settings = settings
        self.auth_settings = auth_settings

    @classmethod
    def load_settings_from_yaml(cls, file_path: str) -> "SettingsManager":

@ -33,4 +35,10 @@ class SettingsManager(Service):

        )

        settings = Settings(**settings_dict)
        return cls(settings)
        if not settings.CONFIG_DIR:
            raise ValueError("CONFIG_DIR must be set in settings")

        auth_settings = AuthSettings(
            CONFIG_DIR=settings.CONFIG_DIR,
        )
        return cls(settings, auth_settings)

47  src/backend/langflow/services/settings/utils.py  Normal file

@ -0,0 +1,47 @@

import os
from pathlib import Path
import platform

from langflow.utils.logger import logger


def set_secure_permissions(file_path):
    if platform.system() in ["Linux", "Darwin"]:  # Unix/Linux/Mac
        os.chmod(file_path, 0o600)
    elif platform.system() == "Windows":
        import win32api
        import win32con
        import win32security

        user, domain, _ = win32security.LookupAccountName("", win32api.GetUserName())
        sd = win32security.GetFileSecurity(
            file_path, win32security.DACL_SECURITY_INFORMATION
        )
        dacl = win32security.ACL()

        # Set the new DACL for the file: read and write access for the owner, no access for everyone else
        dacl.AddAccessAllowedAce(
            win32security.ACL_REVISION,
            win32con.GENERIC_READ | win32con.GENERIC_WRITE,
            user,
        )
        sd.SetSecurityDescriptorDacl(1, dacl, 0)
        win32security.SetFileSecurity(
            file_path, win32security.DACL_SECURITY_INFORMATION, sd
        )
    else:
        print("Unsupported OS")


def write_secret_to_file(path: Path, value: str) -> None:
    with path.open("wb") as f:
        f.write(value.encode("utf-8"))
    try:
        set_secure_permissions(path)
    except Exception:
        logger.error("Failed to set secure permissions on secret key")


def read_secret_from_file(path: Path) -> str:
    with path.open("r") as f:
        return f.read()

@ -1,7 +1,9 @@

from langflow.services import ServiceType, service_manager
from typing import TYPE_CHECKING


if TYPE_CHECKING:
    from langflow.services.database.manager import DatabaseManager
    from langflow.services.settings.manager import SettingsManager

@ -9,7 +11,7 @@ def get_settings_manager() -> "SettingsManager":

    return service_manager.get(ServiceType.SETTINGS_MANAGER)


def get_db_manager():
def get_db_manager() -> "DatabaseManager":
    return service_manager.get(ServiceType.DATABASE_MANAGER)

@ -1,5 +1,5 @@

import json
from typing import Optional
from langflow.services.database.models.base import orjson_dumps

from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode

@ -89,7 +89,7 @@ class LLMFrontendNode(FrontendNode):

        if field.name == "config":
            field.show = True
            field.advanced = True
            field.value = json.dumps(CTRANSFORMERS_DEFAULT_CONFIG, indent=2)
            field.value = orjson_dumps(CTRANSFORMERS_DEFAULT_CONFIG, indent_2=True)

    @staticmethod
    def format_field(field: TemplateField, name: Optional[str] = None) -> None:

@ -1,6 +1,6 @@

import ast
import json
from typing import Optional
from langflow.services.database.models.base import orjson_dumps

from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode

@ -22,4 +22,4 @@ class UtilitiesFrontendNode(FrontendNode):

        if isinstance(field.value, dict):
            field.field_type = "code"
            field.value = json.dumps(field.value, indent=4)
            field.value = orjson_dumps(field.value)

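The net effect of the two `field.value` changes above is a switch from 4-space `json.dumps` output to the 2-space output of `orjson_dumps`; a hedged before/after comparison with a made-up dict:

import json
from langflow.services.database.models.base import orjson_dumps

value = {"max_new_tokens": 256, "temperature": 0.8}  # illustrative config
print(json.dumps(value, indent=4))   # previous formatting: 4-space indent
print(orjson_dumps(value))           # new formatting: 2-space indent via orjson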