Merge remote-tracking branch 'origin/dev' into fix_display_prompt
.dockerignore (new file, 1 addition)

@@ -0,0 +1 @@
+.venv/
.gitattributes (new file, vendored, 34 additions)

@@ -0,0 +1,34 @@
+# Set the default behavior, in case people don't have core.autocrlf set.
+* text eol=lf
+
+# Explicitly declare text files you want to always be normalized and converted
+# to native line endings on checkout.
+*.c text
+*.h text
+*.py text
+*.js text
+*.jsx text
+*.ts text
+*.tsx text
+*.md text
+*.mdx text
+*.yml text
+*.yaml text
+*.xml text
+*.csv text
+*.json text
+*.sh text
+*.Dockerfile text
+Dockerfile text
+
+# Declare files that will always have CRLF line endings on checkout.
+*.sln text eol=crlf
+
+# Denote all files that are truly binary and should not be modified.
+*.png binary
+*.jpg binary
+*.ico binary
+*.gif binary
+*.mp4 binary
+*.svg binary
+*.csv binary
.githooks/pre-commit (0 changes, mode changed: executable file → normal file)
.github/workflows/pre-release.yml (vendored, 4 changes)

@@ -14,9 +14,7 @@ env:

 jobs:
   if_release:
-    if: |
-      ${{ github.event.pull_request.merged == true }}
-      && ${{ contains(github.event.pull_request.labels.*.name, 'pre-release') }}
+    if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }}
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
.gitignore (vendored, 1 change)

@@ -253,3 +253,4 @@ langflow.db
 .docusaurus/

 /tmp/*
+src/backend/langflow/frontend/
.vscode/launch.json (vendored, 10 changes)

@@ -1,4 +1,5 @@
 {
   "version": "0.2.0",
   "configurations": [
     {
       "name": "Debug Backend",

@@ -38,6 +39,15 @@
       "request": "launch",
       "url": "http://localhost:3000/",
       "webRoot": "${workspaceRoot}/src/frontend"
     },
+    {
+      "name": "Python: Debug Tests",
+      "type": "python",
+      "request": "launch",
+      "program": "${file}",
+      "purpose": ["debug-test"],
+      "console": "integratedTerminal",
+      "justMyCode": false
+    }
   ]
 }
.vscode/tasks.json (new file, vendored, 48 additions)

@@ -0,0 +1,48 @@
+{
+  // See https://go.microsoft.com/fwlink/?LinkId=733558
+  // for the documentation about the tasks.json format
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "label": "Init",
+      "type": "shell",
+      "command": "make init"
+    },
+    // make backend
+    {
+      "label": "Backend",
+      "type": "shell",
+      "command": "make backend"
+    },
+    // make frontend
+    {
+      "label": "Frontend",
+      "type": "shell",
+      "command": "make frontend"
+    },
+    // make test
+    {
+      "label": "Test",
+      "type": "shell",
+      "command": "make tests"
+    },
+    // make lint
+    {
+      "label": "Lint",
+      "type": "shell",
+      "command": "make lint"
+    },
+    // make format
+    {
+      "label": "Format",
+      "type": "shell",
+      "command": "make format"
+    },
+    // make install
+    {
+      "label": "Install",
+      "type": "shell",
+      "command": "make install_backend && make install_frontend"
+    }
+  ]
+}
CONTRIBUTING.md

@@ -7,6 +7,11 @@ to contributions, whether it be in the form of a new feature, improved infra, or
 To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow.
 Please do not try to push directly to this repo unless you are a maintainer.
+
+The branch structure is as follows:
+
+- `main`: The stable version of Langflow
+- `dev`: The development version of Langflow. This branch is used to test new features before they are merged into `main` and, as such, may be unstable.

 ## 🗺️Contributing Guidelines

 ## 🚩GitHub Issues
Makefile (22 changes)

@@ -19,7 +19,7 @@ coverage:
 	--cov-report term-missing:skip-covered

 tests:
-	poetry run pytest tests
+	poetry run pytest tests -n auto

 format:
 	poetry run black .

@@ -27,20 +27,38 @@ format:
 	cd src/frontend && npm run format

 lint:
-	poetry run mypy .
+	poetry run mypy --exclude .venv .
 	poetry run black . --check
 	poetry run ruff . --fix

 install_frontend:
 	cd src/frontend && npm install

+install_frontendc:
+	cd src/frontend && rm -rf node_modules package-lock.json && npm install
+
 run_frontend:
 	cd src/frontend && npm start

+run_cli:
+	poetry run langflow --path src/frontend/build
+
+run_cli_debug:
+	poetry run langflow --path src/frontend/build --log-level debug
+
+setup_devcontainer:
+	make init
+	make build_frontend
+	poetry run langflow --path src/frontend/build
+
 frontend:
 	make install_frontend
 	make run_frontend

+frontendc:
+	make install_frontendc
+	make run_frontend
+
 install_backend:
 	poetry install
README.md

@@ -275,6 +275,8 @@ flow("Hey, have you heard of Langflow?")

 We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible.

+---
+
 Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions and showcase your projects! 🦾

 <p>
@@ -15,4 +15,4 @@ COPY ./ ./

 # Install dependencies
 RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi

-CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload", "log-level", "debug"]
+CMD ["uvicorn", "--factory", "src.backend.langflow.main:create_app", "--host", "0.0.0.0", "--port", "7860", "--reload", "--log-level", "debug"]
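The CMD rewrite above does three things: it fixes the broken `log-level` flag (which was missing its `--`), moves the port from 5003 to 7860, and switches uvicorn to `--factory` mode, where the import string names a function that builds the app rather than a ready-made `app` object. A minimal sketch of the factory shape the new command expects; only the `create_app` name comes from the diff, the body is illustrative:

```python
# Sketch of the app-factory pattern that `uvicorn --factory` expects.
# langflow's real create_app is more involved; this only shows the shape.
from fastapi import FastAPI


def create_app() -> FastAPI:
    """Build and return a fresh FastAPI instance each time uvicorn calls it."""
    app = FastAPI(title="langflow")

    @app.get("/health")  # illustrative route, not langflow's actual API
    def health() -> dict:
        return {"status": "ok"}

    return app
```

With `--factory`, uvicorn calls `create_app()` at startup and again on every reload, so configuration runs per instance instead of at import time.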
@@ -1,28 +1,33 @@
-version: '3.4'
-
-services:
-  backend:
-    volumes:
-      - ./:/app
-    build:
-      context: ./
-      dockerfile: ./dev.Dockerfile
-    command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload"]
-    ports:
-      - 7860:7860
-      - 5678:5678
-    restart: on-failure
-
-  frontend:
-    build:
-      context: ./src/frontend
-      dockerfile: ./dev.Dockerfile
-      args:
-        - BACKEND_URL=http://backend:7860
-    ports:
-      - "3000:3000"
-    volumes:
-      - ./src/frontend/public:/home/node/app/public
-      - ./src/frontend/src:/home/node/app/src
-      - ./src/frontend/package.json:/home/node/app/package.json
-    restart: on-failure
+version: "3.4"
+
+services:
+  backend:
+    volumes:
+      - ./:/app
+    build:
+      context: ./
+      dockerfile: ./dev.Dockerfile
+    command:
+      [
+        "sh",
+        "-c",
+        "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn --factory src.backend.langflow.main:create_app --host 0.0.0.0 --port 7860 --reload",
+      ]
+    ports:
+      - 7860:7860
+      - 5678:5678
+    restart: on-failure
+
+  frontend:
+    build:
+      context: ./src/frontend
+      dockerfile: ./dev.Dockerfile
+      args:
+        - BACKEND_URL=http://backend:7860
+    ports:
+      - "3000:3000"
+    volumes:
+      - ./src/frontend/public:/home/node/app/public
+      - ./src/frontend/src:/home/node/app/src
+      - ./src/frontend/package.json:/home/node/app/package.json
+    restart: on-failure
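The backend `command` above installs debugpy into the container at runtime, then holds the server until a debugger attaches on the published port 5678. A hedged Python equivalent of what that shell one-liner sets up (assumes the published `debugpy` and `uvicorn` APIs; the import string mirrors the compose file):

```python
# Rough programmatic equivalent of
# `python -m debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn --factory ...`
import debugpy
import uvicorn

debugpy.listen(("0.0.0.0", 5678))  # same port the compose file maps to the host
debugpy.wait_for_client()          # blocks here until an IDE attaches

# factory=True mirrors the --factory flag: pass an import string, not an app object
uvicorn.run("src.backend.langflow.main:create_app",
            host="0.0.0.0", port=7860, factory=True)
```

Installing debugpy into `/tmp` keeps it out of the image, and the `--wait-for-client` flag is why the container appears to hang until a debugger connects.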
@@ -1,4 +1,4 @@
-version: '3'
+version: "3"

 services:
   backend:

@@ -9,7 +9,7 @@ services:
       - "7860:7860"
     volumes:
       - ./:/app
-    command: bash -c "uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload"
+    command: bash -c "uvicorn --factory src.backend.langflow.main:create_app --host 0.0.0.0 --port 7860 --reload"

   frontend:
     build:

@@ -22,7 +22,7 @@ services:
     ports:
       - "3000:3000"
     volumes:
-      - ./src/frontend/public:/home/node/app/public
-      - ./src/frontend/src:/home/node/app/src
-      - ./src/frontend/package.json:/home/node/app/package.json
-    restart: on-failure
+      - ./src/frontend/public:/home/node/app/public
+      - ./src/frontend/src:/home/node/app/src
+      - ./src/frontend/package.json:/home/node/app/package.json
+    restart: on-failure
docker_example/README.md (new file, 9 additions)

@@ -0,0 +1,9 @@
+# LangFlow Docker Running
+
+```sh
+git clone git@github.com:logspace-ai/langflow.git
+cd langflow/docker_example
+docker compose up
+```
+
+The web UI will be accessible on port [7860](http://localhost:7860/)
@@ -6,5 +6,5 @@ services:
       context: .
       dockerfile: Dockerfile
     ports:
-      - "5003:5003"
+      - "7860:7860"
     command: langflow --host 0.0.0.0
@@ -73,3 +73,25 @@ Used to load [OpenAI’s](https://openai.com/) embedding models.
 - **request_timeout:** Used to specify the maximum amount of time, in milliseconds, to wait for a response from the OpenAI API when generating embeddings for a given text.

 - **tiktoken_model_name:** Used to count the number of tokens in documents to constrain them to be under a certain limit. By default, when set to None, this will be the same as the embedding model name.
+
+---
+
+### VertexAIEmbeddings
+
+Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) [Embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings).
+
+:::info
+Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP). It provides access, management, and development of applications and services through global data centers. To use Vertex AI PaLM, you need to have the [google-cloud-aiplatform](https://pypi.org/project/google-cloud-aiplatform/) Python package installed and credentials configured for your environment.
+:::
+
+- **credentials:** The default custom credentials (google.auth.credentials.Credentials) to use.
+- **location:** The default location to use when making API calls – defaults to `us-central1`.
+- **max_output_tokens:** Token limit determines the maximum amount of text output from one prompt – defaults to `128`.
+- **model_name:** The name of the Vertex AI large language model – defaults to `text-bison`.
+- **project:** The default GCP project to use when making Vertex API calls.
+- **request_parallelism:** The amount of parallelism allowed for requests issued to VertexAI models – defaults to `5`.
+- **temperature:** Tunes the degree of randomness in text generations. Should be a non-negative value – defaults to `0`.
+- **top_k:** How the model selects tokens for output: the next token is selected from among the top-k most probable tokens – defaults to `40`.
+- **top_p:** Tokens are selected from most probable to least until the sum of their probabilities equals the top-p value – defaults to `0.95`.
+- **tuned_model_name:** The name of a tuned model. If provided, model_name is ignored.
+- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can help debug and understand the chain's behavior. If set to False, it will suppress the verbose output – defaults to `False`.
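A hedged usage sketch for the component documented above, assuming LangChain's `VertexAIEmbeddings` wrapper and application-default GCP credentials; the project id is a placeholder, and note that the `text-bison` default listed above belongs to the LLM wrapper rather than the embedding model:

```python
# Illustrative only: assumes langchain's VertexAIEmbeddings and a GCP project
# with google-cloud-aiplatform installed and credentials configured.
from langchain.embeddings import VertexAIEmbeddings

embeddings = VertexAIEmbeddings(
    project="my-gcp-project",   # placeholder project id
    location="us-central1",     # documented default
    request_parallelism=5,      # documented default
)

vector = embeddings.embed_query("Hey, have you heard of Langflow?")
print(len(vector))  # dimensionality of the returned embedding
```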
@@ -185,6 +185,28 @@ Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) large language models.
 Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP). It provides access, management, and development of applications and services through global data centers. To use Vertex AI PaLM, you need to have the [google-cloud-aiplatform](https://pypi.org/project/google-cloud-aiplatform/) Python package installed and credentials configured for your environment.
 :::

 - **credentials:** The default custom credentials (google.auth.credentials.Credentials) to use.
 - **location:** The default location to use when making API calls – defaults to `us-central1`.
 - **max_output_tokens:** Token limit determines the maximum amount of text output from one prompt – defaults to `128`.
+- **model_name:** The name of the Vertex AI large language model – defaults to `text-bison`.
+- **project:** The default GCP project to use when making Vertex API calls.
+- **request_parallelism:** The amount of parallelism allowed for requests issued to VertexAI models – defaults to `5`.
+- **temperature:** Tunes the degree of randomness in text generations. Should be a non-negative value – defaults to `0`.
+- **top_k:** How the model selects tokens for output: the next token is selected from among the top-k most probable tokens – defaults to `40`.
+- **top_p:** Tokens are selected from most probable to least until the sum of their probabilities equals the top-p value – defaults to `0.95`.
+- **tuned_model_name:** The name of a tuned model. If provided, model_name is ignored.
+- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can help debug and understand the chain's behavior. If set to False, it will suppress the verbose output – defaults to `False`.
+
+---
+
+### ChatVertexAI
+
+Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) large language models.
+
+:::info
+Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP). It provides access, management, and development of applications and services through global data centers. To use Vertex AI PaLM, you need to have the [google-cloud-aiplatform](https://pypi.org/project/google-cloud-aiplatform/) Python package installed and credentials configured for your environment.
+:::
+
+- **credentials:** The default custom credentials (google.auth.credentials.Credentials) to use.
+- **location:** The default location to use when making API calls – defaults to `us-central1`.
+- **max_output_tokens:** Token limit determines the maximum amount of text output from one prompt – defaults to `128`.
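The hunk is cut off above, but the documented parameters mirror the chat wrapper's options. A hedged sketch, assuming LangChain's `ChatVertexAI` and the same GCP setup as before; `chat-bison` is illustrative, as the chat-tuned counterpart of `text-bison`:

```python
# Illustrative only: assumes langchain's ChatVertexAI wrapper.
from langchain.chat_models import ChatVertexAI
from langchain.schema import HumanMessage

chat = ChatVertexAI(
    model_name="chat-bison",  # illustrative; the doc default above is text-bison
    max_output_tokens=128,    # documented default
    temperature=0,            # documented default
)

reply = chat([HumanMessage(content="What is Langflow?")])
print(reply.content)
```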
@@ -37,7 +37,7 @@ export default function FooterWrapper(props) {

   const mendableFloatingButton = React.createElement(MendableFloatingButton, {
     floatingButtonStyle: { color: "#000000", backgroundColor: "#f6f6f6" },
-    anon_key: customFields.mendableAnonKey, // Mendable Search Public ANON key, ok to be public
+    anon_key: 'b7f52734-297c-41dc-8737-edbd13196394', // Mendable Search Public ANON key, ok to be public
     showSimpleSearch: true,
     icon: icon,
   });
docs/static/data/organizations-100.csv (vendored, 202 changes; the 101 removed and 101 added lines are textually identical, shown once below)

@@ -1,101 +1,101 @@
Index,Organization Id,Name,Website,Country,Description,Founded,Industry,Number of employees
1,FAB0d41d5b5d22c,Ferrell LLC,https://price.net/,Papua New Guinea,Horizontal empowering knowledgebase,1990,Plastics,3498
2,6A7EdDEA9FaDC52,"Mckinney, Riley and Day",http://www.hall-buchanan.info/,Finland,User-centric system-worthy leverage,2015,Glass / Ceramics / Concrete,4952
3,0bFED1ADAE4bcC1,Hester Ltd,http://sullivan-reed.com/,China,Switchable scalable moratorium,1971,Public Safety,5287
4,2bFC1Be8a4ce42f,Holder-Sellers,https://becker.com/,Turkmenistan,De-engineered systemic artificial intelligence,2004,Automotive,921
5,9eE8A6a4Eb96C24,Mayer Group,http://www.brewer.com/,Mauritius,Synchronized needs-based challenge,1991,Transportation,7870
6,cC757116fe1C085,Henry-Thompson,http://morse.net/,Bahamas,Face-to-face well-modulated customer loyalty,1992,Primary / Secondary Education,4914
7,219233e8aFF1BC3,Hansen-Everett,https://www.kidd.org/,Pakistan,Seamless disintermediate collaboration,2018,Publishing Industry,7832
8,ccc93DCF81a31CD,Mcintosh-Mora,https://www.brooks.com/,Heard Island and McDonald Islands,Centralized attitude-oriented capability,1970,Import / Export,4389
9,0B4F93aA06ED03e,Carr Inc,http://ross.com/,Kuwait,Distributed impactful customer loyalty,1996,Plastics,8167
10,738b5aDe6B1C6A5,Gaines Inc,http://sandoval-hooper.com/,Uzbekistan,Multi-lateral scalable protocol,1997,Outsourcing / Offshoring,9698
11,AE61b8Ffebbc476,Kidd Group,http://www.lyons.com/,Bouvet Island (Bouvetoya),Proactive foreground paradigm,2001,Primary / Secondary Education,7473
12,eb3B7D06cCdD609,Crane-Clarke,https://www.sandoval.com/,Denmark,Front-line clear-thinking encryption,2014,Food / Beverages,9011
13,8D0c29189C9798B,"Keller, Campos and Black",https://www.garner.info/,Liberia,Ameliorated directional emulation,2020,Museums / Institutions,2862
14,D2c91cc03CA394c,Glover-Pope,http://www.silva.biz/,United Arab Emirates,Persevering contextually-based approach,2013,Medical Practice,9079
15,C8AC1eaf9C036F4,Pacheco-Spears,https://aguilar.com/,Sweden,Secured logistical synergy,1984,Maritime,769
16,b5D10A14f7a8AfE,Hodge-Ayers,http://www.archer-elliott.com/,Honduras,Future-proofed radical implementation,1990,Facilities Services,8508
17,68139b5C4De03B4,"Bowers, Guerra and Krause",http://www.carrillo-nicholson.com/,Uganda,De-engineered transitional strategy,1972,Primary / Secondary Education,6986
18,5c2EffEfdba2BdF,Mckenzie-Melton,http://montoya-thompson.com/,Hong Kong,Reverse-engineered heuristic alliance,1998,Investment Management / Hedge Fund / Private Equity,4589
19,ba179F19F7925f5,Branch-Mann,http://www.lozano.com/,Botswana,Adaptive intangible frame,1999,Architecture / Planning,7961
20,c1Ce9B350BAc66b,Weiss and Sons,https://barrett.com/,Korea,Sharable optimal functionalities,2011,Plastics,5984
21,8de40AC4e6EaCa4,"Velez, Payne and Coffey",http://burton.com/,Luxembourg,Mandatory coherent synergy,1986,Wholesale,5010
22,Aad86a4F0385F2d,Harrell LLC,http://www.frey-rosario.com/,Guadeloupe,Reverse-engineered mission-critical moratorium,2018,Construction,2185
23,22aC3FFd64fD703,"Eaton, Reynolds and Vargas",http://www.freeman.biz/,Monaco,Self-enabling multi-tasking process improvement,2014,Luxury Goods / Jewelry,8987
24,5Ec4C272bCf085c,Robbins-Cummings,http://donaldson-wilkins.com/,Belgium,Organic non-volatile hierarchy,1991,Pharmaceuticals,5038
25,5fDBeA8BB91a000,Jenkins Inc,http://www.kirk.biz/,South Africa,Front-line systematic help-desk,2002,Insurance,1215
26,dFfD6a6F9AC2d9C,"Greene, Benjamin and Novak",http://www.kent.net/,Romania,Centralized leadingedge moratorium,2012,Museums / Institutions,4941
27,4B217cC5a0674C5,"Dickson, Richmond and Clay",http://everett.com/,Czech Republic,Team-oriented tangible complexity,1980,Real Estate / Mortgage,3122
28,88b1f1cDcf59a37,Prince-David,http://thompson.com/,Christmas Island,Virtual holistic methodology,1970,Banking / Mortgage,1046
29,f9F7bBCAEeC360F,Ayala LLC,http://www.zhang.com/,Philippines,Open-source zero administration hierarchy,2021,Legal Services,7664
30,7Cb3AeFcE4Ba31e,Rivas Group,https://hebert.org/,Australia,Open-architected well-modulated capacity,1998,Logistics / Procurement,4155
31,ccBcC32adcbc530,"Sloan, Mays and Whitehead",http://lawson.com/,Chad,Face-to-face high-level conglomeration,1997,Civil Engineering,365
32,f5afd686b3d05F5,"Durham, Allen and Barnes",http://chan-stafford.org/,Zimbabwe,Synergistic web-enabled framework,1993,Mechanical or Industrial Engineering,6135
33,38C6cfC5074Fa5e,Fritz-Franklin,http://www.lambert.com/,Nepal,Automated 4thgeneration website,1972,Hospitality,4516
34,5Cd7efccCcba38f,Burch-Ewing,http://cline.net/,Taiwan,User-centric 4thgeneration system engine,1981,Venture Capital / VC,7443
35,9E6Acb51e3F9d6F,"Glass, Barrera and Turner",https://dunlap.com/,Kyrgyz Republic,Multi-channeled 3rdgeneration open system,2020,Utilities,2610
36,4D4d7E18321eaeC,Pineda-Cox,http://aguilar.org/,Bolivia,Fundamental asynchronous capability,2010,Human Resources / HR,1312
37,485f5d06B938F2b,"Baker, Mccann and Macdonald",http://www.anderson-barker.com/,Kenya,Cross-group user-facing focus group,2013,Legislative Office,1638
38,19E3a5Bf6dBDc4F,Cuevas-Moss,https://dodson-castaneda.net/,Guatemala,Extended human-resource intranet,1994,Music,9995
39,6883A965c7b68F7,Hahn PLC,http://newman.com/,Belarus,Organic logistical leverage,2012,Electrical / Electronic Manufacturing,3715
40,AC5B7AA74Aa4A2E,"Valentine, Ferguson and Kramer",http://stuart.net/,Jersey,Centralized secondary time-frame,1997,Non - Profit / Volunteering,3585
41,decab0D5027CA6a,Arroyo Inc,https://www.turner.com/,Grenada,Managed demand-driven website,2006,Writing / Editing,9067
42,dF084FbBb613eea,Walls LLC,http://www.reese-vasquez.biz/,Cape Verde,Self-enabling fresh-thinking installation,1989,Investment Management / Hedge Fund / Private Equity,1678
43,A2D89Ab9bCcAd4e,"Mitchell, Warren and Schneider",https://fox.biz/,Trinidad and Tobago,Enhanced intangible time-frame,2021,Capital Markets / Hedge Fund / Private Equity,3816
44,77aDc905434a49f,Prince PLC,https://www.watts.com/,Sweden,Profit-focused coherent installation,2016,Individual / Family Services,7645
45,235fdEFE2cfDa5F,Brock-Blackwell,http://www.small.com/,Benin,Secured foreground emulation,1986,Online Publishing,7034
46,1eD64cFe986BBbE,Walton-Barnett,https://ashley-schaefer.com/,Western Sahara,Right-sized clear-thinking flexibility,2001,Luxury Goods / Jewelry,1746
47,CbBbFcdd0eaE2cF,Bartlett-Arroyo,https://cruz.com/,Northern Mariana Islands,Realigned didactic function,1976,Civic / Social Organization,3987
48,49aECbDaE6aBD53,"Wallace, Madden and Morris",http://www.blevins-fernandez.biz/,Germany,Persistent real-time customer loyalty,2016,Pharmaceuticals,9443
49,7b3fe6e7E72bFa4,Berg-Sparks,https://cisneros-love.com/,Canada,Stand-alone static implementation,1974,Arts / Crafts,2073
50,c6DedA82A8aef7E,Gonzales Ltd,http://bird.com/,Tonga,Managed human-resource policy,1988,Consumer Goods,9069
51,7D9FBF85cdC3871,Lawson and Sons,https://www.wong.com/,French Southern Territories,Compatible analyzing intranet,2021,Arts / Crafts,3527
52,7dd18Fb7cB07b65,"Mcguire, Mcconnell and Olsen",https://melton-briggs.com/,Korea,Profound client-server frame,1988,Printing,8445
53,EF5B55FadccB8Fe,Charles-Phillips,https://bowman.com/,Cote d'Ivoire,Monitored client-server implementation,2012,Mental Health Care,3450
54,f8D4B99e11fAF5D,Odom Ltd,https://www.humphrey-hess.com/,Cote d'Ivoire,Advanced static process improvement,2012,Management Consulting,1825
55,e24D21BFd3bF1E5,Richard PLC,https://holden-coleman.net/,Mayotte,Object-based optimizing model,1971,Broadcast Media,4942
56,B9BdfEB6D3Ca44E,Sampson Ltd,https://blevins.com/,Cayman Islands,Intuitive local adapter,2005,Farming,1418
57,2a74D6f3D3B268e,"Cherry, Le and Callahan",https://waller-delacruz.biz/,Nigeria,Universal human-resource collaboration,2017,Entertainment / Movie Production,7202
58,Bf3F3f62c8aBC33,Cherry PLC,https://www.avila.info/,Marshall Islands,Persistent tertiary website,1980,Plastics,8245
59,aeBe26B80a7a23c,Melton-Nichols,https://kennedy.com/,Palau,User-friendly clear-thinking productivity,2021,Legislative Office,8741
60,aAeb29ad43886C6,Potter-Walsh,http://thomas-french.org/,Turkey,Optional non-volatile open system,2008,Human Resources / HR,6923
61,bD1bc6bB6d1FeD3,Freeman-Chen,https://mathis.com/,Timor-Leste,Phased next generation adapter,1973,International Trade / Development,346
62,EB9f456e8b7022a,Soto Group,https://norris.info/,Vietnam,Enterprise-wide executive installation,1988,Business Supplies / Equipment,9097
63,Dfef38C51D8DAe3,"Poole, Cruz and Whitney",https://reed.info/,Reunion,Balanced analyzing groupware,1978,Marketing / Advertising / Sales,2992
64,055ffEfB2Dd95B0,Riley Ltd,http://wiley.com/,Brazil,Optional exuding superstructure,1986,Textiles,9315
65,cBfe4dbAE1699da,"Erickson, Andrews and Bailey",https://www.hobbs-grant.com/,Eritrea,Vision-oriented secondary project,2014,Consumer Electronics,7829
66,fdFbecbadcdCdf1,"Wilkinson, Charles and Arroyo",http://hunter-mcfarland.com/,United States Virgin Islands,Assimilated 24/7 archive,1996,Building Materials,602
67,5DCb8A5a5ca03c0,Floyd Ltd,http://www.whitney.com/,Falkland Islands (Malvinas),Function-based fault-tolerant concept,2017,Public Relations / PR,2911
68,ce57DCbcFD6d618,Newman-Galloway,https://www.scott.com/,Luxembourg,Enhanced foreground collaboration,1987,Information Technology / IT,3934
69,5aaD187dc929371,Frazier-Butler,https://www.daugherty-farley.info/,Northern Mariana Islands,Persistent interactive circuit,1972,Outsourcing / Offshoring,5130
70,902D7Ac8b6d476b,Newton Inc,https://www.richmond-manning.info/,Netherlands Antilles,Fundamental stable info-mediaries,1976,Military Industry,563
71,32BB9Ff4d939788,Duffy-Levy,https://www.potter.com/,Guernsey,Diverse exuding installation,1982,Wireless,6146
72,adcB0afbE58bAe3,Wagner LLC,https://decker-esparza.com/,Uruguay,Reactive attitude-oriented toolset,1987,International Affairs,6874
73,dfcA1c84AdB61Ac,Mccall-Holmes,http://www.dean.com/,Benin,Object-based value-added database,2009,Legal Services,696
74,208044AC2fe52F3,Massey LLC,https://frazier.biz/,Suriname,Configurable zero administration Graphical User Interface,1986,Accounting,5004
75,f3C365f0c1A0623,Hicks LLC,http://alvarez.biz/,Pakistan,Quality-focused client-server Graphical User Interface,1970,Computer Software / Engineering,8480
76,ec5Bdd3CBAfaB93,"Cole, Russell and Avery",http://www.blankenship.com/,Mongolia,De-engineered fault-tolerant challenge,2000,Law Enforcement,7012
77,DDB19Be7eeB56B4,Cummings-Rojas,https://simon-pearson.com/,Svalbard & Jan Mayen Islands,User-centric modular customer loyalty,2012,Financial Services,7529
78,dd6CA3d0bc3cAfc,"Beasley, Greene and Mahoney",http://www.petersen-lawrence.com/,Togo,Extended content-based methodology,1976,Religious Institutions,869
79,A0B9d56e61070e3,"Beasley, Sims and Allison",http://burke.info/,Latvia,Secured zero tolerance hub,1972,Facilities Services,6182
80,cBa7EFe5D05Adaf,Crawford-Rivera,https://black-ramirez.org/,Cuba,Persevering exuding budgetary management,1999,Online Publishing,7805
81,Ea3f6D52Ec73563,Montes-Hensley,https://krueger.org/,Liechtenstein,Multi-tiered secondary productivity,2009,Printing,8433
82,bC0CEd48A8000E0,Velazquez-Odom,https://stokes.com/,Djibouti,Streamlined 6thgeneration function,2002,Alternative Dispute Resolution,4044
83,c89b9b59BC4baa1,Eaton-Morales,https://www.reeves-graham.com/,Micronesia,Customer-focused explicit frame,1990,Capital Markets / Hedge Fund / Private Equity,7013
84,FEC51bce8421a7b,"Roberson, Pennington and Palmer",http://www.keith-fisher.com/,Cameroon,Adaptive bi-directional hierarchy,1993,Telecommunications,5571
85,e0E8e27eAc9CAd5,"George, Russo and Guerra",https://drake.com/,Sweden,Centralized non-volatile capability,1989,Military Industry,2880
86,B97a6CF9bf5983C,Davila Inc,https://mcconnell.info/,Cocos (Keeling) Islands,Profit-focused dedicated frame,2017,Consumer Electronics,2215
87,a0a6f9b3DbcBEb5,Mays-Preston,http://www.browning-key.com/,Mali,User-centric heuristic focus group,2006,Military Industry,5786
88,8cC1bDa330a5871,Pineda-Morton,https://www.carr.com/,United States Virgin Islands,Grass-roots methodical info-mediaries,1991,Printing,6168
89,ED889CB2FE9cbd3,Huang and Sons,https://www.bolton.com/,Eritrea,Re-contextualized dynamic hierarchy,1981,Semiconductors,7484
90,F4Dc1417BC6cb8f,Gilbert-Simon,https://www.bradford.biz/,Burundi,Grass-roots radical parallelism,1973,Newspapers / Journalism,1927
91,7ABc3c7ecA03B34,Sampson-Griffith,http://hendricks.org/,Benin,Multi-layered composite paradigm,1972,Textiles,3881
92,4e0719FBE38e0aB,Miles-Dominguez,http://www.turner.com/,Gibraltar,Organized empowering forecast,1996,Civic / Social Organization,897
93,dEbDAAeDfaed00A,Rowe and Sons,https://www.simpson.org/,El Salvador,Balanced multimedia knowledgebase,1978,Facilities Services,8172
94,61BDeCfeFD0cEF5,"Valenzuela, Holmes and Rowland",https://www.dorsey.net/,Taiwan,Persistent tertiary focus group,1999,Transportation,1483
95,4e91eD25f486110,"Best, Wade and Shepard",https://zimmerman.com/,Zimbabwe,Innovative background definition,1991,Gambling / Casinos,4873
96,0a0bfFbBbB8eC7c,Holmes Group,https://mcdowell.org/,Ethiopia,Right-sized zero tolerance focus group,1975,Photography,2988
97,BA6Cd9Dae2Efd62,Good Ltd,http://duffy.com/,Anguilla,Reverse-engineered composite moratorium,1971,Consumer Services,4292
98,E7df80C60Abd7f9,Clements-Espinoza,http://www.flowers.net/,Falkland Islands (Malvinas),Progressive modular hub,1991,Broadcast Media,236
99,AFc285dbE2fEd24,Mendez Inc,https://www.burke.net/,Kyrgyz Republic,User-friendly exuding migration,1993,Education Management,339
100,e9eB5A60Cef8354,Watkins-Kaiser,http://www.herring.com/,Togo,Synergistic background access,2009,Financial Services,2785
BIN  docs/static/img/logo.svg (vendored; before: 31 KiB, after: 31 KiB)
BIN  docs/static/img/new_langflow.gif (vendored; before: 3.2 MiB, after: 3.2 MiB)
BIN  docs/static/img/new_langflow2.gif (vendored; before: 3.2 MiB, after: 3.2 MiB)
BIN  docs/static/videos/langflow_api.mp4 (vendored)
BIN  docs/static/videos/langflow_build.mp4 (vendored)
BIN  docs/static/videos/langflow_collection.mp4 (vendored)
BIN  docs/static/videos/langflow_collection_example.mp4 (vendored)
BIN  docs/static/videos/langflow_fork.mp4 (vendored)
BIN  docs/static/videos/langflow_parameters.mp4 (vendored)
BIN  docs/static/videos/langflow_widget.mp4 (vendored; before: 2 MiB, after: 2 MiB)
poetry.lock (generated, 532 changes; listing truncated)

@@ -144,6 +144,25 @@ files = [
     {file = "aiostream-0.4.5.tar.gz", hash = "sha256:3ecbf87085230fbcd9605c32ca20c4fb41af02c71d076eab246ea22e35947d88"},
 ]

+[[package]]
+name = "alembic"
+version = "1.11.3"
+description = "A database migration tool for SQLAlchemy."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "alembic-1.11.3-py3-none-any.whl", hash = "sha256:d6c96c2482740592777c400550a523bc7a9aada4e210cae2e733354ddae6f6f8"},
+    {file = "alembic-1.11.3.tar.gz", hash = "sha256:3db4ce81a9072e1b5aa44c2d202add24553182672a12daf21608d6f62a8f9cf9"},
+]
+
+[package.dependencies]
+Mako = "*"
+SQLAlchemy = ">=1.3.0"
+typing-extensions = ">=4"
+
+[package.extras]
+tz = ["python-dateutil"]
+
 [[package]]
 name = "anthropic"
 version = "0.3.10"

@@ -316,6 +335,40 @@ files = [
     {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
 ]

+[[package]]
+name = "bcrypt"
+version = "4.0.1"
+description = "Modern password hashing for your software and your servers"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"},
+    {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"},
+    {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"},
+    {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"},
+    {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"},
+    {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"},
+    {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"},
+    {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"},
+    {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"},
+    {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"},
+    {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"},
+    {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"},
+    {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"},
+    {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"},
+    {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"},
+    {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"},
+    {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"},
+    {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"},
+    {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"},
+    {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"},
+    {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
+]
+
+[package.extras]
+tests = ["pytest (>=3.2.1,!=3.3.0)"]
+typecheck = ["mypy"]
+
 [[package]]
 name = "beautifulsoup4"
 version = "4.12.2"

@@ -650,76 +703,76 @@ click = "*"

 [[package]]
 name = "clickhouse-connect"
-version = "0.6.9"
+version = "0.6.10"
 description = "ClickHouse Database Core Driver for Python, Pandas, and Superset"
 optional = false
 python-versions = "~=3.7"
 files = [
-    {file = "clickhouse-connect-0.6.9.tar.gz", hash = "sha256:ba735bcb73c4743788e7c8bfeb865edd887da28a253bd189c449df20d9abff64"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74fb1bc9eea037db1361d75adb6482ce6d8c22e4a47a37735edd8e3862d931f7"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb5e4a489b5960fc4bc48fdda052b3a446a736ee4be74105ae7663307da8a063"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e954bfe759fd437595732a15a40356e9e29035a83f485a23601f863aab2f7c6f"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e2f4ada702a933f2ffc38e77bf948f11d745f3467191d6b6fc2190683d02bb8"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d06a56e2be745adb692fbf1eedd9fb6d5dbb46ce325c1d9e57b53ba99eee95"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5dcbd145ad907e9bc7dbbc5e80440888201de8f1622f755032595c8b8302e4ef"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f155d1f214c81c5d8e72cbe325dfec72340c082156108db06c862ddd76771d7e"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0b47ec963b9d9cf1674490385183dc988d6e4d33287bb9d81e23373012232223"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-win32.whl", hash = "sha256:59dc14e47fa287578495835e4c5efdff90e40430b5b27a3c1453bb83d65e17cb"},
-    {file = "clickhouse_connect-0.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:bcabfaa3fbef4ea9ba723d16e9f50e5e02a3c871b3afc8106b6a04a53a7b19d7"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:93025ec43ffc25ae3e5111c0da65f8227dc6ae68834beeda3b0256c22baedd9f"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac72323af794b4c79a804f4f311cfe4ae28426f92ac1b7f390aabbab6a93a4bd"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d8c1e71b34e870d8cd4805c3be86678e19a63931a718f3bf657b48da82c74df"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c286a6814facf3fa5bf492863e99b300154b33c3ecedf7799070ef9b8cd12474"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9924d53302f3333bcc40b9f5238bea4c29c107a6a82e22dc5ba24ce7c1cdb75"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:adf29b7319e5cdb9b6cb8ec3d4e85056e588ac51265b92ffaf6c69481283e643"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6fc2d4e428af2be8c9db23b37f4493848696417376f0eaba23b0e8f053f6a0d4"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3793d26cabd88bad6f4a3fe9f93974e480cc785f137158be8b58d700baaeb4f"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-win32.whl", hash = "sha256:1fa2fddd00d019b4593b194a303339268699808c5ebeab2d331b2ee0d29eabc0"},
-    {file = "clickhouse_connect-0.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:536df34ecb49ddd7c61ebd6b900a7d06b3a246fbe30441cb68c568ea42e292d3"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d61337ca4c48b78a959627e9c0de58b86a8399510de184bd6d4c27b8b7e93c17"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69449ee694ed9aa4ee5f290c13ce5038efe394ff99b43eeee8e8190f3e4aa909"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97ee1de55fea90f12e1f04ea6521dd827f2ed25361cfb99374cb0649222a8f0"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adc42d8970b322bf78053a62e6c555fd8e03b29aecffe21521efaf5bad4e2ba4"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:064a27bd8d92f413ddcf20926a6c868a0482f7e757f9d412c0778b875d20c536"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a4aaf279efc5e8e13c6b4ac7ce41a3700f786dd34a58d3686ddf29660364dc62"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:022e0438728cc323d1817ade5105e4458acf71be822dffee130934140524fcbd"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-win32.whl", hash = "sha256:e7a9c511f680e0f40b8765e7fbb8bde6b101cbf0a5b180f6ccc18ad59c9776ea"},
-    {file = "clickhouse_connect-0.6.9-cp37-cp37m-win_amd64.whl", hash = "sha256:c2bd8fc5767fd883d5d1ca2e35e3034e590994ea64f76b11a3814ab20862a0b0"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:159cba55d55ea85cea310a8781144559deade5c8cee6b13bc720253e6a6e4a5c"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4a34a7e3d16c0e4c34a11b7d8cbd633f22ee48e1a193f1977f64fe470ad9c79"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:026b9363553a6bd22f5c9f4b65cacd3c3fc0b50d5d0159c47bb34f63dc87ccff"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaec4e9dc5110bf6f17a0abbb61e286293400d2c3450e29040fd100dc075dc89"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0224509fd36637ccf1add66f4ec12d464e2e80e4a5521e846f4d7602b6664a0c"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aa5b9b5699ae385d4f7c1f493e1029d7803b47c74ecb2502f01fb79135253d2"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1b0418b347f4e7a27e692f6947b9788759b23ee4f4aabccbb376b5241190f8a7"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:80778a118f5b2c52daf64c051fdbc5df23a8c37e9e385205e4942fdd60925d25"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-win32.whl", hash = "sha256:841838e793cd4283d3a245e278b136ea5681e636dcdc816c27c4ff77e4bb2077"},
-    {file = "clickhouse_connect-0.6.9-cp38-cp38-win_amd64.whl", hash = "sha256:63de88cf244adda961742c1e05e051340de09714b38cc33d757138af6033b364"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47216788f28fa8d1c225acdd366f5eab53c00b131ca246c7f004d94b1aff7cef"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8b6669ba538b6fe18cbe01f4451c4e5d5674471cd55aec7af3d6f1a8e064b8f"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43de9421a8f91e82efec5f9bffe366fb341e2fb7c7dce89e303061ba7065baa3"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62b184813807112a8ef2853df93b6c07899bdf04f188e547f92c54fc2e056be3"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c869d6761f38bfb940fa0992078bd7d8ece1c1c47a330ab3e8390ca8b2ba980"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b83f62002ae8a19102f05868639217fe3dddabf4c099dc0ddf1586901f1501fb"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:442f9119a223c4f97608d3e4f38debc9c62873e0e9dc948cb1b21691be35af55"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cef80d7596887eff39256e29a31c23297a1a65aac735b3e0e323c702dd95d2ee"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-win32.whl", hash = "sha256:f9cdccf6fa12349e4c25d099bed1d80274104cc4e4a8110f6692e553f4491b99"},
-    {file = "clickhouse_connect-0.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:067b5b104e0ac1e16255313766cd97cccd06285ab36f7a2bea7960bd643f5c13"},
-    {file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e08de57aeb1272897208f91873511090d3f904fede4e509f9b2a0ff00db4d49"},
-    {file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5318b4ac3fbf91ed3ab766bfa55b5ce72ba520181ad6f61fbf37ed63150033f4"},
-    {file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df27dfab1565dce2dad8d6ff41a9bc35fb52ed56fba857f26faffac05ffe201"},
-    {file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71319fde3214e2382bc7ec402268b9b8c320a2eb86ea764e79a29f7562de06fd"},
-    {file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7e0910ac23e7c1ba9af00818f52cdfc812210b4ab10c2be54f1f6456e144e0a7"},
-    {file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8566b0b57537b570bbef6736d461108c2354e40bd1b7166f9f51cc2e0d8124cb"},
-    {file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7a0f9c9dcb3bb482043a2b0e3ceb371b17d7ad320317529c2834960649fd20"},
-    {file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70ae9442b369fa6365018ee92cda1fa26eade3b87640c744b8e3d327872cd8ae"},
-    {file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c018fcf03428f940dfa52eea7ff443dea9eee20c2e161486d0a6d2509693904"},
|
||||
{file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:adb7d4a449af756ad9c5b2074ce7844ef34981827cce7510151a8a01493e68f3"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:66a1d90bcd837734efbe3060f17b98a071f773cac50183efc9a6d2dc40bdbce6"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41fbc4a75f7098d38246528af34a17b9b7411fb63914787141c82d178bc189be"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b6da9732b26cd98b8ed672bc7684ccf7a589f8d7f56faa7439f8d78b5f4c32"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c1954ba9735bf4af6737adc9da68179b6c49b698e288e839705c5c0a260ce85"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fa6b01af8c82b860fb4799651dc10a79645b88c8ee103bb14c6e8cdd8b00d8bd"},
|
||||
{file = "clickhouse-connect-0.6.10.tar.gz", hash = "sha256:5d4fc0deff7151db66670f289bb2fe714eecc75352eb1d19b3144e267b21456d"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f13409dbd1bf06e13e4d24c0e6ef40dcf8a452f26bebf1b5ef2ca096acd6c357"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e47206073d8714bc872bceebbbd0580f25cf02d3a5c3a298d19ebc6939876c5"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b0c6253e4be25c9ae65e5aafe0a35fb9d98ac8f22e41884abaeeefb37d432ed"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7c073c936672c918b440207bef0f40b691140646a3ab7a65f02d71de719dd53"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f901bca0f4a19cad08cd111a42c339ccf7682af6aae154f72454ec2032d0f422"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0214c4bc5f7aaae3e14cd67bd24f6bccd7bc97ec2e96e1ec8d69094ed3ffd399"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bdebd13aceeef172f88c6f717cbcf1546c6b916fc601547cfdf4e56a2a129191"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:535a92a2e45a9e64be6cfc1aa85b51ba66ec2cc5e753758f56bd4fdbaa9a7990"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-win32.whl", hash = "sha256:a45cf355a56aca90aca01d74d92bf8c94e1c853fc4c474ee3f8450701dbf6973"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:aa98d776a86495f31a7641b7f31313292a37e0765f0821e82f7c54b3ff9d1325"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66dcf77fc062fe7d8134308ad8b4ddfdbeda2be81cc84147c8090cdb8743f11d"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9bc615e5df873e11dc034af56ec7fc96e7b95d29899188098a1dcba4852542b6"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a919e9d6d3e830a6dde38fd28785f3070531e468be9128bcc84ca80d0b0caa07"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eab582154a8204d1e2bd309f01c6e75c7d5a66370573069d969e4d8d6596f3ee"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cc9c22bd234ad76a3f7ff5493276254e6d94dae75476ce92aebba294c52ed30"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fc13116b43a37674db3641ba325ccb7abdfc45aa4c81c0d60a04d9b03c2a4e07"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2ed439110a634348af432ba891fc53b69eeb0f456867f279b440f73ef1ee243d"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a7442ea25c7c2447c2efe7c81642f038470d65e70bf07f6d620fc9cd92459e1"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-win32.whl", hash = "sha256:e2e1248ce2b0b6dc00c04a183d0e5c5f5ff2e51d5b782d7095f7747ea38e0493"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:995335519d79ea692160beaa98f9717c3c14170fe43a7ff3c19470b930ec8d81"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9becc18719e393afa840dfcf04e5645d7ae413cd66ef68fb5dadf1affe4b8616"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4878ec5ef82e09401418b9dd47e228d50f3100aabc15564193430e13c3ceb6c2"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed71b85f3b8fd66a57d55732981f0d478322c59346b31c5a733c3aaae99e8218"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe2d0af7165059e20c82dc663dc8c446d844282e4fac5f9673d1c63a4db4c23f"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:00c1480cce60e58ae0857f19134c84856bc033b496baaeb920be258e306200e4"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d9450ef2ffd09d01e4485bb6b41b3cb49c92a310d7ddf3c7aaa33abb12b924db"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b7ffcd268abf59e23757cb79e9279dcdea4dcfd5ea6603d3c994e7df5e94c6b5"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-win32.whl", hash = "sha256:29dc07e8d48580b8dfa8a4c2bbb8a6343d5d2f8e64120dca854bc31c2faea45a"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-win_amd64.whl", hash = "sha256:17e47909d02623e762d941b82237f6aa457985058b65aea4a46ceed2d7235919"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bb29eefe4c7899f85f7c675160ba973f780284a2baadf1f7ba8b09d3d6955aa9"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bf119182bdb6069d99ff70ad8d9603b888f070d7efae8792820be78333df566"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c131001f3143f8265fc5a0147590d103e26b04cf16877c045e490a48e577985"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c4cd828f669a13c1fe8179796277d0a66a647f5acf3add87d6f53dfb69a74dc"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9992ee252ca78c84b0fd9f3c1c102f9ce51be11e3908a0419fb7c44d7df6bab"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7a1c81a6cc8d46865162699c1832cbfd8af5923b8516820415e7b5d7078e5df6"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a2e2e847ab5ca88761c0736fbc70b22f612406e3b3b6478907defe321415ddc"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:937c1dc1457935778909058e8527f9c1b91bfa0ef943ec4fb3b214f2b81da863"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-win32.whl", hash = "sha256:23a819007930be03bfef325742f0bfb51ce42bce7cf1adb203f879a5ccd9b16a"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-win_amd64.whl", hash = "sha256:876134ded05bee9168d2c76eef49f7719501496a2ba440778649240e906017b6"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:816efe8f178fa212520a0f1f0c9f9899d03732b73aff33a875368e28bb4b8b06"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:891048c48dbcce7ff096fa0a338c763e3c2fe4768e41bcc7a27223133ba58101"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f7106ad09f736676e57987039de95817e260eeeb1cae1931e41b4362ea13b5f"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88d3edf76fb4d6ebd38c8ce4c84e7a7489c226592e9e5b8664156b3aac726a51"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1287a32fc28fd8b32401810948366c50a7a25c74074ab2f98c369804c6366743"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f2a8ac16f5750ee219960e68003a43dc25b873f0d608ad379035e9d642cd5a30"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9c36e19fd418ef41b4162fbbe658d2d579d590685c3f3db7ad55aca9cc2fb0f6"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4de6ce7bcb8da818929aaac03bb2c0c6d68d13702d79d1348d123ebf868c6917"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-win32.whl", hash = "sha256:bd498329370c3551c7dbc6013018b0521da102f63e40d554485a29e07ff57cf3"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:128fba3e62292e340194d271883d049f63f2fdb6fc56513f776df3fccc81cfab"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba12b89e19be14c9aeb1042ec72fa0ba0eb892e4c9d6bfb6ffc52a0e1a628fb1"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:227a6cc07c372778909c55d114b238616c46ae70d99fed4647e960aba2d189ff"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cecbee657bb902b0baa6d2b768166b967d1edba0e3ba77f22ddc8338ce588c46"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbeca9ba2529bea8ac32f8e6fa35c43f61d8c3b4883b56668c712a1065a58b2e"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7238fdbafdec8fb60f8a0e3e5f2237ae3ded8bad372216df82c0d94709815136"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7984d9b55927e6fbdcd774790327aa3abba2f12020caa7a422ef0ca0f95080ae"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:496cbc65916e83cb4c5175f22e678427b6aef75b582d04e73f3f6303ee674bf9"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e57182bb96fded997c526160626a1258ff050910ffa6e7eac21c6591f74f2d0"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:193b9e78a190814e36f1570466eb372e3981ce2c2c0536ef03d728ea6504af0f"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3e4e4aa9e83d7c73a347becba3ab5c4c7a0abde7c01d57edd0c2bb6399585946"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56962ec499d25bd0cb33c90cdf52c9220691be30a587faf45ba346add679168b"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf77febdf4e98990bd1e775c3909f9db62fd544a759298fc8b03e72167a2375"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d773e048635223ccd974ebbcac84190682772bcf7ed89333f7a2c3f42da5cc1"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29365f01bee352e0b0ed3ef145d41c6b642d3ffc54b85c8a743c75398a752384"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ce55154bb1d6911e865bd95d2af935bbb1eb3d66da7b5631d6c42369383eb36d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -916,13 +969,13 @@ test-randomorder = ["pytest-randomly"]

[[package]]
name = "ctransformers"
version = "0.2.23"
version = "0.2.24"
description = "Python bindings for the Transformer models implemented in C/C++ using GGML library."
optional = true
python-versions = "*"
files = [
    {file = "ctransformers-0.2.23-py3-none-any.whl", hash = "sha256:ebbba968c81a71919f9c3975b20d9b6c85e6fd5f640d1f7b86cb8d5d56c3d347"},
    {file = "ctransformers-0.2.23.tar.gz", hash = "sha256:de665dfbd529cf369059e52b277dcbdd6761a0547c8931b3cfbf89ea1eeb3d3c"},
    {file = "ctransformers-0.2.24-py3-none-any.whl", hash = "sha256:fa2ad7a38726c3ad6e57d1aff696f6e89fe3c0de5df2109b579cb6bc6c2ef599"},
    {file = "ctransformers-0.2.24.tar.gz", hash = "sha256:bb463204f557d00d533e1dc50346e0b57870cea68965ec135d3fa8db1c76ed2e"},
]

[package.dependencies]
@@ -1259,6 +1312,20 @@ files = [
[package.extras]
test = ["pytest (>=6)"]

[[package]]
name = "execnet"
version = "2.0.2"
description = "execnet: rapid multi-Python deployment"
optional = false
python-versions = ">=3.7"
files = [
    {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
    {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
]

[package.extras]
testing = ["hatch", "pre-commit", "pytest", "tox"]

[[package]]
name = "executing"
version = "1.2.0"
@@ -1635,13 +1702,13 @@ six = "*"

[[package]]
name = "google-cloud-aiplatform"
version = "1.31.0"
version = "1.31.1"
description = "Vertex AI API client library"
optional = false
python-versions = ">=3.7"
files = [
    {file = "google-cloud-aiplatform-1.31.0.tar.gz", hash = "sha256:a5de8f5cb9bcd66db08a404cf74e7ed252d5d4038649a58f37588ccb4e2785f0"},
    {file = "google_cloud_aiplatform-1.31.0-py2.py3-none-any.whl", hash = "sha256:19429dfb6098414f758810fde1690d8e8170aff7add4281681dc61de79b4112b"},
    {file = "google-cloud-aiplatform-1.31.1.tar.gz", hash = "sha256:6de8d7d647990cc0ee601d938d3a1693e3ef50f3d54d735397b2e31ca8eeb946"},
    {file = "google_cloud_aiplatform-1.31.1-py2.py3-none-any.whl", hash = "sha256:360d95c4c6f6a27fc2a4a071741a66588f0f0ca245509315839cfa320d6862e2"},
]

[package.dependencies]
@@ -1889,18 +1956,18 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]

[[package]]
name = "gotrue"
version = "1.0.2"
version = "1.0.4"
description = "Python Client Library for GoTrue"
optional = false
python-versions = ">=3.8,<4.0"
files = [
    {file = "gotrue-1.0.2-py3-none-any.whl", hash = "sha256:5377e7fd316b77df7be9e0c3c017d338bed2ba2e95a99fb44374b523d167ec65"},
    {file = "gotrue-1.0.2.tar.gz", hash = "sha256:9ad9b2536ca68676cf37dc663b64f259956826075e80a9cb3f5a3ba150355811"},
    {file = "gotrue-1.0.4-py3-none-any.whl", hash = "sha256:f016f5e317a21e55dfcee00fb360f2c7a33c5b87a38601e7b9e65cb898bcf7bb"},
    {file = "gotrue-1.0.4.tar.gz", hash = "sha256:2686c93b798fb2d3b120f067d21e66bb803b013ee6ab6fb7783093102f74603a"},
]

[package.dependencies]
httpx = ">=0.23,<0.25"
pydantic = ">=1.10.0,<2.0.0"
pydantic = ">=1.10,<3"

[[package]]
name = "greenlet"
@@ -2847,39 +2914,38 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)",

[[package]]
name = "langchain"
version = "0.0.256"
version = "0.0.274"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
    {file = "langchain-0.0.256-py3-none-any.whl", hash = "sha256:3389fcb85d8d4fb16bae5ca9995d3ce634a3330f8ac1f458afc6171e4ca52de5"},
    {file = "langchain-0.0.256.tar.gz", hash = "sha256:b80115e19f86199c49bca8ef18c09d2d87548332a0144a1c5ce6a2f82e4f5f9c"},
    {file = "langchain-0.0.274-py3-none-any.whl", hash = "sha256:402e0518a2e3183498158c159cd50f7d13e948908430f682eebe2741a51ebc2a"},
    {file = "langchain-0.0.274.tar.gz", hash = "sha256:adc2cf9993765c9d241aae6079497b0f62090bebff05aa985dd92e1b10b8cacb"},
]

[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
dataclasses-json = ">=0.5.7,<0.6.0"
langsmith = ">=0.0.11,<0.1.0"
langsmith = ">=0.0.21,<0.1.0"
numexpr = ">=2.8.4,<3.0.0"
numpy = ">=1,<2"
openapi-schema-pydantic = ">=1.2,<2.0"
pydantic = ">=1,<2"
pydantic = ">=1,<3"
PyYAML = ">=5.3"
requests = ">=2,<3"
SQLAlchemy = ">=1.4,<3"
tenacity = ">=8.1.0,<9.0.0"

[package.extras]
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "anthropic (>=0.3,<0.4)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.6.8,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "libdeeplake (>=0.0.60,<0.0.61)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=0.11.0,<0.12.0)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "octoai-sdk (>=0.1.1,<0.2.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)", "xinference (>=0.0.6,<0.0.7)"]
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b6)", "openai (>=0,<1)"]
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.6.8,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "libdeeplake (>=0.0.60,<0.0.61)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (>=0,<1)"]
clarifai = ["clarifai (>=9.1.0)"]
cohere = ["cohere (>=4,<5)"]
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
embeddings = ["sentence-transformers (>=2,<3)"]
extended-testing = ["amazon-textract-caller (<2)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xata (>=1.0.0a7,<2.0.0)", "xinference (>=0.0.6,<0.0.7)", "zep-python (>=0.32)"]
extended-testing = ["amazon-textract-caller (<2)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "openai (>=0,<1)", "openapi-schema-pydantic (>=1.2,<2.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
javascript = ["esprima (>=4.0.1,<5.0.0)"]
llms = ["anthropic (>=0.3,<0.4)", "clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.19)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)", "xinference (>=0.0.6,<0.0.7)"]
llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"]
qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
@@ -2925,13 +2991,13 @@ test = ["psutil", "pytest", "pytest-asyncio"]

[[package]]
name = "langsmith"
version = "0.0.26"
version = "0.0.27"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
    {file = "langsmith-0.0.26-py3-none-any.whl", hash = "sha256:61c1d4582104d96edde04e1eea1dae347645b691c44489a5871341a2a1a2a1eb"},
    {file = "langsmith-0.0.26.tar.gz", hash = "sha256:80a4ef1b663a24a460d25b9986ab2010c5d06b6061c65be473abafc0647d191a"},
    {file = "langsmith-0.0.27-py3-none-any.whl", hash = "sha256:f61b07f093ba377b9af53c3d6f68fd1245f8f28605d4fc88433208aca93a5a23"},
    {file = "langsmith-0.0.27.tar.gz", hash = "sha256:c4df680ee8bf88d37f56ba196048341847c48b50ae561719c5542ef6488170e5"},
]

[package.dependencies]
@@ -2960,12 +3026,12 @@ test = ["coverage", "pytest", "pytest-cov"]

[[package]]
name = "llama-cpp-python"
version = "0.1.78"
version = "0.1.81"
description = "A Python wrapper for llama.cpp"
optional = true
python-versions = ">=3.7"
files = [
    {file = "llama_cpp_python-0.1.78.tar.gz", hash = "sha256:cffdcbc4b5fca2bceb1f6bf3590460ebc898c69295a02439dfc6327566e10367"},
    {file = "llama_cpp_python-0.1.81.tar.gz", hash = "sha256:8b8fa42e41c6334efe056571b5f19056ffd9776b94ee152530e1fb9fe81deda2"},
]

[package.dependencies]
@@ -3139,6 +3205,25 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"]
flake8 = ["flake8"]
tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"]

[[package]]
name = "mako"
version = "1.2.4"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
optional = false
python-versions = ">=3.7"
files = [
    {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"},
    {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"},
]

[package.dependencies]
MarkupSafe = ">=0.9.2"

[package.extras]
babel = ["Babel"]
lingua = ["lingua"]
testing = ["pytest"]

[[package]]
name = "markdown"
version = "3.4.4"
@@ -3187,7 +3272,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
name = "markupsafe"
version = "2.1.3"
description = "Safely add untrusted strings to HTML/XML markup."
optional = true
optional = false
python-versions = ">=3.7"
files = [
    {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
@@ -3308,13 +3393,13 @@ files = [

[[package]]
name = "metaphor-python"
version = "0.1.15"
version = "0.1.16"
description = "A Python package for the Metaphor API."
optional = false
python-versions = "*"
files = [
    {file = "metaphor-python-0.1.15.tar.gz", hash = "sha256:66e324377af7df4799b845fe8de4f81393db48baa76db59308f7eebec2ba9421"},
    {file = "metaphor_python-0.1.15-py3-none-any.whl", hash = "sha256:e9efe6ccd175d67b08331955c0290ff1fe83bb4c43e80a88a154e254bf47aaf4"},
    {file = "metaphor-python-0.1.16.tar.gz", hash = "sha256:c26c3e8a37ef1b195073d556c929180c2b1acf7590a801da8d1b9afbdd82dd8f"},
    {file = "metaphor_python-0.1.16-py3-none-any.whl", hash = "sha256:f3989a679f888cc0374593ab5e5a82b0f121c90b14d6eb5b813e861990dbb13b"},
]

[package.dependencies]
@@ -3747,20 +3832,6 @@ dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-moc
embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"]
wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"]

[[package]]
name = "openapi-schema-pydantic"
version = "1.2.4"
description = "OpenAPI (v3) specification schema as pydantic class"
optional = false
python-versions = ">=3.6.1"
files = [
    {file = "openapi-schema-pydantic-1.2.4.tar.gz", hash = "sha256:3e22cf58b74a69f752cc7e5f1537f6e44164282db2700cbbcd3bb99ddd065196"},
    {file = "openapi_schema_pydantic-1.2.4-py3-none-any.whl", hash = "sha256:a932ecc5dcbb308950282088956e94dea069c9823c84e507d64f6b622222098c"},
]

[package.dependencies]
pydantic = ">=1.8.2"

[[package]]
name = "openpyxl"
version = "3.1.2"
@@ -4222,6 +4293,23 @@ files = [
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
testing = ["docopt", "pytest (<6.0.0)"]

[[package]]
name = "passlib"
version = "1.7.4"
description = "comprehensive password hashing framework supporting over 30 schemes"
optional = false
python-versions = "*"
files = [
    {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"},
    {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"},
]

[package.extras]
argon2 = ["argon2-cffi (>=18.2.0)"]
bcrypt = ["bcrypt (>=3.1.0)"]
build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"]
totp = ["cryptography"]

[[package]]
name = "pathspec"
version = "0.11.2"
@@ -4415,13 +4503,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co

[[package]]
name = "pluggy"
version = "1.2.0"
version = "1.3.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
    {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"},
    {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"},
    {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
    {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
]

[package.extras]
@@ -5094,17 +5182,17 @@ diagrams = ["jinja2", "railroad-diagrams"]

[[package]]
name = "pypdf"
version = "3.15.2"
version = "3.15.4"
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
optional = false
python-versions = ">=3.6"
files = [
    {file = "pypdf-3.15.2-py3-none-any.whl", hash = "sha256:f6e598292be34187287a609c72815c1502b3dc2c997b374ba0870ce79d2e975a"},
    {file = "pypdf-3.15.2.tar.gz", hash = "sha256:cdf7d75ebb8901f3352cf9488c5f662c6de9c52e432c429d15cada67ba372fce"},
    {file = "pypdf-3.15.4-py3-none-any.whl", hash = "sha256:791f0a52ddf390709f1f1b0c05c4d8cde13829b4f7cb91b4003b9bdd352bc944"},
    {file = "pypdf-3.15.4.tar.gz", hash = "sha256:a2780ed01dc4da23ac1542209f58fd3d951d8dd37c3c0309d123cd2f2679fb03"},
]

[package.dependencies]
typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
typing_extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""}

[package.extras]
crypto = ["PyCryptodome", "cryptography"]
@@ -5177,6 +5265,43 @@ pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]

[[package]]
name = "pytest-mock"
version = "3.11.1"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.7"
files = [
    {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"},
    {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"},
]

[package.dependencies]
pytest = ">=5.0"

[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]

[[package]]
name = "pytest-xdist"
version = "3.3.1"
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
optional = false
python-versions = ">=3.7"
files = [
    {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"},
    {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"},
]

[package.dependencies]
execnet = ">=1.1"
pytest = ">=6.2.0"

[package.extras]
psutil = ["psutil (>=3.0)"]
setproctitle = ["setproctitle"]
testing = ["filelock"]

[[package]]
name = "python-dateutil"
version = "2.8.2"
@@ -5792,53 +5917,62 @@ files = [

[[package]]
name = "safetensors"
version = "0.3.2"
version = "0.3.3"
description = "Fast and Safe Tensor serialization"
optional = true
python-versions = "*"
files = [
    {file = "safetensors-0.3.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b6a66989075c2891d743153e8ba9ca84ee7232c8539704488f454199b8b8f84d"},
    {file = "safetensors-0.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:670d6bc3a3b377278ce2971fa7c36ebc0a35041c4ea23b9df750a39380800195"},
    {file = "safetensors-0.3.2-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:7f80af7e4ab3188daaff12d43d078da3017a90d732d38d7af4eb08b6ca2198a5"},
    {file = "safetensors-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb44e140bf2aeda98d9dde669dbec15f7b77f96a9274469b91a6cf4bcc5ec3b"},
    {file = "safetensors-0.3.2-cp310-cp310-win32.whl", hash = "sha256:2961c1243fd0da46aa6a1c835305cc4595486f8ac64632a604d0eb5f2de76175"},
    {file = "safetensors-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c813920482c337d1424d306e1b05824a38e3ef94303748a0a287dea7a8c4f805"},
    {file = "safetensors-0.3.2-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:707df34bd9b9047e97332136ad98e57028faeccdb9cfe1c3b52aba5964cc24bf"},
    {file = "safetensors-0.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:becc5bb85b2947eae20ed23b407ebfd5277d9a560f90381fe2c42e6c043677ba"},
    {file = "safetensors-0.3.2-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:54ad6af663e15e2b99e2ea3280981b7514485df72ba6d014dc22dae7ba6a5e6c"},
    {file = "safetensors-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0fac127ff8fb04834da5c6d85a8077e6a1c9180a11251d96f8068db922a17"},
    {file = "safetensors-0.3.2-cp311-cp311-win32.whl", hash = "sha256:155b82dbe2b0ebff18cde3f76b42b6d9470296e92561ef1a282004d449fa2b4c"},
    {file = "safetensors-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:a86428d196959619ce90197731be9391b5098b35100a7228ef4643957648f7f5"},
    {file = "safetensors-0.3.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:c1f8ab41ed735c5b581f451fd15d9602ff51aa88044bfa933c5fa4b1d0c644d1"},
    {file = "safetensors-0.3.2-cp37-cp37m-macosx_13_0_x86_64.whl", hash = "sha256:bc9cfb3c9ea2aec89685b4d656f9f2296f0f0d67ecf2bebf950870e3be89b3db"},
    {file = "safetensors-0.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d7d70d48585fe8df00725aa788f2e64fd24a4c9ae07cd6be34f6859d0f89a9c"},
    {file = "safetensors-0.3.2-cp37-cp37m-win32.whl", hash = "sha256:6ff59bc90cdc857f68b1023be9085fda6202bbe7f2fd67d06af8f976d6adcc10"},
    {file = "safetensors-0.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8b05c93da15fa911763a89281906ca333ed800ab0ef1c7ce53317aa1a2322f19"},
    {file = "safetensors-0.3.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8969cfd9e8d904e8d3c67c989e1bd9a95e3cc8980d4f95e4dcd43c299bb94253"},
    {file = "safetensors-0.3.2-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:f54148ac027556eb02187e9bc1556c4d916c99ca3cb34ca36a7d304d675035c1"},
    {file = "safetensors-0.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa98f49e95f02eb750d32c4947e7d5aa43883149ebd0414920866446525b70f0"},
    {file = "safetensors-0.3.2-cp38-cp38-win32.whl", hash = "sha256:33409df5e28a83dc5cc5547a3ac17c0f1b13a1847b1eb3bc4b3be0df9915171e"},
    {file = "safetensors-0.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:e04a7cbbb3856159ab99e3adb14521544f65fcb8548cce773a1435a0f8d78d27"},
    {file = "safetensors-0.3.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:7c864cf5dcbfb608c5378f83319c60cc9c97263343b57c02756b7613cd5ab4dd"},
    {file = "safetensors-0.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:14e8c19d6dc51d4f70ee33c46aff04c8ba3f95812e74daf8036c24bc86e75cae"},
    {file = "safetensors-0.3.2-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:fafd95e5ef41e8f312e2a32b7031f7b9b2a621b255f867b221f94bb2e9f51ae8"},
    {file = "safetensors-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ff0024ef2e5722a79af24688ce4a430f70601d0cf712a744105ed4b8f67ba5"},
    {file = "safetensors-0.3.2-cp39-cp39-win32.whl", hash = "sha256:827af9478b78977248ba93e2fd97ea307fb63f463f80cef4824460f8c2542a52"},
    {file = "safetensors-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9b09f27c456efa301f98681ea14b12f81f2637889f6336223ccab71e42c34541"},
    {file = "safetensors-0.3.2.tar.gz", hash = "sha256:2dbd34554ed3b99435a0e84df077108f5334c8336b5ed9cb8b6b98f7b10da2f6"},
    {file = "safetensors-0.3.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:92e4d0c8b2836120fddd134474c5bda8963f322333941f8b9f643e5b24f041eb"},
    {file = "safetensors-0.3.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3dcadb6153c42addc9c625a622ebde9293fabe1973f9ef31ba10fb42c16e8536"},
    {file = "safetensors-0.3.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:08f26b61e1b0a14dc959aa9d568776bd038805f611caef1de04a80c468d4a7a4"},
    {file = "safetensors-0.3.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:17f41344d9a075f2f21b289a49a62e98baff54b5754240ba896063bce31626bf"},
    {file = "safetensors-0.3.3-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:f1045f798e1a16a6ced98d6a42ec72936d367a2eec81dc5fade6ed54638cd7d2"},
    {file = "safetensors-0.3.3-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:eaf0e4bc91da13f21ac846a39429eb3f3b7ed06295a32321fa3eb1a59b5c70f3"},
    {file = "safetensors-0.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a07121f427e646a50d18c1be0fa1a2cbf6398624c31149cd7e6b35486d72189e"},
    {file = "safetensors-0.3.3-cp310-cp310-win32.whl", hash = "sha256:a85e29cbfddfea86453cc0f4889b4bcc6b9c155be9a60e27be479a34e199e7ef"},
    {file = "safetensors-0.3.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:cbc3312f134baf07334dd517341a4b470b2931f090bd9284888acb7dfaf4606f"},
    {file = "safetensors-0.3.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d15030af39d5d30c22bcbc6d180c65405b7ea4c05b7bab14a570eac7d7d43722"},
    {file = "safetensors-0.3.3-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:f84a74cbe9859b28e3d6d7715ac1dd3097bebf8d772694098f6d42435245860c"},
    {file = "safetensors-0.3.3-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:10d637423d98ab2e6a4ad96abf4534eb26fcaf8ca3115623e64c00759374e90d"},
    {file = "safetensors-0.3.3-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:3b46f5de8b44084aff2e480874c550c399c730c84b2e8ad1bddb062c94aa14e9"},
    {file = "safetensors-0.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e8fdf7407dba44587ed5e79d5de3533d242648e1f2041760b21474bd5ea5c8c"},
    {file = "safetensors-0.3.3-cp311-cp311-win32.whl", hash = "sha256:7d3b744cee8d7a46ffa68db1a2ff1a1a432488e3f7a5a97856fe69e22139d50c"},
    {file = "safetensors-0.3.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:2fff5b19a1b462c17322998b2f4b8bce43c16fe208968174d2f3a1446284ceed"},
    {file = "safetensors-0.3.3-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:41adb1d39e8aad04b16879e3e0cbcb849315999fad73bc992091a01e379cb058"},
    {file = "safetensors-0.3.3-cp37-cp37m-macosx_12_0_x86_64.whl", hash = "sha256:0f2b404250b3b877b11d34afcc30d80e7035714a1116a3df56acaca6b6c00096"},
    {file = "safetensors-0.3.3-cp37-cp37m-macosx_13_0_x86_64.whl", hash = "sha256:b43956ef20e9f4f2e648818a9e7b3499edd6b753a0f5526d4f6a6826fbee8446"},
    {file = "safetensors-0.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c32ee08f61cea56a5d62bbf94af95df6040c8ab574afffaeb7b44ae5da1e9e3"},
    {file = "safetensors-0.3.3-cp37-cp37m-win32.whl", hash = "sha256:351600f367badd59f7bfe86d317bb768dd8c59c1561c6fac43cafbd9c1af7827"},
    {file = "safetensors-0.3.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8530399666748634bc0b301a6a5523756931b0c2680d188e743d16304afe917a"},
    {file = "safetensors-0.3.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:9d741c1f1621e489ba10aa3d135b54202684f6e205df52e219d5eecd673a80c9"},
    {file = "safetensors-0.3.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:0c345fd85b4d2093a5109596ff4cd9dfc2e84992e881b4857fbc4a93a3b89ddb"},
    {file = "safetensors-0.3.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69ccee8d05f55cdf76f7e6c87d2bdfb648c16778ef8acfd2ecc495e273e9233e"},
    {file = "safetensors-0.3.3-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:c08a9a4b7a4ca389232fa8d097aebc20bbd4f61e477abc7065b5c18b8202dede"},
    {file = "safetensors-0.3.3-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:a002868d2e3f49bbe81bee2655a411c24fa1f8e68b703dec6629cb989d6ae42e"},
    {file = "safetensors-0.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab43aeeb9eadbb6b460df3568a662e6f1911ecc39387f8752afcb6a7d96c087"},
    {file = "safetensors-0.3.3-cp38-cp38-win32.whl", hash = "sha256:f2f59fce31dd3429daca7269a6b06f65e6547a0c248f5116976c3f1e9b73f251"},
    {file = "safetensors-0.3.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:59a596b3225c96d59af412385981f17dd95314e3fffdf359c7e3f5bb97730a19"},
    {file = "safetensors-0.3.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:82a16e92210a6221edd75ab17acdd468dd958ef5023d9c6c1289606cc30d1479"},
    {file = "safetensors-0.3.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:98a929e763a581f516373ef31983ed1257d2d0da912a8e05d5cd12e9e441c93a"},
    {file = "safetensors-0.3.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:12b83f1986cd16ea0454c636c37b11e819d60dd952c26978310a0835133480b7"},
    {file = "safetensors-0.3.3-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:f439175c827c2f1bbd54df42789c5204a10983a30bc4242bc7deaf854a24f3f0"},
    {file = "safetensors-0.3.3-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:0085be33b8cbcb13079b3a8e131656e05b0bc5e6970530d4c24150f7afd76d70"},
    {file = "safetensors-0.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad3cc8006e7a86ee7c88bd2813ec59cd7cc75b03e6fa4af89b9c7b235b438d68"},
    {file = "safetensors-0.3.3-cp39-cp39-win32.whl", hash = "sha256:ab29f54c6b8c301ca05fa014728996bd83aac6e21528f893aaf8945c71f42b6d"},
    {file = "safetensors-0.3.3.tar.gz", hash = "sha256:edb7072d788c4f929d0f5735d3a2fb51e5a27f833587828583b7f5747af1a2b8"},
]

[package.extras]
all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"]
dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"]
jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)"]
jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)"]
numpy = ["numpy (>=1.21.6)"]
paddlepaddle = ["paddlepaddle (>=2.4.1)"]
paddlepaddle = ["numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)"]
pinned-tf = ["tensorflow (==2.11.0)"]
quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"]
tensorflow = ["tensorflow (>=2.11.0)"]
tensorflow = ["numpy (>=1.21.6)", "tensorflow (>=2.11.0)"]
testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "numpy (>=1.21.6)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)"]
torch = ["torch (>=1.10)"]
torch = ["numpy (>=1.21.6)", "torch (>=1.10)"]

[[package]]
name = "scikit-learn"
@@ -6429,13 +6563,13 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"]

[[package]]
name = "textual"
version = "0.34.0"
version = "0.35.1"
description = "Modern Text User Interface framework"
optional = true
python-versions = ">=3.7,<4.0"
files = [
    {file = "textual-0.34.0-py3-none-any.whl", hash = "sha256:c695866acd8e85519eb0920cb921999ac5f58891ef7925e8b132e0eebc142e88"},
    {file = "textual-0.34.0.tar.gz", hash = "sha256:b66deee4afa9f6986c1bee973731d7dad2b169872377d238c9aad7141449b443"},
    {file = "textual-0.35.1-py3-none-any.whl", hash = "sha256:c4257ed3019cf8a2da2ac59ae59de5e66e04b95d482d065cfb3099f70fddd36f"},
    {file = "textual-0.35.1.tar.gz", hash = "sha256:70ca0bfe582f96dfa10179a9ab71329b8b15e750e26b7cee1fb4a67a981bbf36"},
]

[package.dependencies]
@@ -6854,6 +6988,17 @@ files = [
    {file = "types_cachetools-5.3.0.6-py3-none-any.whl", hash = "sha256:f7f8a25bfe306f2e6bc2ad0a2f949d9e72f2d91036d509c36d3810bf728bc6e1"},
]

[[package]]
name = "types-passlib"
version = "1.7.7.13"
description = "Typing stubs for passlib"
optional = false
python-versions = "*"
files = [
    {file = "types-passlib-1.7.7.13.tar.gz", hash = "sha256:f152639f1f2103d7f59a56e2aec5f9398a75a80830991d0d68aac5c2b9c32a77"},
    {file = "types_passlib-1.7.7.13-py3-none-any.whl", hash = "sha256:414b5ee9c88313357c9261cfcf816509b1e8e4673f0796bd61e9ef249f6fe076"},
]

[[package]]
name = "types-pillow"
version = "9.5.0.6"
@@ -6865,6 +7010,31 @@ files = [
    {file = "types_Pillow-9.5.0.6-py3-none-any.whl", hash = "sha256:1d238abaa9d529b04941d805b7f4d3f7df30702bb14521ec507617f117406fb4"},
]

[[package]]
name = "types-pyasn1"
version = "0.4.0.6"
description = "Typing stubs for pyasn1"
optional = false
python-versions = "*"
files = [
    {file = "types-pyasn1-0.4.0.6.tar.gz", hash = "sha256:8f1965d0b79152f9d1efc89f9aa9a8cdda7cd28b2619df6737c095cbedeff98b"},
    {file = "types_pyasn1-0.4.0.6-py3-none-any.whl", hash = "sha256:dd5fc818864e63a66cd714be0a7a59a493f4a81b87ee9ac978c41f1eaa9a0cef"},
]

[[package]]
name = "types-python-jose"
version = "3.3.4.8"
description = "Typing stubs for python-jose"
optional = false
python-versions = "*"
files = [
    {file = "types-python-jose-3.3.4.8.tar.gz", hash = "sha256:3c316675c3cee059ccb9aff87358254344915239fa7f19cee2787155a7db14ac"},
    {file = "types_python_jose-3.3.4.8-py3-none-any.whl", hash = "sha256:95592273443b45dc5cc88f7c56aa5a97725428753fb738b794e63ccb4904954e"},
]

[package.dependencies]
types-pyasn1 = "*"

[[package]]
name = "types-pytz"
version = "2023.3.0.1"
@@ -7119,33 +7289,33 @@ files = [

[[package]]
name = "watchfiles"
version = "0.19.0"
version = "0.20.0"
description = "Simple, modern and high performance file watching and code reload in python."
optional = false
python-versions = ">=3.7"
files = [
    {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"},
    {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"},
    {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"},
    {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"},
    {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"},
    {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"},
    {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"},
    {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"},
    {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"},
    {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"},
    {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"},
    {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"},
    {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"},
    {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"},
    {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"},
    {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"},
    {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"},
    {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"},
    {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"},
    {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"},
    {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"},
    {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"},
    {file = "watchfiles-0.20.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:3796312bd3587e14926013612b23066912cf45a14af71cf2b20db1c12dadf4e9"},
    {file = "watchfiles-0.20.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:d0002d81c89a662b595645fb684a371b98ff90a9c7d8f8630c82f0fde8310458"},
    {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:570848706440373b4cd8017f3e850ae17f76dbdf1e9045fc79023b11e1afe490"},
    {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a0351d20d03c6f7ad6b2e8a226a5efafb924c7755ee1e34f04c77c3682417fa"},
    {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:007dcc4a401093010b389c044e81172c8a2520dba257c88f8828b3d460c6bb38"},
    {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d82dbc1832da83e441d112069833eedd4cf583d983fb8dd666fbefbea9d99c0"},
    {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99f4c65fd2fce61a571b2a6fcf747d6868db0bef8a934e8ca235cc8533944d95"},
    {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5392dd327a05f538c56edb1c6ebba6af91afc81b40822452342f6da54907bbdf"},
    {file = "watchfiles-0.20.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:08dc702529bb06a2b23859110c214db245455532da5eaea602921687cfcd23db"},
    {file = "watchfiles-0.20.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7d4e66a857621584869cfbad87039e65dadd7119f0d9bb9dbc957e089e32c164"},
    {file = "watchfiles-0.20.0-cp37-abi3-win32.whl", hash = "sha256:a03d1e6feb7966b417f43c3e3783188167fd69c2063e86bad31e62c4ea794cc5"},
    {file = "watchfiles-0.20.0-cp37-abi3-win_amd64.whl", hash = "sha256:eccc8942bcdc7d638a01435d915b913255bbd66f018f1af051cd8afddb339ea3"},
    {file = "watchfiles-0.20.0-cp37-abi3-win_arm64.whl", hash = "sha256:b17d4176c49d207865630da5b59a91779468dd3e08692fe943064da260de2c7c"},
|
||||
{file = "watchfiles-0.20.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d97db179f7566dcf145c5179ddb2ae2a4450e3a634eb864b09ea04e68c252e8e"},
|
||||
{file = "watchfiles-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:835df2da7a5df5464c4a23b2d963e1a9d35afa422c83bf4ff4380b3114603644"},
|
||||
{file = "watchfiles-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:608cd94a8767f49521901aff9ae0c92cc8f5a24d528db7d6b0295290f9d41193"},
|
||||
{file = "watchfiles-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d1de8218874925bce7bb2ae9657efc504411528930d7a83f98b1749864f2ef"},
|
||||
{file = "watchfiles-0.20.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:13f995d5152a8ba4ed7c2bbbaeee4e11a5944defc7cacd0ccb4dcbdcfd78029a"},
|
||||
{file = "watchfiles-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b5c8d3be7b502f8c43a33c63166ada8828dbb0c6d49c8f9ce990a96de2f5a49"},
|
||||
{file = "watchfiles-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e43af4464daa08723c04b43cf978ab86cc55c684c16172622bdac64b34e36af0"},
|
||||
{file = "watchfiles-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d9e1f75c4f86c93d73b5bd1ebe667558357548f11b4f8af4e0e272f79413ce"},
|
||||
{file = "watchfiles-0.20.0.tar.gz", hash = "sha256:728575b6b94c90dd531514677201e8851708e6e4b5fe7028ac506a200b622019"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -7164,13 +7334,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "weaviate-client"
|
||||
version = "3.23.0"
|
||||
version = "3.23.1"
|
||||
description = "A python native Weaviate client"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "weaviate-client-3.23.0.tar.gz", hash = "sha256:3ffd7f1460c9e32755d84d4f5fc63dfc0bd990dbe2c3dc20d5c68119d467680e"},
|
||||
{file = "weaviate_client-3.23.0-py3-none-any.whl", hash = "sha256:3d3bb75c1d96b2b71e213c5eb885ae3e3f42e4304955383c467d100187d9ff8e"},
|
||||
{file = "weaviate-client-3.23.1.tar.gz", hash = "sha256:035f395fb8b17008224dc8a9ca4459b7ef4a2b0449209ac0c8d0f2e3b9a77f59"},
|
||||
{file = "weaviate_client-3.23.1-py3-none-any.whl", hash = "sha256:826b28237f7143ee4c51b988c5c37494760b4377a8536acc772a2194eb33f30c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -7608,4 +7778,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<3.11"
|
||||
content-hash = "53bb67a463c4ad0d3dd30c89e30750fc31be5530ee54539d4df34fd82ecf7bc1"
|
||||
content-hash = "505fe04c51514ef25dd955b377a00185c4a2581770af3cc84db4c47477760048"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "langflow"
|
||||
version = "0.4.13"
|
||||
version = "0.5.0a0"
|
||||
description = "A Python package with a built-in web application"
|
||||
authors = ["Logspace <contact@logspace.ai>"]
|
||||
maintainers = [
|
||||
|
|
@ -19,7 +19,7 @@ readme = "README.md"
|
|||
keywords = ["nlp", "langchain", "openai", "gpt", "gui"]
|
||||
packages = [{ include = "langflow", from = "src/backend" }]
|
||||
include = ["src/backend/langflow/*", "src/backend/langflow/**/*"]
|
||||
|
||||
documentation = "https://docs.langflow.org"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
langflow = "langflow.__main__:main"
|
||||
|
|
@ -33,7 +33,7 @@ google-search-results = "^2.4.1"
|
|||
google-api-python-client = "^2.79.0"
|
||||
typer = "^0.9.0"
|
||||
gunicorn = "^21.1.0"
|
||||
langchain = "^0.0.256"
|
||||
langchain = "^0.0.274"
|
||||
openai = "^0.27.8"
|
||||
pandas = "^2.0.0"
|
||||
chromadb = "^0.3.21"
|
||||
|
|
@ -45,7 +45,7 @@ unstructured = "^0.7.0"
|
|||
pypdf = "^3.11.0"
|
||||
lxml = "^4.9.2"
|
||||
pysrt = "^1.1.2"
|
||||
fake-useragent = "^1.1.3"
|
||||
fake-useragent = "^1.2.1"
|
||||
docstring-parser = "^0.15"
|
||||
psycopg2-binary = "^2.9.6"
|
||||
pyarrow = "^12.0.0"
|
||||
|
|
@ -63,7 +63,6 @@ python-multipart = "^0.0.6"
|
|||
sqlmodel = "^0.0.8"
|
||||
faiss-cpu = "^1.7.4"
|
||||
anthropic = "^0.3.0"
|
||||
|
||||
orjson = "3.9.3"
|
||||
multiprocess = "^0.70.14"
|
||||
cachetools = "^5.3.1"
|
||||
|
|
@ -78,7 +77,12 @@ psycopg = "^3.1.9"
|
|||
psycopg-binary = "^3.1.9"
|
||||
fastavro = "^1.8.0"
|
||||
langchain-experimental = "^0.0.8"
|
||||
alembic = "^1.11.2"
|
||||
passlib = "^1.7.4"
|
||||
bcrypt = "^4.0.1"
|
||||
python-jose = "^3.3.0"
|
||||
metaphor-python = "^0.1.11"
|
||||
markupsafe = "^2.1.3"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
black = "^23.1.0"
|
||||
|
|
@ -94,6 +98,10 @@ pandas-stubs = "^2.0.0.230412"
|
|||
types-pillow = "^9.5.0.2"
|
||||
types-appdirs = "^1.4.3.5"
|
||||
types-pyyaml = "^6.0.12.8"
|
||||
types-python-jose = "^3.3.4.8"
|
||||
types-passlib = "^1.7.7.13"
|
||||
pytest-mock = "^3.11.1"
|
||||
pytest-xdist = "^3.3.1"
|
||||
|
||||
|
||||
[tool.poetry.extras]
|
||||
|
|
|
|||
|
|
@@ -11,4 +11,4 @@ RUN rm *.whl

EXPOSE 80

CMD [ "uvicorn", "--host", "0.0.0.0", "--port", "80", "langflow.backend.app:app" ]
CMD [ "uvicorn", "--host", "0.0.0.0", "--port", "7860", "--factory", "langflow.main:create_app" ]
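The new CMD switches to uvicorn's --factory mode, which expects the import string to name a zero-argument callable returning the ASGI app rather than an app instance. A minimal sketch of what such a factory looks like; the body here is illustrative only, langflow.main:create_app is the real entrypoint:

# Minimal sketch of an app factory usable with `uvicorn --factory`.
# The wiring inside is illustrative, not langflow's actual setup.
from fastapi import FastAPI


def create_app() -> FastAPI:
    app = FastAPI(title="langflow")
    # routers, static files, and middleware would be mounted here
    return app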
@@ -1,5 +1,7 @@
from importlib import metadata
from langflow.cache import cache_manager

# Deactivate cache manager for now
# from langflow.services.cache import cache_manager
from langflow.processing.process import load_flow_from_json
from langflow.interface.custom.custom_component import CustomComponent

@@ -1,8 +1,11 @@
import sys
import time
import httpx
from langflow.utils.util import get_number_of_workers
from multiprocess import Process  # type: ignore
from langflow.services.database.utils import session_getter
from langflow.services.manager import initialize_services, initialize_settings_manager
from langflow.services.utils import get_db_manager, get_settings_manager

from multiprocess import Process, cpu_count  # type: ignore
import platform
from pathlib import Path
from typing import Optional

@@ -10,16 +13,46 @@ import socket
from rich.panel import Panel
from rich import box
from rich import print as rprint
from rich.table import Table
import typer
from langflow.main import setup_app
from langflow.settings import settings
from langflow.utils.logger import configure, logger
import webbrowser
from dotenv import load_dotenv

from rich.console import Console

console = Console()

app = typer.Typer()


def get_number_of_workers(workers=None):
    if workers == -1 or workers is None:
        workers = (cpu_count() * 2) + 1
    logger.debug(f"Number of workers: {workers}")
    return workers
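The fallback mirrors the common gunicorn sizing heuristic of twice the CPU count plus one; a quick worked example, assuming an 8-core host:

# Worked example of the (cpu_count() * 2) + 1 default, assuming cpu_count() == 8.
workers = (8 * 2) + 1
assert workers == 17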
def display_results(results):
    """
    Display the results of the migration.
    """
    for table_results in results:
        table = Table(title=f"Migration {table_results.table_name}")
        table.add_column("Name")
        table.add_column("Type")
        table.add_column("Status")

        for result in table_results.results:
            status = "Success" if result.success else "Failure"
            color = "green" if result.success else "red"
            table.add_row(result.name, result.type, f"[{color}]{status}[/{color}]")

        console.print(table)
        console.print()  # Print a new line


def update_settings(
    config: str,
    cache: str,

@@ -30,19 +63,20 @@ def update_settings(
    """Update the settings from a config file."""

    # Check for database_url in the environment variables

    initialize_settings_manager()
    settings_manager = get_settings_manager()
    if config:
        logger.debug(f"Loading settings from {config}")
        settings.update_from_yaml(config, dev=dev)
        settings_manager.settings.update_from_yaml(config, dev=dev)
    if remove_api_keys:
        logger.debug(f"Setting remove_api_keys to {remove_api_keys}")
        settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
        settings_manager.settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
    if cache:
        logger.debug(f"Setting cache to {cache}")
        settings.update_settings(CACHE=cache)
        settings_manager.settings.update_settings(CACHE=cache)
    if components_path:
        logger.debug(f"Adding component path {components_path}")
        settings.update_settings(COMPONENTS_PATH=components_path)
        settings_manager.settings.update_settings(COMPONENTS_PATH=components_path)


def serve_on_jcloud():

@@ -92,7 +126,7 @@ serve_on_jcloud():
@app.command()
def serve(
def run(
    host: str = typer.Option(
        "127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
    ),

@@ -106,7 +140,9 @@ def serve(
        help="Path to the directory containing custom components.",
        envvar="LANGFLOW_COMPONENTS_PATH",
    ),
    config: str = typer.Option("config.yaml", help="Path to the configuration file."),
    config: str = typer.Option(
        Path(__file__).parent / "config.yaml", help="Path to the configuration file."
    ),
    # .env file param
    env_file: Path = typer.Option(
        None, help="Path to the .env file containing environment variables."

@@ -146,6 +182,11 @@ def serve(
        help="Remove API keys from the projects saved in the database.",
        envvar="LANGFLOW_REMOVE_API_KEYS",
    ),
    backend_only: bool = typer.Option(
        False,
        help="Run only the backend server without the frontend.",
        envvar="LANGFLOW_BACKEND_ONLY",
    ),
):
    """
    Run the Langflow server.

@@ -167,7 +208,7 @@ def serve(
    )
    # create path object if path is provided
    static_files_dir: Optional[Path] = Path(path) if path else None
    app = setup_app(static_files_dir=static_files_dir)
    app = setup_app(static_files_dir=static_files_dir, backend_only=backend_only)
    # check if port is being used
    if is_port_in_use(port, host):
        port = get_free_port(port)

@@ -179,6 +220,10 @@ def serve(
        "timeout": timeout,
    }

    # Define an env variable to know if we are just testing the server
    if "pytest" in sys.modules:
        return

    if platform.system() in ["Windows"]:
        # Run using uvicorn on MacOS and Windows
        # Windows doesn't support gunicorn

@@ -299,6 +344,43 @@ def run_langflow(host, port, log_level, options, app):
        sys.exit(1)


@app.command()
def superuser(
    username: str = typer.Option(..., prompt=True, help="Username for the superuser."),
    password: str = typer.Option(
        ..., prompt=True, hide_input=True, help="Password for the superuser."
    ),
):
    initialize_services()
    db_manager = get_db_manager()
    with session_getter(db_manager) as session:
        from langflow.services.auth.utils import create_super_user

        if create_super_user(session, username, password):
            # Verify that the superuser was created
            from langflow.services.database.models.user.user import User

            user = session.query(User).filter(User.username == username).first()
            if user is None:
                typer.echo("Superuser creation failed.")
                return

            typer.echo("Superuser created successfully.")

        else:
            typer.echo("Superuser creation failed.")
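Because superuser is an interactive Typer command, it can also be driven non-interactively (for example in tests) through Typer's CliRunner by feeding the prompts via stdin; a sketch with placeholder credentials:

# Sketch: exercising the `superuser` command with Typer's test runner.
# The username and password values are placeholders.
from typer.testing import CliRunner

from langflow.__main__ import app

runner = CliRunner()
result = runner.invoke(app, ["superuser"], input="admin\ns3cret\n")
print(result.output)  # expect "Superuser created successfully." on success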
@app.command()
def migration(test: bool = typer.Option(False, help="Run migrations in test mode.")):
    initialize_services()
    db_manager = get_db_manager()
    if not test:
        db_manager.run_migrations()
    results = db_manager.run_migrations_test()
    display_results(results)


def main():
    app()


113
src/backend/langflow/alembic.ini
Normal file

@@ -0,0 +1,113 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# This is the path to the db in the root of the project.
# When the user runs Langflow, the database url will
# be set dynamically.
sqlalchemy.url = sqlite:///../../../langflow.db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
src/backend/langflow/alembic/README
Normal file

@@ -0,0 +1 @@
Generic single-database configuration.

81
src/backend/langflow/alembic/env.py
Normal file

@@ -0,0 +1,81 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

from langflow.services.database.manager import SQLModel

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        render_as_batch=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata, render_as_batch=True
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
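With script_location and sqlalchemy.url coming from this alembic.ini, and env.py resolving metadata from SQLModel, the same migrations can be driven programmatically through Alembic's command API; a sketch, where the ini path and database URL are placeholder assumptions (langflow sets the URL dynamically at startup):

# Sketch: applying migrations via Alembic's command API.
# The ini path and database URL below are placeholders.
from alembic import command
from alembic.config import Config

cfg = Config("src/backend/langflow/alembic.ini")
cfg.set_main_option("sqlalchemy.url", "sqlite:///langflow.db")
command.upgrade(cfg, "head")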
27
src/backend/langflow/alembic/script.py.mako
Normal file

@@ -0,0 +1,27 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,177 @@
"""Adds tables

Revision ID: 260dbcc8b680
Revises:
Create Date: 2023-08-27 19:49:02.681355

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.engine.reflection import Inspector

# revision identifiers, used by Alembic.
revision: str = "260dbcc8b680"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###

    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)
    # List existing tables
    existing_tables = inspector.get_table_names()
    # Drop 'flowstyle' table if it exists
    # and other related indices
    if "flowstyle" in existing_tables:
        op.drop_table("flowstyle")
        if "ix_flowstyle_flow_id" in [
            index["name"] for index in inspector.get_indexes("flowstyle")
        ]:
            op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle")

    existing_indices_flow = []
    existing_fks_flow = []
    if "flow" in existing_tables:
        existing_indices_flow = [
            index["name"] for index in inspector.get_indexes("flow")
        ]
        # Existing foreign keys for the 'flow' table, if it exists
        existing_fks_flow = [
            fk["referred_table"] + "." + fk["referred_columns"][0]
            for fk in inspector.get_foreign_keys("flow")
        ]
    # Now check if the columns user_id exists in the 'flow' table
    # If it does not exist, we need to create the foreign key

    if "user" not in existing_tables:
        op.create_table(
            "user",
            sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.Column("username", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
            sa.Column("password", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
            sa.Column("is_active", sa.Boolean(), nullable=False),
            sa.Column("is_superuser", sa.Boolean(), nullable=False),
            sa.Column("create_at", sa.DateTime(), nullable=False),
            sa.Column("updated_at", sa.DateTime(), nullable=False),
            sa.Column("last_login_at", sa.DateTime(), nullable=True),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint("id"),
        )
        with op.batch_alter_table("user", schema=None) as batch_op:
            batch_op.create_index(
                batch_op.f("ix_user_username"), ["username"], unique=True
            )

    if "apikey" not in existing_tables:
        op.create_table(
            "apikey",
            sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
            sa.Column("created_at", sa.DateTime(), nullable=False),
            sa.Column("last_used_at", sa.DateTime(), nullable=True),
            sa.Column("total_uses", sa.Integer(), nullable=False, default=0),
            sa.Column("is_active", sa.Boolean(), nullable=False, default=True),
            sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
            sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.ForeignKeyConstraint(
                ["user_id"],
                ["user.id"],
            ),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint("id"),
        )
        with op.batch_alter_table("apikey", schema=None) as batch_op:
            batch_op.create_index(
                batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True
            )
            batch_op.create_index(batch_op.f("ix_apikey_name"), ["name"], unique=False)
            batch_op.create_index(
                batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False
            )
    if "flow" not in existing_tables:
        op.create_table(
            "flow",
            sa.Column("data", sa.JSON(), nullable=True),
            sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
            sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
            sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.ForeignKeyConstraint(
                ["user_id"],
                ["user.id"],
            ),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint("id"),
        )
    # Conditionally create indices for 'flow' table
    # if _alembic_tmp_flow exists, then we need to drop it first
    # This is to deal with SQLite not being able to ROLLBACK
    # for some unknown reason
    if "_alembic_tmp_flow" in existing_tables:
        op.drop_table("_alembic_tmp_flow")
    with op.batch_alter_table("flow", schema=None) as batch_op:
        flow_columns = [col["name"] for col in inspector.get_columns("flow")]
        if "user_id" not in flow_columns:
            batch_op.add_column(
                sa.Column(
                    "user_id",
                    sqlmodel.sql.sqltypes.GUID(),
                    nullable=True,  # This should be False, but we need to allow NULL values for now
                )
            )
        if "user.id" not in existing_fks_flow:
            batch_op.create_foreign_key("fk_flow_user_id", "user", ["user_id"], ["id"])
        if "ix_flow_description" not in existing_indices_flow:
            batch_op.create_index(
                batch_op.f("ix_flow_description"), ["description"], unique=False
            )
        if "ix_flow_name" not in existing_indices_flow:
            batch_op.create_index(batch_op.f("ix_flow_name"), ["name"], unique=False)
    with op.batch_alter_table("flow", schema=None) as batch_op:
        if "ix_flow_user_id" not in existing_indices_flow:
            batch_op.create_index(
                batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
            )

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###

    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)
    # List existing tables
    existing_tables = inspector.get_table_names()
    if "flow" in existing_tables:
        with op.batch_alter_table("flow", schema=None) as batch_op:
            batch_op.drop_index(batch_op.f("ix_flow_user_id"))
            batch_op.drop_index(batch_op.f("ix_flow_name"))
            batch_op.drop_index(batch_op.f("ix_flow_description"))

        op.drop_table("flow")
    if "apikey" in existing_tables:
        with op.batch_alter_table("apikey", schema=None) as batch_op:
            batch_op.drop_index(batch_op.f("ix_apikey_user_id"))
            batch_op.drop_index(batch_op.f("ix_apikey_name"))
            batch_op.drop_index(batch_op.f("ix_apikey_api_key"))

        op.drop_table("apikey")
    if "user" in existing_tables:
        with op.batch_alter_table("user", schema=None) as batch_op:
            batch_op.drop_index(batch_op.f("ix_user_username"))

        op.drop_table("user")

    if "flowstyle" in existing_tables:
        op.drop_table("flowstyle")

    if "component" in existing_tables:
        op.drop_table("component")
    # ### end Alembic commands ###
@@ -5,8 +5,10 @@ from langflow.api.v1 import (
    endpoints_router,
    validate_router,
    flows_router,
    flow_styles_router,
    component_router,
    users_router,
    api_key_router,
    login_router,
)

router = APIRouter(

@@ -17,4 +19,6 @@ router.include_router(endpoints_router)
router.include_router(validate_router)
router.include_router(component_router)
router.include_router(flows_router)
router.include_router(flow_styles_router)
router.include_router(users_router)
router.include_router(api_key_router)
router.include_router(login_router)
@@ -2,8 +2,10 @@ from langflow.api.v1.endpoints import router as endpoints_router
from langflow.api.v1.validate import router as validate_router
from langflow.api.v1.chat import router as chat_router
from langflow.api.v1.flows import router as flows_router
from langflow.api.v1.flow_styles import router as flow_styles_router
from langflow.api.v1.components import router as component_router
from langflow.api.v1.users import router as users_router
from langflow.api.v1.api_key import router as api_key_router
from langflow.api.v1.login import router as login_router

__all__ = [
    "chat_router",

@@ -11,5 +13,7 @@ __all__ = [
    "component_router",
    "validate_router",
    "flows_router",
    "flow_styles_router",
    "users_router",
    "api_key_router",
    "login_router",
]
61
src/backend/langflow/api/v1/api_key.py
Normal file

@@ -0,0 +1,61 @@
from uuid import UUID
from fastapi import APIRouter, HTTPException, Depends
from langflow.api.v1.schemas import ApiKeysResponse
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.api_key.api_key import (
    ApiKeyCreate,
    UnmaskedApiKeyRead,
)

# Assuming you have these methods in your service layer
from langflow.services.database.models.api_key.crud import (
    get_api_keys,
    create_api_key,
    delete_api_key,
)
from langflow.services.database.models.user.user import User
from langflow.services.utils import get_session
from sqlmodel import Session


router = APIRouter(tags=["APIKey"], prefix="/api_key")


@router.get("/", response_model=ApiKeysResponse)
def get_api_keys_route(
    db: Session = Depends(get_session),
    current_user: User = Depends(get_current_active_user),
):
    try:
        user_id = current_user.id
        keys = get_api_keys(db, user_id)

        return ApiKeysResponse(total_count=len(keys), user_id=user_id, api_keys=keys)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc


@router.post("/", response_model=UnmaskedApiKeyRead)
def create_api_key_route(
    req: ApiKeyCreate,
    current_user: User = Depends(get_current_active_user),
    db: Session = Depends(get_session),
):
    try:
        user_id = current_user.id
        return create_api_key(db, req, user_id=user_id)
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e)) from e


@router.delete("/{api_key_id}")
def delete_api_key_route(
    api_key_id: UUID,
    current_user=Depends(get_current_active_user),
    db: Session = Depends(get_session),
):
    try:
        delete_api_key(db, api_key_id)
        return {"detail": "API Key deleted"}
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e)) from e
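These routes are guarded by get_current_active_user, so callers need the JWT issued by /login; a sketch of creating and listing keys with httpx, where the base URL, API prefix, and token value are placeholder assumptions:

# Sketch: calling the /api_key routes with httpx.
# Base URL, API prefix, and the token value are assumptions, not confirmed constants.
import httpx

base = "http://localhost:7860/api/v1"
headers = {"Authorization": "Bearer <jwt-from-/login>"}

created = httpx.post(f"{base}/api_key/", json={"name": "my-key"}, headers=headers)
print(created.json())  # response model is UnmaskedApiKeyRead

listed = httpx.get(f"{base}/api_key/", headers=headers)
print(listed.json())  # ApiKeysResponse: total_count, user_id, api_keys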
@@ -1,3 +1,4 @@
from typing import Optional
from langflow.template.frontend_node.base import FrontendNode
from pydantic import BaseModel, validator

@@ -20,7 +21,8 @@ class FrontendNodeRequest(FrontendNode):
class ValidatePromptRequest(BaseModel):
    name: str
    template: str
    frontend_node: FrontendNodeRequest
    # optional for tweak call
    frontend_node: Optional[FrontendNodeRequest]


# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}}

@@ -39,7 +41,8 @@ class CodeValidationResponse(BaseModel):

class PromptValidationResponse(BaseModel):
    input_variables: list
    frontend_node: FrontendNodeRequest
    # object return for tweak call
    frontend_node: FrontendNodeRequest | object


INVALID_CHARACTERS = {
@@ -1,22 +1,42 @@
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketException, status
from fastapi import (
    APIRouter,
    Depends,
    HTTPException,
    Query,
    WebSocket,
    WebSocketException,
    status,
)
from fastapi.responses import StreamingResponse
from langflow.api.utils import build_input_keys_response
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData

from langflow.chat.manager import ChatManager
from langflow.services import service_manager, ServiceType
from langflow.graph.graph.base import Graph
from langflow.services.auth.utils import get_current_active_user, get_current_user
from langflow.services.utils import get_session
from langflow.utils.logger import logger
from cachetools import LRUCache
from sqlmodel import Session

router = APIRouter(tags=["Chat"])
chat_manager = ChatManager()

flow_data_store: LRUCache = LRUCache(maxsize=10)


@router.websocket("/chat/{client_id}")
async def chat(client_id: str, websocket: WebSocket):
async def chat(
    client_id: str,
    websocket: WebSocket,
    token: str = Query(...),
    db: Session = Depends(get_session),
):
    """Websocket endpoint for chat."""
    try:
        user = await get_current_user(token, db)
        if not user.is_active:
            raise HTTPException(status_code=401, detail="Invalid token")
        chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
        if client_id in chat_manager.in_memory_cache:
            await chat_manager.handle_websocket(client_id, websocket)
        else:

@@ -31,7 +51,9 @@ async def chat(client_id: str, websocket: WebSocket):


@router.post("/build/init/{flow_id}", response_model=InitResponse, status_code=201)
async def init_build(graph_data: dict, flow_id: str):
async def init_build(
    graph_data: dict, flow_id: str, current_user=Depends(get_current_active_user)
):
    """Initialize the build by storing graph data and returning a unique session ID."""

    try:

@@ -45,6 +67,7 @@ async def init_build(graph_data: dict, flow_id: str):
            return InitResponse(flowId=flow_id)

        # Delete from cache if already exists
        chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
        if flow_id in chat_manager.in_memory_cache:
            with chat_manager.in_memory_cache._lock:
                chat_manager.in_memory_cache.delete(flow_id)

@@ -52,6 +75,7 @@ async def init_build(graph_data: dict, flow_id: str):
        flow_data_store[flow_id] = {
            "graph_data": graph_data,
            "status": BuildStatus.STARTED,
            "user_id": current_user.id,
        }

        return InitResponse(flowId=flow_id)

@@ -97,6 +121,7 @@ async def stream_build(flow_id: str):
            return

        graph_data = flow_data_store[flow_id].get("graph_data")
        user_id = flow_data_store[flow_id]["user_id"]

        if not graph_data:
            error_message = "No data provided"

@@ -117,7 +142,7 @@ async def stream_build(flow_id: str):
                    "log": f"Building node {vertex.vertex_type}",
                }
                yield str(StreamData(event="log", data=log_dict))
                vertex.build()
                vertex.build(user_id)
                params = vertex._built_object_repr()
                valid = True
                logger.debug(f"Building node {str(vertex.vertex_type)}")

@@ -157,7 +182,7 @@ async def stream_build(flow_id: str):
            "handle_keys": [],
        }
        yield str(StreamData(event="message", data=input_keys_response))

        chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
        chat_manager.set_cache(flow_id, langchain_object)
        # We need to reset the chat history
        chat_manager.chat_history.empty_history(flow_id)
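The chat websocket now authenticates before the session starts, reading a token query parameter and rejecting inactive users. A client-side sketch using the third-party websockets package, where the host, client id, token, and message payload shape are all placeholder assumptions:

# Sketch: connecting to the token-protected chat websocket.
# Host, client_id, token, and the message shape are placeholder assumptions.
import asyncio

import websockets  # third-party package


async def main() -> None:
    token = "<jwt-from-/login>"
    uri = f"ws://localhost:7860/api/v1/chat/my-client-id?token={token}"
    async with websockets.connect(uri) as ws:
        await ws.send('{"message": "Hello"}')
        print(await ws.recv())


asyncio.run(main())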
@@ -1,8 +1,8 @@
from datetime import timezone
from typing import List
from uuid import UUID
from langflow.database.models.component import Component, ComponentModel
from langflow.database.base import get_session
from langflow.services.database.models.component import Component, ComponentModel
from langflow.services.utils import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.exc import IntegrityError
@@ -1,14 +1,15 @@
from http import HTTPStatus
from typing import Annotated, Optional
from typing import Annotated, Any, Optional, Union
from langflow.services.auth.utils import api_key_security, get_current_active_user

from langflow.cache.utils import save_uploaded_file
from langflow.database.models.flow import Flow
from langflow.services.cache.utils import save_uploaded_file
from langflow.services.database.models.flow import Flow
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.services.database.models.user.user import User
from langflow.services.utils import get_settings_manager
from langflow.utils.logger import logger
from langflow.settings import settings

from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body

from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body, status
import sqlalchemy as sa
from langflow.interface.custom.custom_component import CustomComponent


@@ -26,7 +27,7 @@ from langflow.interface.types import (
    build_langchain_custom_component_list_from_path,
)

from langflow.database.base import get_session
from langflow.services.utils import get_session
from sqlmodel import Session

# build router

@@ -34,18 +35,21 @@ router = APIRouter(tags=["Base"])


@router.get("/all")
def get_all():
def get_all(current_user: User = Depends(get_current_active_user)):
    logger.debug("Building langchain types dict")
    native_components = build_langchain_types_dict()
    # custom_components is a list of dicts
    # need to merge all the keys into one dict
    custom_components_from_file = {}
    if settings.COMPONENTS_PATH:
        logger.info(f"Building custom components from {settings.COMPONENTS_PATH}")
    custom_components_from_file: dict[str, Any] = {}
    settings_manager = get_settings_manager()
    if settings_manager.settings.COMPONENTS_PATH:
        logger.info(
            f"Building custom components from {settings_manager.settings.COMPONENTS_PATH}"
        )

        custom_component_dicts = []
        processed_paths = []
        for path in settings.COMPONENTS_PATH:
        for path in settings_manager.settings.COMPONENTS_PATH:
            if str(path) in processed_paths:
                continue
            custom_component_dict = build_langchain_custom_component_list_from_path(

@@ -56,8 +60,12 @@ def get_all():

        logger.info(f"Loading {len(custom_component_dicts)} category(ies)")
        for custom_component_dict in custom_component_dicts:
            logger.debug(
                {key: len(value) for key, value in custom_component_dict.items()}
            # custom_component_dict is a dict of dicts
            if not custom_component_dict:
                continue
            category = list(custom_component_dict.keys())[0]
            logger.info(
                f"Loading {len(custom_component_dict[category])} component(s) from category {category}"
            )
            custom_components_from_file = merge_nested_dicts_with_renaming(
                custom_components_from_file, custom_component_dict

@@ -69,21 +77,42 @@ def get_all():


# For backwards compatibility we will keep the old endpoint
@router.post("/predict/{flow_id}", response_model=ProcessResponse)
@router.post("/process/{flow_id}", response_model=ProcessResponse)
@router.post(
    "/predict/{flow_id}",
    response_model=ProcessResponse,
    dependencies=[Depends(api_key_security)],
)
@router.post(
    "/process/{flow_id}",
    response_model=ProcessResponse,
)
async def process_flow(
    session: Annotated[Session, Depends(get_session)],
    flow_id: str,
    inputs: Optional[dict] = None,
    tweaks: Optional[dict] = None,
    clear_cache: Annotated[bool, Body(embed=True)] = False,  # noqa: F821
    session: Session = Depends(get_session),
    session_id: Annotated[Union[None, str], Body(embed=True)] = None,  # noqa: F821
    api_key_user: User = Depends(api_key_security),
):
    """
    Endpoint to process an input with a given flow_id.
    """

    try:
        flow = session.get(Flow, flow_id)
        if api_key_user is None:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid API Key",
            )

        # Get the flow that matches the flow_id and belongs to the user
        flow = (
            session.query(Flow)
            .filter(Flow.id == flow_id)
            .filter(Flow.user_id == api_key_user.id)
            .first()
        )
        if flow is None:
            raise ValueError(f"Flow {flow_id} not found")

@@ -95,10 +124,26 @@ async def process_flow(
            graph_data = process_tweaks(graph_data, tweaks)
        except Exception as exc:
            logger.error(f"Error processing tweaks: {exc}")
        response = process_graph_cached(graph_data, inputs, clear_cache)
        return ProcessResponse(
            result=response,
        response, session_id = process_graph_cached(
            graph_data, inputs, clear_cache, session_id
        )
        return ProcessResponse(result=response, session_id=session_id)
    except sa.exc.StatementError as exc:
        # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
        if "badly formed hexadecimal UUID string" in str(exc):
            # This means the Flow ID is not a valid UUID which means it can't find the flow
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
            ) from exc
    except ValueError as exc:
        if f"Flow {flow_id} not found" in str(exc):
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
            ) from exc
        else:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)
            ) from exc
    except Exception as e:
        # Log stack trace
        logger.exception(e)
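Splitting /predict and /process into separate decorators lets the deprecated /predict keep api_key_security as a bare dependency while /process receives the resolved api_key_user, and both now scope the flow lookup to that user. A request sketch, where the header name, base URL, and body shape are assumptions to verify against api_key_security and the schemas:

# Sketch: invoking /process/{flow_id} with an API key.
# The header name, base URL, and body shape here are assumptions.
import httpx

flow_id = "00000000-0000-0000-0000-000000000000"  # placeholder flow UUID
resp = httpx.post(
    f"http://localhost:7860/api/v1/process/{flow_id}",
    json={"inputs": {"input": "Hi"}, "clear_cache": False},
    headers={"x-api-key": "<key-from-/api_key>"},
)
print(resp.json())  # ProcessResponse: result plus session_id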
@@ -1,83 +0,0 @@
from uuid import UUID
from langflow.database.models.flow_style import (
    FlowStyle,
    FlowStyleCreate,
    FlowStyleRead,
    FlowStyleUpdate,
)
from langflow.database.base import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException


# build router
router = APIRouter(prefix="/flow_styles", tags=["FlowStyles"])

# FlowStyleCreate:
# class FlowStyleBase(SQLModel):
#     color: str = Field(index=True)
#     emoji: str = Field(index=False)
#     flow_id: UUID = Field(default=None, foreign_key="flow.id")


@router.post("/", response_model=FlowStyleRead)
def create_flow_style(
    *, session: Session = Depends(get_session), flow_style: FlowStyleCreate
):
    """Create a new flow_style."""
    db_flow_style = FlowStyle.from_orm(flow_style)
    session.add(db_flow_style)
    session.commit()
    session.refresh(db_flow_style)
    return db_flow_style


@router.get("/", response_model=list[FlowStyleRead])
def read_flow_styles(*, session: Session = Depends(get_session)):
    """Read all flows."""
    try:
        flows = session.exec(select(FlowStyle)).all()
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
    return flows


@router.get("/{flow_styles_id}", response_model=FlowStyleRead)
def read_flow_style(*, session: Session = Depends(get_session), flow_styles_id: UUID):
    """Read a flow_style."""
    if flow_style := session.get(FlowStyle, flow_styles_id):
        return flow_style
    else:
        raise HTTPException(status_code=404, detail="FlowStyle not found")


@router.patch("/{flow_style_id}", response_model=FlowStyleRead)
def update_flow_style(
    *,
    session: Session = Depends(get_session),
    flow_style_id: UUID,
    flow_style: FlowStyleUpdate,
):
    """Update a flow_style."""
    db_flow_style = session.get(FlowStyle, flow_style_id)
    if not db_flow_style:
        raise HTTPException(status_code=404, detail="FlowStyle not found")
    flow_data = flow_style.dict(exclude_unset=True)
    for key, value in flow_data.items():
        if hasattr(db_flow_style, key) and value is not None:
            setattr(db_flow_style, key, value)
    session.add(db_flow_style)
    session.commit()
    session.refresh(db_flow_style)
    return db_flow_style


@router.delete("/{flow_id}")
def delete_flow_style(*, session: Session = Depends(get_session), flow_id: UUID):
    """Delete a flow_style."""
    flow_style = session.get(FlowStyle, flow_id)
    if not flow_style:
        raise HTTPException(status_code=404, detail="FlowStyle not found")
    session.delete(flow_style)
    session.commit()
    return {"message": "FlowStyle deleted successfully"}
@@ -1,19 +1,21 @@
from typing import List
from uuid import UUID
from fastapi.encoders import jsonable_encoder
from langflow.settings import settings

from langflow.api.utils import remove_api_keys
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.database.models.flow import (
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.flow import (
    Flow,
    FlowCreate,
    FlowRead,
    FlowReadWithStyle,
    FlowUpdate,
)
from langflow.database.base import get_session
from langflow.services.database.models.user.user import User
from langflow.services.utils import get_session
from langflow.services.utils import get_settings_manager
import orjson
from sqlmodel import Session, select
from sqlmodel import Session
from fastapi import APIRouter, Depends, HTTPException

from fastapi import File, UploadFile

@@ -23,48 +25,77 @@ router = APIRouter(prefix="/flows", tags=["Flows"])


@router.post("/", response_model=FlowRead, status_code=201)
def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate):
def create_flow(
    *,
    session: Session = Depends(get_session),
    flow: FlowCreate,
    current_user: User = Depends(get_current_active_user),
):
    """Create a new flow."""
    if flow.user_id is None:
        flow.user_id = current_user.id

    db_flow = Flow.from_orm(flow)

    session.add(db_flow)
    session.commit()
    session.refresh(db_flow)
    return db_flow


@router.get("/", response_model=list[FlowReadWithStyle], status_code=200)
def read_flows(*, session: Session = Depends(get_session)):
@router.get("/", response_model=list[FlowRead], status_code=200)
def read_flows(
    *,
    session: Session = Depends(get_session),
    current_user: User = Depends(get_current_active_user),
):
    """Read all flows."""
    try:
        flows = session.exec(select(Flow)).all()
        flows = current_user.flows
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
    return [jsonable_encoder(flow) for flow in flows]


@router.get("/{flow_id}", response_model=FlowReadWithStyle, status_code=200)
def read_flow(*, session: Session = Depends(get_session), flow_id: UUID):
@router.get("/{flow_id}", response_model=FlowRead, status_code=200)
def read_flow(
    *,
    session: Session = Depends(get_session),
    flow_id: UUID,
    current_user: User = Depends(get_current_active_user),
):
    """Read a flow."""
    if flow := session.get(Flow, flow_id):
        return flow
    if user_flow := (
        session.query(Flow)
        .filter(Flow.id == flow_id)
        .filter(Flow.user_id == current_user.id)
        .first()
    ):
        return user_flow
    else:
        raise HTTPException(status_code=404, detail="Flow not found")


@router.patch("/{flow_id}", response_model=FlowRead, status_code=200)
def update_flow(
    *, session: Session = Depends(get_session), flow_id: UUID, flow: FlowUpdate
    *,
    session: Session = Depends(get_session),
    flow_id: UUID,
    flow: FlowUpdate,
    current_user: User = Depends(get_current_active_user),
):
    """Update a flow."""

    db_flow = session.get(Flow, flow_id)
    db_flow = read_flow(session=session, flow_id=flow_id, current_user=current_user)
    if not db_flow:
        raise HTTPException(status_code=404, detail="Flow not found")
    flow_data = flow.dict(exclude_unset=True)
    if settings.REMOVE_API_KEYS:
    settings_manager = get_settings_manager()
    if settings_manager.settings.REMOVE_API_KEYS:
        flow_data = remove_api_keys(flow_data)
    for key, value in flow_data.items():
        setattr(db_flow, key, value)
        if value is not None:
            setattr(db_flow, key, value)
    session.add(db_flow)
    session.commit()
    session.refresh(db_flow)

@@ -72,9 +103,14 @@ def update_flow(


@router.delete("/{flow_id}", status_code=200)
def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID):
def delete_flow(
    *,
    session: Session = Depends(get_session),
    flow_id: UUID,
    current_user: User = Depends(get_current_active_user),
):
    """Delete a flow."""
    flow = session.get(Flow, flow_id)
    flow = read_flow(session=session, flow_id=flow_id, current_user=current_user)
    if not flow:
        raise HTTPException(status_code=404, detail="Flow not found")
    session.delete(flow)

@@ -86,10 +122,16 @@ def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID):


@router.post("/batch/", response_model=List[FlowRead], status_code=201)
def create_flows(*, session: Session = Depends(get_session), flow_list: FlowListCreate):
def create_flows(
    *,
    session: Session = Depends(get_session),
    flow_list: FlowListCreate,
    current_user: User = Depends(get_current_active_user),
):
    """Create multiple new flows."""
    db_flows = []
    for flow in flow_list.flows:
        flow.user_id = current_user.id
        db_flow = Flow.from_orm(flow)
        session.add(db_flow)
        db_flows.append(db_flow)

@@ -101,7 +143,10 @@ def create_flows(*, session: Session = Depends(get_session), flow_list: FlowList

@router.post("/upload/", response_model=List[FlowRead], status_code=201)
async def upload_file(
    *, session: Session = Depends(get_session), file: UploadFile = File(...)
    *,
    session: Session = Depends(get_session),
    file: UploadFile = File(...),
    current_user: User = Depends(get_current_active_user),
):
    """Upload flows from a file."""
    contents = await file.read()

@@ -110,11 +155,19 @@ async def upload_file(
        flow_list = FlowListCreate(**data)
    else:
        flow_list = FlowListCreate(flows=[FlowCreate(**flow) for flow in data])
    return create_flows(session=session, flow_list=flow_list)
    # Now we set the user_id for all flows
    for flow in flow_list.flows:
        flow.user_id = current_user.id

    return create_flows(session=session, flow_list=flow_list, current_user=current_user)


@router.get("/download/", response_model=FlowListRead, status_code=200)
async def download_file(*, session: Session = Depends(get_session)):
async def download_file(
    *,
    session: Session = Depends(get_session),
    current_user: User = Depends(get_current_active_user),
):
    """Download all flows as a file."""
    flows = read_flows(session=session)
    flows = read_flows(session=session, current_user=current_user)
    return FlowListRead(flows=flows)
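With every flow route resolving ownership through current_user, flows are now created under and listed for the authenticated user only; a compact sketch, where the base URL and token are placeholder assumptions:

# Sketch: creating and listing flows as the authenticated user.
# Base URL and token are placeholder assumptions.
import httpx

base = "http://localhost:7860/api/v1"
headers = {"Authorization": "Bearer <jwt-from-/login>"}

created = httpx.post(f"{base}/flows/", json={"name": "My Flow"}, headers=headers)
mine = httpx.get(f"{base}/flows/", headers=headers)  # only current_user's flows
print(created.json(), len(mine.json()))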
63
src/backend/langflow/api/v1/login.py
Normal file

@@ -0,0 +1,63 @@
from sqlmodel import Session
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm

from langflow.services.utils import get_session
from langflow.api.v1.schemas import Token
from langflow.services.auth.utils import (
    authenticate_user,
    create_user_tokens,
    create_refresh_token,
    create_user_longterm_token,
    get_current_active_user,
)

from langflow.services.utils import get_settings_manager

router = APIRouter(tags=["Login"])


@router.post("/login", response_model=Token)
async def login_to_get_access_token(
    form_data: OAuth2PasswordRequestForm = Depends(),
    db: Session = Depends(get_session),
    # _: Session = Depends(get_current_active_user)
):
    if user := authenticate_user(form_data.username, form_data.password, db):
        return create_user_tokens(user_id=user.id, db=db, update_last_login=True)
    else:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )


@router.get("/auto_login")
async def auto_login(db: Session = Depends(get_session)):
    settings_manager = get_settings_manager()

    if settings_manager.auth_settings.AUTO_LOGIN:
        return create_user_longterm_token(db)

    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail={
            "message": "Auto login is disabled. Please enable it in the settings",
            "auto_login": False,
        },
    )


@router.post("/refresh")
async def refresh_token(
    token: str, current_user: Session = Depends(get_current_active_user)
):
    if token:
        return create_refresh_token(token)
    else:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid refresh token",
            headers={"WWW-Authenticate": "Bearer"},
        )
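The /login route uses OAuth2PasswordRequestForm, which reads form-encoded username and password fields rather than JSON. A sketch of obtaining a token pair (the base URL is an assumption):

import requests

BASE_URL = "http://localhost:7860/api/v1"  # assumed deployment URL

resp = requests.post(
    f"{BASE_URL}/login",
    # Form-encoded body, as required by OAuth2PasswordRequestForm.
    data={"username": "superuser", "password": "12345"},
)
resp.raise_for_status()
tokens = resp.json()  # Token schema: access_token, refresh_token, token_type
print(tokens["token_type"], tokens["access_token"][:16], "...")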
@@ -1,8 +1,12 @@
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from langflow.database.models.base import orjson_dumps
from langflow.database.models.flow import FlowCreate, FlowRead
from uuid import UUID
from langflow.services.database.models.api_key.api_key import ApiKeyRead
from langflow.services.database.models.flow import FlowCreate, FlowRead
from langflow.services.database.models.user import UserRead
from langflow.services.database.models.base import orjson_dumps

from pydantic import BaseModel, Field, validator


@@ -47,6 +51,7 @@ class ProcessResponse(BaseModel):
    """Process response schema."""

    result: dict
    session_id: Optional[str] = None


class ChatMessage(BaseModel):

@@ -136,3 +141,32 @@ class ComponentListCreate(BaseModel):

class ComponentListRead(BaseModel):
    flows: List[FlowRead]


class UsersResponse(BaseModel):
    total_count: int
    users: List[UserRead]


class ApiKeyResponse(BaseModel):
    id: str
    api_key: str
    name: str
    created_at: str
    last_used_at: str


class ApiKeysResponse(BaseModel):
    total_count: int
    user_id: UUID
    api_keys: List[ApiKeyRead]


class CreateApiKeyRequest(BaseModel):
    name: str


class Token(BaseModel):
    access_token: str
    refresh_token: str
    token_type: str
149
src/backend/langflow/api/v1/users.py
Normal file

@@ -0,0 +1,149 @@
from uuid import UUID
from langflow.api.v1.schemas import UsersResponse
from langflow.services.database.models.user import (
    User,
    UserCreate,
    UserRead,
    UserUpdate,
)

from sqlalchemy import func
from sqlalchemy.exc import IntegrityError

from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException

from langflow.services.utils import get_session
from langflow.services.auth.utils import (
    get_current_active_superuser,
    get_current_active_user,
    get_password_hash,
)
from langflow.services.database.models.user.crud import (
    update_user,
)

router = APIRouter(tags=["Users"])


@router.post("/user", response_model=UserRead, status_code=201)
def add_user(
    user: UserCreate,
    db: Session = Depends(get_session),
) -> User:
    """
    Add a new user to the database.
    """
    new_user = User.from_orm(user)
    try:
        new_user.password = get_password_hash(user.password)

        db.add(new_user)
        db.commit()
        db.refresh(new_user)
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(status_code=400, detail="This username is unavailable.") from e

    return new_user


@router.get("/user", response_model=UserRead)
def read_current_user(
    current_user: User = Depends(get_current_active_user),
) -> User:
    """
    Retrieve the current user's data.
    """
    return current_user


@router.get("/users", response_model=UsersResponse)
def read_all_users(
    skip: int = 0,
    limit: int = 10,
    current_user: Session = Depends(get_current_active_superuser),
    db: Session = Depends(get_session),
) -> UsersResponse:
    """
    Retrieve a list of users from the database with pagination.
    """
    query = select(User).offset(skip).limit(limit)
    users = db.execute(query).fetchall()

    count_query = select(func.count()).select_from(User)  # type: ignore
    total_count = db.execute(count_query).scalar()

    return UsersResponse(
        total_count=total_count,  # type: ignore
        users=[UserRead(**dict(user.User)) for user in users],
    )


@router.patch("/user/{user_id}", response_model=UserRead)
def patch_user(
    user_id: UUID,
    user: UserUpdate,
    _: Session = Depends(get_current_active_user),
    db: Session = Depends(get_session),
) -> User:
    """
    Update an existing user's data.
    """
    return update_user(user_id, user, db)


@router.delete("/user/{user_id}")
def delete_user(
    user_id: UUID,
    current_user: User = Depends(get_current_active_superuser),
    db: Session = Depends(get_session),
) -> dict:
    """
    Delete a user from the database.
    """
    if current_user.id == user_id:
        raise HTTPException(
            status_code=400, detail="You can't delete your own user account"
        )
    elif not current_user.is_superuser:
        raise HTTPException(
            status_code=403, detail="You don't have the permission to delete this user"
        )

    user_db = db.query(User).filter(User.id == user_id).first()
    if not user_db:
        raise HTTPException(status_code=404, detail="User not found")

    db.delete(user_db)
    db.commit()

    return {"detail": "User deleted"}


# TODO: REMOVE - Just for testing purposes
@router.post("/super_user", response_model=User)
def add_super_user_for_testing_purposes_delete_me_before_merge_into_dev(
    db: Session = Depends(get_session),
) -> User:
    """
    Add a superuser for testing purposes.
    (This should be removed in production)
    """
    new_user = User(
        username="superuser",
        password=get_password_hash("12345"),
        is_active=True,
        is_superuser=True,
        last_login_at=None,
    )

    try:
        db.add(new_user)
        db.commit()
        db.refresh(new_user)
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(status_code=400, detail="User exists") from e

    return new_user
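A sketch of driving these user endpoints end to end; the URL is an assumption and the request field names are inferred from the User model above (the testing-only /super_user route seeds the superuser):

import requests

BASE_URL = "http://localhost:7860/api/v1"  # assumed deployment URL

# Seed the testing-only superuser, then authenticate as it.
requests.post(f"{BASE_URL}/super_user")
tokens = requests.post(
    f"{BASE_URL}/login", data={"username": "superuser", "password": "12345"}
).json()
headers = {"Authorization": f"Bearer {tokens['access_token']}"}

# Create a regular user, then page through all users as the superuser.
requests.post(f"{BASE_URL}/user", json={"username": "alice", "password": "s3cret"})
print(requests.get(f"{BASE_URL}/users?skip=0&limit=10", headers=headers).json())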
@@ -31,7 +31,12 @@ def post_validate_code(code: Code):
def post_validate_prompt(prompt_request: ValidatePromptRequest):
    try:
        input_variables = validate_prompt(prompt_request.template)

        # Check if frontend_node is None before proceeding to avoid attempting to update a non-existent node.
        if prompt_request.frontend_node is None:
            return PromptValidationResponse(
                input_variables=input_variables,
                frontend_node={},
            )
        old_custom_fields = get_old_custom_fields(prompt_request)

        add_new_variables_to_template(input_variables, prompt_request)
7
src/backend/langflow/cache/__init__.py
vendored

@@ -1,7 +0,0 @@
from langflow.cache.manager import cache_manager
from langflow.cache.flow import InMemoryCache

__all__ = [
    "cache_manager",
    "InMemoryCache",
]
@@ -1,6 +1,6 @@
from langflow import CustomComponent
from langchain.schema import Document
from langflow.database.models.base import orjson_dumps
from langflow.services.database.models.base import orjson_dumps
import requests
from typing import Optional


@@ -13,7 +13,7 @@
from langflow import CustomComponent
from langchain.schema import Document
from langflow.database.models.base import orjson_dumps
from langflow.services.database.models.base import orjson_dumps


class JSONDocumentBuilder(CustomComponent):


@@ -1,6 +1,6 @@
from langflow import CustomComponent
from langchain.schema import Document
from langflow.database.models.base import orjson_dumps
from langflow.services.database.models.base import orjson_dumps
import requests
from typing import Optional


@@ -2,7 +2,7 @@ from typing import List, Optional
import requests
from langflow import CustomComponent
from langchain.schema import Document
from langflow.database.models.base import orjson_dumps
from langflow.services.database.models.base import orjson_dumps


class UpdateRequest(CustomComponent):
109
src/backend/langflow/components/vectorstores/Chroma.py
Normal file

@@ -0,0 +1,109 @@
from typing import Optional, Union
from langflow import CustomComponent

from langchain.vectorstores import Chroma
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.schema import BaseRetriever
from langchain.embeddings.base import Embeddings
import chromadb  # type: ignore


class ChromaComponent(CustomComponent):
    """
    A custom component for implementing a Vector Store using Chroma.
    """

    display_name: str = "Chroma (Custom Component)"
    description: str = "Implementation of Vector Store using Chroma"
    documentation = "https://python.langchain.com/docs/integrations/vectorstores/chroma"
    beta = True

    def build_config(self):
        """
        Builds the configuration for the component.

        Returns:
        - dict: A dictionary containing the configuration options for the component.
        """
        return {
            "collection_name": {"display_name": "Collection Name", "value": "langflow"},
            "persist": {"display_name": "Persist"},
            "persist_directory": {"display_name": "Persist Directory"},
            "code": {"show": False, "display_name": "Code"},
            "documents": {"display_name": "Documents", "is_list": True},
            "embedding": {"display_name": "Embedding"},
            "chroma_server_cors_allow_origins": {
                "display_name": "Server CORS Allow Origins",
                "advanced": True,
            },
            "chroma_server_host": {"display_name": "Server Host", "advanced": True},
            "chroma_server_port": {"display_name": "Server Port", "advanced": True},
            "chroma_server_grpc_port": {
                "display_name": "Server gRPC Port",
                "advanced": True,
            },
            "chroma_server_ssl_enabled": {
                "display_name": "Server SSL Enabled",
                "advanced": True,
            },
        }

    def build(
        self,
        collection_name: str,
        persist: bool,
        chroma_server_ssl_enabled: bool,
        persist_directory: Optional[str] = None,
        embedding: Optional[Embeddings] = None,
        documents: Optional[Document] = None,
        chroma_server_cors_allow_origins: Optional[str] = None,
        chroma_server_host: Optional[str] = None,
        chroma_server_port: Optional[int] = None,
        chroma_server_grpc_port: Optional[int] = None,
    ) -> Union[VectorStore, BaseRetriever]:
        """
        Builds the Vector Store or BaseRetriever object.

        Args:
        - collection_name (str): The name of the collection.
        - persist_directory (Optional[str]): The directory to persist the Vector Store to.
        - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.
        - persist (bool): Whether to persist the Vector Store or not.
        - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.
        - documents (Optional[Document]): The documents to use for the Vector Store.
        - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.
        - chroma_server_host (Optional[str]): The host for the Chroma server.
        - chroma_server_port (Optional[int]): The port for the Chroma server.
        - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.

        Returns:
        - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.
        """

        # Chroma settings
        chroma_settings = None

        if chroma_server_host is not None:
            chroma_settings = chromadb.config.Settings(
                chroma_server_cors_allow_origins=chroma_server_cors_allow_origins
                or None,
                chroma_server_host=chroma_server_host,
                chroma_server_port=chroma_server_port or None,
                chroma_server_grpc_port=chroma_server_grpc_port or None,
                chroma_server_ssl_enabled=chroma_server_ssl_enabled,
            )

        # If documents, then we need to create a Chroma instance using .from_documents
        if documents is not None and embedding is not None:
            return Chroma.from_documents(
                documents=documents,  # type: ignore
                persist_directory=persist_directory if persist else None,
                collection_name=collection_name,
                embedding=embedding,
                client_settings=chroma_settings,
            )

        return Chroma(
            persist_directory=persist_directory, client_settings=chroma_settings
        )
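For a quick local smoke test of this component outside the UI, something like the following should work, assuming chromadb is installed; FakeEmbeddings is langchain's built-in stub embedder and the text is a placeholder:

from langchain.embeddings import FakeEmbeddings
from langchain.schema import Document

component = ChromaComponent()
store = component.build(
    collection_name="langflow",
    persist=False,
    chroma_server_ssl_enabled=False,
    embedding=FakeEmbeddings(size=32),
    documents=[Document(page_content="hello world")],
)
print(store.similarity_search("hello", k=1))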
|
@ -104,6 +104,8 @@ embeddings:
|
|||
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers"
|
||||
CohereEmbeddings:
|
||||
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/cohere"
|
||||
VertexAIEmbeddings:
|
||||
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/google_vertex_ai_palm"
|
||||
llms:
|
||||
OpenAI:
|
||||
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai"
|
||||
|
|
@ -127,8 +129,8 @@ llms:
|
|||
# There's a bug in this component deactivating until we get it sorted: _language_models.py", line 804, in send_message
|
||||
# is_blocked=safety_attributes.get("blocked", False),
|
||||
# AttributeError: 'list' object has no attribute 'get'
|
||||
# ChatVertexAI:
|
||||
# documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/google_vertex_ai_palm"
|
||||
ChatVertexAI:
|
||||
documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/google_vertex_ai_palm"
|
||||
###
|
||||
memories:
|
||||
# https://github.com/supabase-community/supabase-py/issues/482
|
||||
|
|
|
|||
|
|
@@ -1,86 +0,0 @@
from contextlib import contextmanager
import os

from sqlmodel import SQLModel, Session, create_engine
from langflow.utils.logger import logger
import sqlalchemy as sa


class Engine:
    _instance = None

    @classmethod
    def get(cls):
        logger.debug("Getting database engine")
        if cls._instance is None:
            cls.create()
        return cls._instance

    @classmethod
    def create(cls):
        logger.debug("Creating database engine")
        from langflow.settings import settings

        if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"):
            settings.DATABASE_URL = langflow_database_url
            logger.debug("Using LANGFLOW_DATABASE_URL")

        if settings.DATABASE_URL and settings.DATABASE_URL.startswith("sqlite"):
            connect_args = {"check_same_thread": False}
        else:
            connect_args = {}
        if not settings.DATABASE_URL:
            raise RuntimeError("No database_url provided")
        cls._instance = create_engine(settings.DATABASE_URL, connect_args=connect_args)

    @classmethod
    def update(cls):
        logger.debug("Updating database engine")
        cls._instance = None
        cls.create()


def create_db_and_tables():
    logger.debug("Creating database and tables")
    try:
        SQLModel.metadata.create_all(Engine.get())
    except sa.exc.OperationalError as exc:
        # Check if the error is because the table already exists
        if "already exists" in str(exc):
            logger.debug("Database and tables already exist")
        else:
            logger.error(f"Error creating database and tables: {exc}")
            raise RuntimeError("Error creating database and tables") from exc
    except Exception as exc:
        logger.error(f"Error creating database and tables: {exc}")
        raise RuntimeError("Error creating database and tables") from exc
    # Now check if the table Flow exists, if not, something went wrong
    # and we need to create the tables again.
    from sqlalchemy import inspect

    inspector = inspect(Engine.get())
    if "flow" not in inspector.get_table_names():
        logger.error("Something went wrong creating the database and tables.")
        logger.error("Please check your database settings.")

        raise RuntimeError("Something went wrong creating the database and tables.")
    else:
        logger.debug("Database and tables created successfully")


@contextmanager
def session_getter():
    try:
        session = Session(Engine.get())
        yield session
    except Exception as e:
        print("Session rollback because of exception:", e)
        session.rollback()
        raise
    finally:
        session.close()


def get_session():
    with session_getter() as session:
        yield session
@@ -1,33 +0,0 @@
# Path: src/backend/langflow/database/models/flowstyle.py

from langflow.database.models.base import SQLModelSerializable
from sqlmodel import Field, Relationship
from uuid import UUID, uuid4
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    from langflow.database.models.flow import Flow


class FlowStyleBase(SQLModelSerializable):
    color: str
    emoji: str
    flow_id: UUID = Field(default=None, foreign_key="flow.id")


class FlowStyle(FlowStyleBase, table=True):
    id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
    flow: "Flow" = Relationship(back_populates="style")


class FlowStyleUpdate(SQLModelSerializable):
    color: Optional[str] = None
    emoji: Optional[str] = None


class FlowStyleCreate(FlowStyleBase):
    pass


class FlowStyleRead(FlowStyleBase):
    id: UUID
@@ -1,7 +1,7 @@
from typing import Dict, Generator, List, Type, Union

from langflow.graph.edge.base import Edge
from langflow.graph.graph.constants import VERTEX_TYPE_MAP
from langflow.graph.graph.constants import lazy_load_vertex_dict
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import (
    FileToolVertex,

@@ -144,7 +144,7 @@ class Graph:

        return list(reversed(sorted_vertices))

    def generator_build(self) -> Generator:
    def generator_build(self) -> Generator[Vertex, None, None]:
        """Builds each vertex in the graph and yields it."""
        sorted_vertices = self.topological_sort()
        logger.debug("Sorted vertices: %s", sorted_vertices)

@@ -187,10 +187,12 @@ class Graph:
        """Returns the node class based on the node type."""
        if node_type in FILE_TOOLS:
            return FileToolVertex
        if node_type in VERTEX_TYPE_MAP:
            return VERTEX_TYPE_MAP[node_type]
        if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
            return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type]
        return (
            VERTEX_TYPE_MAP[node_lc_type] if node_lc_type in VERTEX_TYPE_MAP else Vertex
            lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_lc_type]
            if node_lc_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP
            else Vertex
        )

    def _build_vertices(self) -> List[Vertex]:
@@ -1,4 +1,3 @@
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex import types
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator

@@ -15,23 +14,45 @@ from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.custom.base import custom_component_creator
from typing import Dict, Type
from langflow.utils.lazy_load import LazyLoadDictBase


VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = {
    **{t: types.PromptVertex for t in prompt_creator.to_list()},
    **{t: types.AgentVertex for t in agent_creator.to_list()},
    **{t: types.ChainVertex for t in chain_creator.to_list()},
    **{t: types.ToolVertex for t in tool_creator.to_list()},
    **{t: types.ToolkitVertex for t in toolkits_creator.to_list()},
    **{t: types.WrapperVertex for t in wrapper_creator.to_list()},
    **{t: types.LLMVertex for t in llm_creator.to_list()},
    **{t: types.MemoryVertex for t in memory_creator.to_list()},
    **{t: types.EmbeddingVertex for t in embedding_creator.to_list()},
    **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()},
    **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()},
    **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()},
    **{t: types.OutputParserVertex for t in output_parser_creator.to_list()},
    **{t: types.CustomComponentVertex for t in custom_component_creator.to_list()},
    **{t: types.RetrieverVertex for t in retriever_creator.to_list()},
}
class VertexTypesDict(LazyLoadDictBase):
    def __init__(self):
        self._all_types_dict = None

    @property
    def VERTEX_TYPE_MAP(self):
        return self.all_types_dict

    def _build_dict(self):
        langchain_types_dict = self.get_type_dict()
        return {
            **langchain_types_dict,
            "Custom": ["Custom Tool", "Python Function"],
        }

    def get_type_dict(self):
        return {
            **{t: types.PromptVertex for t in prompt_creator.to_list()},
            **{t: types.AgentVertex for t in agent_creator.to_list()},
            **{t: types.ChainVertex for t in chain_creator.to_list()},
            **{t: types.ToolVertex for t in tool_creator.to_list()},
            **{t: types.ToolkitVertex for t in toolkits_creator.to_list()},
            **{t: types.WrapperVertex for t in wrapper_creator.to_list()},
            **{t: types.LLMVertex for t in llm_creator.to_list()},
            **{t: types.MemoryVertex for t in memory_creator.to_list()},
            **{t: types.EmbeddingVertex for t in embedding_creator.to_list()},
            **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()},
            **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()},
            **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()},
            **{t: types.OutputParserVertex for t in output_parser_creator.to_list()},
            **{
                t: types.CustomComponentVertex
                for t in custom_component_creator.to_list()
            },
            **{t: types.RetrieverVertex for t in retriever_creator.to_list()},
        }


lazy_load_vertex_dict = VertexTypesDict()
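Both this class and AllTypesDict further down rely on langflow.utils.lazy_load.LazyLoadDictBase, which is not shown in the diff. A plausible sketch of that base class, inferred from how VertexTypesDict uses it (an assumption, not the actual implementation):

# Assumed sketch of LazyLoadDictBase; inferred from usage in this diff.
class LazyLoadDictBase:
    """Builds an expensive dict on first access and caches it."""

    def __init__(self):
        self._all_types_dict = None

    @property
    def all_types_dict(self):
        # Defer the creators' imports and list-building until first use.
        if self._all_types_dict is None:
            self._all_types_dict = self._build_dict()
        return self._all_types_dict

    def _build_dict(self):
        raise NotImplementedError  # subclasses such as VertexTypesDict supply this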
@@ -1,7 +1,7 @@
import ast
from langflow.graph.utils import UnbuiltObject
from langflow.interface.initialize import loading
from langflow.interface.listing import ALL_TYPES_DICT
from langflow.interface.listing import lazy_load_dict
from langflow.utils.constants import DIRECT_TYPES
from langflow.utils.logger import logger
from langflow.utils.util import sync_to_async

@@ -63,7 +63,7 @@ class Vertex:
        )

        if self.base_type is None:
            for base_type, value in ALL_TYPES_DICT.items():
            for base_type, value in lazy_load_dict.ALL_TYPES_DICT.items():
                if self.vertex_type in value:
                    self.base_type = base_type
                    break

@@ -133,13 +133,13 @@ class Vertex:
        # Add _type to params
        self.params = params

    def _build(self):
    def _build(self, user_id=None):
        """
        Initiate the build process.
        """
        logger.debug(f"Building {self.vertex_type}")
        self._build_each_node_in_params_dict()
        self._get_and_instantiate_class()
        self._get_and_instantiate_class(user_id)
        self._validate_built_object()

        self._built = True

@@ -169,23 +169,25 @@ class Vertex:
        """
        return all(self._is_node(node) for node in value)

    def _build_node_and_update_params(self, key, node):
    def _build_node_and_update_params(self, key, node, user_id=None):
        """
        Builds a given node and updates the params dictionary accordingly.
        """
        result = node.build()
        result = node.build(user_id)
        self._handle_func(key, result)
        if isinstance(result, list):
            self._extend_params_list_with_result(key, result)
        self.params[key] = result

    def _build_list_of_nodes_and_update_params(self, key, nodes):
    def _build_list_of_nodes_and_update_params(
        self, key, nodes: List["Vertex"], user_id=None
    ):
        """
        Iterates over a list of nodes, builds each and updates the params dictionary.
        """
        self.params[key] = []
        for node in nodes:
            built = node.build()
            built = node.build(user_id)
            if isinstance(built, list):
                if key not in self.params:
                    self.params[key] = []

@@ -215,7 +217,7 @@ class Vertex:
        if isinstance(self.params[key], list):
            self.params[key].extend(result)

    def _get_and_instantiate_class(self):
    def _get_and_instantiate_class(self, user_id=None):
        """
        Gets the class from a dictionary and instantiates it with the params.
        """

@@ -226,6 +228,7 @@ class Vertex:
                node_type=self.vertex_type,
                base_type=self.base_type,
                params=self.params,
                user_id=user_id,
            )
            self._update_built_object_and_artifacts(result)
        except Exception as exc:

@@ -255,9 +258,9 @@ class Vertex:

        raise ValueError(message)

    def build(self, force: bool = False) -> Any:
    def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
        if not self._built or force:
            self._build()
            self._build(user_id, *args, **kwargs)

        return self._built_object
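These signature changes thread the caller's identity from the API layer down to component instantiation. A condensed sketch of the chain; the driver loop and current_user are assumptions, while the helper names are as they appear in this diff:

# Assumed driver code, not part of this diff: build a graph for one user.
for vertex in graph.topological_sort():
    vertex.build(user_id=current_user.id)
# Inside Vertex.build(force, user_id):
#   _build(user_id)
#     -> _get_and_instantiate_class(user_id)
#     -> loading.instantiate_class(node_type, base_type, params, user_id=user_id)
#     -> instantiate_custom_component(..., user_id)  # CustomComponent(user_id=...)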
@@ -21,18 +21,18 @@ class AgentVertex(Vertex):
        elif isinstance(source_node, ChainVertex):
            self.chains.append(source_node)

    def build(self, force: bool = False) -> Any:
    def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
        if not self._built or force:
            self._set_tools_and_chains()
            # First, build the tools
            for tool_node in self.tools:
                tool_node.build()
                tool_node.build(user_id=user_id)

            # Next, build the chains and the rest
            for chain_node in self.chains:
                chain_node.build(tools=self.tools)
                chain_node.build(tools=self.tools, user_id=user_id)

            self._build()
            self._build(user_id=user_id)

        return self._built_object

@@ -49,13 +49,13 @@ class LLMVertex(Vertex):
    def __init__(self, data: Dict):
        super().__init__(data, base_type="llms")

    def build(self, force: bool = False) -> Any:
    def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
        # LLM is different because some models might take up too much memory
        # or time to load. So we only load them when we need them.
        if self.vertex_type == self.built_node_type:
            return self.class_built_object
        if not self._built or force:
            self._build()
            self._build(user_id=user_id)
            self.built_node_type = self.vertex_type
            self.class_built_object = self._built_object
        # Avoid deepcopying the LLM

@@ -77,11 +77,11 @@ class WrapperVertex(Vertex):
    def __init__(self, data: Dict):
        super().__init__(data, base_type="wrappers")

    def build(self, force: bool = False) -> Any:
    def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
        if not self._built or force:
            if "headers" in self.params:
                self.params["headers"] = ast.literal_eval(self.params["headers"])
            self._build()
            self._build(user_id=user_id)
        return self._built_object


@@ -148,16 +148,19 @@ class ChainVertex(Vertex):
    def build(
        self,
        force: bool = False,
        tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None,
        user_id=None,
        *args,
        **kwargs,
    ) -> Any:
        if not self._built or force:
            # Check if the chain requires a PromptVertex
            for key, value in self.params.items():
                if isinstance(value, PromptVertex):
                    # Build the PromptVertex, passing the tools if available
                    tools = kwargs.get("tools", None)
                    self.params[key] = value.build(tools=tools, force=force)

            self._build()
            self._build(user_id=user_id)

        return self._built_object

@@ -169,7 +172,10 @@ class PromptVertex(Vertex):
    def build(
        self,
        force: bool = False,
        user_id=None,
        tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None,
        *args,
        **kwargs,
    ) -> Any:
        if not self._built or force:
            if (

@@ -180,7 +186,7 @@ class PromptVertex(Vertex):
            # Check if it is a ZeroShotPrompt and needs a tool
            if "ShotPrompt" in self.vertex_type:
                tools = (
                    [tool_node.build() for tool_node in tools]
                    [tool_node.build(user_id=user_id) for tool_node in tools]
                    if tools is not None
                    else []
                )

@@ -208,7 +214,7 @@ class PromptVertex(Vertex):
            else:
                self.params.pop("input_variables", None)

            self._build()
            self._build(user_id=user_id)
        return self._built_object

    def _built_object_repr(self):
@@ -5,7 +5,8 @@ from langchain.agents import types
from langflow.custom.customs import get_custom_nodes
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.base import LangChainTypeCreator
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.agents import AgentFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method

@@ -53,13 +54,17 @@ class AgentCreator(LangChainTypeCreator):
    # Now this is a generator
    def to_list(self) -> List[str]:
        names = []
        settings_manager = get_settings_manager()
        for _, agent in self.type_to_loader_dict.items():
            agent_name = (
                agent.function_name()
                if hasattr(agent, "function_name")
                else agent.__name__
            )
            if agent_name in settings.AGENTS or settings.DEV:
            if (
                agent_name in settings_manager.settings.AGENTS
                or settings_manager.settings.DEV
            ):
                names.append(agent_name)
        return names
@@ -2,13 +2,14 @@ from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Type, Union
from langchain.chains.base import Chain
from langchain.agents import AgentExecutor
from langflow.services.utils import get_settings_manager
from pydantic import BaseModel

from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.template.base import Template
from langflow.utils.logger import logger
from langflow.settings import settings


# Assuming necessary imports for Field, Template, and FrontendNode classes


@@ -26,9 +27,12 @@ class LangChainTypeCreator(BaseModel, ABC):
    @property
    def docs_map(self) -> Dict[str, str]:
        """A dict with the name of the component as key and the documentation link as value."""
        settings_manager = get_settings_manager()
        if self.name_docs_dict is None:
            try:
                type_settings = getattr(settings, self.type_name.upper())
                type_settings = getattr(
                    settings_manager.settings, self.type_name.upper()
                )
                self.name_docs_dict = {
                    name: value_dict["documentation"]
                    for name, value_dict in type_settings.items()
@@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional, Type
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.chains import ChainFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method

@@ -30,6 +31,7 @@ class ChainCreator(LangChainTypeCreator):
    @property
    def type_to_loader_dict(self) -> Dict:
        if self.type_dict is None:
            settings_manager = get_settings_manager()
            self.type_dict: dict[str, Any] = {
                chain_name: import_class(f"langchain.chains.{chain_name}")
                for chain_name in chains.__all__

@@ -43,7 +45,8 @@ class ChainCreator(LangChainTypeCreator):
            self.type_dict = {
                name: chain
                for name, chain in self.type_dict.items()
                if name in settings.CHAINS or settings.DEV
                if name in settings_manager.settings.CHAINS
                or settings_manager.settings.DEV
            }
        return self.type_dict
@@ -1,14 +1,16 @@
from typing import Any, Callable, List, Optional
from typing import Any, Callable, List, Optional, Union
from uuid import UUID
from fastapi import HTTPException
from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
from langflow.interface.custom.component import Component
from langflow.interface.custom.directory_reader import DirectoryReader
from langflow.services.utils import get_db_manager
from langflow.interface.custom.utils import extract_inner_type

from langflow.utils import validate

from langflow.database.base import session_getter
from langflow.database.models.flow import Flow
from langflow.services.database.utils import session_getter
from langflow.services.database.models.flow import Flow
from pydantic import Extra
import yaml


@@ -21,6 +23,7 @@ class CustomComponent(Component, extra=Extra.allow):
    function: Optional[Callable] = None
    return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())
    repr_value: Optional[Any] = ""
    user_id: Optional[Union[UUID, str]] = None

    def __init__(self, **data):
        super().__init__(**data)

@@ -176,7 +179,8 @@ class CustomComponent(Component, extra=Extra.allow):
        from langflow.processing.process import build_sorted_vertices_with_caching
        from langflow.processing.process import process_tweaks

        with session_getter() as session:
        db_manager = get_db_manager()
        with session_getter(db_manager) as session:
            graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None
            if not graph_data:
                raise ValueError(f"Flow {flow_id} not found")

@@ -185,10 +189,16 @@ class CustomComponent(Component, extra=Extra.allow):
        return build_sorted_vertices_with_caching(graph_data)

    def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]:
        get_session = get_session or session_getter
        with get_session() as session:
            flows = session.query(Flow).all()
        return flows
        if not self.user_id:
            raise ValueError("Session is invalid")
        try:
            get_session = get_session or session_getter
            db_manager = get_db_manager()
            with get_session(db_manager) as session:
                flows = session.query(Flow).filter(Flow.user_id == self.user_id).all()
            return flows
        except Exception as e:
            raise ValueError("Session is invalid") from e

    def get_flow(
        self,

@@ -199,12 +209,16 @@ class CustomComponent(Component, extra=Extra.allow):
        get_session: Optional[Callable] = None,
    ) -> Flow:
        get_session = get_session or session_getter

        with get_session() as session:
        db_manager = get_db_manager()
        with get_session(db_manager) as session:
            if flow_id:
                flow = session.query(Flow).get(flow_id)
            elif flow_name:
                flow = session.query(Flow).filter(Flow.name == flow_name).first()
                flow = (
                    session.query(Flow)
                    .filter(Flow.name == flow_name)
                    .filter(Flow.user_id == self.user_id)
                ).first()
            else:
                raise ValueError("Either flow_name or flow_id must be provided")
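With user_id now set at instantiation time, the flow helpers are scoped to the calling user. A hypothetical component illustrating list_flows; the component itself is an example, not part of the diff:

from langflow import CustomComponent


class FlowLister(CustomComponent):
    display_name = "Flow Lister"  # hypothetical example component

    def build(self) -> str:
        # self.user_id is injected by instantiate_custom_component (see loading.py below);
        # list_flows() raises ValueError("Session is invalid") when it is unset.
        flows = self.list_flows()  # only rows where Flow.user_id == self.user_id
        return ", ".join(flow.name for flow in flows)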
@@ -1,9 +1,10 @@
from typing import Dict, List, Optional, Type

from langflow.interface.base import LangChainTypeCreator
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode
from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
from langflow.settings import settings

from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class


@@ -30,10 +31,12 @@ class DocumentLoaderCreator(LangChainTypeCreator):
        return None

    def to_list(self) -> List[str]:
        settings_manager = get_settings_manager()
        return [
            documentloader.__name__
            for documentloader in self.type_to_loader_dict.values()
            if documentloader.__name__ in settings.DOCUMENTLOADERS or settings.DEV
            if documentloader.__name__ in settings_manager.settings.DOCUMENTLOADERS
            or settings_manager.settings.DEV
        ]
|
|||
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.interface.custom_lists import embedding_type_to_cls_dict
|
||||
from langflow.settings import settings
|
||||
from langflow.services.utils import get_settings_manager
|
||||
|
||||
from langflow.template.frontend_node.base import FrontendNode
|
||||
from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode
|
||||
from langflow.utils.logger import logger
|
||||
|
|
@ -32,10 +33,12 @@ class EmbeddingCreator(LangChainTypeCreator):
|
|||
return None
|
||||
|
||||
def to_list(self) -> List[str]:
|
||||
settings_manager = get_settings_manager()
|
||||
return [
|
||||
embedding.__name__
|
||||
for embedding in self.type_to_loader_dict.values()
|
||||
if embedding.__name__ in settings.EMBEDDINGS or settings.DEV
|
||||
if embedding.__name__ in settings_manager.settings.EMBEDDINGS
|
||||
or settings_manager.settings.DEV
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
import json
import orjson
from typing import Any, Callable, Dict, Sequence, Type
from typing import Any, Callable, Dict, Sequence, Type, TYPE_CHECKING

from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor

@@ -36,8 +36,13 @@ from langchain.vectorstores.base import VectorStore
from langchain.document_loaders.base import BaseLoader
from langflow.utils.logger import logger

if TYPE_CHECKING:
    from langflow import CustomComponent


def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:

def instantiate_class(
    node_type: str, base_type: str, params: Dict, user_id=None
) -> Any:
    """Instantiate class from module type and key, and params"""
    params = convert_params_to_sets(params)
    params = convert_kwargs(params)

@@ -48,7 +53,9 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
        return custom_node(**params)
    logger.debug(f"Instantiating {node_type} of type {base_type}")
    class_object = import_by_type(_type=base_type, name=node_type)
    return instantiate_based_on_type(class_object, base_type, node_type, params)
    return instantiate_based_on_type(
        class_object, base_type, node_type, params, user_id=user_id
    )


def convert_params_to_sets(params):

@@ -75,7 +82,7 @@ def convert_kwargs(params):
    return params


def instantiate_based_on_type(class_object, base_type, node_type, params):
def instantiate_based_on_type(class_object, base_type, node_type, params, user_id):
    if base_type == "agents":
        return instantiate_agent(node_type, class_object, params)
    elif base_type == "prompts":

@@ -89,7 +96,7 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
    elif base_type == "toolkits":
        return instantiate_toolkit(node_type, class_object, params)
    elif base_type == "embeddings":
        return instantiate_embedding(class_object, params)
        return instantiate_embedding(node_type, class_object, params)
    elif base_type == "vectorstores":
        return instantiate_vectorstore(class_object, params)
    elif base_type == "documentloaders":

@@ -109,19 +116,19 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
    elif base_type == "memory":
        return instantiate_memory(node_type, class_object, params)
    elif base_type == "custom_components":
        return instantiate_custom_component(node_type, class_object, params)
        return instantiate_custom_component(node_type, class_object, params, user_id)
    elif base_type == "wrappers":
        return instantiate_wrapper(node_type, class_object, params)
    else:
        return class_object(**params)


def instantiate_custom_component(node_type, class_object, params):
def instantiate_custom_component(node_type, class_object, params, user_id):
    # we need to make a copy of the params because we will be
    # modifying it
    params_copy = params.copy()
    class_object = get_function_custom(params_copy.pop("code"))
    custom_component = class_object()
    class_object: "CustomComponent" = get_function_custom(params_copy.pop("code"))
    custom_component = class_object(user_id=user_id)
    built_object = custom_component.build(**params_copy)
    return built_object, {"repr": custom_component.custom_repr()}


@@ -148,7 +155,7 @@ def instantiate_llm(node_type, class_object, params: Dict):
    # This is a workaround so JinaChat works until streaming is implemented
    # if "openai_api_base" in params and "jina" in params["openai_api_base"]:
    # False if condition is True
    if node_type == "VertexAI":
    if "VertexAI" in node_type:
        return initialize_vertexai(class_object=class_object, params=params)
    # max_tokens sometimes is a string and should be an int
    if "max_tokens" in params:

@@ -262,9 +269,13 @@ def instantiate_toolkit(node_type, class_object: Type[BaseToolkit], params: Dict
    return loaded_toolkit


def instantiate_embedding(class_object, params: Dict):
def instantiate_embedding(node_type, class_object, params: Dict):
    params.pop("model", None)
    params.pop("headers", None)

    if "VertexAI" in node_type:
        return initialize_vertexai(class_object=class_object, params=params)

    try:
        return class_object(**params)
    except ValidationError:
@@ -1,6 +1,6 @@
import contextlib
import json
from langflow.database.models.base import orjson_dumps
from langflow.services.database.models.base import orjson_dumps
import orjson
from typing import Any, Dict, List
@@ -14,34 +14,43 @@ from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.custom.base import custom_component_creator
from langflow.utils.lazy_load import LazyLoadDictBase


def get_type_dict():
    return {
        "agents": agent_creator.to_list(),
        "prompts": prompt_creator.to_list(),
        "llms": llm_creator.to_list(),
        "tools": tool_creator.to_list(),
        "chains": chain_creator.to_list(),
        "memory": memory_creator.to_list(),
        "toolkits": toolkits_creator.to_list(),
        "wrappers": wrapper_creator.to_list(),
        "documentLoaders": documentloader_creator.to_list(),
        "vectorStore": vectorstore_creator.to_list(),
        "embeddings": embedding_creator.to_list(),
        "textSplitters": textsplitter_creator.to_list(),
        "utilities": utility_creator.to_list(),
        "outputParsers": output_parser_creator.to_list(),
        "retrievers": retriever_creator.to_list(),
        "custom_components": custom_component_creator.to_list(),
    }
class AllTypesDict(LazyLoadDictBase):
    def __init__(self):
        self._all_types_dict = None

    @property
    def ALL_TYPES_DICT(self):
        return self.all_types_dict

    def _build_dict(self):
        langchain_types_dict = self.get_type_dict()
        return {
            **langchain_types_dict,
            "Custom": ["Custom Tool", "Python Function"],
        }

    def get_type_dict(self):
        return {
            "agents": agent_creator.to_list(),
            "prompts": prompt_creator.to_list(),
            "llms": llm_creator.to_list(),
            "tools": tool_creator.to_list(),
            "chains": chain_creator.to_list(),
            "memory": memory_creator.to_list(),
            "toolkits": toolkits_creator.to_list(),
            "wrappers": wrapper_creator.to_list(),
            "documentLoaders": documentloader_creator.to_list(),
            "vectorStore": vectorstore_creator.to_list(),
            "embeddings": embedding_creator.to_list(),
            "textSplitters": textsplitter_creator.to_list(),
            "utilities": utility_creator.to_list(),
            "outputParsers": output_parser_creator.to_list(),
            "retrievers": retriever_creator.to_list(),
            "custom_components": custom_component_creator.to_list(),
        }


LANGCHAIN_TYPES_DICT = get_type_dict()

# Now we'll build a dict with Langchain types and ours

ALL_TYPES_DICT = {
    **LANGCHAIN_TYPES_DICT,
    "Custom": ["Custom Tool", "Python Function"],
}
lazy_load_dict = AllTypesDict()
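Call sites now import the wrapper instead of a module-level dict, so the creator lists are built on first access rather than at import time. A minimal sketch of the difference in usage (assuming the LazyLoadDictBase behavior sketched earlier):

from langflow.interface.listing import lazy_load_dict

# Importing the module no longer triggers the creators' to_list() calls;
# the first attribute access below is what builds and caches the dict.
llm_names = lazy_load_dict.ALL_TYPES_DICT["llms"]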
@@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type

from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.llms import LLMFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class

@@ -33,10 +34,12 @@ class LLMCreator(LangChainTypeCreator):
        return None

    def to_list(self) -> List[str]:
        settings_manager = get_settings_manager()
        return [
            llm.__name__
            for llm in self.type_to_loader_dict.values()
            if llm.__name__ in settings.LLMS or settings.DEV
            if llm.__name__ in settings_manager.settings.LLMS
            or settings_manager.settings.DEV
        ]
@@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type

from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import memory_type_to_cls_dict
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.base import FrontendNode
from langflow.template.frontend_node.memories import MemoryFrontendNode
from langflow.utils.logger import logger

@@ -48,10 +49,12 @@ class MemoryCreator(LangChainTypeCreator):
        return None

    def to_list(self) -> List[str]:
        settings_manager = get_settings_manager()
        return [
            memory.__name__
            for memory in self.type_to_loader_dict.values()
            if memory.__name__ in settings.MEMORIES or settings.DEV
            if memory.__name__ in settings_manager.settings.MEMORIES
            or settings_manager.settings.DEV
        ]
@@ -4,7 +4,8 @@ from langchain import output_parsers

from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method

@@ -23,6 +24,7 @@ class OutputParserCreator(LangChainTypeCreator):
    @property
    def type_to_loader_dict(self) -> Dict:
        if self.type_dict is None:
            settings_manager = get_settings_manager()
            self.type_dict = {
                output_parser_name: import_class(
                    f"langchain.output_parsers.{output_parser_name}"

@@ -33,7 +35,8 @@ class OutputParserCreator(LangChainTypeCreator):
            self.type_dict = {
                name: output_parser
                for name, output_parser in self.type_dict.items()
                if name in settings.OUTPUT_PARSERS or settings.DEV
                if name in settings_manager.settings.OUTPUT_PARSERS
                or settings_manager.settings.DEV
            }
        return self.type_dict
@@ -5,7 +5,8 @@ from langchain import prompts
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.prompts import PromptFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class

@@ -20,6 +21,7 @@ class PromptCreator(LangChainTypeCreator):

    @property
    def type_to_loader_dict(self) -> Dict:
        settings_manager = get_settings_manager()
        if self.type_dict is None:
            self.type_dict = {
                prompt_name: import_class(f"langchain.prompts.{prompt_name}")

@@ -34,7 +36,8 @@ class PromptCreator(LangChainTypeCreator):
            self.type_dict = {
                name: prompt
                for name, prompt in self.type_dict.items()
                if name in settings.PROMPTS or settings.DEV
                if name in settings_manager.settings.PROMPTS
                or settings_manager.settings.DEV
            }
        return self.type_dict
@@ -4,7 +4,8 @@ from langchain import retrievers

from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.retrievers import RetrieverFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_method, build_template_from_class

@@ -48,10 +49,12 @@ class RetrieverCreator(LangChainTypeCreator):
        return None

    def to_list(self) -> List[str]:
        settings_manager = get_settings_manager()
        return [
            retriever
            for retriever in self.type_to_loader_dict.keys()
            if retriever in settings.RETRIEVERS or settings.DEV
            if retriever in settings_manager.settings.RETRIEVERS
            or settings_manager.settings.DEV
        ]
@@ -1,4 +1,5 @@
from langflow.cache.utils import memoize_dict
from typing import Any, Dict, Tuple
from langflow.services.cache.utils import memoize_dict
from langflow.graph import Graph
from langflow.utils.logger import logger


@@ -15,7 +16,7 @@ def build_langchain_object_with_caching(data_graph):


@memoize_dict(maxsize=10)
def build_sorted_vertices_with_caching(data_graph):
def build_sorted_vertices_with_caching(data_graph) -> Tuple[Any, Dict]:
    """
    Build langchain object from data_graph.
    """
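memoize_dict caches results keyed on a dict argument, which is not hashable by default. A plausible sketch of such a decorator; this is an assumption for illustration, the real implementation lives in langflow.services.cache.utils and is not shown in this diff:

import json
from collections import OrderedDict
from functools import wraps


def memoize_dict(maxsize=10):
    """Assumed sketch: LRU-style cache for functions whose first argument is a dict."""

    def decorator(func):
        cache = OrderedDict()

        @wraps(func)
        def wrapper(data_dict, *args, **kwargs):
            key = json.dumps(data_dict, sort_keys=True)  # hashable stand-in for the dict
            if key in cache:
                cache.move_to_end(key)
                return cache[key]
            result = func(data_dict, *args, **kwargs)
            cache[key] = result
            if len(cache) > maxsize:
                cache.popitem(last=False)  # evict the least recently used entry
            return result

        return wrapper

    return decorator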
@@ -1,9 +1,10 @@
from typing import Dict, List, Optional, Type

from langflow.interface.base import LangChainTypeCreator
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode
from langflow.interface.custom_lists import textsplitter_type_to_cls_dict
from langflow.settings import settings

from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class


@@ -30,10 +31,12 @@ class TextSplitterCreator(LangChainTypeCreator):
        return None

    def to_list(self) -> List[str]:
        settings_manager = get_settings_manager()
        return [
            textsplitter.__name__
            for textsplitter in self.type_to_loader_dict.values()
            if textsplitter.__name__ in settings.TEXTSPLITTERS or settings.DEV
            if textsplitter.__name__ in settings_manager.settings.TEXTSPLITTERS
            or settings_manager.settings.DEV
        ]
@@ -4,7 +4,8 @@ from langchain.agents import agent_toolkits

from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class, import_module
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class

@@ -29,13 +30,15 @@ class ToolkitCreator(LangChainTypeCreator):
    @property
    def type_to_loader_dict(self) -> Dict:
        if self.type_dict is None:
            settings_manager = get_settings_manager()
            self.type_dict = {
                toolkit_name: import_class(
                    f"langchain.agents.agent_toolkits.{toolkit_name}"
                )
                # if toolkit_name is not lower case it is a class
                for toolkit_name in agent_toolkits.__all__
                if not toolkit_name.islower() and toolkit_name in settings.TOOLKITS
                if not toolkit_name.islower()
                and toolkit_name in settings_manager.settings.TOOLKITS
            }

        return self.type_dict
@@ -15,7 +15,8 @@ from langflow.interface.tools.constants import (
    OTHER_TOOLS,
)
from langflow.interface.tools.util import get_tool_params
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.field.base import TemplateField
from langflow.template.template.base import Template
from langflow.utils import util

@@ -66,6 +67,7 @@ class ToolCreator(LangChainTypeCreator):

    @property
    def type_to_loader_dict(self) -> Dict:
        settings_manager = get_settings_manager()
        if self.tools_dict is None:
            all_tools = {}

@@ -74,7 +76,10 @@ class ToolCreator(LangChainTypeCreator):

                tool_name = tool_params.get("name") or tool

                if tool_name in settings.TOOLS or settings.DEV:
                if (
                    tool_name in settings_manager.settings.TOOLS
                    or settings_manager.settings.DEV
                ):
                    if tool_name == "JsonSpec":
                        tool_params["path"] = tool_params.pop("dict_")  # type: ignore
                    all_tools[tool_name] = {
@@ -3,6 +3,7 @@ import inspect
from typing import Dict, Union

from langchain.agents.tools import Tool
from langflow.utils.logger import logger


def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:

@@ -57,7 +58,13 @@ def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:


def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
    tree = ast.parse(inspect.getsource(cls))
    try:
        tree = ast.parse(inspect.getsource(cls))
    except IndentationError:
        logger.error(
            f"Error parsing class {cls.__name__}. Make sure there are no tabs in the code."
        )
        return None

    tool_params = {}
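The new try/except matters because inspect.getsource returns a class's source with its original indentation; for a class defined inside another scope, ast.parse then fails before any tabs are involved. A self-contained illustration (textwrap.dedent is the usual remedy when re-parsing is required):

import ast
import inspect
import textwrap

class Outer:
    class Inner:  # nested, so its source starts indented
        pass

source = inspect.getsource(Outer.Inner)
# ast.parse(source) raises IndentationError here; dedenting first
# makes the snippet parseable again.
tree = ast.parse(textwrap.dedent(source))
print(type(tree.body[0]).__name__)  # ClassDef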
@@ -5,7 +5,8 @@ from langchain import SQLDatabase, utilities
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.utilities import UtilitiesFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class

@@ -26,6 +27,7 @@ class UtilityCreator(LangChainTypeCreator):
        from the langchain.chains module and filtering them according to the settings.utilities list.
        """
        if self.type_dict is None:
            settings_manager = get_settings_manager()
            self.type_dict = {
                utility_name: import_class(f"langchain.utilities.{utility_name}")
                for utility_name in utilities.__all__

@@ -35,7 +37,8 @@ class UtilityCreator(LangChainTypeCreator):
            self.type_dict = {
                name: utility
                for name, utility in self.type_dict.items()
                if name in settings.UTILITIES or settings.DEV
                if name in settings_manager.settings.UTILITIES
                or settings_manager.settings.DEV
            }

        return self.type_dict
@@ -9,7 +9,8 @@ import yaml
from langchain.base_language import BaseLanguageModel
from PIL.Image import Image
from langflow.utils.logger import logger
from langflow.chat.config import ChatConfig
from langflow.services.chat.config import ChatConfig
from langflow.services.utils import get_settings_manager


def load_file_into_dict(file_path: str) -> dict:

@@ -63,13 +64,11 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]:

def setup_llm_caching():
    """Setup LLM caching."""

    from langflow.settings import settings

    settings_manager = get_settings_manager()
    try:
        set_langchain_cache(settings)
        set_langchain_cache(settings_manager.settings)
    except ImportError:
        logger.warning(f"Could not import {settings.CACHE}. ")
        logger.warning(f"Could not import {settings_manager.settings.CACHE}. ")
    except Exception as exc:
        logger.warning(f"Could not setup LLM caching. Error: {exc}")
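set_langchain_cache itself is not shown in this hunk. In the LangChain releases of this period, enabling LLM caching meant assigning langchain.llm_cache, so a plausible sketch of the helper (settings.CACHE naming the backend is an assumption) looks like:

# Hypothetical set_langchain_cache: wire settings.CACHE to langchain.llm_cache.
import langchain
from langchain.cache import InMemoryCache, SQLiteCache

def set_langchain_cache(settings) -> None:
    if settings.CACHE == "SQLiteCache":
        langchain.llm_cache = SQLiteCache(database_path=".langchain.db")
    else:
        langchain.llm_cache = InMemoryCache()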
@@ -4,7 +4,8 @@ from langchain import vectorstores

from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager

from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_method

@@ -43,10 +44,12 @@ class VectorstoreCreator(LangChainTypeCreator):
        return None

    def to_list(self) -> List[str]:
        settings_manager = get_settings_manager()
        return [
            vectorstore
            for vectorstore in self.type_to_loader_dict.keys()
            if vectorstore in settings.VECTORSTORES or settings.DEV
            if vectorstore in settings_manager.settings.VECTORSTORES
            or settings_manager.settings.DEV
        ]
@@ -6,24 +6,22 @@ from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles

from langflow.api import router
from langflow.database.base import create_db_and_tables, Engine

from langflow.interface.utils import setup_llm_caching
from langflow.services.database.utils import initialize_database
from langflow.services.manager import initialize_services
from langflow.utils.logger import configure


def create_app():
    """Create the FastAPI app and include the router."""

    configure()

    app = FastAPI()

    origins = [
        "*",
    ]

    @app.get("/health")
    def get_health():
        return {"status": "OK"}
    origins = ["*"]

    app.add_middleware(
        CORSMiddleware,

@@ -33,9 +31,14 @@ def create_app():
        allow_headers=["*"],
    )

    @app.get("/health")
    def health():
        return {"status": "ok"}

    app.include_router(router)
    app.on_event("startup")(Engine.update)
    app.on_event("startup")(create_db_and_tables)
    app.on_event("startup")(initialize_services)
    app.on_event("startup")(initialize_database)
    app.on_event("startup")(setup_llm_caching)
    return app

@@ -68,22 +71,25 @@ def get_static_files_dir():
    return frontend_path / "frontend"


def setup_app(static_files_dir: Optional[Path] = None) -> FastAPI:
def setup_app(
    static_files_dir: Optional[Path] = None, backend_only: bool = False
) -> FastAPI:
    """Setup the FastAPI app."""
    # get the directory of the current file
    if not static_files_dir:
        static_files_dir = get_static_files_dir()

    if not static_files_dir or not static_files_dir.exists():
    if not backend_only and (not static_files_dir or not static_files_dir.exists()):
        raise RuntimeError(f"Static files directory {static_files_dir} does not exist.")
    app = create_app()
    setup_static_files(app, static_files_dir)
    if not backend_only and static_files_dir is not None:
        setup_static_files(app, static_files_dir)
    return app


if __name__ == "__main__":
    import uvicorn
    from langflow.utils.util import get_number_of_workers
    from langflow.__main__ import get_number_of_workers

    configure()
    uvicorn.run(
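The new backend_only flag makes it possible to boot the API without a built frontend present. A usage sketch (module path and port are assumptions based on context):

# Run the API only, skipping the static frontend mount.
import uvicorn
from langflow.main import setup_app  # module path assumed

app = setup_app(backend_only=True)
uvicorn.run(app, host="127.0.0.1", port=7860)
# curl http://127.0.0.1:7860/health  ->  {"status": "ok"}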
@@ -85,39 +85,55 @@ def get_input_str_if_only_one_input(inputs: dict) -> Optional[str]:
    return list(inputs.values())[0] if len(inputs) == 1 else None


def process_graph_cached(
    data_graph: Dict[str, Any], inputs: Optional[dict] = None, clear_cache=False
):
    """
    Process graph by extracting input variables and replacing ZeroShotPrompt
    with PromptTemplate, then run the graph and return the result and thought.
    """
    # Load langchain object
def get_build_result(data_graph, session_id):
    # If session_id is provided, load the langchain_object from the session
    # using build_sorted_vertices_with_caching.get_result_by_session_id
    # if it returns something different than None, return it
    # otherwise, build the graph and return the result
    if session_id:
        logger.debug(f"Loading LangChain object from session {session_id}")
        result = build_sorted_vertices_with_caching.get_result_by_session_id(session_id)
        if result is not None:
            logger.debug("Loaded LangChain object")
            return result

    logger.debug("Building langchain object")
    return build_sorted_vertices_with_caching(data_graph)


def clear_caches_if_needed(clear_cache: bool):
    if clear_cache:
        build_sorted_vertices_with_caching.clear_cache()
        logger.debug("Cleared cache")
    langchain_object, artifacts = build_sorted_vertices_with_caching(data_graph)
    logger.debug("Loaded LangChain object")
    if inputs is None:
        inputs = {}

    # Add artifacts to inputs
    # artifacts can be documents loaded when building
    # the flow
    for (
        key,
        value,
    ) in artifacts.items():
        if key not in inputs or not inputs[key]:
            inputs[key] = value

def load_langchain_object(
    data_graph: Dict[str, Any], session_id: str
) -> Tuple[Union[Chain, VectorStore], Dict[str, Any], str]:
    langchain_object, artifacts = get_build_result(data_graph, session_id)
    session_id = build_sorted_vertices_with_caching.hash
    logger.debug("Loaded LangChain object")

    if langchain_object is None:
        # Raise user facing error
        raise ValueError(
            "There was an error loading the langchain_object. Please, check all the nodes and try again."
        )

    # Generate result and thought
    return langchain_object, artifacts, session_id


def process_inputs(inputs: Optional[dict], artifacts: Dict[str, Any]) -> dict:
    if inputs is None:
        inputs = {}

    for key, value in artifacts.items():
        if key not in inputs or not inputs[key]:
            inputs[key] = value

    return inputs


def generate_result(langchain_object: Union[Chain, VectorStore], inputs: dict):
    if isinstance(langchain_object, Chain):
        if inputs is None:
            raise ValueError("Inputs must be provided for a Chain")

@@ -130,9 +146,28 @@ def process_graph_cached(
        raise ValueError(
            f"Unknown langchain_object type: {type(langchain_object).__name__}"
        )

    return result


def process_graph_cached(
    data_graph: Dict[str, Any],
    inputs: Optional[dict] = None,
    clear_cache=False,
    session_id=None,
) -> Tuple[Any, str]:
    clear_caches_if_needed(clear_cache)
    # If session_id is provided, load the langchain_object from the session
    # else build the graph and return the result and the new session_id
    langchain_object, artifacts, session_id = load_langchain_object(
        data_graph, session_id
    )
    processed_inputs = process_inputs(inputs, artifacts)
    result = generate_result(langchain_object, processed_inputs)

    return result, session_id


def load_flow_from_json(
    flow: Union[Path, str, dict], tweaks: Optional[dict] = None, build=True
):
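The refactor splits the old monolithic function into get_build_result, process_inputs, and generate_result, and threads a session_id through so a second call can reuse the cached build. A usage sketch (flow_dict and the input key are hypothetical):

# First call builds the graph; the returned session_id reuses that build.
result, session_id = process_graph_cached(flow_dict, inputs={"input": "hi"})
followup, _ = process_graph_cached(
    flow_dict, inputs={"input": "and again?"}, session_id=session_id
)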
4 src/backend/langflow/services/__init__.py Normal file
@@ -0,0 +1,4 @@
from .manager import service_manager
from .schema import ServiceType

__all__ = ["service_manager", "ServiceType"]
12 src/backend/langflow/services/auth/factory.py Normal file
@@ -0,0 +1,12 @@
from langflow.services.factory import ServiceFactory
from langflow.services.auth.service import AuthManager


class AuthManagerFactory(ServiceFactory):
    name = "auth_manager"

    def __init__(self):
        super().__init__(AuthManager)

    def create(self, settings_manager):
        return AuthManager(settings_manager)
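The factory/service split implies a registry that instantiates each service once from its factory. A speculative sketch of that wiring (the actual ServiceManager is not shown in this section):

# Assumed wiring: a manager that lazily builds services from factories.
class ServiceManager:
    def __init__(self):
        self.factories = {}
        self.services = {}

    def register_factory(self, name: str, factory):
        self.factories[name] = factory

    def get(self, name: str, *args):
        # Create on first request, then reuse the same instance.
        if name not in self.services:
            self.services[name] = self.factories[name].create(*args)
        return self.services[name]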
12 src/backend/langflow/services/auth/service.py Normal file
@@ -0,0 +1,12 @@
from langflow.services.base import Service
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from langflow.services.settings.manager import SettingsManager


class AuthManager(Service):
    name = "auth_manager"

    def __init__(self, settings_manager: "SettingsManager"):
        self.settings_manager = settings_manager
283 src/backend/langflow/services/auth/utils.py Normal file
@@ -0,0 +1,283 @@
from datetime import datetime, timedelta, timezone
from fastapi import Depends, HTTPException, Security, status
from fastapi.security import APIKeyHeader, APIKeyQuery, OAuth2PasswordBearer
from jose import JWTError, jwt
from typing import Annotated, Coroutine, Optional, Union
from uuid import UUID
from langflow.services.database.models.api_key.api_key import ApiKey
from langflow.services.database.models.api_key.crud import check_key
from langflow.services.database.models.user.user import User
from langflow.services.database.models.user.crud import (
    get_user_by_id,
    get_user_by_username,
    update_user_last_login_at,
)
from langflow.services.utils import get_session, get_settings_manager
from sqlmodel import Session

oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login")

API_KEY_NAME = "api-key"

api_key_query = APIKeyQuery(
    name=API_KEY_NAME, scheme_name="API key query", auto_error=False
)
api_key_header = APIKeyHeader(
    name=API_KEY_NAME, scheme_name="API key header", auto_error=False
)


# Source: https://github.com/mrtolkien/fastapi_simple_security/blob/master/fastapi_simple_security/security_api_key.py
async def api_key_security(
    query_param: str = Security(api_key_query),
    header_param: str = Security(api_key_header),
    db: Session = Depends(get_session),
) -> Optional[User]:
    settings_manager = get_settings_manager()
    result: Optional[Union[ApiKey, User]] = None
    if settings_manager.auth_settings.AUTO_LOGIN:
        # Get the first user
        settings_manager.auth_settings.FIRST_SUPERUSER
        result = get_user_by_username(
            db, settings_manager.auth_settings.FIRST_SUPERUSER
        )

    elif not query_param and not header_param:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="An API key must be passed as query or header",
        )

    elif query_param:
        result = check_key(db, query_param)

    else:
        result = check_key(db, header_param)

    if not result:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Invalid or missing API key",
        )
    if isinstance(result, ApiKey):
        return result.user
    elif isinstance(result, User):
        return result
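api_key_security accepts the key either as a query parameter or a header, both named "api-key", and AUTO_LOGIN short-circuits to the first superuser. A client-side sketch (endpoint path and key value are illustrative):

# Either transport works; both carry the same "api-key" name.
import requests

key = "sk-..."
requests.get("http://localhost:7860/api/v1/flows", headers={"api-key": key})
requests.get("http://localhost:7860/api/v1/flows", params={"api-key": key})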
async def get_current_user(
    token: Annotated[str, Depends(oauth2_login)],
    db: Session = Depends(get_session),
) -> User:
    settings_manager = get_settings_manager()

    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    if isinstance(token, Coroutine):
        token = await token

    try:
        payload = jwt.decode(
            token,
            settings_manager.auth_settings.SECRET_KEY,
            algorithms=[settings_manager.auth_settings.ALGORITHM],
        )
        user_id: UUID = payload.get("sub")  # type: ignore
        token_type: str = payload.get("type")  # type: ignore

        if user_id is None or token_type:
            raise credentials_exception
    except JWTError as e:
        raise credentials_exception from e

    user = get_user_by_id(db, user_id)  # type: ignore
    if user is None or not user.is_active:
        raise credentials_exception
    return user


def get_current_active_user(current_user: Annotated[User, Depends(get_current_user)]):
    if not current_user.is_active:
        raise HTTPException(status_code=400, detail="Inactive user")
    return current_user


def get_current_active_superuser(
    current_user: Annotated[User, Depends(get_current_user)]
) -> User:
    if not current_user.is_active:
        raise HTTPException(status_code=401, detail="Inactive user")
    if not current_user.is_superuser:
        raise HTTPException(
            status_code=400, detail="The user doesn't have enough privileges"
        )
    return current_user


def verify_password(plain_password, hashed_password):
    settings_manager = get_settings_manager()
    return settings_manager.auth_settings.pwd_context.verify(
        plain_password, hashed_password
    )


def get_password_hash(password):
    settings_manager = get_settings_manager()
    return settings_manager.auth_settings.pwd_context.hash(password)


def create_token(data: dict, expires_delta: timedelta):
    settings_manager = get_settings_manager()

    to_encode = data.copy()
    expire = datetime.now(timezone.utc) + expires_delta
    to_encode["exp"] = expire

    return jwt.encode(
        to_encode,
        settings_manager.auth_settings.SECRET_KEY,
        algorithm=settings_manager.auth_settings.ALGORITHM,
    )


def create_super_user(
    db: Session = Depends(get_session),
    username: Optional[str] = None,
    password: Optional[str] = None,
) -> User:
    settings_manager = get_settings_manager()

    super_user = get_user_by_username(
        db, username or settings_manager.auth_settings.FIRST_SUPERUSER
    )

    if not super_user:
        super_user = User(
            username=username or settings_manager.auth_settings.FIRST_SUPERUSER,
            password=get_password_hash(
                password or settings_manager.auth_settings.FIRST_SUPERUSER_PASSWORD
            ),
            is_superuser=True,
            is_active=True,
            last_login_at=None,
        )

        db.add(super_user)
        db.commit()
        db.refresh(super_user)

    return super_user


def create_user_longterm_token(db: Session = Depends(get_session)) -> dict:
    super_user = create_super_user(db)

    access_token_expires_longterm = timedelta(days=365)
    access_token = create_token(
        data={"sub": str(super_user.id)},
        expires_delta=access_token_expires_longterm,
    )

    # Update: last_login_at
    update_user_last_login_at(super_user.id, db)

    return {
        "access_token": access_token,
        "refresh_token": None,
        "token_type": "bearer",
    }


def create_user_api_key(user_id: UUID) -> dict:
    access_token = create_token(
        data={"sub": str(user_id), "role": "api_key"},
        expires_delta=timedelta(days=365 * 2),
    )

    return {"api_key": access_token}


def get_user_id_from_token(token: str) -> UUID:
    try:
        user_id = jwt.get_unverified_claims(token)["sub"]
        return UUID(user_id)
    except (KeyError, JWTError, ValueError):
        return UUID(int=0)


def create_user_tokens(
    user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False
) -> dict:
    settings_manager = get_settings_manager()

    access_token_expires = timedelta(
        minutes=settings_manager.auth_settings.ACCESS_TOKEN_EXPIRE_MINUTES
    )
    access_token = create_token(
        data={"sub": str(user_id)},
        expires_delta=access_token_expires,
    )

    refresh_token_expires = timedelta(
        minutes=settings_manager.auth_settings.REFRESH_TOKEN_EXPIRE_MINUTES
    )
    refresh_token = create_token(
        data={"sub": str(user_id), "type": "rf"},
        expires_delta=refresh_token_expires,
    )

    # Update: last_login_at
    if update_last_login:
        update_user_last_login_at(user_id, db)

    return {
        "access_token": access_token,
        "refresh_token": refresh_token,
        "token_type": "bearer",
    }


def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)):
    settings_manager = get_settings_manager()

    try:
        payload = jwt.decode(
            refresh_token,
            settings_manager.auth_settings.SECRET_KEY,
            algorithms=[settings_manager.auth_settings.ALGORITHM],
        )
        user_id: UUID = payload.get("sub")  # type: ignore
        token_type: str = payload.get("type")  # type: ignore

        if user_id is None or token_type is None:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token"
            )

        return create_user_tokens(user_id, db)

    except JWTError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid refresh token",
        ) from e


def authenticate_user(
    username: str, password: str, db: Session = Depends(get_session)
) -> Optional[User]:
    user = get_user_by_username(db, username)

    if not user:
        return None

    if not user.is_active:
        if not user.last_login_at:
            raise HTTPException(status_code=400, detail="Waiting for approval")
        raise HTTPException(status_code=400, detail="Inactive user")

    return user if verify_password(password, user.password) else None
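Note that get_current_user rejects any token whose "type" claim is set, which is how the refresh tokens minted with type "rf" are kept out of access-token slots. A round-trip sketch with python-jose matching create_token's claims (the key and subject are stand-ins):

from datetime import datetime, timedelta, timezone
from jose import jwt

SECRET_KEY, ALGORITHM = "change-me", "HS256"  # stand-ins for auth_settings
claims = {
    "sub": "user-uuid",
    "exp": datetime.now(timezone.utc) + timedelta(minutes=60),
}
token = jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
assert payload["sub"] == "user-uuid"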
2 src/backend/langflow/services/base.py Normal file
@@ -0,0 +1,2 @@
class Service:
    name: str
11 src/backend/langflow/services/cache/__init__.py vendored Normal file
@@ -0,0 +1,11 @@
from . import factory, manager
from langflow.services.cache.manager import cache_manager
from langflow.services.cache.flow import InMemoryCache


__all__ = [
    "cache_manager",
    "factory",
    "manager",
    "InMemoryCache",
]
11 src/backend/langflow/services/cache/factory.py vendored Normal file
@@ -0,0 +1,11 @@
from langflow.services.cache.manager import CacheManager
from langflow.services.factory import ServiceFactory


class CacheManagerFactory(ServiceFactory):
    def __init__(self):
        super().__init__(CacheManager)

    def create(self):
        # Here you would have logic to create and configure a CacheManager
        return CacheManager()
@@ -2,7 +2,7 @@ import threading
import time
from collections import OrderedDict

from langflow.cache.base import BaseCache
from langflow.services.cache.base import BaseCache


class InMemoryCache(BaseCache):
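The hunk above only relocates the BaseCache import; the class body is elided. Given the imports (threading, time, OrderedDict), a cache of this shape is typically a lock-guarded LRU. A sketch under that assumption (the real InMemoryCache's interface may differ):

import threading
from collections import OrderedDict

class SimpleInMemoryCache:
    def __init__(self, max_size: int = 100):
        self._data: "OrderedDict[str, object]" = OrderedDict()
        self._lock = threading.RLock()
        self.max_size = max_size

    def get(self, key):
        with self._lock:
            if key in self._data:
                self._data.move_to_end(key)  # mark as most recently used
                return self._data[key]
            return None

    def set(self, key, value):
        with self._lock:
            self._data[key] = value
            self._data.move_to_end(key)
            while len(self._data) > self.max_size:
                self._data.popitem(last=False)  # drop least recently used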