diff --git a/dev.Dockerfile b/dev.Dockerfile index 7e439c69a..1f9a27ccc 100644 --- a/dev.Dockerfile +++ b/dev.Dockerfile @@ -3,7 +3,7 @@ FROM python:3.10-slim WORKDIR /app # Install Poetry -RUN apt-get update && apt-get install gcc curl -y +RUN apt-get update && apt-get install gcc g++ curl -y RUN curl -sSL https://install.python-poetry.org | python3 - # # Add Poetry to PATH ENV PATH="${PATH}:/root/.local/bin" @@ -15,4 +15,7 @@ COPY ./ ./ # Install dependencies RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi +# Set the logging level to DEBUG +ENV LOG_LEVEL=debug + CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload", "--log-level", "debug"] \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index d9ba84030..f37406757 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,6 +10,12 @@ services: volumes: - ./:/app command: bash -c "uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload" + deploy: + resources: + limits: + cpus: '4' + memory: 16G + frontend: build: diff --git a/poetry.lock b/poetry.lock index 353921ea0..6dd39bcde 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
[[package]] name = "aiohttp" @@ -1274,6 +1274,20 @@ llms = ["anthropic (>=0.2.4,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,< openai = ["openai (>=0,<1)"] qdrant = ["qdrant-client (>=1.1.1,<2.0.0)"] +[[package]] +name = "llama-cpp-python" +version = "0.1.23" +description = "A Python wrapper for llama.cpp" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "llama_cpp_python-0.1.23.tar.gz", hash = "sha256:323a937e68e04251b5ad1804922e05d15c8b6bfbcf7c3e683a7b39a20e165ebf"}, +] + +[package.dependencies] +typing-extensions = ">=4.5.0,<5.0.0" + [[package]] name = "markdown-it-py" version = "2.2.0" @@ -2763,4 +2777,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "70e86f7d3b5caed792e37ccf9e11ed95008e5078dd8830e4f8b96cc1d35c7b60" +content-hash = "a5f1a33bedd704cea56a6c8d3d97c8d8daad4b78f47765cca068f88face28647" diff --git a/pyproject.toml b/pyproject.toml index bab62904e..75c8374b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,7 @@ dill = "^0.3.6" pandas = "^1.5.3" huggingface-hub = "^0.13.3" rich = "^13.3.3" +llama-cpp-python = "0.1.23" [tool.poetry.group.dev.dependencies] black = "^23.1.0" diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index a00d91bff..0fd8033e0 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -27,6 +27,7 @@ llms: # - AzureOpenAI - ChatOpenAI - HuggingFaceHub + - LlamaCpp tools: - Search diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py index 746c58325..d221901f6 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/langflow/interface/custom_lists.py @@ -12,12 +12,14 @@ from langchain import ( ) from langchain.agents import agent_toolkits from langchain.chat_models import ChatOpenAI +from langchain.llms import LlamaCpp from 
langflow.interface.importing.utils import import_class ## LLM llm_type_to_cls_dict = llms.type_to_cls_dict llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore +llm_type_to_cls_dict["llamacpp"] = LlamaCpp # type: ignore ## Chain chain_type_to_cls_dict: dict[str, Any] = { diff --git a/src/frontend/package.json b/src/frontend/package.json index b669569e3..84e9ad595 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -59,5 +59,5 @@ "last 1 safari version" ] }, - "proxy": "http://backend:7860" + "proxy": "http://127.0.0.1:5003" } \ No newline at end of file