diff --git a/README.md b/README.md
index 133f41a6c..3b9367577 100644
--- a/README.md
+++ b/README.md
@@ -143,7 +143,7 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
 
 # 🎨 Creating Flows
 
-Creating flows with Langflow is easy. Simply drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://docs.langchain.com/docs/category/components) to choose from, including LLMs, prompt serializers, agents, and chains.
+Creating flows with Langflow is easy. Simply drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://python.langchain.com/docs/integrations/components) to choose from, including LLMs, prompt serializers, agents, and chains.
 
 Explore by editing prompt parameters, link chains and agents, track an agent's thought process, and export your flow.
 
diff --git a/cdk.Dockerfile b/cdk.Dockerfile
index 44ccf0acb..670ae49bd 100644
--- a/cdk.Dockerfile
+++ b/cdk.Dockerfile
@@ -17,4 +17,4 @@ RUN poetry config virtualenvs.create false && poetry install --no-interaction --
 
 RUN poetry add pymysql==1.0.2
 
-CMD ["uvicorn", "--factory", "src.backend.langflow.main:create_app", "--host", "0.0.0.0", "--port", "7860", "--reload", "--log-level", "debug"]
+CMD ["sh", "./container-cmd-cdk.sh"]
diff --git a/container-cmd-cdk.sh b/container-cmd-cdk.sh
new file mode 100644
index 000000000..3ac6400d8
--- /dev/null
+++ b/container-cmd-cdk.sh
@@ -0,0 +1,3 @@
+export LANGFLOW_DATABASE_URL="mysql+pymysql://${username}:${password}@${host}:3306/${dbname}"
+# echo $LANGFLOW_DATABASE_URL
+uvicorn --factory src.backend.langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --log-level debug
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index 8799c90aa..7616600da 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -6,7 +6,7 @@ services:
   backend:
     build:
       context: ./
-      dockerfile: ./cdk.Dockerfile
+      dockerfile: ./dev.Dockerfile
     env_file:
       - .env
     ports:
diff --git a/src/backend/langflow/components/llms/AnthropicLLM.py b/src/backend/langflow/components/llms/AnthropicLLM.py
new file mode 100644
index 000000000..1f75ba39f
--- /dev/null
+++ b/src/backend/langflow/components/llms/AnthropicLLM.py
@@ -0,0 +1,71 @@
+from typing import Optional
+from langflow import CustomComponent
+from langchain.chat_models.anthropic import ChatAnthropic
+from langchain.llms.base import BaseLLM
+
+
+class AnthropicLLM(CustomComponent):
+    display_name: str = "AnthropicLLM"
+    description: str = "Anthropic Chat&Completion large language models."
+
+    def build_config(self):
+        return {
+            "model": {
+                "display_name": "Model Name",
+                "options": [
+                    "claude-2.1",
+                    "claude-2.0",
+                    "claude-instant-1.2",
+                    "claude-instant-1"
+                    # Add more models as needed
+                ],
+                "info": "https://python.langchain.com/docs/integrations/chat/anthropic",
+                "required": True,
+                "value": "claude-2.1",
+            },
+            "anthropic_api_key": {
+                "display_name": "Anthropic API Key",
+                "required": True,
+                "password": True,
+                "info": "Your Anthropic API key.",
+            },
+            "max_tokens": {
+                "display_name": "Max Tokens",
+                "field_type": "int",
+                "value": 256,
+            },
+            "temperature": {
+                "display_name": "Temperature",
+                "field_type": "float",
+                "value": 0.7,
+            },
+            "api_endpoint": {
+                "display_name": "API Endpoint",
+                "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
+            },
+            "code": {"show": False},
+        }
+
+    def build(
+        self,
+        model: str,
+        anthropic_api_key: Optional[str] = None,
+        max_tokens: Optional[int] = None,
+        temperature: Optional[float] = None,
+        api_endpoint: Optional[str] = None,
+    ) -> BaseLLM:
+        # Set default API endpoint if not provided
+        if not api_endpoint:
+            api_endpoint = "https://api.anthropic.com"
+
+        try:
+            output = ChatAnthropic(
+                model=model,
+                anthropic_api_key=anthropic_api_key,
+                max_tokens_to_sample=max_tokens,
+                temperature=temperature,
+                anthropic_api_url=api_endpoint,
+            )
+        except Exception as e:
+            raise ValueError("Could not connect to Anthropic API.") from e
+        return output
diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py
index 978c834d0..092928a31 100644
--- a/src/backend/langflow/processing/process.py
+++ b/src/backend/langflow/processing/process.py
@@ -1,20 +1,16 @@
 import json
 from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple, Union
+
 from langchain.agents import AgentExecutor
-from langchain.schema import AgentAction
-from langflow.interface.run import (
-    build_sorted_vertices,
-    get_memory_key,
-    update_memory_keys,
-)
+from langchain.chains.base import Chain
+from langchain.schema import AgentAction, Document
+from langchain.vectorstores.base import VectorStore
+from langflow.graph import Graph
+from langflow.interface.run import (build_sorted_vertices, get_memory_key,
+                                    update_memory_keys)
 from langflow.services.getters import get_session_service
 from loguru import logger
-from langflow.graph import Graph
-from langchain.chains.base import Chain
-from langchain.vectorstores.base import VectorStore
-from typing import Any, Dict, List, Optional, Tuple, Union
-from langchain.schema import Document
-
 from pydantic import BaseModel
 
 
@@ -129,7 +125,9 @@ def process_inputs(inputs: Optional[dict], artifacts: Dict[str, Any]) -> dict:
         inputs = {}
 
     for key, value in artifacts.items():
-        if key not in inputs or not inputs[key]:
+        if key == "repr":
+            continue
+        elif key not in inputs or not inputs[key]:
             inputs[key] = value
 
     return inputs