Merge branch 'aws-cdk-dev-dockerfile' into aws-cdk-dev2
This commit is contained in:
commit
e9f1f3217e
6 changed files with 88 additions and 16 deletions
|
|
@@ -143,7 +143,7 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
|
|||
|
||||
# 🎨 Creating Flows
|
||||
|
||||
Creating flows with Langflow is easy. Simply drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://docs.langchain.com/docs/category/components) to choose from, including LLMs, prompt serializers, agents, and chains.
|
||||
Creating flows with Langflow is easy. Simply drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://python.langchain.com/docs/integrations/components) to choose from, including LLMs, prompt serializers, agents, and chains.
|
||||
|
||||
Explore by editing prompt parameters, link chains and agents, track an agent's thought process, and export your flow.
|
||||
|
||||
|
|
|
|||
|
|
@@ -17,4 +17,4 @@ RUN poetry config virtualenvs.create false && poetry install --no-interaction --
|
|||
|
||||
RUN poetry add pymysql==1.0.2
|
||||
|
||||
CMD ["uvicorn", "--factory", "src.backend.langflow.main:create_app", "--host", "0.0.0.0", "--port", "7860", "--reload", "--log-level", "debug"]
|
||||
CMD ["sh", "./container-cmd-cdk.sh"]
|
||||
|
|
|
|||
3
container-cmd-cdk.sh
Normal file
3
container-cmd-cdk.sh
Normal file
|
|
@@ -0,0 +1,3 @@
|
|||
# Assemble the SQLAlchemy URL for the CDK-provisioned MySQL database.
# Assumes username, password, host and dbname are injected into the
# environment by the task definition / secrets — TODO confirm against the CDK stack.
export LANGFLOW_DATABASE_URL="mysql+pymysql://${username}:${password}@${host}:3306/${dbname}"
# echo $LANGFLOW_DATABASE_URL
# exec replaces the shell with uvicorn so the server becomes the container's
# main process and receives stop signals (SIGTERM) directly; without it the
# signal stops only the wrapping sh and uvicorn is killed ungracefully.
exec uvicorn --factory src.backend.langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --log-level debug
|
||||
|
|
@@ -6,7 +6,7 @@ services:
|
|||
backend:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./cdk.Dockerfile
|
||||
dockerfile: ./dev.Dockerfile
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
|
|
|
|||
71
src/backend/langflow/components/llms/AnthropicLLM.py
Normal file
71
src/backend/langflow/components/llms/AnthropicLLM.py
Normal file
|
|
@@ -0,0 +1,71 @@
|
|||
from typing import Optional
|
||||
from langflow import CustomComponent
|
||||
from langchain.chat_models.anthropic import ChatAnthropic
|
||||
from langchain.llms.base import BaseLLM
|
||||
|
||||
|
||||
class AnthropicLLM(CustomComponent):
    """Langflow component exposing Anthropic's chat/completion models.

    ``build_config`` describes the fields rendered in the Langflow UI;
    ``build`` instantiates the underlying LangChain ``ChatAnthropic`` model.
    """

    display_name: str = "AnthropicLLM"
    description: str = "Anthropic Chat&Completion large language models."

    def build_config(self):
        """Return the UI field configuration for this component."""
        model_field = {
            "display_name": "Model Name",
            "options": [
                "claude-2.1",
                "claude-2.0",
                "claude-instant-1.2",
                "claude-instant-1",
                # Add more models as needed
            ],
            "info": "https://python.langchain.com/docs/integrations/chat/anthropic",
            "required": True,
            "value": "claude-2.1",
        }
        api_key_field = {
            "display_name": "Anthropic API Key",
            "required": True,
            "password": True,  # masked in the UI
            "info": "Your Anthropic API key.",
        }
        max_tokens_field = {
            "display_name": "Max Tokens",
            "field_type": "int",
            "value": 256,
        }
        temperature_field = {
            "display_name": "Temperature",
            "field_type": "float",
            "value": 0.7,
        }
        endpoint_field = {
            "display_name": "API Endpoint",
            "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
        }
        return {
            "model": model_field,
            "anthropic_api_key": api_key_field,
            "max_tokens": max_tokens_field,
            "temperature": temperature_field,
            "api_endpoint": endpoint_field,
            # Hide the raw-code editor for this component.
            "code": {"show": False},
        }

    def build(
        self,
        model: str,
        anthropic_api_key: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: Optional[float] = None,
        api_endpoint: Optional[str] = None,
    ) -> BaseLLM:
        """Instantiate a ``ChatAnthropic`` model from the configured fields.

        Raises:
            ValueError: if constructing the LangChain model fails for any reason
                (chained from the underlying exception).
        """
        # An empty/None endpoint falls back to Anthropic's public API.
        endpoint = api_endpoint or "https://api.anthropic.com"
        try:
            chat_model = ChatAnthropic(
                model=model,
                anthropic_api_key=anthropic_api_key,
                max_tokens_to_sample=max_tokens,
                temperature=temperature,
                anthropic_api_url=endpoint,
            )
        except Exception as e:
            raise ValueError("Could not connect to Anthropic API.") from e
        return chat_model
|
||||
|
|
@@ -1,20 +1,16 @@
|
|||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||
|
||||
from langchain.agents import AgentExecutor
|
||||
from langchain.schema import AgentAction
|
||||
from langflow.interface.run import (
|
||||
build_sorted_vertices,
|
||||
get_memory_key,
|
||||
update_memory_keys,
|
||||
)
|
||||
from langchain.chains.base import Chain
|
||||
from langchain.schema import AgentAction, Document
|
||||
from langchain.vectorstores.base import VectorStore
|
||||
from langflow.graph import Graph
|
||||
from langflow.interface.run import (build_sorted_vertices, get_memory_key,
|
||||
update_memory_keys)
|
||||
from langflow.services.getters import get_session_service
|
||||
from loguru import logger
|
||||
from langflow.graph import Graph
|
||||
from langchain.chains.base import Chain
|
||||
from langchain.vectorstores.base import VectorStore
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||
from langchain.schema import Document
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
|
|
@@ -129,7 +125,9 @@ def process_inputs(inputs: Optional[dict], artifacts: Dict[str, Any]) -> dict:
|
|||
inputs = {}
|
||||
|
||||
for key, value in artifacts.items():
|
||||
if key not in inputs or not inputs[key]:
|
||||
if key == "repr":
|
||||
continue
|
||||
elif key not in inputs or not inputs[key]:
|
||||
inputs[key] = value
|
||||
|
||||
return inputs
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue