Fix to Anthropic LLM (#1178)
This commit is contained in:
commit
b5c4e2dbc8
4 changed files with 83 additions and 9 deletions
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "langflow"
|
||||
version = "0.5.11"
|
||||
version = "0.5.12"
|
||||
description = "A Python package with a built-in web application"
|
||||
authors = ["Logspace <contact@logspace.ai>"]
|
||||
maintainers = [
|
||||
|
|
|
|||
71
src/backend/langflow/components/llms/AnthropicLLM.py
Normal file
71
src/backend/langflow/components/llms/AnthropicLLM.py
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
from typing import Optional
|
||||
from langflow import CustomComponent
|
||||
from langchain.chat_models.anthropic import ChatAnthropic
|
||||
from langchain.llms.base import BaseLLM
|
||||
|
||||
|
||||
class AnthropicLLM(CustomComponent):
    """Langflow component wrapping Anthropic's chat models via LangChain.

    Exposes model selection, API key, sampling parameters, and an optional
    custom API endpoint, and builds a ``ChatAnthropic`` instance from them.
    """

    display_name: str = "AnthropicLLM"
    description: str = "Anthropic Chat&Completion large language models."

    def build_config(self):
        """Return the UI field configuration for this component."""
        # NOTE(review): list appears to track Anthropic's legacy model names;
        # extend as new models ship.
        model_options = [
            "claude-2.1",
            "claude-2.0",
            "claude-instant-1.2",
            "claude-instant-1",
            # Add more models as needed
        ]
        return {
            "model": {
                "display_name": "Model Name",
                "options": model_options,
                "info": "https://python.langchain.com/docs/integrations/chat/anthropic",
                "required": True,
                "value": "claude-2.1",
            },
            "anthropic_api_key": {
                "display_name": "Anthropic API Key",
                "required": True,
                "password": True,
                "info": "Your Anthropic API key.",
            },
            "max_tokens": {
                "display_name": "Max Tokens",
                "field_type": "int",
                "value": 256,
            },
            "temperature": {
                "display_name": "Temperature",
                "field_type": "float",
                "value": 0.7,
            },
            "api_endpoint": {
                "display_name": "API Endpoint",
                "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
            },
            "code": {"show": False},
        }

    def build(
        self,
        model: str,
        anthropic_api_key: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: Optional[float] = None,
        api_endpoint: Optional[str] = None,
    ) -> BaseLLM:
        """Instantiate and return the configured ChatAnthropic model.

        Args:
            model: Anthropic model identifier (e.g. "claude-2.1").
            anthropic_api_key: API key passed through to the client.
            max_tokens: Forwarded as ``max_tokens_to_sample``.
            temperature: Sampling temperature.
            api_endpoint: Optional override of the Anthropic API base URL.

        Raises:
            ValueError: if constructing the ChatAnthropic client fails.
        """
        # Empty/None endpoint falls back to the public Anthropic API.
        resolved_endpoint = api_endpoint or "https://api.anthropic.com"

        # NOTE(review): return annotation says BaseLLM but ChatAnthropic is a
        # chat model; kept as-is since langflow may key field wiring off it.
        try:
            return ChatAnthropic(
                model=model,
                anthropic_api_key=anthropic_api_key,
                max_tokens_to_sample=max_tokens,
                temperature=temperature,
                anthropic_api_url=resolved_endpoint,
            )
        except Exception as exc:
            raise ValueError("Could not connect to Anthropic API.") from exc
|
||||
|
|
@ -117,10 +117,10 @@ llms:
|
|||
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers"
|
||||
Cohere:
|
||||
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere"
|
||||
Anthropic:
|
||||
documentation: ""
|
||||
ChatAnthropic:
|
||||
documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic"
|
||||
# Anthropic:
|
||||
# documentation: ""
|
||||
# ChatAnthropic:
|
||||
# documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic"
|
||||
HuggingFaceHub:
|
||||
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/huggingface_hub"
|
||||
VertexAI:
|
||||
|
|
@ -271,8 +271,8 @@ vectorstores:
|
|||
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant"
|
||||
Weaviate:
|
||||
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/weaviate"
|
||||
# FAISS:
|
||||
# documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss"
|
||||
# FAISS:
|
||||
# documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss"
|
||||
Pinecone:
|
||||
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/pinecone"
|
||||
SupabaseVectorStore:
|
||||
|
|
|
|||
|
|
@ -6,8 +6,11 @@ from langchain.chains.base import Chain
|
|||
from langchain.schema import AgentAction, Document
|
||||
from langchain.vectorstores.base import VectorStore
|
||||
from langflow.graph import Graph
|
||||
from langflow.interface.run import (build_sorted_vertices, get_memory_key,
|
||||
update_memory_keys)
|
||||
from langflow.interface.run import (
|
||||
build_sorted_vertices,
|
||||
get_memory_key,
|
||||
update_memory_keys,
|
||||
)
|
||||
from langflow.services.getters import get_session_service
|
||||
from loguru import logger
|
||||
from pydantic import BaseModel
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue