Migrate to langchain_anthropic and add ChatAnthropic enhancements (#1551)

Migrate to langchain_anthropic and enhance ChatAnthropic functionality

- Replace langchain_community with langchain_anthropic for ChatAnthropic
- Add support for selecting the Claude model in ChatAnthropic
- Implement top_p and top_k parameter support in ChatAnthropic
This commit is contained in:
Daiki Sakai 2024-03-22 20:47:34 +09:00 committed by GitHub
commit 0b380afb66
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 61 additions and 44 deletions

69
poetry.lock generated
View file

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]]
name = "aiohttp"
@ -167,13 +167,13 @@ files = [
[[package]]
name = "anthropic"
version = "0.15.1"
version = "0.21.3"
description = "The official Python library for the anthropic API"
optional = false
python-versions = ">=3.7"
files = [
{file = "anthropic-0.15.1-py3-none-any.whl", hash = "sha256:50344141ba12580dac829acc1a6921905e975393cca16c99b796a63903e997b9"},
{file = "anthropic-0.15.1.tar.gz", hash = "sha256:f188037c09a86c993196967a7c4ca7b0c30a7f51b261a9360528b5104069c088"},
{file = "anthropic-0.21.3-py3-none-any.whl", hash = "sha256:5869115453b543a46ded6515c9f29b8d610b6e94bbba3230ad80ac947d2b0862"},
{file = "anthropic-0.21.3.tar.gz", hash = "sha256:02f1ab5694c497e2b2d42d30d51a4f2edcaca92d2ec86bb64fe78a9c7434a869"},
]
[package.dependencies]
@ -1279,6 +1279,17 @@ files = [
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "defusedxml"
version = "0.7.1"
description = "XML bomb protection for Python stdlib modules"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
{file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
{file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
]
[[package]]
name = "deprecated"
version = "1.2.14"
@ -3474,6 +3485,22 @@ openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"]
qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
[[package]]
name = "langchain-anthropic"
version = "0.1.4"
description = "An integration package connecting AnthropicMessages and LangChain"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchain_anthropic-0.1.4-py3-none-any.whl", hash = "sha256:9b3e28c1c0f7a502495b240c6c015d7fc57d04fb381fae389ecdce8847de5777"},
{file = "langchain_anthropic-0.1.4.tar.gz", hash = "sha256:d772f7111335953d23393cac8173a0a1ee65b5fe0dc137c6b7a6db2a06fbcac4"},
]
[package.dependencies]
anthropic = ">=0.17.0,<1"
defusedxml = ">=0.7.1,<0.8.0"
langchain-core = ">=0.1,<0.2"
[[package]]
name = "langchain-community"
version = "0.0.24"
@ -4694,6 +4721,7 @@ optional = true
python-versions = ">=3"
files = [
{file = "nvidia_nvjitlink_cu12-12.3.101-py3-none-manylinux1_x86_64.whl", hash = "sha256:64335a8088e2b9d196ae8665430bc6a2b7e6ef2eb877a9c735c804bd4ff6467c"},
{file = "nvidia_nvjitlink_cu12-12.3.101-py3-none-manylinux2014_aarch64.whl", hash = "sha256:211a63e7b30a9d62f1a853e19928fbb1a750e3f17a13a3d1f98ff0ced19478dd"},
{file = "nvidia_nvjitlink_cu12-12.3.101-py3-none-win_amd64.whl", hash = "sha256:1b2e317e437433753530792f13eece58f0aec21a2b05903be7bffe58a606cbd1"},
]
@ -8312,20 +8340,6 @@ files = [
[package.dependencies]
types-urllib3 = "*"
[[package]]
name = "types-requests"
version = "2.31.0.20240218"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-requests-2.31.0.20240218.tar.gz", hash = "sha256:f1721dba8385958f504a5386240b92de4734e047a08a40751c1654d1ac3349c5"},
{file = "types_requests-2.31.0.20240218-py3-none-any.whl", hash = "sha256:a82807ec6ddce8f00fe0e949da6d6bc1fbf1715420218a9640d695f70a9e5a9b"},
]
[package.dependencies]
urllib3 = ">=2"
[[package]]
name = "types-urllib3"
version = "1.26.25.14"
@ -8539,23 +8553,6 @@ brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotl
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "urllib3"
version = "2.0.7"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.7"
files = [
{file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
{file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "uvicorn"
version = "0.27.1"
@ -9211,4 +9208,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
content-hash = "25c6686705b9e1a5a01b48c85bfc8e04592f1cf91d69ff47d421d9d7a895e1df"
content-hash = "1c79f55577a8fa603c09b2b7c02a01a7a397960c6c9c134dcee7e2f6eeea3d9e"

View file

@ -60,7 +60,6 @@ cohere = "^4.47.0"
python-multipart = "^0.0.7"
sqlmodel = "^0.0.14"
faiss-cpu = "^1.7.4"
anthropic = "^0.15.0"
orjson = "3.9.15"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
@ -105,6 +104,8 @@ elasticsearch = "^8.12.0"
pytube = "^15.0.0"
llama-index = "0.9.48"
langchain-openai = "^0.0.6"
urllib3 = "<2"
langchain-anthropic = "^0.1.4"
[tool.poetry.group.dev.dependencies]
pytest-asyncio = "^0.23.1"

View file

@ -2,7 +2,9 @@ from pydantic.v1.types import SecretStr
from langflow import CustomComponent
from typing import Optional, Union, Callable
from langflow.field_typing import BaseLanguageModel
from langchain_community.chat_models.anthropic import ChatAnthropic
# from langchain_community.chat_models.anthropic import ChatAnthropic
from langchain_anthropic import ChatAnthropic
class ChatAnthropicComponent(CustomComponent):
@ -17,31 +19,48 @@ class ChatAnthropicComponent(CustomComponent):
"field_type": "str",
"password": True,
},
"anthropic_api_url": {
"display_name": "Anthropic API URL",
"field_type": "str",
},
"model_kwargs": {
"display_name": "Model Kwargs",
"field_type": "dict",
"advanced": True,
},
"model_name": {
"display_name": "Model Name",
"field_type": "str",
"advanced": False,
"required": False,
"options": ["claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307"],
},
"temperature": {
"display_name": "Temperature",
"field_type": "float",
},
"max_tokens": {
"display_name": "Max Tokens",
"field_type": "int",
"advanced": False,
"required": False,
},
"top_k": {"display_name": "Top K", "field_type": "int", "advanced": True},
"top_p": {"display_name": "Top P", "field_type": "float", "advanced": True},
}
def build(
    self,
    anthropic_api_key: str,
    anthropic_api_url: Optional[str] = None,
    model_kwargs: Optional[dict] = None,
    model_name: str = "claude-3-opus-20240229",
    temperature: Optional[float] = None,
    max_tokens: Optional[int] = 1024,
    top_k: Optional[int] = None,
    top_p: Optional[float] = None,
) -> Union[BaseLanguageModel, Callable]:
    """Build a ``ChatAnthropic`` chat model from the component's inputs.

    Args:
        anthropic_api_key: Anthropic API key; wrapped in ``SecretStr`` so it
            is not leaked through reprs/logs.
        anthropic_api_url: Optional override for the Anthropic API base URL.
        model_kwargs: Extra keyword arguments forwarded to the model.
            Defaults to ``None`` (treated as an empty dict) — a ``dict``
            default here would be a shared mutable default argument.
        model_name: Claude model identifier (see the ``options`` offered in
            ``build_config``).
        temperature: Sampling temperature; ``None`` uses the library default.
        max_tokens: Maximum number of tokens to generate.
        top_k: Top-k sampling cutoff; ``None`` uses the library default.
        top_p: Nucleus-sampling probability mass; ``None`` uses the library
            default.

    Returns:
        A configured ``ChatAnthropic`` instance.
    """
    return ChatAnthropic(
        anthropic_api_key=SecretStr(anthropic_api_key),
        anthropic_api_url=anthropic_api_url,
        # `or {}` keeps the previous behavior (empty kwargs) without the
        # mutable-default-argument pitfall.
        model_kwargs=model_kwargs or {},
        model_name=model_name,
        temperature=temperature,
        max_tokens=max_tokens,
        top_k=top_k,
        top_p=top_p,
    )