diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx index b07f953fa..bdf631314 100644 --- a/docs/docs/components/custom.mdx +++ b/docs/docs/components/custom.mdx @@ -37,7 +37,7 @@ The CustomComponent class serves as the foundation for creating custom component | _`langflow.field_typing.Prompt`_ | | _`langchain.chains.base.Chain`_ | | _`langchain.PromptTemplate`_ | - | _`langchain.llms.base.BaseLLM`_ | + | _`langchain.schema.language_model.BaseLanguageModel`_ | | _`langchain.Tool`_ | | _`langchain.document_loaders.base.BaseLoader`_ | | _`langchain.schema.Document`_ | diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx index 9e09cea59..6decb3833 100644 --- a/docs/docs/guidelines/custom-component.mdx +++ b/docs/docs/guidelines/custom-component.mdx @@ -131,7 +131,7 @@ class MyComponent(CustomComponent): --- -The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLLM, or basic Python types). Check out all supported types in the [component reference](../components/custom). +The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLanguageModel, or basic Python types). Check out all supported types in the [component reference](../components/custom). 
```python from langflow.custom import CustomComponent diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py index b8e4d90a2..5f508731f 100644 --- a/src/backend/base/langflow/__main__.py +++ b/src/backend/base/langflow/__main__.py @@ -21,8 +21,6 @@ from langflow.main import setup_app from langflow.services.database.utils import session_getter from langflow.services.deps import get_db_service, get_settings_service from langflow.services.utils import initialize_services, initialize_settings_service -from langflow.utils.logger import configure, logger - initialize_settings_service) from langflow.utils.logger import configure, logger console = Console() @@ -102,12 +100,8 @@ def update_settings( @app.command() def run( - host: str = typer.Option( - "127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST" - ), - workers: int = typer.Option( - 1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS" - ), + host: str = typer.Option("127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"), + workers: int = typer.Option(1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"), timeout: int = typer.Option(300, help="Worker timeout in seconds."), port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"), components_path: Optional[Path] = typer.Option( @@ -115,19 +109,11 @@ def run( help="Path to the directory containing custom components.", envvar="LANGFLOW_COMPONENTS_PATH", ), - config: str = typer.Option( - Path(__file__).parent / "config.yaml", help="Path to the configuration file." - ), + config: str = typer.Option(Path(__file__).parent / "config.yaml", help="Path to the configuration file."), # .env file param - env_file: Path = typer.Option( - None, help="Path to the .env file containing environment variables." 
- ), - log_level: str = typer.Option( - "critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL" - ), - log_file: Path = typer.Option( - "logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE" - ), + env_file: Path = typer.Option(None, help="Path to the .env file containing environment variables."), + log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), + log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"), cache: Optional[str] = typer.Option( envvar="LANGFLOW_LANGCHAIN_CACHE", help="Type of cache to use. (InMemoryCache, SQLiteCache)", @@ -221,9 +207,7 @@ def wait_for_server_ready(host, port): def run_on_mac_or_linux(host, port, log_level, options, app): - webapp_process = Process( - target=run_langflow, args=(host, port, log_level, options, app) - ) + webapp_process = Process(target=run_langflow, args=(host, port, log_level, options, app)) webapp_process.start() wait_for_server_ready(host, port) @@ -319,9 +303,7 @@ def build_new_version_notice(current_version: str, package_name: str): f"A new pre-release version of {package_name} is available: {latest_version}", ) else: - latest_version = httpx.get(f"https://pypi.org/pypi/{package_name}/json").json()[ - "info" - ]["version"] + latest_version = httpx.get(f"https://pypi.org/pypi/{package_name}/json").json()["info"]["version"] if not version_is_prerelease(latest_version): return ( False, @@ -345,9 +327,7 @@ def fetch_latest_version(package_name: str, include_prerelease: bool) -> str: def build_version_notice(current_version: str, package_name: str) -> str: latest_version = fetch_latest_version(package_name, is_prerelease(current_version)) - if latest_version and pkg_version.parse(current_version) < pkg_version.parse( - latest_version - ): + if latest_version and pkg_version.parse(current_version) < pkg_version.parse(latest_version): release_type = "pre-release" if 
is_prerelease(latest_version) else "version" return f"A new {release_type} of {package_name} is available: {latest_version}" return "" @@ -396,9 +376,7 @@ def print_banner(host: str, port: int): from importlib import metadata langflow_base_version = metadata.version("langflow-base") - is_pre_release |= is_prerelease( - langflow_base_version - ) # Update pre-release status + is_pre_release |= is_prerelease(langflow_base_version) # Update pre-release status notice = build_version_notice(langflow_base_version, "langflow-base") notice = stylize_text(notice, "langflow-base", is_pre_release) if notice: @@ -417,9 +395,7 @@ def print_banner(host: str, port: int): notices.append(f"Run '{pip_command}' to update.") styled_notices = [f"[bold]{notice}[/bold]" for notice in notices if notice] - styled_package_name = stylize_text( - package_name, package_name, any("pre-release" in notice for notice in notices) - ) + styled_package_name = stylize_text(package_name, package_name, any("pre-release" in notice for notice in notices)) title = f"[bold]Welcome to :chains: {styled_package_name}[/bold]\n" info_text = "Collaborate, and contribute at our [bold][link=https://github.com/langflow-ai/langflow]GitHub Repo[/link][/bold] :rocket:" @@ -462,12 +438,8 @@ def run_langflow(host, port, log_level, options, app): @app.command() def superuser( username: str = typer.Option(..., prompt=True, help="Username for the superuser."), - password: str = typer.Option( - ..., prompt=True, hide_input=True, help="Password for the superuser." - ), - log_level: str = typer.Option( - "error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL" - ), + password: str = typer.Option(..., prompt=True, hide_input=True, help="Password for the superuser."), + log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), ): """ Create a superuser. 
@@ -494,11 +466,23 @@ def superuser( @app.command() -def migration(test: bool = typer.Option(True, help="Run migrations in test mode.")): +def migration( + test: bool = typer.Option(True, help="Run migrations in test mode."), + fix: bool = typer.Option( + False, + help="Fix migrations. This is a destructive operation, and should only be used if you know what you are doing.", + ), +): """ Run or test migrations. """ - initialize_services() + if fix: + if not typer.confirm( + "This will delete all data necessary to fix migrations. Are you sure you want to continue?" + ): + raise typer.Abort() + + initialize_services(fix_migration=fix) db_service = get_db_service() if not test: db_service.run_migrations() diff --git a/src/backend/base/langflow/base/tools/__init__.py b/src/backend/base/langflow/base/tools/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/base/langflow/base/tools/base.py b/src/backend/base/langflow/base/tools/base.py new file mode 100644 index 000000000..e1c4d5fdc --- /dev/null +++ b/src/backend/base/langflow/base/tools/base.py @@ -0,0 +1,23 @@ +from langflow.field_typing import Tool + + +def build_status_from_tool(tool: Tool): + """ + Builds a status string representation of a tool. + + Args: + tool (Tool): The tool object to build the status for. + + Returns: + str: The status string representation of the tool, including its name, description, and arguments (if any). 
+ """ + description_repr = repr(tool.description).strip("'") + args_str = "\n".join( + [ + f"- {arg_name}: {arg_data['description']}" + for arg_name, arg_data in tool.args.items() + if "description" in arg_data + ] + ) + status = f"Name: {tool.name}\nDescription: {description_repr}" + return status + (f"\nArguments:\n{args_str}" if args_str else "") diff --git a/src/backend/base/langflow/components/agents/XMLAgent.py b/src/backend/base/langflow/components/agents/XMLAgent.py index 687bfff6f..117399af9 100644 --- a/src/backend/base/langflow/components/agents/XMLAgent.py +++ b/src/backend/base/langflow/components/agents/XMLAgent.py @@ -4,7 +4,7 @@ from langchain.agents import create_xml_agent from langchain_core.prompts import PromptTemplate from langflow.base.agents.agent import LCAgentComponent -from langflow.field_typing import BaseLLM, BaseMemory, Text, Tool +from langflow.field_typing import BaseLanguageModel, BaseMemory, Text, Tool class XMLAgentComponent(LCAgentComponent): @@ -66,7 +66,7 @@ class XMLAgentComponent(LCAgentComponent): async def build( self, input_value: str, - llm: BaseLLM, + llm: BaseLanguageModel, tools: List[Tool], prompt: str, memory: Optional[BaseMemory] = None, diff --git a/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py index 1a18f5c08..ff36820f5 100644 --- a/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py +++ b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py @@ -1,6 +1,5 @@ from typing import Optional - -from langchain.llms.base import BaseLLM +from langflow.field_typing import BaseLanguageModel from langchain_community.llms.bedrock import Bedrock from langflow.interface.custom.custom_component import CustomComponent @@ -46,7 +45,7 @@ class AmazonBedrockComponent(CustomComponent): endpoint_url: Optional[str] = None, streaming: bool = False, cache: Optional[bool] = None, - ) -> BaseLLM: + ) -> 
BaseLanguageModel: try: output = Bedrock( credentials_profile_name=credentials_profile_name, diff --git a/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py index 2ea78162b..016eaeb2d 100644 --- a/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py +++ b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py @@ -1,14 +1,14 @@ from typing import Optional from langchain.llms.base import BaseLanguageModel -from langchain_community.chat_models.anthropic import ChatAnthropic +from langchain_anthropic import ChatAnthropic from pydantic.v1 import SecretStr from langflow.interface.custom.custom_component import CustomComponent -class AnthropicLLM(CustomComponent): - display_name: str = "AnthropicLLM" +class ChatAntropicSpecsComponent(CustomComponent): + display_name: str = "Anthropic" description: str = "Anthropic Chat&Completion large language models." icon = "Anthropic" diff --git a/src/backend/base/langflow/components/model_specs/AnthropicSpecs.py b/src/backend/base/langflow/components/model_specs/AnthropicSpecs.py deleted file mode 100644 index 23c284888..000000000 --- a/src/backend/base/langflow/components/model_specs/AnthropicSpecs.py +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Optional - -from langchain_community.llms.anthropic import Anthropic -from pydantic.v1 import SecretStr - -from langflow.field_typing import BaseLanguageModel, NestedDict -from langflow.interface.custom.custom_component import CustomComponent - - -class AnthropicComponent(CustomComponent): - display_name = "Anthropic" - description = "Anthropic large language models." 
- icon = "Anthropic" - - def build_config(self): - return { - "anthropic_api_key": { - "display_name": "Anthropic API Key", - "type": str, - "password": True, - }, - "anthropic_api_url": { - "display_name": "Anthropic API URL", - "type": str, - }, - "model_kwargs": { - "display_name": "Model Kwargs", - "field_type": "NestedDict", - "advanced": True, - }, - "temperature": { - "display_name": "Temperature", - "field_type": "float", - }, - } - - def build( - self, - anthropic_api_key: str, - anthropic_api_url: str, - model_kwargs: NestedDict = {}, - temperature: Optional[float] = None, - ) -> BaseLanguageModel: - return Anthropic( - anthropic_api_key=SecretStr(anthropic_api_key), - anthropic_api_url=anthropic_api_url, - model_kwargs=model_kwargs, - temperature=temperature, - ) diff --git a/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py index 9c39acdf6..a60fb9a64 100644 --- a/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py +++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py @@ -1,9 +1,9 @@ from typing import Optional -from langchain.llms.base import BaseLLM from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint from pydantic.v1 import SecretStr +from langflow.field_typing import BaseLanguageModel from langflow.interface.custom.custom_component import CustomComponent @@ -79,7 +79,7 @@ class QianfanChatEndpointComponent(CustomComponent): temperature: Optional[float] = None, penalty_score: Optional[float] = None, endpoint: Optional[str] = None, - ) -> BaseLLM: + ) -> BaseLanguageModel: try: output = QianfanChatEndpoint( # type: ignore model=model, diff --git a/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py index acbf8ba28..21ed13e70 
100644 --- a/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py +++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py @@ -1,9 +1,9 @@ from typing import Optional from langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint -from langchain.llms.base import BaseLLM from langflow.interface.custom.custom_component import CustomComponent +from langflow.field_typing import BaseLanguageModel class QianfanLLMEndpointComponent(CustomComponent): @@ -78,7 +78,7 @@ class QianfanLLMEndpointComponent(CustomComponent): temperature: Optional[float] = None, penalty_score: Optional[float] = None, endpoint: Optional[str] = None, - ) -> BaseLLM: + ) -> BaseLanguageModel: try: output = QianfanLLMEndpoint( # type: ignore model=model, diff --git a/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py index a4a37b283..b03a9b737 100644 --- a/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py @@ -1,67 +1,89 @@ -from typing import Callable, Optional, Union +from typing import Optional -from langchain_community.chat_models.anthropic import ChatAnthropic +from langchain_anthropic import ChatAnthropic from pydantic.v1.types import SecretStr + from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel -class ChatAnthropicComponent(CustomComponent): - display_name = "ChatAnthropic" - description = "`Anthropic` chat large language models." - documentation = "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic" +class AnthropicLLM(CustomComponent): + display_name: str = "Anthropic" + description: str = "Generate text using Anthropic Chat&Completion LLMs." 
icon = "Anthropic" + field_order = [ + "model", + "anthropic_api_key", + "max_tokens", + "temperature", + "anthropic_api_url", + ] + def build_config(self): return { + "model": { + "display_name": "Model Name", + "options": [ + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", + "claude-2.1", + "claude-2.0", + "claude-instant-1.2", + "claude-instant-1", + ], + "info": "Name of the model to use.", + "required": True, + "value": "claude-3-opus-20240229", + }, "anthropic_api_key": { "display_name": "Anthropic API Key", - "field_type": "str", + "required": True, "password": True, - }, - "model_kwargs": { - "display_name": "Model Kwargs", - "field_type": "dict", - "advanced": True, - }, - "model_name": { - "display_name": "Model Name", - "field_type": "str", - "advanced": False, - "required": False, - "options": ["claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307"], - }, - "temperature": { - "display_name": "Temperature", - "field_type": "float", + "info": "Your Anthropic API key.", }, "max_tokens": { "display_name": "Max Tokens", "field_type": "int", - "advanced": False, - "required": False, + "advanced": True, + "value": 256, }, - "top_k": {"display_name": "Top K", "field_type": "int", "advanced": True}, - "top_p": {"display_name": "Top P", "field_type": "float", "advanced": True}, + "temperature": { + "display_name": "Temperature", + "field_type": "float", + "value": 0.1, + }, + "anthropic_api_url": { + "display_name": "Anthropic API URL", + "advanced": True, + "info": "Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.", + }, + "code": {"show": False}, } def build( self, - anthropic_api_key: str, - model_kwargs: dict = {}, - model_name: str = "claude-3-opus-20240229", + model: str, + anthropic_api_key: Optional[str] = None, + max_tokens: Optional[int] = None, temperature: Optional[float] = None, - max_tokens: Optional[int] = 1024, - top_k: Optional[int] = None, - top_p: Optional[float] = None, - ) -> Union[BaseLanguageModel, Callable]: - return ChatAnthropic( - anthropic_api_key=SecretStr(anthropic_api_key), - model_kwargs=model_kwargs, - model_name=model_name, - temperature=temperature, - max_tokens=max_tokens, # type: ignore - top_k=top_k, - top_p=top_p, - ) + anthropic_api_url: Optional[str] = None, + ) -> BaseLanguageModel: + # Set default API endpoint if not provided + if not anthropic_api_url: + anthropic_api_url = "https://api.anthropic.com" + + try: + output = ChatAnthropic( + model_name=model, + anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None), + max_tokens_to_sample=max_tokens, # type: ignore + temperature=temperature, + anthropic_api_url=anthropic_api_url, + ) + except Exception as e: + raise ValueError("Could not connect to Anthropic API.") from e + + return output diff --git a/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py index 439266fbb..840682f4d 100644 --- a/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Dict, Optional, Union +from typing import Any, Dict, Optional from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException from langflow.field_typing import BaseLanguageModel diff --git a/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py 
b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py index fcaf80965..704090620 100644 --- a/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py @@ -1,9 +1,9 @@ -from typing import Optional, Union +from typing import Optional -from langchain.llms import BaseLLM +from langflow.field_typing import BaseLanguageModel from langchain_community.chat_models.openai import ChatOpenAI -from langflow.field_typing import BaseLanguageModel, NestedDict +from langflow.field_typing import NestedDict from langflow.interface.custom.custom_component import CustomComponent @@ -68,7 +68,7 @@ class ChatOpenAIComponent(CustomComponent): openai_api_base: Optional[str] = None, openai_api_key: Optional[str] = None, temperature: float = 0.7, - ) -> Union[BaseLanguageModel, BaseLLM]: + ) -> BaseLanguageModel: if not openai_api_base: openai_api_base = "https://api.openai.com/v1" return ChatOpenAI( diff --git a/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py index fd8b5b427..f2c377546 100644 --- a/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py @@ -1,6 +1,5 @@ -from typing import List, Optional, Union +from typing import List, Optional -from langchain.llms import BaseLLM from langchain_community.chat_models.vertexai import ChatVertexAI from langchain_core.messages.base import BaseMessage @@ -74,7 +73,7 @@ class ChatVertexAIComponent(CustomComponent): top_k: int = 40, top_p: float = 0.95, verbose: bool = False, - ) -> Union[BaseLanguageModel, BaseLLM]: + ) -> BaseLanguageModel: return ChatVertexAI( credentials=credentials, examples=examples, diff --git a/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py 
b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py index c3e74b1bd..c16fb2c2c 100644 --- a/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py +++ b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py @@ -1,6 +1,6 @@ from typing import Optional -from langchain.llms.base import BaseLLM +from langflow.field_typing import BaseLanguageModel from langchain.llms.huggingface_endpoint import HuggingFaceEndpoint from langflow.interface.custom.custom_component import CustomComponent @@ -32,7 +32,7 @@ class HuggingFaceEndpointsComponent(CustomComponent): task: str = "text2text-generation", huggingfacehub_api_token: Optional[str] = None, model_kwargs: Optional[dict] = None, - ) -> BaseLLM: + ) -> BaseLanguageModel: try: output = HuggingFaceEndpoint( # type: ignore endpoint_url=endpoint_url, diff --git a/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py index 8d94467db..4ba5502d3 100644 --- a/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py +++ b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py @@ -1,6 +1,6 @@ from typing import List, Optional -from langchain.llms.base import BaseLLM +from langflow.field_typing import BaseLanguageModel from langchain_community.llms.ollama import Ollama from langflow.interface.custom.custom_component import CustomComponent @@ -118,7 +118,7 @@ class OllamaLLM(CustomComponent): tfs_z: Optional[float] = None, top_k: Optional[int] = None, top_p: Optional[int] = None, - ) -> BaseLLM: + ) -> BaseLanguageModel: if not base_url: base_url = "http://localhost:11434" diff --git a/src/backend/base/langflow/components/model_specs/VertexAISpecs.py b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py index f186f2ed0..c9664408d 100644 --- a/src/backend/base/langflow/components/model_specs/VertexAISpecs.py +++ 
b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py @@ -1,6 +1,6 @@ -from typing import Callable, Dict, Optional, Union +from typing import Dict, Optional -from langchain.llms import BaseLLM +from langflow.field_typing import BaseLanguageModel from langchain_community.llms.vertexai import VertexAI from langflow.interface.custom.custom_component import CustomComponent @@ -129,7 +129,7 @@ class VertexAIComponent(CustomComponent): top_p: float = 0.95, tuned_model_name: Optional[str] = None, verbose: bool = False, - ) -> Union[BaseLLM, Callable]: + ) -> BaseLanguageModel: return VertexAI( credentials=credentials, location=location, diff --git a/src/backend/base/langflow/components/model_specs/__init__.py b/src/backend/base/langflow/components/model_specs/__init__.py index 7bf6c2881..3bd847894 100644 --- a/src/backend/base/langflow/components/model_specs/__init__.py +++ b/src/backend/base/langflow/components/model_specs/__init__.py @@ -1,6 +1,6 @@ from .AmazonBedrockSpecs import AmazonBedrockComponent -from .AnthropicLLMSpecs import AnthropicLLM -from .AnthropicSpecs import AnthropicComponent +from .AnthropicLLMSpecs import ChatAntropicSpecsComponent + from .AzureChatOpenAISpecs import AzureChatOpenAISpecsComponent from .BaiduQianfanChatEndpointsSpecs import QianfanChatEndpointComponent from .BaiduQianfanLLMEndpointsSpecs import QianfanLLMEndpointComponent @@ -17,8 +17,7 @@ from .VertexAISpecs import VertexAIComponent __all__ = [ "AmazonBedrockComponent", - "AnthropicLLM", - "AnthropicComponent", + "ChatAntropicSpecsComponent", "AzureChatOpenAISpecsComponent", "QianfanChatEndpointComponent", "QianfanLLMEndpointComponent", diff --git a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py index 0dec1c028..8dd6d8579 100644 --- a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py +++ 
b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py @@ -1,8 +1,8 @@ -from typing import Callable, Optional, Union +from typing import Optional from langchain.retrievers import MultiQueryRetriever -from langflow.field_typing import BaseLLM, BaseRetriever, PromptTemplate +from langflow.field_typing import BaseRetriever, PromptTemplate, BaseLanguageModel from langflow.interface.custom.custom_component import CustomComponent @@ -39,11 +39,11 @@ class MultiQueryRetrieverComponent(CustomComponent): def build( self, - llm: BaseLLM, + llm: BaseLanguageModel, retriever: BaseRetriever, prompt: Optional[PromptTemplate] = None, parser_key: str = "lines", - ) -> Union[Callable, MultiQueryRetriever]: + ) -> MultiQueryRetriever: if not prompt: return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, parser_key=parser_key) else: diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py index 1775d816a..626a14fd8 100644 --- a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py +++ b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py @@ -1,5 +1,3 @@ -from typing import Callable, Union - from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo from langchain_community.vectorstores import VectorStore @@ -22,5 +20,5 @@ class VectorStoreInfoComponent(CustomComponent): vectorstore: VectorStore, description: str, name: str, - ) -> Union[VectorStoreInfo, Callable]: + ) -> VectorStoreInfo: return VectorStoreInfo(vectorstore=vectorstore, description=description, name=name) diff --git a/src/backend/base/langflow/components/tools/PythonREPLTool.py b/src/backend/base/langflow/components/tools/PythonREPLTool.py new file mode 100644 index 000000000..fa5cd5fc3 --- /dev/null +++ b/src/backend/base/langflow/components/tools/PythonREPLTool.py @@ -0,0 +1,68 @@ +import importlib + +from langchain.agents import Tool +from 
langchain_experimental.utilities import PythonREPL + +from langflow.base.tools.base import build_status_from_tool +from langflow.custom import CustomComponent + + +class PythonREPLToolComponent(CustomComponent): + display_name = "Python REPL Tool" + description = "A tool for running Python code in a REPL environment." + + def build_config(self): + return { + "name": {"display_name": "Name", "info": "The name of the tool."}, + "description": {"display_name": "Description", "info": "A description of the tool."}, + "global_imports": { + "display_name": "Global Imports", + "info": "A list of modules to import globally, e.g. ['math', 'numpy'].", + }, + } + + def get_globals(self, globals: list[str]) -> dict: + """ + Retrieves the global variables from the specified modules. + + Args: + globals (list[str]): A list of module names. + + Returns: + dict: A dictionary containing the global variables from the specified modules. + """ + global_dict = {} + for module in globals: + try: + module = importlib.import_module(module) + global_dict[module.__name__] = module + except ImportError: + print(f"Could not import module {module}") + return global_dict + + def build( + self, + name: str = "python_repl", + description: str = "A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`.", + global_imports: list[str] = ["math"], + ) -> Tool: + """ + Builds a Python REPL tool. + + Args: + name (str, optional): The name of the tool. Defaults to "python_repl". + description (str, optional): The description of the tool. Defaults to "A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`. ". + global_imports (list[str], optional): A list of global imports to be available in the Python REPL. Defaults to ["math"]. 
+ + Returns: + Tool: The built Python REPL tool. + """ + _globals = self.get_globals(global_imports) + python_repl = PythonREPL(_globals=_globals) + tool = Tool( + name=name, + description=description, + func=python_repl.run, + ) + self.status = build_status_from_tool(tool) + return tool diff --git a/src/backend/base/langflow/components/tools/__init__.py b/src/backend/base/langflow/components/tools/__init__.py index 27072c109..3d64a723c 100644 --- a/src/backend/base/langflow/components/tools/__init__.py +++ b/src/backend/base/langflow/components/tools/__init__.py @@ -1,5 +1,7 @@ +from .PythonREPLTool import PythonREPLToolComponent from .RetrieverTool import RetrieverToolComponent -from .SearchAPITool import SearchApiToolComponent from .SearchApi import SearchApi +from .SearchAPITool import SearchApiToolComponent -__all__ = ["RetrieverToolComponent", "SearchApiToolComponent", "SearchApi"] + +__all__ = ["RetrieverToolComponent", "SearchApiToolComponent", "SearchApi", "PythonREPLToolComponent"] diff --git a/src/backend/base/langflow/server.py b/src/backend/base/langflow/server.py index 6a1509dc0..05e19ecb3 100644 --- a/src/backend/base/langflow/server.py +++ b/src/backend/base/langflow/server.py @@ -35,11 +35,7 @@ class LangflowApplication(BaseApplication): super().__init__() def load_config(self): - config = { - key: value - for key, value in self.options.items() - if key in self.cfg.settings and value is not None - } + config = {key: value for key, value in self.options.items() if key in self.cfg.settings and value is not None} for key, value in config.items(): self.cfg.set(key.lower(), value) diff --git a/src/backend/base/langflow/services/database/service.py b/src/backend/base/langflow/services/database/service.py index 2b1112fc8..ac14f4d4d 100644 --- a/src/backend/base/langflow/services/database/service.py +++ b/src/backend/base/langflow/services/database/service.py @@ -1,6 +1,5 @@ from datetime import datetime import time -from datetime import datetime from 
pathlib import Path from typing import TYPE_CHECKING @@ -37,10 +36,7 @@ class DatabaseService(Service): def _create_engine(self) -> "Engine": """Create the engine for the database.""" settings_service = get_settings_service() - if ( - settings_service.settings.DATABASE_URL - and settings_service.settings.DATABASE_URL.startswith("sqlite") - ): + if settings_service.settings.DATABASE_URL and settings_service.settings.DATABASE_URL.startswith("sqlite"): connect_args = {"check_same_thread": False} else: connect_args = {} @@ -52,9 +48,7 @@ class DatabaseService(Service): def __exit__(self, exc_type, exc_value, traceback): if exc_type is not None: # If an exception has been raised - logger.error( - f"Session rollback because of exception: {exc_type.__name__} {exc_value}" - ) + logger.error(f"Session rollback because of exception: {exc_type.__name__} {exc_value}") self._session.rollback() else: self._session.commit() @@ -71,9 +65,7 @@ class DatabaseService(Service): settings_service = get_settings_service() if settings_service.auth_settings.AUTO_LOGIN: with Session(self.engine) as session: - flows = session.exec( - select(models.Flow).where(models.Flow.user_id is None) - ).all() + flows = session.exec(select(models.Flow).where(models.Flow.user_id is None)).all() if flows: logger.debug("Migrating flows to default superuser") username = settings_service.auth_settings.SUPERUSER @@ -103,9 +95,7 @@ class DatabaseService(Service): expected_columns = list(model.model_fields.keys()) try: - available_columns = [ - col["name"] for col in inspector.get_columns(table) - ] + available_columns = [col["name"] for col in inspector.get_columns(table)] except sa.exc.NoSuchTableError: logger.debug(f"Missing table: {table}") return False @@ -169,9 +159,7 @@ class DatabaseService(Service): buffer.write(f"{datetime.now().isoformat()}: Checking migrations\n") command.check(alembic_cfg) except Exception as exc: - if isinstance( - exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected) - 
): + if isinstance(exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)): command.upgrade(alembic_cfg, "head") time.sleep(3) @@ -208,10 +196,7 @@ class DatabaseService(Service): # We will check that all models are in the database # and that the database is up to date with all columns sql_models = [models.Flow, models.User, models.ApiKey] - return [ - TableResults(sql_model.__tablename__, self.check_table(sql_model)) - for sql_model in sql_models - ] + return [TableResults(sql_model.__tablename__, self.check_table(sql_model)) for sql_model in sql_models] def check_table(self, model): results = [] @@ -220,9 +205,7 @@ class DatabaseService(Service): expected_columns = list(model.__fields__.keys()) available_columns = [] try: - available_columns = [ - col["name"] for col in inspector.get_columns(table_name) - ] + available_columns = [col["name"] for col in inspector.get_columns(table_name)] results.append(Result(name=table_name, type="table", success=True)) except sa.exc.NoSuchTableError: logger.error(f"Missing table: {table_name}") @@ -253,9 +236,7 @@ class DatabaseService(Service): try: table.create(self.engine, checkfirst=True) except OperationalError as oe: - logger.warning( - f"Table {table} already exists, skipping. Exception: {oe}" - ) + logger.warning(f"Table {table} already exists, skipping. Exception: {oe}") except Exception as exc: logger.error(f"Error creating table {table}: {exc}") raise RuntimeError(f"Error creating table {table}") from exc @@ -267,9 +248,7 @@ class DatabaseService(Service): if table not in table_names: logger.error("Something went wrong creating the database and tables.") logger.error("Please check your database settings.") - raise RuntimeError( - "Something went wrong creating the database and tables." 
- ) + raise RuntimeError("Something went wrong creating the database and tables.") logger.debug("Database and tables created successfully") diff --git a/src/backend/base/langflow/template/frontend_node/llms.py b/src/backend/base/langflow/template/frontend_node/llms.py index e33c4a60a..7bf5a8cb6 100644 --- a/src/backend/base/langflow/template/frontend_node/llms.py +++ b/src/backend/base/langflow/template/frontend_node/llms.py @@ -63,8 +63,8 @@ class LLMFrontendNode(FrontendNode): field.info = OPENAI_API_BASE_INFO def add_extra_base_classes(self) -> None: - if "BaseLLM" not in self.base_classes: - self.base_classes.append("BaseLLM") + if "BaseLanguageModel" not in self.base_classes: + self.base_classes.append("BaseLanguageModel") @staticmethod def format_azure_field(field: TemplateField): diff --git a/src/backend/base/langflow/utils/logger.py b/src/backend/base/langflow/utils/logger.py index a656a8462..f63120443 100644 --- a/src/backend/base/langflow/utils/logger.py +++ b/src/backend/base/langflow/utils/logger.py @@ -26,10 +26,7 @@ def patching(record): def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None): - if ( - os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS - and log_level is None - ): + if os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS and log_level is None: log_level = os.getenv("LANGFLOW_LOG_LEVEL") if log_level is None: log_level = "ERROR" @@ -77,11 +74,7 @@ def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None): def setup_uvicorn_logger(): - loggers = ( - logging.getLogger(name) - for name in logging.root.manager.loggerDict - if name.startswith("uvicorn.") - ) + loggers = (logging.getLogger(name) for name in logging.root.manager.loggerDict if name.startswith("uvicorn.")) for uvicorn_logger in loggers: uvicorn_logger.handlers = [] logging.getLogger("uvicorn").handlers = [InterceptHandler()] @@ -111,6 +104,4 @@ class InterceptHandler(logging.Handler): frame = frame.f_back 
depth += 1 - logger.opt(depth=depth, exception=record.exc_info).log( - level, record.getMessage() - ) + logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage()) diff --git a/src/frontend/tests/custom_component_full.ts b/src/frontend/tests/custom_component_full.ts index d5a890a1b..6391b2094 100644 --- a/src/frontend/tests/custom_component_full.ts +++ b/src/frontend/tests/custom_component_full.ts @@ -1,6 +1,6 @@ export const custom = `from langflow.custom import CustomComponent -from langchain.llms.base import BaseLLM +from langflow.field_typing import BaseLanguageModel from langchain.chains import LLMChain from langchain.prompts import PromptTemplate from langchain_core.documents import Document @@ -15,6 +15,6 @@ class YourComponent(CustomComponent): def build_config(self): return { "file": { "file_type": ["json"], } } - def build(self, url: str,file:str,integer:int,nested:NestedDict,flt:float,boolean:bool,lisst:list[str],dictionary:dict, llm: BaseLLM, prompt: PromptTemplate) -> Document: + def build(self, url: str,file:str,integer:int,nested:NestedDict,flt:float,boolean:bool,lisst:list[str],dictionary:dict, llm: BaseLanguageModel, prompt: PromptTemplate) -> Document: return "test"`; diff --git a/src/frontend/tests/end-to-end/dropdownComponent.spec.ts b/src/frontend/tests/end-to-end/dropdownComponent.spec.ts index a31ceae44..0623e7460 100644 --- a/src/frontend/tests/end-to-end/dropdownComponent.spec.ts +++ b/src/frontend/tests/end-to-end/dropdownComponent.spec.ts @@ -177,7 +177,7 @@ test("dropDownComponent", async ({ page }) => { .click(); await page.locator("textarea").press("Control+a"); const emptyOptionsCode = `from typing import Optional -from langchain.llms.base import BaseLLM +from langflow.field_typing import BaseLanguageModel from langchain_community.llms.bedrock import Bedrock from langflow.interface.custom.custom_component import CustomComponent @@ -212,7 +212,7 @@ class AmazonBedrockComponent(CustomComponent): endpoint_url: 
Optional[str] = None, streaming: bool = False, cache: Optional[bool] = None, - ) -> BaseLLM: + ) -> BaseLanguageModel: try: output = Bedrock( credentials_profile_name=credentials_profile_name, diff --git a/tests/conftest.py b/tests/conftest.py index 9b6d7c0d6..cfec4f7ef 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,9 +22,6 @@ from langflow.services.database.models.flow.model import Flow, FlowCreate from langflow.services.database.models.user.model import User, UserCreate from langflow.services.database.utils import session_getter from langflow.services.deps import get_db_service -from sqlmodel import Session, SQLModel, create_engine, select -from sqlmodel.pool import StaticPool -from typer.testing import CliRunner if TYPE_CHECKING: from langflow.services.database.service import DatabaseService diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py index 725d35564..07796c540 100644 --- a/tests/test_custom_component.py +++ b/tests/test_custom_component.py @@ -6,21 +6,15 @@ import pytest from langchain_core.documents import Document from langflow.interface.custom.base import CustomComponent -from langflow.interface.custom.code_parser.code_parser import ( - CodeParser, - CodeSyntaxError, -) -from langflow.interface.custom.custom_component.component import ( - Component, - ComponentCodeNullError, -) +from langflow.interface.custom.code_parser.code_parser import CodeParser, CodeSyntaxError +from langflow.interface.custom.custom_component.component import Component, ComponentCodeNullError from langflow.services.database.models.flow import Flow, FlowCreate code_default = """ from langflow.field_typing import Prompt from langflow.interface.custom.custom_component import CustomComponent -from langchain.llms.base import BaseLLM +from langflow.field_typing import BaseLanguageModel from langchain.chains import LLMChain from langchain.prompts import PromptTemplate from langchain_core.documents import Document @@ -32,7 +26,7 @@ class 
YourComponent(CustomComponent): description: str = "Your description" field_config = { "url": { "multiline": True, "required": True } } - def build(self, url: str, llm: BaseLLM, template: Prompt) -> Document: + def build(self, url: str, llm: BaseLanguageModel, template: Prompt) -> Document: response = requests.get(url) prompt = PromptTemplate.from_template(template) chain = LLMChain(llm=llm, prompt=prompt)