Add PythonREPLToolComponent to tools/__init__.py and create PythonREPLTool.py (#1639)

* re-add --fix

* Add PythonREPLToolComponent to tools/__init__.py and create PythonREPLTool.py

* Refactor PythonREPLToolComponent to use build_status_from_tool in PythonREPLTool.py

* Refactor model_specs imports in ChatLiteLLMSpecs.py

* Refactor imports in various files

* Refactor model_specs imports and class names in AnthropicLLMSpecs.py and AnthropicSpecs.py
This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-04-08 16:51:03 -03:00 committed by GitHub
commit 83c915916d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 245 additions and 243 deletions

View file

@ -37,7 +37,7 @@ The CustomComponent class serves as the foundation for creating custom component
| _`langflow.field_typing.Prompt`_ |
| _`langchain.chains.base.Chain`_ |
| _`langchain.PromptTemplate`_ |
| _`langchain.llms.base.BaseLLM`_ |
| _`from langchain.schema.language_model import BaseLanguageModel`_ |
| _`langchain.Tool`_ |
| _`langchain.document_loaders.base.BaseLoader`_ |
| _`langchain.schema.Document`_ |

View file

@ -131,7 +131,7 @@ class MyComponent(CustomComponent):
---
The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLLM, or basic Python types). Check out all supported types in the [component reference](../components/custom).
The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLanguageModel, or basic Python types). Check out all supported types in the [component reference](../components/custom).
```python
from langflow.custom import CustomComponent

View file

@ -21,8 +21,6 @@ from langflow.main import setup_app
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service, get_settings_service
from langflow.services.utils import initialize_services, initialize_settings_service
from langflow.utils.logger import configure, logger
initialize_settings_service)
from langflow.utils.logger import configure, logger
console = Console()
@ -102,12 +100,8 @@ def update_settings(
@app.command()
def run(
host: str = typer.Option(
"127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
),
workers: int = typer.Option(
1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
),
host: str = typer.Option("127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"),
workers: int = typer.Option(1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"),
timeout: int = typer.Option(300, help="Worker timeout in seconds."),
port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"),
components_path: Optional[Path] = typer.Option(
@ -115,19 +109,11 @@ def run(
help="Path to the directory containing custom components.",
envvar="LANGFLOW_COMPONENTS_PATH",
),
config: str = typer.Option(
Path(__file__).parent / "config.yaml", help="Path to the configuration file."
),
config: str = typer.Option(Path(__file__).parent / "config.yaml", help="Path to the configuration file."),
# .env file param
env_file: Path = typer.Option(
None, help="Path to the .env file containing environment variables."
),
log_level: str = typer.Option(
"critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
),
log_file: Path = typer.Option(
"logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"
),
env_file: Path = typer.Option(None, help="Path to the .env file containing environment variables."),
log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"),
cache: Optional[str] = typer.Option(
envvar="LANGFLOW_LANGCHAIN_CACHE",
help="Type of cache to use. (InMemoryCache, SQLiteCache)",
@ -221,9 +207,7 @@ def wait_for_server_ready(host, port):
def run_on_mac_or_linux(host, port, log_level, options, app):
webapp_process = Process(
target=run_langflow, args=(host, port, log_level, options, app)
)
webapp_process = Process(target=run_langflow, args=(host, port, log_level, options, app))
webapp_process.start()
wait_for_server_ready(host, port)
@ -319,9 +303,7 @@ def build_new_version_notice(current_version: str, package_name: str):
f"A new pre-release version of {package_name} is available: {latest_version}",
)
else:
latest_version = httpx.get(f"https://pypi.org/pypi/{package_name}/json").json()[
"info"
]["version"]
latest_version = httpx.get(f"https://pypi.org/pypi/{package_name}/json").json()["info"]["version"]
if not version_is_prerelease(latest_version):
return (
False,
@ -345,9 +327,7 @@ def fetch_latest_version(package_name: str, include_prerelease: bool) -> str:
def build_version_notice(current_version: str, package_name: str) -> str:
latest_version = fetch_latest_version(package_name, is_prerelease(current_version))
if latest_version and pkg_version.parse(current_version) < pkg_version.parse(
latest_version
):
if latest_version and pkg_version.parse(current_version) < pkg_version.parse(latest_version):
release_type = "pre-release" if is_prerelease(latest_version) else "version"
return f"A new {release_type} of {package_name} is available: {latest_version}"
return ""
@ -396,9 +376,7 @@ def print_banner(host: str, port: int):
from importlib import metadata
langflow_base_version = metadata.version("langflow-base")
is_pre_release |= is_prerelease(
langflow_base_version
) # Update pre-release status
is_pre_release |= is_prerelease(langflow_base_version) # Update pre-release status
notice = build_version_notice(langflow_base_version, "langflow-base")
notice = stylize_text(notice, "langflow-base", is_pre_release)
if notice:
@ -417,9 +395,7 @@ def print_banner(host: str, port: int):
notices.append(f"Run '{pip_command}' to update.")
styled_notices = [f"[bold]{notice}[/bold]" for notice in notices if notice]
styled_package_name = stylize_text(
package_name, package_name, any("pre-release" in notice for notice in notices)
)
styled_package_name = stylize_text(package_name, package_name, any("pre-release" in notice for notice in notices))
title = f"[bold]Welcome to :chains: {styled_package_name}[/bold]\n"
info_text = "Collaborate, and contribute at our [bold][link=https://github.com/langflow-ai/langflow]GitHub Repo[/link][/bold] :rocket:"
@ -462,12 +438,8 @@ def run_langflow(host, port, log_level, options, app):
@app.command()
def superuser(
username: str = typer.Option(..., prompt=True, help="Username for the superuser."),
password: str = typer.Option(
..., prompt=True, hide_input=True, help="Password for the superuser."
),
log_level: str = typer.Option(
"error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
),
password: str = typer.Option(..., prompt=True, hide_input=True, help="Password for the superuser."),
log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
):
"""
Create a superuser.
@ -494,11 +466,23 @@ def superuser(
@app.command()
def migration(test: bool = typer.Option(True, help="Run migrations in test mode.")):
def migration(
test: bool = typer.Option(True, help="Run migrations in test mode."),
fix: bool = typer.Option(
False,
help="Fix migrations. This is a destructive operation, and should only be used if you know what you are doing.",
),
):
"""
Run or test migrations.
"""
initialize_services()
if fix:
if not typer.confirm(
"This will delete all data necessary to fix migrations. Are you sure you want to continue?"
):
raise typer.Abort()
initialize_services(fix_migration=fix)
db_service = get_db_service()
if not test:
db_service.run_migrations()

View file

@ -0,0 +1,23 @@
from langflow.field_typing import Tool
def build_status_from_tool(tool: Tool):
    """
    Build a human-readable status string for a tool.

    Args:
        tool (Tool): The tool object to build the status for.

    Returns:
        str: The status string representation of the tool, including its
        name, description, and arguments (if any).
    """
    # repr() then strip the surrounding quotes so embedded newlines/quotes
    # in the description are rendered escaped rather than literally.
    description_repr = repr(tool.description).strip("'")

    arg_lines = []
    for arg_name, arg_data in tool.args.items():
        if "description" in arg_data:
            arg_lines.append(f"- {arg_name}: {arg_data['description']}")

    status = f"Name: {tool.name}\nDescription: {description_repr}"
    if arg_lines:
        status += "\nArguments:\n" + "\n".join(arg_lines)
    return status

View file

@ -4,7 +4,7 @@ from langchain.agents import create_xml_agent
from langchain_core.prompts import PromptTemplate
from langflow.base.agents.agent import LCAgentComponent
from langflow.field_typing import BaseLLM, BaseMemory, Text, Tool
from langflow.field_typing import BaseLanguageModel, BaseMemory, Text, Tool
class XMLAgentComponent(LCAgentComponent):
@ -66,7 +66,7 @@ class XMLAgentComponent(LCAgentComponent):
async def build(
self,
input_value: str,
llm: BaseLLM,
llm: BaseLanguageModel,
tools: List[Tool],
prompt: str,
memory: Optional[BaseMemory] = None,

View file

@ -1,6 +1,5 @@
from typing import Optional
from langchain.llms.base import BaseLLM
from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.bedrock import Bedrock
from langflow.interface.custom.custom_component import CustomComponent
@ -46,7 +45,7 @@ class AmazonBedrockComponent(CustomComponent):
endpoint_url: Optional[str] = None,
streaming: bool = False,
cache: Optional[bool] = None,
) -> BaseLLM:
) -> BaseLanguageModel:
try:
output = Bedrock(
credentials_profile_name=credentials_profile_name,

View file

@ -1,14 +1,14 @@
from typing import Optional
from langchain.llms.base import BaseLanguageModel
from langchain_community.chat_models.anthropic import ChatAnthropic
from langchain_anthropic import ChatAnthropic
from pydantic.v1 import SecretStr
from langflow.interface.custom.custom_component import CustomComponent
class AnthropicLLM(CustomComponent):
display_name: str = "AnthropicLLM"
class ChatAntropicSpecsComponent(CustomComponent):
display_name: str = "Anthropic"
description: str = "Anthropic Chat&Completion large language models."
icon = "Anthropic"

View file

@ -1,49 +0,0 @@
from typing import Optional
from langchain_community.llms.anthropic import Anthropic
from pydantic.v1 import SecretStr
from langflow.field_typing import BaseLanguageModel, NestedDict
from langflow.interface.custom.custom_component import CustomComponent
class AnthropicComponent(CustomComponent):
    """Langflow component wrapping the completion-style Anthropic LLM."""

    display_name = "Anthropic"
    description = "Anthropic large language models."
    icon = "Anthropic"

    def build_config(self):
        """Declare the fields rendered in the UI for this component."""
        api_key_field = {
            "display_name": "Anthropic API Key",
            "type": str,
            "password": True,
        }
        api_url_field = {
            "display_name": "Anthropic API URL",
            "type": str,
        }
        model_kwargs_field = {
            "display_name": "Model Kwargs",
            "field_type": "NestedDict",
            "advanced": True,
        }
        temperature_field = {
            "display_name": "Temperature",
            "field_type": "float",
        }
        return {
            "anthropic_api_key": api_key_field,
            "anthropic_api_url": api_url_field,
            "model_kwargs": model_kwargs_field,
            "temperature": temperature_field,
        }

    def build(
        self,
        anthropic_api_key: str,
        anthropic_api_url: str,
        model_kwargs: NestedDict = {},
        temperature: Optional[float] = None,
    ) -> BaseLanguageModel:
        """Instantiate the Anthropic LLM with the provided settings.

        NOTE(review): the mutable ``{}`` default is kept intentionally —
        langflow introspects build() defaults for the UI, and the dict is
        never mutated here.
        """
        # Wrap the key in SecretStr so it is not leaked in reprs/logs.
        return Anthropic(
            anthropic_api_key=SecretStr(anthropic_api_key),
            anthropic_api_url=anthropic_api_url,
            model_kwargs=model_kwargs,
            temperature=temperature,
        )

View file

@ -1,9 +1,9 @@
from typing import Optional
from langchain.llms.base import BaseLLM
from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
from pydantic.v1 import SecretStr
from langflow.field_typing import BaseLanguageModel
from langflow.interface.custom.custom_component import CustomComponent
@ -79,7 +79,7 @@ class QianfanChatEndpointComponent(CustomComponent):
temperature: Optional[float] = None,
penalty_score: Optional[float] = None,
endpoint: Optional[str] = None,
) -> BaseLLM:
) -> BaseLanguageModel:
try:
output = QianfanChatEndpoint( # type: ignore
model=model,

View file

@ -1,9 +1,9 @@
from typing import Optional
from langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint
from langchain.llms.base import BaseLLM
from langflow.interface.custom.custom_component import CustomComponent
from langflow.field_typing import BaseLanguageModel
class QianfanLLMEndpointComponent(CustomComponent):
@ -78,7 +78,7 @@ class QianfanLLMEndpointComponent(CustomComponent):
temperature: Optional[float] = None,
penalty_score: Optional[float] = None,
endpoint: Optional[str] = None,
) -> BaseLLM:
) -> BaseLanguageModel:
try:
output = QianfanLLMEndpoint( # type: ignore
model=model,

View file

@ -1,67 +1,89 @@
from typing import Callable, Optional, Union
from typing import Optional
from langchain_community.chat_models.anthropic import ChatAnthropic
from langchain_anthropic import ChatAnthropic
from pydantic.v1.types import SecretStr
from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
class ChatAnthropicComponent(CustomComponent):
display_name = "ChatAnthropic"
description = "`Anthropic` chat large language models."
documentation = "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic"
class AnthropicLLM(CustomComponent):
display_name: str = "Anthropic"
description: str = "Generate text using Anthropic Chat&Completion LLMs."
icon = "Anthropic"
field_order = [
"model",
"anthropic_api_key",
"max_tokens",
"temperature",
"anthropic_api_url",
]
def build_config(self):
return {
"model": {
"display_name": "Model Name",
"options": [
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
"claude-2.1",
"claude-2.0",
"claude-instant-1.2",
"claude-instant-1",
],
"info": "Name of the model to use.",
"required": True,
"value": "claude-3-opus-20240229",
},
"anthropic_api_key": {
"display_name": "Anthropic API Key",
"field_type": "str",
"required": True,
"password": True,
},
"model_kwargs": {
"display_name": "Model Kwargs",
"field_type": "dict",
"advanced": True,
},
"model_name": {
"display_name": "Model Name",
"field_type": "str",
"advanced": False,
"required": False,
"options": ["claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307"],
},
"temperature": {
"display_name": "Temperature",
"field_type": "float",
"info": "Your Anthropic API key.",
},
"max_tokens": {
"display_name": "Max Tokens",
"field_type": "int",
"advanced": False,
"required": False,
"advanced": True,
"value": 256,
},
"top_k": {"display_name": "Top K", "field_type": "int", "advanced": True},
"top_p": {"display_name": "Top P", "field_type": "float", "advanced": True},
"temperature": {
"display_name": "Temperature",
"field_type": "float",
"value": 0.1,
},
"anthropic_api_url": {
"display_name": "Anthropic API URL",
"advanced": True,
"info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
},
"code": {"show": False},
}
def build(
self,
anthropic_api_key: str,
model_kwargs: dict = {},
model_name: str = "claude-3-opus-20240229",
model: str,
anthropic_api_key: Optional[str] = None,
max_tokens: Optional[int] = None,
temperature: Optional[float] = None,
max_tokens: Optional[int] = 1024,
top_k: Optional[int] = None,
top_p: Optional[float] = None,
) -> Union[BaseLanguageModel, Callable]:
return ChatAnthropic(
anthropic_api_key=SecretStr(anthropic_api_key),
model_kwargs=model_kwargs,
model_name=model_name,
temperature=temperature,
max_tokens=max_tokens, # type: ignore
top_k=top_k,
top_p=top_p,
)
anthropic_api_url: Optional[str] = None,
) -> BaseLanguageModel:
# Set default API endpoint if not provided
if not anthropic_api_url:
anthropic_api_url = "https://api.anthropic.com"
try:
output = ChatAnthropic(
model_name=model,
anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None),
max_tokens_to_sample=max_tokens, # type: ignore
temperature=temperature,
anthropic_api_url=anthropic_api_url,
)
except Exception as e:
raise ValueError("Could not connect to Anthropic API.") from e
return output

View file

@ -1,4 +1,4 @@
from typing import Any, Callable, Dict, Optional, Union
from typing import Any, Dict, Optional
from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException
from langflow.field_typing import BaseLanguageModel

View file

@ -1,9 +1,9 @@
from typing import Optional, Union
from typing import Optional
from langchain.llms import BaseLLM
from langflow.field_typing import BaseLanguageModel
from langchain_community.chat_models.openai import ChatOpenAI
from langflow.field_typing import BaseLanguageModel, NestedDict
from langflow.field_typing import NestedDict
from langflow.interface.custom.custom_component import CustomComponent
@ -68,7 +68,7 @@ class ChatOpenAIComponent(CustomComponent):
openai_api_base: Optional[str] = None,
openai_api_key: Optional[str] = None,
temperature: float = 0.7,
) -> Union[BaseLanguageModel, BaseLLM]:
) -> BaseLanguageModel:
if not openai_api_base:
openai_api_base = "https://api.openai.com/v1"
return ChatOpenAI(

View file

@ -1,6 +1,5 @@
from typing import List, Optional, Union
from typing import List, Optional
from langchain.llms import BaseLLM
from langchain_community.chat_models.vertexai import ChatVertexAI
from langchain_core.messages.base import BaseMessage
@ -74,7 +73,7 @@ class ChatVertexAIComponent(CustomComponent):
top_k: int = 40,
top_p: float = 0.95,
verbose: bool = False,
) -> Union[BaseLanguageModel, BaseLLM]:
) -> BaseLanguageModel:
return ChatVertexAI(
credentials=credentials,
examples=examples,

View file

@ -1,6 +1,6 @@
from typing import Optional
from langchain.llms.base import BaseLLM
from langflow.field_typing import BaseLanguageModel
from langchain.llms.huggingface_endpoint import HuggingFaceEndpoint
from langflow.interface.custom.custom_component import CustomComponent
@ -32,7 +32,7 @@ class HuggingFaceEndpointsComponent(CustomComponent):
task: str = "text2text-generation",
huggingfacehub_api_token: Optional[str] = None,
model_kwargs: Optional[dict] = None,
) -> BaseLLM:
) -> BaseLanguageModel:
try:
output = HuggingFaceEndpoint( # type: ignore
endpoint_url=endpoint_url,

View file

@ -1,6 +1,6 @@
from typing import List, Optional
from langchain.llms.base import BaseLLM
from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.ollama import Ollama
from langflow.interface.custom.custom_component import CustomComponent
@ -118,7 +118,7 @@ class OllamaLLM(CustomComponent):
tfs_z: Optional[float] = None,
top_k: Optional[int] = None,
top_p: Optional[int] = None,
) -> BaseLLM:
) -> BaseLanguageModel:
if not base_url:
base_url = "http://localhost:11434"

View file

@ -1,6 +1,6 @@
from typing import Callable, Dict, Optional, Union
from typing import Dict, Optional
from langchain.llms import BaseLLM
from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.vertexai import VertexAI
from langflow.interface.custom.custom_component import CustomComponent
@ -129,7 +129,7 @@ class VertexAIComponent(CustomComponent):
top_p: float = 0.95,
tuned_model_name: Optional[str] = None,
verbose: bool = False,
) -> Union[BaseLLM, Callable]:
) -> BaseLanguageModel:
return VertexAI(
credentials=credentials,
location=location,

View file

@ -1,6 +1,6 @@
from .AmazonBedrockSpecs import AmazonBedrockComponent
from .AnthropicLLMSpecs import AnthropicLLM
from .AnthropicSpecs import AnthropicComponent
from .AnthropicLLMSpecs import ChatAntropicSpecsComponent
from .AzureChatOpenAISpecs import AzureChatOpenAISpecsComponent
from .BaiduQianfanChatEndpointsSpecs import QianfanChatEndpointComponent
from .BaiduQianfanLLMEndpointsSpecs import QianfanLLMEndpointComponent
@ -17,8 +17,7 @@ from .VertexAISpecs import VertexAIComponent
__all__ = [
"AmazonBedrockComponent",
"AnthropicLLM",
"AnthropicComponent",
"ChatAntropicSpecsComponent",
"AzureChatOpenAISpecsComponent",
"QianfanChatEndpointComponent",
"QianfanLLMEndpointComponent",

View file

@ -1,8 +1,8 @@
from typing import Callable, Optional, Union
from typing import Optional
from langchain.retrievers import MultiQueryRetriever
from langflow.field_typing import BaseLLM, BaseRetriever, PromptTemplate
from langflow.field_typing import BaseRetriever, PromptTemplate, BaseLanguageModel
from langflow.interface.custom.custom_component import CustomComponent
@ -39,11 +39,11 @@ class MultiQueryRetrieverComponent(CustomComponent):
def build(
self,
llm: BaseLLM,
llm: BaseLanguageModel,
retriever: BaseRetriever,
prompt: Optional[PromptTemplate] = None,
parser_key: str = "lines",
) -> Union[Callable, MultiQueryRetriever]:
) -> MultiQueryRetriever:
if not prompt:
return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, parser_key=parser_key)
else:

View file

@ -1,5 +1,3 @@
from typing import Callable, Union
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo
from langchain_community.vectorstores import VectorStore
@ -22,5 +20,5 @@ class VectorStoreInfoComponent(CustomComponent):
vectorstore: VectorStore,
description: str,
name: str,
) -> Union[VectorStoreInfo, Callable]:
) -> VectorStoreInfo:
return VectorStoreInfo(vectorstore=vectorstore, description=description, name=name)

View file

@ -0,0 +1,68 @@
import importlib
from langchain.agents import Tool
from langchain_experimental.utilities import PythonREPL
from langflow.base.tools.base import build_status_from_tool
from langflow.custom import CustomComponent
class PythonREPLToolComponent(CustomComponent):
    """Component exposing a Python REPL as a langchain ``Tool``."""

    display_name = "Python REPL Tool"
    description = "A tool for running Python code in a REPL environment."

    def build_config(self):
        """Declare the UI fields for this component."""
        return {
            "name": {"display_name": "Name", "info": "The name of the tool."},
            "description": {"display_name": "Description", "info": "A description of the tool."},
            "global_imports": {
                "display_name": "Global Imports",
                "info": "A list of modules to import globally, e.g. ['math', 'numpy'].",
            },
        }

    def get_globals(self, globals: list[str]) -> dict:
        """
        Retrieves the global variables from the specified modules.

        Args:
            globals (list[str]): A list of module names.

        Returns:
            dict: A dictionary mapping each importable module's ``__name__``
            to the module object.
        """
        imported: dict = {}
        for module in globals:
            try:
                mod = importlib.import_module(module)
            except ImportError:
                # Best-effort: skip modules that cannot be imported.
                print(f"Could not import module {module}")
            else:
                imported[mod.__name__] = mod
        return imported

    def build(
        self,
        name: str = "python_repl",
        description: str = "A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`.",
        global_imports: list[str] = ["math"],
    ) -> Tool:
        """
        Builds a Python REPL tool.

        Args:
            name (str, optional): The name of the tool. Defaults to "python_repl".
            description (str, optional): The description shown to the agent.
            global_imports (list[str], optional): Modules made available inside
                the REPL. Defaults to ["math"].

        Returns:
            Tool: The built Python REPL tool.

        NOTE(review): the mutable ``["math"]`` default is kept intentionally —
        langflow introspects build() defaults for the UI, and the list is
        never mutated here.
        """
        repl_globals = self.get_globals(global_imports)
        repl = PythonREPL(_globals=repl_globals)
        repl_tool = Tool(
            name=name,
            description=description,
            func=repl.run,
        )
        # Surface a summary of the built tool in the component's status panel.
        self.status = build_status_from_tool(repl_tool)
        return repl_tool

View file

@ -1,5 +1,7 @@
from .PythonREPLTool import PythonREPLToolComponent
from .RetrieverTool import RetrieverToolComponent
from .SearchAPITool import SearchApiToolComponent
from .SearchApi import SearchApi
from .SearchAPITool import SearchApiToolComponent
__all__ = ["RetrieverToolComponent", "SearchApiToolComponent", "SearchApi"]
__all__ = ["RetrieverToolComponent", "SearchApiToolComponent", "SearchApi", "PythonREPLToolComponent"]

View file

@ -35,11 +35,7 @@ class LangflowApplication(BaseApplication):
super().__init__()
def load_config(self):
config = {
key: value
for key, value in self.options.items()
if key in self.cfg.settings and value is not None
}
config = {key: value for key, value in self.options.items() if key in self.cfg.settings and value is not None}
for key, value in config.items():
self.cfg.set(key.lower(), value)

View file

@ -1,6 +1,5 @@
from datetime import datetime
import time
from datetime import datetime
from pathlib import Path
from typing import TYPE_CHECKING
@ -37,10 +36,7 @@ class DatabaseService(Service):
def _create_engine(self) -> "Engine":
"""Create the engine for the database."""
settings_service = get_settings_service()
if (
settings_service.settings.DATABASE_URL
and settings_service.settings.DATABASE_URL.startswith("sqlite")
):
if settings_service.settings.DATABASE_URL and settings_service.settings.DATABASE_URL.startswith("sqlite"):
connect_args = {"check_same_thread": False}
else:
connect_args = {}
@ -52,9 +48,7 @@ class DatabaseService(Service):
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is not None: # If an exception has been raised
logger.error(
f"Session rollback because of exception: {exc_type.__name__} {exc_value}"
)
logger.error(f"Session rollback because of exception: {exc_type.__name__} {exc_value}")
self._session.rollback()
else:
self._session.commit()
@ -71,9 +65,7 @@ class DatabaseService(Service):
settings_service = get_settings_service()
if settings_service.auth_settings.AUTO_LOGIN:
with Session(self.engine) as session:
flows = session.exec(
select(models.Flow).where(models.Flow.user_id is None)
).all()
flows = session.exec(select(models.Flow).where(models.Flow.user_id is None)).all()
if flows:
logger.debug("Migrating flows to default superuser")
username = settings_service.auth_settings.SUPERUSER
@ -103,9 +95,7 @@ class DatabaseService(Service):
expected_columns = list(model.model_fields.keys())
try:
available_columns = [
col["name"] for col in inspector.get_columns(table)
]
available_columns = [col["name"] for col in inspector.get_columns(table)]
except sa.exc.NoSuchTableError:
logger.debug(f"Missing table: {table}")
return False
@ -169,9 +159,7 @@ class DatabaseService(Service):
buffer.write(f"{datetime.now().isoformat()}: Checking migrations\n")
command.check(alembic_cfg)
except Exception as exc:
if isinstance(
exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)
):
if isinstance(exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)):
command.upgrade(alembic_cfg, "head")
time.sleep(3)
@ -208,10 +196,7 @@ class DatabaseService(Service):
# We will check that all models are in the database
# and that the database is up to date with all columns
sql_models = [models.Flow, models.User, models.ApiKey]
return [
TableResults(sql_model.__tablename__, self.check_table(sql_model))
for sql_model in sql_models
]
return [TableResults(sql_model.__tablename__, self.check_table(sql_model)) for sql_model in sql_models]
def check_table(self, model):
results = []
@ -220,9 +205,7 @@ class DatabaseService(Service):
expected_columns = list(model.__fields__.keys())
available_columns = []
try:
available_columns = [
col["name"] for col in inspector.get_columns(table_name)
]
available_columns = [col["name"] for col in inspector.get_columns(table_name)]
results.append(Result(name=table_name, type="table", success=True))
except sa.exc.NoSuchTableError:
logger.error(f"Missing table: {table_name}")
@ -253,9 +236,7 @@ class DatabaseService(Service):
try:
table.create(self.engine, checkfirst=True)
except OperationalError as oe:
logger.warning(
f"Table {table} already exists, skipping. Exception: {oe}"
)
logger.warning(f"Table {table} already exists, skipping. Exception: {oe}")
except Exception as exc:
logger.error(f"Error creating table {table}: {exc}")
raise RuntimeError(f"Error creating table {table}") from exc
@ -267,9 +248,7 @@ class DatabaseService(Service):
if table not in table_names:
logger.error("Something went wrong creating the database and tables.")
logger.error("Please check your database settings.")
raise RuntimeError(
"Something went wrong creating the database and tables."
)
raise RuntimeError("Something went wrong creating the database and tables.")
logger.debug("Database and tables created successfully")

View file

@ -63,8 +63,8 @@ class LLMFrontendNode(FrontendNode):
field.info = OPENAI_API_BASE_INFO
def add_extra_base_classes(self) -> None:
if "BaseLLM" not in self.base_classes:
self.base_classes.append("BaseLLM")
if "BaseLanguageModel" not in self.base_classes:
self.base_classes.append("BaseLanguageModel")
@staticmethod
def format_azure_field(field: TemplateField):

View file

@ -26,10 +26,7 @@ def patching(record):
def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
if (
os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS
and log_level is None
):
if os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS and log_level is None:
log_level = os.getenv("LANGFLOW_LOG_LEVEL")
if log_level is None:
log_level = "ERROR"
@ -77,11 +74,7 @@ def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
def setup_uvicorn_logger():
loggers = (
logging.getLogger(name)
for name in logging.root.manager.loggerDict
if name.startswith("uvicorn.")
)
loggers = (logging.getLogger(name) for name in logging.root.manager.loggerDict if name.startswith("uvicorn."))
for uvicorn_logger in loggers:
uvicorn_logger.handlers = []
logging.getLogger("uvicorn").handlers = [InterceptHandler()]
@ -111,6 +104,4 @@ class InterceptHandler(logging.Handler):
frame = frame.f_back
depth += 1
logger.opt(depth=depth, exception=record.exc_info).log(
level, record.getMessage()
)
logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())

View file

@ -1,6 +1,6 @@
export const custom = `from langflow.custom import CustomComponent
from langchain.llms.base import BaseLLM
from langflow.field_typing import BaseLanguageModel
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_core.documents import Document
@ -15,6 +15,6 @@ class YourComponent(CustomComponent):
def build_config(self):
return { "file": { "file_type": ["json"], } }
def build(self, url: str,file:str,integer:int,nested:NestedDict,flt:float,boolean:bool,lisst:list[str],dictionary:dict, llm: BaseLLM, prompt: PromptTemplate) -> Document:
def build(self, url: str,file:str,integer:int,nested:NestedDict,flt:float,boolean:bool,lisst:list[str],dictionary:dict, llm: BaseLanguageModel, prompt: PromptTemplate) -> Document:
return "test"`;

View file

@ -177,7 +177,7 @@ test("dropDownComponent", async ({ page }) => {
.click();
await page.locator("textarea").press("Control+a");
const emptyOptionsCode = `from typing import Optional
from langchain.llms.base import BaseLLM
from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.bedrock import Bedrock
from langflow.interface.custom.custom_component import CustomComponent
@ -212,7 +212,7 @@ class AmazonBedrockComponent(CustomComponent):
endpoint_url: Optional[str] = None,
streaming: bool = False,
cache: Optional[bool] = None,
) -> BaseLLM:
) -> BaseLanguageModel:
try:
output = Bedrock(
credentials_profile_name=credentials_profile_name,

View file

@ -22,9 +22,6 @@ from langflow.services.database.models.flow.model import Flow, FlowCreate
from langflow.services.database.models.user.model import User, UserCreate
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service
from sqlmodel import Session, SQLModel, create_engine, select
from sqlmodel.pool import StaticPool
from typer.testing import CliRunner
if TYPE_CHECKING:
from langflow.services.database.service import DatabaseService

View file

@ -6,21 +6,15 @@ import pytest
from langchain_core.documents import Document
from langflow.interface.custom.base import CustomComponent
from langflow.interface.custom.code_parser.code_parser import (
CodeParser,
CodeSyntaxError,
)
from langflow.interface.custom.custom_component.component import (
Component,
ComponentCodeNullError,
)
from langflow.interface.custom.code_parser.code_parser import CodeParser, CodeSyntaxError
from langflow.interface.custom.custom_component.component import Component, ComponentCodeNullError
from langflow.services.database.models.flow import Flow, FlowCreate
code_default = """
from langflow.field_typing import Prompt
from langflow.interface.custom.custom_component import CustomComponent
from langchain.llms.base import BaseLLM
from langflow.field_typing import BaseLanguageModel
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_core.documents import Document
@ -32,7 +26,7 @@ class YourComponent(CustomComponent):
description: str = "Your description"
field_config = { "url": { "multiline": True, "required": True } }
def build(self, url: str, llm: BaseLLM, template: Prompt) -> Document:
def build(self, url: str, llm: BaseLanguageModel, template: Prompt) -> Document:
response = requests.get(url)
prompt = PromptTemplate.from_template(template)
chain = LLMChain(llm=llm, prompt=prompt)