Remove Prompt

This commit is contained in:
ogabrielluiz 2024-06-18 18:37:20 -03:00
commit 418b32a616
4 changed files with 1 addition and 52 deletions

View file

@@ -7,7 +7,6 @@ from langchain_core.language_models.llms import LLM
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langflow.custom import Component
from langflow.field_typing.prompt import Prompt
from langflow.schema.message import Message
@@ -86,7 +85,7 @@ class LCModelComponent(Component):
return status_message
def get_chat_result(
self, runnable: BaseChatModel, stream: bool, input_value: str | Prompt, system_message: Optional[str] = None
self, runnable: BaseChatModel, stream: bool, input_value: str | Message, system_message: Optional[str] = None
):
messages: list[Union[HumanMessage, SystemMessage]] = []
if not input_value and not system_message:

View file

@@ -25,7 +25,6 @@ from .constants import (
Tool,
VectorStore,
)
from .prompt import Prompt
from .range_spec import RangeSpec
@@ -45,7 +44,6 @@ def __getattr__(name: str) -> Any:
# This is to avoid circular imports
if name == "Input":
return _import_input_class()
elif name == "RangeSpec":
return RangeSpec
elif name == "Output":
return _import_output_class()
@@ -76,7 +74,6 @@ __all__ = [
"Input",
"NestedDict",
"Object",
"Prompt",
"PromptTemplate",
"RangeSpec",
"Text",

View file

@@ -15,9 +15,6 @@ from langchain_core.tools import Tool
from langchain_core.vectorstores import VectorStore
from langchain_text_splitters import TextSplitter
from langflow.field_typing.prompt import Prompt
# Type alias for more complex dicts
NestedDict = Dict[str, Union[str, Dict]]
@@ -60,5 +57,4 @@ CUSTOM_COMPONENT_SUPPORTED_TYPES = {
"Text": Text,
"Object": Object,
"Callable": Callable,
"Prompt": Prompt,
}

View file

@@ -1,43 +0,0 @@
from langchain_core.load import load
from langchain_core.messages import HumanMessage
from langchain_core.prompts import BaseChatPromptTemplate, ChatPromptTemplate, PromptTemplate
from langflow.base.prompts.utils import dict_values_to_string
from langflow.schema.message import Message
from langflow.schema.data import Data
class Prompt(Data):
    """Data subclass that wraps a LangChain prompt.

    Stores the serialized LangChain prompt (``prompt``), the raw template
    and its variables, the rendered text (``text``), and the resulting
    chat messages (``messages``).
    """

    def load_lc_prompt(self):
        """Deserialize and return the stored LangChain prompt object.

        Raises:
            ValueError: If no ``"prompt"`` entry is present on this instance.
        """
        if "prompt" not in self:
            raise ValueError("Prompt is required.")
        # langchain_core.load.load reconstructs the object from its JSON form
        return load(self.prompt)

    @classmethod
    def from_lc_prompt(
        cls,
        prompt: BaseChatPromptTemplate,
    ):
        """Build a Prompt from an existing LangChain chat prompt template."""
        prompt_json = prompt.to_json()
        return cls(prompt=prompt_json)

    def format_text(self):
        """Render ``self.template`` with ``self.variables`` and return the result.

        Variable values are coerced to strings via ``dict_values_to_string``
        before formatting. Side effect: the rendered string is also stored
        on ``self.text``.
        """
        prompt_template = PromptTemplate.from_template(self.template)
        variables_with_str_values = dict_values_to_string(self.variables)
        formatted_prompt = prompt_template.format(**variables_with_str_values)
        self.text = formatted_prompt
        return formatted_prompt

    @classmethod
    async def from_template_and_variables(cls, template: str, variables: dict):
        """Create a Prompt from a template string and its variable values.

        The rendered text becomes the first content entry; any ``Message``
        values among the variables contribute their file-content dicts
        (fetched asynchronously) as additional content entries. The combined
        contents are wrapped in a single ``HumanMessage`` chat template whose
        messages and JSON form are stored on the instance.
        """
        instance = cls(template=template, variables=variables)
        # format_text() also sets instance.text as a side effect
        contents = [{"type": "text", "text": instance.format_text()}]
        # Get all Message instances from the kwargs
        for value in variables.values():
            if isinstance(value, Message):
                content_dicts = await value.get_file_content_dicts()
                contents.extend(content_dicts)
        prompt_template = ChatPromptTemplate.from_messages([HumanMessage(content=contents)])
        instance.messages = prompt_template.messages
        instance.prompt = prompt_template.to_json()
        return instance