From 7f37dcf7bc9092875c8ba699432e13fd95d18d62 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Tue, 2 Jan 2024 23:30:57 -0300
Subject: [PATCH] Refactor ChatDefinition class to use prompt_template instead
 of prompt

---
 src/backend/langflow/utils/chat.py | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

diff --git a/src/backend/langflow/utils/chat.py b/src/backend/langflow/utils/chat.py
index 35b4f22a6..e1621bdab 100644
--- a/src/backend/langflow/utils/chat.py
+++ b/src/backend/langflow/utils/chat.py
@@ -1,9 +1,8 @@
 from typing import Any, Callable, Optional, Union
 
 from langchain_core.prompts import PromptTemplate as LCPromptTemplate
-from llama_index.prompts import PromptTemplate as LIPromptTemplate
-
 from langflow.utils.prompt import GenericPromptTemplate
+from llama_index.prompts import PromptTemplate as LIPromptTemplate
 
 PromptTemplate = Union[LCPromptTemplate, LIPromptTemplate]
 
@@ -14,17 +13,22 @@ class ChatDefinition:
         func: Callable,
         inputs: list[str],
         output_key: Optional[str] = None,
-        prompt: Optional[PromptTemplate] = None,
+        prompt_template: Optional[PromptTemplate] = None,
     ):
         self.func = func
         self.input_keys = inputs
         self.output_key = output_key
-        self.prompt = prompt
+        self.prompt_template = prompt_template
 
     @classmethod
-    def from_prompt_template(cls, prompt_template: PromptTemplate, func: Callable):
+    def from_prompt_template(cls, prompt_template: PromptTemplate, func: Callable, output_key: Optional[str] = None):
         prompt = GenericPromptTemplate(prompt_template)
-        return cls(func, prompt.input_keys, prompt_template)
+        return cls(
+            func=func,
+            inputs=prompt.input_keys,
+            output_key=output_key,
+            prompt_template=prompt_template,
+        )
 
     def __call__(self, inputs: dict, callbacks: Optional[Any] = None) -> dict:
         return self.func(inputs, callbacks)