Refactor ChatDefinition class to use prompt_template instead of prompt

Gabriel Luiz Freitas Almeida 2024-01-02 23:30:57 -03:00
commit 7f37dcf7bc


@@ -1,9 +1,8 @@
 from typing import Any, Callable, Optional, Union

 from langchain_core.prompts import PromptTemplate as LCPromptTemplate
-from llama_index.prompts import PromptTemplate as LIPromptTemplate
-
 from langflow.utils.prompt import GenericPromptTemplate
+from llama_index.prompts import PromptTemplate as LIPromptTemplate

 PromptTemplate = Union[LCPromptTemplate, LIPromptTemplate]
@@ -14,17 +13,22 @@ class ChatDefinition:
         func: Callable,
         inputs: list[str],
         output_key: Optional[str] = None,
-        prompt: Optional[PromptTemplate] = None,
+        prompt_template: Optional[PromptTemplate] = None,
     ):
         self.func = func
         self.input_keys = inputs
         self.output_key = output_key
-        self.prompt = prompt
+        self.prompt_template = prompt_template

     @classmethod
-    def from_prompt_template(cls, prompt_template: PromptTemplate, func: Callable):
+    def from_prompt_template(cls, prompt_template: PromptTemplate, func: Callable, output_key: Optional[str] = None):
         prompt = GenericPromptTemplate(prompt_template)
-        return cls(func, prompt.input_keys, prompt_template)
+        return cls(
+            func=func,
+            inputs=prompt.input_keys,
+            output_key=output_key,
+            prompt_template=prompt_template,
+        )

     def __call__(self, inputs: dict, callbacks: Optional[Any] = None) -> dict:
         return self.func(inputs, callbacks)
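
For reference, a minimal sketch of how the class reads after this change. It assumes ChatDefinition is importable (its package path is not visible in the diff) and that GenericPromptTemplate derives input_keys from the template's variables; run_chat is a hypothetical callable used only for illustration.

from typing import Any, Optional

from langchain_core.prompts import PromptTemplate as LCPromptTemplate


# Hypothetical chat function for illustration; any Callable taking
# (inputs, callbacks) and returning a dict fits the interface.
def run_chat(inputs: dict, callbacks: Optional[Any] = None) -> dict:
    return {"text": f"echoing {inputs['question']}"}


template = LCPromptTemplate.from_template("Answer the question: {question}")

# After this commit the constructor keyword is prompt_template (was prompt),
# and from_prompt_template forwards an optional output_key instead of
# positionally passing prompt_template into the output_key slot.
definition = ChatDefinition.from_prompt_template(
    prompt_template=template,
    func=run_chat,
    output_key="text",
)
result = definition({"question": "What changed in this refactor?"})

Passing the arguments by keyword in from_prompt_template also means the old positional call, which put prompt_template where output_key was expected, can no longer silently misassign fields.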