📦 chore(ConversationChain.py): add ConversationChain component to handle conversations and load context from memory
📦 chore(LLMChain.py): add optional memory parameter to LLMChain build method to support loading context from memory
This commit is contained in:
parent
c23d797677
commit
941cdd4990
2 changed files with 26 additions and 1 deletions
24
src/backend/langflow/components/chains/ConversationChain.py
Normal file
24
src/backend/langflow/components/chains/ConversationChain.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
from langflow import CustomComponent
|
||||
from langchain.chains import ConversationChain
|
||||
from typing import Optional, Union, Callable
|
||||
from langflow.field_typing import BaseLanguageModel, BaseMemory, Chain
|
||||
|
||||
|
||||
class ConversationChainComponent(CustomComponent):
    """Langflow component wrapping LangChain's ConversationChain.

    Builds a chain that holds a conversation with an LLM, optionally
    loading/storing context through a user-supplied memory object.
    """

    display_name = "ConversationChain"
    description = "Chain to have a conversation and load context from memory."

    def build_config(self):
        """Return the UI field configuration for this component.

        NOTE(review): the original config also listed a "prompt" field,
        but build() takes no prompt parameter, so the field could never
        be wired to anything — it is removed here for consistency.
        """
        return {
            "llm": {"display_name": "LLM"},
            "memory": {"display_name": "Memory"},
            "code": {"show": False},
        }

    def build(
        self,
        llm: BaseLanguageModel,
        memory: Optional[BaseMemory] = None,
    ) -> Union[Chain, Callable]:
        """Build a ConversationChain from the given LLM and optional memory.

        When no memory is provided, omit the keyword entirely so
        ConversationChain falls back to its own default memory instead of
        receiving an explicit memory=None (which its validator may reject).
        """
        if memory is None:
            return ConversationChain(llm=llm)
        return ConversationChain(llm=llm, memory=memory)
|
||||
|
|
@ -13,12 +13,13 @@ class LLMChainComponent(CustomComponent):
|
|||
"prompt": {"display_name": "Prompt"},
|
||||
"llm": {"display_name": "LLM"},
|
||||
"memory": {"display_name": "Memory"},
|
||||
"code": {"show": False},
|
||||
}
|
||||
|
||||
def build(
    self,
    prompt: PromptTemplate,
    llm: BaseLanguageModel,
    memory: Optional[BaseMemory] = None,
) -> Union[Chain, Callable]:
    """Build an LLMChain from a prompt template, an LLM, and optional memory.

    The flattened diff listed the ``memory`` parameter twice (the removed
    required form and the added defaulted form collapsed together), which
    as flat source is a duplicate-parameter syntax error. Only the new
    form is kept: ``memory`` defaults to None so it stays optional for
    callers that do not supply one.
    """
    return LLMChain(prompt=prompt, llm=llm, memory=memory)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue