diff --git a/src/backend/langflow/components/chains/ConversationChain.py b/src/backend/langflow/components/chains/ConversationChain.py
new file mode 100644
index 000000000..530c6fb01
--- /dev/null
+++ b/src/backend/langflow/components/chains/ConversationChain.py
@@ -0,0 +1,24 @@
+from langflow import CustomComponent
+from langchain.chains import ConversationChain
+from typing import Optional, Union, Callable
+from langflow.field_typing import BaseLanguageModel, BaseMemory, Chain
+
+
+class ConversationChainComponent(CustomComponent):
+    display_name = "ConversationChain"
+    description = "Chain to have a conversation and load context from memory."
+
+    def build_config(self):
+        return {
+            "prompt": {"display_name": "Prompt"},
+            "llm": {"display_name": "LLM"},
+            "memory": {"display_name": "Memory"},
+            "code": {"show": False},
+        }
+
+    def build(
+        self,
+        llm: BaseLanguageModel,
+        memory: Optional[BaseMemory] = None,
+    ) -> Union[Chain, Callable]:
+        return ConversationChain(llm=llm, memory=memory)
diff --git a/src/backend/langflow/components/chains/LLMChain.py b/src/backend/langflow/components/chains/LLMChain.py
index 5056c7f43..12b38a27a 100644
--- a/src/backend/langflow/components/chains/LLMChain.py
+++ b/src/backend/langflow/components/chains/LLMChain.py
@@ -13,12 +13,13 @@
             "prompt": {"display_name": "Prompt"},
             "llm": {"display_name": "LLM"},
             "memory": {"display_name": "Memory"},
+            "code": {"show": False},
         }
 
     def build(
         self,
         prompt: PromptTemplate,
         llm: BaseLanguageModel,
-        memory: Optional[BaseMemory],
+        memory: Optional[BaseMemory] = None,
    ) -> Union[Chain, Callable]:
         return LLMChain(prompt=prompt, llm=llm, memory=memory)