Added HuggingFaceHub and fixed the stream parameter + added summarization as a task option

This commit is contained in:
GMS 2023-06-02 21:22:21 +01:00
commit 90d2517fc1
3 changed files with 12 additions and 3 deletions

View file

@@ -55,6 +55,7 @@ llms:
- LlamaCpp
- CTransformers
- Cohere
- HuggingFaceHub
memories:
- ConversationBufferMemory
- ConversationSummaryMemory

View file

@@ -44,7 +44,11 @@ def try_setting_streaming_options(langchain_object, websocket):
langchain_object.llm_chain, "llm"
):
llm = langchain_object.llm_chain.llm
if isinstance(llm, BaseLanguageModel) and hasattr(llm, "streaming"):
llm.streaming = True
if isinstance(llm, BaseLanguageModel):
if hasattr(llm, "streaming"):
llm.streaming = True
if hasattr(llm, "stream"):
llm.stream = True
return langchain_object

View file

@@ -34,7 +34,11 @@ class LLMFrontendNode(FrontendNode):
field.required = True
field.show = True
field.is_list = True
field.options = ["text-generation", "text2text-generation"]
field.options = [
"text-generation",
"text2text-generation",
"summarization"
]
field.advanced = True
if display_name := display_names_dict.get(field.name):