From 975164ebcb684d705e4959f3cd99b8690fe01dd3 Mon Sep 17 00:00:00 2001
From: Eric Hare
Date: Tue, 3 Dec 2024 16:32:57 -0800
Subject: [PATCH] fix: Support Ollama models in CrewAI Components (#4936)

* fix: Support ollama in crewai

* fix: Correct name for type of model
---
 src/backend/base/langflow/base/agents/crewai/crew.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/backend/base/langflow/base/agents/crewai/crew.py b/src/backend/base/langflow/base/agents/crewai/crew.py
index cb2cdf0b0..7c6247a6f 100644
--- a/src/backend/base/langflow/base/agents/crewai/crew.py
+++ b/src/backend/base/langflow/base/agents/crewai/crew.py
@@ -70,6 +70,10 @@ def convert_llm(llm: Any, excluded_keys=None) -> LLM:
         msg = "Could not find model name in the LLM object"
         raise ValueError(msg)
 
+    # Normalize Ollama with prefix TODO: Handle all litellm supported models
+    if llm.dict().get("_type") == "chat-ollama":
+        model_name = f"ollama/{model_name}"
+
     # Retrieve the API Key from the LLM
     if excluded_keys is None:
         excluded_keys = {"model", "model_name", "_type", "api_key"}