fix: Support Ollama models in CrewAI Components (#4936)

* fix: Support ollama in crewai

* fix: Correct name for type of model
This commit is contained in:
Eric Hare 2024-12-03 16:32:57 -08:00 committed by GitHub
commit 975164ebcb
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@ -70,6 +70,10 @@ def convert_llm(llm: Any, excluded_keys=None) -> LLM:
msg = "Could not find model name in the LLM object"
raise ValueError(msg)
# Normalize Ollama model names with the "ollama/" prefix. TODO: handle all litellm-supported models
if llm.dict().get("_type") == "chat-ollama":
model_name = f"ollama/{model_name}"
# Retrieve the API Key from the LLM
if excluded_keys is None:
excluded_keys = {"model", "model_name", "_type", "api_key"}