add agent configurability
This commit is contained in:
parent
e5da0e962b
commit
05f7483a22
2 changed files with 11 additions and 1 deletions
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "vocode"
|
||||
version = "0.1.32"
|
||||
version = "0.1.33"
|
||||
description = "The all-in-one voice SDK"
|
||||
authors = ["Ajay Raj <ajay@vocode.dev>"]
|
||||
license = "MIT License"
|
||||
|
|
|
|||
|
|
@ -5,6 +5,9 @@ from vocode.models.message import BaseMessage
|
|||
from .model import TypedModel, BaseModel
|
||||
|
||||
FILLER_AUDIO_DEFAULT_SILENCE_THRESHOLD_SECONDS = 0.5
|
||||
LLM_AGENT_DEFAULT_TEMPERATURE = 1.0
|
||||
LLM_AGENT_DEFAULT_MAX_TOKENS = 256
|
||||
LLM_AGENT_DEFAULT_MODEL_NAME = "text-curie-001"
|
||||
|
||||
|
||||
class AgentType(str, Enum):
|
||||
|
|
@ -33,17 +36,24 @@ class AgentConfig(TypedModel, type=AgentType.BASE):
|
|||
class LLMAgentConfig(AgentConfig, type=AgentType.LLM):
    """Configuration for the completion-style LLM agent.

    Sampling fields fall back to the module-level ``LLM_AGENT_DEFAULT_*``
    constants when the caller does not override them.
    """

    # Preamble text supplied to the model ahead of the conversation.
    prompt_preamble: str
    # Optional first prompt the agent is primed with; None disables it.
    # NOTE(review): exact semantics live in the agent implementation — confirm.
    expected_first_prompt: Optional[str] = None
    # Completion model identifier (defaults to "text-curie-001").
    model_name: str = LLM_AGENT_DEFAULT_MODEL_NAME
    # Sampling temperature forwarded to the model.
    temperature: float = LLM_AGENT_DEFAULT_TEMPERATURE
    # Cap on tokens generated per response.
    max_tokens: int = LLM_AGENT_DEFAULT_MAX_TOKENS
|
||||
|
||||
|
||||
class ChatGPTAlphaAgentConfig(AgentConfig, type=AgentType.CHAT_GPT_ALPHA):
    """Configuration for the alpha ChatGPT agent.

    Mirrors ``LLMAgentConfig`` minus ``model_name``; sampling fields fall
    back to the module-level ``LLM_AGENT_DEFAULT_*`` constants.
    """

    # Preamble text supplied to the model ahead of the conversation.
    prompt_preamble: str
    # Optional first prompt the agent is primed with; None disables it.
    expected_first_prompt: Optional[str] = None
    # Sampling temperature forwarded to the model.
    temperature: float = LLM_AGENT_DEFAULT_TEMPERATURE
    # Cap on tokens generated per response.
    max_tokens: int = LLM_AGENT_DEFAULT_MAX_TOKENS
|
||||
|
||||
|
||||
class ChatGPTAgentConfig(AgentConfig, type=AgentType.CHAT_GPT):
    """Configuration for the ChatGPT agent.

    Sampling fields fall back to the module-level ``LLM_AGENT_DEFAULT_*``
    constants when the caller does not override them.
    """

    # Preamble text supplied to the model ahead of the conversation.
    prompt_preamble: str
    # Optional first prompt the agent is primed with; None disables it.
    expected_first_prompt: Optional[str] = None
    # Whether the agent streams/generates its own responses.
    # NOTE(review): inferred from the name — confirm against the agent code.
    generate_responses: bool = False
    # Sampling temperature forwarded to the model.
    temperature: float = LLM_AGENT_DEFAULT_TEMPERATURE
    # Cap on tokens generated per response.
    max_tokens: int = LLM_AGENT_DEFAULT_MAX_TOKENS
|
||||
|
||||
|
||||
class InformationRetrievalAgentConfig(
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue