diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py
index 0c97b56a2..283f44406 100644
--- a/src/backend/langflow/utils/constants.py
+++ b/src/backend/langflow/utils/constants.py
@@ -6,16 +6,14 @@ OPENAI_MODELS = [
     "text-ada-001",
 ]

 CHAT_OPENAI_MODELS = [
-    "gpt-3.5-turbo-0613",
-    "gpt-3.5-turbo",
-    "gpt-3.5-turbo-16k-0613",
-    "gpt-3.5-turbo-16k",
-    "gpt-4-0613",
-    "gpt-4-32k-0613",
+    "gpt-4-1106-preview",
     "gpt-4",
     "gpt-4-32k",
+    "gpt-3.5-turbo",
+    "gpt-3.5-turbo-16k",
 ]
+
 ANTHROPIC_MODELS = [
     # largest model, ideal for a wide range of more complex tasks.
     "claude-v1",
diff --git a/tests/test_llms_template.py b/tests/test_llms_template.py
index 0a30a825e..78131cb05 100644
--- a/tests/test_llms_template.py
+++ b/tests/test_llms_template.py
@@ -309,17 +309,14 @@ def test_chat_open_ai(client: TestClient, logged_in_headers):
                 "placeholder": "",
                 "show": True,
                 "multiline": False,
-                "value": "gpt-3.5-turbo-0613",
+                "value": "gpt-4-1106-preview",
                 "password": False,
                 "options": [
-                    "gpt-3.5-turbo-0613",
-                    "gpt-3.5-turbo",
-                    "gpt-3.5-turbo-16k-0613",
-                    "gpt-3.5-turbo-16k",
-                    "gpt-4-0613",
-                    "gpt-4-32k-0613",
+                    "gpt-4-1106-preview",
                     "gpt-4",
                     "gpt-4-32k",
+                    "gpt-3.5-turbo",
+                    "gpt-3.5-turbo-16k",
                 ],
                 "name": "model_name",
                 "type": "str",