🔧 fix(test_agents_template.py): update test functions to include logged_in_headers parameter to test authenticated requests
🔧 fix(test_chains_template.py): update test functions to include logged_in_headers parameter to test authenticated requests 🔧 fix(test_endpoints.py): update test_get_all function to include logged_in_headers parameter to test authenticated requests 🔧 fix(test_llms_template.py): update test functions to include logged_in_headers parameter to test authenticated requests 🔧 fix(test_prompts_template.py): update test functions to include logged_in_headers parameter to test authenticated requests 🔧 fix(test_vectorstore_template.py): update test functions to include logged_in_headers parameter to test authenticated requests
This commit is contained in:
parent
1364fa0e0f
commit
812864eded
6 changed files with 38 additions and 38 deletions
|
|
@@ -1,8 +1,8 @@
|
|||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
def test_zero_shot_agent(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_zero_shot_agent(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
agents = json_response["agents"]
|
||||
|
|
@@ -113,8 +113,8 @@ def test_zero_shot_agent(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_json_agent(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_json_agent(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
agents = json_response["agents"]
|
||||
|
|
@@ -152,8 +152,8 @@ def test_json_agent(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_csv_agent(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_csv_agent(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
agents = json_response["agents"]
|
||||
|
|
@@ -195,8 +195,8 @@ def test_csv_agent(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_initialize_agent(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_initialize_agent(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
agents = json_response["agents"]
|
||||
|
|
|
|||
|
|
@@ -1,8 +1,8 @@
|
|||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
# def test_chains_settings(client: TestClient):
|
||||
# response = client.get("api/v1/all")
|
||||
# def test_chains_settings(client: TestClient, logged_in_headers):
|
||||
# response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
# assert response.status_code == 200
|
||||
# json_response = response.json()
|
||||
# chains = json_response["chains"]
|
||||
|
|
@@ -10,8 +10,8 @@ from fastapi.testclient import TestClient
|
|||
|
||||
|
||||
# Test the ConversationChain object
|
||||
def test_conversation_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_conversation_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@@ -102,8 +102,8 @@ def test_conversation_chain(client: TestClient):
|
|||
)
|
||||
|
||||
|
||||
def test_llm_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_llm_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@@ -173,8 +173,8 @@ def test_llm_chain(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_llm_checker_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_llm_checker_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@@ -207,8 +207,8 @@ def test_llm_checker_chain(client: TestClient):
|
|||
assert chain["description"] == ""
|
||||
|
||||
|
||||
def test_llm_math_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_llm_math_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@@ -299,8 +299,8 @@ def test_llm_math_chain(client: TestClient):
|
|||
)
|
||||
|
||||
|
||||
def test_series_character_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_series_character_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@@ -367,8 +367,8 @@ def test_series_character_chain(client: TestClient):
|
|||
)
|
||||
|
||||
|
||||
def test_mid_journey_prompt_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_mid_journey_prompt_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@@ -408,8 +408,8 @@ def test_mid_journey_prompt_chain(client: TestClient):
|
|||
)
|
||||
|
||||
|
||||
def test_time_travel_guide_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_time_travel_guide_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
|
|||
|
|
@@ -83,8 +83,8 @@ PROMPT_REQUEST = {
|
|||
}
|
||||
|
||||
|
||||
def test_get_all(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_get_all(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
# We need to test the custom nodes
|
||||
|
|
|
|||
|
|
@@ -2,9 +2,9 @@ from fastapi.testclient import TestClient
|
|||
from langflow.services.utils import get_settings_manager
|
||||
|
||||
|
||||
def test_llms_settings(client: TestClient):
|
||||
def test_llms_settings(client: TestClient, logged_in_headers):
|
||||
settings_manager = get_settings_manager()
|
||||
response = client.get("api/v1/all")
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
llms = json_response["llms"]
|
||||
|
|
@@ -103,8 +103,8 @@ def test_llms_settings(client: TestClient):
|
|||
# }
|
||||
|
||||
|
||||
def test_openai(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_openai(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
language_models = json_response["llms"]
|
||||
|
|
@@ -369,8 +369,8 @@ def test_openai(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_chat_open_ai(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_chat_open_ai(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
language_models = json_response["llms"]
|
||||
|
|
|
|||
|
|
@@ -2,17 +2,17 @@ from fastapi.testclient import TestClient
|
|||
from langflow.services.utils import get_settings_manager
|
||||
|
||||
|
||||
def test_prompts_settings(client: TestClient):
|
||||
def test_prompts_settings(client: TestClient, logged_in_headers):
|
||||
settings_manager = get_settings_manager()
|
||||
response = client.get("api/v1/all")
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
prompts = json_response["prompts"]
|
||||
assert set(prompts.keys()) == set(settings_manager.settings.PROMPTS)
|
||||
|
||||
|
||||
def test_prompt_template(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_prompt_template(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
prompts = json_response["prompts"]
|
||||
|
|
|
|||
|
|
@@ -4,9 +4,9 @@ from langflow.services.utils import get_settings_manager
|
|||
|
||||
# check that all agents are in settings.agents
|
||||
# are in json_response["agents"]
|
||||
def test_vectorstores_settings(client: TestClient):
|
||||
def test_vectorstores_settings(client: TestClient, logged_in_headers):
|
||||
settings_manager = get_settings_manager()
|
||||
response = client.get("api/v1/all")
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
vectorstores = json_response["vectorstores"]
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue