# cortex-hub/ai-hub/integration_tests/test_integration.py
import pytest
import httpx

# The tests below are coroutines, so they need an async test runner.
# This marker assumes the pytest-asyncio plugin is installed; it is redundant
# if asyncio_mode = "auto" is already set in the project's pytest configuration.
pytestmark = pytest.mark.asyncio

# The base URL for the local server started by the run_tests.sh script
BASE_URL = "http://127.0.0.1:8000"

# A common prompt to be used for the tests
TEST_PROMPT = "Explain the theory of relativity in one sentence."
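# The tests assume a server is already listening at BASE_URL (started by
# run_tests.sh). The fixture below is an optional guard sketch, not part of the
# original suite: it skips the whole module with a clear message when nothing
# is reachable, instead of letting every test fail with a connection error.
@pytest.fixture(scope="module", autouse=True)
def _require_server():
    try:
        httpx.get(f"{BASE_URL}/", timeout=2.0)
    except httpx.ConnectError:
        pytest.skip(f"No server reachable at {BASE_URL}; start it via run_tests.sh first.")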

async def test_root_endpoint():
    """Tests if the root endpoint is alive and returns the correct status."""
    async with httpx.AsyncClient() as client:
        response = await client.get(f"{BASE_URL}/")
        
        assert response.status_code == 200
        assert response.json() == {"status": "AI Model Hub is running!"}

async def test_chat_endpoint_deepseek():
    """
    Tests the /chat endpoint using the default 'deepseek' model.
    """
    url = f"{BASE_URL}/chat?model=deepseek"
    payload = {"prompt": TEST_PROMPT}

    async with httpx.AsyncClient(timeout=30.0) as client:
        response = await client.post(url, json=payload)

    # 1. Check for a successful response
    assert response.status_code == 200, f"Expected status 200, but got {response.status_code}. Response: {response.text}"
    
    # 2. Check the response structure
    data = response.json()
    assert "response" in data
    assert "model_used" in data
    
    # 3. Validate the content
    assert data["model_used"] == "deepseek"
    assert isinstance(data["response"], str)
    assert len(data["response"]) > 0
    print(f"\n✅ DeepSeek Response: {data['response'][:80]}...")


async def test_chat_endpoint_gemini():
    """
    Tests the /chat endpoint explicitly requesting the 'gemini' model.
    """
    url = f"{BASE_URL}/chat?model=gemini"
    payload = {"prompt": TEST_PROMPT}

    async with httpx.AsyncClient(timeout=30.0) as client:
        response = await client.post(url, json=payload)

    # 1. Check for a successful response
    assert response.status_code == 200, f"Expected status 200, but got {response.status_code}. Response: {response.text}"

    # 2. Check the response structure
    data = response.json()
    assert "response" in data
    assert "model_used" in data

    # 3. Validate the content
    assert data["model_used"] == "gemini"
    assert isinstance(data["response"], str)
    assert len(data["response"]) > 0
    print(f"\n✅ Gemini Response: {data['response'][:80]}...")


async def test_unsupported_model():
    """
    Tests the API's error handling for an invalid model name.
    """
    # Note: The 'model' parameter is intentionally incorrect here.
    url = f"{BASE_URL}/chat?model=unsupported_model_123"
    payload = {"prompt": TEST_PROMPT}

    async with httpx.AsyncClient() as client:
        response = await client.post(url, json=payload)
    
    # Expect a 422 Unprocessable Entity error because the 'model' query parameter
    # does not match the allowed Literal["deepseek", "gemini"] values.
    assert response.status_code == 422
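
    # For reference, the /chat route shape these tests assume looks roughly like
    # the sketch below (hypothetical names; the real ai-hub handler may differ):
    #
    #   @app.post("/chat")
    #   async def chat(request: ChatRequest, model: Literal["deepseek", "gemini"] = "deepseek"):
    #       ...
    #
    # FastAPI rejects any other value for the `model` query parameter before the
    # handler runs, which is why this test only checks for the 422 status code.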