| File | Last modified |
|---|---|
| README.md | 14 hours ago |
| __init__.py | 14 hours ago |
| endpoint_factory.py | 14 hours ago |
| endpoints.json | 14 hours ago |
| main.py | 14 hours ago |
| utils.py | 14 hours ago |
This module provides a unified interface for container file operations across multiple LLM providers (OpenAI, Azure OpenAI, etc.).
endpoints.json # Declarative endpoint definitions
↓
endpoint_factory.py # Auto-generates SDK functions
↓
container_handler.py # Generic HTTP handler
↓
BaseContainerConfig # Provider-specific transformations
├── OpenAIContainerConfig
└── AzureContainerConfig (example)
| File | Purpose |
|---|---|
| endpoints.json | Single source of truth - Defines all container file endpoints |
| endpoint_factory.py | Auto-generates SDK functions (list_container_files, etc.) |
| main.py | Core container operations (create, list, retrieve, delete containers) |
| utils.py | Request parameter utilities |
To add a new container file endpoint (e.g., get_container_file_content):
Add the endpoint definition to endpoints.json:
{
"name": "get_container_file_content",
"async_name": "aget_container_file_content",
"path": "/containers/{container_id}/files/{file_id}/content",
"method": "GET",
"path_params": ["container_id", "file_id"],
"query_params": [],
"response_type": "ContainerFileContentResponse"
}
In litellm/types/containers/main.py:
class ContainerFileContentResponse(BaseModel):
"""Response for file content download."""
content: bytes
# ... other fields
In litellm/llms/custom_httpx/container_handler.py, add to RESPONSE_TYPES:
RESPONSE_TYPES = {
# ... existing types
"ContainerFileContentResponse": ContainerFileContentResponse,
}
In litellm/router.py, add the call_type to the factory_function Literal and _init_containers_api_endpoints condition.
In litellm/proxy/route_llm_request.py, add to the route mappings and skip-model-routing lists.
If your endpoint has a new combination of path parameters, add a handler in litellm/proxy/container_endpoints/handler_factory.py:
elif path_params == ["container_id", "file_id", "new_param"]:
async def handler(...):
# handler implementation
Create litellm/llms/azure/containers/transformation.py:
from typing import Dict, Optional, Tuple, Any
import httpx
from litellm.llms.base_llm.containers.transformation import BaseContainerConfig
from litellm.types.containers.main import (
ContainerFileListResponse,
ContainerFileObject,
DeleteContainerFileResponse,
)
from litellm.types.router import GenericLiteLLMParams
from litellm.secret_managers.main import get_secret_str
class AzureContainerConfig(BaseContainerConfig):
"""Configuration class for Azure OpenAI container API."""
def get_supported_openai_params(self) -> list:
return ["name", "expires_after", "file_ids", "extra_headers"]
def map_openai_params(
self,
container_create_optional_params,
drop_params: bool,
) -> Dict:
return dict(container_create_optional_params)
def validate_environment(
self,
headers: dict,
api_key: Optional[str] = None,
) -> dict:
"""Azure uses api-key header instead of Bearer token."""
import litellm
api_key = (
api_key
or litellm.azure_key
or get_secret_str("AZURE_API_KEY")
)
headers["api-key"] = api_key
return headers
def get_complete_url(
self,
api_base: Optional[str],
litellm_params: dict,
) -> str:
"""
Azure format:
https://{resource}.openai.azure.com/openai/containers?api-version=2024-xx
"""
if api_base is None:
raise ValueError("api_base is required for Azure")
api_version = litellm_params.get("api_version", "2024-02-15-preview")
return f"{api_base.rstrip('/')}/openai/containers?api-version={api_version}"
# Implement remaining abstract methods from BaseContainerConfig:
# - transform_container_create_request
# - transform_container_create_response
# - transform_container_list_request
# - transform_container_list_response
# - transform_container_retrieve_request
# - transform_container_retrieve_response
# - transform_container_delete_request
# - transform_container_delete_response
# - transform_container_file_list_request
# - transform_container_file_list_response
In litellm/utils.py, find ProviderConfigManager.get_provider_container_config() and add:
@staticmethod
def get_provider_container_config(
provider: LlmProviders,
) -> Optional[BaseContainerConfig]:
if provider == LlmProviders.OPENAI:
from litellm.llms.openai.containers.transformation import OpenAIContainerConfig
return OpenAIContainerConfig()
elif provider == LlmProviders.AZURE:
from litellm.llms.azure.containers.transformation import AzureContainerConfig
return AzureContainerConfig()
return None
# Create container via Azure
curl -X POST "http://localhost:4000/v1/containers" \
-H "Authorization: Bearer sk-1234" \
-H "custom-llm-provider: azure" \
-H "Content-Type: application/json" \
-d '{"name": "My Azure Container"}'
# List container files via Azure
curl -X GET "http://localhost:4000/v1/containers/cntr_123/files" \
-H "Authorization: Bearer sk-1234" \
-H "custom-llm-provider: azure"
How provider routing works: the provider is read from the custom-llm-provider header/query/body, then ProviderConfigManager.get_provider_container_config(provider) selects the provider config, whose get_complete_url() and validate_environment() are applied to the outgoing request.

Run the container API tests:
cd <path-to-litellm-repo>
python -m pytest tests/test_litellm/containers/ -v
Test via proxy:
# Start proxy
cd litellm/proxy && python proxy_cli.py --config proxy_config.yaml --port 4000
# Test endpoints
curl -X GET "http://localhost:4000/v1/containers/cntr_123/files" \
-H "Authorization: Bearer sk-1234"
| Endpoint | Method | Path |
|---|---|---|
| List container files | GET | /v1/containers/{container_id}/files |
| Retrieve container file | GET | /v1/containers/{container_id}/files/{file_id} |
| Delete container file | DELETE | /v1/containers/{container_id}/files/{file_id} |
See endpoints.json for the complete list.