{
  "version": "2025.11.24",
  "providers": [
    {
      "id": "ai21",
      "name": "AI21 (`ai21`)",
      "description": "Provider: AI21 (`ai21`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/ai21",
      "website": "https://docs.litellm.ai/docs/providers/ai21",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "ai21_chat",
      "name": "AI21 Chat (`ai21_chat`)",
      "description": "Provider: AI21 Chat (`ai21_chat`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/ai21",
      "website": "https://docs.litellm.ai/docs/providers/ai21",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "aiml",
      "name": "AI/ML API (`aiml`)",
      "description": "Provider: AI/ML API (`aiml`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "IMAGE_GENERATION", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/aiml",
      "website": "https://docs.litellm.ai/docs/providers/aiml",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "anthropic",
      "name": "Anthropic (`anthropic`)",
      "description": "Provider: Anthropic (`anthropic`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": true, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": true, "has_real_time_metrics": true, "provides_usage_analytics": true, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": true, "provides_usage_limits": true, "supports_streaming": true, "supports_batch_processing": true, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/anthropic",
      "website": "https://docs.litellm.ai/docs/providers/anthropic",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["official"], "reliability": "high" }
    },
    {
      "id": "anthropic_text",
      "name": "Anthropic Text (`anthropic_text`)",
      "description": "Provider: Anthropic Text (`anthropic_text`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": true, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": true, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/anthropic",
      "website": "https://docs.litellm.ai/docs/providers/anthropic",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "assemblyai",
      "name": "AssemblyAI (`assemblyai`)",
      "description": "Provider: AssemblyAI (`assemblyai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["AUDIO_TRANSCRIPT", "CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/pass_through/assembly_ai",
      "website": "https://docs.litellm.ai/docs/pass_through/assembly_ai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "auto_router",
      "name": "Auto Router (`auto_router`)",
      "description": "Provider: Auto Router (`auto_router`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/proxy/auto_routing",
      "website": "https://docs.litellm.ai/docs/proxy/auto_routing",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "azure",
      "name": "Azure (`azure`)",
      "description": "Provider: Azure (`azure`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": true, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": true, "supports_model_fine_tuning": false },
      "supported_endpoints": ["AUDIO_GENERATION", "AUDIO_TRANSCRIPT", "CHAT_COMPLETIONS", "EMBEDDINGS", "IMAGE_GENERATION", "MESSAGES", "MODERATIONS", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/azure",
      "website": "https://docs.litellm.ai/docs/providers/azure",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "azure_ai",
      "name": "Azure AI (`azure_ai`)",
      "description": "Provider: Azure AI (`azure_ai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": true, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": true, "supports_model_fine_tuning": false },
      "supported_endpoints": ["AUDIO_GENERATION", "AUDIO_TRANSCRIPT", "CHAT_COMPLETIONS", "EMBEDDINGS", "IMAGE_GENERATION", "MESSAGES", "MODERATIONS", "OCR", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/azure_ai",
      "website": "https://docs.litellm.ai/docs/providers/azure_ai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "azure_ai/doc-intelligence",
      "name": "Azure AI Document Intelligence (`azure_ai/doc-intelligence`)",
      "description": "Provider: Azure AI Document Intelligence (`azure_ai/doc-intelligence`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": false, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["OCR"],
      "api_compatibility": { "supports_array_content": false, "supports_stream_options": false, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": false, "supports_multimodal": false },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/azure_document_intelligence",
      "website": "https://docs.litellm.ai/docs/providers/azure_document_intelligence",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "azure_text",
      "name": "Azure Text (`azure_text`)",
      "description": "Provider: Azure Text (`azure_text`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": true, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": true, "supports_model_fine_tuning": false },
      "supported_endpoints": ["AUDIO_GENERATION", "AUDIO_TRANSCRIPT", "CHAT_COMPLETIONS", "MESSAGES", "MODERATIONS", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/azure",
      "website": "https://docs.litellm.ai/docs/providers/azure",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "baseten",
      "name": "Baseten (`baseten`)",
      "description": "Provider: Baseten (`baseten`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/baseten",
      "website": "https://docs.litellm.ai/docs/providers/baseten",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "bedrock",
      "name": "AWS - Bedrock (`bedrock`)",
      "description": "Provider: AWS - Bedrock (`bedrock`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RERANK", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/bedrock",
      "website": "https://docs.litellm.ai/docs/providers/bedrock",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "bytez",
      "name": "Bytez (`bytez`)",
      "description": "Provider: Bytez (`bytez`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/bytez",
      "website": "https://docs.litellm.ai/docs/providers/bytez",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "cerebras",
      "name": "Cerebras (`cerebras`)",
      "description": "Provider: Cerebras (`cerebras`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/cerebras",
      "website": "https://docs.litellm.ai/docs/providers/cerebras",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "clarifai",
      "name": "Clarifai (`clarifai`)",
      "description": "Provider: Clarifai (`clarifai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/clarifai",
      "website": "https://docs.litellm.ai/docs/providers/clarifai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "cloudflare",
      "name": "Cloudflare AI Workers (`cloudflare`)",
      "description": "Provider: Cloudflare AI Workers (`cloudflare`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/cloudflare_workers",
      "website": "https://docs.litellm.ai/docs/providers/cloudflare_workers",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "codestral",
      "name": "Codestral (`codestral`)",
      "description": "Provider: Codestral (`codestral`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/codestral",
      "website": "https://docs.litellm.ai/docs/providers/codestral",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "cohere",
      "name": "Cohere (`cohere`)",
      "description": "Provider: Cohere (`cohere`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RERANK", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/cohere",
      "website": "https://docs.litellm.ai/docs/providers/cohere",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "cohere_chat",
      "name": "Cohere Chat (`cohere_chat`)",
      "description": "Provider: Cohere Chat (`cohere_chat`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/cohere",
      "website": "https://docs.litellm.ai/docs/providers/cohere",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "cometapi",
      "name": "CometAPI (`cometapi`)",
      "description": "Provider: CometAPI (`cometapi`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/cometapi",
      "website": "https://docs.litellm.ai/docs/providers/cometapi",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "compactifai",
      "name": "CompactifAI (`compactifai`)",
      "description": "Provider: CompactifAI (`compactifai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/compactifai",
      "website": "https://docs.litellm.ai/docs/providers/compactifai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "custom",
      "name": "Custom (`custom`)",
      "description": "Provider: Custom (`custom`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/custom_llm_server",
      "website": "https://docs.litellm.ai/docs/providers/custom_llm_server",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "custom_openai",
      "name": "Custom OpenAI (`custom_openai`)",
      "description": "Provider: Custom OpenAI (`custom_openai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": true, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": true, "supports_model_fine_tuning": false },
      "supported_endpoints": ["AUDIO_GENERATION", "AUDIO_TRANSCRIPT", "CHAT_COMPLETIONS", "MESSAGES", "MODERATIONS", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/openai_compatible",
      "website": "https://docs.litellm.ai/docs/providers/openai_compatible",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "dashscope",
      "name": "Dashscope (`dashscope`)",
      "description": "Provider: Dashscope (`dashscope`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/dashscope",
      "website": "https://docs.litellm.ai/docs/providers/dashscope",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "databricks",
      "name": "Databricks (`databricks`)",
      "description": "Provider: Databricks (`databricks`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/databricks",
      "website": "https://docs.litellm.ai/docs/providers/databricks",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "dataforseo",
      "name": "DataForSEO (`dataforseo`)",
      "description": "Provider: DataForSEO (`dataforseo`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": false, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["WEB_SEARCH"],
      "api_compatibility": { "supports_array_content": false, "supports_stream_options": false, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": false, "supports_multimodal": false },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/search/dataforseo",
      "website": "https://docs.litellm.ai/docs/search/dataforseo",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "datarobot",
      "name": "DataRobot (`datarobot`)",
      "description": "Provider: DataRobot (`datarobot`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/datarobot",
      "website": "https://docs.litellm.ai/docs/providers/datarobot",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "deepgram",
      "name": "Deepgram (`deepgram`)",
      "description": "Provider: Deepgram (`deepgram`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["AUDIO_TRANSCRIPT", "CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/deepgram",
      "website": "https://docs.litellm.ai/docs/providers/deepgram",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "deepinfra",
      "name": "DeepInfra (`deepinfra`)",
      "description": "Provider: DeepInfra (`deepinfra`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/deepinfra",
      "website": "https://docs.litellm.ai/docs/providers/deepinfra",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "deepseek",
      "name": "Deepseek (`deepseek`)",
      "description": "Provider: Deepseek (`deepseek`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/deepseek",
      "website": "https://docs.litellm.ai/docs/providers/deepseek",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "docker_model_runner",
      "name": "Docker Model Runner (`docker_model_runner`)",
      "description": "Provider: Docker Model Runner (`docker_model_runner`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/docker_model_runner",
      "website": "https://docs.litellm.ai/docs/providers/docker_model_runner",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "elevenlabs",
      "name": "ElevenLabs (`elevenlabs`)",
      "description": "Provider: ElevenLabs (`elevenlabs`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["AUDIO_GENERATION", "CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/elevenlabs",
      "website": "https://docs.litellm.ai/docs/providers/elevenlabs",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "empower",
      "name": "Empower (`empower`)",
      "description": "Provider: Empower (`empower`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/empower",
      "website": "https://docs.litellm.ai/docs/providers/empower",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "exa_ai",
      "name": "Exa AI (`exa_ai`)",
      "description": "Provider: Exa AI (`exa_ai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": false, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["WEB_SEARCH"],
      "api_compatibility": { "supports_array_content": false, "supports_stream_options": false, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": false, "supports_multimodal": false },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/search/exa_ai",
      "website": "https://docs.litellm.ai/docs/search/exa_ai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
    {
      "id": "fal_ai",
      "name": "Fal AI (`fal_ai`)",
      "description": "Provider: Fal AI (`fal_ai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": { "supports_custom_models": false, "provides_model_mapping": false, "supports_model_versioning": true, "provides_fallback_routing": false, "has_auto_retry": false, "supports_health_check": false, "has_real_time_metrics": false, "provides_usage_analytics": false, "supports_webhook_events": false, "requires_api_key_validation": true, "supports_rate_limiting": false, "provides_usage_limits": false, "supports_streaming": true, "supports_batch_processing": false, "supports_model_fine_tuning": false },
      "supported_endpoints": ["CHAT_COMPLETIONS", "IMAGE_GENERATION", "MESSAGES", "RESPONSES"],
      "api_compatibility": { "supports_array_content": true, "supports_stream_options": true, "supports_developer_role": false, "supports_service_tier": false, "supports_thinking_control": false, "supports_api_version": false, "supports_parallel_tools": true, "supports_multimodal": true },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/fal_ai",
      "website": "https://docs.litellm.ai/docs/providers/fal_ai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": { "source": "litellm-endpoints", "tags": ["cloud"], "reliability": "medium" }
    },
{
|
|
"id": "featherless_ai",
|
|
"name": "Featherless AI (`featherless_ai`)",
|
|
"description": "Provider: Featherless AI (`featherless_ai`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/featherless_ai",
|
|
"website": "https://docs.litellm.ai/docs/providers/featherless_ai",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "firecrawl",
|
|
"name": "Firecrawl (`firecrawl`)",
|
|
"description": "Provider: Firecrawl (`firecrawl`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": false,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["WEB_SEARCH"],
|
|
"api_compatibility": {
|
|
"supports_array_content": false,
|
|
"supports_stream_options": false,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": false,
|
|
"supports_multimodal": false
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/search/firecrawl",
|
|
"website": "https://docs.litellm.ai/docs/search/firecrawl",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "fireworks_ai",
|
|
"name": "Fireworks AI (`fireworks_ai`)",
|
|
"description": "Provider: Fireworks AI (`fireworks_ai`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/fireworks_ai",
|
|
"website": "https://docs.litellm.ai/docs/providers/fireworks_ai",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "friendliai",
|
|
"name": "FriendliAI (`friendliai`)",
|
|
"description": "Provider: FriendliAI (`friendliai`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/friendliai",
|
|
"website": "https://docs.litellm.ai/docs/providers/friendliai",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "galadriel",
|
|
"name": "Galadriel (`galadriel`)",
|
|
"description": "Provider: Galadriel (`galadriel`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/galadriel",
|
|
"website": "https://docs.litellm.ai/docs/providers/galadriel",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "gemini",
|
|
"name": "Google AI Studio - Gemini (`gemini`)",
|
|
"description": "Provider: Google AI Studio - Gemini (`gemini`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/gemini",
|
|
"website": "https://docs.litellm.ai/docs/providers/gemini",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "github",
|
|
"name": "GitHub Models (`github`)",
|
|
"description": "Provider: GitHub Models (`github`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/github",
|
|
"website": "https://docs.litellm.ai/docs/providers/github",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "github_copilot",
|
|
"name": "GitHub Copilot (`github_copilot`)",
|
|
"description": "Provider: GitHub Copilot (`github_copilot`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/github_copilot",
|
|
"website": "https://docs.litellm.ai/docs/providers/github_copilot",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "gradient_ai",
|
|
"name": "GradientAI (`gradient_ai`)",
|
|
"description": "Provider: GradientAI (`gradient_ai`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/gradient_ai",
|
|
"website": "https://docs.litellm.ai/docs/providers/gradient_ai",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "groq",
|
|
"name": "Groq AI (`groq`)",
|
|
"description": "Provider: Groq AI (`groq`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/groq",
|
|
"website": "https://docs.litellm.ai/docs/providers/groq",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "heroku",
|
|
"name": "Heroku (`heroku`)",
|
|
"description": "Provider: Heroku (`heroku`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/heroku",
|
|
"website": "https://docs.litellm.ai/docs/providers/heroku",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "hosted_vllm",
|
|
"name": "Hosted VLLM (`hosted_vllm`)",
|
|
"description": "Provider: Hosted VLLM (`hosted_vllm`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/vllm",
|
|
"website": "https://docs.litellm.ai/docs/providers/vllm",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "huggingface",
|
|
"name": "Huggingface (`huggingface`)",
|
|
"description": "Provider: Huggingface (`huggingface`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RERANK", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/huggingface",
|
|
"website": "https://docs.litellm.ai/docs/providers/huggingface",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "hyperbolic",
|
|
"name": "Hyperbolic (`hyperbolic`)",
|
|
"description": "Provider: Hyperbolic (`hyperbolic`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/hyperbolic",
|
|
"website": "https://docs.litellm.ai/docs/providers/hyperbolic",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "infinity",
|
|
"name": "Infinity (`infinity`)",
|
|
"description": "Provider: Infinity (`infinity`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": false,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["EMBEDDINGS"],
|
|
"api_compatibility": {
|
|
"supports_array_content": false,
|
|
"supports_stream_options": false,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": false,
|
|
"supports_multimodal": false
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/infinity",
|
|
"website": "https://docs.litellm.ai/docs/providers/infinity",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "jina_ai",
|
|
"name": "Jina AI (`jina_ai`)",
|
|
"description": "Provider: Jina AI (`jina_ai`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": false,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["EMBEDDINGS"],
|
|
"api_compatibility": {
|
|
"supports_array_content": false,
|
|
"supports_stream_options": false,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": false,
|
|
"supports_multimodal": false
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/jina_ai",
|
|
"website": "https://docs.litellm.ai/docs/providers/jina_ai",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "lambda_ai",
|
|
"name": "Lambda AI (`lambda_ai`)",
|
|
"description": "Provider: Lambda AI (`lambda_ai`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/lambda_ai",
|
|
"website": "https://docs.litellm.ai/docs/providers/lambda_ai",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "lemonade",
|
|
"name": "Lemonade (`lemonade`)",
|
|
"description": "Provider: Lemonade (`lemonade`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/lemonade",
|
|
"website": "https://docs.litellm.ai/docs/providers/lemonade",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "litellm_proxy",
|
|
"name": "LiteLLM Proxy (`litellm_proxy`)",
|
|
"description": "Provider: LiteLLM Proxy (`litellm_proxy`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "IMAGE_GENERATION", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/litellm_proxy",
|
|
"website": "https://docs.litellm.ai/docs/providers/litellm_proxy",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "llamafile",
|
|
"name": "Llamafile (`llamafile`)",
|
|
"description": "Provider: Llamafile (`llamafile`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/llamafile",
|
|
"website": "https://docs.litellm.ai/docs/providers/llamafile",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "lm_studio",
|
|
"name": "LM Studio (`lm_studio`)",
|
|
"description": "Provider: LM Studio (`lm_studio`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/lm_studio",
|
|
"website": "https://docs.litellm.ai/docs/providers/lm_studio",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "maritalk",
|
|
"name": "Maritalk (`maritalk`)",
|
|
"description": "Provider: Maritalk (`maritalk`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/maritalk",
|
|
"website": "https://docs.litellm.ai/docs/providers/maritalk",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "meta_llama",
|
|
"name": "Meta - Llama API (`meta_llama`)",
|
|
"description": "Provider: Meta - Llama API (`meta_llama`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/meta_llama",
|
|
"website": "https://docs.litellm.ai/docs/providers/meta_llama",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "mistral",
|
|
"name": "Mistral AI API (`mistral`)",
|
|
"description": "Provider: Mistral AI API (`mistral`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "OCR", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/mistral",
|
|
"website": "https://docs.litellm.ai/docs/providers/mistral",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "moonshot",
|
|
"name": "Moonshot (`moonshot`)",
|
|
"description": "Provider: Moonshot (`moonshot`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/moonshot",
|
|
"website": "https://docs.litellm.ai/docs/providers/moonshot",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "morph",
|
|
"name": "Morph (`morph`)",
|
|
"description": "Provider: Morph (`morph`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/morph",
|
|
"website": "https://docs.litellm.ai/docs/providers/morph",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "nebius",
|
|
"name": "Nebius AI Studio (`nebius`)",
|
|
"description": "Provider: Nebius AI Studio (`nebius`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/nebius",
|
|
"website": "https://docs.litellm.ai/docs/providers/nebius",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "nlp_cloud",
|
|
"name": "NLP Cloud (`nlp_cloud`)",
|
|
"description": "Provider: NLP Cloud (`nlp_cloud`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/nlp_cloud",
|
|
"website": "https://docs.litellm.ai/docs/providers/nlp_cloud",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "novita",
|
|
"name": "Novita AI (`novita`)",
|
|
"description": "Provider: Novita AI (`novita`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://novita.ai/models/llm?utm_source=github_litellm&utm_medium=github_readme&utm_campaign=github_link",
|
|
"website": "https://novita.ai/models/llm?utm_source=github_litellm&utm_medium=github_readme&utm_campaign=github_link",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "nscale",
|
|
"name": "Nscale (`nscale`)",
|
|
"description": "Provider: Nscale (`nscale`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/nscale",
|
|
"website": "https://docs.litellm.ai/docs/providers/nscale",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "nvidia_nim",
|
|
"name": "Nvidia NIM (`nvidia_nim`)",
|
|
"description": "Provider: Nvidia NIM (`nvidia_nim`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/nvidia_nim",
|
|
"website": "https://docs.litellm.ai/docs/providers/nvidia_nim",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "oci",
|
|
"name": "OCI (`oci`)",
|
|
"description": "Provider: OCI (`oci`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/oci",
|
|
"website": "https://docs.litellm.ai/docs/providers/oci",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "ollama",
|
|
"name": "Ollama (`ollama`)",
|
|
"description": "Provider: Ollama (`ollama`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/ollama",
|
|
"website": "https://docs.litellm.ai/docs/providers/ollama",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "ollama_chat",
|
|
"name": "Ollama Chat (`ollama_chat`)",
|
|
"description": "Provider: Ollama Chat (`ollama_chat`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": false,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": false,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/ollama",
|
|
"website": "https://docs.litellm.ai/docs/providers/ollama",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
{
|
|
"id": "oobabooga",
|
|
"name": "Oobabooga (`oobabooga`)",
|
|
"description": "Provider: Oobabooga (`oobabooga`)",
|
|
"authentication": "API_KEY",
|
|
"pricing_model": "PER_MODEL",
|
|
"model_routing": "DIRECT",
|
|
"behaviors": {
|
|
"supports_custom_models": true,
|
|
"provides_model_mapping": false,
|
|
"supports_model_versioning": true,
|
|
"provides_fallback_routing": false,
|
|
"has_auto_retry": false,
|
|
"supports_health_check": false,
|
|
"has_real_time_metrics": false,
|
|
"provides_usage_analytics": false,
|
|
"supports_webhook_events": false,
|
|
"requires_api_key_validation": true,
|
|
"supports_rate_limiting": false,
|
|
"provides_usage_limits": false,
|
|
"supports_streaming": true,
|
|
"supports_batch_processing": true,
|
|
"supports_model_fine_tuning": false
|
|
},
|
|
"supported_endpoints": [
|
|
"AUDIO_GENERATION",
|
|
"AUDIO_TRANSCRIPT",
|
|
"CHAT_COMPLETIONS",
|
|
"MESSAGES",
|
|
"MODERATIONS",
|
|
"RESPONSES"
|
|
],
|
|
"api_compatibility": {
|
|
"supports_array_content": true,
|
|
"supports_stream_options": true,
|
|
"supports_developer_role": false,
|
|
"supports_service_tier": false,
|
|
"supports_thinking_control": false,
|
|
"supports_api_version": false,
|
|
"supports_parallel_tools": true,
|
|
"supports_multimodal": true
|
|
},
|
|
"special_config": {},
|
|
"documentation": "https://docs.litellm.ai/docs/providers/openai_compatible",
|
|
"website": "https://docs.litellm.ai/docs/providers/openai_compatible",
|
|
"deprecated": false,
|
|
"maintenance_mode": false,
|
|
"config_version": "1.0.0",
|
|
"metadata": {
|
|
"source": "litellm-endpoints",
|
|
"tags": ["cloud"],
|
|
"reliability": "medium"
|
|
}
|
|
},
|
|
    {
      "id": "openai",
      "name": "OpenAI (`openai`)",
      "description": "Provider: OpenAI (`openai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": true,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": true,
        "has_real_time_metrics": true,
        "provides_usage_analytics": true,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": true,
        "provides_usage_limits": true,
        "supports_streaming": true,
        "supports_batch_processing": true,
        "supports_model_fine_tuning": true
      },
      "supported_endpoints": [
        "AUDIO_GENERATION",
        "AUDIO_TRANSCRIPT",
        "CHAT_COMPLETIONS",
        "EMBEDDINGS",
        "IMAGE_GENERATION",
        "MESSAGES",
        "MODERATIONS",
        "RESPONSES"
      ],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": true,
        "supports_service_tier": true,
        "supports_thinking_control": false,
        "supports_api_version": true,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/openai",
      "website": "https://docs.litellm.ai/docs/providers/openai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["official"],
        "reliability": "high"
      }
    },
    {
      "id": "openai_like",
      "name": "OpenAI-like (`openai_like`)",
      "description": "Provider: OpenAI-like (`openai_like`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": false,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["EMBEDDINGS"],
      "api_compatibility": {
        "supports_array_content": false,
        "supports_stream_options": false,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": false,
        "supports_multimodal": false
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/openai_compatible",
      "website": "https://docs.litellm.ai/docs/providers/openai_compatible",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "openrouter",
      "name": "OpenRouter (`openrouter`)",
      "description": "Provider: OpenRouter (`openrouter`)",
      "authentication": "API_KEY",
      "pricing_model": "UNIFIED",
      "model_routing": "INTELLIGENT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": true,
        "supports_model_versioning": true,
        "provides_fallback_routing": true,
        "has_auto_retry": true,
        "supports_health_check": false,
        "has_real_time_metrics": true,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/openrouter",
      "website": "https://docs.litellm.ai/docs/providers/openrouter",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["proxy"],
        "reliability": "medium"
      }
    },
    {
      "id": "ovhcloud",
      "name": "OVHCloud AI Endpoints (`ovhcloud`)",
      "description": "Provider: OVHCloud AI Endpoints (`ovhcloud`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/ovhcloud",
      "website": "https://docs.litellm.ai/docs/providers/ovhcloud",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "parallel_ai",
      "name": "Parallel AI (`parallel_ai`)",
      "description": "Provider: Parallel AI (`parallel_ai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": false,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["WEB_SEARCH"],
      "api_compatibility": {
        "supports_array_content": false,
        "supports_stream_options": false,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": false,
        "supports_multimodal": false
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/search/parallel_ai",
      "website": "https://docs.litellm.ai/docs/search/parallel_ai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "perplexity",
      "name": "Perplexity AI (`perplexity`)",
      "description": "Provider: Perplexity AI (`perplexity`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES", "WEB_SEARCH"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/perplexity",
      "website": "https://docs.litellm.ai/docs/providers/perplexity",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "petals",
      "name": "Petals (`petals`)",
      "description": "Provider: Petals (`petals`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/petals",
      "website": "https://docs.litellm.ai/docs/providers/petals",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "predibase",
      "name": "Predibase (`predibase`)",
      "description": "Provider: Predibase (`predibase`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/predibase",
      "website": "https://docs.litellm.ai/docs/providers/predibase",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "recraft",
      "name": "Recraft (`recraft`)",
      "description": "Provider: Recraft (`recraft`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": false,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["IMAGE_GENERATION"],
      "api_compatibility": {
        "supports_array_content": false,
        "supports_stream_options": false,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": false,
        "supports_multimodal": false
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/recraft",
      "website": "https://docs.litellm.ai/docs/providers/recraft",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "replicate",
      "name": "Replicate (`replicate`)",
      "description": "Provider: Replicate (`replicate`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/replicate",
      "website": "https://docs.litellm.ai/docs/providers/replicate",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "runwayml",
      "name": "RunwayML (`runwayml`)",
      "description": "Provider: RunwayML (`runwayml`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": false,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["AUDIO_GENERATION", "IMAGE_GENERATION"],
      "api_compatibility": {
        "supports_array_content": false,
        "supports_stream_options": false,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": false,
        "supports_multimodal": false
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/runwayml/videos",
      "website": "https://docs.litellm.ai/docs/providers/runwayml/videos",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "sagemaker",
      "name": "AWS - Sagemaker (`sagemaker`)",
      "description": "Provider: AWS - Sagemaker (`sagemaker`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/aws_sagemaker",
      "website": "https://docs.litellm.ai/docs/providers/aws_sagemaker",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "sagemaker_chat",
      "name": "Sagemaker Chat (`sagemaker_chat`)",
      "description": "Provider: Sagemaker Chat (`sagemaker_chat`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/aws_sagemaker",
      "website": "https://docs.litellm.ai/docs/providers/aws_sagemaker",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "sambanova",
      "name": "Sambanova (`sambanova`)",
      "description": "Provider: Sambanova (`sambanova`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/sambanova",
      "website": "https://docs.litellm.ai/docs/providers/sambanova",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "searxng",
      "name": "SearXNG (`searxng`)",
      "description": "Provider: SearXNG (`searxng`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": false,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["WEB_SEARCH"],
      "api_compatibility": {
        "supports_array_content": false,
        "supports_stream_options": false,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": false,
        "supports_multimodal": false
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/search/searxng",
      "website": "https://docs.litellm.ai/docs/search/searxng",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "snowflake",
      "name": "Snowflake (`snowflake`)",
      "description": "Provider: Snowflake (`snowflake`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/snowflake",
      "website": "https://docs.litellm.ai/docs/providers/snowflake",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "tavily",
      "name": "Tavily (`tavily`)",
      "description": "Provider: Tavily (`tavily`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": false,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["WEB_SEARCH"],
      "api_compatibility": {
        "supports_array_content": false,
        "supports_stream_options": false,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": false,
        "supports_multimodal": false
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/search/tavily",
      "website": "https://docs.litellm.ai/docs/search/tavily",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "text-completion-codestral",
      "name": "Text Completion Codestral (`text-completion-codestral`)",
      "description": "Provider: Text Completion Codestral (`text-completion-codestral`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/codestral",
      "website": "https://docs.litellm.ai/docs/providers/codestral",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "text-completion-openai",
      "name": "Text Completion OpenAI (`text-completion-openai`)",
      "description": "Provider: Text Completion OpenAI (`text-completion-openai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": true,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": true,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": [
        "AUDIO_GENERATION",
        "AUDIO_TRANSCRIPT",
        "CHAT_COMPLETIONS",
        "MESSAGES",
        "MODERATIONS",
        "RESPONSES"
      ],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/text_completion_openai",
      "website": "https://docs.litellm.ai/docs/providers/text_completion_openai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "together_ai",
      "name": "Together AI (`together_ai`)",
      "description": "Provider: Together AI (`together_ai`)",
      "authentication": "API_KEY",
      "pricing_model": "UNIFIED",
      "model_routing": "INTELLIGENT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": true,
        "supports_model_versioning": true,
        "provides_fallback_routing": true,
        "has_auto_retry": true,
        "supports_health_check": false,
        "has_real_time_metrics": true,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/togetherai",
      "website": "https://docs.litellm.ai/docs/providers/togetherai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["proxy"],
        "reliability": "medium"
      }
    },
    {
      "id": "topaz",
      "name": "Topaz (`topaz`)",
      "description": "Provider: Topaz (`topaz`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/topaz",
      "website": "https://docs.litellm.ai/docs/providers/topaz",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "triton",
      "name": "Triton (`triton`)",
      "description": "Provider: Triton (`triton`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/triton-inference-server",
      "website": "https://docs.litellm.ai/docs/providers/triton-inference-server",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "v0",
      "name": "V0 (`v0`)",
      "description": "Provider: V0 (`v0`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/v0",
      "website": "https://docs.litellm.ai/docs/providers/v0",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "vercel_ai_gateway",
      "name": "Vercel AI Gateway (`vercel_ai_gateway`)",
      "description": "Provider: Vercel AI Gateway (`vercel_ai_gateway`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/vercel_ai_gateway",
      "website": "https://docs.litellm.ai/docs/providers/vercel_ai_gateway",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "vertex_ai",
      "name": "Google - Vertex AI (`vertex_ai`)",
      "description": "Provider: Google - Vertex AI (`vertex_ai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "IMAGE_GENERATION", "MESSAGES", "OCR", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/vertex",
      "website": "https://docs.litellm.ai/docs/providers/vertex",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "vllm",
      "name": "VLLM (`vllm`)",
      "description": "Provider: VLLM (`vllm`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/vllm",
      "website": "https://docs.litellm.ai/docs/providers/vllm",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "volcengine",
      "name": "Volcengine (`volcengine`)",
      "description": "Provider: Volcengine (`volcengine`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/volcano",
      "website": "https://docs.litellm.ai/docs/providers/volcano",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "voyage",
      "name": "Voyage AI (`voyage`)",
      "description": "Provider: Voyage AI (`voyage`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": false,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["EMBEDDINGS"],
      "api_compatibility": {
        "supports_array_content": false,
        "supports_stream_options": false,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": false,
        "supports_multimodal": false
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/voyage",
      "website": "https://docs.litellm.ai/docs/providers/voyage",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "wandb",
      "name": "WandB Inference (`wandb`)",
      "description": "Provider: WandB Inference (`wandb`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/wandb_inference",
      "website": "https://docs.litellm.ai/docs/providers/wandb_inference",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "watsonx",
      "name": "IBM - Watsonx.ai (`watsonx`)",
      "description": "Provider: IBM - Watsonx.ai (`watsonx`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/watsonx",
      "website": "https://docs.litellm.ai/docs/providers/watsonx",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "watsonx_text",
      "name": "Watsonx Text (`watsonx_text`)",
      "description": "Provider: Watsonx Text (`watsonx_text`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/watsonx",
      "website": "https://docs.litellm.ai/docs/providers/watsonx",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "xai",
      "name": "xAI (`xai`)",
      "description": "Provider: xAI (`xai`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": true,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
      "api_compatibility": {
        "supports_array_content": true,
        "supports_stream_options": true,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": true,
        "supports_multimodal": true
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/xai",
      "website": "https://docs.litellm.ai/docs/providers/xai",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    },
    {
      "id": "xinference",
      "name": "Xinference (`xinference`)",
      "description": "Provider: Xinference (`xinference`)",
      "authentication": "API_KEY",
      "pricing_model": "PER_MODEL",
      "model_routing": "DIRECT",
      "behaviors": {
        "supports_custom_models": false,
        "provides_model_mapping": false,
        "supports_model_versioning": true,
        "provides_fallback_routing": false,
        "has_auto_retry": false,
        "supports_health_check": false,
        "has_real_time_metrics": false,
        "provides_usage_analytics": false,
        "supports_webhook_events": false,
        "requires_api_key_validation": true,
        "supports_rate_limiting": false,
        "provides_usage_limits": false,
        "supports_streaming": false,
        "supports_batch_processing": false,
        "supports_model_fine_tuning": false
      },
      "supported_endpoints": ["EMBEDDINGS"],
      "api_compatibility": {
        "supports_array_content": false,
        "supports_stream_options": false,
        "supports_developer_role": false,
        "supports_service_tier": false,
        "supports_thinking_control": false,
        "supports_api_version": false,
        "supports_parallel_tools": false,
        "supports_multimodal": false
      },
      "special_config": {},
      "documentation": "https://docs.litellm.ai/docs/providers/xinference",
      "website": "https://docs.litellm.ai/docs/providers/xinference",
      "deprecated": false,
      "maintenance_mode": false,
      "config_version": "1.0.0",
      "metadata": {
        "source": "litellm-endpoints",
        "tags": ["cloud"],
        "reliability": "medium"
      }
    }
  ]
}