cherry-studio/packages/catalog/data/providers/self-hosted.json
2025-11-24 08:55:12 +08:00

241 lines
8.1 KiB
JSON

{
"version": "2025.11.24",
"providers": [
{
"id": "hosted_vllm",
"name": "Hosted vLLM (`hosted_vllm`)",
"description": "Provider: Hosted vLLM (`hosted_vllm`)",
"authentication": "API_KEY",
"pricingModel": "PER_MODEL",
"modelRouting": "DIRECT",
"behaviors": {
"supportsCustomModels": false,
"providesModelMapping": false,
"supportsModelVersioning": true,
"providesFallbackRouting": false,
"hasAutoRetry": false,
"supportsHealthCheck": false,
"hasRealTimeMetrics": false,
"providesUsageAnalytics": false,
"supportsWebhookEvents": false,
"requiresApiKeyValidation": true,
"supportsRateLimiting": false,
"providesUsageLimits": false,
"supportsStreaming": true,
"supportsBatchProcessing": false,
"supportsModelFineTuning": false
},
"supportedEndpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
"apiCompatibility": {
"supportsArrayContent": true,
"supportsStreamOptions": true,
"supportsDeveloperRole": false,
"supportsServiceTier": false,
"supportsThinkingControl": false,
"supportsApiVersion": false,
"supportsParallelTools": true,
"supportsMultimodal": true
},
"specialConfig": {},
"documentation": "https://docs.litellm.ai/docs/providers/vllm",
"website": "https://docs.litellm.ai/docs/providers/vllm",
"deprecated": false,
"maintenanceMode": false,
"configVersion": "1.0.0",
"metadata": {
"source": "litellm-endpoints",
"tags": ["self-hosted"],
"reliability": "medium"
}
},
{
"id": "lm_studio",
"name": "LM Studio (`lm_studio`)",
"description": "Provider: LM Studio (`lm_studio`)",
"authentication": "API_KEY",
"pricingModel": "PER_MODEL",
"modelRouting": "DIRECT",
"behaviors": {
"supportsCustomModels": false,
"providesModelMapping": false,
"supportsModelVersioning": true,
"providesFallbackRouting": false,
"hasAutoRetry": false,
"supportsHealthCheck": false,
"hasRealTimeMetrics": false,
"providesUsageAnalytics": false,
"supportsWebhookEvents": false,
"requiresApiKeyValidation": true,
"supportsRateLimiting": false,
"providesUsageLimits": false,
"supportsStreaming": true,
"supportsBatchProcessing": false,
"supportsModelFineTuning": false
},
"supportedEndpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
"apiCompatibility": {
"supportsArrayContent": true,
"supportsStreamOptions": true,
"supportsDeveloperRole": false,
"supportsServiceTier": false,
"supportsThinkingControl": false,
"supportsApiVersion": false,
"supportsParallelTools": true,
"supportsMultimodal": true
},
"specialConfig": {},
"documentation": "https://docs.litellm.ai/docs/providers/lm_studio",
"website": "https://docs.litellm.ai/docs/providers/lm_studio",
"deprecated": false,
"maintenanceMode": false,
"configVersion": "1.0.0",
"metadata": {
"source": "litellm-endpoints",
"tags": ["self-hosted"],
"reliability": "medium"
}
},
{
"id": "ollama",
"name": "Ollama (`ollama`)",
"description": "Provider: Ollama (`ollama`)",
"authentication": "API_KEY",
"pricingModel": "PER_MODEL",
"modelRouting": "DIRECT",
"behaviors": {
"supportsCustomModels": false,
"providesModelMapping": false,
"supportsModelVersioning": true,
"providesFallbackRouting": false,
"hasAutoRetry": false,
"supportsHealthCheck": false,
"hasRealTimeMetrics": false,
"providesUsageAnalytics": false,
"supportsWebhookEvents": false,
"requiresApiKeyValidation": true,
"supportsRateLimiting": false,
"providesUsageLimits": false,
"supportsStreaming": true,
"supportsBatchProcessing": false,
"supportsModelFineTuning": false
},
"supportedEndpoints": ["CHAT_COMPLETIONS", "EMBEDDINGS", "MESSAGES", "RESPONSES"],
"apiCompatibility": {
"supportsArrayContent": true,
"supportsStreamOptions": true,
"supportsDeveloperRole": false,
"supportsServiceTier": false,
"supportsThinkingControl": false,
"supportsApiVersion": false,
"supportsParallelTools": true,
"supportsMultimodal": true
},
"specialConfig": {},
"documentation": "https://docs.litellm.ai/docs/providers/ollama",
"website": "https://docs.litellm.ai/docs/providers/ollama",
"deprecated": false,
"maintenanceMode": false,
"configVersion": "1.0.0",
"metadata": {
"source": "litellm-endpoints",
"tags": ["self-hosted"],
"reliability": "medium"
}
},
{
"id": "ollama_chat",
"name": "Ollama Chat (`ollama_chat`)",
"description": "Provider: Ollama Chat (`ollama_chat`)",
"authentication": "API_KEY",
"pricingModel": "PER_MODEL",
"modelRouting": "DIRECT",
"behaviors": {
"supportsCustomModels": false,
"providesModelMapping": false,
"supportsModelVersioning": true,
"providesFallbackRouting": false,
"hasAutoRetry": false,
"supportsHealthCheck": false,
"hasRealTimeMetrics": false,
"providesUsageAnalytics": false,
"supportsWebhookEvents": false,
"requiresApiKeyValidation": true,
"supportsRateLimiting": false,
"providesUsageLimits": false,
"supportsStreaming": true,
"supportsBatchProcessing": false,
"supportsModelFineTuning": false
},
"supportedEndpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
"apiCompatibility": {
"supportsArrayContent": true,
"supportsStreamOptions": true,
"supportsDeveloperRole": false,
"supportsServiceTier": false,
"supportsThinkingControl": false,
"supportsApiVersion": false,
"supportsParallelTools": true,
"supportsMultimodal": true
},
"specialConfig": {},
"documentation": "https://docs.litellm.ai/docs/providers/ollama",
"website": "https://docs.litellm.ai/docs/providers/ollama",
"deprecated": false,
"maintenanceMode": false,
"configVersion": "1.0.0",
"metadata": {
"source": "litellm-endpoints",
"tags": ["self-hosted"],
"reliability": "medium"
}
},
{
"id": "vllm",
"name": "vLLM (`vllm`)",
"description": "Provider: vLLM (`vllm`)",
"authentication": "API_KEY",
"pricingModel": "PER_MODEL",
"modelRouting": "DIRECT",
"behaviors": {
"supportsCustomModels": false,
"providesModelMapping": false,
"supportsModelVersioning": true,
"providesFallbackRouting": false,
"hasAutoRetry": false,
"supportsHealthCheck": false,
"hasRealTimeMetrics": false,
"providesUsageAnalytics": false,
"supportsWebhookEvents": false,
"requiresApiKeyValidation": true,
"supportsRateLimiting": false,
"providesUsageLimits": false,
"supportsStreaming": true,
"supportsBatchProcessing": false,
"supportsModelFineTuning": false
},
"supportedEndpoints": ["CHAT_COMPLETIONS", "MESSAGES", "RESPONSES"],
"apiCompatibility": {
"supportsArrayContent": true,
"supportsStreamOptions": true,
"supportsDeveloperRole": false,
"supportsServiceTier": false,
"supportsThinkingControl": false,
"supportsApiVersion": false,
"supportsParallelTools": true,
"supportsMultimodal": true
},
"specialConfig": {},
"documentation": "https://docs.litellm.ai/docs/providers/vllm",
"website": "https://docs.litellm.ai/docs/providers/vllm",
"deprecated": false,
"maintenanceMode": false,
"configVersion": "1.0.0",
"metadata": {
"source": "litellm-endpoints",
"tags": ["self-hosted"],
"reliability": "medium"
}
}
]
}