Feat/ppio rerank (#7567)

* feat: add PPIO rerank and embedding models

* fix: fix migrate.ts

* fix: set ppio provider type to openai

* fix: remove 'ppio' from ProviderType definition

---------

Co-authored-by: suyao <sy20010504@gmail.com>
cnJasonZ 2025-06-30 10:16:22 +08:00 committed by GitHub
parent 7b7819217f
commit 4c988ede52
6 changed files with 139 additions and 27 deletions

View File

@@ -7,6 +7,7 @@ import { GeminiAPIClient } from './gemini/GeminiAPIClient'
import { VertexAPIClient } from './gemini/VertexAPIClient'
import { OpenAIAPIClient } from './openai/OpenAIApiClient'
import { OpenAIResponseAPIClient } from './openai/OpenAIResponseAPIClient'
import { PPIOAPIClient } from './ppio/PPIOAPIClient'
/**
* Factory for creating ApiClient instances based on provider configuration
@@ -31,6 +32,11 @@ export class ApiClientFactory {
instance = new AihubmixAPIClient(provider) as BaseApiClient
return instance
}
if (provider.id === 'ppio') {
console.log(`[ApiClientFactory] Creating PPIOAPIClient for provider: ${provider.id}`)
instance = new PPIOAPIClient(provider) as BaseApiClient
return instance
}
// Then check the standard provider type
switch (provider.type) {
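For orientation, a minimal usage sketch of the new dispatch path. The factory entry point (`create`) and the import path are assumptions for illustration; only the `id === 'ppio'` branch itself comes from this diff:

```ts
import { ApiClientFactory } from '@renderer/aiCore/clients/ApiClientFactory' // path assumed
import { Provider } from '@renderer/types'

// Hypothetical provider object: only `id: 'ppio'` matters for the new branch.
const ppioProvider = {
  id: 'ppio',
  type: 'openai',
  apiKey: '',
  apiHost: 'https://api.ppinfra.com/v3/openai/'
} as Provider

// With id === 'ppio' the factory returns a PPIOAPIClient before it ever
// reaches the provider.type switch; other providers fall through as before.
const client = ApiClientFactory.create(ppioProvider) // method name assumed
```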

View File

@@ -0,0 +1,65 @@
import { isSupportedModel } from '@renderer/config/models'
import { Provider } from '@renderer/types'
import OpenAI from 'openai'
import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
export class PPIOAPIClient extends OpenAIAPIClient {
constructor(provider: Provider) {
super(provider)
}
override async listModels(): Promise<OpenAI.Models.Model[]> {
try {
const sdk = await this.getSdkInstance()
// PPIO requires three separate requests to get all model types
const [chatModelsResponse, embeddingModelsResponse, rerankerModelsResponse] = await Promise.all([
// Chat/completion models
sdk.request({
method: 'get',
path: '/models'
}),
// Embedding models
sdk.request({
method: 'get',
path: '/models?model_type=embedding'
}),
// Reranker models
sdk.request({
method: 'get',
path: '/models?model_type=reranker'
})
])
// Extract models from all responses
// @ts-ignore - PPIO response structure may not be typed
const allModels = [
...((chatModelsResponse as any)?.data || []),
...((embeddingModelsResponse as any)?.data || []),
...((rerankerModelsResponse as any)?.data || [])
]
// Process and standardize model data
const processedModels = allModels.map((model: any) => ({
id: model.id || model.name,
description: model.description || model.display_name || model.summary,
object: 'model' as const,
owned_by: model.owned_by || model.publisher || model.organization || 'ppio',
created: model.created || Date.now()
}))
// Clean up model IDs and filter supported models
processedModels.forEach((model) => {
if (model.id) {
model.id = model.id.trim()
}
})
return processedModels.filter(isSupportedModel)
} catch (error) {
console.error('Error listing PPIO models:', error)
return []
}
}
}
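A usage sketch for the new client, assuming a minimal `Provider` object; the import path and the placeholder API key are illustrative:

```ts
import { PPIOAPIClient } from '@renderer/aiCore/clients/ppio/PPIOAPIClient' // path assumed
import { Provider } from '@renderer/types'

const provider = {
  id: 'ppio',
  type: 'openai',
  apiKey: '<PPIO_API_KEY>',
  apiHost: 'https://api.ppinfra.com/v3/openai/'
} as Provider

const client = new PPIOAPIClient(provider)

// Merges the chat, embedding and reranker listings into a single
// OpenAI.Models.Model[] and drops anything isSupportedModel rejects.
const models = await client.listModels()
console.log(models.map((m) => m.id))
```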

View File

@@ -767,6 +767,30 @@ export const SYSTEM_MODELS: Record<string, Model[]> = {
}
],
ppio: [
{
id: 'deepseek/deepseek-r1-0528',
provider: 'ppio',
name: 'DeepSeek R1-0528',
group: 'deepseek'
},
{
id: 'deepseek/deepseek-v3-0324',
provider: 'ppio',
name: 'DeepSeek V3-0324',
group: 'deepseek'
},
{
id: 'deepseek/deepseek-r1-turbo',
provider: 'ppio',
name: 'DeepSeek R1 Turbo',
group: 'deepseek'
},
{
id: 'deepseek/deepseek-v3-turbo',
provider: 'ppio',
name: 'DeepSeek V3 Turbo',
group: 'deepseek'
},
{
id: 'deepseek/deepseek-r1/community',
name: 'DeepSeek: DeepSeek R1 (Community)',
@@ -780,52 +804,58 @@ export const SYSTEM_MODELS: Record<string, Model[]> = {
group: 'deepseek'
},
{
- id: 'deepseek/deepseek-r1',
+ id: 'minimaxai/minimax-m1-80k',
provider: 'ppio',
- name: 'DeepSeek R1',
- group: 'deepseek'
+ name: 'MiniMax M1-80K',
+ group: 'minimaxai'
},
{
- id: 'deepseek/deepseek-v3',
+ id: 'qwen/qwen3-235b-a22b-fp8',
provider: 'ppio',
- name: 'DeepSeek V3',
- group: 'deepseek'
- },
- {
- id: 'qwen/qwen-2.5-72b-instruct',
- provider: 'ppio',
- name: 'Qwen2.5-72B-Instruct',
+ name: 'Qwen3 235B',
group: 'qwen'
},
{
- id: 'qwen/qwen2.5-32b-instruct',
+ id: 'qwen/qwen3-32b-fp8',
provider: 'ppio',
- name: 'Qwen2.5-32B-Instruct',
+ name: 'Qwen3 32B',
group: 'qwen'
},
{
- id: 'meta-llama/llama-3.1-70b-instruct',
+ id: 'qwen/qwen3-30b-a3b-fp8',
provider: 'ppio',
- name: 'Llama-3.1-70B-Instruct',
- group: 'meta-llama'
+ name: 'Qwen3 30B',
+ group: 'qwen'
},
{
- id: 'meta-llama/llama-3.1-8b-instruct',
+ id: 'qwen/qwen2.5-vl-72b-instruct',
provider: 'ppio',
- name: 'Llama-3.1-8B-Instruct',
- group: 'meta-llama'
+ name: 'Qwen2.5 VL 72B',
+ group: 'qwen'
},
{
- id: '01-ai/yi-1.5-34b-chat',
+ id: 'qwen/qwen3-embedding-8b',
provider: 'ppio',
- name: 'Yi-1.5-34B-Chat',
- group: '01-ai'
+ name: 'Qwen3 Embedding 8B',
+ group: 'qwen'
},
{
- id: '01-ai/yi-1.5-9b-chat',
+ id: 'qwen/qwen3-reranker-8b',
provider: 'ppio',
- name: 'Yi-1.5-9B-Chat',
- group: '01-ai'
+ name: 'Qwen3 Reranker 8B',
+ group: 'qwen'
},
+ {
+ id: 'thudm/glm-z1-32b-0414',
+ provider: 'ppio',
+ name: 'GLM-Z1 32B',
+ group: 'thudm'
+ },
+ {
+ id: 'thudm/glm-z1-9b-0414',
+ provider: 'ppio',
+ name: 'GLM-Z1 9B',
+ group: 'thudm'
+ }
],
alayanew: [],
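Each entry carries only `id`, `provider`, `name` and `group`; a sketch of pulling the new embedding and reranker entries back out of the registry (the import path and the id-substring convention are assumptions, the model ids are from this diff):

```ts
import { SYSTEM_MODELS } from '@renderer/config/models' // path assumed

// Illustrative only: pick out the new non-chat PPIO entries by id convention.
const ppioModels = SYSTEM_MODELS.ppio
const embeddingIds = ppioModels.filter((m) => m.id.includes('embedding')).map((m) => m.id)
const rerankerIds = ppioModels.filter((m) => m.id.includes('reranker')).map((m) => m.id)

console.log(embeddingIds) // ['qwen/qwen3-embedding-8b']
console.log(rerankerIds) // ['qwen/qwen3-reranker-8b']
```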

View File

@@ -50,7 +50,7 @@ const persistedReducer = persistReducer(
{
key: 'cherry-studio',
storage,
- version: 116,
+ version: 117,
blacklist: ['runtime', 'messages', 'messageBlocks'],
migrate
},
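Bumping `version` to 117 is what makes redux-persist run the matching migration on the next load. A minimal sketch of that mechanism using `createMigrate`; the repo wires a custom `migrate` function instead, and the reducer below is only a placeholder:

```ts
import { persistReducer, createMigrate } from 'redux-persist'
import storage from 'redux-persist/lib/storage'

// Placeholder reducer; the real app combines many slices.
const rootReducer = (state: any = {}, _action: any) => state

const migrations = {
  // Runs once for stores persisted at a version below 117.
  '117': (state: any) => ({ ...state })
}

const persistedReducer = persistReducer(
  {
    key: 'cherry-studio',
    storage,
    version: 117,
    migrate: createMigrate(migrations, { debug: false })
  },
  rootReducer
)
```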

View File

@@ -79,7 +79,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
name: 'PPIO',
type: 'openai',
apiKey: '',
- apiHost: 'https://api.ppinfra.com/v3/openai',
+ apiHost: 'https://api.ppinfra.com/v3/openai/',
models: SYSTEM_MODELS.ppio,
isSystem: true,
enabled: false
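Since the provider's `type` stays 'openai', the entry maps straight onto an OpenAI-compatible SDK client. A sketch of that mapping; the actual construction lives in the OpenAI base client, so the helper below is only illustrative:

```ts
import OpenAI from 'openai'
import { Provider } from '@renderer/types'

// Illustrative only: derive an SDK instance from a provider entry.
function sdkFromProvider(provider: Provider): OpenAI {
  return new OpenAI({
    apiKey: provider.apiKey,
    baseURL: provider.apiHost,
    dangerouslyAllowBrowser: true // Electron renderer context
  })
}
```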

View File

@@ -1656,6 +1656,17 @@ const migrateConfig = {
state.settings.testChannel = UpgradeChannel.LATEST
}
return state
} catch (error) {
return state
}
},
'117': (state: RootState) => {
try {
updateProvider(state, 'ppio', {
models: SYSTEM_MODELS.ppio,
apiHost: 'https://api.ppinfra.com/v3/openai/'
})
return state
} catch (error) {
return state
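For context, a sketch of what an `updateProvider` helper like the one used here might look like; the state path (`state.llm.providers`) and the signature are assumptions, only the id and patch values come from this migration:

```ts
import { Provider } from '@renderer/types'
import { RootState } from '@renderer/store' // path assumed

// Hypothetical helper: merge a partial patch into the provider with the given id.
function updateProvider(state: RootState, id: string, patch: Partial<Provider>): void {
  const providers: Provider[] = (state as any).llm?.providers ?? [] // state path assumed
  const provider = providers.find((p) => p.id === id)
  if (provider) {
    Object.assign(provider, patch)
  }
}
```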