fix(ApiClientFactory): adjust provider type handling for OpenAI clients (#7675)

SuYao 2025-07-08 19:21:49 +08:00 committed by GitHub
parent a343377a43
commit 14c5357fa3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 5 additions and 5 deletions

@@ -47,10 +47,9 @@ export class ApiClientFactory {
     // Then check the standard provider type
     switch (provider.type) {
       case 'openai':
-      case 'azure-openai':
-        console.log(`[ApiClientFactory] Creating OpenAIApiClient for provider: ${provider.id}`)
         instance = new OpenAIAPIClient(provider) as BaseApiClient
         break
+      case 'azure-openai':
       case 'openai-response':
         instance = new OpenAIResponseAPIClient(provider) as BaseApiClient
         break
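
For reference, a minimal sketch of the resulting switch after this change (surrounding factory code omitted, assumed unchanged): 'azure-openai' providers are now routed to OpenAIResponseAPIClient rather than OpenAIAPIClient.

    // Sketch of the post-change routing in ApiClientFactory
    switch (provider.type) {
      case 'openai':
        instance = new OpenAIAPIClient(provider) as BaseApiClient
        break
      case 'azure-openai':
      case 'openai-response':
        instance = new OpenAIResponseAPIClient(provider) as BaseApiClient
        break
    }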

@@ -2,6 +2,7 @@ import { GenericChunk } from '@renderer/aiCore/middleware/schemas'
 import { CompletionsContext } from '@renderer/aiCore/middleware/types'
 import {
   isOpenAIChatCompletionOnlyModel,
+  isOpenAILLMModel,
   isSupportedReasoningEffortOpenAIModel,
   isVisionModel
 } from '@renderer/config/models'
@@ -64,10 +65,10 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
    *
    */
   public getClient(model: Model) {
-    if (isOpenAIChatCompletionOnlyModel(model)) {
-      return this.client
-    } else {
+    if (isOpenAILLMModel(model) && !isOpenAIChatCompletionOnlyModel(model)) {
       return this
+    } else {
+      return this.client
     }
   }
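
A minimal sketch of getClient after the change, assuming the surrounding class is otherwise unchanged:

  public getClient(model: Model) {
    // Handle only models that isOpenAILLMModel accepts and that are not
    // chat-completion-only with this Responses-style client; everything
    // else falls back to the underlying chat-completions client.
    if (isOpenAILLMModel(model) && !isOpenAIChatCompletionOnlyModel(model)) {
      return this
    } else {
      return this.client
    }
  }

This inverts the previous default: before, every model except chat-completion-only ones was routed through this client, whereas now only models explicitly identified by isOpenAILLMModel are.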