From 3dd393b840dac7c44ae160bd3fca9b1e109b62dc Mon Sep 17 00:00:00 2001
From: SuYao
Date: Thu, 10 Jul 2025 22:17:20 +0800
Subject: [PATCH] fix: azure-openai (#7978)

---
 .../clients/openai/OpenAIResponseAPIClient.ts | 33 +++++++++++++------
 src/renderer/src/aiCore/index.ts              |  2 +-
 .../src/components/Alert/OpenAIAlert.tsx      | 16 +++++----
 3 files changed, 34 insertions(+), 17 deletions(-)

diff --git a/src/renderer/src/aiCore/clients/openai/OpenAIResponseAPIClient.ts b/src/renderer/src/aiCore/clients/openai/OpenAIResponseAPIClient.ts
index 6de5f2f876..043ca65872 100644
--- a/src/renderer/src/aiCore/clients/openai/OpenAIResponseAPIClient.ts
+++ b/src/renderer/src/aiCore/clients/openai/OpenAIResponseAPIClient.ts
@@ -39,7 +39,7 @@ import { findFileBlocks, findImageBlocks } from '@renderer/utils/messageUtils/fi
 import { buildSystemPrompt } from '@renderer/utils/prompt'
 import { MB } from '@shared/config/constant'
 import { isEmpty } from 'lodash'
-import OpenAI from 'openai'
+import OpenAI, { AzureOpenAI } from 'openai'
 import { ResponseInput } from 'openai/resources/responses/responses'
 
 import { RequestTransformer, ResponseChunkTransformer } from '../types'
@@ -66,6 +66,9 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
    */
   public getClient(model: Model) {
     if (isOpenAILLMModel(model) && !isOpenAIChatCompletionOnlyModel(model)) {
+      if (this.provider.id === 'azure-openai' || this.provider.type === 'azure-openai') {
+        this.provider = { ...this.provider, apiVersion: 'preview' }
+      }
       return this
     } else {
       return this.client
@@ -77,15 +80,25 @@
       return this.sdkInstance
     }
 
-    return new OpenAI({
-      dangerouslyAllowBrowser: true,
-      apiKey: this.apiKey,
-      baseURL: this.getBaseURL(),
-      defaultHeaders: {
-        ...this.defaultHeaders(),
-        ...this.provider.extra_headers
-      }
-    })
+    if (this.provider.id === 'azure-openai' || this.provider.type === 'azure-openai') {
+      this.provider = { ...this.provider, apiHost: `${this.provider.apiHost}/openai/v1` }
+      return new AzureOpenAI({
+        dangerouslyAllowBrowser: true,
+        apiKey: this.apiKey,
+        apiVersion: this.provider.apiVersion,
+        baseURL: this.provider.apiHost
+      })
+    } else {
+      return new OpenAI({
+        dangerouslyAllowBrowser: true,
+        apiKey: this.apiKey,
+        baseURL: this.getBaseURL(),
+        defaultHeaders: {
+          ...this.defaultHeaders(),
+          ...this.provider.extra_headers
+        }
+      })
+    }
   }
 
   override async createCompletions(
diff --git a/src/renderer/src/aiCore/index.ts b/src/renderer/src/aiCore/index.ts
index 34edc1b755..51cb84df15 100644
--- a/src/renderer/src/aiCore/index.ts
+++ b/src/renderer/src/aiCore/index.ts
@@ -80,7 +80,7 @@ export default class AiProvider {
         builder.remove(ThinkChunkMiddlewareName)
       }
       // 注意：用client判断会导致typescript类型收窄
-      if (!(this.apiClient instanceof OpenAIAPIClient)) {
+      if (!(this.apiClient instanceof OpenAIAPIClient) && !(this.apiClient instanceof OpenAIResponseAPIClient)) {
         builder.remove(ThinkingTagExtractionMiddlewareName)
       }
       if (!(this.apiClient instanceof AnthropicAPIClient) && !(this.apiClient instanceof OpenAIResponseAPIClient)) {
diff --git a/src/renderer/src/components/Alert/OpenAIAlert.tsx b/src/renderer/src/components/Alert/OpenAIAlert.tsx
index 455ab62987..07579fe87a 100644
--- a/src/renderer/src/components/Alert/OpenAIAlert.tsx
+++ b/src/renderer/src/components/Alert/OpenAIAlert.tsx
@@ -1,24 +1,28 @@
 import { Alert } from 'antd'
+import { t } from 'i18next'
 import { useEffect, useState } from 'react'
-import { useTranslation } from 'react-i18next'
 
 const LOCALSTORAGE_KEY = 'openai_alert_closed'
 
-const OpenAIAlert = () => {
-  const { t } = useTranslation()
+interface Props {
+  message?: string
+  key?: string
+}
+
+const OpenAIAlert = ({ message = t('settings.provider.openai.alert'), key = LOCALSTORAGE_KEY }: Props) => {
   const [visible, setVisible] = useState(false)
 
   useEffect(() => {
-    const closed = localStorage.getItem(LOCALSTORAGE_KEY)
+    const closed = localStorage.getItem(key)
    setVisible(!closed)
-  }, [])
+  }, [key])
 
   if (!visible) return null
 
   return (
     <Alert
       onClose={() => {
         localStorage.setItem(LOCALSTORAGE_KEY, '1')
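
For reference, below is a minimal standalone sketch (not part of the patch) of the client selection that the patched getSdkInstance() performs for the Responses API. The ProviderLike interface, the createSdkInstance helper name, the placeholder key, and the example endpoint are illustrative assumptions; the constructor options mirror the hunk above.

import OpenAI, { AzureOpenAI } from 'openai'

// Minimal stand-in for the app's Provider type (assumed shape, illustration only).
interface ProviderLike {
  id: string
  type: string
  apiKey: string
  apiHost: string
  apiVersion?: string
}

// Mirrors the branching added to getSdkInstance(): azure-openai providers get an
// AzureOpenAI client rooted at `${apiHost}/openai/v1` with the 'preview'
// api-version (as pinned in getClient()); everything else keeps a plain OpenAI client.
function createSdkInstance(provider: ProviderLike): OpenAI {
  if (provider.id === 'azure-openai' || provider.type === 'azure-openai') {
    return new AzureOpenAI({
      dangerouslyAllowBrowser: true,
      apiKey: provider.apiKey,
      apiVersion: provider.apiVersion ?? 'preview',
      baseURL: `${provider.apiHost}/openai/v1`
    })
  }
  return new OpenAI({
    dangerouslyAllowBrowser: true,
    apiKey: provider.apiKey,
    baseURL: provider.apiHost
  })
}

// Hypothetical usage; the resource name and key are made up for illustration.
const sdk = createSdkInstance({
  id: 'azure-openai',
  type: 'azure-openai',
  apiKey: '<your-azure-openai-api-key>',
  apiHost: 'https://my-resource.openai.azure.com'
})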