revert: openai compatible type

This commit is contained in:
kangfenmao 2025-05-12 21:56:05 +08:00
parent 483ea46440
commit 396b400004
15 changed files with 2131 additions and 2113 deletions

View File

@@ -2410,7 +2410,7 @@ export function isWebSearchModel(model: Model): boolean {
return CLAUDE_SUPPORTED_WEBSEARCH_REGEX.test(model.id)
}
if (provider.type === 'openai') {
if (provider.type === 'openai-response') {
if (
isOpenAILLMModel(model) &&
!isTextToImageModel(model) &&
@@ -2441,7 +2441,7 @@ export function isWebSearchModel(model: Model): boolean {
return models.includes(model?.id)
}
if (provider?.type === 'openai-compatible') {
if (provider?.type === 'openai') {
if (GEMINI_SEARCH_MODELS.includes(model?.id) || isOpenAIWebSearch(model)) {
return true
}

View File

@@ -213,7 +213,7 @@ export async function upgradeToV7(tx: Transaction): Promise<void> {
hasCitationData = true
citationDataToCreate.response = {
results: oldMessage.metadata.annotations,
source: WebSearchSource.OPENAI
source: WebSearchSource.OPENAI_RESPONSE
}
}
if (oldMessage.metadata?.citations?.length) {

View File

@@ -49,8 +49,8 @@ const MainTextBlock: React.FC<Props> = ({ block, citationBlockId, role, mentions
}
switch (block.citationReferences[0].citationBlockSource) {
case WebSearchSource.OPENAI_COMPATIBLE:
case WebSearchSource.OPENAI: {
case WebSearchSource.OPENAI:
case WebSearchSource.OPENAI_RESPONSE: {
formattedCitations.forEach((citation) => {
const citationNum = citation.number
const supData = {

View File

@@ -16,7 +16,7 @@ interface Props {
const PopupContainer: React.FC<Props> = ({ provider, resolve }) => {
const [open, setOpen] = useState(true)
const [name, setName] = useState(provider?.name || '')
const [type, setType] = useState<ProviderType>(provider?.type || 'openai-compatible')
const [type, setType] = useState<ProviderType>(provider?.type || 'openai')
const [logo, setLogo] = useState<string | null>(null)
const [dropdownOpen, setDropdownOpen] = useState(false)
const { t } = useTranslation()
@@ -52,7 +52,7 @@ const PopupContainer: React.FC<Props> = ({ provider, resolve }) => {
const onCancel = () => {
setOpen(false)
resolve({ name: '', type: 'openai-compatible' })
resolve({ name: '', type: 'openai' })
}
const onClose = () => {
@@ -189,8 +189,8 @@ const PopupContainer: React.FC<Props> = ({ provider, resolve }) => {
value={type}
onChange={setType}
options={[
{ label: 'OpenAI-Compatible', value: 'openai-compatible' },
{ label: 'OpenAI-Response', value: 'openai' },
{ label: 'OpenAI', value: 'openai' },
{ label: 'OpenAI-Response', value: 'openai-response' },
{ label: 'Gemini', value: 'gemini' },
{ label: 'Anthropic', value: 'anthropic' },
{ label: 'Azure OpenAI', value: 'azure-openai' }

View File

@@ -262,7 +262,7 @@ const ProviderSetting: FC<Props> = ({ provider: _provider }) => {
if (apiHost.endsWith('#')) {
return apiHost.replace('#', '')
}
if (provider.type === 'openai-compatible') {
if (provider.type === 'openai') {
return formatApiHost(apiHost) + 'chat/completions'
}
return formatApiHost(apiHost) + 'responses'

View File

@@ -8,8 +8,8 @@ import { CompletionsParams } from '.'
import AnthropicProvider from './AnthropicProvider'
import BaseProvider from './BaseProvider'
import GeminiProvider from './GeminiProvider'
import OpenAICompatibleProvider from './OpenAICompatibleProvider'
import OpenAIProvider from './OpenAIProvider'
import OpenAIResponseProvider from './OpenAIResponseProvider'
/**
* AihubmixProvider -
@@ -26,8 +26,8 @@ export default class AihubmixProvider extends BaseProvider {
// 初始化各个提供商
this.providers.set('claude', new AnthropicProvider(provider))
this.providers.set('gemini', new GeminiProvider({ ...provider, apiHost: 'https://aihubmix.com/gemini' }))
this.providers.set('openai', new OpenAIProvider(provider))
this.providers.set('default', new OpenAICompatibleProvider(provider))
this.providers.set('openai', new OpenAIResponseProvider(provider))
this.providers.set('default', new OpenAIProvider(provider))
// 设置默认提供商
this.defaultProvider = this.providers.get('default')!

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -4,25 +4,26 @@ import AihubmixProvider from './AihubmixProvider'
import AnthropicProvider from './AnthropicProvider'
import BaseProvider from './BaseProvider'
import GeminiProvider from './GeminiProvider'
import OpenAICompatibleProvider from './OpenAICompatibleProvider'
import OpenAIProvider from './OpenAIProvider'
import OpenAIResponseProvider from './OpenAIResponseProvider'
export default class ProviderFactory {
static create(provider: Provider): BaseProvider {
if (provider.id === 'aihubmix') {
return new AihubmixProvider(provider)
}
switch (provider.type) {
case 'openai':
return new OpenAIProvider(provider)
case 'openai-compatible':
if (provider.id === 'aihubmix') {
return new AihubmixProvider(provider)
}
return new OpenAICompatibleProvider(provider)
case 'openai-response':
return new OpenAIResponseProvider(provider)
case 'anthropic':
return new AnthropicProvider(provider)
case 'gemini':
return new GeminiProvider(provider)
default:
return new OpenAICompatibleProvider(provider)
return new OpenAIProvider(provider)
}
}
}

View File

@@ -46,7 +46,7 @@ const persistedReducer = persistReducer(
{
key: 'cherry-studio',
storage,
version: 99,
version: 100,
blacklist: ['runtime', 'messages', 'messageBlocks'],
migrate
},

View File

@@ -28,7 +28,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'silicon',
name: 'Silicon',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.siliconflow.cn',
models: SYSTEM_MODELS.silicon,
@@ -38,7 +38,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'aihubmix',
name: 'AiHubMix',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://aihubmix.com',
models: SYSTEM_MODELS.aihubmix,
@@ -48,7 +48,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'ocoolai',
name: 'ocoolAI',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.ocoolai.com',
models: SYSTEM_MODELS.ocoolai,
@@ -58,7 +58,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'deepseek',
name: 'deepseek',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.deepseek.com',
models: SYSTEM_MODELS.deepseek,
@@ -68,7 +68,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'openrouter',
name: 'OpenRouter',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://openrouter.ai/api/v1/',
models: SYSTEM_MODELS.openrouter,
@@ -78,7 +78,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'ppio',
name: 'PPIO',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.ppinfra.com/v3/openai',
models: SYSTEM_MODELS.ppio,
@@ -88,7 +88,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'alayanew',
name: 'AlayaNew',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://deepseek.alayanew.com',
models: SYSTEM_MODELS.alayanew,
@@ -98,7 +98,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'infini',
name: 'Infini',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://cloud.infini-ai.com/maas',
models: SYSTEM_MODELS.infini,
@@ -108,7 +108,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'qiniu',
name: 'Qiniu',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.qnaigc.com',
models: SYSTEM_MODELS.qiniu,
@@ -118,7 +118,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'dmxapi',
name: 'DMXAPI',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://www.dmxapi.cn',
models: SYSTEM_MODELS.dmxapi,
@@ -128,7 +128,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'o3',
name: 'O3',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.o3.fan',
models: SYSTEM_MODELS.o3,
@@ -138,7 +138,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'ollama',
name: 'Ollama',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'http://localhost:11434',
models: SYSTEM_MODELS.ollama,
@@ -148,7 +148,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'lmstudio',
name: 'LM Studio',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'http://localhost:1234',
models: SYSTEM_MODELS.lmstudio,
@@ -168,7 +168,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'openai',
name: 'OpenAI',
type: 'openai',
type: 'openai-response',
apiKey: '',
apiHost: 'https://api.openai.com',
models: SYSTEM_MODELS.openai,
@@ -178,7 +178,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'azure-openai',
name: 'Azure OpenAI',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: '',
apiVersion: '',
@@ -199,7 +199,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'zhipu',
name: 'ZhiPu',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://open.bigmodel.cn/api/paas/v4/',
models: SYSTEM_MODELS.zhipu,
@@ -209,7 +209,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'github',
name: 'Github Models',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://models.inference.ai.azure.com/',
models: SYSTEM_MODELS.github,
@@ -219,7 +219,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'copilot',
name: 'Github Copilot',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.githubcopilot.com/',
models: SYSTEM_MODELS.copilot,
@@ -230,7 +230,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'yi',
name: 'Yi',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.lingyiwanwu.com',
models: SYSTEM_MODELS.yi,
@@ -240,7 +240,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'moonshot',
name: 'Moonshot AI',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.moonshot.cn',
models: SYSTEM_MODELS.moonshot,
@@ -250,7 +250,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'baichuan',
name: 'BAICHUAN AI',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.baichuan-ai.com',
models: SYSTEM_MODELS.baichuan,
@@ -260,7 +260,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'dashscope',
name: 'Bailian',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://dashscope.aliyuncs.com/compatible-mode/v1/',
models: SYSTEM_MODELS.bailian,
@@ -270,7 +270,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'stepfun',
name: 'StepFun',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.stepfun.com',
models: SYSTEM_MODELS.stepfun,
@@ -280,7 +280,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'doubao',
name: 'doubao',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://ark.cn-beijing.volces.com/api/v3/',
models: SYSTEM_MODELS.doubao,
@@ -290,7 +290,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'minimax',
name: 'MiniMax',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.minimax.chat/v1/',
models: SYSTEM_MODELS.minimax,
@@ -300,7 +300,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'groq',
name: 'Groq',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.groq.com/openai',
models: SYSTEM_MODELS.groq,
@@ -310,7 +310,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'together',
name: 'Together',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.together.xyz',
models: SYSTEM_MODELS.together,
@@ -320,7 +320,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'fireworks',
name: 'Fireworks',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.fireworks.ai/inference',
models: SYSTEM_MODELS.fireworks,
@@ -330,7 +330,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'zhinao',
name: 'zhinao',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.360.cn',
models: SYSTEM_MODELS.zhinao,
@@ -340,7 +340,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'hunyuan',
name: 'hunyuan',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.hunyuan.cloud.tencent.com',
models: SYSTEM_MODELS.hunyuan,
@@ -350,7 +350,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'nvidia',
name: 'nvidia',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://integrate.api.nvidia.com',
models: SYSTEM_MODELS.nvidia,
@@ -360,7 +360,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'grok',
name: 'Grok',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.x.ai',
models: SYSTEM_MODELS.grok,
@@ -370,7 +370,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'hyperbolic',
name: 'Hyperbolic',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.hyperbolic.xyz',
models: SYSTEM_MODELS.hyperbolic,
@@ -380,7 +380,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'mistral',
name: 'Mistral',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.mistral.ai',
models: SYSTEM_MODELS.mistral,
@@ -390,7 +390,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'jina',
name: 'Jina',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.jina.ai',
models: SYSTEM_MODELS.jina,
@@ -400,7 +400,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'gitee-ai',
name: 'gitee ai',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://ai.gitee.com',
models: SYSTEM_MODELS['gitee-ai'],
@@ -410,7 +410,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'perplexity',
name: 'Perplexity',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.perplexity.ai/',
models: SYSTEM_MODELS.perplexity,
@@ -420,7 +420,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'modelscope',
name: 'ModelScope',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api-inference.modelscope.cn/v1/',
models: SYSTEM_MODELS.modelscope,
@@ -430,7 +430,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'xirang',
name: 'Xirang',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://wishub-x1.ctyun.cn',
models: SYSTEM_MODELS.xirang,
@@ -440,7 +440,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'tencent-cloud-ti',
name: 'Tencent Cloud TI',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.lkeap.cloud.tencent.com',
models: SYSTEM_MODELS['tencent-cloud-ti'],
@@ -450,7 +450,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'baidu-cloud',
name: 'Baidu Cloud',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://qianfan.baidubce.com/v2/',
models: SYSTEM_MODELS['baidu-cloud'],
@@ -460,7 +460,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'gpustack',
name: 'GPUStack',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: '',
models: SYSTEM_MODELS.gpustack,
@@ -470,7 +470,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
{
id: 'voyageai',
name: 'VoyageAI',
type: 'openai-compatible',
type: 'openai',
apiKey: '',
apiHost: 'https://api.voyageai.com',
models: SYSTEM_MODELS.voyageai,

View File

@@ -101,7 +101,7 @@ const formatCitationsFromBlock = (block: CitationMessageBlock | undefined): Cita
})) || []
break
}
case WebSearchSource.OPENAI:
case WebSearchSource.OPENAI_RESPONSE:
formattedCitations =
(block.response.results as OpenAI.Responses.ResponseOutputText.URLCitation[])?.map((result, index) => {
let hostname: string | undefined
@ -120,7 +120,7 @@ const formatCitationsFromBlock = (block: CitationMessageBlock | undefined): Cita
}
}) || []
break
case WebSearchSource.OPENAI_COMPATIBLE:
case WebSearchSource.OPENAI:
formattedCitations =
(block.response.results as OpenAI.Chat.Completions.ChatCompletionMessage.Annotation[])?.map((url, index) => {
const urlCitation = url.url_citation

View File

@@ -1257,6 +1257,7 @@ const migrateConfig = {
try {
state.llm.providers.forEach((provider) => {
if (provider.type === 'openai' && provider.id !== 'openai') {
// @ts-ignore eslint-disable-next-line
provider.type = 'openai-compatible'
}
})
@ -1296,6 +1297,22 @@ const migrateConfig = {
} catch (error) {
return state
}
},
'100': (state: RootState) => {
try {
state.llm.providers.forEach((provider) => {
// @ts-ignore eslint-disable-next-line
if (['openai-compatible', 'openai'].includes(provider.type)) {
provider.type = 'openai'
}
if (provider.id === 'openai') {
provider.type = 'openai-response'
}
})
return state
} catch (error) {
return state
}
}
}

View File

@@ -162,7 +162,7 @@ export type Provider = {
notes?: string
}
export type ProviderType = 'openai' | 'openai-compatible' | 'anthropic' | 'gemini' | 'qwenlm' | 'azure-openai'
export type ProviderType = 'openai' | 'openai-response' | 'anthropic' | 'gemini' | 'qwenlm' | 'azure-openai'
export type ModelType = 'text' | 'vision' | 'embedding' | 'reasoning' | 'function_calling' | 'web_search'
@@ -462,7 +462,7 @@ export type WebSearchResults =
export enum WebSearchSource {
WEBSEARCH = 'websearch',
OPENAI = 'openai',
OPENAI_COMPATIBLE = 'openai-compatible',
OPENAI_RESPONSE = 'openai-response',
OPENROUTER = 'openrouter',
ANTHROPIC = 'anthropic',
GEMINI = 'gemini',