revert: openai compatible type

kangfenmao 2025-05-12 21:56:05 +08:00 committed by 亢奋猫
parent c8b272c7b9
commit 1d4c3a783e
15 changed files with 2126 additions and 2108 deletions

View File

@@ -2410,7 +2410,7 @@ export function isWebSearchModel(model: Model): boolean {
     return CLAUDE_SUPPORTED_WEBSEARCH_REGEX.test(model.id)
   }
-  if (provider.type === 'openai') {
+  if (provider.type === 'openai-response') {
     if (
       isOpenAILLMModel(model) &&
       !isTextToImageModel(model) &&
@@ -2441,7 +2441,7 @@ export function isWebSearchModel(model: Model): boolean {
     return models.includes(model?.id)
   }
-  if (provider?.type === 'openai-compatible') {
+  if (provider?.type === 'openai') {
     if (GEMINI_SEARCH_MODELS.includes(model?.id) || isOpenAIWebSearch(model)) {
       return true
     }

View File

@@ -213,7 +213,7 @@ export async function upgradeToV7(tx: Transaction): Promise<void> {
         hasCitationData = true
         citationDataToCreate.response = {
           results: oldMessage.metadata.annotations,
-          source: WebSearchSource.OPENAI
+          source: WebSearchSource.OPENAI_RESPONSE
         }
       }
       if (oldMessage.metadata?.citations?.length) {

View File

@@ -49,8 +49,8 @@ const MainTextBlock: React.FC<Props> = ({ block, citationBlockId, role, mentions
       }
       switch (block.citationReferences[0].citationBlockSource) {
-        case WebSearchSource.OPENAI_COMPATIBLE:
-        case WebSearchSource.OPENAI: {
+        case WebSearchSource.OPENAI:
+        case WebSearchSource.OPENAI_RESPONSE: {
           formattedCitations.forEach((citation) => {
             const citationNum = citation.number
             const supData = {

View File

@@ -16,7 +16,7 @@ interface Props {
 const PopupContainer: React.FC<Props> = ({ provider, resolve }) => {
   const [open, setOpen] = useState(true)
   const [name, setName] = useState(provider?.name || '')
-  const [type, setType] = useState<ProviderType>(provider?.type || 'openai-compatible')
+  const [type, setType] = useState<ProviderType>(provider?.type || 'openai')
   const [logo, setLogo] = useState<string | null>(null)
   const [dropdownOpen, setDropdownOpen] = useState(false)
   const { t } = useTranslation()
@@ -52,7 +52,7 @@ const PopupContainer: React.FC<Props> = ({ provider, resolve }) => {
   const onCancel = () => {
     setOpen(false)
-    resolve({ name: '', type: 'openai-compatible' })
+    resolve({ name: '', type: 'openai' })
   }

   const onClose = () => {
@@ -189,8 +189,8 @@ const PopupContainer: React.FC<Props> = ({ provider, resolve }) => {
                 value={type}
                 onChange={setType}
                 options={[
-                  { label: 'OpenAI-Compatible', value: 'openai-compatible' },
-                  { label: 'OpenAI-Response', value: 'openai' },
+                  { label: 'OpenAI', value: 'openai' },
+                  { label: 'OpenAI-Response', value: 'openai-response' },
                   { label: 'Gemini', value: 'gemini' },
                   { label: 'Anthropic', value: 'anthropic' },
                   { label: 'Azure OpenAI', value: 'azure-openai' }

View File

@@ -260,7 +260,7 @@ const ProviderSetting: FC<Props> = ({ provider: _provider }) => {
     if (apiHost.endsWith('#')) {
       return apiHost.replace('#', '')
     }
-    if (provider.type === 'openai-compatible') {
+    if (provider.type === 'openai') {
       return formatApiHost(apiHost) + 'chat/completions'
     }
     return formatApiHost(apiHost) + 'responses'
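A minimal sketch of the endpoint preview logic in the hunk above, with a simplified stand-in for formatApiHost (the real helper may also insert a version path such as /v1/, which is not shown in this diff):

// Hypothetical, simplified stand-in for the app's formatApiHost helper:
// here it only guarantees a trailing slash.
const formatApiHost = (host: string): string => (host.endsWith('/') ? host : `${host}/`)

type PreviewProviderType = 'openai' | 'openai-response'

// Resolve the endpoint shown in the provider settings preview.
function previewEndpoint(apiHost: string, type: PreviewProviderType): string {
  // A trailing '#' means "use the host exactly as typed", with no suffix appended.
  if (apiHost.endsWith('#')) {
    return apiHost.replace('#', '')
  }
  // 'openai' (OpenAI-compatible chat) previews the Chat Completions route;
  // 'openai-response' previews the Responses route.
  if (type === 'openai') {
    return formatApiHost(apiHost) + 'chat/completions'
  }
  return formatApiHost(apiHost) + 'responses'
}

// e.g. previewEndpoint('https://api.deepseek.com', 'openai') -> 'https://api.deepseek.com/chat/completions'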

View File

@@ -8,8 +8,8 @@ import { CompletionsParams } from '.'
 import AnthropicProvider from './AnthropicProvider'
 import BaseProvider from './BaseProvider'
 import GeminiProvider from './GeminiProvider'
-import OpenAICompatibleProvider from './OpenAICompatibleProvider'
 import OpenAIProvider from './OpenAIProvider'
+import OpenAIResponseProvider from './OpenAIResponseProvider'

 /**
  * AihubmixProvider -
@@ -26,8 +26,8 @@ export default class AihubmixProvider extends BaseProvider {
     // Initialize the individual providers
     this.providers.set('claude', new AnthropicProvider(provider))
     this.providers.set('gemini', new GeminiProvider({ ...provider, apiHost: 'https://aihubmix.com/gemini' }))
-    this.providers.set('openai', new OpenAIProvider(provider))
-    this.providers.set('default', new OpenAICompatibleProvider(provider))
+    this.providers.set('openai', new OpenAIResponseProvider(provider))
+    this.providers.set('default', new OpenAIProvider(provider))

     // Set the default provider
     this.defaultProvider = this.providers.get('default')!
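The dispatch code that consults this map is in one of the suppressed diffs below; the sketch here only illustrates the lookup-with-fallback pattern, and getProviderKey is a hypothetical routing rule, not the project's actual one:

// Hedged sketch: a keyed provider map like the one above, consulted with a
// 'default' fallback. The real model-to-key routing lives in the suppressed diff.
const providers = new Map<string, string>([
  ['claude', 'AnthropicProvider'],
  ['gemini', 'GeminiProvider'],
  ['openai', 'OpenAIResponseProvider'],
  ['default', 'OpenAIProvider']
])

function getProviderKey(modelId: string): string {
  // hypothetical routing rule, for illustration only
  if (modelId.startsWith('claude')) return 'claude'
  if (modelId.startsWith('gemini')) return 'gemini'
  if (modelId.startsWith('gpt') || modelId.startsWith('o3')) return 'openai'
  return 'default'
}

const provider = providers.get(getProviderKey('deepseek-chat')) ?? providers.get('default')!
// -> 'OpenAIProvider': unrecognized models fall through to the OpenAI-compatible default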

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -4,25 +4,26 @@ import AihubmixProvider from './AihubmixProvider'
 import AnthropicProvider from './AnthropicProvider'
 import BaseProvider from './BaseProvider'
 import GeminiProvider from './GeminiProvider'
-import OpenAICompatibleProvider from './OpenAICompatibleProvider'
 import OpenAIProvider from './OpenAIProvider'
+import OpenAIResponseProvider from './OpenAIResponseProvider'

 export default class ProviderFactory {
   static create(provider: Provider): BaseProvider {
+    if (provider.id === 'aihubmix') {
+      return new AihubmixProvider(provider)
+    }
+
     switch (provider.type) {
       case 'openai':
         return new OpenAIProvider(provider)
-      case 'openai-compatible':
-        if (provider.id === 'aihubmix') {
-          return new AihubmixProvider(provider)
-        }
-        return new OpenAICompatibleProvider(provider)
+      case 'openai-response':
+        return new OpenAIResponseProvider(provider)
       case 'anthropic':
         return new AnthropicProvider(provider)
       case 'gemini':
         return new GeminiProvider(provider)
       default:
-        return new OpenAICompatibleProvider(provider)
+        return new OpenAIProvider(provider)
     }
   }
 }
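A usage sketch (not part of the diff) of which class each provider type now resolves to; the provider objects are abbreviated and the import paths are assumptions:

// Sketch only: ProviderFactory.create after the revert.
import { Provider } from '@renderer/types' // import path assumed
import ProviderFactory from './ProviderFactory'

const deepseek = { id: 'deepseek', type: 'openai', apiKey: '', apiHost: 'https://api.deepseek.com', models: [] } as Provider
const openai = { id: 'openai', type: 'openai-response', apiKey: '', apiHost: 'https://api.openai.com', models: [] } as Provider

ProviderFactory.create(deepseek) // -> OpenAIProvider (OpenAI-compatible chat/completions)
ProviderFactory.create(openai) // -> OpenAIResponseProvider (Responses API)
ProviderFactory.create({ ...deepseek, id: 'aihubmix' }) // -> AihubmixProvider (matched by id before the switch)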

View File

@@ -46,7 +46,7 @@ const persistedReducer = persistReducer(
   {
     key: 'cherry-studio',
     storage,
-    version: 99,
+    version: 100,
     blacklist: ['runtime', 'messages', 'messageBlocks'],
     migrate
   },
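Bumping the persist version is what makes redux-persist run the new '100' migration on existing stores. A minimal sketch of that mechanism, assuming the app wires `migrate` through createMigrate (the usual redux-persist pattern; the actual wiring is outside this diff):

// Sketch only: how redux-persist ties `version` to the numbered migrations.
import { createMigrate } from 'redux-persist'
import storage from 'redux-persist/lib/storage'

const migrations = {
  // ...earlier migrations...
  100: (state: any) => {
    // remap provider types, as in the migrateConfig hunk further down
    return state
  }
}

const persistConfig = {
  key: 'cherry-studio',
  storage,
  version: 100, // persisted state older than version 100 runs migration 100 on rehydrate
  blacklist: ['runtime', 'messages', 'messageBlocks'],
  migrate: createMigrate(migrations, { debug: false })
}

// const persistedReducer = persistReducer(persistConfig, rootReducer)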

View File

@@ -28,7 +28,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'silicon',
     name: 'Silicon',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.siliconflow.cn',
     models: SYSTEM_MODELS.silicon,
@@ -38,7 +38,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'aihubmix',
     name: 'AiHubMix',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://aihubmix.com',
     models: SYSTEM_MODELS.aihubmix,
@@ -48,7 +48,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'ocoolai',
     name: 'ocoolAI',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.ocoolai.com',
     models: SYSTEM_MODELS.ocoolai,
@@ -58,7 +58,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'deepseek',
     name: 'deepseek',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.deepseek.com',
     models: SYSTEM_MODELS.deepseek,
@@ -68,7 +68,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'openrouter',
     name: 'OpenRouter',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://openrouter.ai/api/v1/',
     models: SYSTEM_MODELS.openrouter,
@@ -78,7 +78,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'ppio',
     name: 'PPIO',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.ppinfra.com/v3/openai',
     models: SYSTEM_MODELS.ppio,
@@ -88,7 +88,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'alayanew',
     name: 'AlayaNew',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://deepseek.alayanew.com',
     models: SYSTEM_MODELS.alayanew,
@@ -98,7 +98,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'infini',
     name: 'Infini',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://cloud.infini-ai.com/maas',
     models: SYSTEM_MODELS.infini,
@@ -108,7 +108,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'qiniu',
     name: 'Qiniu',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.qnaigc.com',
     models: SYSTEM_MODELS.qiniu,
@@ -118,7 +118,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'dmxapi',
     name: 'DMXAPI',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://www.dmxapi.cn',
     models: SYSTEM_MODELS.dmxapi,
@@ -128,7 +128,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'o3',
     name: 'O3',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.o3.fan',
     models: SYSTEM_MODELS.o3,
@@ -138,7 +138,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'ollama',
     name: 'Ollama',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'http://localhost:11434',
     models: SYSTEM_MODELS.ollama,
@@ -148,7 +148,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'lmstudio',
     name: 'LM Studio',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'http://localhost:1234',
     models: SYSTEM_MODELS.lmstudio,
@@ -168,7 +168,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'openai',
     name: 'OpenAI',
-    type: 'openai',
+    type: 'openai-response',
     apiKey: '',
     apiHost: 'https://api.openai.com',
     models: SYSTEM_MODELS.openai,
@@ -178,7 +178,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'azure-openai',
     name: 'Azure OpenAI',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: '',
     apiVersion: '',
@@ -199,7 +199,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'zhipu',
     name: 'ZhiPu',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://open.bigmodel.cn/api/paas/v4/',
     models: SYSTEM_MODELS.zhipu,
@@ -209,7 +209,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'github',
     name: 'Github Models',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://models.inference.ai.azure.com/',
     models: SYSTEM_MODELS.github,
@@ -219,7 +219,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'copilot',
     name: 'Github Copilot',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.githubcopilot.com/',
     models: SYSTEM_MODELS.copilot,
@@ -230,7 +230,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'yi',
     name: 'Yi',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.lingyiwanwu.com',
     models: SYSTEM_MODELS.yi,
@@ -240,7 +240,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'moonshot',
     name: 'Moonshot AI',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.moonshot.cn',
     models: SYSTEM_MODELS.moonshot,
@@ -250,7 +250,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'baichuan',
     name: 'BAICHUAN AI',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.baichuan-ai.com',
     models: SYSTEM_MODELS.baichuan,
@@ -260,7 +260,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'dashscope',
     name: 'Bailian',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://dashscope.aliyuncs.com/compatible-mode/v1/',
     models: SYSTEM_MODELS.bailian,
@@ -270,7 +270,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'stepfun',
     name: 'StepFun',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.stepfun.com',
     models: SYSTEM_MODELS.stepfun,
@@ -280,7 +280,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'doubao',
     name: 'doubao',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://ark.cn-beijing.volces.com/api/v3/',
     models: SYSTEM_MODELS.doubao,
@@ -290,7 +290,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'minimax',
     name: 'MiniMax',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.minimax.chat/v1/',
     models: SYSTEM_MODELS.minimax,
@@ -300,7 +300,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'groq',
     name: 'Groq',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.groq.com/openai',
     models: SYSTEM_MODELS.groq,
@@ -310,7 +310,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'together',
     name: 'Together',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.together.xyz',
     models: SYSTEM_MODELS.together,
@@ -320,7 +320,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'fireworks',
     name: 'Fireworks',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.fireworks.ai/inference',
     models: SYSTEM_MODELS.fireworks,
@@ -330,7 +330,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'zhinao',
     name: 'zhinao',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.360.cn',
     models: SYSTEM_MODELS.zhinao,
@@ -340,7 +340,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'hunyuan',
     name: 'hunyuan',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.hunyuan.cloud.tencent.com',
     models: SYSTEM_MODELS.hunyuan,
@@ -350,7 +350,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'nvidia',
     name: 'nvidia',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://integrate.api.nvidia.com',
     models: SYSTEM_MODELS.nvidia,
@@ -360,7 +360,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'grok',
     name: 'Grok',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.x.ai',
     models: SYSTEM_MODELS.grok,
@@ -370,7 +370,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'hyperbolic',
     name: 'Hyperbolic',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.hyperbolic.xyz',
     models: SYSTEM_MODELS.hyperbolic,
@@ -380,7 +380,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'mistral',
     name: 'Mistral',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.mistral.ai',
     models: SYSTEM_MODELS.mistral,
@@ -390,7 +390,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'jina',
     name: 'Jina',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.jina.ai',
     models: SYSTEM_MODELS.jina,
@@ -400,7 +400,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'gitee-ai',
     name: 'gitee ai',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://ai.gitee.com',
     models: SYSTEM_MODELS['gitee-ai'],
@@ -410,7 +410,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'perplexity',
     name: 'Perplexity',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.perplexity.ai/',
     models: SYSTEM_MODELS.perplexity,
@@ -420,7 +420,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'modelscope',
     name: 'ModelScope',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api-inference.modelscope.cn/v1/',
     models: SYSTEM_MODELS.modelscope,
@@ -430,7 +430,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'xirang',
     name: 'Xirang',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://wishub-x1.ctyun.cn',
     models: SYSTEM_MODELS.xirang,
@@ -440,7 +440,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'tencent-cloud-ti',
     name: 'Tencent Cloud TI',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.lkeap.cloud.tencent.com',
     models: SYSTEM_MODELS['tencent-cloud-ti'],
@@ -450,7 +450,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'baidu-cloud',
     name: 'Baidu Cloud',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://qianfan.baidubce.com/v2/',
     models: SYSTEM_MODELS['baidu-cloud'],
@@ -460,7 +460,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'gpustack',
     name: 'GPUStack',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: '',
     models: SYSTEM_MODELS.gpustack,
@@ -470,7 +470,7 @@ export const INITIAL_PROVIDERS: Provider[] = [
   {
     id: 'voyageai',
     name: 'VoyageAI',
-    type: 'openai-compatible',
+    type: 'openai',
     apiKey: '',
     apiHost: 'https://api.voyageai.com',
     models: SYSTEM_MODELS.voyageai,
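An illustrative check (not from the diff): based on the entries visible in this file, only the official OpenAI entry uses 'openai-response' after the revert, while every OpenAI-compatible vendor stays on 'openai'. The import path below is an assumption:

import { INITIAL_PROVIDERS } from '@renderer/store/llm' // import path assumed

const responsesProviders = INITIAL_PROVIDERS.filter((p) => p.type === 'openai-response')
console.log(responsesProviders.map((p) => p.id)) // ['openai']

const compatibleProviders = INITIAL_PROVIDERS.filter((p) => p.type === 'openai')
console.log(compatibleProviders.map((p) => p.id)) // silicon, deepseek, groq, ollama, ... (all chat/completions vendors)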

View File

@@ -101,7 +101,7 @@ const formatCitationsFromBlock = (block: CitationMessageBlock | undefined): Cita
         })) || []
       break
     }
-    case WebSearchSource.OPENAI:
+    case WebSearchSource.OPENAI_RESPONSE:
       formattedCitations =
         (block.response.results as OpenAI.Responses.ResponseOutputText.URLCitation[])?.map((result, index) => {
           let hostname: string | undefined
@@ -120,7 +120,7 @@
           }
         }) || []
       break
-    case WebSearchSource.OPENAI_COMPATIBLE:
+    case WebSearchSource.OPENAI:
       formattedCitations =
         (block.response.results as OpenAI.Chat.Completions.ChatCompletionMessage.Annotation[])?.map((url, index) => {
           const urlCitation = url.url_citation

View File

@@ -1257,6 +1257,7 @@ const migrateConfig = {
     try {
       state.llm.providers.forEach((provider) => {
         if (provider.type === 'openai' && provider.id !== 'openai') {
+          // @ts-ignore eslint-disable-next-line
          provider.type = 'openai-compatible'
         }
       })
@@ -1296,6 +1297,22 @@
     } catch (error) {
       return state
     }
+  },
+  '100': (state: RootState) => {
+    try {
+      state.llm.providers.forEach((provider) => {
+        // @ts-ignore eslint-disable-next-line
+        if (['openai-compatible', 'openai'].includes(provider.type)) {
+          provider.type = 'openai'
+        }
+        if (provider.id === 'openai') {
+          provider.type = 'openai-response'
+        }
+      })
+      return state
+    } catch (error) {
+      return state
+    }
   }
 }
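A worked sketch (not part of the diff) of what migration '100' does to a persisted store that still carries the old 'openai-compatible' type; the provider objects are abbreviated:

const before = [
  { id: 'deepseek', type: 'openai-compatible' }, // old compat type -> 'openai'
  { id: 'silicon', type: 'openai' }, // already 'openai' -> unchanged
  { id: 'openai', type: 'openai' } // official OpenAI entry -> 'openai-response'
]

const after = before.map((provider) => {
  let type = provider.type
  if (['openai-compatible', 'openai'].includes(type)) {
    type = 'openai'
  }
  if (provider.id === 'openai') {
    type = 'openai-response'
  }
  return { ...provider, type }
})

// after: deepseek -> 'openai', silicon -> 'openai', openai -> 'openai-response'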

View File

@@ -162,7 +162,7 @@ export type Provider = {
   notes?: string
 }

-export type ProviderType = 'openai' | 'openai-compatible' | 'anthropic' | 'gemini' | 'qwenlm' | 'azure-openai'
+export type ProviderType = 'openai' | 'openai-response' | 'anthropic' | 'gemini' | 'qwenlm' | 'azure-openai'

 export type ModelType = 'text' | 'vision' | 'embedding' | 'reasoning' | 'function_calling' | 'web_search'
@@ -462,7 +462,7 @@ export type WebSearchResults =
 export enum WebSearchSource {
   WEBSEARCH = 'websearch',
   OPENAI = 'openai',
-  OPENAI_COMPATIBLE = 'openai-compatible',
+  OPENAI_RESPONSE = 'openai-response',
   OPENROUTER = 'openrouter',
   ANTHROPIC = 'anthropic',
   GEMINI = 'gemini',
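A sketch (not from the diff) of the renamed union in use; an exhaustive switch like this is what forces call sites such as ProviderFactory, the settings UI, and the web-search checks to be updated when 'openai-compatible' disappears from ProviderType. The descriptions reflect the endpoint mapping shown earlier in this commit:

type ProviderType = 'openai' | 'openai-response' | 'anthropic' | 'gemini' | 'qwenlm' | 'azure-openai'

function describe(type: ProviderType): string {
  switch (type) {
    case 'openai':
      return 'OpenAI-compatible chat/completions endpoint'
    case 'openai-response':
      return 'official OpenAI Responses API'
    case 'anthropic':
      return 'Anthropic API'
    case 'gemini':
      return 'Google Gemini API'
    case 'qwenlm':
      return 'QwenLM'
    case 'azure-openai':
      return 'Azure OpenAI'
    default: {
      const exhaustive: never = type // compile error if a union member is missed
      return exhaustive
    }
  }
}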