Mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2026-01-02 10:29:02 +08:00)
refactor(aiCore): simplify OpenAI summary text handling and improve type safety
- Remove the 'off' option from the OpenAISummaryText type and use null instead
- Add a migration to convert persisted 'off' values to null
- Add a utility function to convert undefined to null
- Update the Selector component to handle null/undefined values
- Improve type safety in provider options and reasoning params
Parent: e5777e79b1
Commit: 7641667162
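Before the diff, a minimal sketch of the value semantics this commit settles on. The literal members shown here approximate the OpenAI SDK union that `OpenAISummaryText` now aliases; only the null/undefined split matters.

```ts
// Approximate shape after this commit: the extra 'off' literal is gone.
// undefined → "use the provider default", null → "explicitly off".
type OpenAISummaryText = 'auto' | 'concise' | 'detailed' | null | undefined

const legacyStored = 'off' // what pre-167 persisted settings may still contain
const migrated: OpenAISummaryText = null // what migration '167' rewrites 'off' to
```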
@@ -55,7 +55,7 @@ export async function buildStreamTextParams(
timeout?: number
headers?: Record<string, string>
}
} = {}
}
): Promise<{
params: StreamTextParams
modelId: string
@@ -1,18 +1,26 @@
import { OpenAIResponsesProviderOptions } from '@ai-sdk/openai'
import { baseProviderIdSchema, customProviderIdSchema } from '@cherrystudio/ai-core/provider'
import { isOpenAIModel, isQwenMTModel, isSupportFlexServiceTierModel } from '@renderer/config/models'
import { isSupportServiceTierProvider } from '@renderer/config/providers'
import { mapLanguageToQwenMTModel } from '@renderer/config/translate'
import { getStoreSetting } from '@renderer/hooks/useSettings'
import {
Assistant,
GroqServiceTier,
GroqServiceTiers,
GroqSystemProvider,
isGroqServiceTier,
isGroqSystemProvider,
isOpenAIServiceTier,
isTranslateAssistant,
Model,
NotGroqProvider,
OpenAIServiceTier,
OpenAIServiceTiers,
Provider,
SystemProviderIds
Provider
} from '@renderer/types'
import { OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import { JSONValue } from 'ai'
import { t } from 'i18next'

import { getAiSdkProviderId } from '../provider/factory'
@@ -27,8 +35,9 @@ import {
} from './reasoning'
import { getWebSearchParams } from './websearch'

// copy from BaseApiClient.ts
const getServiceTier = (model: Model, provider: Provider) => {
function getServiceTier<T extends GroqSystemProvider>(model: Model, provider: T): GroqServiceTier
function getServiceTier<T extends NotGroqProvider>(model: Model, provider: T): OpenAIServiceTier
function getServiceTier<T extends Provider>(model: Model, provider: T): OpenAIServiceTier | GroqServiceTier {
const serviceTierSetting = provider.serviceTier

if (!isSupportServiceTierProvider(provider) || !isOpenAIModel(model) || !serviceTierSetting) {
@@ -36,12 +45,14 @@ const getServiceTier = (model: Model, provider: Provider) => {
}

// Handle providers that need to fall back to the default value
if (provider.id === SystemProviderIds.groq) {
if (isGroqSystemProvider(provider)) {
if (
!isGroqServiceTier(serviceTierSetting) ||
(serviceTierSetting === GroqServiceTiers.flex && !isSupportFlexServiceTierModel(model))
) {
return undefined
} else {
return serviceTierSetting
}
} else {
// Other OpenAI providers: assume their service tier settings are identical to OpenAI's
@@ -56,6 +67,11 @@ const getServiceTier = (model: Model, provider: Provider) => {
return serviceTierSetting
}

function getVerbosity(): OpenAIVerbosity {
const openAI = getStoreSetting('openAI')
return openAI.verbosity
}

/**
* Build providerOptions for the AI SDK
* Split by provider type to keep type safety
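A hedged sketch of what the new `getServiceTier` overloads buy at a call site inside this module; the `declare` stubs stand in for real values, and the types come from `@renderer/types`.

```ts
import { GroqSystemProvider, Model, NotGroqProvider } from '@renderer/types'

declare const groqProvider: GroqSystemProvider
declare const otherProvider: NotGroqProvider
declare const model: Model

// Overload resolution narrows the result per provider kind:
const groqTier = getServiceTier(model, groqProvider) // typed GroqServiceTier per the first overload
const openAiTier = getServiceTier(model, otherProvider) // typed OpenAIServiceTier per the second overload
```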
@@ -70,12 +86,12 @@ export function buildProviderOptions(
enableWebSearch: boolean
enableGenerateImage: boolean
}
): Record<string, any> {
): Record<string, Record<string, JSONValue>> {
const rawProviderId = getAiSdkProviderId(actualProvider)
// Build provider-specific options
let providerSpecificOptions: Record<string, any> = {}
const serviceTierSetting = getServiceTier(model, actualProvider)
providerSpecificOptions.serviceTier = serviceTierSetting
const serviceTier = getServiceTier(model, actualProvider)
const textVerbosity = getVerbosity()
// Split the build logic by provider type
const { data: baseProviderId, success } = baseProviderIdSchema.safeParse(rawProviderId)
if (success) {
@@ -87,8 +103,9 @@ export function buildProviderOptions(
case 'azure-responses':
providerSpecificOptions = {
...buildOpenAIProviderOptions(assistant, model, capabilities),
serviceTier: serviceTierSetting
}
textVerbosity,
serviceTier
} satisfies OpenAIResponsesProviderOptions
break

case 'anthropic':
@@ -108,7 +125,7 @@ export function buildProviderOptions(
// For other providers, use the generic build logic
providerSpecificOptions = {
...buildGenericProviderOptions(assistant, model, capabilities),
serviceTier: serviceTierSetting
serviceTier
}
break
}
@@ -131,7 +148,8 @@ export function buildProviderOptions(
// For other providers, use the generic build logic
providerSpecificOptions = {
...buildGenericProviderOptions(assistant, model, capabilities),
serviceTier: serviceTierSetting
serviceTier,
textVerbosity
}
}
} else {
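For orientation, an illustrative sketch of the provider-specific options the openai/azure-responses branch now assembles; the placeholder values and the spread comment are assumptions, and only the key names come from the hunks above.

```ts
import type { OpenAIResponsesProviderOptions } from '@ai-sdk/openai'

// Illustrative values only; the real fields are spread from buildOpenAIProviderOptions(...).
const providerSpecificOptions = {
  textVerbosity: 'medium', // from getVerbosity(), i.e. settings.openAI.verbosity
  serviceTier: 'auto' // from getServiceTier(model, actualProvider)
} satisfies OpenAIResponsesProviderOptions
```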
@@ -29,8 +29,8 @@ import {
import { isSupportEnableThinkingProvider } from '@renderer/config/providers'
import { getStoreSetting } from '@renderer/hooks/useSettings'
import { getAssistantSettings, getProviderByModel } from '@renderer/services/AssistantService'
import { SettingsState } from '@renderer/store/settings'
import { Assistant, EFFORT_RATIO, isSystemProvider, Model, SystemProviderIds } from '@renderer/types'
import { OpenAIReasoningEffort, OpenAISummaryText } from '@renderer/types/aiCoreTypes'
import { ReasoningEffortOptionalParams } from '@renderer/types/sdk'
import { toInteger } from 'lodash'
@@ -311,19 +311,23 @@ export function getReasoningEffort(assistant: Assistant, model: Model): Reasonin
}

/**
* 获取 OpenAI 推理参数
* 从 OpenAIResponseAPIClient 和 OpenAIAPIClient 中提取的逻辑
* Get OpenAI reasoning parameters
* Extracted from OpenAIResponseAPIClient and OpenAIAPIClient logic
* For official OpenAI provider only
*/
export function getOpenAIReasoningParams(assistant: Assistant, model: Model): Record<string, any> {
export function getOpenAIReasoningParams(
assistant: Assistant,
model: Model
): { reasoningEffort?: OpenAIReasoningEffort; reasoningSummary?: OpenAISummaryText } {
if (!isReasoningModel(model)) {
return {}
}
const openAI = getStoreSetting('openAI') as SettingsState['openAI']
const summaryText = openAI?.summaryText || 'off'
const openAI = getStoreSetting('openAI')
const summaryText = openAI.summaryText

let reasoningSummary: string | undefined = undefined
let reasoningSummary: OpenAISummaryText = undefined

if (summaryText === 'off' || model.id.includes('o1-pro')) {
if (model.id.includes('o1-pro')) {
reasoningSummary = undefined
} else {
reasoningSummary = summaryText
@@ -331,7 +335,7 @@ export function getOpenAIReasoningParams(assistant: Assistant, model: Model): Re

let reasoningEffort = assistant?.settings?.reasoning_effort

if (isOpenAIDeepResearchModel(model)) {
if (isOpenAIDeepResearchModel(model) || reasoningEffort === 'auto') {
reasoningEffort = 'medium'
}
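A hedged sketch of the narrowed return value of `getOpenAIReasoningParams` after this change; the concrete values are illustrative.

```ts
import { OpenAIReasoningEffort, OpenAISummaryText } from '@renderer/types/aiCoreTypes'

// Shape per the new signature above; values are examples only.
const params: { reasoningEffort?: OpenAIReasoningEffort; reasoningSummary?: OpenAISummaryText } = {
  reasoningEffort: 'medium', // deep-research models and the 'auto' setting both fall back to 'medium'
  reasoningSummary: 'detailed' // taken from settings.openAI.summaryText; undefined for o1-pro models
}
```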
@@ -4,7 +4,7 @@ import { ReactNode, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import styled, { css } from 'styled-components'

interface SelectorOption<V = string | number> {
interface SelectorOption<V = string | number | undefined | null> {
label: string | ReactNode
value: V
type?: 'group'
@@ -12,7 +12,7 @@ interface SelectorOption<V = string | number> {
disabled?: boolean
}

interface BaseSelectorProps<V = string | number> {
interface BaseSelectorProps<V = string | number | undefined | null> {
options: SelectorOption<V>[]
placeholder?: string
placement?: 'topLeft' | 'topCenter' | 'topRight' | 'bottomLeft' | 'bottomCenter' | 'bottomRight' | 'top' | 'bottom'
@@ -36,7 +36,7 @@ interface MultipleSelectorProps<V> extends BaseSelectorProps<V> {

export type SelectorProps<V> = SingleSelectorProps<V> | MultipleSelectorProps<V>

const Selector = <V extends string | number>({
const Selector = <V extends string | number | undefined | null>({
options,
value,
onChange = () => {},
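To show what the widened generic enables, a reduced sketch; this local `SelectorOption` copy drops the ReactNode label and group fields of the real interface.

```ts
import { OpenAISummaryText } from '@renderer/types/aiCoreTypes'

// Reduced local copy for illustration only.
interface SelectorOption<V = string | number | undefined | null> {
  label: string
  value: V
  disabled?: boolean
}

// null ("Off") and undefined ("Default") now type-check as option values.
const summaryTextOptions: SelectorOption<OpenAISummaryText>[] = [
  { value: null, label: 'Off' },
  { value: undefined, label: 'Default' },
  { value: 'auto', label: 'Auto' }
]
```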
@@ -1013,6 +1013,7 @@
"name": "Name",
"no_results": "No results",
"none": "None",
"off": "Off",
"open": "Open",
"paste": "Paste",
"placeholders": {
@@ -3985,7 +3986,6 @@
"default": "default",
"flex": "flex",
"on_demand": "on demand",
"performance": "performance",
"priority": "priority",
"tip": "Specifies the latency tier to use for processing the request",
"title": "Service Tier"
@@ -4004,7 +4004,7 @@
"low": "Low",
"medium": "Medium",
"tip": "Control the level of detail in the model's output",
"title": "Level of detail"
"title": "Verbosity"
}
},
"privacy": {
@@ -11,15 +11,15 @@ import { CollapsibleSettingGroup } from '@renderer/pages/settings/SettingGroup'
import { RootState, useAppDispatch } from '@renderer/store'
import { setOpenAISummaryText, setOpenAIVerbosity } from '@renderer/store/settings'
import {
GroqServiceTier,
GroqServiceTiers,
Model,
OpenAIServiceTier,
OpenAIServiceTiers,
OpenAISummaryText,
ServiceTier,
SystemProviderIds
} from '@renderer/types'
import { OpenAIVerbosity } from '@types'
import { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import { FC, useCallback, useEffect, useMemo } from 'react'
@@ -71,6 +71,14 @@ const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, Setti
)

const summaryTextOptions = [
{
value: null,
label: t('common.off')
},
{
value: undefined,
label: t('common.default')
},
{
value: 'auto',
label: t('settings.openai.summary_text_mode.auto')
@@ -86,6 +94,14 @@ const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, Setti
]

const verbosityOptions = [
{
value: null,
label: t('common.off')
},
{
value: undefined,
label: t('common.default')
},
{
value: 'low',
label: t('settings.openai.verbosity.low')
@@ -101,9 +117,17 @@ const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, Setti
]

const serviceTierOptions = useMemo(() => {
let baseOptions: { value: ServiceTier; label: string }[]
let baseOptions: { value: OpenAIServiceTier; label: string }[] | { value: GroqServiceTier; label: string }[]
if (provider.id === SystemProviderIds.groq) {
baseOptions = [
{
value: null,
label: t('common.off')
},
{
value: undefined,
label: t('common.default')
},
{
value: 'auto',
label: t('settings.openai.service_tier.auto')
@@ -115,12 +139,8 @@ const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, Setti
{
value: 'flex',
label: t('settings.openai.service_tier.flex')
},
{
value: 'performance',
label: t('settings.openai.service_tier.performance')
}
]
] as const
} else {
// Other cases default to the same tiers as OpenAI
baseOptions = [
@@ -140,7 +160,7 @@ const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, Setti
value: 'priority',
label: t('settings.openai.service_tier.priority')
}
]
] as const
}
return baseOptions.filter((option) => {
if (option.value === 'flex') {
@@ -1487,6 +1487,7 @@ const migrateConfig = {
'102': (state: RootState) => {
try {
state.settings.openAI = {
// @ts-expect-error it's a removed type. migrated on 167
summaryText: 'off',
serviceTier: 'auto',
verbosity: 'medium'
@@ -1580,6 +1581,7 @@ const migrateConfig = {
addMiniApp(state, 'google')
if (!state.settings.openAI) {
state.settings.openAI = {
// @ts-expect-error it's a removed type. migrated on 167
summaryText: 'off',
serviceTier: 'auto',
verbosity: 'medium'
@@ -2713,6 +2715,18 @@ const migrateConfig = {
logger.error('migrate 166 error', error as Error)
return state
}
},
'167': (state: RootState) => {
try {
// @ts-expect-error it's a removed type
if (state.settings.openAI.summaryText === 'off') {
state.settings.openAI.summaryText = null
}
return state
} catch (error) {
logger.error('migrate 166 error', error as Error)
return state
}
}
}
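A self-contained sketch of what migration '167' does to a persisted slice; the state shape is reduced here, while the real entry receives the full RootState.

```ts
// Reduced state shape for illustration.
type PersistedOpenAI = { summaryText: string | null; serviceTier: string; verbosity: string }
const persisted: { settings: { openAI: PersistedOpenAI } } = {
  settings: { openAI: { summaryText: 'off', serviceTier: 'auto', verbosity: 'medium' } }
}

// Same rewrite as the '167' entry above: the removed 'off' literal becomes an explicit null.
if (persisted.settings.openAI.summaryText === 'off') {
  persisted.settings.openAI.summaryText = null
}
// persisted.settings.openAI.summaryText === null
```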
@@ -9,16 +9,15 @@ import {
LanguageVarious,
MathEngine,
OpenAIServiceTier,
OpenAISummaryText,
PaintingProvider,
S3Config,
SidebarIcon,
ThemeMode,
TranslateLanguageCode
} from '@renderer/types'
import { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import { uuid } from '@renderer/utils'
import { UpgradeChannel } from '@shared/config/constant'
import { OpenAIVerbosity } from '@types'

import { RemoteSyncState } from './backup'

@@ -374,7 +373,7 @@ export const initialState: SettingsState = {
},
// OpenAI
openAI: {
summaryText: 'off',
summaryText: null,
serviceTier: 'auto',
verbosity: 'medium'
},
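The resulting initial slice, sketched with the types the store file now imports; the annotation here is an assumption, since the real SettingsState field types live elsewhere.

```ts
import { OpenAIServiceTier } from '@renderer/types'
import { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes'

// Assumed field types; values mirror the new initialState above.
const openAI: { summaryText: OpenAISummaryText; serviceTier: OpenAIServiceTier; verbosity: OpenAIVerbosity } = {
  summaryText: null, // "off" by default, instead of the removed 'off' literal
  serviceTier: 'auto',
  verbosity: 'medium'
}
```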
@@ -31,4 +31,4 @@ export type AiSdkModel = LanguageModel | ImageModel

export type OpenAIVerbosity = OpenAI.Responses.ResponseTextConfig['verbosity']
export type OpenAIReasoningEffort = OpenAI.ReasoningEffort
export type OpenAISummaryText = OpenAI.Reasoning['summary'] | 'off'
export type OpenAISummaryText = OpenAI.Reasoning['summary']
@@ -224,6 +224,19 @@ export function uniqueObjectArray<T>(array: T[]): T[] {
return array.filter((obj, index, self) => index === self.findIndex((t) => isEqual(t, obj)))
}

/**
* Converts an `undefined` value to `null`, otherwise returns the value as-is.
* @param value - The value to check
* @returns `null` if the input is `undefined`; otherwise the input value
*/
export function defined<T>(value: T | undefined): T | null {
if (value === undefined) {
return null
} else {
return value
}
}

export * from './api'
export * from './collection'
export * from './dataLimit'
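A usage sketch for the new `defined` helper; the import path assumes it is re-exported from the utils index shown above.

```ts
import { defined } from '@renderer/utils' // assumed re-export path

const unset: 'auto' | 'concise' | 'detailed' | undefined = undefined
const stored = defined(unset) // null, so the choice persists as an explicit "off" instead of being dropped

const kept = defined('detailed' as const) // 'detailed', defined values pass through unchanged
```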