Mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2026-01-10 15:49:29 +08:00)
hotfix: add OpenAI settings tab and related functionality (#6040)
* feat: add OpenAI settings tab and related functionality
* fix: update related logic to support the flex service tier
* fix(OpenAIResponseProvider): remove unused isOpenAILLMModel import
Parent: cc4008bf2b
Commit: ae163ff0ed
@@ -2245,6 +2245,20 @@ export function isOpenAILLMModel(model: Model): boolean {
   return false
 }

+export function isOpenAIModel(model: Model): boolean {
+  if (!model) {
+    return false
+  }
+  return model.id.includes('gpt') || isOpenAIReasoningModel(model)
+}
+
+export function isSupportedFlexServiceTier(model: Model): boolean {
+  if (!model) {
+    return false
+  }
+  return (model.id.includes('o3') && !model.id.includes('o3-mini')) || model.id.includes('o4-mini')
+}
+
 export function isSupportedReasoningEffortOpenAIModel(model: Model): boolean {
   return (
     (model.id.includes('o1') && !(model.id.includes('o1-preview') || model.id.includes('o1-mini'))) ||
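For reference, a minimal sketch (not part of the diff) of how the new flex-tier predicate is expected to classify a few model ids, assuming only that Model carries an id string:

// Illustrative only: re-stating the predicate added above against sample ids.
type Model = { id: string }

const isSupportedFlexServiceTier = (model: Model): boolean =>
  (model.id.includes('o3') && !model.id.includes('o3-mini')) || model.id.includes('o4-mini')

console.log(isSupportedFlexServiceTier({ id: 'o3' }))      // true  -> flex tier can be offered
console.log(isSupportedFlexServiceTier({ id: 'o3-mini' })) // false
console.log(isSupportedFlexServiceTier({ id: 'o4-mini' })) // true
console.log(isSupportedFlexServiceTier({ id: 'gpt-4o' }))  // false -> only auto/default tiers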
English locale strings:

@@ -1637,6 +1637,20 @@
     "zoom": {
       "title": "Page Zoom",
       "reset": "Reset"
+    },
+    "openai": {
+      "title": "OpenAI Settings",
+      "summary_text_mode.title": "Summary Mode",
+      "summary_text_mode.tip": "A summary of the reasoning performed by the model",
+      "summary_text_mode.auto": "auto",
+      "summary_text_mode.concise": "concise",
+      "summary_text_mode.detailed": "detailed",
+      "summary_text_mode.off": "off",
+      "service_tier.title": "Service Tier",
+      "service_tier.tip": "Specifies the latency tier to use for processing the request",
+      "service_tier.auto": "auto",
+      "service_tier.default": "default",
+      "service_tier.flex": "flex"
     }
   },
   "translate": {
Japanese locale strings:

@@ -1637,7 +1637,21 @@
     },
     "input.show_translate_confirm": "翻訳確認ダイアログを表示",
     "about.debug.title": "デバッグ",
-    "about.debug.open": "開く"
+    "about.debug.open": "開く",
+    "openai": {
+      "title": "OpenAIの設定",
+      "summary_text_mode.title": "要約モード",
+      "summary_text_mode.tip": "モデルが行った推論の要約",
+      "summary_text_mode.auto": "自動",
+      "summary_text_mode.concise": "簡潔",
+      "summary_text_mode.detailed": "詳細",
+      "summary_text_mode.off": "オフ",
+      "service_tier.title": "サービスティア",
+      "service_tier.tip": "リクエスト処理に使用するレイテンシティアを指定します",
+      "service_tier.auto": "自動",
+      "service_tier.default": "デフォルト",
+      "service_tier.flex": "フレックス"
+    }
   },
   "translate": {
     "any.language": "任意の言語",
Russian locale strings (also replaces the "[to be translated]" debug placeholders):

@@ -1636,8 +1636,22 @@
     "reset": "Сбросить"
   },
   "input.show_translate_confirm": "Показать диалоговое окно подтверждения перевода",
-  "about.debug.title": "[to be translated]:Debug",
-  "about.debug.open": "[to be translated]:Open"
+  "openai": {
+    "title": "Настройки OpenAI",
+    "summary_text_mode.title": "Режим резюме",
+    "summary_text_mode.tip": "Резюме рассуждений, выполненных моделью",
+    "summary_text_mode.auto": "Авто",
+    "summary_text_mode.concise": "Краткий",
+    "summary_text_mode.detailed": "Подробный",
+    "summary_text_mode.off": "Выключен",
+    "service_tier.title": "Уровень сервиса",
+    "service_tier.tip": "Указывает уровень задержки, который следует использовать для обработки запроса",
+    "service_tier.auto": "Авто",
+    "service_tier.default": "По умолчанию",
+    "service_tier.flex": "Гибкий"
+  },
+  "about.debug.title": "Отладка",
+  "about.debug.open": "Открыть"
   },
   "translate": {
     "any.language": "Любой язык",
Simplified Chinese locale strings:

@@ -1637,6 +1637,20 @@
     "zoom": {
       "title": "缩放",
       "reset": "重置"
+    },
+    "openai": {
+      "title": "OpenAI设置",
+      "summary_text_mode.title": "摘要模式",
+      "summary_text_mode.tip": "模型执行的推理摘要",
+      "summary_text_mode.auto": "自动",
+      "summary_text_mode.concise": "简洁",
+      "summary_text_mode.detailed": "详细",
+      "summary_text_mode.off": "关闭",
+      "service_tier.title": "服务层级",
+      "service_tier.tip": "指定用于处理请求的延迟层级",
+      "service_tier.auto": "自动",
+      "service_tier.default": "默认",
+      "service_tier.flex": "灵活"
     }
   },
   "translate": {
Traditional Chinese locale strings:

@@ -1637,6 +1637,20 @@
     "zoom": {
       "title": "縮放",
       "reset": "重置"
+    },
+    "openai": {
+      "title": "OpenAI設定",
+      "summary_text_mode.title": "摘要模式",
+      "summary_text_mode.tip": "模型所執行的推理摘要",
+      "summary_text_mode.auto": "自動",
+      "summary_text_mode.concise": "簡潔",
+      "summary_text_mode.detailed": "詳細",
+      "summary_text_mode.off": "關閉",
+      "service_tier.title": "服務層級",
+      "service_tier.tip": "指定用於處理請求的延遲層級",
+      "service_tier.auto": "自動",
+      "service_tier.default": "預設",
+      "service_tier.flex": "彈性"
     }
   },
   "translate": {
src/renderer/src/pages/home/Tabs/OpenAISettingsTab.tsx (new file, 144 lines)

@@ -0,0 +1,144 @@
import { SettingDivider, SettingRow, SettingSubtitle } from '@renderer/pages/settings'
import { RootState, useAppDispatch } from '@renderer/store'
import { setOpenAIServiceTier, setOpenAISummaryText } from '@renderer/store/settings'
import { OpenAIServiceTier, OpenAISummaryText } from '@renderer/types'
import { Select, Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import { FC, useCallback, useEffect, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'
import styled from 'styled-components'

import { SettingGroup, SettingRowTitleSmall } from './SettingsTab'

interface Props {
  isOpenAIReasoning: boolean
  isSupportedFlexServiceTier: boolean
}

const FALL_BACK_SERVICE_TIER: Record<OpenAIServiceTier, OpenAIServiceTier> = {
  auto: 'auto',
  default: 'default',
  flex: 'default'
}

const OpenAISettingsTab: FC<Props> = (props) => {
  const { t } = useTranslation()
  const summaryText = useSelector((state: RootState) => state.settings.openAI.summaryText)
  const serviceTierMode = useSelector((state: RootState) => state.settings.openAI.serviceTier)
  const dispatch = useAppDispatch()

  const setSummaryText = useCallback(
    (value: OpenAISummaryText) => {
      dispatch(setOpenAISummaryText(value))
    },
    [dispatch]
  )

  const setServiceTierMode = useCallback(
    (value: OpenAIServiceTier) => {
      dispatch(setOpenAIServiceTier(value))
    },
    [dispatch]
  )

  const summaryTextOptions = [
    {
      value: 'auto',
      label: t('settings.openai.summary_text_mode.auto')
    },
    {
      value: 'detailed',
      label: t('settings.openai.summary_text_mode.detailed')
    },
    {
      value: 'off',
      label: t('settings.openai.summary_text_mode.off')
    }
  ]

  const serviceTierOptions = useMemo(() => {
    const baseOptions = [
      {
        value: 'auto',
        label: t('settings.openai.service_tier.auto')
      },
      {
        value: 'default',
        label: t('settings.openai.service_tier.default')
      },
      {
        value: 'flex',
        label: t('settings.openai.service_tier.flex')
      }
    ]
    return baseOptions.filter((option) => {
      if (option.value === 'flex') {
        return props.isSupportedFlexServiceTier
      }
      return true
    })
  }, [props.isSupportedFlexServiceTier, t])

  useEffect(() => {
    if (serviceTierMode && !serviceTierOptions.some((option) => option.value === serviceTierMode)) {
      setServiceTierMode(FALL_BACK_SERVICE_TIER[serviceTierMode])
    }
  }, [serviceTierMode, serviceTierOptions, setServiceTierMode])

  return (
    <SettingGroup>
      <SettingSubtitle style={{ marginTop: 0 }}>{t('settings.openai.title')}</SettingSubtitle>
      <SettingDivider />
      <SettingRow>
        <SettingRowTitleSmall>
          {t('settings.openai.service_tier.title')}{' '}
          <Tooltip title={t('settings.openai.service_tier.tip')}>
            <CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
          </Tooltip>
        </SettingRowTitleSmall>
        <StyledSelect
          value={serviceTierMode}
          style={{ width: 135 }}
          onChange={(value) => {
            setServiceTierMode(value as OpenAIServiceTier)
          }}
          size="small"
          options={serviceTierOptions}
        />
      </SettingRow>
      {props.isOpenAIReasoning && (
        <>
          <SettingDivider />
          <SettingRow>
            <SettingRowTitleSmall>
              {t('settings.openai.summary_text_mode.title')}{' '}
              <Tooltip title={t('settings.openai.summary_text_mode.tip')}>
                <CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
              </Tooltip>
            </SettingRowTitleSmall>
            <StyledSelect
              value={summaryText}
              style={{ width: 135 }}
              onChange={(value) => {
                setSummaryText(value as OpenAISummaryText)
              }}
              size="small"
              options={summaryTextOptions}
            />
          </SettingRow>
        </>
      )}
    </SettingGroup>
  )
}

const StyledSelect = styled(Select)`
  .ant-select-selector {
    border-radius: 15px !important;
    padding: 4px 10px !important;
    height: 26px !important;
  }
`

export default OpenAISettingsTab
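A small sketch (not the component code) of the fallback behaviour the useEffect above implements: if the persisted tier is no longer among the options offered for the current model, it is remapped through FALL_BACK_SERVICE_TIER.

// Illustrative restatement: 'flex' falls back to 'default' when the model does not support it.
type OpenAIServiceTier = 'auto' | 'default' | 'flex'

const FALL_BACK_SERVICE_TIER: Record<OpenAIServiceTier, OpenAIServiceTier> = {
  auto: 'auto',
  default: 'default',
  flex: 'default'
}

function resolveSelectedTier(selected: OpenAIServiceTier, offered: OpenAIServiceTier[]): OpenAIServiceTier {
  return offered.includes(selected) ? selected : FALL_BACK_SERVICE_TIER[selected]
}

console.log(resolveSelectedTier('flex', ['auto', 'default']))         // 'default'
console.log(resolveSelectedTier('flex', ['auto', 'default', 'flex'])) // 'flex'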
@@ -8,11 +8,18 @@ import {
   isMac,
   isWindows
 } from '@renderer/config/constant'
+import {
+  isOpenAIModel,
+  isSupportedFlexServiceTier,
+  isSupportedReasoningEffortOpenAIModel
+} from '@renderer/config/models'
 import { codeThemes } from '@renderer/context/SyntaxHighlighterProvider'
 import { useAssistant } from '@renderer/hooks/useAssistant'
+import { useProvider } from '@renderer/hooks/useProvider'
 import { useSettings } from '@renderer/hooks/useSettings'
 import { SettingDivider, SettingRow, SettingRowTitle, SettingSubtitle } from '@renderer/pages/settings'
 import AssistantSettingsPopup from '@renderer/pages/settings/AssistantSettings'
+import { getDefaultModel } from '@renderer/services/AssistantService'
 import { useAppDispatch } from '@renderer/store'
 import {
   SendMessageShortcut,

@@ -57,12 +64,15 @@ import { FC, useEffect, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import styled from 'styled-components'
+
+import OpenAISettingsTab from './OpenAISettingsTab'

 interface Props {
   assistant: Assistant
 }

 const SettingsTab: FC<Props> = (props) => {
   const { assistant, updateAssistantSettings, updateAssistant } = useAssistant(props.assistant.id)
+  const { provider } = useProvider(assistant.model.provider)
   const { messageStyle, codeStyle, fontSize, language } = useSettings()

   const [temperature, setTemperature] = useState(assistant?.settings?.temperature ?? DEFAULT_TEMPERATURE)

@@ -155,6 +165,15 @@ const SettingsTab: FC<Props> = (props) => {
   const assistantContextCount = assistant?.settings?.contextCount || 20
   const maxContextCount = assistantContextCount > 20 ? assistantContextCount : 20
+
+  const model = assistant.model || getDefaultModel()
+
+  const isOpenAI = isOpenAIModel(model)
+  const isOpenAIReasoning =
+    isSupportedReasoningEffortOpenAIModel(model) &&
+    !model.id.includes('o1-pro') &&
+    (provider.type === 'openai-response' || provider.id === 'aihubmix')
+  const isOpenAIFlexServiceTier = isSupportedFlexServiceTier(model)

   return (
     <Container className="settings-tab">
       <SettingGroup style={{ marginTop: 10 }}>

@@ -265,6 +284,9 @@ const SettingsTab: FC<Props> = (props) => {
         </Row>
       )}
     </SettingGroup>
+    {isOpenAI && (
+      <OpenAISettingsTab isOpenAIReasoning={isOpenAIReasoning} isSupportedFlexServiceTier={isOpenAIFlexServiceTier} />
+    )}
     <SettingGroup>
       <SettingSubtitle style={{ marginTop: 0 }}>{t('settings.messages.title')}</SettingSubtitle>
       <SettingDivider />

@@ -629,7 +651,7 @@ const Label = styled.p`
   margin-right: 5px;
 `

-const SettingRowTitleSmall = styled(SettingRowTitle)`
+export const SettingRowTitleSmall = styled(SettingRowTitle)`
   font-size: 13px;
 `
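The gating computed in SettingsTab can be summarised as a pure function. A sketch under the assumption (suggested by the hunk above) that Provider exposes type and id and Model exposes id:

// Illustrative: when the reasoning-summary row inside the OpenAI group is shown.
type Model = { id: string }
type Provider = { id: string; type: string }

function showReasoningSummaryRow(model: Model, provider: Provider, isReasoningModel: boolean): boolean {
  return (
    isReasoningModel &&                      // isSupportedReasoningEffortOpenAIModel(model)
    !model.id.includes('o1-pro') &&          // o1-pro is excluded in the diff
    (provider.type === 'openai-response' || provider.id === 'aihubmix')
  )
}

console.log(showReasoningSummaryRow({ id: 'o3' }, { id: 'openai', type: 'openai-response' }, true))     // true
console.log(showReasoningSummaryRow({ id: 'o1-pro' }, { id: 'openai', type: 'openai-response' }, true)) // false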
@@ -476,6 +476,7 @@ export default class OpenAIProvider extends BaseOpenAIProvider {
         keep_alive: this.keepAliveTime,
         stream: isSupportStreamOutput(),
         tools: !isEmpty(tools) ? tools : undefined,
+        service_tier: this.getServiceTier(model),
         ...getOpenAIWebSearchParams(assistant, model),
         ...this.getReasoningEffort(assistant, model),
         ...this.getProviderSpecificParameters(assistant, model),
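The effect of this one-line change is that the chat-completions body now carries the resolved tier. A rough sketch of the payload shape (values are made up; other fields mirror the snippet above):

// Hypothetical payload for an o3 model with the 'flex' setting enabled.
const body = {
  model: 'o3',
  stream: true,
  service_tier: 'flex' as 'auto' | 'default' | 'flex' | undefined, // undefined for non-OpenAI models
  messages: [{ role: 'user', content: 'Hello' }]
}
console.log(JSON.stringify(body, null, 2))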
@@ -1,7 +1,8 @@
 import {
-  isOpenAILLMModel,
+  isOpenAIModel,
   isOpenAIReasoningModel,
   isOpenAIWebSearch,
+  isSupportedFlexServiceTier,
   isSupportedModel,
   isSupportedReasoningEffortOpenAIModel,
   isVisionModel

@@ -25,6 +26,8 @@ import {
   MCPToolResponse,
   Metrics,
   Model,
+  OpenAIServiceTier,
+  OpenAISummaryText,
   Provider,
   Suggestion,
   ToolCallResponse,

@@ -176,17 +179,25 @@ export abstract class BaseOpenAIProvider extends BaseProvider {
   }

   protected getServiceTier(model: Model) {
-    if ((model.id.includes('o3') && !model.id.includes('o3-mini')) || model.id.includes('o4-mini')) {
-      return 'flex'
-    }
-    if (isOpenAILLMModel(model)) {
-      return 'auto'
-    }
-    return undefined
+    if (!isOpenAIModel(model)) return undefined
+    const openAI = getStoreSetting('openAI') as any
+    let serviceTier = 'auto' as OpenAIServiceTier
+
+    if (openAI.serviceTier === 'flex') {
+      if (isSupportedFlexServiceTier(model)) {
+        serviceTier = 'flex'
+      } else {
+        serviceTier = 'auto'
+      }
+    } else {
+      serviceTier = openAI.serviceTier
+    }
+
+    return serviceTier
   }

   protected getTimeout(model: Model) {
-    if ((model.id.includes('o3') && !model.id.includes('o3-mini')) || model.id.includes('o4-mini')) {
+    if (isSupportedFlexServiceTier(model)) {
       return 15 * 1000 * 60
     }
     return 5 * 1000 * 60

@@ -196,6 +207,14 @@ export abstract class BaseOpenAIProvider extends BaseProvider {
     if (!isSupportedReasoningEffortOpenAIModel(model)) {
       return {}
     }
+    const openAI = getStoreSetting('openAI') as any
+    const summaryText = openAI.summaryText as OpenAISummaryText
+    let summary: string | undefined = undefined
+    if (summaryText === 'off' || model.id.includes('o1-pro')) {
+      summary = undefined
+    } else {
+      summary = summaryText
+    }

     const reasoningEffort = assistant?.settings?.reasoning_effort
     if (!reasoningEffort) {

@@ -206,7 +225,7 @@ export abstract class BaseOpenAIProvider extends BaseProvider {
     return {
       reasoning: {
         effort: reasoningEffort as OpenAI.ReasoningEffort,
-        summary: 'detailed'
+        summary: summary
       } as OpenAI.Reasoning
     }
   }
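Read together, the new getServiceTier boils down to a small decision table. A pure-function restatement for illustration (the real method reads the persisted setting via getStoreSetting('openAI')):

type OpenAIServiceTier = 'auto' | 'default' | 'flex'

// Illustrative only: mirrors the branching in getServiceTier above.
function resolveServiceTier(
  setting: OpenAIServiceTier,
  isOpenAI: boolean,
  supportsFlex: boolean
): OpenAIServiceTier | undefined {
  if (!isOpenAI) return undefined              // non-OpenAI models never send a tier
  if (setting === 'flex') return supportsFlex ? 'flex' : 'auto'
  return setting                               // 'auto' or 'default' pass through
}

console.log(resolveServiceTier('flex', true, false))   // 'auto'  (flex requested but unsupported)
console.log(resolveServiceTier('flex', true, true))    // 'flex'
console.log(resolveServiceTier('default', true, true)) // 'default'
console.log(resolveServiceTier('auto', false, true))   // undefined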
@@ -46,7 +46,7 @@ const persistedReducer = persistReducer(
   {
     key: 'cherry-studio',
     storage,
-    version: 100,
+    version: 102,
     blacklist: ['runtime', 'messages', 'messageBlocks'],
     migrate
   },
|
|||||||
} catch (error) {
|
} catch (error) {
|
||||||
return state
|
return state
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
'102': (state: RootState) => {
|
||||||
|
try {
|
||||||
|
state.settings.openAI = {
|
||||||
|
summaryText: 'off',
|
||||||
|
serviceTier: 'auto'
|
||||||
|
}
|
||||||
|
return state
|
||||||
|
} catch (error) {
|
||||||
|
return state
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
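Because the persist version above moved from 100 to 102, redux-persist runs this migration on existing installs. A simplified sketch of its effect (the real RootState has many more fields):

// Illustrative only: seeds the new settings.openAI block with the defaults from the diff.
type Settings = { openAI?: { summaryText: string; serviceTier: string } }
type PersistedState = { settings: Settings }

const migrate102 = (state: PersistedState): PersistedState => {
  try {
    state.settings.openAI = { summaryText: 'off', serviceTier: 'auto' }
    return state
  } catch {
    return state
  }
}

console.log(migrate102({ settings: {} }))
// -> { settings: { openAI: { summaryText: 'off', serviceTier: 'auto' } } }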
@@ -1,6 +1,14 @@
 import { createSlice, PayloadAction } from '@reduxjs/toolkit'
 import { TRANSLATE_PROMPT } from '@renderer/config/prompts'
-import { CodeStyleVarious, LanguageVarious, MathEngine, ThemeMode, TranslateLanguageVarious } from '@renderer/types'
+import {
+  CodeStyleVarious,
+  LanguageVarious,
+  MathEngine,
+  OpenAIServiceTier,
+  OpenAISummaryText,
+  ThemeMode,
+  TranslateLanguageVarious
+} from '@renderer/types'

 import { WebDAVSyncState } from './backup'

@@ -132,6 +140,11 @@ export interface SettingsState {
     siyuan: boolean
     docx: boolean
   }
+  // OpenAI
+  openAI: {
+    summaryText: OpenAISummaryText
+    serviceTier: OpenAIServiceTier
+  }
 }

 export type MultiModelMessageStyle = 'horizontal' | 'vertical' | 'fold' | 'grid'

@@ -238,6 +251,11 @@ export const initialState: SettingsState = {
     obsidian: true,
     siyuan: true,
     docx: true
+  },
+  // OpenAI
+  openAI: {
+    summaryText: 'off',
+    serviceTier: 'auto'
   }
 }

@@ -519,6 +537,12 @@ const settingsSlice = createSlice({
     },
     setEnableBackspaceDeleteModel: (state, action: PayloadAction<boolean>) => {
       state.enableBackspaceDeleteModel = action.payload
+    },
+    setOpenAISummaryText: (state, action: PayloadAction<OpenAISummaryText>) => {
+      state.openAI.summaryText = action.payload
+    },
+    setOpenAIServiceTier: (state, action: PayloadAction<OpenAIServiceTier>) => {
+      state.openAI.serviceTier = action.payload
     }
   }
 })

@@ -613,7 +637,9 @@ export const {
   setEnableDataCollection,
   setEnableQuickPanelTriggers,
   setExportMenuOptions,
-  setEnableBackspaceDeleteModel
+  setEnableBackspaceDeleteModel,
+  setOpenAISummaryText,
+  setOpenAIServiceTier
 } = settingsSlice.actions

 export default settingsSlice.reducer
|
|||||||
source?: string
|
source?: string
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export type OpenAISummaryText = 'auto' | 'concise' | 'detailed' | 'off'
|
||||||
|
export type OpenAIServiceTier = 'auto' | 'default' | 'flex'
|
||||||
|
|||||||