fix(stream-options): add user-configurable stream options for OpenAI API (#11693)

* refactor(types): rename OpenAISummaryText to OpenAIReasoningSummary for clarity

* refactor: move OpenAISettingsGroup to independent folder

* refactor(OpenAISettingsGroup): extract settings components into separate files

Move ReasoningSummarySetting, ServiceTierSetting and VerbositySetting into individual components to improve code organization and maintainability

* feat(stream-options): add stream options configuration for OpenAI completions

add includeUsage option to control token usage reporting in streamed responses
update provider config and settings UI to support new stream options
add migration for existing providers to set default stream options
extend toOptionValue utility to handle boolean values
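For illustration, the extended utility lets boolean settings round-trip through string-only selector options (usage sketch based on the updated select.ts shown further down):

  import { toOptionValue, toRealValue } from '@renderer/utils/select'

  // Booleans map to the literal strings 'true' / 'false' so they can be used as Selector option values,
  // and are converted back to real booleans when the selection changes.
  const optionValue = toOptionValue(true) // 'true'
  const restored = toRealValue(optionValue) // true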

* refactor(stream-options): move stream options includeUsage to settings store

- Remove streamOptions from Provider type and move includeUsage to settings.openAI
- Update migration to initialize streamOptions in settings
- Modify providerToAiSdkConfig to read includeUsage from settings
- Update StreamOptionsSetting component to use settings store
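A hypothetical condensed helper mirroring the providerToAiSdkConfig change in the diff below (store, isSupportStreamOptionsProvider, Provider, and OpenAICompletionsStreamOptions come from the renderer codebase):

  function resolveIncludeUsage(actualProvider: Provider): OpenAICompletionsStreamOptions['include_usage'] {
    if (!isSupportStreamOptionsProvider(actualProvider)) {
      // providers flagged isNotSupportStreamOptions never send stream_options
      return undefined
    }
    // undefined means the stream_options parameter is omitted from the request
    return store.getState().settings.openAI?.streamOptions?.includeUsage
  }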

* feat(i18n): add missing translations for 'on' and stream options

Add translations for the 'on' state and the stream options "include usage" setting in multiple languages

* docs(select): update docs

* test(providerConfig): add tests for stream options includeUsage

add test cases to verify includeUsage stream option behavior for OpenAI provider

* Update src/renderer/src/i18n/translate/ru-ru.json

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/VerbositySetting.tsx

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/renderer/src/utils/select.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* test(select): add tests for toOptionValue and toRealValue functions

* fix(providerConfig): handle undefined streamOptions in openAI settings

Prevent potential runtime errors by safely accessing nested streamOptions properties
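Illustrative guard, assuming state persisted before migration 182 that does not yet contain streamOptions: the read uses optional chaining and yields undefined instead of throwing.

  // Safe read on pre-182 persisted state
  const includeUsage = store.getState().settings.openAI?.streamOptions?.includeUsage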

* test(providerConfig): add tests for Copilot provider includeUsage settings

* fix(OpenAISettingsGroup): handle potential undefined streamOptions in selector

* docs(aiCoreTypes): add comment for OpenAICompletionsStreamOptions

* refactor(select): improve type safety in toOptionValue function

Use Exclude to prevent string literals from overlapping with special values
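Quick illustration of the runtime behavior the overloads describe (values taken from the added select tests):

  toRealValue('hello') // 'hello' — ordinary strings pass through unchanged
  toRealValue('null') // null
  toRealValue('true') // true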

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Phantom authored 2025-12-05 19:52:37 +08:00, committed by GitHub
parent 9d6d827f88
commit 3cedb95db3
28 changed files with 949 additions and 277 deletions

@@ -22,11 +22,15 @@ vi.mock('@renderer/services/AssistantService', () => ({
   })
 }))

-vi.mock('@renderer/store', () => ({
-  default: {
-    getState: () => ({ copilot: { defaultHeaders: {} } })
-  }
-}))
+vi.mock('@renderer/store', () => {
+  const mockGetState = vi.fn()
+  return {
+    default: {
+      getState: mockGetState
+    },
+    __mockGetState: mockGetState
+  }
+})

 vi.mock('@renderer/utils/api', () => ({
   formatApiHost: vi.fn((host, isSupportedAPIVersion = true) => {
@@ -79,6 +83,8 @@ import { isCherryAIProvider, isPerplexityProvider } from '@renderer/utils/provid
 import { COPILOT_DEFAULT_HEADERS, COPILOT_EDITOR_VERSION, isCopilotResponsesModel } from '../constants'
 import { getActualProvider, providerToAiSdkConfig } from '../providerConfig'

+const { __mockGetState: mockGetState } = vi.mocked(await import('@renderer/store')) as any
+
 const createWindowKeyv = () => {
   const store = new Map<string, string>()
   return {
@@ -132,6 +138,16 @@ describe('Copilot responses routing', () => {
       ...(globalThis as any).window,
       keyv: createWindowKeyv()
     }
+    mockGetState.mockReturnValue({
+      copilot: { defaultHeaders: {} },
+      settings: {
+        openAI: {
+          streamOptions: {
+            includeUsage: undefined
+          }
+        }
+      }
+    })
   })

   it('detects official GPT-5 Codex identifiers case-insensitively', () => {
@@ -167,6 +183,16 @@ describe('CherryAI provider configuration', () => {
       ...(globalThis as any).window,
       keyv: createWindowKeyv()
     }
+    mockGetState.mockReturnValue({
+      copilot: { defaultHeaders: {} },
+      settings: {
+        openAI: {
+          streamOptions: {
+            includeUsage: undefined
+          }
+        }
+      }
+    })
     vi.clearAllMocks()
   })
@@ -231,6 +257,16 @@ describe('Perplexity provider configuration', () => {
       ...(globalThis as any).window,
       keyv: createWindowKeyv()
     }
+    mockGetState.mockReturnValue({
+      copilot: { defaultHeaders: {} },
+      settings: {
+        openAI: {
+          streamOptions: {
+            includeUsage: undefined
+          }
+        }
+      }
+    })
     vi.clearAllMocks()
   })
@@ -291,3 +327,165 @@ describe('Perplexity provider configuration', () => {
     expect(actualProvider.apiHost).toBe('')
   })
 })
describe('Stream options includeUsage configuration', () => {
beforeEach(() => {
;(globalThis as any).window = {
...(globalThis as any).window,
keyv: createWindowKeyv()
}
vi.clearAllMocks()
})
const createOpenAIProvider = (): Provider => ({
id: 'openai-compatible',
type: 'openai',
name: 'OpenAI',
apiKey: 'test-key',
apiHost: 'https://api.openai.com',
models: [],
isSystem: true
})
it('uses includeUsage from settings when undefined', () => {
mockGetState.mockReturnValue({
copilot: { defaultHeaders: {} },
settings: {
openAI: {
streamOptions: {
includeUsage: undefined
}
}
}
})
const provider = createOpenAIProvider()
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai'))
expect(config.options.includeUsage).toBeUndefined()
})
it('uses includeUsage from settings when set to true', () => {
mockGetState.mockReturnValue({
copilot: { defaultHeaders: {} },
settings: {
openAI: {
streamOptions: {
includeUsage: true
}
}
}
})
const provider = createOpenAIProvider()
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai'))
expect(config.options.includeUsage).toBe(true)
})
it('uses includeUsage from settings when set to false', () => {
mockGetState.mockReturnValue({
copilot: { defaultHeaders: {} },
settings: {
openAI: {
streamOptions: {
includeUsage: false
}
}
}
})
const provider = createOpenAIProvider()
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai'))
expect(config.options.includeUsage).toBe(false)
})
it('respects includeUsage setting for non-supporting providers', () => {
mockGetState.mockReturnValue({
copilot: { defaultHeaders: {} },
settings: {
openAI: {
streamOptions: {
includeUsage: true
}
}
}
})
const testProvider: Provider = {
id: 'test',
type: 'openai',
name: 'test',
apiKey: 'test-key',
apiHost: 'https://api.test.com',
models: [],
isSystem: false,
apiOptions: {
isNotSupportStreamOptions: true
}
}
const config = providerToAiSdkConfig(testProvider, createModel('gpt-4', 'GPT-4', 'test'))
// Even though setting is true, provider doesn't support it, so includeUsage should be undefined
expect(config.options.includeUsage).toBeUndefined()
})
it('uses includeUsage from settings for Copilot provider when set to false', () => {
mockGetState.mockReturnValue({
copilot: { defaultHeaders: {} },
settings: {
openAI: {
streamOptions: {
includeUsage: false
}
}
}
})
const provider = createCopilotProvider()
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot'))
expect(config.options.includeUsage).toBe(false)
expect(config.providerId).toBe('github-copilot-openai-compatible')
})
it('uses includeUsage from settings for Copilot provider when set to true', () => {
mockGetState.mockReturnValue({
copilot: { defaultHeaders: {} },
settings: {
openAI: {
streamOptions: {
includeUsage: true
}
}
}
})
const provider = createCopilotProvider()
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot'))
expect(config.options.includeUsage).toBe(true)
expect(config.providerId).toBe('github-copilot-openai-compatible')
})
it('uses includeUsage from settings for Copilot provider when undefined', () => {
mockGetState.mockReturnValue({
copilot: { defaultHeaders: {} },
settings: {
openAI: {
streamOptions: {
includeUsage: undefined
}
}
}
})
const provider = createCopilotProvider()
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot'))
expect(config.options.includeUsage).toBeUndefined()
expect(config.providerId).toBe('github-copilot-openai-compatible')
})
})

@@ -11,6 +11,7 @@ import { createVertexProvider, isVertexAIConfigured } from '@renderer/hooks/useV
 import { getProviderByModel } from '@renderer/services/AssistantService'
 import store from '@renderer/store'
 import { isSystemProvider, type Model, type Provider, SystemProviderIds } from '@renderer/types'
+import type { OpenAICompletionsStreamOptions } from '@renderer/types/aiCoreTypes'
 import {
   formatApiHost,
   formatAzureOpenAIApiHost,
@@ -147,6 +148,10 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A
     baseURL: baseURL,
     apiKey: actualProvider.apiKey
   }

+  let includeUsage: OpenAICompletionsStreamOptions['include_usage'] = undefined
+  if (isSupportStreamOptionsProvider(actualProvider)) {
+    includeUsage = store.getState().settings.openAI?.streamOptions?.includeUsage
+  }
   const isCopilotProvider = actualProvider.id === SystemProviderIds.copilot
   if (isCopilotProvider) {
@@ -158,7 +163,7 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A
         ...actualProvider.extra_headers
       },
       name: actualProvider.id,
-      includeUsage: true
+      includeUsage
     })

     return {
@@ -261,7 +266,7 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A
       ...options,
       name: actualProvider.id,
       ...extraOptions,
-      includeUsage: isSupportStreamOptionsProvider(actualProvider)
+      includeUsage
     }
   }
 }

@@ -37,7 +37,7 @@ import { getStoreSetting } from '@renderer/hooks/useSettings'
 import { getAssistantSettings, getProviderByModel } from '@renderer/services/AssistantService'
 import type { Assistant, Model } from '@renderer/types'
 import { EFFORT_RATIO, isSystemProvider, SystemProviderIds } from '@renderer/types'
-import type { OpenAISummaryText } from '@renderer/types/aiCoreTypes'
+import type { OpenAIReasoningSummary } from '@renderer/types/aiCoreTypes'
 import type { ReasoningEffortOptionalParams } from '@renderer/types/sdk'
 import { isSupportEnableThinkingProvider } from '@renderer/utils/provider'
 import { toInteger } from 'lodash'
@@ -448,7 +448,7 @@ export function getOpenAIReasoningParams(
   const openAI = getStoreSetting('openAI')
   const summaryText = openAI.summaryText

-  let reasoningSummary: OpenAISummaryText = undefined
+  let reasoningSummary: OpenAIReasoningSummary = undefined

   if (model.id.includes('o1-pro')) {
     reasoningSummary = undefined

@@ -5,6 +5,7 @@ export const SYSTEM_PROMPT_THRESHOLD = 128
 export const DEFAULT_KNOWLEDGE_DOCUMENT_COUNT = 6
 export const DEFAULT_KNOWLEDGE_THRESHOLD = 0.0
 export const DEFAULT_WEBSEARCH_RAG_DOCUMENT_COUNT = 1
+export const DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE = true

 export const platform = window.electron?.process?.platform
 export const isMac = platform === 'darwin'

@@ -1162,6 +1162,7 @@
     "no_results": "No results",
     "none": "None",
     "off": "Off",
+    "on": "On",
     "open": "Open",
     "paste": "Paste",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "Specifies the latency tier to use for processing the request",
       "title": "Service Tier"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "Whether token usage is included (applicable only to the OpenAI Chat Completions API)",
+        "title": "Include usage"
+      }
+    },
     "summary_text_mode": {
       "auto": "auto",
       "concise": "concise",

@@ -1162,6 +1162,7 @@
     "no_results": "无结果",
     "none": "无",
     "off": "关闭",
+    "on": "启用",
     "open": "打开",
     "paste": "粘贴",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "指定用于处理请求的延迟层级",
       "title": "服务层级"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "是否请求 Tokens 用量(仅 OpenAI Chat Completions API 可用)",
+        "title": "包含用量"
+      }
+    },
     "summary_text_mode": {
       "auto": "自动",
       "concise": "简洁",

@@ -1162,6 +1162,7 @@
     "no_results": "沒有結果",
     "none": "無",
     "off": "關閉",
+    "on": "開啟",
     "open": "開啟",
     "paste": "貼上",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "指定用於處理請求的延遲層級",
       "title": "服務層級"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "是否請求 Tokens 用量(僅 OpenAI Chat Completions API 可用)",
+        "title": "包含用量"
+      }
+    },
     "summary_text_mode": {
       "auto": "自動",
       "concise": "簡潔",

@@ -1162,6 +1162,7 @@
     "no_results": "Keine Ergebnisse",
     "none": "Keine",
     "off": "Aus",
+    "on": "An",
     "open": "Öffnen",
     "paste": "Einfügen",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "Latenz-Ebene für Anfrageverarbeitung festlegen",
       "title": "Service-Tier"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "Ob die Token-Nutzung enthalten ist (gilt nur für die OpenAI Chat Completions API)",
+        "title": "Nutzung einbeziehen"
+      }
+    },
     "summary_text_mode": {
       "auto": "Automatisch",
       "concise": "Kompakt",

@@ -1162,6 +1162,7 @@
     "no_results": "Δεν βρέθηκαν αποτελέσματα",
     "none": "Χωρίς",
     "off": "Κλειστό",
+    "on": "Ενεργό",
     "open": "Άνοιγμα",
     "paste": "Επικόλληση",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "Καθορίστε το επίπεδο καθυστέρησης που χρησιμοποιείται για την επεξεργασία των αιτημάτων",
       "title": "Επίπεδο υπηρεσίας"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "Είτε περιλαμβάνεται η χρήση διακριτικών (ισχύει μόνο για το OpenAI Chat Completions API)",
+        "title": "Συμπεριλάβετε χρήση"
+      }
+    },
     "summary_text_mode": {
       "auto": "Αυτόματο",
       "concise": "Σύντομο",

@@ -1162,6 +1162,7 @@
     "no_results": "Sin resultados",
     "none": "无",
     "off": "Apagado",
+    "on": "En",
     "open": "Abrir",
     "paste": "Pegar",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "Especifica el nivel de latencia utilizado para procesar la solicitud",
       "title": "Nivel de servicio"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "Si se incluye el uso de tokens (aplicable solo a la API de Completions de chat de OpenAI)",
+        "title": "Incluir uso"
+      }
+    },
     "summary_text_mode": {
       "auto": "Automático",
       "concise": "Conciso",

@@ -1162,6 +1162,7 @@
     "no_results": "Aucun résultat",
     "none": "Aucun",
     "off": "Désactivé",
+    "on": "Marche",
     "open": "Ouvrir",
     "paste": "Coller",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "Spécifie le niveau de latence utilisé pour traiter la demande",
       "title": "Niveau de service"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "Si l'utilisation des jetons est incluse (applicable uniquement à l'API OpenAI Chat Completions)",
+        "title": "Inclure l'utilisation"
+      }
+    },
     "summary_text_mode": {
       "auto": "Automatique",
       "concise": "Concis",

@@ -1162,6 +1162,7 @@
     "no_results": "検索結果なし",
     "none": "無",
     "off": "オフ",
+    "on": "オン",
     "open": "開く",
     "paste": "貼り付け",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "リクエスト処理に使用するレイテンシティアを指定します",
       "title": "サービスティア"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "トークン使用量が含まれるかどうか (OpenAI Chat Completions APIのみに適用)",
+        "title": "使用法を含める"
+      }
+    },
     "summary_text_mode": {
       "auto": "自動",
       "concise": "簡潔",

@@ -1162,6 +1162,7 @@
     "no_results": "Nenhum resultado",
     "none": "Nenhum",
     "off": "Desligado",
+    "on": "Ligado",
     "open": "Abrir",
     "paste": "Colar",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "Especifique o nível de latência usado para processar a solicitação",
       "title": "Nível de Serviço"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "Se o uso de tokens está incluído (aplicável apenas à API de Conclusões de Chat da OpenAI)",
+        "title": "Incluir uso"
+      }
+    },
     "summary_text_mode": {
       "auto": "Automático",
       "concise": "Conciso",

@@ -1162,6 +1162,7 @@
     "no_results": "Результатов не найдено",
     "none": "без",
     "off": "Выкл",
+    "on": "Вкл",
     "open": "Открыть",
     "paste": "Вставить",
     "placeholders": {
@@ -4271,6 +4272,12 @@
       "tip": "Указывает уровень задержки, который следует использовать для обработки запроса",
       "title": "Уровень сервиса"
     },
+    "stream_options": {
+      "include_usage": {
+        "tip": "Включено ли использование токенов (применимо только к API завершения чата OpenAI)",
+        "title": "Включить использование"
+      }
+    },
     "summary_text_mode": {
       "auto": "Авто",
       "concise": "Краткий",

@@ -56,7 +56,11 @@ import type { Assistant, AssistantSettings, CodeStyleVarious, MathEngine } from
 import { isGroqSystemProvider, ThemeMode } from '@renderer/types'
 import { modalConfirm } from '@renderer/utils'
 import { getSendMessageShortcutLabel } from '@renderer/utils/input'
-import { isSupportServiceTierProvider, isSupportVerbosityProvider } from '@renderer/utils/provider'
+import {
+  isOpenAICompatibleProvider,
+  isSupportServiceTierProvider,
+  isSupportVerbosityProvider
+} from '@renderer/utils/provider'
 import { Button, Col, InputNumber, Row, Slider, Switch } from 'antd'
 import { Settings2 } from 'lucide-react'
 import type { FC } from 'react'
@@ -184,6 +188,7 @@ const SettingsTab: FC<Props> = (props) => {
   const model = assistant.model || getDefaultModel()

   const showOpenAiSettings =
+    isOpenAICompatibleProvider(provider) ||
     isOpenAIModel(model) ||
     isSupportServiceTierProvider(provider) ||
     (isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider))

@@ -1,247 +0,0 @@
import Selector from '@renderer/components/Selector'
import {
getModelSupportedVerbosity,
isSupportedReasoningEffortOpenAIModel,
isSupportFlexServiceTierModel,
isSupportVerbosityModel
} from '@renderer/config/models'
import { useProvider } from '@renderer/hooks/useProvider'
import { SettingDivider, SettingRow } from '@renderer/pages/settings'
import { CollapsibleSettingGroup } from '@renderer/pages/settings/SettingGroup'
import type { RootState } from '@renderer/store'
import { useAppDispatch } from '@renderer/store'
import { setOpenAISummaryText, setOpenAIVerbosity } from '@renderer/store/settings'
import type { Model, OpenAIServiceTier, ServiceTier } from '@renderer/types'
import { SystemProviderIds } from '@renderer/types'
import type { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import { isSupportServiceTierProvider, isSupportVerbosityProvider } from '@renderer/utils/provider'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback, useEffect, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'
type VerbosityOption = {
value: NonNullable<OpenAIVerbosity> | 'undefined' | 'null'
label: string
}
type SummaryTextOption = {
value: NonNullable<OpenAISummaryText> | 'undefined' | 'null'
label: string
}
type OpenAIServiceTierOption = { value: NonNullable<OpenAIServiceTier> | 'null' | 'undefined'; label: string }
interface Props {
model: Model
providerId: string
SettingGroup: FC<{ children: React.ReactNode }>
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}
const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, SettingRowTitleSmall }) => {
const { t } = useTranslation()
const { provider, updateProvider } = useProvider(providerId)
const verbosity = useSelector((state: RootState) => state.settings.openAI.verbosity)
const summaryText = useSelector((state: RootState) => state.settings.openAI.summaryText)
const serviceTierMode = provider.serviceTier
const dispatch = useAppDispatch()
const showSummarySetting =
isSupportedReasoningEffortOpenAIModel(model) &&
!model.id.includes('o1-pro') &&
(provider.type === 'openai-response' || model.endpoint_type === 'openai-response' || provider.id === 'aihubmix')
const showVerbositySetting = isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider)
const isSupportFlexServiceTier = isSupportFlexServiceTierModel(model)
const isSupportServiceTier = isSupportServiceTierProvider(provider)
const showServiceTierSetting = isSupportServiceTier && providerId !== SystemProviderIds.groq
const setSummaryText = useCallback(
(value: OpenAISummaryText) => {
dispatch(setOpenAISummaryText(value))
},
[dispatch]
)
const setServiceTierMode = useCallback(
(value: ServiceTier) => {
updateProvider({ serviceTier: value })
},
[updateProvider]
)
const setVerbosity = useCallback(
(value: OpenAIVerbosity) => {
dispatch(setOpenAIVerbosity(value))
},
[dispatch]
)
const summaryTextOptions = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'auto',
label: t('settings.openai.summary_text_mode.auto')
},
{
value: 'detailed',
label: t('settings.openai.summary_text_mode.detailed')
},
{
value: 'concise',
label: t('settings.openai.summary_text_mode.concise')
}
] as const satisfies SummaryTextOption[]
const verbosityOptions = useMemo(() => {
const allOptions = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'low',
label: t('settings.openai.verbosity.low')
},
{
value: 'medium',
label: t('settings.openai.verbosity.medium')
},
{
value: 'high',
label: t('settings.openai.verbosity.high')
}
] as const satisfies VerbosityOption[]
const supportedVerbosityLevels = getModelSupportedVerbosity(model).map((v) => toOptionValue(v))
return allOptions.filter((option) => supportedVerbosityLevels.includes(option.value))
}, [model, t])
const serviceTierOptions = useMemo(() => {
const options = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'auto',
label: t('settings.openai.service_tier.auto')
},
{
value: 'default',
label: t('settings.openai.service_tier.default')
},
{
value: 'flex',
label: t('settings.openai.service_tier.flex')
},
{
value: 'priority',
label: t('settings.openai.service_tier.priority')
}
] as const satisfies OpenAIServiceTierOption[]
return options.filter((option) => {
if (option.value === 'flex') {
return isSupportFlexServiceTier
}
return true
})
}, [isSupportFlexServiceTier, t])
useEffect(() => {
if (verbosity && !verbosityOptions.some((option) => option.value === verbosity)) {
const supportedVerbosityLevels = getModelSupportedVerbosity(model)
// Default to the highest supported verbosity level
const defaultVerbosity = supportedVerbosityLevels[supportedVerbosityLevels.length - 1]
setVerbosity(defaultVerbosity)
}
}, [model, verbosity, verbosityOptions, setVerbosity])
if (!showSummarySetting && !showServiceTierSetting && !showVerbositySetting) {
return null
}
return (
<CollapsibleSettingGroup title={t('settings.openai.title')} defaultExpanded={true}>
<SettingGroup>
{showServiceTierSetting && (
<>
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.service_tier.title')}{' '}
<Tooltip title={t('settings.openai.service_tier.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(serviceTierMode)}
onChange={(value) => {
setServiceTierMode(toRealValue(value))
}}
options={serviceTierOptions}
/>
</SettingRow>
{(showSummarySetting || showVerbositySetting) && <SettingDivider />}
</>
)}
{showSummarySetting && (
<>
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.summary_text_mode.title')}{' '}
<Tooltip title={t('settings.openai.summary_text_mode.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(summaryText)}
onChange={(value) => {
setSummaryText(toRealValue(value))
}}
options={summaryTextOptions}
/>
</SettingRow>
{showVerbositySetting && <SettingDivider />}
</>
)}
{showVerbositySetting && (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.verbosity.title')}{' '}
<Tooltip title={t('settings.openai.verbosity.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(verbosity)}
onChange={(value) => {
setVerbosity(toRealValue(value))
}}
options={verbosityOptions}
/>
</SettingRow>
)}
</SettingGroup>
<SettingDivider />
</CollapsibleSettingGroup>
)
}
export default OpenAISettingsGroup

@@ -0,0 +1,72 @@
import { isSupportedReasoningEffortOpenAIModel, isSupportVerbosityModel } from '@renderer/config/models'
import { useProvider } from '@renderer/hooks/useProvider'
import { SettingDivider } from '@renderer/pages/settings'
import { CollapsibleSettingGroup } from '@renderer/pages/settings/SettingGroup'
import type { Model } from '@renderer/types'
import { SystemProviderIds } from '@renderer/types'
import {
isSupportServiceTierProvider,
isSupportStreamOptionsProvider,
isSupportVerbosityProvider
} from '@renderer/utils/provider'
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'
import ReasoningSummarySetting from './ReasoningSummarySetting'
import ServiceTierSetting from './ServiceTierSetting'
import StreamOptionsSetting from './StreamOptionsSetting'
import VerbositySetting from './VerbositySetting'
interface Props {
model: Model
providerId: string
SettingGroup: FC<{ children: React.ReactNode }>
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}
const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, SettingRowTitleSmall }) => {
const { t } = useTranslation()
const { provider } = useProvider(providerId)
const showSummarySetting =
isSupportedReasoningEffortOpenAIModel(model) &&
!model.id.includes('o1-pro') &&
(provider.type === 'openai-response' || model.endpoint_type === 'openai-response' || provider.id === 'aihubmix')
const showVerbositySetting = isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider)
const isSupportServiceTier = isSupportServiceTierProvider(provider)
const showServiceTierSetting = isSupportServiceTier && providerId !== SystemProviderIds.groq
const showStreamOptionsSetting = isSupportStreamOptionsProvider(provider)
if (!showSummarySetting && !showServiceTierSetting && !showVerbositySetting && !showStreamOptionsSetting) {
return null
}
return (
<CollapsibleSettingGroup title={t('settings.openai.title')} defaultExpanded={true}>
<SettingGroup>
{showServiceTierSetting && (
<>
<ServiceTierSetting model={model} providerId={providerId} SettingRowTitleSmall={SettingRowTitleSmall} />
{(showSummarySetting || showVerbositySetting || showStreamOptionsSetting) && <SettingDivider />}
</>
)}
{showSummarySetting && (
<>
<ReasoningSummarySetting SettingRowTitleSmall={SettingRowTitleSmall} />
{(showVerbositySetting || showStreamOptionsSetting) && <SettingDivider />}
</>
)}
{showVerbositySetting && (
<>
<VerbositySetting model={model} SettingRowTitleSmall={SettingRowTitleSmall} />
{showStreamOptionsSetting && <SettingDivider />}
</>
)}
{showStreamOptionsSetting && <StreamOptionsSetting SettingRowTitleSmall={SettingRowTitleSmall} />}
</SettingGroup>
<SettingDivider />
</CollapsibleSettingGroup>
)
}
export default OpenAISettingsGroup

@@ -0,0 +1,78 @@
import Selector from '@renderer/components/Selector'
import { SettingRow } from '@renderer/pages/settings'
import type { RootState } from '@renderer/store'
import { useAppDispatch } from '@renderer/store'
import { setOpenAISummaryText } from '@renderer/store/settings'
import type { OpenAIReasoningSummary } from '@renderer/types/aiCoreTypes'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'
type SummaryTextOption = {
value: NonNullable<OpenAIReasoningSummary> | 'undefined' | 'null'
label: string
}
interface Props {
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}
const ReasoningSummarySetting: FC<Props> = ({ SettingRowTitleSmall }) => {
const { t } = useTranslation()
const summaryText = useSelector((state: RootState) => state.settings.openAI.summaryText)
const dispatch = useAppDispatch()
const setSummaryText = useCallback(
(value: OpenAIReasoningSummary) => {
dispatch(setOpenAISummaryText(value))
},
[dispatch]
)
const summaryTextOptions = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'auto',
label: t('settings.openai.summary_text_mode.auto')
},
{
value: 'detailed',
label: t('settings.openai.summary_text_mode.detailed')
},
{
value: 'concise',
label: t('settings.openai.summary_text_mode.concise')
}
] as const satisfies SummaryTextOption[]
return (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.summary_text_mode.title')}{' '}
<Tooltip title={t('settings.openai.summary_text_mode.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(summaryText)}
onChange={(value) => {
setSummaryText(toRealValue(value))
}}
options={summaryTextOptions}
/>
</SettingRow>
)
}
export default ReasoningSummarySetting

@@ -0,0 +1,88 @@
import Selector from '@renderer/components/Selector'
import { isSupportFlexServiceTierModel } from '@renderer/config/models'
import { useProvider } from '@renderer/hooks/useProvider'
import { SettingRow } from '@renderer/pages/settings'
import type { Model, OpenAIServiceTier, ServiceTier } from '@renderer/types'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
type OpenAIServiceTierOption = { value: NonNullable<OpenAIServiceTier> | 'null' | 'undefined'; label: string }
interface Props {
model: Model
providerId: string
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}
const ServiceTierSetting: FC<Props> = ({ model, providerId, SettingRowTitleSmall }) => {
const { t } = useTranslation()
const { provider, updateProvider } = useProvider(providerId)
const serviceTierMode = provider.serviceTier
const isSupportFlexServiceTier = isSupportFlexServiceTierModel(model)
const setServiceTierMode = useCallback(
(value: ServiceTier) => {
updateProvider({ serviceTier: value })
},
[updateProvider]
)
const serviceTierOptions = useMemo(() => {
const options = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'auto',
label: t('settings.openai.service_tier.auto')
},
{
value: 'default',
label: t('settings.openai.service_tier.default')
},
{
value: 'flex',
label: t('settings.openai.service_tier.flex')
},
{
value: 'priority',
label: t('settings.openai.service_tier.priority')
}
] as const satisfies OpenAIServiceTierOption[]
return options.filter((option) => {
if (option.value === 'flex') {
return isSupportFlexServiceTier
}
return true
})
}, [isSupportFlexServiceTier, t])
return (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.service_tier.title')}{' '}
<Tooltip title={t('settings.openai.service_tier.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(serviceTierMode)}
onChange={(value) => {
setServiceTierMode(toRealValue(value))
}}
options={serviceTierOptions}
/>
</SettingRow>
)
}
export default ServiceTierSetting

@@ -0,0 +1,72 @@
import Selector from '@renderer/components/Selector'
import { SettingRow } from '@renderer/pages/settings'
import type { RootState } from '@renderer/store'
import { useAppDispatch } from '@renderer/store'
import { setOpenAIStreamOptionsIncludeUsage } from '@renderer/store/settings'
import type { OpenAICompletionsStreamOptions } from '@renderer/types/aiCoreTypes'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'
type IncludeUsageOption = {
value: 'undefined' | 'false' | 'true'
label: string
}
interface Props {
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}
const StreamOptionsSetting: FC<Props> = ({ SettingRowTitleSmall }) => {
const { t } = useTranslation()
const includeUsage = useSelector((state: RootState) => state.settings.openAI?.streamOptions?.includeUsage)
const dispatch = useAppDispatch()
const setIncludeUsage = useCallback(
(value: OpenAICompletionsStreamOptions['include_usage']) => {
dispatch(setOpenAIStreamOptionsIncludeUsage(value))
},
[dispatch]
)
const includeUsageOptions = useMemo(() => {
return [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'false',
label: t('common.off')
},
{
value: 'true',
label: t('common.on')
}
] as const satisfies IncludeUsageOption[]
}, [t])
return (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.stream_options.include_usage.title')}{' '}
<Tooltip title={t('settings.openai.stream_options.include_usage.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(includeUsage)}
onChange={(value) => {
setIncludeUsage(toRealValue(value))
}}
options={includeUsageOptions}
/>
</SettingRow>
)
}
export default StreamOptionsSetting

@@ -0,0 +1,94 @@
import Selector from '@renderer/components/Selector'
import { getModelSupportedVerbosity } from '@renderer/config/models'
import { SettingRow } from '@renderer/pages/settings'
import type { RootState } from '@renderer/store'
import { useAppDispatch } from '@renderer/store'
import { setOpenAIVerbosity } from '@renderer/store/settings'
import type { Model } from '@renderer/types'
import type { OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback, useEffect, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'
type VerbosityOption = {
value: NonNullable<OpenAIVerbosity> | 'undefined' | 'null'
label: string
}
interface Props {
model: Model
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}
const VerbositySetting: FC<Props> = ({ model, SettingRowTitleSmall }) => {
const { t } = useTranslation()
const verbosity = useSelector((state: RootState) => state.settings.openAI.verbosity)
const dispatch = useAppDispatch()
const setVerbosity = useCallback(
(value: OpenAIVerbosity) => {
dispatch(setOpenAIVerbosity(value))
},
[dispatch]
)
const verbosityOptions = useMemo(() => {
const allOptions = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'low',
label: t('settings.openai.verbosity.low')
},
{
value: 'medium',
label: t('settings.openai.verbosity.medium')
},
{
value: 'high',
label: t('settings.openai.verbosity.high')
}
] as const satisfies VerbosityOption[]
const supportedVerbosityLevels = getModelSupportedVerbosity(model).map((v) => toOptionValue(v))
return allOptions.filter((option) => supportedVerbosityLevels.includes(option.value))
}, [model, t])
useEffect(() => {
if (verbosity !== undefined && !verbosityOptions.some((option) => option.value === toOptionValue(verbosity))) {
const supportedVerbosityLevels = getModelSupportedVerbosity(model)
// Default to the highest supported verbosity level
const defaultVerbosity = supportedVerbosityLevels[supportedVerbosityLevels.length - 1]
setVerbosity(defaultVerbosity)
}
}, [model, verbosity, verbosityOptions, setVerbosity])
return (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.verbosity.title')}{' '}
<Tooltip title={t('settings.openai.verbosity.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(verbosity)}
onChange={(value) => {
setVerbosity(toRealValue(value))
}}
options={verbosityOptions}
/>
</SettingRow>
)
}
export default VerbositySetting

@@ -0,0 +1,3 @@
import OpenAISettingsGroup from './OpenAISettingsGroup'
export default OpenAISettingsGroup

@@ -67,7 +67,7 @@ const persistedReducer = persistReducer(
   {
     key: 'cherry-studio',
     storage,
-    version: 181,
+    version: 182,
     blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'],
     migrate
   },

@@ -1,6 +1,11 @@
 import { loggerService } from '@logger'
 import { nanoid } from '@reduxjs/toolkit'
-import { DEFAULT_CONTEXTCOUNT, DEFAULT_TEMPERATURE, isMac } from '@renderer/config/constant'
+import {
+  DEFAULT_CONTEXTCOUNT,
+  DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE,
+  DEFAULT_TEMPERATURE,
+  isMac
+} from '@renderer/config/constant'
 import { DEFAULT_MIN_APPS } from '@renderer/config/minapps'
 import {
   glm45FlashModel,
@@ -2956,6 +2961,21 @@ const migrateConfig = {
       logger.error('migrate 181 error', error as Error)
       return state
     }
+  },
+  '182': (state: RootState) => {
+    try {
+      // Initialize streamOptions in settings.openAI if not exists
+      if (!state.settings.openAI.streamOptions) {
+        state.settings.openAI.streamOptions = {
+          includeUsage: DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE
+        }
+      }
+      logger.info('migrate 182 success')
+      return state
+    } catch (error) {
+      logger.error('migrate 182 error', error as Error)
+      return state
+    }
   }
 }

@@ -1,6 +1,6 @@
 import type { PayloadAction } from '@reduxjs/toolkit'
 import { createSlice } from '@reduxjs/toolkit'
-import { isMac } from '@renderer/config/constant'
+import { DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE, isMac } from '@renderer/config/constant'
 import { TRANSLATE_PROMPT } from '@renderer/config/prompts'
 import { DEFAULT_SIDEBAR_ICONS } from '@renderer/config/sidebar'
 import type {
@@ -16,7 +16,11 @@ import type {
   TranslateLanguageCode
 } from '@renderer/types'
 import { ThemeMode } from '@renderer/types'
-import type { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
+import type {
+  OpenAICompletionsStreamOptions,
+  OpenAIReasoningSummary,
+  OpenAIVerbosity
+} from '@renderer/types/aiCoreTypes'
 import { uuid } from '@renderer/utils'
 import { API_SERVER_DEFAULTS, UpgradeChannel } from '@shared/config/constant'
@@ -193,10 +197,14 @@ export interface SettingsState {
   }
   // OpenAI
   openAI: {
-    summaryText: OpenAISummaryText
+    // TODO: it's a bad naming. rename it to reasoningSummary in v2.
+    summaryText: OpenAIReasoningSummary
     /** @deprecated 现在该设置迁移到Provider对象中 */
     serviceTier: OpenAIServiceTier
    verbosity: OpenAIVerbosity
+    streamOptions: {
+      includeUsage: OpenAICompletionsStreamOptions['include_usage']
+    }
   }
   // Notification
   notification: {
@@ -376,7 +384,10 @@ export const initialState: SettingsState = {
   openAI: {
     summaryText: 'auto',
     serviceTier: 'auto',
-    verbosity: undefined
+    verbosity: undefined,
+    streamOptions: {
+      includeUsage: DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE
+    }
   },
   notification: {
     assistant: false,
@@ -791,12 +802,18 @@ const settingsSlice = createSlice({
     setDisableHardwareAcceleration: (state, action: PayloadAction<boolean>) => {
       state.disableHardwareAcceleration = action.payload
     },
-    setOpenAISummaryText: (state, action: PayloadAction<OpenAISummaryText>) => {
+    setOpenAISummaryText: (state, action: PayloadAction<OpenAIReasoningSummary>) => {
       state.openAI.summaryText = action.payload
     },
     setOpenAIVerbosity: (state, action: PayloadAction<OpenAIVerbosity>) => {
       state.openAI.verbosity = action.payload
     },
+    setOpenAIStreamOptionsIncludeUsage: (
+      state,
+      action: PayloadAction<OpenAICompletionsStreamOptions['include_usage']>
+    ) => {
+      state.openAI.streamOptions.includeUsage = action.payload
+    },
     setNotificationSettings: (state, action: PayloadAction<SettingsState['notification']>) => {
       state.notification = action.payload
     },
@@ -967,6 +984,7 @@ export const {
   setDisableHardwareAcceleration,
   setOpenAISummaryText,
   setOpenAIVerbosity,
+  setOpenAIStreamOptionsIncludeUsage,
   setNotificationSettings,
   // Local backup settings
   setLocalBackupDir,

@@ -50,7 +50,12 @@ export type OpenAIReasoningEffort = OpenAI.ReasoningEffort
  * When undefined, the parameter is omitted from the request.
  * When null, verbosity is explicitly disabled.
  */
-export type OpenAISummaryText = OpenAI.Reasoning['summary']
+export type OpenAIReasoningSummary = OpenAI.Reasoning['summary']
+
+/**
+ * Options for streaming response. Only set this when you set `stream: true`.
+ */
+export type OpenAICompletionsStreamOptions = OpenAI.ChatCompletionStreamOptions

 const AiSdkParamsSchema = z.enum([
   'maxOutputTokens',

@@ -0,0 +1,163 @@
import { describe, expect, it } from 'vitest'
import { toOptionValue, toRealValue } from '../select'
describe('toOptionValue', () => {
describe('primitive values', () => {
it('should convert undefined to string "undefined"', () => {
expect(toOptionValue(undefined)).toBe('undefined')
})
it('should convert null to string "null"', () => {
expect(toOptionValue(null)).toBe('null')
})
it('should convert true to string "true"', () => {
expect(toOptionValue(true)).toBe('true')
})
it('should convert false to string "false"', () => {
expect(toOptionValue(false)).toBe('false')
})
})
describe('string values', () => {
it('should return string as-is', () => {
expect(toOptionValue('hello')).toBe('hello')
})
it('should return empty string as-is', () => {
expect(toOptionValue('')).toBe('')
})
it('should return string with special characters as-is', () => {
expect(toOptionValue('hello-world_123')).toBe('hello-world_123')
})
it('should return string that looks like a boolean as-is', () => {
expect(toOptionValue('True')).toBe('True')
expect(toOptionValue('FALSE')).toBe('FALSE')
})
})
describe('mixed type scenarios', () => {
it('should handle union types correctly', () => {
const values: Array<string | boolean | null | undefined> = ['test', true, false, null, undefined, '']
expect(toOptionValue(values[0])).toBe('test')
expect(toOptionValue(values[1])).toBe('true')
expect(toOptionValue(values[2])).toBe('false')
expect(toOptionValue(values[3])).toBe('null')
expect(toOptionValue(values[4])).toBe('undefined')
expect(toOptionValue(values[5])).toBe('')
})
})
})
describe('toRealValue', () => {
describe('special string values', () => {
it('should convert string "undefined" to undefined', () => {
expect(toRealValue('undefined')).toBeUndefined()
})
it('should convert string "null" to null', () => {
expect(toRealValue('null')).toBeNull()
})
it('should convert string "true" to boolean true', () => {
expect(toRealValue('true')).toBe(true)
})
it('should convert string "false" to boolean false', () => {
expect(toRealValue('false')).toBe(false)
})
})
describe('regular string values', () => {
it('should return regular string as-is', () => {
expect(toRealValue('hello')).toBe('hello')
})
it('should return empty string as-is', () => {
expect(toRealValue('')).toBe('')
})
it('should return string with special characters as-is', () => {
expect(toRealValue('hello-world_123')).toBe('hello-world_123')
})
it('should return string that looks like special value but with different casing', () => {
expect(toRealValue('Undefined')).toBe('Undefined')
expect(toRealValue('NULL')).toBe('NULL')
expect(toRealValue('True')).toBe('True')
expect(toRealValue('False')).toBe('False')
})
})
describe('edge cases', () => {
it('should handle strings containing special values as substring', () => {
expect(toRealValue('undefined_value')).toBe('undefined_value')
expect(toRealValue('null_check')).toBe('null_check')
expect(toRealValue('true_condition')).toBe('true_condition')
expect(toRealValue('false_flag')).toBe('false_flag')
})
it('should handle strings with whitespace', () => {
expect(toRealValue(' undefined')).toBe(' undefined')
expect(toRealValue('null ')).toBe('null ')
expect(toRealValue(' true ')).toBe(' true ')
})
})
})
describe('toOptionValue and toRealValue roundtrip', () => {
it('should correctly convert and restore undefined', () => {
const original = undefined
const option = toOptionValue(original)
const restored = toRealValue(option)
expect(restored).toBeUndefined()
})
it('should correctly convert and restore null', () => {
const original = null
const option = toOptionValue(original)
const restored = toRealValue(option)
expect(restored).toBeNull()
})
it('should correctly convert and restore true', () => {
const original = true
const option = toOptionValue(original)
const restored = toRealValue(option)
expect(restored).toBe(true)
})
it('should correctly convert and restore false', () => {
const original = false
const option = toOptionValue(original)
const restored = toRealValue(option)
expect(restored).toBe(false)
})
it('should correctly convert and restore string values', () => {
const strings = ['hello', '', 'test-123', 'some_value']
strings.forEach((str) => {
const option = toOptionValue(str)
const restored = toRealValue(option)
expect(restored).toBe(str)
})
})
it('should handle array of mixed values', () => {
const values: Array<string | boolean | null | undefined> = ['test', true, false, null, undefined]
const options = values.map(toOptionValue)
const restored = options.map(toRealValue)
expect(restored[0]).toBe('test')
expect(restored[1]).toBe(true)
expect(restored[2]).toBe(false)
expect(restored[3]).toBeNull()
expect(restored[4]).toBeUndefined()
})
})

@@ -1,36 +1,63 @@
 /**
- * Convert a value (string | undefined | null) into an option-compatible string.
+ * Convert a value (string | undefined | null | boolean) into an option-compatible string.
  * - `undefined` becomes the literal string `'undefined'`
  * - `null` becomes the literal string `'null'`
+ * - `true` becomes the literal string `'true'`
+ * - `false` becomes the literal string `'false'`
  * - Any other string is returned as-is
  *
  * @param v - The value to convert
  * @returns The string representation safe for option usage
  */
-export function toOptionValue<T extends undefined | Exclude<string, null>>(v: T): NonNullable<T> | 'undefined'
-export function toOptionValue<T extends null | Exclude<string, undefined>>(v: T): NonNullable<T> | 'null'
-export function toOptionValue<T extends string | undefined | null>(v: T): NonNullable<T> | 'undefined' | 'null'
-export function toOptionValue<T extends Exclude<string, null | undefined>>(v: T): T
-export function toOptionValue(v: string | undefined | null) {
-  if (v === undefined) return 'undefined'
-  if (v === null) return 'null'
-  return v
+export function toOptionValue(v: undefined): 'undefined'
+export function toOptionValue(v: null): 'null'
+export function toOptionValue(v: boolean): 'true' | 'false'
+export function toOptionValue(v: boolean | undefined): 'true' | 'false' | 'undefined'
+export function toOptionValue(v: boolean | null): 'true' | 'false' | 'null'
+export function toOptionValue(v: boolean | undefined | null): 'true' | 'false' | 'undefined' | 'null'
+export function toOptionValue<T extends string>(v: T): T
+export function toOptionValue<T extends Exclude<string, 'undefined'> | undefined>(v: T): NonNullable<T> | 'undefined'
+export function toOptionValue<T extends Exclude<string, 'null'> | null>(v: T): NonNullable<T> | 'null'
+export function toOptionValue<T extends Exclude<string, 'boolean'> | boolean>(v: T): T | 'true' | 'false'
+export function toOptionValue<T extends Exclude<string, 'null' | 'undefined'> | null | undefined>(
+  v: T
+): NonNullable<T> | 'null' | 'undefined'
+export function toOptionValue<T extends Exclude<string, 'null' | 'true' | 'false'> | null | boolean>(
+  v: T
+): NonNullable<T> | 'null' | 'true' | 'false'
+export function toOptionValue<T extends Exclude<string, 'undefined' | 'true' | 'false'> | undefined | boolean>(
+  v: T
+): NonNullable<T> | 'undefined' | 'true' | 'false'
+export function toOptionValue<
+  T extends Exclude<string, 'null' | 'undefined' | 'true' | 'false'> | null | undefined | boolean
+>(v: T): NonNullable<T> | 'null' | 'undefined' | 'true' | 'false'
+export function toOptionValue(v: string | undefined | null | boolean) {
+  return String(v)
 }

 /**
  * Convert an option string back to its original value.
  * - The literal string `'undefined'` becomes `undefined`
  * - The literal string `'null'` becomes `null`
+ * - The literal string `'true'` becomes `true`
+ * - The literal string `'false'` becomes `false`
  * - Any other string is returned as-is
  *
  * @param v - The option string to convert
- * @returns The real value (`undefined`, `null`, or the original string)
+ * @returns The real value (`undefined`, `null`, `boolean`, or the original string)
  */
-export function toRealValue<T extends 'undefined'>(v: T): undefined
-export function toRealValue<T extends 'null'>(v: T): null
-export function toRealValue<T extends string>(v: T): Exclude<T, 'undefined' | 'null'>
+export function toRealValue(v: 'undefined'): undefined
+export function toRealValue(v: 'null'): null
+export function toRealValue(v: 'true' | 'false'): boolean
+export function toRealValue(v: 'undefined' | 'null'): undefined | null
+export function toRealValue(v: 'undefined' | 'true' | 'false'): undefined | boolean
+export function toRealValue(v: 'null' | 'true' | 'false'): null | boolean
+export function toRealValue(v: 'undefined' | 'null' | 'true' | 'false'): undefined | null | boolean
+export function toRealValue<T extends string>(v: T): Exclude<T, 'undefined' | 'null' | 'true' | 'false'>
 export function toRealValue(v: string) {
   if (v === 'undefined') return undefined
   if (v === 'null') return null
+  if (v === 'true') return true
+  if (v === 'false') return false
   return v
 }