diff --git a/src/renderer/src/aiCore/provider/__tests__/providerConfig.test.ts b/src/renderer/src/aiCore/provider/__tests__/providerConfig.test.ts index 430ff5286..43d3cc52b 100644 --- a/src/renderer/src/aiCore/provider/__tests__/providerConfig.test.ts +++ b/src/renderer/src/aiCore/provider/__tests__/providerConfig.test.ts @@ -22,11 +22,15 @@ vi.mock('@renderer/services/AssistantService', () => ({ }) })) -vi.mock('@renderer/store', () => ({ - default: { - getState: () => ({ copilot: { defaultHeaders: {} } }) +vi.mock('@renderer/store', () => { + const mockGetState = vi.fn() + return { + default: { + getState: mockGetState + }, + __mockGetState: mockGetState } -})) +}) vi.mock('@renderer/utils/api', () => ({ formatApiHost: vi.fn((host, isSupportedAPIVersion = true) => { @@ -79,6 +83,8 @@ import { isCherryAIProvider, isPerplexityProvider } from '@renderer/utils/provid import { COPILOT_DEFAULT_HEADERS, COPILOT_EDITOR_VERSION, isCopilotResponsesModel } from '../constants' import { getActualProvider, providerToAiSdkConfig } from '../providerConfig' +const { __mockGetState: mockGetState } = vi.mocked(await import('@renderer/store')) as any + const createWindowKeyv = () => { const store = new Map() return { @@ -132,6 +138,16 @@ describe('Copilot responses routing', () => { ...(globalThis as any).window, keyv: createWindowKeyv() } + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: undefined + } + } + } + }) }) it('detects official GPT-5 Codex identifiers case-insensitively', () => { @@ -167,6 +183,16 @@ describe('CherryAI provider configuration', () => { ...(globalThis as any).window, keyv: createWindowKeyv() } + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: undefined + } + } + } + }) vi.clearAllMocks() }) @@ -231,6 +257,16 @@ describe('Perplexity provider configuration', () => { ...(globalThis as any).window, keyv: 
createWindowKeyv() } + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: undefined + } + } + } + }) vi.clearAllMocks() }) @@ -291,3 +327,165 @@ describe('Perplexity provider configuration', () => { expect(actualProvider.apiHost).toBe('') }) }) + +describe('Stream options includeUsage configuration', () => { + beforeEach(() => { + ;(globalThis as any).window = { + ...(globalThis as any).window, + keyv: createWindowKeyv() + } + vi.clearAllMocks() + }) + + const createOpenAIProvider = (): Provider => ({ + id: 'openai-compatible', + type: 'openai', + name: 'OpenAI', + apiKey: 'test-key', + apiHost: 'https://api.openai.com', + models: [], + isSystem: true + }) + + it('uses includeUsage from settings when undefined', () => { + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: undefined + } + } + } + }) + + const provider = createOpenAIProvider() + const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai')) + + expect(config.options.includeUsage).toBeUndefined() + }) + + it('uses includeUsage from settings when set to true', () => { + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: true + } + } + } + }) + + const provider = createOpenAIProvider() + const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai')) + + expect(config.options.includeUsage).toBe(true) + }) + + it('uses includeUsage from settings when set to false', () => { + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: false + } + } + } + }) + + const provider = createOpenAIProvider() + const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai')) + + expect(config.options.includeUsage).toBe(false) + }) + + it('respects 
includeUsage setting for non-supporting providers', () => { + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: true + } + } + } + }) + + const testProvider: Provider = { + id: 'test', + type: 'openai', + name: 'test', + apiKey: 'test-key', + apiHost: 'https://api.test.com', + models: [], + isSystem: false, + apiOptions: { + isNotSupportStreamOptions: true + } + } + + const config = providerToAiSdkConfig(testProvider, createModel('gpt-4', 'GPT-4', 'test')) + + // Even though setting is true, provider doesn't support it, so includeUsage should be undefined + expect(config.options.includeUsage).toBeUndefined() + }) + + it('uses includeUsage from settings for Copilot provider when set to false', () => { + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: false + } + } + } + }) + + const provider = createCopilotProvider() + const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot')) + + expect(config.options.includeUsage).toBe(false) + expect(config.providerId).toBe('github-copilot-openai-compatible') + }) + + it('uses includeUsage from settings for Copilot provider when set to true', () => { + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: true + } + } + } + }) + + const provider = createCopilotProvider() + const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot')) + + expect(config.options.includeUsage).toBe(true) + expect(config.providerId).toBe('github-copilot-openai-compatible') + }) + + it('uses includeUsage from settings for Copilot provider when undefined', () => { + mockGetState.mockReturnValue({ + copilot: { defaultHeaders: {} }, + settings: { + openAI: { + streamOptions: { + includeUsage: undefined + } + } + } + }) + + const provider = createCopilotProvider() + const 
config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot')) + + expect(config.options.includeUsage).toBeUndefined() + expect(config.providerId).toBe('github-copilot-openai-compatible') + }) +}) diff --git a/src/renderer/src/aiCore/provider/providerConfig.ts b/src/renderer/src/aiCore/provider/providerConfig.ts index 0be69bdb4..99e4fbd1c 100644 --- a/src/renderer/src/aiCore/provider/providerConfig.ts +++ b/src/renderer/src/aiCore/provider/providerConfig.ts @@ -11,6 +11,7 @@ import { createVertexProvider, isVertexAIConfigured } from '@renderer/hooks/useV import { getProviderByModel } from '@renderer/services/AssistantService' import store from '@renderer/store' import { isSystemProvider, type Model, type Provider, SystemProviderIds } from '@renderer/types' +import type { OpenAICompletionsStreamOptions } from '@renderer/types/aiCoreTypes' import { formatApiHost, formatAzureOpenAIApiHost, @@ -147,6 +148,10 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A baseURL: baseURL, apiKey: actualProvider.apiKey } + let includeUsage: OpenAICompletionsStreamOptions['include_usage'] = undefined + if (isSupportStreamOptionsProvider(actualProvider)) { + includeUsage = store.getState().settings.openAI?.streamOptions?.includeUsage + } const isCopilotProvider = actualProvider.id === SystemProviderIds.copilot if (isCopilotProvider) { @@ -158,7 +163,7 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A ...actualProvider.extra_headers }, name: actualProvider.id, - includeUsage: true + includeUsage }) return { @@ -261,7 +266,7 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A ...options, name: actualProvider.id, ...extraOptions, - includeUsage: isSupportStreamOptionsProvider(actualProvider) + includeUsage } } } diff --git a/src/renderer/src/aiCore/utils/reasoning.ts b/src/renderer/src/aiCore/utils/reasoning.ts index 46350b085..1e74db24d 100644 --- 
a/src/renderer/src/aiCore/utils/reasoning.ts +++ b/src/renderer/src/aiCore/utils/reasoning.ts @@ -37,7 +37,7 @@ import { getStoreSetting } from '@renderer/hooks/useSettings' import { getAssistantSettings, getProviderByModel } from '@renderer/services/AssistantService' import type { Assistant, Model } from '@renderer/types' import { EFFORT_RATIO, isSystemProvider, SystemProviderIds } from '@renderer/types' -import type { OpenAISummaryText } from '@renderer/types/aiCoreTypes' +import type { OpenAIReasoningSummary } from '@renderer/types/aiCoreTypes' import type { ReasoningEffortOptionalParams } from '@renderer/types/sdk' import { isSupportEnableThinkingProvider } from '@renderer/utils/provider' import { toInteger } from 'lodash' @@ -448,7 +448,7 @@ export function getOpenAIReasoningParams( const openAI = getStoreSetting('openAI') const summaryText = openAI.summaryText - let reasoningSummary: OpenAISummaryText = undefined + let reasoningSummary: OpenAIReasoningSummary = undefined if (model.id.includes('o1-pro')) { reasoningSummary = undefined diff --git a/src/renderer/src/config/constant.ts b/src/renderer/src/config/constant.ts index 9903ce2db..958f9bd20 100644 --- a/src/renderer/src/config/constant.ts +++ b/src/renderer/src/config/constant.ts @@ -5,6 +5,7 @@ export const SYSTEM_PROMPT_THRESHOLD = 128 export const DEFAULT_KNOWLEDGE_DOCUMENT_COUNT = 6 export const DEFAULT_KNOWLEDGE_THRESHOLD = 0.0 export const DEFAULT_WEBSEARCH_RAG_DOCUMENT_COUNT = 1 +export const DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE = true export const platform = window.electron?.process?.platform export const isMac = platform === 'darwin' diff --git a/src/renderer/src/i18n/locales/en-us.json b/src/renderer/src/i18n/locales/en-us.json index 427cbdcff..2ebddb688 100644 --- a/src/renderer/src/i18n/locales/en-us.json +++ b/src/renderer/src/i18n/locales/en-us.json @@ -1162,6 +1162,7 @@ "no_results": "No results", "none": "None", "off": "Off", + "on": "On", "open": "Open", "paste": "Paste", "placeholders": 
{ @@ -4271,6 +4272,12 @@ "tip": "Specifies the latency tier to use for processing the request", "title": "Service Tier" }, + "stream_options": { + "include_usage": { + "tip": "Whether token usage is included (applicable only to the OpenAI Chat Completions API)", + "title": "Include usage" + } + }, "summary_text_mode": { "auto": "auto", "concise": "concise", diff --git a/src/renderer/src/i18n/locales/zh-cn.json b/src/renderer/src/i18n/locales/zh-cn.json index 69f4e63ee..4218c68f5 100644 --- a/src/renderer/src/i18n/locales/zh-cn.json +++ b/src/renderer/src/i18n/locales/zh-cn.json @@ -1162,6 +1162,7 @@ "no_results": "无结果", "none": "无", "off": "关闭", + "on": "启用", "open": "打开", "paste": "粘贴", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": "指定用于处理请求的延迟层级", "title": "服务层级" }, + "stream_options": { + "include_usage": { + "tip": "是否请求 Tokens 用量(仅 OpenAI Chat Completions API 可用)", + "title": "包含用量" + } + }, "summary_text_mode": { "auto": "自动", "concise": "简洁", diff --git a/src/renderer/src/i18n/locales/zh-tw.json b/src/renderer/src/i18n/locales/zh-tw.json index d21f66ccb..bcf12aa63 100644 --- a/src/renderer/src/i18n/locales/zh-tw.json +++ b/src/renderer/src/i18n/locales/zh-tw.json @@ -1162,6 +1162,7 @@ "no_results": "沒有結果", "none": "無", "off": "關閉", + "on": "開啟", "open": "開啟", "paste": "貼上", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": "指定用於處理請求的延遲層級", "title": "服務層級" }, + "stream_options": { + "include_usage": { + "tip": "是否請求 Tokens 用量(僅 OpenAI Chat Completions API 可用)", + "title": "包含用量" + } + }, "summary_text_mode": { "auto": "自動", "concise": "簡潔", diff --git a/src/renderer/src/i18n/translate/de-de.json b/src/renderer/src/i18n/translate/de-de.json index 7d11af7a1..963f06551 100644 --- a/src/renderer/src/i18n/translate/de-de.json +++ b/src/renderer/src/i18n/translate/de-de.json @@ -1162,6 +1162,7 @@ "no_results": "Keine Ergebnisse", "none": "Keine", "off": "Aus", + "on": "An", "open": "Öffnen", "paste": "Einfügen", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": 
"Latenz-Ebene für Anfrageverarbeitung festlegen", "title": "Service-Tier" }, + "stream_options": { + "include_usage": { + "tip": "Ob die Token-Nutzung enthalten ist (gilt nur für die OpenAI Chat Completions API)", + "title": "Nutzung einbeziehen" + } + }, "summary_text_mode": { "auto": "Automatisch", "concise": "Kompakt", diff --git a/src/renderer/src/i18n/translate/el-gr.json b/src/renderer/src/i18n/translate/el-gr.json index f451e8af3..6bc8b318d 100644 --- a/src/renderer/src/i18n/translate/el-gr.json +++ b/src/renderer/src/i18n/translate/el-gr.json @@ -1162,6 +1162,7 @@ "no_results": "Δεν βρέθηκαν αποτελέσματα", "none": "Χωρίς", "off": "Κλειστό", + "on": "Ενεργό", "open": "Άνοιγμα", "paste": "Επικόλληση", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": "Καθορίστε το επίπεδο καθυστέρησης που χρησιμοποιείται για την επεξεργασία των αιτημάτων", "title": "Επίπεδο υπηρεσίας" }, + "stream_options": { + "include_usage": { + "tip": "Είτε περιλαμβάνεται η χρήση διακριτικών (ισχύει μόνο για το OpenAI Chat Completions API)", + "title": "Συμπεριλάβετε χρήση" + } + }, "summary_text_mode": { "auto": "Αυτόματο", "concise": "Σύντομο", diff --git a/src/renderer/src/i18n/translate/es-es.json b/src/renderer/src/i18n/translate/es-es.json index ee2b03d06..925977529 100644 --- a/src/renderer/src/i18n/translate/es-es.json +++ b/src/renderer/src/i18n/translate/es-es.json @@ -1162,6 +1162,7 @@ "no_results": "Sin resultados", "none": "无", "off": "Apagado", + "on": "En", "open": "Abrir", "paste": "Pegar", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": "Especifica el nivel de latencia utilizado para procesar la solicitud", "title": "Nivel de servicio" }, + "stream_options": { + "include_usage": { + "tip": "Si se incluye el uso de tokens (aplicable solo a la API de Completions de chat de OpenAI)", + "title": "Incluir uso" + } + }, "summary_text_mode": { "auto": "Automático", "concise": "Conciso", diff --git a/src/renderer/src/i18n/translate/fr-fr.json 
b/src/renderer/src/i18n/translate/fr-fr.json index e909edc25..5bd57f777 100644 --- a/src/renderer/src/i18n/translate/fr-fr.json +++ b/src/renderer/src/i18n/translate/fr-fr.json @@ -1162,6 +1162,7 @@ "no_results": "Aucun résultat", "none": "Aucun", "off": "Désactivé", + "on": "Marche", "open": "Ouvrir", "paste": "Coller", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": "Spécifie le niveau de latence utilisé pour traiter la demande", "title": "Niveau de service" }, + "stream_options": { + "include_usage": { + "tip": "Si l'utilisation des jetons est incluse (applicable uniquement à l'API OpenAI Chat Completions)", + "title": "Inclure l'utilisation" + } + }, "summary_text_mode": { "auto": "Automatique", "concise": "Concis", diff --git a/src/renderer/src/i18n/translate/ja-jp.json b/src/renderer/src/i18n/translate/ja-jp.json index aa705da38..9d0926c48 100644 --- a/src/renderer/src/i18n/translate/ja-jp.json +++ b/src/renderer/src/i18n/translate/ja-jp.json @@ -1162,6 +1162,7 @@ "no_results": "検索結果なし", "none": "無", "off": "オフ", + "on": "オン", "open": "開く", "paste": "貼り付け", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": "リクエスト処理に使用するレイテンシティアを指定します", "title": "サービスティア" }, + "stream_options": { + "include_usage": { + "tip": "トークン使用量が含まれるかどうか (OpenAI Chat Completions APIのみに適用)", + "title": "使用量を含める" + } + }, "summary_text_mode": { "auto": "自動", "concise": "簡潔", diff --git a/src/renderer/src/i18n/translate/pt-pt.json b/src/renderer/src/i18n/translate/pt-pt.json index 056306838..b84971d72 100644 --- a/src/renderer/src/i18n/translate/pt-pt.json +++ b/src/renderer/src/i18n/translate/pt-pt.json @@ -1162,6 +1162,7 @@ "no_results": "Nenhum resultado", "none": "Nenhum", "off": "Desligado", + "on": "Ligado", "open": "Abrir", "paste": "Colar", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": "Especifique o nível de latência usado para processar a solicitação", "title": "Nível de Serviço" }, + "stream_options": { + "include_usage": { + "tip": "Se o uso de tokens está incluído (aplicável 
apenas à API de Conclusões de Chat da OpenAI)", + "title": "Incluir uso" + } + }, "summary_text_mode": { "auto": "Automático", "concise": "Conciso", diff --git a/src/renderer/src/i18n/translate/ru-ru.json b/src/renderer/src/i18n/translate/ru-ru.json index 12d696ec8..50db74739 100644 --- a/src/renderer/src/i18n/translate/ru-ru.json +++ b/src/renderer/src/i18n/translate/ru-ru.json @@ -1162,6 +1162,7 @@ "no_results": "Результатов не найдено", "none": "без", "off": "Выкл", + "on": "Вкл", "open": "Открыть", "paste": "Вставить", "placeholders": { @@ -4271,6 +4272,12 @@ "tip": "Указывает уровень задержки, который следует использовать для обработки запроса", "title": "Уровень сервиса" }, + "stream_options": { + "include_usage": { + "tip": "Включено ли использование токенов (применимо только к API завершения чата OpenAI)", + "title": "Включить использование" + } + }, "summary_text_mode": { "auto": "Авто", "concise": "Краткий", diff --git a/src/renderer/src/pages/home/Tabs/SettingsTab.tsx b/src/renderer/src/pages/home/Tabs/SettingsTab.tsx index 57dac8c78..014897ce9 100644 --- a/src/renderer/src/pages/home/Tabs/SettingsTab.tsx +++ b/src/renderer/src/pages/home/Tabs/SettingsTab.tsx @@ -56,7 +56,11 @@ import type { Assistant, AssistantSettings, CodeStyleVarious, MathEngine } from import { isGroqSystemProvider, ThemeMode } from '@renderer/types' import { modalConfirm } from '@renderer/utils' import { getSendMessageShortcutLabel } from '@renderer/utils/input' -import { isSupportServiceTierProvider, isSupportVerbosityProvider } from '@renderer/utils/provider' +import { + isOpenAICompatibleProvider, + isSupportServiceTierProvider, + isSupportVerbosityProvider +} from '@renderer/utils/provider' import { Button, Col, InputNumber, Row, Slider, Switch } from 'antd' import { Settings2 } from 'lucide-react' import type { FC } from 'react' @@ -184,6 +188,7 @@ const SettingsTab: FC = (props) => { const model = assistant.model || getDefaultModel() const showOpenAiSettings = + 
isOpenAICompatibleProvider(provider) || isOpenAIModel(model) || isSupportServiceTierProvider(provider) || (isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider)) diff --git a/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup.tsx b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup.tsx deleted file mode 100644 index 35c943e21..000000000 --- a/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup.tsx +++ /dev/null @@ -1,247 +0,0 @@ -import Selector from '@renderer/components/Selector' -import { - getModelSupportedVerbosity, - isSupportedReasoningEffortOpenAIModel, - isSupportFlexServiceTierModel, - isSupportVerbosityModel -} from '@renderer/config/models' -import { useProvider } from '@renderer/hooks/useProvider' -import { SettingDivider, SettingRow } from '@renderer/pages/settings' -import { CollapsibleSettingGroup } from '@renderer/pages/settings/SettingGroup' -import type { RootState } from '@renderer/store' -import { useAppDispatch } from '@renderer/store' -import { setOpenAISummaryText, setOpenAIVerbosity } from '@renderer/store/settings' -import type { Model, OpenAIServiceTier, ServiceTier } from '@renderer/types' -import { SystemProviderIds } from '@renderer/types' -import type { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes' -import { isSupportServiceTierProvider, isSupportVerbosityProvider } from '@renderer/utils/provider' -import { toOptionValue, toRealValue } from '@renderer/utils/select' -import { Tooltip } from 'antd' -import { CircleHelp } from 'lucide-react' -import type { FC } from 'react' -import { useCallback, useEffect, useMemo } from 'react' -import { useTranslation } from 'react-i18next' -import { useSelector } from 'react-redux' - -type VerbosityOption = { - value: NonNullable | 'undefined' | 'null' - label: string -} - -type SummaryTextOption = { - value: NonNullable | 'undefined' | 'null' - label: string -} - -type OpenAIServiceTierOption = { value: NonNullable | 
'null' | 'undefined'; label: string } - -interface Props { - model: Model - providerId: string - SettingGroup: FC<{ children: React.ReactNode }> - SettingRowTitleSmall: FC<{ children: React.ReactNode }> -} - -const OpenAISettingsGroup: FC = ({ model, providerId, SettingGroup, SettingRowTitleSmall }) => { - const { t } = useTranslation() - const { provider, updateProvider } = useProvider(providerId) - const verbosity = useSelector((state: RootState) => state.settings.openAI.verbosity) - const summaryText = useSelector((state: RootState) => state.settings.openAI.summaryText) - const serviceTierMode = provider.serviceTier - const dispatch = useAppDispatch() - - const showSummarySetting = - isSupportedReasoningEffortOpenAIModel(model) && - !model.id.includes('o1-pro') && - (provider.type === 'openai-response' || model.endpoint_type === 'openai-response' || provider.id === 'aihubmix') - const showVerbositySetting = isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider) - const isSupportFlexServiceTier = isSupportFlexServiceTierModel(model) - const isSupportServiceTier = isSupportServiceTierProvider(provider) - const showServiceTierSetting = isSupportServiceTier && providerId !== SystemProviderIds.groq - - const setSummaryText = useCallback( - (value: OpenAISummaryText) => { - dispatch(setOpenAISummaryText(value)) - }, - [dispatch] - ) - - const setServiceTierMode = useCallback( - (value: ServiceTier) => { - updateProvider({ serviceTier: value }) - }, - [updateProvider] - ) - - const setVerbosity = useCallback( - (value: OpenAIVerbosity) => { - dispatch(setOpenAIVerbosity(value)) - }, - [dispatch] - ) - - const summaryTextOptions = [ - { - value: 'undefined', - label: t('common.ignore') - }, - { - value: 'null', - label: t('common.off') - }, - { - value: 'auto', - label: t('settings.openai.summary_text_mode.auto') - }, - { - value: 'detailed', - label: t('settings.openai.summary_text_mode.detailed') - }, - { - value: 'concise', - label: 
t('settings.openai.summary_text_mode.concise') - } - ] as const satisfies SummaryTextOption[] - - const verbosityOptions = useMemo(() => { - const allOptions = [ - { - value: 'undefined', - label: t('common.ignore') - }, - { - value: 'null', - label: t('common.off') - }, - { - value: 'low', - label: t('settings.openai.verbosity.low') - }, - { - value: 'medium', - label: t('settings.openai.verbosity.medium') - }, - { - value: 'high', - label: t('settings.openai.verbosity.high') - } - ] as const satisfies VerbosityOption[] - const supportedVerbosityLevels = getModelSupportedVerbosity(model).map((v) => toOptionValue(v)) - return allOptions.filter((option) => supportedVerbosityLevels.includes(option.value)) - }, [model, t]) - - const serviceTierOptions = useMemo(() => { - const options = [ - { - value: 'undefined', - label: t('common.ignore') - }, - { - value: 'null', - label: t('common.off') - }, - { - value: 'auto', - label: t('settings.openai.service_tier.auto') - }, - { - value: 'default', - label: t('settings.openai.service_tier.default') - }, - { - value: 'flex', - label: t('settings.openai.service_tier.flex') - }, - { - value: 'priority', - label: t('settings.openai.service_tier.priority') - } - ] as const satisfies OpenAIServiceTierOption[] - return options.filter((option) => { - if (option.value === 'flex') { - return isSupportFlexServiceTier - } - return true - }) - }, [isSupportFlexServiceTier, t]) - - useEffect(() => { - if (verbosity && !verbosityOptions.some((option) => option.value === verbosity)) { - const supportedVerbosityLevels = getModelSupportedVerbosity(model) - // Default to the highest supported verbosity level - const defaultVerbosity = supportedVerbosityLevels[supportedVerbosityLevels.length - 1] - setVerbosity(defaultVerbosity) - } - }, [model, verbosity, verbosityOptions, setVerbosity]) - - if (!showSummarySetting && !showServiceTierSetting && !showVerbositySetting) { - return null - } - - return ( - - - {showServiceTierSetting && ( - <> - - 
- {t('settings.openai.service_tier.title')}{' '} - - - - - { - setServiceTierMode(toRealValue(value)) - }} - options={serviceTierOptions} - /> - - {(showSummarySetting || showVerbositySetting) && } - - )} - {showSummarySetting && ( - <> - - - {t('settings.openai.summary_text_mode.title')}{' '} - - - - - { - setSummaryText(toRealValue(value)) - }} - options={summaryTextOptions} - /> - - {showVerbositySetting && } - - )} - {showVerbositySetting && ( - - - {t('settings.openai.verbosity.title')}{' '} - - - - - { - setVerbosity(toRealValue(value)) - }} - options={verbosityOptions} - /> - - )} - - - - ) -} - -export default OpenAISettingsGroup diff --git a/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/OpenAISettingsGroup.tsx b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/OpenAISettingsGroup.tsx new file mode 100644 index 000000000..2aa24f94f --- /dev/null +++ b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/OpenAISettingsGroup.tsx @@ -0,0 +1,72 @@ +import { isSupportedReasoningEffortOpenAIModel, isSupportVerbosityModel } from '@renderer/config/models' +import { useProvider } from '@renderer/hooks/useProvider' +import { SettingDivider } from '@renderer/pages/settings' +import { CollapsibleSettingGroup } from '@renderer/pages/settings/SettingGroup' +import type { Model } from '@renderer/types' +import { SystemProviderIds } from '@renderer/types' +import { + isSupportServiceTierProvider, + isSupportStreamOptionsProvider, + isSupportVerbosityProvider +} from '@renderer/utils/provider' +import type { FC } from 'react' +import { useTranslation } from 'react-i18next' + +import ReasoningSummarySetting from './ReasoningSummarySetting' +import ServiceTierSetting from './ServiceTierSetting' +import StreamOptionsSetting from './StreamOptionsSetting' +import VerbositySetting from './VerbositySetting' + +interface Props { + model: Model + providerId: string + SettingGroup: FC<{ children: React.ReactNode }> + SettingRowTitleSmall: 
FC<{ children: React.ReactNode }> +} + +const OpenAISettingsGroup: FC = ({ model, providerId, SettingGroup, SettingRowTitleSmall }) => { + const { t } = useTranslation() + const { provider } = useProvider(providerId) + + const showSummarySetting = + isSupportedReasoningEffortOpenAIModel(model) && + !model.id.includes('o1-pro') && + (provider.type === 'openai-response' || model.endpoint_type === 'openai-response' || provider.id === 'aihubmix') + const showVerbositySetting = isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider) + const isSupportServiceTier = isSupportServiceTierProvider(provider) + const showServiceTierSetting = isSupportServiceTier && providerId !== SystemProviderIds.groq + const showStreamOptionsSetting = isSupportStreamOptionsProvider(provider) + + if (!showSummarySetting && !showServiceTierSetting && !showVerbositySetting && !showStreamOptionsSetting) { + return null + } + + return ( + + + {showServiceTierSetting && ( + <> + + {(showSummarySetting || showVerbositySetting || showStreamOptionsSetting) && } + + )} + {showSummarySetting && ( + <> + + {(showVerbositySetting || showStreamOptionsSetting) && } + + )} + {showVerbositySetting && ( + <> + + {showStreamOptionsSetting && } + + )} + {showStreamOptionsSetting && } + + + + ) +} + +export default OpenAISettingsGroup diff --git a/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/ReasoningSummarySetting.tsx b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/ReasoningSummarySetting.tsx new file mode 100644 index 000000000..3754fef0b --- /dev/null +++ b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/ReasoningSummarySetting.tsx @@ -0,0 +1,78 @@ +import Selector from '@renderer/components/Selector' +import { SettingRow } from '@renderer/pages/settings' +import type { RootState } from '@renderer/store' +import { useAppDispatch } from '@renderer/store' +import { setOpenAISummaryText } from '@renderer/store/settings' +import type { 
OpenAIReasoningSummary } from '@renderer/types/aiCoreTypes' +import { toOptionValue, toRealValue } from '@renderer/utils/select' +import { Tooltip } from 'antd' +import { CircleHelp } from 'lucide-react' +import type { FC } from 'react' +import { useCallback } from 'react' +import { useTranslation } from 'react-i18next' +import { useSelector } from 'react-redux' + +type SummaryTextOption = { + value: NonNullable | 'undefined' | 'null' + label: string +} + +interface Props { + SettingRowTitleSmall: FC<{ children: React.ReactNode }> +} + +const ReasoningSummarySetting: FC = ({ SettingRowTitleSmall }) => { + const { t } = useTranslation() + const summaryText = useSelector((state: RootState) => state.settings.openAI.summaryText) + const dispatch = useAppDispatch() + + const setSummaryText = useCallback( + (value: OpenAIReasoningSummary) => { + dispatch(setOpenAISummaryText(value)) + }, + [dispatch] + ) + + const summaryTextOptions = [ + { + value: 'undefined', + label: t('common.ignore') + }, + { + value: 'null', + label: t('common.off') + }, + { + value: 'auto', + label: t('settings.openai.summary_text_mode.auto') + }, + { + value: 'detailed', + label: t('settings.openai.summary_text_mode.detailed') + }, + { + value: 'concise', + label: t('settings.openai.summary_text_mode.concise') + } + ] as const satisfies SummaryTextOption[] + + return ( + + + {t('settings.openai.summary_text_mode.title')}{' '} + + + + + { + setSummaryText(toRealValue(value)) + }} + options={summaryTextOptions} + /> + + ) +} + +export default ReasoningSummarySetting diff --git a/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/ServiceTierSetting.tsx b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/ServiceTierSetting.tsx new file mode 100644 index 000000000..114ff0da3 --- /dev/null +++ b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/ServiceTierSetting.tsx @@ -0,0 +1,88 @@ +import Selector from '@renderer/components/Selector' +import { 
isSupportFlexServiceTierModel } from '@renderer/config/models' +import { useProvider } from '@renderer/hooks/useProvider' +import { SettingRow } from '@renderer/pages/settings' +import type { Model, OpenAIServiceTier, ServiceTier } from '@renderer/types' +import { toOptionValue, toRealValue } from '@renderer/utils/select' +import { Tooltip } from 'antd' +import { CircleHelp } from 'lucide-react' +import type { FC } from 'react' +import { useCallback, useMemo } from 'react' +import { useTranslation } from 'react-i18next' + +type OpenAIServiceTierOption = { value: NonNullable | 'null' | 'undefined'; label: string } + +interface Props { + model: Model + providerId: string + SettingRowTitleSmall: FC<{ children: React.ReactNode }> +} + +const ServiceTierSetting: FC = ({ model, providerId, SettingRowTitleSmall }) => { + const { t } = useTranslation() + const { provider, updateProvider } = useProvider(providerId) + const serviceTierMode = provider.serviceTier + const isSupportFlexServiceTier = isSupportFlexServiceTierModel(model) + + const setServiceTierMode = useCallback( + (value: ServiceTier) => { + updateProvider({ serviceTier: value }) + }, + [updateProvider] + ) + + const serviceTierOptions = useMemo(() => { + const options = [ + { + value: 'undefined', + label: t('common.ignore') + }, + { + value: 'null', + label: t('common.off') + }, + { + value: 'auto', + label: t('settings.openai.service_tier.auto') + }, + { + value: 'default', + label: t('settings.openai.service_tier.default') + }, + { + value: 'flex', + label: t('settings.openai.service_tier.flex') + }, + { + value: 'priority', + label: t('settings.openai.service_tier.priority') + } + ] as const satisfies OpenAIServiceTierOption[] + return options.filter((option) => { + if (option.value === 'flex') { + return isSupportFlexServiceTier + } + return true + }) + }, [isSupportFlexServiceTier, t]) + + return ( + + + {t('settings.openai.service_tier.title')}{' '} + + + + + { + setServiceTierMode(toRealValue(value)) + 
}} + options={serviceTierOptions} + /> + + ) +} + +export default ServiceTierSetting diff --git a/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/StreamOptionsSetting.tsx b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/StreamOptionsSetting.tsx new file mode 100644 index 000000000..b9de0fe81 --- /dev/null +++ b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/StreamOptionsSetting.tsx @@ -0,0 +1,72 @@ +import Selector from '@renderer/components/Selector' +import { SettingRow } from '@renderer/pages/settings' +import type { RootState } from '@renderer/store' +import { useAppDispatch } from '@renderer/store' +import { setOpenAIStreamOptionsIncludeUsage } from '@renderer/store/settings' +import type { OpenAICompletionsStreamOptions } from '@renderer/types/aiCoreTypes' +import { toOptionValue, toRealValue } from '@renderer/utils/select' +import { Tooltip } from 'antd' +import { CircleHelp } from 'lucide-react' +import type { FC } from 'react' +import { useCallback, useMemo } from 'react' +import { useTranslation } from 'react-i18next' +import { useSelector } from 'react-redux' + +type IncludeUsageOption = { + value: 'undefined' | 'false' | 'true' + label: string +} + +interface Props { + SettingRowTitleSmall: FC<{ children: React.ReactNode }> +} + +const StreamOptionsSetting: FC = ({ SettingRowTitleSmall }) => { + const { t } = useTranslation() + const includeUsage = useSelector((state: RootState) => state.settings.openAI?.streamOptions?.includeUsage) + const dispatch = useAppDispatch() + + const setIncludeUsage = useCallback( + (value: OpenAICompletionsStreamOptions['include_usage']) => { + dispatch(setOpenAIStreamOptionsIncludeUsage(value)) + }, + [dispatch] + ) + + const includeUsageOptions = useMemo(() => { + return [ + { + value: 'undefined', + label: t('common.ignore') + }, + { + value: 'false', + label: t('common.off') + }, + { + value: 'true', + label: t('common.on') + } + ] as const satisfies IncludeUsageOption[] 
+ }, [t]) + + return ( + + + {t('settings.openai.stream_options.include_usage.title')}{' '} + + + + + { + setIncludeUsage(toRealValue(value)) + }} + options={includeUsageOptions} + /> + + ) +} + +export default StreamOptionsSetting diff --git a/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/VerbositySetting.tsx b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/VerbositySetting.tsx new file mode 100644 index 000000000..550f8d443 --- /dev/null +++ b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/VerbositySetting.tsx @@ -0,0 +1,94 @@ +import Selector from '@renderer/components/Selector' +import { getModelSupportedVerbosity } from '@renderer/config/models' +import { SettingRow } from '@renderer/pages/settings' +import type { RootState } from '@renderer/store' +import { useAppDispatch } from '@renderer/store' +import { setOpenAIVerbosity } from '@renderer/store/settings' +import type { Model } from '@renderer/types' +import type { OpenAIVerbosity } from '@renderer/types/aiCoreTypes' +import { toOptionValue, toRealValue } from '@renderer/utils/select' +import { Tooltip } from 'antd' +import { CircleHelp } from 'lucide-react' +import type { FC } from 'react' +import { useCallback, useEffect, useMemo } from 'react' +import { useTranslation } from 'react-i18next' +import { useSelector } from 'react-redux' + +type VerbosityOption = { + value: NonNullable | 'undefined' | 'null' + label: string +} + +interface Props { + model: Model + SettingRowTitleSmall: FC<{ children: React.ReactNode }> +} + +const VerbositySetting: FC = ({ model, SettingRowTitleSmall }) => { + const { t } = useTranslation() + const verbosity = useSelector((state: RootState) => state.settings.openAI.verbosity) + const dispatch = useAppDispatch() + + const setVerbosity = useCallback( + (value: OpenAIVerbosity) => { + dispatch(setOpenAIVerbosity(value)) + }, + [dispatch] + ) + + const verbosityOptions = useMemo(() => { + const allOptions = [ + { + value: 
'undefined', + label: t('common.ignore') + }, + { + value: 'null', + label: t('common.off') + }, + { + value: 'low', + label: t('settings.openai.verbosity.low') + }, + { + value: 'medium', + label: t('settings.openai.verbosity.medium') + }, + { + value: 'high', + label: t('settings.openai.verbosity.high') + } + ] as const satisfies VerbosityOption[] + const supportedVerbosityLevels = getModelSupportedVerbosity(model).map((v) => toOptionValue(v)) + return allOptions.filter((option) => supportedVerbosityLevels.includes(option.value)) + }, [model, t]) + + useEffect(() => { + if (verbosity !== undefined && !verbosityOptions.some((option) => option.value === toOptionValue(verbosity))) { + const supportedVerbosityLevels = getModelSupportedVerbosity(model) + // Default to the highest supported verbosity level + const defaultVerbosity = supportedVerbosityLevels[supportedVerbosityLevels.length - 1] + setVerbosity(defaultVerbosity) + } + }, [model, verbosity, verbosityOptions, setVerbosity]) + + return ( + + + {t('settings.openai.verbosity.title')}{' '} + + + + + { + setVerbosity(toRealValue(value)) + }} + options={verbosityOptions} + /> + + ) +} + +export default VerbositySetting diff --git a/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/index.tsx b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/index.tsx new file mode 100644 index 000000000..18492971b --- /dev/null +++ b/src/renderer/src/pages/home/Tabs/components/OpenAISettingsGroup/index.tsx @@ -0,0 +1,3 @@ +import OpenAISettingsGroup from './OpenAISettingsGroup' + +export default OpenAISettingsGroup diff --git a/src/renderer/src/store/index.ts b/src/renderer/src/store/index.ts index 516d66cdc..8d9176be1 100644 --- a/src/renderer/src/store/index.ts +++ b/src/renderer/src/store/index.ts @@ -67,7 +67,7 @@ const persistedReducer = persistReducer( { key: 'cherry-studio', storage, - version: 181, + version: 182, blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'], 
migrate }, diff --git a/src/renderer/src/store/migrate.ts b/src/renderer/src/store/migrate.ts index 909837b3f..a80336e69 100644 --- a/src/renderer/src/store/migrate.ts +++ b/src/renderer/src/store/migrate.ts @@ -1,6 +1,11 @@ import { loggerService } from '@logger' import { nanoid } from '@reduxjs/toolkit' -import { DEFAULT_CONTEXTCOUNT, DEFAULT_TEMPERATURE, isMac } from '@renderer/config/constant' +import { + DEFAULT_CONTEXTCOUNT, + DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE, + DEFAULT_TEMPERATURE, + isMac +} from '@renderer/config/constant' import { DEFAULT_MIN_APPS } from '@renderer/config/minapps' import { glm45FlashModel, @@ -2956,6 +2961,21 @@ const migrateConfig = { logger.error('migrate 181 error', error as Error) return state } + }, + '182': (state: RootState) => { + try { + // Initialize streamOptions in settings.openAI if not exists + if (!state.settings.openAI.streamOptions) { + state.settings.openAI.streamOptions = { + includeUsage: DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE + } + } + logger.info('migrate 182 success') + return state + } catch (error) { + logger.error('migrate 182 error', error as Error) + return state + } } } diff --git a/src/renderer/src/store/settings.ts b/src/renderer/src/store/settings.ts index 36a478853..572f72274 100644 --- a/src/renderer/src/store/settings.ts +++ b/src/renderer/src/store/settings.ts @@ -1,6 +1,6 @@ import type { PayloadAction } from '@reduxjs/toolkit' import { createSlice } from '@reduxjs/toolkit' -import { isMac } from '@renderer/config/constant' +import { DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE, isMac } from '@renderer/config/constant' import { TRANSLATE_PROMPT } from '@renderer/config/prompts' import { DEFAULT_SIDEBAR_ICONS } from '@renderer/config/sidebar' import type { @@ -16,7 +16,11 @@ import type { TranslateLanguageCode } from '@renderer/types' import { ThemeMode } from '@renderer/types' -import type { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes' +import type { + 
OpenAICompletionsStreamOptions, + OpenAIReasoningSummary, + OpenAIVerbosity +} from '@renderer/types/aiCoreTypes' import { uuid } from '@renderer/utils' import { API_SERVER_DEFAULTS, UpgradeChannel } from '@shared/config/constant' @@ -193,10 +197,14 @@ export interface SettingsState { } // OpenAI openAI: { - summaryText: OpenAISummaryText + // TODO: it's a bad naming. rename it to reasoningSummary in v2. + summaryText: OpenAIReasoningSummary /** @deprecated 现在该设置迁移到Provider对象中 */ serviceTier: OpenAIServiceTier verbosity: OpenAIVerbosity + streamOptions: { + includeUsage: OpenAICompletionsStreamOptions['include_usage'] + } } // Notification notification: { @@ -376,7 +384,10 @@ export const initialState: SettingsState = { openAI: { summaryText: 'auto', serviceTier: 'auto', - verbosity: undefined + verbosity: undefined, + streamOptions: { + includeUsage: DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE + } }, notification: { assistant: false, @@ -791,12 +802,18 @@ const settingsSlice = createSlice({ setDisableHardwareAcceleration: (state, action: PayloadAction) => { state.disableHardwareAcceleration = action.payload }, - setOpenAISummaryText: (state, action: PayloadAction) => { + setOpenAISummaryText: (state, action: PayloadAction) => { state.openAI.summaryText = action.payload }, setOpenAIVerbosity: (state, action: PayloadAction) => { state.openAI.verbosity = action.payload }, + setOpenAIStreamOptionsIncludeUsage: ( + state, + action: PayloadAction + ) => { + state.openAI.streamOptions.includeUsage = action.payload + }, setNotificationSettings: (state, action: PayloadAction) => { state.notification = action.payload }, @@ -967,6 +984,7 @@ export const { setDisableHardwareAcceleration, setOpenAISummaryText, setOpenAIVerbosity, + setOpenAIStreamOptionsIncludeUsage, setNotificationSettings, // Local backup settings setLocalBackupDir, diff --git a/src/renderer/src/types/aiCoreTypes.ts b/src/renderer/src/types/aiCoreTypes.ts index 6281905cb..28250e405 100644 --- 
a/src/renderer/src/types/aiCoreTypes.ts +++ b/src/renderer/src/types/aiCoreTypes.ts @@ -50,7 +50,12 @@ export type OpenAIReasoningEffort = OpenAI.ReasoningEffort * When undefined, the parameter is omitted from the request. * When null, verbosity is explicitly disabled. */ -export type OpenAISummaryText = OpenAI.Reasoning['summary'] +export type OpenAIReasoningSummary = OpenAI.Reasoning['summary'] + +/** + * Options for streaming response. Only set this when you set `stream: true`. + */ +export type OpenAICompletionsStreamOptions = OpenAI.ChatCompletionStreamOptions const AiSdkParamsSchema = z.enum([ 'maxOutputTokens', diff --git a/src/renderer/src/utils/__tests__/select.test.ts b/src/renderer/src/utils/__tests__/select.test.ts new file mode 100644 index 000000000..36e7d95ac --- /dev/null +++ b/src/renderer/src/utils/__tests__/select.test.ts @@ -0,0 +1,163 @@ +import { describe, expect, it } from 'vitest' + +import { toOptionValue, toRealValue } from '../select' + +describe('toOptionValue', () => { + describe('primitive values', () => { + it('should convert undefined to string "undefined"', () => { + expect(toOptionValue(undefined)).toBe('undefined') + }) + + it('should convert null to string "null"', () => { + expect(toOptionValue(null)).toBe('null') + }) + + it('should convert true to string "true"', () => { + expect(toOptionValue(true)).toBe('true') + }) + + it('should convert false to string "false"', () => { + expect(toOptionValue(false)).toBe('false') + }) + }) + + describe('string values', () => { + it('should return string as-is', () => { + expect(toOptionValue('hello')).toBe('hello') + }) + + it('should return empty string as-is', () => { + expect(toOptionValue('')).toBe('') + }) + + it('should return string with special characters as-is', () => { + expect(toOptionValue('hello-world_123')).toBe('hello-world_123') + }) + + it('should return string that looks like a boolean as-is', () => { + expect(toOptionValue('True')).toBe('True') + 
expect(toOptionValue('FALSE')).toBe('FALSE') + }) + }) + + describe('mixed type scenarios', () => { + it('should handle union types correctly', () => { + const values: Array = ['test', true, false, null, undefined, ''] + + expect(toOptionValue(values[0])).toBe('test') + expect(toOptionValue(values[1])).toBe('true') + expect(toOptionValue(values[2])).toBe('false') + expect(toOptionValue(values[3])).toBe('null') + expect(toOptionValue(values[4])).toBe('undefined') + expect(toOptionValue(values[5])).toBe('') + }) + }) +}) + +describe('toRealValue', () => { + describe('special string values', () => { + it('should convert string "undefined" to undefined', () => { + expect(toRealValue('undefined')).toBeUndefined() + }) + + it('should convert string "null" to null', () => { + expect(toRealValue('null')).toBeNull() + }) + + it('should convert string "true" to boolean true', () => { + expect(toRealValue('true')).toBe(true) + }) + + it('should convert string "false" to boolean false', () => { + expect(toRealValue('false')).toBe(false) + }) + }) + + describe('regular string values', () => { + it('should return regular string as-is', () => { + expect(toRealValue('hello')).toBe('hello') + }) + + it('should return empty string as-is', () => { + expect(toRealValue('')).toBe('') + }) + + it('should return string with special characters as-is', () => { + expect(toRealValue('hello-world_123')).toBe('hello-world_123') + }) + + it('should return string that looks like special value but with different casing', () => { + expect(toRealValue('Undefined')).toBe('Undefined') + expect(toRealValue('NULL')).toBe('NULL') + expect(toRealValue('True')).toBe('True') + expect(toRealValue('False')).toBe('False') + }) + }) + + describe('edge cases', () => { + it('should handle strings containing special values as substring', () => { + expect(toRealValue('undefined_value')).toBe('undefined_value') + expect(toRealValue('null_check')).toBe('null_check') + 
expect(toRealValue('true_condition')).toBe('true_condition') + expect(toRealValue('false_flag')).toBe('false_flag') + }) + + it('should handle strings with whitespace', () => { + expect(toRealValue(' undefined')).toBe(' undefined') + expect(toRealValue('null ')).toBe('null ') + expect(toRealValue(' true ')).toBe(' true ') + }) + }) +}) + +describe('toOptionValue and toRealValue roundtrip', () => { + it('should correctly convert and restore undefined', () => { + const original = undefined + const option = toOptionValue(original) + const restored = toRealValue(option) + expect(restored).toBeUndefined() + }) + + it('should correctly convert and restore null', () => { + const original = null + const option = toOptionValue(original) + const restored = toRealValue(option) + expect(restored).toBeNull() + }) + + it('should correctly convert and restore true', () => { + const original = true + const option = toOptionValue(original) + const restored = toRealValue(option) + expect(restored).toBe(true) + }) + + it('should correctly convert and restore false', () => { + const original = false + const option = toOptionValue(original) + const restored = toRealValue(option) + expect(restored).toBe(false) + }) + + it('should correctly convert and restore string values', () => { + const strings = ['hello', '', 'test-123', 'some_value'] + strings.forEach((str) => { + const option = toOptionValue(str) + const restored = toRealValue(option) + expect(restored).toBe(str) + }) + }) + + it('should handle array of mixed values', () => { + const values: Array = ['test', true, false, null, undefined] + + const options = values.map(toOptionValue) + const restored = options.map(toRealValue) + + expect(restored[0]).toBe('test') + expect(restored[1]).toBe(true) + expect(restored[2]).toBe(false) + expect(restored[3]).toBeNull() + expect(restored[4]).toBeUndefined() + }) +}) diff --git a/src/renderer/src/utils/select.ts b/src/renderer/src/utils/select.ts index cf1eaa19d..07e24b00b 100644 --- 
a/src/renderer/src/utils/select.ts +++ b/src/renderer/src/utils/select.ts @@ -1,36 +1,63 @@ /** - * Convert a value (string | undefined | null) into an option-compatible string. + * Convert a value (string | undefined | null | boolean) into an option-compatible string. * - `undefined` becomes the literal string `'undefined'` * - `null` becomes the literal string `'null'` + * - `true` becomes the literal string `'true'` + * - `false` becomes the literal string `'false'` * - Any other string is returned as-is * * @param v - The value to convert * @returns The string representation safe for option usage */ -export function toOptionValue>(v: T): NonNullable | 'undefined' -export function toOptionValue>(v: T): NonNullable | 'null' -export function toOptionValue(v: T): NonNullable | 'undefined' | 'null' -export function toOptionValue>(v: T): T -export function toOptionValue(v: string | undefined | null) { - if (v === undefined) return 'undefined' - if (v === null) return 'null' - return v +export function toOptionValue(v: undefined): 'undefined' +export function toOptionValue(v: null): 'null' +export function toOptionValue(v: boolean): 'true' | 'false' +export function toOptionValue(v: boolean | undefined): 'true' | 'false' | 'undefined' +export function toOptionValue(v: boolean | null): 'true' | 'false' | 'null' +export function toOptionValue(v: boolean | undefined | null): 'true' | 'false' | 'undefined' | 'null' +export function toOptionValue(v: T): T +export function toOptionValue | undefined>(v: T): NonNullable | 'undefined' +export function toOptionValue | null>(v: T): NonNullable | 'null' +export function toOptionValue | boolean>(v: T): T | 'true' | 'false' +export function toOptionValue | null | undefined>( + v: T +): NonNullable | 'null' | 'undefined' +export function toOptionValue | null | boolean>( + v: T +): NonNullable | 'null' | 'true' | 'false' +export function toOptionValue | undefined | boolean>( + v: T +): NonNullable | 'undefined' | 'true' | 'false' 
+export function toOptionValue< + T extends Exclude | null | undefined | boolean +>(v: T): NonNullable | 'null' | 'undefined' | 'true' | 'false' +export function toOptionValue(v: string | undefined | null | boolean) { + return String(v) } /** * Convert an option string back to its original value. * - The literal string `'undefined'` becomes `undefined` * - The literal string `'null'` becomes `null` + * - The literal string `'true'` becomes `true` + * - The literal string `'false'` becomes `false` * - Any other string is returned as-is * * @param v - The option string to convert - * @returns The real value (`undefined`, `null`, or the original string) + * @returns The real value (`undefined`, `null`, `boolean`, or the original string) */ -export function toRealValue(v: T): undefined -export function toRealValue(v: T): null -export function toRealValue(v: T): Exclude +export function toRealValue(v: 'undefined'): undefined +export function toRealValue(v: 'null'): null +export function toRealValue(v: 'true' | 'false'): boolean +export function toRealValue(v: 'undefined' | 'null'): undefined | null +export function toRealValue(v: 'undefined' | 'true' | 'false'): undefined | boolean +export function toRealValue(v: 'null' | 'true' | 'false'): null | boolean +export function toRealValue(v: 'undefined' | 'null' | 'true' | 'false'): undefined | null | boolean +export function toRealValue(v: T): Exclude export function toRealValue(v: string) { if (v === 'undefined') return undefined if (v === 'null') return null + if (v === 'true') return true + if (v === 'false') return false return v }