Mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2025-12-28 13:31:32 +08:00)
Fix/qwen-mt (#8527)
* feat(ModelList): add support for 'supported_text_delta' in model configuration
  - Introduced a new boolean property 'supported_text_delta' to the model configuration, allowing models to indicate support for text delta outputs.
  - Updated the AddModelPopup and ModelEditContent components to handle the new property, including UI elements for user interaction.
  - Enhanced migration logic to set default values for existing models based on compatibility checks.
  - Added corresponding translations for the new property in the i18n files.

* feat(OpenAIApiClient): enhance support for the Qwen MT model and system message handling
  - Added support for the Qwen MT model in OpenAIApiClient, including translation options based on the target language.
  - Updated system message handling to accommodate models that do not support system messages.
  - Introduced utility functions to identify Qwen MT models and their compatibility with text delta outputs.
  - Enhanced TextChunkMiddleware to handle text accumulation based on model capabilities.
  - Updated the model configuration to include Qwen MT in the list of models excluded from function calling.

* feat(i18n): add translations for 'supported_text_delta' in multiple languages
  - Introduced new translation entries for the 'supported_text_delta' property in the English, Japanese, Russian, and Traditional Chinese localization files.
  - Updated the corresponding labels and tooltips to improve the user experience across languages.

* refactor(ModelEditContent): reposition the 'supported_text_delta' input for improved UI layout
  - Moved the 'supported_text_delta' Form.Item to improve the layout and keep the model editing experience consistent.
  - Ensured that the switch for 'supported_text_delta' is displayed in a more logical position within the form.

* fix(TextChunkMiddleware): update the condition for the supported_text_delta check
  - Changed the condition in TextChunkMiddleware to explicitly check for 'supported_text_delta' being false, improving clarity of the logic.
  - Updated test cases to reflect the new model configuration structure, setting 'supported_text_delta' to true for the relevant models.

* feat(migrate): add support for 'supported_text_delta' in assistant models
  - Updated migration logic to set 'supported_text_delta' for both default and specific models within assistants.
  - Implemented checks to ensure compatibility with text delta outputs, keeping model configuration consistent.

* feat(ModelList): add 'supported_text_delta' to the model addition logic
  - Extended the model addition flow in EditModelsPopup, NewApiAddModelPopup, and NewApiBatchAddModelPopup to include the 'supported_text_delta' property.
  - Ensured consistency across components by setting 'supported_text_delta' when adding models, improving compatibility with text delta outputs.

* feat(migrate): streamline model text delta support in the migration logic
  - Refactored the migration to use a single helper function that updates 'supported_text_delta' across all model types, improving clarity and removing duplication.
  - Ensured that all relevant models, including those attached to assistants and LLM providers, are configured for text delta compatibility during migration.

* feat(OpenAIApiClient): integrate language mapping for Qwen MT model translations
  - Updated OpenAIApiClient to use a new utility function that maps target languages to the language names Qwen MT expects, improving translation accuracy.
  - Refactored migration logic to ensure a default tool use mode is set for assistants that lack this configuration.
  - Added the language-mapping utility to the utils module to support the translation features.

* feat(ModelList): update the model addition logic to determine 'supported_text_delta'
  - Integrated a utility function to assess model compatibility with text delta outputs when adding models in AddModelPopup.
  - Simplified the logic for setting 'supported_text_delta', improving clarity and ensuring accurate model configuration.

* feat(ModelList): unify the model addition logic for 'supported_text_delta'
  - Refactored model addition across AddModelPopup, EditModelsPopup, NewApiAddModelPopup, and NewApiBatchAddModelPopup to determine 'supported_text_delta' consistently via a utility function.
  - Simplified the logic for setting 'supported_text_delta' across components.
This commit is contained in: parent 81b6350501 · commit 65257eb3d5
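Taken together, the changes below introduce a per-model 'supported_text_delta' flag and special-case the Qwen MT translation models in the OpenAI-compatible client. A minimal sketch of the intended request-side behaviour, assuming simplified Model and TranslateAssistant shapes (the real types and helpers are in the diff that follows):

// Sketch only: trimmed-down shapes standing in for the real @renderer/types definitions.
type Model = { id: string; supported_text_delta?: boolean }
type Language = { langCode: string; value: string }
type TranslateAssistant = { prompt: string; targetLanguage?: Language }

// Qwen MT is a translation-specific model family: it takes translation_options
// in the request body instead of a system prompt.
const isQwenMTModel = (model: Model): boolean => model.id.toLowerCase().includes('qwen-mt')

function buildTranslationOptions(model: Model, assistant: TranslateAssistant) {
  const extra_body: Record<string, any> = {}
  if (isQwenMTModel(model) && assistant.targetLanguage) {
    extra_body.translation_options = {
      source_lang: 'auto',
      // the real client maps this through mapLanguageToQwenMTModel(targetLanguage)
      target_lang: assistant.targetLanguage.value
    }
  }
  return extra_body
}

The 'supported_text_delta' flag, in turn, tells the streaming middleware whether chunks are true increments or full-text snapshots; see the TextChunkMiddleware hunk further down.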
@@ -6,6 +6,8 @@ import {
    getOpenAIWebSearchParams,
    isDoubaoThinkingAutoModel,
    isGrokReasoningModel,
+   isNotSupportSystemMessageModel,
+   isQwenMTModel,
    isQwenReasoningModel,
    isReasoningModel,
    isSupportedReasoningEffortGrokModel,

@@ -32,6 +34,7 @@ import {
    Model,
    Provider,
    ToolCallResponse,
+   TranslateAssistant,
    WebSearchSource
  } from '@renderer/types'
  import { ChunkType, TextStartChunk, ThinkingStartChunk } from '@renderer/types/chunk'

@@ -44,6 +47,7 @@ import {
    OpenAISdkRawOutput,
    ReasoningEffortOptionalParams
  } from '@renderer/types/sdk'
+ import { mapLanguageToQwenMTModel } from '@renderer/utils'
  import { addImageFileToContents } from '@renderer/utils/formats'
  import {
    isEnabledToolUse,

@@ -472,6 +476,16 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
      streamOutput = true
    }

+   const extra_body: Record<string, any> = {}
+
+   if (isQwenMTModel(model)) {
+     const targetLanguage = (assistant as TranslateAssistant).targetLanguage
+     extra_body.translation_options = {
+       source_lang: 'auto',
+       target_lang: mapLanguageToQwenMTModel(targetLanguage!)
+     }
+   }
+
    // 1. Handle the system message
    let systemMessage = { role: 'system', content: assistant.prompt || '' }

@@ -515,7 +529,7 @@ export class OpenAIAPIClient extends OpenAIBaseClient<

    // 4. Build the final request messages
    let reqMessages: OpenAISdkMessageParam[]
-   if (!systemMessage.content) {
+   if (!systemMessage.content || isNotSupportSystemMessageModel(model)) {
      reqMessages = [...userMessages]
    } else {
      reqMessages = [systemMessage, ...userMessages].filter(Boolean) as OpenAISdkMessageParam[]

@@ -541,7 +555,8 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
      // Only apply custom parameters in chat scenarios, to avoid affecting translation, summarization and other flows
      ...(coreRequest.callType === 'chat' ? this.getCustomParameters(assistant) : {}),
      // OpenRouter usage tracking
-     ...(this.provider.id === 'openrouter' ? { usage: { include: true } } : {})
+     ...(this.provider.id === 'openrouter' ? { usage: { include: true } } : {}),
+     ...(isQwenMTModel(model) ? extra_body : {})
    }

    // Create the appropriate parameters object based on whether streaming is enabled
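For illustration only, a translation request for a Qwen MT model built by this path could end up with parameters roughly like the following (the exact model id, message list and streaming flags depend on the surrounding code; 'qwen-mt-turbo' is just an assumed example):

// Hypothetical final request parameters for a qwen-mt translation call (sketch, not literal output).
const params = {
  model: 'qwen-mt-turbo',
  // No system message: isNotSupportSystemMessageModel(model) returns true for Qwen MT.
  messages: [{ role: 'user', content: 'Text to translate' }],
  stream: true,
  // Merged in from extra_body because isQwenMTModel(model) is true.
  translation_options: {
    source_lang: 'auto',
    target_lang: 'Chinese' // result of mapLanguageToQwenMTModel(targetLanguage)
  }
}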
@@ -45,8 +45,11 @@ export const TextChunkMiddleware: CompletionsMiddleware =
      transform(chunk: GenericChunk, controller) {
        logger.silly('chunk', chunk)
        if (chunk.type === ChunkType.TEXT_DELTA) {
-         accumulatedTextContent += chunk.text
-
+         if (model.supported_text_delta === false) {
+           accumulatedTextContent = chunk.text
+         } else {
+           accumulatedTextContent += chunk.text
+         }
          // Handle the onResponse callback - emit incremental text updates
          if (params.onResponse) {
            params.onResponse(accumulatedTextContent, false)
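The switch above matters because, judging by this change, models flagged with 'supported_text_delta: false' (Qwen MT) stream the full text so far in every chunk rather than an increment, so the middleware must replace the accumulator instead of appending to it. A small standalone illustration of the two accumulation modes:

// Sketch: how the two accumulation modes converge on the same final text.
function accumulate(chunks: string[], supportsTextDelta: boolean): string {
  let acc = ''
  for (const text of chunks) {
    // delta stream: append increments; full-text stream: keep only the latest snapshot
    acc = supportsTextDelta ? acc + text : text
  }
  return acc
}

accumulate(['He', 'llo', ' world'], true)          // 'Hello world' from incremental chunks
accumulate(['He', 'Hello', 'Hello world'], false)  // 'Hello world' from cumulative chunks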
@@ -1,4 +1,5 @@
  import { TopView } from '@renderer/components/TopView'
+ import { isNotSupportedTextDelta } from '@renderer/config/models'
  import { useProvider } from '@renderer/hooks/useProvider'
  import { Model, Provider } from '@renderer/types'
  import { getDefaultGroupName } from '@renderer/utils'

@@ -56,7 +57,7 @@ const PopupContainer: React.FC<Props> = ({ title, provider, resolve }) => {
      group: values.group ?? getDefaultGroupName(id)
    }

-   addModel(model)
+   addModel({ ...model, supported_text_delta: !isNotSupportedTextDelta(model) })

    return true
  }
@@ -13,6 +13,7 @@ import {
    groupQwenModels,
    isEmbeddingModel,
    isFunctionCallingModel,
+   isNotSupportedTextDelta,
    isReasoningModel,
    isRerankModel,
    isVisionModel,

@@ -144,13 +145,14 @@ const PopupContainer: React.FC<Props> = ({ provider: _provider, resolve }) => {
        if (model.supported_endpoint_types && model.supported_endpoint_types.length > 0) {
          addModel({
            ...model,
-           endpoint_type: model.supported_endpoint_types[0]
+           endpoint_type: model.supported_endpoint_types[0],
+           supported_text_delta: !isNotSupportedTextDelta(model)
          })
        } else {
          NewApiAddModelPopup.show({ title: t('settings.models.add.add_model'), provider, model })
        }
      } else {
-       addModel(model)
+       addModel({ ...model, supported_text_delta: !isNotSupportedTextDelta(model) })
      }
    }
  },
@@ -11,7 +11,7 @@ import {
  import { useDynamicLabelWidth } from '@renderer/hooks/useDynamicLabelWidth'
  import { Model, ModelCapability, ModelType, Provider } from '@renderer/types'
  import { getDefaultGroupName, getDifference, getUnion } from '@renderer/utils'
- import { Button, Checkbox, Divider, Flex, Form, Input, InputNumber, message, Modal, Select } from 'antd'
+ import { Button, Checkbox, Divider, Flex, Form, Input, InputNumber, message, Modal, Select, Switch } from 'antd'
  import { ChevronDown, ChevronUp } from 'lucide-react'
  import { FC, useState } from 'react'
  import { useTranslation } from 'react-i18next'

@@ -33,6 +33,7 @@ const ModelEditContent: FC<ModelEditContentProps> = ({ provider, model, onUpdate
    const [currencySymbol, setCurrencySymbol] = useState(model.pricing?.currencySymbol || '$')
    const [isCustomCurrency, setIsCustomCurrency] = useState(!symbols.includes(model.pricing?.currencySymbol || '$'))
    const [modelCapabilities, setModelCapabilities] = useState(model.capabilities || [])
+   const [supportedTextDelta, setSupportedTextDelta] = useState(model.supported_text_delta)

    const labelWidth = useDynamicLabelWidth([t('settings.models.add.endpoint_type.label')])

@@ -45,6 +46,7 @@ const ModelEditContent: FC<ModelEditContentProps> = ({ provider, model, onUpdate
      group: values.group || model.group,
      endpoint_type: provider.id === 'new-api' ? values.endpointType : model.endpoint_type,
      capabilities: modelCapabilities,
+     supported_text_delta: supportedTextDelta,
      pricing: {
        input_per_million_tokens: Number(values.input_per_million_tokens) || 0,
        output_per_million_tokens: Number(values.output_per_million_tokens) || 0,

@@ -338,6 +340,12 @@ const ModelEditContent: FC<ModelEditContentProps> = ({ provider, model, onUpdate
            </div>
          )
        })()}
+       <Form.Item
+         name="supported_text_delta"
+         label={t('settings.models.add.supported_text_delta.label')}
+         tooltip={t('settings.models.add.supported_text_delta.tooltip')}>
+         <Switch checked={supportedTextDelta} onChange={(checked) => setSupportedTextDelta(checked)} />
+       </Form.Item>
        <TypeTitle>{t('models.price.price')}</TypeTitle>
        <Form.Item name="currencySymbol" label={t('models.price.currency')} style={{ marginBottom: 10 }}>
          <Select
@@ -1,5 +1,6 @@
  import { TopView } from '@renderer/components/TopView'
  import { endpointTypeOptions } from '@renderer/config/endpointTypes'
+ import { isNotSupportedTextDelta } from '@renderer/config/models'
  import { useDynamicLabelWidth } from '@renderer/hooks/useDynamicLabelWidth'
  import { useProvider } from '@renderer/hooks/useProvider'
  import { EndpointType, Model, Provider } from '@renderer/types'

@@ -62,7 +63,7 @@ const PopupContainer: React.FC<Props> = ({ title, provider, resolve, model, endp
      endpoint_type: provider.id === 'new-api' ? values.endpointType : undefined
    }

-   addModel(model)
+   addModel({ ...model, supported_text_delta: !isNotSupportedTextDelta(model) })

    return true
  }
@@ -1,5 +1,6 @@
  import { TopView } from '@renderer/components/TopView'
  import { endpointTypeOptions } from '@renderer/config/endpointTypes'
+ import { isNotSupportedTextDelta } from '@renderer/config/models'
  import { useDynamicLabelWidth } from '@renderer/hooks/useDynamicLabelWidth'
  import { useProvider } from '@renderer/hooks/useProvider'
  import { EndpointType, Model, Provider } from '@renderer/types'

@@ -45,7 +46,8 @@ const PopupContainer: React.FC<Props> = ({ title, provider, resolve, batchModels
    batchModels.forEach((model) => {
      addModel({
        ...model,
-       endpoint_type: values.endpointType
+       endpoint_type: values.endpointType,
+       supported_text_delta: !isNotSupportedTextDelta(model)
      })
    })
    return true
@@ -254,7 +254,8 @@ const FUNCTION_CALLING_EXCLUDED_MODELS = [
    'o1-mini',
    'o1-preview',
    'AIDC-AI/Marco-o1',
-   'gemini-1(?:\\.[\\w-]+)?'
+   'gemini-1(?:\\.[\\w-]+)?',
+   'qwen-mt(?:-[\\w-]+)?'
  ]

  export const FUNCTION_CALLING_REGEX = new RegExp(

@@ -3026,3 +3027,24 @@ export const isAnthropicModel = (model?: Model): boolean => {

    return getLowerBaseModelName(model.id).startsWith('claude')
  }
+
+ export const isQwenMTModel = (model: Model): boolean => {
+   const name = getLowerBaseModelName(model.id)
+   return name.includes('qwen-mt')
+ }
+
+ export const isNotSupportedTextDelta = (model: Model): boolean => {
+   if (isQwenMTModel(model)) {
+     return true
+   }
+
+   return false
+ }
+
+ export const isNotSupportSystemMessageModel = (model: Model): boolean => {
+   if (isQwenMTModel(model) || isGemmaModel(model)) {
+     return true
+   }
+
+   return false
+ }
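Assuming the helpers behave as defined above, the rest of the codebase only needs simple guards; for example (hypothetical model objects, shown only to make the return values concrete):

const qwenMt = { id: 'qwen-mt-turbo' } as Model
const gpt4o = { id: 'gpt-4o' } as Model

isQwenMTModel(qwenMt)                   // true  -> also matched by the new 'qwen-mt(?:-[\\w-]+)?' exclusion entry
isNotSupportedTextDelta(qwenMt)         // true  -> supported_text_delta is set to false at add/migrate time
isNotSupportSystemMessageModel(qwenMt)  // true  -> the system prompt is dropped from the request
isNotSupportedTextDelta(gpt4o)          // false -> supported_text_delta defaults to true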
@@ -2876,6 +2876,10 @@
      "label": "Model Name",
      "placeholder": "Optional e.g. GPT-4",
      "tooltip": "Optional e.g. GPT-4"
    },
+   "supported_text_delta": {
+     "label": "Incremental text output",
+     "tooltip": "When the model is not supported, close the button"
+   }
  },
  "api_key": "API Key",

@@ -2876,6 +2876,10 @@
      "label": "モデル名",
      "placeholder": "例:GPT-4",
      "tooltip": "例:GPT-4"
    },
+   "supported_text_delta": {
+     "label": "インクリメンタルテキスト出力",
+     "tooltip": "モデルがサポートされていない場合は、ボタンを閉じます"
+   }
  },
  "api_key": "API キー",

@@ -2876,6 +2876,10 @@
      "label": "Имя модели",
      "placeholder": "Необязательно, например, GPT-4",
      "tooltip": "Необязательно, например, GPT-4"
    },
+   "supported_text_delta": {
+     "label": "Инкрементный текст вывод",
+     "tooltip": "Когда модель не поддерживается, закройте кнопку"
+   }
  },
  "api_key": "API ключ",

@@ -2876,6 +2876,10 @@
      "label": "模型名称",
      "placeholder": "例如 GPT-4",
      "tooltip": "例如 GPT-4"
    },
+   "supported_text_delta": {
+     "label": "增量文本输出",
+     "tooltip": "当模型不支持的时候,将该按钮关闭"
+   }
  },
  "api_key": "API 密钥",

@@ -2876,6 +2876,10 @@
      "label": "模型名稱",
      "placeholder": "選填,例如 GPT-4",
      "tooltip": "例如 GPT-4"
    },
+   "supported_text_delta": {
+     "label": "增量文本輸出",
+     "tooltip": "當模型不支持的時候,將該按鈕關閉"
+   }
  },
  "api_key": "API 密鑰",
@@ -30,6 +30,7 @@ import {
    MemoryItem,
    Model,
    Provider,
+   TranslateAssistant,
    WebSearchResponse,
    WebSearchSource
  } from '@renderer/types'

@@ -591,7 +592,7 @@ async function processConversationMemory(messages: Message[], assistant: Assista

  interface FetchTranslateProps {
    content: string
-   assistant: Assistant
+   assistant: TranslateAssistant
    onResponse?: (text: string, isComplete: boolean) => void
  }
@@ -8,7 +8,16 @@ import {
  import i18n from '@renderer/i18n'
  import store from '@renderer/store'
  import { addAssistant } from '@renderer/store/assistants'
- import type { Agent, Assistant, AssistantSettings, Language, Model, Provider, Topic } from '@renderer/types'
+ import type {
+   Agent,
+   Assistant,
+   AssistantSettings,
+   Language,
+   Model,
+   Provider,
+   Topic,
+   TranslateAssistant
+ } from '@renderer/types'
  import { uuid } from '@renderer/utils'

  export function getDefaultAssistant(): Assistant {

@@ -34,7 +43,7 @@ export function getDefaultAssistant(): Assistant {
    }
  }

- export function getDefaultTranslateAssistant(targetLanguage: Language, text: string): Assistant {
+ export function getDefaultTranslateAssistant(targetLanguage: Language, text: string): TranslateAssistant {
    const translateModel = getTranslateModel()
    const assistant: Assistant = getDefaultAssistant()
    assistant.model = translateModel

@@ -47,7 +56,7 @@ export function getDefaultTranslateAssistant(targetLanguage: Language, text: str
      .getState()
      .settings.translateModelPrompt.replaceAll('{{target_language}}', targetLanguage.value)
      .replaceAll('{{text}}', text)
-   return assistant
+   return { ...assistant, targetLanguage }
  }

  export function getDefaultAssistantSettings() {
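With this change, getDefaultTranslateAssistant returns a TranslateAssistant that carries the target language alongside the rendered prompt, so downstream code (the Qwen MT branch in OpenAIApiClient) does not have to re-derive it. A usage sketch with a hypothetical Language value:

// Hypothetical Language value; the real ones come from the app's translate language list.
const zhCN = { langCode: 'zh-cn', value: 'Chinese (Simplified)' } as Language

const assistant = getDefaultTranslateAssistant(zhCN, 'Hello world')
assistant.prompt          // translate prompt with {{target_language}} / {{text}} already substituted
assistant.targetLanguage  // zhCN — later read as (assistant as TranslateAssistant).targetLanguage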
@@ -174,7 +174,8 @@ vi.mock('@renderer/store/llm.ts', () => {
          {
            id: 'gemini-2.5-pro',
            name: 'Gemini 2.5 Pro',
-           provider: 'gemini'
+           provider: 'gemini',
+           supported_text_delta: true
          }
        ],
        isSystem: true,

@@ -184,17 +185,20 @@
      defaultModel: {
        id: 'gemini-2.5-pro',
        name: 'Gemini 2.5 Pro',
-       provider: 'gemini'
+       provider: 'gemini',
+       supported_text_delta: true
      },
      topicNamingModel: {
        id: 'gemini-2.5-pro',
        name: 'Gemini 2.5 Pro',
-       provider: 'gemini'
+       provider: 'gemini',
+       supported_text_delta: true
      },
      translateModel: {
        id: 'gemini-2.5-pro',
        name: 'Gemini 2.5 Pro',
-       provider: 'gemini'
+       provider: 'gemini',
+       supported_text_delta: true
      },
      quickAssistantId: '',
      settings: {

@@ -1690,7 +1694,8 @@ describe('ApiService', () => {
          prompt: 'test',
          model: {
            id: 'gemini-2.5-pro',
-           name: 'Gemini 2.5 Pro'
+           name: 'Gemini 2.5 Pro',
+           supported_text_delta: true
          }
        } as Assistant,
        onChunk: mockOnChunk,

@@ -1810,7 +1815,8 @@ describe('ApiService', () => {
          type: 'anthropic',
          model: {
            id: 'claude-3-7-sonnet-20250219',
-           name: 'Claude 3.7 Sonnet'
+           name: 'Claude 3.7 Sonnet',
+           supported_text_delta: true
          }
        } as Assistant,
        onChunk: mockOnChunk,

@@ -1886,7 +1892,8 @@ describe('ApiService', () => {
          type: 'anthropic',
          model: {
            id: 'claude-3-7-sonnet-20250219',
-           name: 'Claude 3.7 Sonnet'
+           name: 'Claude 3.7 Sonnet',
+           supported_text_delta: true
          }
        } as Assistant,
        onChunk: mockOnChunk,

@@ -1957,7 +1964,8 @@ describe('ApiService', () => {
          prompt: 'test',
          model: {
            id: 'gemini-2.5-pro',
-           name: 'Gemini 2.5 Pro'
+           name: 'Gemini 2.5 Pro',
+           supported_text_delta: true
          }
        } as Assistant,
        onChunk: mockOnChunk,

@@ -2125,7 +2133,8 @@ describe('ApiService', () => {
          prompt: 'test',
          model: {
            id: 'gpt-4o',
-           name: 'GPT-4o'
+           name: 'GPT-4o',
+           supported_text_delta: true
          }
        } as Assistant,
        onChunk: mockOnChunk,

@@ -2215,7 +2224,8 @@ describe('ApiService', () => {
          prompt: 'test',
          model: {
            id: 'gpt-4o',
-           name: 'GPT-4o'
+           name: 'GPT-4o',
+           supported_text_delta: true
          }
        } as Assistant,
        onChunk: mockOnChunk,
@@ -2,11 +2,11 @@ import { loggerService } from '@logger'
  import { nanoid } from '@reduxjs/toolkit'
  import { DEFAULT_CONTEXTCOUNT, DEFAULT_TEMPERATURE, isMac } from '@renderer/config/constant'
  import { DEFAULT_MIN_APPS } from '@renderer/config/minapps'
- import { isFunctionCallingModel, SYSTEM_MODELS } from '@renderer/config/models'
+ import { isFunctionCallingModel, isNotSupportedTextDelta, SYSTEM_MODELS } from '@renderer/config/models'
  import { TRANSLATE_PROMPT } from '@renderer/config/prompts'
  import db from '@renderer/databases'
  import i18n from '@renderer/i18n'
- import { Assistant, LanguageCode, Provider, WebSearchProvider } from '@renderer/types'
+ import { Assistant, LanguageCode, Model, Provider, WebSearchProvider } from '@renderer/types'
  import { getDefaultGroupName, getLeadingEmoji, runAsyncFunction, uuid } from '@renderer/utils'
  import { UpgradeChannel } from '@shared/config/constant'
  import { isEmpty } from 'lodash'

@@ -1878,6 +1878,35 @@ const migrateConfig = {
        assistant.settings.toolUseMode = 'prompt'
      }
    })
+
+   const updateModelTextDelta = (model?: Model) => {
+     if (model) {
+       model.supported_text_delta = true
+       if (isNotSupportedTextDelta(model)) {
+         model.supported_text_delta = false
+       }
+     }
+   }
+
+   state.llm.providers.forEach((provider) => {
+     provider.models.forEach((model) => {
+       updateModelTextDelta(model)
+     })
+   })
+   state.assistants.assistants.forEach((assistant) => {
+     updateModelTextDelta(assistant.defaultModel)
+     updateModelTextDelta(assistant.model)
+   })
+
+   updateModelTextDelta(state.llm.defaultModel)
+   updateModelTextDelta(state.llm.topicNamingModel)
+   updateModelTextDelta(state.llm.translateModel)
+
+   if (state.assistants.defaultAssistant.model) {
+     updateModelTextDelta(state.assistants.defaultAssistant.model)
+     updateModelTextDelta(state.assistants.defaultAssistant.defaultModel)
+   }
+
    return state
  } catch (error) {
    logger.error('migrate 124 error', error as Error)
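The net effect of the migration helper on stored models is easiest to see with two hypothetical entries:

// Sketch: values before and after updateModelTextDelta runs during migration.
const regular = { id: 'gpt-4o' } as Model
const qwenMt = { id: 'qwen-mt-plus' } as Model

updateModelTextDelta(regular) // regular.supported_text_delta === true
updateModelTextDelta(qwenMt)  // qwenMt.supported_text_delta === false (isNotSupportedTextDelta matches qwen-mt)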
@@ -34,6 +34,10 @@ export type Assistant = {
    enableMemory?: boolean
  }

+ export type TranslateAssistant = Assistant & {
+   targetLanguage?: Language
+ }
+
  export type AssistantsSortType = 'tags' | 'list'

  export type AssistantMessage = {

@@ -216,6 +220,7 @@ export type Model = {
    pricing?: ModelPricing
    endpoint_type?: EndpointType
    supported_endpoint_types?: EndpointType[]
+   supported_text_delta?: boolean
  }

  export type Suggestion = {
@@ -1,5 +1,5 @@
  import { loggerService } from '@logger'
- import { Model, ModelType, Provider } from '@renderer/types'
+ import { Language, Model, ModelType, Provider } from '@renderer/types'
  import { ModalFuncProps } from 'antd'
  import { v4 as uuidv4 } from 'uuid'

@@ -238,6 +238,16 @@ export function isUserSelectedModelType(model: Model, type: ModelType): boolean
    return t ? t.isUserSelected : undefined
  }

+ export function mapLanguageToQwenMTModel(language: Language): string {
+   if (language.langCode === 'zh-cn') {
+     return 'Chinese'
+   }
+   if (language.langCode === 'zh-tw') {
+     return 'Traditional Chinese'
+   }
+   return language.value
+ }
+
  export * from './api'
  export * from './collection'
  export * from './file'
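Note that mapLanguageToQwenMTModel maps a UI Language to the language name used for Qwen MT's translation_options.target_lang rather than to a model id. For example (hypothetical Language values, shown only for illustration):

mapLanguageToQwenMTModel({ langCode: 'zh-cn', value: '简体中文' } as Language)  // 'Chinese'
mapLanguageToQwenMTModel({ langCode: 'zh-tw', value: '繁體中文' } as Language)  // 'Traditional Chinese'
mapLanguageToQwenMTModel({ langCode: 'en-us', value: 'English' } as Language)   // 'English' (falls back to language.value)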
@@ -5,12 +5,7 @@ import db from '@renderer/databases'
  import { useTopicMessages } from '@renderer/hooks/useMessageOperations'
  import { useSettings } from '@renderer/hooks/useSettings'
  import MessageContent from '@renderer/pages/home/Messages/MessageContent'
- import {
-   getDefaultAssistant,
-   getDefaultModel,
-   getDefaultTopic,
-   getTranslateModel
- } from '@renderer/services/AssistantService'
+ import { getDefaultTopic, getDefaultTranslateAssistant } from '@renderer/services/AssistantService'
  import { Assistant, Language, Topic } from '@renderer/types'
  import type { ActionItem } from '@renderer/types/selectionTypes'
  import { runAsyncFunction } from '@renderer/utils'

@@ -83,13 +78,7 @@ const ActionTranslate: FC<Props> = ({ action, scrollToBottom }) => {
    initialized.current = true

    // Initialize assistant
-   const currentAssistant = getDefaultAssistant()
-   const translateModel = getTranslateModel() || getDefaultModel()
-
-   currentAssistant.model = translateModel
-   currentAssistant.settings = {
-     temperature: 0.7
-   }
+   const currentAssistant = getDefaultTranslateAssistant(targetLanguage, action.selectedText)

    assistantRef.current = currentAssistant

@@ -127,13 +116,9 @@
    translateLang = targetLanguage
  }

- // Initialize prompt content
- const userContent = translateModelPrompt
-   .replaceAll('{{target_language}}', translateLang.value)
-   .replaceAll('{{text}}', action.selectedText)
-
- processMessages(assistantRef.current, topicRef.current, userContent, setAskId, onStream, onFinish, onError)
- }, [action, targetLanguage, alterLanguage, translateModelPrompt, scrollToBottom])
+ assistantRef.current = getDefaultTranslateAssistant(translateLang, action.selectedText)
+ processMessages(assistantRef.current, topicRef.current, action.selectedText, setAskId, onStream, onFinish, onError)
+ }, [action, targetLanguage, alterLanguage, scrollToBottom])

  useEffect(() => {
    fetchResult()