mirror of
https://github.com/CherryHQ/cherry-studio.git
synced 2026-01-06 13:19:33 +08:00
fix(aiCore): support minimax-m2 (#10962)
* fix(aiCore): add minimax-m2 to reasoning model check and correct comment
* feat(models): add minimax-m2 to function calling models list
* feat(models): add isMiniMaxReasoningModel helper function — add a helper to check for MiniMax reasoning models, and update isReasoningModel to use it
This commit is contained in:
parent
dedfc79406
commit
487b5c4d8a
@ -66,7 +66,8 @@ export function getReasoningEffort(assistant: Assistant, model: Model): Reasonin
|
|||||||
isGrokReasoningModel(model) ||
|
isGrokReasoningModel(model) ||
|
||||||
isOpenAIReasoningModel(model) ||
|
isOpenAIReasoningModel(model) ||
|
||||||
isQwenAlwaysThinkModel(model) ||
|
isQwenAlwaysThinkModel(model) ||
|
||||||
model.id.includes('seed-oss')
|
model.id.includes('seed-oss') ||
|
||||||
|
model.id.includes('minimax-m2')
|
||||||
) {
|
) {
|
||||||
return {}
|
return {}
|
||||||
}
|
}
|
||||||
@ -199,7 +200,7 @@ export function getReasoningEffort(assistant: Assistant, model: Model): Reasonin
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// OpenRouter models, use thinking
|
// OpenRouter models, use reasoning
|
||||||
if (model.provider === SystemProviderIds.openrouter) {
|
if (model.provider === SystemProviderIds.openrouter) {
|
||||||
if (isSupportedReasoningEffortModel(model) || isSupportedThinkingTokenModel(model)) {
|
if (isSupportedReasoningEffortModel(model) || isSupportedThinkingTokenModel(model)) {
|
||||||
return {
|
return {
|
||||||
|
|||||||
@ -455,6 +455,14 @@ export const isStepReasoningModel = (model?: Model): boolean => {
|
|||||||
return modelId.includes('step-3') || modelId.includes('step-r1-v-mini')
|
return modelId.includes('step-3') || modelId.includes('step-r1-v-mini')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const isMiniMaxReasoningModel = (model?: Model): boolean => {
|
||||||
|
if (!model) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
const modelId = getLowerBaseModelName(model.id, '/')
|
||||||
|
return (['minimax-m1', 'minimax-m2'] as const).some((id) => modelId.includes(id))
|
||||||
|
}
|
||||||
|
|
||||||
export function isReasoningModel(model?: Model): boolean {
|
export function isReasoningModel(model?: Model): boolean {
|
||||||
if (!model || isEmbeddingModel(model) || isRerankModel(model) || isTextToImageModel(model)) {
|
if (!model || isEmbeddingModel(model) || isRerankModel(model) || isTextToImageModel(model)) {
|
||||||
return false
|
return false
|
||||||
@ -489,8 +497,8 @@ export function isReasoningModel(model?: Model): boolean {
|
|||||||
isStepReasoningModel(model) ||
|
isStepReasoningModel(model) ||
|
||||||
isDeepSeekHybridInferenceModel(model) ||
|
isDeepSeekHybridInferenceModel(model) ||
|
||||||
isLingReasoningModel(model) ||
|
isLingReasoningModel(model) ||
|
||||||
|
isMiniMaxReasoningModel(model) ||
|
||||||
modelId.includes('magistral') ||
|
modelId.includes('magistral') ||
|
||||||
modelId.includes('minimax-m1') ||
|
|
||||||
modelId.includes('pangu-pro-moe') ||
|
modelId.includes('pangu-pro-moe') ||
|
||||||
modelId.includes('seed-oss')
|
modelId.includes('seed-oss')
|
||||||
) {
|
) {
|
||||||
|
|||||||
@ -27,8 +27,9 @@ export const FUNCTION_CALLING_MODELS = [
|
|||||||
'doubao-seed-1[.-]6(?:-[\\w-]+)?',
|
'doubao-seed-1[.-]6(?:-[\\w-]+)?',
|
||||||
'kimi-k2(?:-[\\w-]+)?',
|
'kimi-k2(?:-[\\w-]+)?',
|
||||||
'ling-\\w+(?:-[\\w-]+)?',
|
'ling-\\w+(?:-[\\w-]+)?',
|
||||||
'ring-\\w+(?:-[\\w-]+)?'
|
'ring-\\w+(?:-[\\w-]+)?',
|
||||||
]
|
'minimax-m2'
|
||||||
|
] as const
|
||||||
|
|
||||||
const FUNCTION_CALLING_EXCLUDED_MODELS = [
|
const FUNCTION_CALLING_EXCLUDED_MODELS = [
|
||||||
'aqa(?:-[\\w-]+)?',
|
'aqa(?:-[\\w-]+)?',
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user