fix: add interval thinking model support and related tests

suyao 2025-12-23 15:53:56 +08:00
parent aca3ac73e3
commit 8fbd969938
No known key found for this signature in database
4 changed files with 137 additions and 3 deletions


@@ -10,6 +10,8 @@ import {
isAnthropicModel,
isGeminiModel,
isGrokModel,
isIntervalThinkingModel,
isMiniMaxReasoningModel,
isOpenAIModel,
isOpenAIOpenWeightModel,
isQwenMTModel,
@@ -601,7 +603,7 @@ function buildGenericProviderOptions(
enableGenerateImage: boolean
}
): Record<string, any> {
const { enableWebSearch } = capabilities
const { enableWebSearch, enableReasoning } = capabilities
let providerOptions: Record<string, any> = {}
const reasoningParams = getReasoningEffort(assistant, model)
@@ -609,6 +611,20 @@
...providerOptions,
...reasoningParams
}
if (enableReasoning) {
if (isIntervalThinkingModel(model)) {
providerOptions = {
...providerOptions,
sendReasoning: true
}
}
if (isMiniMaxReasoningModel(model)) {
providerOptions = {
...providerOptions,
reasoning_split: true
}
}
}
if (enableWebSearch) {
const webSearchParams = getWebSearchParams(model)
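
For context, the new branch only touches the generic provider options when reasoning is enabled: interval-thinking models opt in to sendReasoning, and MiniMax reasoning models additionally request reasoning_split. A minimal standalone sketch of that composition follows; it is not part of the commit, and the simplified Model type and predicate bodies are assumptions rather than the project's real helpers.

// Illustrative sketch only; the predicates below are simplified stand-ins.
interface Model {
  id: string
}

const isIntervalThinkingModel = (model: Model): boolean => /minimax-m2|mimo-v2-flash|glm-4\.\d/i.test(model.id)

const isMiniMaxReasoningModel = (model: Model): boolean => /minimax-m\d/i.test(model.id)

function buildReasoningProviderOptions(model: Model, enableReasoning: boolean): Record<string, any> {
  let providerOptions: Record<string, any> = {}
  if (enableReasoning) {
    if (isIntervalThinkingModel(model)) {
      // Mirrors the diff: interval-thinking models get sendReasoning enabled.
      providerOptions = { ...providerOptions, sendReasoning: true }
    }
    if (isMiniMaxReasoningModel(model)) {
      // Mirrors the diff: MiniMax reasoning models also get reasoning_split.
      providerOptions = { ...providerOptions, reasoning_split: true }
    }
  }
  return providerOptions
}

// buildReasoningProviderOptions({ id: 'minimax-m2' }, true)  -> { sendReasoning: true, reasoning_split: true }
// buildReasoningProviderOptions({ id: 'glm-4.6' }, true)     -> { sendReasoning: true }
// buildReasoningProviderOptions({ id: 'glm-4.6' }, false)    -> {}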


@@ -17,6 +17,7 @@ import {
isGeminiReasoningModel,
isGrok4FastReasoningModel,
isHunyuanReasoningModel,
isIntervalThinkingModel,
isLingReasoningModel,
isMiniMaxReasoningModel,
isPerplexityReasoningModel,
@@ -2157,3 +2158,105 @@ describe('getModelSupportedReasoningEffortOptions', () => {
})
})
})
describe('isIntervalThinkingModel', () => {
describe('MiniMax models', () => {
it('should return true for minimax-m2', () => {
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2' }))).toBe(true)
})
it('should return true for minimax-m2.1', () => {
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2.1' }))).toBe(true)
})
it('should return true for minimax-m2 with suffixes', () => {
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2-pro' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2-preview' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2-lite' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2-ultra-lite' }))).toBe(true)
})
it('should return true for minimax-m2.x with suffixes', () => {
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2.1-pro' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2.2-preview' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m2.5-lite' }))).toBe(true)
})
it('should return false for non-m2 minimax models', () => {
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m1' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'minimax-m3' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'minimax-pro' }))).toBe(false)
})
it('should handle case insensitivity', () => {
expect(isIntervalThinkingModel(createModel({ id: 'MiniMax-M2' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'MINIMAX-M2.1' }))).toBe(true)
})
})
describe('MiMo models', () => {
it('should return true for mimo-v2-flash', () => {
expect(isIntervalThinkingModel(createModel({ id: 'mimo-v2-flash' }))).toBe(true)
})
it('should return false for other mimo models', () => {
expect(isIntervalThinkingModel(createModel({ id: 'mimo-v1-flash' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'mimo-v2' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'mimo-v2-pro' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'mimo-flash' }))).toBe(false)
})
it('should handle case insensitivity', () => {
expect(isIntervalThinkingModel(createModel({ id: 'MiMo-V2-Flash' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'MIMO-V2-FLASH' }))).toBe(true)
})
})
describe('Zhipu GLM models', () => {
it('should return true for glm-4.5', () => {
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.5' }))).toBe(true)
})
it('should return true for glm-4.6', () => {
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.6' }))).toBe(true)
})
it('should return true for glm-4.7 and higher versions', () => {
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.7' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.8' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.9' }))).toBe(true)
})
it('should return true for glm-4.x with suffixes', () => {
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.5-pro' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.6-preview' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.7-lite' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'glm-4.8-ultra' }))).toBe(true)
})
it('should return false for glm-4 without decimal version', () => {
expect(isIntervalThinkingModel(createModel({ id: 'glm-4' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'glm-4-pro' }))).toBe(false)
})
it('should return false for other glm models', () => {
expect(isIntervalThinkingModel(createModel({ id: 'glm-3.5' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'glm-5.0' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'glm-zero-preview' }))).toBe(false)
})
it('should handle case insensitivity', () => {
expect(isIntervalThinkingModel(createModel({ id: 'GLM-4.5' }))).toBe(true)
expect(isIntervalThinkingModel(createModel({ id: 'Glm-4.6-Pro' }))).toBe(true)
})
})
describe('Non-matching models', () => {
it('should return false for unrelated models', () => {
expect(isIntervalThinkingModel(createModel({ id: 'gpt-4' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'claude-3-opus' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'gemini-pro' }))).toBe(false)
expect(isIntervalThinkingModel(createModel({ id: 'deepseek-v3' }))).toBe(false)
})
})
})
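
The tests above rely on a createModel factory that is not part of this diff. A hypothetical sketch of such a factory, with every field other than id assumed for illustration:

// Hypothetical test helper, not shown in this commit: fills in default Model
// fields so each test only has to specify the id it cares about.
interface Model {
  id: string
  name: string
  provider: string
  group: string
}

const createModel = (overrides: Partial<Model> & { id: string }): Model => ({
  id: overrides.id,
  name: overrides.id,
  provider: 'test-provider',
  group: 'test-group',
  ...overrides
})

// createModel({ id: 'minimax-m2' })
//   -> { id: 'minimax-m2', name: 'minimax-m2', provider: 'test-provider', group: 'test-group' }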


@@ -738,3 +738,18 @@ export const findTokenLimit = (modelId: string): { min: number; max: number } |
*/
export const isFixedReasoningModel = (model: Model) =>
isReasoningModel(model) && !isSupportedThinkingTokenModel(model) && !isSupportedReasoningEffortModel(model)
// https://platform.minimaxi.com/docs/guides/text-m2-function-call#openai-sdk
// https://docs.z.ai/guides/capabilities/thinking-mode
const INTERVAL_THINKING_MODEL_REGEX = /minimax-m2(\.(\d+))?(?:-[\w-]+)?|mimo-v2-flash|glm-4\.(\d+)(?:-[\w-]+)?$/i
/**
* Determines whether the given model supports interval thinking.
*
* @param model - The model object to check.
* @returns `true` if the model's ID matches the interval thinking model pattern; otherwise, `false`.
*/
export const isIntervalThinkingModel = (model: Model) => {
const modelId = getLowerBaseModelName(model.id)
return INTERVAL_THINKING_MODEL_REGEX.test(modelId)
}
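
Usage of the new helper follows directly from the regex and the tests above. The expected values below are taken from the test cases in this commit; the asModel cast is only a shorthand, since a real Model carries more fields than id.

// Shorthand for the example; a real Model has more fields than id.
const asModel = (id: string) => ({ id }) as unknown as Model

isIntervalThinkingModel(asModel('MiniMax-M2.1-pro')) // true  (MiniMax M2.x family, case-insensitive)
isIntervalThinkingModel(asModel('mimo-v2-flash'))    // true
isIntervalThinkingModel(asModel('glm-4.6-preview'))  // true  (GLM-4.x with suffix)
isIntervalThinkingModel(asModel('glm-4'))            // false (no decimal minor version)
isIntervalThinkingModel(asModel('gpt-4'))            // false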


@@ -256,13 +256,13 @@ __metadata:
"@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch":
version: 1.0.28
resolution: "@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch::version=1.0.28&hash=77cd08"
resolution: "@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch::version=1.0.28&hash=8a8450"
dependencies:
"@ai-sdk/provider": "npm:2.0.0"
"@ai-sdk/provider-utils": "npm:3.0.18"
peerDependencies:
zod: ^3.25.76 || ^4.1.8
checksum: 10c0/5ff563a56526c6be3543c6a659ed6e3a447b964a5fc574013e7133e2ddcb0aef2e3170e13690df0337665df72696f6f5b1c0cd613887407840d2e6189ae1917e
checksum: 10c0/7bac95fc73e3bcdb4f0f1284e4b4e5dcb25bc69c4b70295efbcf44f53e4ce9b9c5872c05819c76e5b4ada9f262ce85f58ea02492d37e71fb519a7e03c9b9a824
languageName: node
linkType: hard