fix: interleaved thinking support (#12084)

* fix: update @ai-sdk/openai-compatible to version 1.0.28 and adjust related patches

* fix: add sendReasoning option to OpenAICompatibleProviderOptions and update message conversion logic

* fix: add interval thinking model support and related tests

* fix: add sendReasoning option to OpenAICompatibleProviderOptions and update related logic

* fix: remove MiniMax reasoning model support and update interval thinking model regex

* chore: add comment

* fix: rename interval thinking model references to interleaved thinking model
SuYao authored on 2025-12-23 20:08:53 +08:00; committed by GitHub
parent e093a18deb
commit 09e58d3756
9 changed files with 409 additions and 164 deletions
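Interleaved thinking means the model expects its earlier reasoning to be sent back alongside assistant messages during multi-step tool calling (see the MiniMax, Z.AI and Moonshot docs linked in the reasoning utils below). A minimal, hedged sketch of how an application could opt into the new flag from the AI SDK side; the provider name, base URL and the 'openai-compatible' provider-options key are illustrative assumptions, not taken from this commit:

```ts
import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
import { streamText } from 'ai'

// Hypothetical provider instance; any OpenAI-compatible endpoint works.
const minimax = createOpenAICompatible({
  name: 'minimax',
  baseURL: 'https://api.example.com/v1',
  apiKey: process.env.MINIMAX_API_KEY ?? ''
})

const result = streamText({
  model: minimax.chatModel('minimax-m2'),
  messages: [{ role: 'user', content: 'Check the weather, then summarize it.' }],
  providerOptions: {
    // sendReasoning is the option added by the 1.0.28 patch below; when true,
    // prior reasoning parts are echoed back to the API as reasoning_content.
    'openai-compatible': { sendReasoning: true }
  }
})
// consume result.textStream / result.fullStream as usual
```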


@@ -1,140 +0,0 @@
diff --git a/dist/index.js b/dist/index.js
index 73045a7d38faafdc7f7d2cd79d7ff0e2b031056b..8d948c9ac4ea4b474db9ef3c5491961e7fcf9a07 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -421,6 +421,17 @@ var OpenAICompatibleChatLanguageModel = class {
text: reasoning
});
}
+ if (choice.message.images) {
+ for (const image of choice.message.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ content.push({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (choice.message.tool_calls != null) {
for (const toolCall of choice.message.tool_calls) {
content.push({
@@ -598,6 +609,17 @@ var OpenAICompatibleChatLanguageModel = class {
delta: delta.content
});
}
+ if (delta.images) {
+ for (const image of delta.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ controller.enqueue({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const index = toolCallDelta.index;
@@ -765,6 +787,14 @@ var OpenAICompatibleChatResponseSchema = import_v43.z.object({
arguments: import_v43.z.string()
})
})
+ ).nullish(),
+ images: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal('image_url'),
+ image_url: import_v43.z.object({
+ url: import_v43.z.string(),
+ })
+ })
).nullish()
}),
finish_reason: import_v43.z.string().nullish()
@@ -795,6 +825,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_v43.z.union(
arguments: import_v43.z.string().nullish()
})
})
+ ).nullish(),
+ images: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal('image_url'),
+ image_url: import_v43.z.object({
+ url: import_v43.z.string(),
+ })
+ })
).nullish()
}).nullish(),
finish_reason: import_v43.z.string().nullish()
diff --git a/dist/index.mjs b/dist/index.mjs
index 1c2b9560bbfbfe10cb01af080aeeed4ff59db29c..2c8ddc4fc9bfc5e7e06cfca105d197a08864c427 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -405,6 +405,17 @@ var OpenAICompatibleChatLanguageModel = class {
text: reasoning
});
}
+ if (choice.message.images) {
+ for (const image of choice.message.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ content.push({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (choice.message.tool_calls != null) {
for (const toolCall of choice.message.tool_calls) {
content.push({
@@ -582,6 +593,17 @@ var OpenAICompatibleChatLanguageModel = class {
delta: delta.content
});
}
+ if (delta.images) {
+ for (const image of delta.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ controller.enqueue({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const index = toolCallDelta.index;
@@ -749,6 +771,14 @@ var OpenAICompatibleChatResponseSchema = z3.object({
arguments: z3.string()
})
})
+ ).nullish(),
+ images: z3.array(
+ z3.object({
+ type: z3.literal('image_url'),
+ image_url: z3.object({
+ url: z3.string(),
+ })
+ })
).nullish()
}),
finish_reason: z3.string().nullish()
@@ -779,6 +809,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z3.union([
arguments: z3.string().nullish()
})
})
+ ).nullish(),
+ images: z3.array(
+ z3.object({
+ type: z3.literal('image_url'),
+ image_url: z3.object({
+ url: z3.string(),
+ })
+ })
).nullish()
}).nullish(),
finish_reason: z3.string().nullish()
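For reference, the image-handling block that appears in both the old patch above and the new 1.0.28 patch below parses OpenAI-style `image_url` data URLs with two regexes: one capturing the media type, one capturing the base64 payload, falling back to `image/jpeg` and the raw URL respectively. A standalone sketch of that logic (the helper name and types are mine, not part of the patch):

```ts
interface ParsedImage {
  mediaType: string
  data: string // base64 payload, or the original URL when it is not a data URL
}

// Mirrors the parsing in the patched dist files; illustrative only.
function parseImageUrl(url: string): ParsedImage {
  const mediaType = url.match(/^data:([^;]+)/)?.[1] ?? 'image/jpeg'       // e.g. "image/png"
  const data = url.match(/^data:[^;]*;base64,(.+)$/)?.[1] ?? url          // payload after "base64,"
  return { mediaType, data }
}

parseImageUrl('data:image/png;base64,iVBORw0KGgo=') // { mediaType: 'image/png', data: 'iVBORw0KGgo=' }
parseImageUrl('https://example.com/cat.jpg')        // { mediaType: 'image/jpeg', data: 'https://example.com/cat.jpg' }
```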


@@ -0,0 +1,266 @@
diff --git a/dist/index.d.ts b/dist/index.d.ts
index 48e2f6263c6ee4c75d7e5c28733e64f6ebe92200..00d0729c4a3cbf9a48e8e1e962c7e2b256b75eba 100644
--- a/dist/index.d.ts
+++ b/dist/index.d.ts
@@ -7,6 +7,7 @@ declare const openaiCompatibleProviderOptions: z.ZodObject<{
user: z.ZodOptional<z.ZodString>;
reasoningEffort: z.ZodOptional<z.ZodString>;
textVerbosity: z.ZodOptional<z.ZodString>;
+ sendReasoning: z.ZodOptional<z.ZodBoolean>;
}, z.core.$strip>;
type OpenAICompatibleProviderOptions = z.infer<typeof openaiCompatibleProviderOptions>;
diff --git a/dist/index.js b/dist/index.js
index da237bb35b7fa8e24b37cd861ee73dfc51cdfc72..b3060fbaf010e30b64df55302807828e5bfe0f9a 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -41,7 +41,7 @@ function getOpenAIMetadata(message) {
var _a, _b;
return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
}
-function convertToOpenAICompatibleChatMessages(prompt) {
+function convertToOpenAICompatibleChatMessages({prompt, options}) {
const messages = [];
for (const { role, content, ...message } of prompt) {
const metadata = getOpenAIMetadata({ ...message });
@@ -91,6 +91,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
}
case "assistant": {
let text = "";
+ let reasoning_text = "";
const toolCalls = [];
for (const part of content) {
const partMetadata = getOpenAIMetadata(part);
@@ -99,6 +100,12 @@ function convertToOpenAICompatibleChatMessages(prompt) {
text += part.text;
break;
}
+ case "reasoning": {
+ if (options.sendReasoning) {
+ reasoning_text += part.text;
+ }
+ break;
+ }
case "tool-call": {
toolCalls.push({
id: part.toolCallId,
@@ -116,6 +123,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
messages.push({
role: "assistant",
content: text,
+ reasoning_content: reasoning_text ?? undefined,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
...metadata
});
@@ -200,7 +208,8 @@ var openaiCompatibleProviderOptions = import_v4.z.object({
/**
* Controls the verbosity of the generated text. Defaults to `medium`.
*/
- textVerbosity: import_v4.z.string().optional()
+ textVerbosity: import_v4.z.string().optional(),
+ sendReasoning: import_v4.z.boolean().optional()
});
// src/openai-compatible-error.ts
@@ -378,7 +387,7 @@ var OpenAICompatibleChatLanguageModel = class {
reasoning_effort: compatibleOptions.reasoningEffort,
verbosity: compatibleOptions.textVerbosity,
// messages:
- messages: convertToOpenAICompatibleChatMessages(prompt),
+ messages: convertToOpenAICompatibleChatMessages({prompt, options: compatibleOptions}),
// tools:
tools: openaiTools,
tool_choice: openaiToolChoice
@@ -421,6 +430,17 @@ var OpenAICompatibleChatLanguageModel = class {
text: reasoning
});
}
+ if (choice.message.images) {
+ for (const image of choice.message.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ content.push({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (choice.message.tool_calls != null) {
for (const toolCall of choice.message.tool_calls) {
content.push({
@@ -598,6 +618,17 @@ var OpenAICompatibleChatLanguageModel = class {
delta: delta.content
});
}
+ if (delta.images) {
+ for (const image of delta.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ controller.enqueue({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const index = toolCallDelta.index;
@@ -765,6 +796,14 @@ var OpenAICompatibleChatResponseSchema = import_v43.z.object({
arguments: import_v43.z.string()
})
})
+ ).nullish(),
+ images: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal('image_url'),
+ image_url: import_v43.z.object({
+ url: import_v43.z.string(),
+ })
+ })
).nullish()
}),
finish_reason: import_v43.z.string().nullish()
@@ -795,6 +834,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_v43.z.union(
arguments: import_v43.z.string().nullish()
})
})
+ ).nullish(),
+ images: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal('image_url'),
+ image_url: import_v43.z.object({
+ url: import_v43.z.string(),
+ })
+ })
).nullish()
}).nullish(),
finish_reason: import_v43.z.string().nullish()
diff --git a/dist/index.mjs b/dist/index.mjs
index a809a7aa0e148bfd43e01dd7b018568b151c8ad5..565b605eeacd9830b2b0e817e58ad0c5700264de 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -23,7 +23,7 @@ function getOpenAIMetadata(message) {
var _a, _b;
return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
}
-function convertToOpenAICompatibleChatMessages(prompt) {
+function convertToOpenAICompatibleChatMessages({prompt, options}) {
const messages = [];
for (const { role, content, ...message } of prompt) {
const metadata = getOpenAIMetadata({ ...message });
@@ -73,6 +73,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
}
case "assistant": {
let text = "";
+ let reasoning_text = "";
const toolCalls = [];
for (const part of content) {
const partMetadata = getOpenAIMetadata(part);
@@ -81,6 +82,12 @@ function convertToOpenAICompatibleChatMessages(prompt) {
text += part.text;
break;
}
+ case "reasoning": {
+ if (options.sendReasoning) {
+ reasoning_text += part.text;
+ }
+ break;
+ }
case "tool-call": {
toolCalls.push({
id: part.toolCallId,
@@ -98,6 +105,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
messages.push({
role: "assistant",
content: text,
+ reasoning_content: reasoning_text ?? undefined,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
...metadata
});
@@ -182,7 +190,8 @@ var openaiCompatibleProviderOptions = z.object({
/**
* Controls the verbosity of the generated text. Defaults to `medium`.
*/
- textVerbosity: z.string().optional()
+ textVerbosity: z.string().optional(),
+ sendReasoning: z.boolean().optional()
});
// src/openai-compatible-error.ts
@@ -362,7 +371,7 @@ var OpenAICompatibleChatLanguageModel = class {
reasoning_effort: compatibleOptions.reasoningEffort,
verbosity: compatibleOptions.textVerbosity,
// messages:
- messages: convertToOpenAICompatibleChatMessages(prompt),
+ messages: convertToOpenAICompatibleChatMessages({prompt, options: compatibleOptions}),
// tools:
tools: openaiTools,
tool_choice: openaiToolChoice
@@ -405,6 +414,17 @@ var OpenAICompatibleChatLanguageModel = class {
text: reasoning
});
}
+ if (choice.message.images) {
+ for (const image of choice.message.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ content.push({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (choice.message.tool_calls != null) {
for (const toolCall of choice.message.tool_calls) {
content.push({
@@ -582,6 +602,17 @@ var OpenAICompatibleChatLanguageModel = class {
delta: delta.content
});
}
+ if (delta.images) {
+ for (const image of delta.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ controller.enqueue({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const index = toolCallDelta.index;
@@ -749,6 +780,14 @@ var OpenAICompatibleChatResponseSchema = z3.object({
arguments: z3.string()
})
})
+ ).nullish(),
+ images: z3.array(
+ z3.object({
+ type: z3.literal('image_url'),
+ image_url: z3.object({
+ url: z3.string(),
+ })
+ })
).nullish()
}),
finish_reason: z3.string().nullish()
@@ -779,6 +818,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z3.union([
arguments: z3.string().nullish()
})
})
+ ).nullish(),
+ images: z3.array(
+ z3.object({
+ type: z3.literal('image_url'),
+ image_url: z3.object({
+ url: z3.string(),
+ })
+ })
).nullish()
}).nullish(),
finish_reason: z3.string().nullish()
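The substantive addition in the 1.0.28 patch is in convertToOpenAICompatibleChatMessages: when the new sendReasoning provider option is set, assistant reasoning parts are concatenated and sent back to the provider as reasoning_content, which interleaved-thinking APIs expect across tool-call turns. A simplified, self-contained sketch of that conversion (tool calls and per-part metadata are omitted; field names follow the diff above):

```ts
type AssistantPart =
  | { type: 'text'; text: string }
  | { type: 'reasoning'; text: string }

interface CompatAssistantMessage {
  role: 'assistant'
  content: string
  reasoning_content?: string
}

// Simplified version of the patched conversion; illustrative only.
function toCompatAssistantMessage(parts: AssistantPart[], sendReasoning?: boolean): CompatAssistantMessage {
  let text = ''
  let reasoning = ''
  for (const part of parts) {
    if (part.type === 'text') text += part.text
    // Reasoning parts are only echoed back when the caller opted in.
    if (part.type === 'reasoning' && sendReasoning) reasoning += part.text
  }
  return {
    role: 'assistant',
    content: text,
    reasoning_content: reasoning || undefined // omit when there is nothing to send
  }
}

toCompatAssistantMessage(
  [
    { type: 'reasoning', text: 'Check the weather tool first.' },
    { type: 'text', text: 'Let me look that up.' }
  ],
  true
)
// => { role: 'assistant', content: 'Let me look that up.', reasoning_content: 'Check the weather tool first.' }
```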


@@ -416,7 +416,9 @@
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.85#~/.yarn/patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch",
"@ai-sdk/google@npm:^2.0.40": "patch:@ai-sdk/google@npm%3A2.0.40#~/.yarn/patches/@ai-sdk-google-npm-2.0.40-47e0eeee83.patch",
"@ai-sdk/openai-compatible@npm:^1.0.27": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch",
- "@ai-sdk/google@npm:2.0.49": "patch:@ai-sdk/google@npm%3A2.0.49#~/.yarn/patches/@ai-sdk-google-npm-2.0.49-84720f41bd.patch"
+ "@ai-sdk/google@npm:2.0.49": "patch:@ai-sdk/google@npm%3A2.0.49#~/.yarn/patches/@ai-sdk-google-npm-2.0.49-84720f41bd.patch",
+ "@ai-sdk/openai-compatible@npm:1.0.27": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch",
+ "@ai-sdk/openai-compatible@npm:^1.0.19": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch"
},
"packageManager": "yarn@4.9.1",
"lint-staged": {

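The resolutions entries above use Yarn's patch: protocol, which pins a dependency range to a locally stored patch file. A quick, illustrative decode of one of the new values (not repo code; the annotations are mine):

```ts
// Decoding one resolutions value for readability.
const locator =
  'patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch'
const [, base, patchPath] = locator.match(/^patch:(.+?)#(.+)$/)!
decodeURIComponent(base) // '@ai-sdk/openai-compatible@npm:1.0.28'  (the package being patched; %3A is an encoded ':')
patchPath                // '~/.yarn/patches/...' where '~/' is Yarn's shorthand for the project root, not $HOME
```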

@@ -41,7 +41,7 @@
"ai": "^5.0.26"
},
"dependencies": {
- "@ai-sdk/openai-compatible": "^1.0.28",
+ "@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch",
"@ai-sdk/provider": "^2.0.0",
"@ai-sdk/provider-utils": "^3.0.17"
},


@@ -42,7 +42,7 @@
"@ai-sdk/anthropic": "^2.0.49",
"@ai-sdk/azure": "^2.0.87",
"@ai-sdk/deepseek": "^1.0.31",
- "@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch",
+ "@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch",
"@ai-sdk/provider": "^2.0.0",
"@ai-sdk/provider-utils": "^3.0.17",
"@ai-sdk/xai": "^2.0.36",


@@ -10,6 +10,7 @@ import {
isAnthropicModel,
isGeminiModel,
isGrokModel,
+ isInterleavedThinkingModel,
isOpenAIModel,
isOpenAIOpenWeightModel,
isQwenMTModel,
@@ -603,7 +604,7 @@
enableGenerateImage: boolean
}
): Record<string, any> {
- const { enableWebSearch } = capabilities
+ const { enableWebSearch, enableReasoning } = capabilities
let providerOptions: Record<string, any> = {}
const reasoningParams = getReasoningEffort(assistant, model)
@@ -611,6 +612,14 @@
...providerOptions,
...reasoningParams
}
+ if (enableReasoning) {
+ if (isInterleavedThinkingModel(model)) {
+ providerOptions = {
+ ...providerOptions,
+ sendReasoning: true
+ }
+ }
+ }
if (enableWebSearch) {
const webSearchParams = getWebSearchParams(model)


@@ -17,6 +17,7 @@ import {
isGeminiReasoningModel,
isGrok4FastReasoningModel,
isHunyuanReasoningModel,
+ isInterleavedThinkingModel,
isLingReasoningModel,
isMiniMaxReasoningModel,
isPerplexityReasoningModel,
@@ -2157,3 +2158,105 @@
})
})
})
describe('isInterleavedThinkingModel', () => {
describe('MiniMax models', () => {
it('should return true for minimax-m2', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2' }))).toBe(true)
})
it('should return true for minimax-m2.1', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2.1' }))).toBe(true)
})
it('should return true for minimax-m2 with suffixes', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2-pro' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2-preview' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2-lite' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2-ultra-lite' }))).toBe(true)
})
it('should return true for minimax-m2.x with suffixes', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2.1-pro' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2.2-preview' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m2.5-lite' }))).toBe(true)
})
it('should return false for non-m2 minimax models', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m1' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-m3' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'minimax-pro' }))).toBe(false)
})
it('should handle case insensitivity', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'MiniMax-M2' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'MINIMAX-M2.1' }))).toBe(true)
})
})
describe('MiMo models', () => {
it('should return true for mimo-v2-flash', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'mimo-v2-flash' }))).toBe(true)
})
it('should return false for other mimo models', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'mimo-v1-flash' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'mimo-v2' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'mimo-v2-pro' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'mimo-flash' }))).toBe(false)
})
it('should handle case insensitivity', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'MiMo-V2-Flash' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'MIMO-V2-FLASH' }))).toBe(true)
})
})
describe('Zhipu GLM models', () => {
it('should return true for glm-4.5', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.5' }))).toBe(true)
})
it('should return true for glm-4.6', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.6' }))).toBe(true)
})
it('should return true for glm-4.7 and higher versions', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.7' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.8' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.9' }))).toBe(true)
})
it('should return true for glm-4.x with suffixes', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.5-pro' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.6-preview' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.7-lite' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4.8-ultra' }))).toBe(true)
})
it('should return false for glm-4 without decimal version', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'glm-4-pro' }))).toBe(false)
})
it('should return false for other glm models', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'glm-3.5' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'glm-5.0' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'glm-zero-preview' }))).toBe(false)
})
it('should handle case insensitivity', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'GLM-4.5' }))).toBe(true)
expect(isInterleavedThinkingModel(createModel({ id: 'Glm-4.6-Pro' }))).toBe(true)
})
})
describe('Non-matching models', () => {
it('should return false for unrelated models', () => {
expect(isInterleavedThinkingModel(createModel({ id: 'gpt-4' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'claude-3-opus' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'gemini-pro' }))).toBe(false)
expect(isInterleavedThinkingModel(createModel({ id: 'deepseek-v3' }))).toBe(false)
})
})
})


@@ -738,3 +738,20 @@ export const findTokenLimit = (modelId: string): { min: number; max: number } |
 */
export const isFixedReasoningModel = (model: Model) =>
isReasoningModel(model) && !isSupportedThinkingTokenModel(model) && !isSupportedReasoningEffortModel(model)
+ // https://platform.minimaxi.com/docs/guides/text-m2-function-call#openai-sdk
+ // https://docs.z.ai/guides/capabilities/thinking-mode
+ // https://platform.moonshot.cn/docs/guide/use-kimi-k2-thinking-model#%E5%A4%9A%E6%AD%A5%E5%B7%A5%E5%85%B7%E8%B0%83%E7%94%A8
+ const INTERLEAVED_THINKING_MODEL_REGEX =
+ /minimax-m2(.(\d+))?(?:-[\w-]+)?|mimo-v2-flash|glm-4.(\d+)(?:-[\w-]+)?|kimi-k2-thinking?$/i
+ /**
+ * Determines whether the given model supports interleaved thinking.
+ *
+ * @param model - The model object to check.
+ * @returns `true` if the model's ID matches the interleaved thinking model pattern; otherwise, `false`.
+ */
+ export const isInterleavedThinkingModel = (model: Model) => {
+ const modelId = getLowerBaseModelName(model.id)
+ return INTERLEAVED_THINKING_MODEL_REGEX.test(modelId)
+ }

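As a quick sanity check, the committed pattern can be exercised directly against a few of the IDs covered in the tests above (results verified by hand, shown as comments):

```ts
const INTERLEAVED_THINKING_MODEL_REGEX =
  /minimax-m2(.(\d+))?(?:-[\w-]+)?|mimo-v2-flash|glm-4.(\d+)(?:-[\w-]+)?|kimi-k2-thinking?$/i

INTERLEAVED_THINKING_MODEL_REGEX.test('minimax-m2.1-pro') // true
INTERLEAVED_THINKING_MODEL_REGEX.test('glm-4.6-preview')  // true
INTERLEAVED_THINKING_MODEL_REGEX.test('mimo-v2-flash')    // true
INTERLEAVED_THINKING_MODEL_REGEX.test('kimi-k2-thinking') // true
INTERLEAVED_THINKING_MODEL_REGEX.test('glm-4')            // false (no decimal minor version)
INTERLEAVED_THINKING_MODEL_REGEX.test('gpt-4')            // false
```

Note that isInterleavedThinkingModel first normalizes the ID via getLowerBaseModelName, and the regex carries the /i flag, so matching is case-insensitive in practice.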

@@ -242,19 +242,7 @@ __metadata:
languageName: node
linkType: hard
- "@ai-sdk/openai-compatible@npm:1.0.27, @ai-sdk/openai-compatible@npm:^1.0.19":
- version: 1.0.27
- resolution: "@ai-sdk/openai-compatible@npm:1.0.27"
- dependencies:
- "@ai-sdk/provider": "npm:2.0.0"
- "@ai-sdk/provider-utils": "npm:3.0.17"
- peerDependencies:
- zod: ^3.25.76 || ^4.1.8
- checksum: 10c0/9f656e4f2ea4d714dc05be588baafd962b2e0360e9195fef373e745efeb20172698ea87e1033c0c5e1f1aa6e0db76a32629427bc8433eb42bd1a0ee00e04af0c
- languageName: node
- linkType: hard
- "@ai-sdk/openai-compatible@npm:^1.0.28":
+ "@ai-sdk/openai-compatible@npm:1.0.28":
version: 1.0.28
resolution: "@ai-sdk/openai-compatible@npm:1.0.28"
dependencies:
@@ -266,15 +254,15 @@
languageName: node
linkType: hard
- "@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch":
- version: 1.0.27
- resolution: "@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch::version=1.0.27&hash=c44b76"
+ "@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch":
+ version: 1.0.28
+ resolution: "@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch::version=1.0.28&hash=f2cb20"
dependencies:
"@ai-sdk/provider": "npm:2.0.0"
- "@ai-sdk/provider-utils": "npm:3.0.17"
+ "@ai-sdk/provider-utils": "npm:3.0.18"
peerDependencies:
zod: ^3.25.76 || ^4.1.8
- checksum: 10c0/80c8331bc5fc62dc23d99d861bdc76e4eaf8b4b071d0b2bfa42fbd87f50b1bcdfa5ce4a4deaf7026a603a1ba6eaf5c884d87e3c58b4d6515c220121d3f421de5
+ checksum: 10c0/0b1d99fe8ce506e5c0a3703ae0511ac2017781584074d41faa2df82923c64eb1229ffe9f036de150d0248923613c761a463fe89d5923493983e0463a1101e792
languageName: node
linkType: hard
@@ -1880,7 +1868,7 @@
"@ai-sdk/anthropic": "npm:^2.0.49"
"@ai-sdk/azure": "npm:^2.0.87"
"@ai-sdk/deepseek": "npm:^1.0.31"
- "@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch"
+ "@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch"
"@ai-sdk/provider": "npm:^2.0.0"
"@ai-sdk/provider-utils": "npm:^3.0.17"
"@ai-sdk/xai": "npm:^2.0.36"
@@ -1900,7 +1888,7 @@
version: 0.0.0-use.local
resolution: "@cherrystudio/ai-sdk-provider@workspace:packages/ai-sdk-provider"
dependencies:
- "@ai-sdk/openai-compatible": "npm:^1.0.28"
+ "@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch"
"@ai-sdk/provider": "npm:^2.0.0"
"@ai-sdk/provider-utils": "npm:^3.0.17"
tsdown: "npm:^0.13.3"