refactor: remove deprecated max token settings from OpenAIProvider and OpenAIResponseProvider

kangfenmao 2025-05-16 14:36:40 +08:00
parent 5701b09c23
commit d0a368d9ef
2 changed files with 1 addition and 9 deletions


@@ -1139,8 +1139,6 @@ export default class OpenAIProvider extends BaseOpenAIProvider {
     const body = {
       model: model.id,
       messages: [{ role: 'user', content: 'hi' }],
-      max_completion_tokens: 1, // openAI
-      max_tokens: 1, // deprecated by OpenAI, but most OpenAI-compatible providers still use this parameter
       enable_thinking: false, // qwen3
       stream
     }

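For reference, the key check on the chat-completions path now sends only the fields left in the hunk above, with no token cap. Below is a minimal TypeScript sketch, assuming a plain fetch against an OpenAI-compatible /chat/completions endpoint; BASE_URL, API_KEY and probeChatCompletions are illustrative names, not identifiers from the repository.

const BASE_URL = 'https://api.example.com/v1' // placeholder endpoint
const API_KEY = process.env.API_KEY ?? ''      // placeholder credential

// Send the same one-word probe as the provider and treat any 2xx reply as a valid key.
async function probeChatCompletions(modelId: string): Promise<boolean> {
  const res = await fetch(`${BASE_URL}/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${API_KEY}`
    },
    body: JSON.stringify({
      model: modelId,
      messages: [{ role: 'user', content: 'hi' }],
      enable_thinking: false, // qwen3, as in the diff
      stream: false
    })
  })
  return res.ok
}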

@@ -907,24 +907,18 @@ export abstract class BaseOpenAIProvider extends BaseProvider {
       const response = await this.sdk.responses.create({
         model: model.id,
         input: [{ role: 'user', content: 'hi' }],
-        max_output_tokens: 1,
         stream: true
       })
-      let hasContent = false
       for await (const chunk of response) {
         if (chunk.type === 'response.output_text.delta') {
-          hasContent = true
+          return { valid: true, error: null }
         }
       }
-      if (hasContent) {
-        return { valid: true, error: null }
-      }
       throw new Error('Empty streaming response')
     } else {
       const response = await this.sdk.responses.create({
         model: model.id,
         input: [{ role: 'user', content: 'hi' }],
-        max_output_tokens: 1,
         stream: false
       })
       if (!response.output_text) {
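The non-streaming branch is truncated in the hunk above; the sketch below fills in its tail with an assumed error path. It shows the post-change Responses API check as a standalone TypeScript function using the official openai package (the provider itself goes through this.sdk); client, checkResponsesApi and the 'Empty response' message are illustrative, not taken from the repository.

import OpenAI from 'openai'

const client = new OpenAI() // reads OPENAI_API_KEY from the environment

// Streaming: valid as soon as the first text delta arrives (no hasContent flag anymore).
// Non-streaming: valid when output_text is non-empty.
async function checkResponsesApi(modelId: string, useStream: boolean) {
  if (useStream) {
    const stream = await client.responses.create({
      model: modelId,
      input: [{ role: 'user', content: 'hi' }],
      stream: true
    })
    for await (const chunk of stream) {
      if (chunk.type === 'response.output_text.delta') {
        return { valid: true, error: null }
      }
    }
    throw new Error('Empty streaming response')
  }
  const response = await client.responses.create({
    model: modelId,
    input: [{ role: 'user', content: 'hi' }],
    stream: false
  })
  if (!response.output_text) {
    throw new Error('Empty response') // assumed tail; the original diff is cut off here
  }
  return { valid: true, error: null }
}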