mirror of https://github.com/CherryHQ/cherry-studio.git
synced 2026-01-06 21:35:52 +08:00

feat: minimize token usage when testing model (#5905)

This commit is contained in:
parent 84b205cac2
commit 55c78382e4
@@ -678,7 +678,7 @@ export default class AnthropicProvider extends BaseProvider {
     const body = {
       model: model.id,
       messages: [{ role: 'user' as const, content: 'hi' }],
-      max_tokens: 100,
+      max_tokens: 2, // the API docs say this must be > 1
       stream
     }
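For context, a minimal sketch of what this connectivity probe amounts to with the official @anthropic-ai/sdk client. The standalone client, the helper name, and the boolean return are illustrative assumptions, not code from this repository:

```ts
// Sketch only: a one-shot "hi" probe against the Anthropic Messages API.
// Assumes the official @anthropic-ai/sdk; checkAnthropicModel is illustrative.
import Anthropic from '@anthropic-ai/sdk'

async function checkAnthropicModel(apiKey: string, modelId: string): Promise<boolean> {
  const client = new Anthropic({ apiKey })
  // max_tokens is capped at 2 so the probe costs almost nothing;
  // the commit notes the API requires a value greater than 1.
  const message = await client.messages.create({
    model: modelId,
    messages: [{ role: 'user', content: 'hi' }],
    max_tokens: 2
  })
  // any returned content block means the key and model are usable
  return message.content.length > 0
}
```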
@@ -916,7 +916,7 @@ export default class GeminiProvider extends BaseProvider {
       model: model.id,
       contents: [{ role: 'user', parts: [{ text: 'hi' }] }],
       config: {
-        maxOutputTokens: 100
+        maxOutputTokens: 1
       }
     })
     if (isEmpty(result.text)) {
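The Gemini check is the same idea: ask for a single output token and verify that any text comes back. A minimal sketch assuming the @google/genai SDK; names outside the diff are illustrative:

```ts
// Sketch only: non-streaming Gemini probe, assuming the @google/genai SDK.
import { GoogleGenAI } from '@google/genai'

async function checkGeminiModel(apiKey: string, modelId: string): Promise<boolean> {
  const ai = new GoogleGenAI({ apiKey })
  const result = await ai.models.generateContent({
    model: modelId,
    contents: [{ role: 'user', parts: [{ text: 'hi' }] }],
    // one output token is enough to confirm the model responds
    config: { maxOutputTokens: 1 }
  })
  return Boolean(result.text)
}
```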
@@ -927,7 +927,7 @@ export default class GeminiProvider extends BaseProvider {
       model: model.id,
       contents: [{ role: 'user', parts: [{ text: 'hi' }] }],
       config: {
-        maxOutputTokens: 100
+        maxOutputTokens: 1
       }
     })
     // wait for the entire streaming response to finish
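The second Gemini hunk covers the streaming path. A sketch of the same one-token probe, assuming generateContentStream from @google/genai; the helper name and return value are illustrative:

```ts
// Sketch only: streaming Gemini probe, assuming @google/genai.
import { GoogleGenAI } from '@google/genai'

async function checkGeminiModelStream(apiKey: string, modelId: string): Promise<boolean> {
  const ai = new GoogleGenAI({ apiKey })
  const stream = await ai.models.generateContentStream({
    model: modelId,
    contents: [{ role: 'user', parts: [{ text: 'hi' }] }],
    config: { maxOutputTokens: 1 }
  })
  // wait for the entire streaming response to finish, as in the original comment
  let text = ''
  for await (const chunk of stream) {
    text += chunk.text ?? ''
  }
  return text.length > 0
}
```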
@@ -1112,6 +1112,9 @@ export default class OpenAICompatibleProvider extends BaseOpenAiProvider {
     const body = {
       model: model.id,
       messages: [{ role: 'user', content: 'hi' }],
+      max_completion_tokens: 1, // OpenAI
+      max_tokens: 1, // deprecated by OpenAI, but most OpenAI-compatible providers still use this field
+      enable_thinking: false, // qwen3
       stream
     }
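For OpenAI-compatible providers the commit sends both token caps: max_completion_tokens is the current OpenAI parameter, while many compatible backends still only read the deprecated max_tokens; enable_thinking: false additionally disables Qwen3's reasoning output. A rough sketch with the official openai SDK pointed at a compatible baseURL; the helper name and wiring are assumptions:

```ts
// Sketch only: minimal-cost probe for an OpenAI-compatible endpoint.
// Assumes the official openai SDK; non-standard fields are passed through
// because body is built as a plain object rather than an inline literal.
import OpenAI from 'openai'

async function checkCompatibleModel(baseURL: string, apiKey: string, modelId: string) {
  const client = new OpenAI({ baseURL, apiKey })
  const body = {
    model: modelId,
    messages: [{ role: 'user' as const, content: 'hi' }],
    max_completion_tokens: 1, // current OpenAI parameter
    max_tokens: 1, // deprecated by OpenAI, still read by most compatible providers
    enable_thinking: false, // Qwen3-specific switch; ignored elsewhere
    stream: false as const
  }
  const completion = await client.chat.completions.create(body)
  return Boolean(completion.choices[0]?.message?.content)
}
```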
@@ -1026,6 +1026,7 @@ export abstract class BaseOpenAiProvider extends BaseProvider {
     const response = await this.sdk.responses.create({
       model: model.id,
       input: [{ role: 'user', content: 'hi' }],
+      max_output_tokens: 1,
       stream: true
     })
     let hasContent = false
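A sketch of the streaming Responses API probe, assuming this.sdk is the official openai client; the event-type check and the helper name are illustrative assumptions:

```ts
// Sketch only: streaming Responses API probe with a one-token cap.
import OpenAI from 'openai'

async function checkResponsesStreaming(client: OpenAI, modelId: string): Promise<boolean> {
  const response = await client.responses.create({
    model: modelId,
    input: [{ role: 'user', content: 'hi' }],
    max_output_tokens: 1, // cap the probe at a single output token
    stream: true
  })
  let hasContent = false
  for await (const event of response) {
    // any text delta event is enough to prove the model answered
    if (event.type === 'response.output_text.delta') {
      hasContent = true
    }
  }
  return hasContent
}
```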
@@ -1042,7 +1043,8 @@ export abstract class BaseOpenAiProvider extends BaseProvider {
     const response = await this.sdk.responses.create({
       model: model.id,
       input: [{ role: 'user', content: 'hi' }],
-      stream: false
+      stream: false,
+      max_output_tokens: 1
     })
     if (!response.output_text) {
       throw new Error('Empty response')
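And the non-streaming fallback, again assuming the official openai SDK; relative to the surrounding code, only the one-token cap is new:

```ts
// Sketch only: non-streaming Responses API probe with the same one-token cap.
import OpenAI from 'openai'

async function checkResponsesNonStreaming(client: OpenAI, modelId: string): Promise<void> {
  const response = await client.responses.create({
    model: modelId,
    input: [{ role: 'user', content: 'hi' }],
    stream: false,
    max_output_tokens: 1 // same one-token cap as the streaming path
  })
  if (!response.output_text) {
    throw new Error('Empty response')
  }
}
```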