feat: support streaming for model health check (#5546)

one authored 2025-04-30 22:25:32 +08:00 · committed by GitHub
parent 28ec990100
commit aaf396f83a
6 changed files with 97 additions and 28 deletions


@@ -531,25 +531,50 @@ export default class AnthropicProvider extends BaseProvider {
   /**
    * Check if the model is valid
    * @param model - The model
+   * @param stream - Whether to use streaming interface
    * @returns The validity of the model
    */
-  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
+  public async check(model: Model, stream: boolean = false): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }
     const body = {
       model: model.id,
-      messages: [{ role: 'user', content: 'hi' }],
+      messages: [{ role: 'user' as const, content: 'hi' }],
       max_tokens: 100,
-      stream: false
+      stream
     }
     try {
-      const message = await this.sdk.messages.create(body as MessageCreateParamsNonStreaming)
-      return {
-        valid: message.content.length > 0,
-        error: null
+      if (!stream) {
+        const message = await this.sdk.messages.create(body as MessageCreateParamsNonStreaming)
+        return {
+          valid: message.content.length > 0,
+          error: null
+        }
+      } else {
+        return await new Promise((resolve, reject) => {
+          let hasContent = false
+          this.sdk.messages
+            .stream(body)
+            .on('text', (text) => {
+              if (!hasContent && text) {
+                hasContent = true
+                resolve({ valid: true, error: null })
+              }
+            })
+            .on('finalMessage', (message) => {
+              if (!hasContent && message.content && message.content.length > 0) {
+                hasContent = true
+                resolve({ valid: true, error: null })
+              }
+              if (!hasContent) {
+                reject(new Error('Empty streaming response'))
+              }
+            })
+            .on('error', (error) => reject(error))
+        })
       }
     } catch (error: any) {
       return {

@@ -43,7 +43,7 @@ export default abstract class BaseProvider {
   abstract summaryForSearch(messages: Message[], assistant: Assistant): Promise<string | null>
   abstract suggestions(messages: Message[], assistant: Assistant): Promise<Suggestion[]>
   abstract generateText({ prompt, content }: { prompt: string; content: string }): Promise<string>
-  abstract check(model: Model): Promise<{ valid: boolean; error: Error | null }>
+  abstract check(model: Model, stream: boolean): Promise<{ valid: boolean; error: Error | null }>
   abstract models(): Promise<OpenAI.Models.Model[]>
   abstract generateImage(params: GenerateImageParams): Promise<string[]>
   abstract generateImageByChat({ messages, assistant, onChunk, onFilterMessages }: CompletionsParams): Promise<void>


@@ -740,25 +740,47 @@ export default class GeminiProvider extends BaseProvider {
   /**
    * Check if the model is valid
    * @param model - The model
+   * @param stream - Whether to use streaming interface
    * @returns The validity of the model
    */
-  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
+  public async check(model: Model, stream: boolean = false): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }
     try {
-      const result = await this.sdk.models.generateContent({
-        model: model.id,
-        contents: [{ role: 'user', parts: [{ text: 'hi' }] }],
-        config: {
-          maxOutputTokens: 100
+      if (!stream) {
+        const result = await this.sdk.models.generateContent({
+          model: model.id,
+          contents: [{ role: 'user', parts: [{ text: 'hi' }] }],
+          config: {
+            maxOutputTokens: 100
+          }
+        })
+        if (isEmpty(result.text)) {
+          throw new Error('Empty response')
+        }
+      } else {
+        const response = await this.sdk.models.generateContentStream({
+          model: model.id,
+          contents: [{ role: 'user', parts: [{ text: 'hi' }] }],
+          config: {
+            maxOutputTokens: 100
+          }
+        })
+        // Wait for the entire streaming response to finish
+        let hasContent = false
+        for await (const chunk of response) {
+          if (chunk.text && chunk.text.length > 0) {
+            hasContent = true
+            break
+          }
+        }
+        if (!hasContent) {
+          throw new Error('Empty streaming response')
         }
-      })
-      return {
-        valid: !isEmpty(result.text),
-        error: null
       }
+      return { valid: true, error: null }
     } catch (error: any) {
       return {
         valid: false,

@@ -962,26 +962,41 @@ export default class OpenAIProvider extends BaseProvider {
   /**
    * Check if the model is valid
    * @param model - The model
+   * @param stream - Whether to use streaming interface
    * @returns The validity of the model
    */
-  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
+  public async check(model: Model, stream: boolean = false): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }
     const body = {
       model: model.id,
       messages: [{ role: 'user', content: 'hi' }],
-      stream: false
+      stream
     }
     try {
       await this.checkIsCopilot()
       console.debug('[checkModel] body', model.id, body)
-      const response = await this.sdk.chat.completions.create(body as ChatCompletionCreateParamsNonStreaming)
-      return {
-        valid: Boolean(response?.choices[0].message),
-        error: null
+      if (!stream) {
+        const response = await this.sdk.chat.completions.create(body as ChatCompletionCreateParamsNonStreaming)
+        if (!response?.choices[0].message) {
+          throw new Error('Empty response')
+        }
+        return { valid: true, error: null }
+      } else {
+        const response: any = await this.sdk.chat.completions.create(body as any)
+        // Wait for the entire streaming response to finish
+        let hasContent = false
+        for await (const chunk of response) {
+          if (chunk.choices?.[0]?.delta?.content) {
+            hasContent = true
+          }
+        }
+        if (hasContent) {
+          return { valid: true, error: null }
+        }
+        throw new Error('Empty streaming response')
       }
     } catch (error: any) {
       return {
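This branch consumes the entire stream before deciding. A sketch of an equivalent probe with the openai SDK's typed streaming API that instead returns on the first delta (the probeOpenAI helper and the early return are illustrative choices, not from the commit):

import OpenAI from 'openai'

// Sends a one-word prompt with stream: true and looks for any delta content.
async function probeOpenAI(client: OpenAI, modelId: string): Promise<boolean> {
  const stream = await client.chat.completions.create({
    model: modelId,
    messages: [{ role: 'user', content: 'hi' }],
    stream: true
  })
  for await (const chunk of stream) {
    if (chunk.choices?.[0]?.delta?.content) {
      return true // first token proves the model and route are live
    }
  }
  return false
}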


@@ -59,8 +59,8 @@ export default class AiProvider {
     return this.sdk.generateText({ prompt, content })
   }
 
-  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
-    return this.sdk.check(model)
+  public async check(model: Model, stream: boolean = false): Promise<{ valid: boolean; error: Error | null }> {
+    return this.sdk.check(model, stream)
   }
 
   public async models(): Promise<OpenAI.Models.Model[]> {


@@ -82,7 +82,14 @@ export async function checkModel(provider: Provider, model: Model) {
   return performModelCheck(
     provider,
     model,
-    (ai, model) => ai.check(model),
+    async (ai, model) => {
+      const result = await ai.check(model, false)
+      if (result.valid && !result.error) {
+        return result
+      }
+      // Try streaming check
+      return ai.check(model, true)
+    },
     ({ valid, error }) => ({ valid, error: error || null })
   )
 }
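The wrapper attempts the plain request first and falls back to the streaming probe only when that check fails, which covers gateways that reject stream: false or return empty non-streamed bodies. A hypothetical call site (provider and model are placeholders):

// Non-streaming succeeds -> one request; otherwise a second, streaming attempt runs.
const { valid, error } = await checkModel(provider, model)
if (!valid) {
  console.error('Model health check failed:', error?.message)
}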