feat(models): refactor models service to use new API models schema and types

This commit is contained in:
Vaayne 2025-09-19 16:36:11 +08:00
parent 2cf2f04a70
commit d91df12dbc
4 changed files with 50 additions and 34 deletions

View File

@ -1,29 +1,20 @@
import { z } from 'zod' import {
ApiModelsRequest,
ApiModelsRequestSchema,
ApiModelsResponse,
OpenAICompatibleModel
} from '../../../renderer/src/types/apiModels'
import { loggerService } from '../../services/LoggerService' import { loggerService } from '../../services/LoggerService'
import { getAvailableProviders, listAllAvailableModels, OpenAICompatibleModel, transformModelToOpenAI } from '../utils' import { getAvailableProviders, listAllAvailableModels, transformModelToOpenAI } from '../utils'
const logger = loggerService.withContext('ModelsService') const logger = loggerService.withContext('ModelsService')
// Zod schema for models filtering // Re-export for backward compatibility
export const ModelsFilterSchema = z.object({ export const ModelsFilterSchema = ApiModelsRequestSchema
provider: z.enum(['openai', 'anthropic']).optional(), export type ModelsFilter = ApiModelsRequest
offset: z.coerce.number().min(0).default(0).optional(),
limit: z.coerce.number().min(1).optional()
})
export type ModelsFilter = z.infer<typeof ModelsFilterSchema>
export interface ModelsResponse {
object: 'list'
data: OpenAICompatibleModel[]
total?: number
offset?: number
limit?: number
}
export class ModelsService { export class ModelsService {
async getModels(filter?: ModelsFilter): Promise<ModelsResponse> { async getModels(filter?: ModelsFilter): Promise<ApiModelsResponse> {
try { try {
logger.info('Getting available models from providers', { filter }) logger.info('Getting available models from providers', { filter })
@ -83,7 +74,7 @@ export class ModelsService {
logger.debug(`Filtered out ${models.length - total} models after deduplication and filtering`) logger.debug(`Filtered out ${models.length - total} models after deduplication and filtering`)
} }
const response: ModelsResponse = { const response: ApiModelsResponse = {
object: 'list', object: 'list',
data: modelData data: modelData
} }

View File

@ -1,20 +1,9 @@
import { loggerService } from '@main/services/LoggerService' import { loggerService } from '@main/services/LoggerService'
import { reduxService } from '@main/services/ReduxService' import { reduxService } from '@main/services/ReduxService'
import { Model, Provider } from '@types' import { Model, OpenAICompatibleModel, Provider } from '@types'
const logger = loggerService.withContext('ApiServerUtils') const logger = loggerService.withContext('ApiServerUtils')
// OpenAI compatible model format
export interface OpenAICompatibleModel {
id: string
object: 'model'
created: number
name: string
owned_by: string
provider?: string
provider_model_id?: string
}
export async function getAvailableProviders(): Promise<Provider[]> { export async function getAvailableProviders(): Promise<Provider[]> {
try { try {
// Wait for store to be ready before accessing providers // Wait for store to be ready before accessing providers
@ -25,7 +14,9 @@ export async function getAvailableProviders(): Promise<Provider[]> {
} }
// Support OpenAI and Anthropic type providers for API server // Support OpenAI and Anthropic type providers for API server
const supportedProviders = providers.filter((p: Provider) => p.enabled && (p.type === 'openai' || p.type === 'anthropic')) const supportedProviders = providers.filter(
(p: Provider) => p.enabled && (p.type === 'openai' || p.type === 'anthropic')
)
logger.info(`Filtered to ${supportedProviders.length} supported providers from ${providers.length} total providers`) logger.info(`Filtered to ${supportedProviders.length} supported providers from ${providers.length} total providers`)

View File

@ -0,0 +1,33 @@
import { z } from 'zod'

/**
 * Request schema for `GET /v1/models` query parameters.
 *
 * NOTE: the original chained `.default(0).optional()` on `offset`. In zod,
 * `.optional()` applied AFTER `.default()` wraps ZodDefault in ZodOptional,
 * so an `undefined` input short-circuits in ZodOptional and the default is
 * never applied (and the inferred type degrades to `number | undefined`).
 * Ending the chain at `.default(0)` keeps the field omittable on input
 * while guaranteeing `offset` parses to a number (0 when absent).
 */
export const ApiModelsRequestSchema = z.object({
  // Only these two provider families are exposed by the API server.
  provider: z.enum(['openai', 'anthropic']).optional(),
  // Coerced from query-string values; must be a non-negative number.
  offset: z.coerce.number().min(0).default(0),
  // Coerced from query-string values; when present, must be >= 1.
  limit: z.coerce.number().min(1).optional()
})

/**
 * OpenAI-compatible model descriptor, as returned by `GET /v1/models`.
 * Mirrors the OpenAI "model" object with extra provider bookkeeping fields.
 */
export const OpenAICompatibleModelSchema = z.object({
  id: z.string(),
  object: z.literal('model'),
  created: z.number(),
  name: z.string(),
  owned_by: z.string(),
  // Which configured provider the model came from — presumably the provider id; verify against producer.
  provider: z.string().optional(),
  // The provider's native model identifier (may differ from `id`).
  provider_model_id: z.string().optional()
})

/**
 * Response schema for `GET /v1/models`: an OpenAI-style list envelope,
 * optionally annotated with pagination metadata.
 */
export const ApiModelsResponseSchema = z.object({
  object: z.literal('list'),
  data: z.array(OpenAICompatibleModelSchema),
  total: z.number().optional(),
  offset: z.number().optional(),
  limit: z.number().optional()
})

// Inferred TypeScript types
export type ApiModelsRequest = z.infer<typeof ApiModelsRequestSchema>
export type OpenAICompatibleModel = z.infer<typeof OpenAICompatibleModelSchema>
export type ApiModelsResponse = z.infer<typeof ApiModelsResponseSchema>

View File

@ -16,6 +16,7 @@ import type { Message } from './newMessage'
import type { BaseTool, MCPTool } from './tool' import type { BaseTool, MCPTool } from './tool'
export * from './agent' export * from './agent'
export * from './apiModels'
export * from './knowledge' export * from './knowledge'
export * from './mcp' export * from './mcp'
export * from './notification' export * from './notification'