diff --git a/src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts b/src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts
index 179bb54a1e..02ac6de091 100644
--- a/src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts
+++ b/src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts
@@ -3,6 +3,7 @@ import { loggerService } from '@logger'
 import { isSupportedModel } from '@renderer/config/models'
 import type { Provider } from '@renderer/types'
 import { objectKeys } from '@renderer/types'
+import { formatApiHost, withoutTrailingApiVersion } from '@renderer/utils'
 
 import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
 
@@ -16,11 +17,8 @@ export class OVMSClient extends OpenAIAPIClient {
   override async listModels(): Promise {
     try {
       const sdk = await this.getSdkInstance()
-
-      const chatModelsResponse = await sdk.request({
-        method: 'get',
-        path: '../v1/config'
-      })
+      const url = formatApiHost(withoutTrailingApiVersion(this.getBaseURL()), true, 'v1')
+      const chatModelsResponse = await sdk.withOptions({ baseURL: url }).get('/config')
 
       logger.debug(`Chat models response: ${JSON.stringify(chatModelsResponse)}`)
       // Parse the config response to extract model information
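
For context, a minimal sketch of what the new URL construction is assumed to do, based only on how the helpers are called in this diff. `formatApiHost` and `withoutTrailingApiVersion` are imported from `@renderer/utils` as shown above, but their exact signatures, behavior, and the example base URL below are assumptions; the stand-in implementations only illustrate why `GET '/config'` against the rebuilt base can replace the old relative `'../v1/config'` path.

```ts
// Sketch only: hypothetical stand-ins for the @renderer/utils helpers,
// inferred from their call site in the diff; the real implementations may differ.

// Assumed: drops a trailing "/v<N>" API-version segment, e.g. ".../v3" -> "..."
const withoutTrailingApiVersion = (baseURL: string): string =>
  baseURL.replace(/\/v\d+\/?$/, '')

// Assumed: trims trailing slashes and, when asked, appends a version segment.
const formatApiHost = (host: string, appendVersion: boolean, version = 'v1'): string => {
  const trimmed = host.replace(/\/+$/, '')
  return appendVersion ? `${trimmed}/${version}` : trimmed
}

// Hypothetical OVMS base URL that already ends in an API version.
const baseURL = 'http://localhost:8000/v3'
const configBase = formatApiHost(withoutTrailingApiVersion(baseURL), true, 'v1')
// configBase === 'http://localhost:8000/v1', so a client rebased onto configBase
// resolves GET '/config' to http://localhost:8000/v1/config without the
// '../v1/config' relative-path workaround.
```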