feat: add endpoint type support for cherryin provider (#11367)

* feat: add endpoint type support for cherryin provider

* chore: bump @cherrystudio/ai-sdk-provider version to 0.1.1

* chore: bump ai-sdk-provider version to 0.1.3
defi-failure 2025-11-21 21:42:08 +08:00 committed by GitHub
parent cea0058f87
commit c48f222cdb
5 changed files with 40 additions and 5 deletions
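For reference, a minimal usage sketch of the new setting (the apiKey field and model ID below are placeholders not taken from this diff, and model lookup assumes the provider exposes the standard ProviderV2 languageModel() entry point; only the endpointType values come from this commit):

```ts
import { createCherryIn } from '@cherrystudio/ai-sdk-provider'

// Pin every chat model to the Anthropic-style endpoint instead of relying on
// model-ID detection. The apiKey field is assumed here, not shown in the diff.
const cherryin = createCherryIn({
  apiKey: process.env.CHERRYIN_API_KEY ?? '',
  endpointType: 'anthropic'
})

// Assumed ProviderV2 surface; the model ID is illustrative only.
const model = cherryin.languageModel('claude-sonnet')
```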


@@ -1,6 +1,6 @@
{
"name": "@cherrystudio/ai-sdk-provider",
- "version": "0.1.2",
+ "version": "0.1.3",
"description": "Cherry Studio AI SDK provider bundle with CherryIN routing.",
"keywords": [
"ai-sdk",


@@ -67,6 +67,10 @@ export interface CherryInProviderSettings {
* Optional static headers applied to every request.
*/
headers?: HeadersInput
+ /**
+ * Optional endpoint type to distinguish different endpoint behaviors.
+ */
+ endpointType?: 'openai' | 'openai-response' | 'anthropic' | 'gemini' | 'image-generation' | 'jina-rerank'
}
export interface CherryInProvider extends ProviderV2 {
@@ -151,7 +155,8 @@ export const createCherryIn = (options: CherryInProviderSettings = {}): CherryIn
baseURL = DEFAULT_CHERRYIN_BASE_URL,
anthropicBaseURL = DEFAULT_CHERRYIN_ANTHROPIC_BASE_URL,
geminiBaseURL = DEFAULT_CHERRYIN_GEMINI_BASE_URL,
- fetch
+ fetch,
+ endpointType
} = options
const getJsonHeaders = createJsonHeadersGetter(options)
@@ -205,7 +210,7 @@ export const createCherryIn = (options: CherryInProviderSettings = {}): CherryIn
fetch
})
- const createChatModel = (modelId: string, settings: OpenAIProviderSettings = {}) => {
+ const createChatModelByModelId = (modelId: string, settings: OpenAIProviderSettings = {}) => {
if (isAnthropicModel(modelId)) {
return createAnthropicModel(modelId)
}
@@ -223,6 +228,29 @@ export const createCherryIn = (options: CherryInProviderSettings = {}): CherryIn
})
}
+ const createChatModel = (modelId: string, settings: OpenAIProviderSettings = {}) => {
+   if (!endpointType) return createChatModelByModelId(modelId, settings)
+   switch (endpointType) {
+     case 'anthropic':
+       return createAnthropicModel(modelId)
+     case 'gemini':
+       return createGeminiModel(modelId)
+     case 'openai':
+       return createOpenAIChatModel(modelId)
+     case 'openai-response':
+     default:
+       return new OpenAIResponsesLanguageModel(modelId, {
+         provider: `${CHERRYIN_PROVIDER_NAME}.openai`,
+         url,
+         headers: () => ({
+           ...getJsonHeaders(),
+           ...settings.headers
+         }),
+         fetch
+       })
+   }
+ }
const createCompletionModel = (modelId: string, settings: OpenAIProviderSettings = {}) =>
new OpenAICompletionLanguageModel(modelId, {
provider: `${CHERRYIN_PROVIDER_NAME}.completion`,
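Worth noting from the hunk above: a configured endpointType takes precedence over the model-ID heuristics in createChatModelByModelId, and any value without its own case ('openai-response', but also 'image-generation' and 'jina-rerank') falls through to the OpenAI Responses language model. A hedged sketch of the resulting routing (model IDs are placeholders; model lookup again assumes the ProviderV2 languageModel() entry point):

```ts
import { createCherryIn } from '@cherrystudio/ai-sdk-provider'

// Sketch only: each provider instance below is pinned to one endpoint behavior.
const viaAnthropic = createCherryIn({ endpointType: 'anthropic' }).languageModel('model-a') // Anthropic path
const viaGemini = createCherryIn({ endpointType: 'gemini' }).languageModel('model-b') // Gemini path
const viaOpenAIChat = createCherryIn({ endpointType: 'openai' }).languageModel('model-c') // OpenAI chat path
const viaResponses = createCherryIn({ endpointType: 'jina-rerank' }).languageModel('model-d') // no case: falls through to OpenAI Responses
```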


@@ -35,7 +35,7 @@
"peerDependencies": {
"@ai-sdk/google": "^2.0.36",
"@ai-sdk/openai": "^2.0.64",
- "@cherrystudio/ai-sdk-provider": "^0.1.2",
+ "@cherrystudio/ai-sdk-provider": "^0.1.3",
"ai": "^5.0.26"
},
"dependencies": {


@@ -227,6 +227,13 @@ export function providerToAiSdkConfig(
baseConfig.baseURL += aiSdkProviderId === 'google-vertex' ? '/publishers/google' : '/publishers/anthropic/models'
}
+ // cherryin
+ if (aiSdkProviderId === 'cherryin') {
+   if (model.endpoint_type) {
+     extraOptions.endpointType = model.endpoint_type
+   }
+ }
if (hasProviderConfig(aiSdkProviderId) && aiSdkProviderId !== 'openai-compatible') {
const options = ProviderConfigFactory.fromProvider(aiSdkProviderId, baseConfig, extraOptions)
return {


@@ -1909,7 +1909,7 @@ __metadata:
peerDependencies:
"@ai-sdk/google": ^2.0.36
"@ai-sdk/openai": ^2.0.64
- "@cherrystudio/ai-sdk-provider": ^0.1.2
+ "@cherrystudio/ai-sdk-provider": ^0.1.3
ai: ^5.0.26
languageName: unknown
linkType: soft