♻️ refactor: enhance logging and provider handling for Anthropic integration

Vaayne 2025-09-29 14:38:41 +08:00
parent 5b98ef5b3d
commit cc871b7a72
7 changed files with 59 additions and 168 deletions

View File

@@ -77,7 +77,21 @@ export function getSdkClient(provider: Provider, oauthToken?: string | null): An
? provider.apiHost
: (provider.anthropicApiHost && provider.anthropicApiHost.trim()) || provider.apiHost
logger.debug('Anthropic API baseURL', { baseURL })
logger.debug("Anthropic API baseURL", { baseURL, providerId: provider.id });
if (provider.id === "aihubmix") {
return new Anthropic({
apiKey: provider.apiKey,
baseURL,
dangerouslyAllowBrowser: true,
defaultHeaders: {
"anthropic-beta": "output-128k-2025-02-19",
"APP-Code": "MLTG2087",
...provider.extra_headers,
},
});
}
return new Anthropic({
apiKey: provider.apiKey,
authToken: provider.apiKey,

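For context, a minimal usage sketch (not part of this diff) of the aihubmix branch added above; the key, display name and model id are hypothetical, and getSdkClient/Provider are assumed to be in scope:

const aihubmix: Provider = {
  id: 'aihubmix',
  type: 'anthropic',
  name: 'AiHubMix',                            // hypothetical display name
  apiKey: 'sk-xxxx',                           // hypothetical key
  apiHost: 'https://aihubmix.com',
  anthropicApiHost: 'https://aihubmix.com',    // value the migration below sets for aihubmix
  models: []
}

async function ping() {
  // Because provider.id === 'aihubmix', the returned client sends the 'anthropic-beta'
  // and 'APP-Code' default headers shown above on every request.
  const client = getSdkClient(aihubmix)
  return client.messages.create({
    model: 'claude-3-5-sonnet-latest',         // hypothetical model id
    max_tokens: 1024,
    messages: [{ role: 'user', content: 'ping' }]
  })
}
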
View File

@@ -65,7 +65,13 @@ async function handleStreamingResponse(
res.write('data: [DONE]\n\n')
flushStream()
} catch (streamError: any) {
logger.error('Stream error', { error: streamError })
logger.error('Stream error', {
error: streamError,
provider: provider.id,
model: request.model,
apiHost: provider.apiHost,
anthropicApiHost: provider.anthropicApiHost
})
res.write(
`data: ${JSON.stringify({
type: 'error',
@@ -156,9 +162,13 @@ async function processMessageRequest(
})
}
logger.silly('Processing message request', {
request,
provider: provider.id
logger.info('Processing anthropic messages request', {
provider: provider.id,
apiHost: provider.apiHost,
anthropicApiHost: provider.anthropicApiHost,
model: request.model,
stream: request.stream,
thinking: request.thinking
})
// Handle streaming

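For context, a minimal client-side sketch (not part of this diff) of consuming the stream written above: frames arrive as 'data: <json>' blocks separated by blank lines, the stream ends with 'data: [DONE]', and failures surface as a { type: 'error', ... } frame (the rest of the error payload is truncated in this hunk). readMessagesStream is a hypothetical helper:

async function readMessagesStream(res: Response): Promise<void> {
  const reader = res.body!.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  for (;;) {
    const { value, done } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    let sep: number
    while ((sep = buffer.indexOf('\n\n')) >= 0) {
      const frame = buffer.slice(0, sep).trim()
      buffer = buffer.slice(sep + 2)
      if (!frame.startsWith('data: ')) continue
      const payload = frame.slice('data: '.length)
      if (payload === '[DONE]') return                 // terminator written above
      const event = JSON.parse(payload)
      if (event.type === 'error') {
        throw new Error('anthropic stream error')      // shape per the error frame above
      }
      // ...handle regular streaming events here...
    }
  }
}
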
View File

@@ -27,9 +27,27 @@ export class ModelsService {
for (const model of models) {
const provider = providers.find((p) => p.id === model.provider)
if (!provider || (provider.isAnthropicModel && !provider.isAnthropicModel(model))) {
continue
logger.debug(
`Processing model ${model.id} from provider ${model.provider}`,
{ isAnthropicModel: provider?.isAnthropicModel }
);
if (
!provider ||
(filter.providerType === "anthropic" &&
provider.isAnthropicModel &&
!provider.isAnthropicModel(model))
) {
continue;
}
// Special case: "aihubmix" should already be covered by the condition above, but keep this check as a safeguard
if (
provider.id === "aihubmix" &&
filter.providerType === "anthropic" &&
!model.id.includes("claude")
) {
continue;
}
const openAIModel = transformModelToOpenAI(model, provider)
const fullModelId = openAIModel.id // This is already in format "provider:model_id"

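For reference, the filtering rule above condensed into a standalone predicate (a sketch, not part of this diff; isListableModel is hypothetical and filter.providerType is passed as a plain argument):

function isListableModel(model: Model, provider: Provider | undefined, providerType?: string): boolean {
  if (!provider) return false
  // Anthropic-only listings drop models the provider itself marks as non-Anthropic.
  if (providerType === 'anthropic' && provider.isAnthropicModel && !provider.isAnthropicModel(model)) {
    return false
  }
  // aihubmix safeguard: only claude models are exposed through the Anthropic route.
  if (provider.id === 'aihubmix' && providerType === 'anthropic' && !model.id.includes('claude')) {
    return false
  }
  return true
}
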
View File

@@ -85,7 +85,10 @@ class ClaudeCodeService implements AgentServiceInterface {
const env = {
...loginShellEnvWithoutProxies,
ANTHROPIC_API_KEY: apiConfig.apiKey,
ANTHROPIC_AUTH_TOKEN: apiConfig.apiKey,
ANTHROPIC_BASE_URL: `http://${apiConfig.host}:${apiConfig.port}/${modelInfo.provider.id}`,
ANTHROPIC_MODEL: modelInfo.modelId,
ANTHROPIC_SMALL_FAST_MODEL: modelInfo.modelId,
ELECTRON_RUN_AS_NODE: '1',
ELECTRON_NO_ATTACH_CONSOLE: '1'
}
@@ -97,7 +100,7 @@ class ClaudeCodeService implements AgentServiceInterface {
abortController,
cwd,
env,
model: modelInfo.modelId,
// model: modelInfo.modelId,
pathToClaudeCodeExecutable: this.claudeExecutablePath,
stderr: (chunk: string) => {
logger.warn('claude stderr', { chunk })

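For context, a sketch (not part of this diff) of the environment the Claude Code process now receives; host, port and model id are hypothetical, while the variable names come from the hunk above:

const apiConfig = { apiKey: 'sk-xxxx', host: '127.0.0.1', port: 23333 }                  // assumed shape
const modelInfo = { provider: { id: 'aihubmix' }, modelId: 'claude-3-5-sonnet-latest' }  // assumed shape

const env = {
  ANTHROPIC_API_KEY: apiConfig.apiKey,
  ANTHROPIC_AUTH_TOKEN: apiConfig.apiKey,
  // Requests are routed through the local messages proxy, scoped per provider id:
  ANTHROPIC_BASE_URL: `http://${apiConfig.host}:${apiConfig.port}/${modelInfo.provider.id}`,
  // With the `model` option commented out above, these env vars select the model instead:
  ANTHROPIC_MODEL: modelInfo.modelId,
  ANTHROPIC_SMALL_FAST_MODEL: modelInfo.modelId
}
// e.g. env.ANTHROPIC_BASE_URL === 'http://127.0.0.1:23333/aihubmix'
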
View File

@@ -2618,7 +2618,7 @@ const migrateConfig = {
provider.anthropicApiHost = 'https://api-inference.modelscope.cn'
break
case 'aihubmix':
provider.anthropicApiHost = 'https://aihubmix.com/anthropic'
provider.anthropicApiHost = "https://aihubmix.com";
provider.isAnthropicModel = (m: Model) => m.id.includes('claude')
break
}

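For reference, a sketch of the migration's effect on an existing aihubmix provider entry (not part of this diff; getExistingProvider is a hypothetical lookup):

const provider = getExistingProvider('aihubmix')   // hypothetical lookup
provider.anthropicApiHost = 'https://aihubmix.com'
provider.isAnthropicModel = (m: Model) => m.id.includes('claude')

provider.isAnthropicModel({ id: 'claude-3-5-sonnet-latest' } as Model)  // true
provider.isAnthropicModel({ id: 'gpt-4o' } as Model)                    // false
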
View File

@@ -13,7 +13,6 @@ import type { FileMetadata } from './file'
import { KnowledgeBase, KnowledgeReference } from './knowledge'
import { MCPConfigSample, McpServerType } from './mcp'
import type { Message } from './newMessage'
import type { ServiceTier } from './provider'
import type { BaseTool, MCPTool } from './tool'
export * from './agent'
@@ -224,162 +223,6 @@ export type User = {
email: string
}
// undefined is treated as supported (supported by default)
export type ProviderApiOptions = {
/** Whether array-type message content is not supported */
isNotSupportArrayContent?: boolean
/** Whether the stream_options parameter is not supported */
isNotSupportStreamOptions?: boolean
/**
* @deprecated
* message role developer */
isNotSupportDeveloperRole?: boolean
/* Whether the developer role is supported for messages */
isSupportDeveloperRole?: boolean
/**
* @deprecated
* service_tier . Only for OpenAI Models. */
isNotSupportServiceTier?: boolean
/* Whether the service_tier parameter is supported. Only for OpenAI Models. */
isSupportServiceTier?: boolean
/** Whether the enable_thinking parameter is not supported */
isNotSupportEnableThinking?: boolean
}
export type Provider = {
id: string
type: ProviderType
name: string
apiKey: string
apiHost: string
anthropicApiHost?: string
isAnthropicModel?: (m: Model) => boolean
apiVersion?: string
models: Model[]
enabled?: boolean
isSystem?: boolean
isAuthed?: boolean
rateLimit?: number
// API options
apiOptions?: ProviderApiOptions
serviceTier?: ServiceTier
/** @deprecated */
isNotSupportArrayContent?: boolean
/** @deprecated */
isNotSupportStreamOptions?: boolean
/** @deprecated */
isNotSupportDeveloperRole?: boolean
/** @deprecated */
isNotSupportServiceTier?: boolean
authType?: 'apiKey' | 'oauth'
isVertex?: boolean
notes?: string
extra_headers?: Record<string, string>
}
export const SystemProviderIds = {
// cherryin: 'cherryin',
silicon: 'silicon',
aihubmix: 'aihubmix',
ocoolai: 'ocoolai',
deepseek: 'deepseek',
ppio: 'ppio',
alayanew: 'alayanew',
qiniu: 'qiniu',
dmxapi: 'dmxapi',
burncloud: 'burncloud',
tokenflux: 'tokenflux',
'302ai': '302ai',
cephalon: 'cephalon',
lanyun: 'lanyun',
ph8: 'ph8',
openrouter: 'openrouter',
ollama: 'ollama',
'new-api': 'new-api',
lmstudio: 'lmstudio',
anthropic: 'anthropic',
openai: 'openai',
'azure-openai': 'azure-openai',
gemini: 'gemini',
vertexai: 'vertexai',
github: 'github',
copilot: 'copilot',
zhipu: 'zhipu',
yi: 'yi',
moonshot: 'moonshot',
baichuan: 'baichuan',
dashscope: 'dashscope',
stepfun: 'stepfun',
doubao: 'doubao',
infini: 'infini',
minimax: 'minimax',
groq: 'groq',
together: 'together',
fireworks: 'fireworks',
nvidia: 'nvidia',
grok: 'grok',
hyperbolic: 'hyperbolic',
mistral: 'mistral',
jina: 'jina',
perplexity: 'perplexity',
modelscope: 'modelscope',
xirang: 'xirang',
hunyuan: 'hunyuan',
'tencent-cloud-ti': 'tencent-cloud-ti',
'baidu-cloud': 'baidu-cloud',
gpustack: 'gpustack',
voyageai: 'voyageai',
'aws-bedrock': 'aws-bedrock',
poe: 'poe',
aionly: 'aionly',
longcat: 'longcat'
} as const
export type SystemProviderId = keyof typeof SystemProviderIds
export const isSystemProviderId = (id: string): id is SystemProviderId => {
return Object.hasOwn(SystemProviderIds, id)
}
export type SystemProvider = Provider & {
id: SystemProviderId
isSystem: true
apiOptions?: never
}
export type VertexProvider = Provider & {
googleCredentials: {
privateKey: string
clientEmail: string
}
project: string
location: string
}
/**
 * Determines whether a provider is a system provider, using `provider.isSystem`
 * @param provider - the Provider object
 * @returns whether the provider is a built-in system provider
 */
export const isSystemProvider = (provider: Provider): provider is SystemProvider => {
return isSystemProviderId(provider.id) && !!provider.isSystem
}
export type ProviderType =
| 'openai'
| 'openai-response'
| 'anthropic'
| 'gemini'
| 'qwenlm'
| 'azure-openai'
| 'vertexai'
| 'mistral'
| 'aws-bedrock'
| 'vertex-anthropic'
export type ModelType = 'text' | 'vision' | 'embedding' | 'reasoning' | 'function_calling' | 'web_search' | 'rerank'
export type ModelTag = Exclude<ModelType, 'text'> | 'free'

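For reference, a small sketch (not part of this diff) of how the type guards defined in the block above narrow a Provider; someProvider is a hypothetical value:

declare const someProvider: Provider   // hypothetical value for illustration

if (isSystemProvider(someProvider)) {
  // Narrowed to SystemProvider: id is a SystemProviderId and apiOptions is typed as never.
  const id: SystemProviderId = someProvider.id
  console.log(`built-in provider: ${id}`)
}
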
View File

@@ -77,6 +77,8 @@ export type Provider = {
name: string
apiKey: string
apiHost: string
anthropicApiHost?: string
isAnthropicModel?: (m: Model) => boolean
apiVersion?: string
models: Model[]
enabled?: boolean
@@ -104,7 +106,7 @@ export type Provider = {
}
export const SystemProviderIds = {
cherryin: 'cherryin',
// cherryin: 'cherryin',
silicon: 'silicon',
aihubmix: 'aihubmix',
ocoolai: 'ocoolai',
@@ -157,7 +159,8 @@ export const SystemProviderIds = {
voyageai: 'voyageai',
'aws-bedrock': 'aws-bedrock',
poe: 'poe',
aionly: 'aionly'
aionly: 'aionly',
longcat: 'longcat'
} as const
export type SystemProviderId = keyof typeof SystemProviderIds
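For reference, with the id-list changes above, a sketch (not part of this diff) of what isSystemProviderId now reports, assuming the same Object.hasOwn guard shown in the removed block earlier:

isSystemProviderId('longcat')   // true  - newly added above
isSystemProviderId('cherryin')  // false - commented out above
isSystemProviderId('aihubmix')  // true  - unchanged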