diff --git a/packages/aiCore/src/core/plugins/built-in/webSearchPlugin/index.ts b/packages/aiCore/src/core/plugins/built-in/webSearchPlugin/index.ts
index d84320cbe0..159f6114a6 100644
--- a/packages/aiCore/src/core/plugins/built-in/webSearchPlugin/index.ts
+++ b/packages/aiCore/src/core/plugins/built-in/webSearchPlugin/index.ts
@@ -22,12 +22,13 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
 
   transformParams: async (params: any, context: AiRequestContext) => {
     const { providerId } = context
-
+    // console.log('providerId', providerId)
     switch (providerId) {
       case 'openai': {
         if (config.openai) {
           if (!params.tools) params.tools = {}
           params.tools.web_search_preview = openai.tools.webSearchPreview(config.openai)
+          // console.log('params.tools', params.tools)
         }
         break
       }
@@ -59,6 +60,14 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
         }
         break
       }
+      // default: {
+      //   if (!params.providerOptions) params.providerOptions = {}
+      //   params.providerOptions['aihubmix'] = {
+      //     web_search: anthropic.tools.webSearch_20250305()
+      //   }
+      //   console.log('params.providerOptions', params.providerOptions)
+      //   break
+      // }
     }
 
     return params
diff --git a/src/renderer/src/aiCore/index_new.ts b/src/renderer/src/aiCore/index_new.ts
index 091f725435..ced3095f7b 100644
--- a/src/renderer/src/aiCore/index_new.ts
+++ b/src/renderer/src/aiCore/index_new.ts
@@ -20,6 +20,7 @@ import {
 import { createPromptToolUsePlugin, webSearchPlugin } from '@cherrystudio/ai-core/core/plugins/built-in'
 import { isDedicatedImageGenerationModel } from '@renderer/config/models'
 import { createVertexProvider, isVertexAIConfigured, isVertexProvider } from '@renderer/hooks/useVertexAI'
+import { getProviderByModel } from '@renderer/services/AssistantService'
 import type { GenerateImageParams, Model, Provider } from '@renderer/types'
 import { formatApiHost } from '@renderer/utils/api'
 import { cloneDeep } from 'lodash'
@@ -29,9 +30,8 @@ import LegacyAiProvider from './index'
 import { AiSdkMiddlewareConfig, buildAiSdkMiddlewares } from './middleware/aisdk/AiSdkMiddlewareBuilder'
 import { CompletionsResult } from './middleware/schemas'
 import reasoningTimePlugin from './plugins/reasoningTimePlugin'
-import { getAiSdkProviderId } from './provider/factory'
-import { getProviderByModel } from '@renderer/services/AssistantService'
 import { createAihubmixProvider } from './provider/aihubmix'
+import { getAiSdkProviderId } from './provider/factory'
 
 function getActualProvider(model: Model): Provider {
   const provider = getProviderByModel(model)
diff --git a/src/renderer/src/aiCore/middleware/aisdk/AiSdkMiddlewareBuilder.ts b/src/renderer/src/aiCore/middleware/aisdk/AiSdkMiddlewareBuilder.ts
index e951e4b8e3..ca0bfcbf71 100644
--- a/src/renderer/src/aiCore/middleware/aisdk/AiSdkMiddlewareBuilder.ts
+++ b/src/renderer/src/aiCore/middleware/aisdk/AiSdkMiddlewareBuilder.ts
@@ -144,13 +144,14 @@ function addProviderSpecificMiddlewares(builder: AiSdkMiddlewareBuilder, config:
     case 'anthropic':
       // Anthropic特定中间件
      break
-    case 'openai':
+    case 'openai': {
      const tagName = config.model?.id.includes('gemini') ? tagNameArray[1] : tagNameArray[0]
      builder.add({
        name: 'thinking-tag-extraction',
        middleware: extractReasoningMiddleware({ tagName })
      })
      break
+    }
     case 'gemini':
       // Gemini特定中间件
      break
diff --git a/src/renderer/src/aiCore/utils/options.ts b/src/renderer/src/aiCore/utils/options.ts
index fd0198c4e3..00e6385d9e 100644
--- a/src/renderer/src/aiCore/utils/options.ts
+++ b/src/renderer/src/aiCore/utils/options.ts
@@ -1,4 +1,3 @@
-import { getDefaultModel, getProviderByModel } from '@renderer/services/AssistantService'
 import { Assistant, Model, Provider } from '@renderer/types'
 
 import { getAiSdkProviderId } from '../provider/factory'
diff --git a/src/renderer/src/services/ApiService.ts b/src/renderer/src/services/ApiService.ts
index 58827216a0..3849ee2c67 100644
--- a/src/renderer/src/services/ApiService.ts
+++ b/src/renderer/src/services/ApiService.ts
@@ -26,13 +26,7 @@ import { isEmpty, takeRight } from 'lodash'
 
 import AiProvider from '../aiCore'
 import AiProviderNew from '../aiCore/index_new'
-import {
-  getAssistantProvider,
-  getDefaultModel,
-  getProviderByModel,
-  getTopNamingModel,
-  getTranslateModel
-} from './AssistantService'
+import { getDefaultModel, getProviderByModel, getTopNamingModel, getTranslateModel } from './AssistantService'
 import { getDefaultAssistant } from './AssistantService'
 
 // // TODO:考虑拆开
@@ -323,6 +317,7 @@ export async function fetchChatCompletion({
     provider: provider,
     enableReasoning: capabilities.enableReasoning,
     enableTool: assistant.settings?.toolUseMode === 'prompt',
+    enableWebSearch: capabilities.enableWebSearch,
     mcpTools
   }
 