diff --git a/.yarn/patches/openai-npm-5.12.2-30b075401c.patch b/.yarn/patches/openai-npm-5.12.2-30b075401c.patch
deleted file mode 100644
index 29b92dcc7..000000000
Binary files a/.yarn/patches/openai-npm-5.12.2-30b075401c.patch and /dev/null differ
diff --git a/package.json b/package.json
index f74f14fc4..9c4398cdc 100644
--- a/package.json
+++ b/package.json
@@ -126,6 +126,7 @@
     "@cherrystudio/embedjs-ollama": "^0.1.31",
     "@cherrystudio/embedjs-openai": "^0.1.31",
     "@cherrystudio/extension-table-plus": "workspace:^",
+    "@cherrystudio/openai": "^6.5.0",
     "@dnd-kit/core": "^6.3.1",
     "@dnd-kit/modifiers": "^9.0.0",
     "@dnd-kit/sortable": "^10.0.0",
@@ -297,7 +298,6 @@
     "motion": "^12.10.5",
     "notion-helper": "^1.3.22",
     "npx-scope-finder": "^1.2.0",
-    "openai": "patch:openai@npm%3A5.12.2#~/.yarn/patches/openai-npm-5.12.2-30b075401c.patch",
     "oxlint": "^1.22.0",
     "oxlint-tsgolint": "^0.2.0",
     "p-queue": "^8.1.0",
@@ -377,8 +377,8 @@
     "file-stream-rotator@npm:^0.6.1": "patch:file-stream-rotator@npm%3A0.6.1#~/.yarn/patches/file-stream-rotator-npm-0.6.1-eab45fb13d.patch",
     "libsql@npm:^0.4.4": "patch:libsql@npm%3A0.4.7#~/.yarn/patches/libsql-npm-0.4.7-444e260fb1.patch",
     "node-abi": "4.12.0",
-    "openai@npm:^4.77.0": "patch:openai@npm%3A5.12.2#~/.yarn/patches/openai-npm-5.12.2-30b075401c.patch",
-    "openai@npm:^4.87.3": "patch:openai@npm%3A5.12.2#~/.yarn/patches/openai-npm-5.12.2-30b075401c.patch",
+    "openai@npm:^4.77.0": "npm:@cherrystudio/openai@6.5.0",
+    "openai@npm:^4.87.3": "npm:@cherrystudio/openai@6.5.0",
     "pdf-parse@npm:1.1.1": "patch:pdf-parse@npm%3A1.1.1#~/.yarn/patches/pdf-parse-npm-1.1.1-04a6109b2a.patch",
     "pkce-challenge@npm:^4.1.0": "patch:pkce-challenge@npm%3A4.1.0#~/.yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch",
     "tar-fs": "^2.1.4",
diff --git a/scripts/auto-translate-i18n.ts b/scripts/auto-translate-i18n.ts
index 6a90f5b23..681e41079 100644
--- a/scripts/auto-translate-i18n.ts
+++ b/scripts/auto-translate-i18n.ts
@@ -2,9 +2,9 @@
  * 该脚本用于少量自动翻译所有baseLocale以外的文本。待翻译文案必须以[to be translated]开头
  *
  */
+import OpenAI from '@cherrystudio/openai'
 import cliProgress from 'cli-progress'
 import * as fs from 'fs'
-import OpenAI from 'openai'
 import * as path from 'path'

 const localesDir = path.join(__dirname, '../src/renderer/src/i18n/locales')
diff --git a/scripts/update-i18n.ts b/scripts/update-i18n.ts
index 72fcca8ab..103623bf1 100644
--- a/scripts/update-i18n.ts
+++ b/scripts/update-i18n.ts
@@ -4,9 +4,9 @@
  * API_KEY=sk-xxxx BASE_URL=xxxx MODEL=xxxx ts-node scripts/update-i18n.ts
  */
+import OpenAI from '@cherrystudio/openai'
 import cliProgress from 'cli-progress'
 import fs from 'fs'
-import OpenAI from 'openai'

 type I18NValue = string | { [key: string]: I18NValue }
 type I18N = { [key: string]: I18NValue }
diff --git a/src/main/apiServer/routes/chat.ts b/src/main/apiServer/routes/chat.ts
index 45b95b212..baf29aae0 100644
--- a/src/main/apiServer/routes/chat.ts
+++ b/src/main/apiServer/routes/chat.ts
@@ -1,5 +1,5 @@
+import { ChatCompletionCreateParams } from '@cherrystudio/openai/resources'
 import express, { Request, Response } from 'express'
-import { ChatCompletionCreateParams } from 'openai/resources'

 import { loggerService } from '../../services/LoggerService'
 import {
diff --git a/src/main/apiServer/services/chat-completion.ts b/src/main/apiServer/services/chat-completion.ts
index bc0e3bb23..63eed3ed8 100644
--- a/src/main/apiServer/services/chat-completion.ts
+++ b/src/main/apiServer/services/chat-completion.ts
@@ -1,6 +1,6 @@
+import OpenAI from '@cherrystudio/openai'
+import { ChatCompletionCreateParams, ChatCompletionCreateParamsStreaming } from '@cherrystudio/openai/resources'
 import { Provider } from '@types'
-import OpenAI from 'openai'
-import { ChatCompletionCreateParams, ChatCompletionCreateParamsStreaming } from 'openai/resources'

 import { loggerService } from '../../services/LoggerService'
 import { ModelValidationError, validateModelId } from '../utils'
diff --git a/src/main/services/remotefile/OpenAIService.ts b/src/main/services/remotefile/OpenAIService.ts
index b7f8d3ea3..734bf6f26 100644
--- a/src/main/services/remotefile/OpenAIService.ts
+++ b/src/main/services/remotefile/OpenAIService.ts
@@ -1,8 +1,8 @@
+import OpenAI from '@cherrystudio/openai'
 import { loggerService } from '@logger'
 import { fileStorage } from '@main/services/FileStorage'
 import { FileListResponse, FileMetadata, FileUploadResponse, Provider } from '@types'
 import * as fs from 'fs'
-import OpenAI from 'openai'

 import { CacheService } from '../CacheService'
 import { BaseFileService } from './BaseFileService'
diff --git a/src/renderer/src/aiCore/legacy/clients/cherryai/CherryAiAPIClient.ts b/src/renderer/src/aiCore/legacy/clients/cherryai/CherryAiAPIClient.ts
index 8f8969bd5..08e4d9df3 100644
--- a/src/renderer/src/aiCore/legacy/clients/cherryai/CherryAiAPIClient.ts
+++ b/src/renderer/src/aiCore/legacy/clients/cherryai/CherryAiAPIClient.ts
@@ -1,6 +1,6 @@
+import OpenAI from '@cherrystudio/openai'
 import { Provider } from '@renderer/types'
 import { OpenAISdkParams, OpenAISdkRawOutput } from '@renderer/types/sdk'
-import OpenAI from 'openai'

 import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
diff --git a/src/renderer/src/aiCore/legacy/clients/openai/OpenAIApiClient.ts b/src/renderer/src/aiCore/legacy/clients/openai/OpenAIApiClient.ts
index 839955a4f..746ee362b 100644
--- a/src/renderer/src/aiCore/legacy/clients/openai/OpenAIApiClient.ts
+++ b/src/renderer/src/aiCore/legacy/clients/openai/OpenAIApiClient.ts
@@ -1,3 +1,9 @@
+import OpenAI, { AzureOpenAI } from '@cherrystudio/openai'
+import {
+  ChatCompletionContentPart,
+  ChatCompletionContentPartRefusal,
+  ChatCompletionTool
+} from '@cherrystudio/openai/resources'
 import { loggerService } from '@logger'
 import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant'
 import {
@@ -78,8 +84,6 @@ import {
 } from '@renderer/utils/mcp-tools'
 import { findFileBlocks, findImageBlocks } from '@renderer/utils/messageUtils/find'
 import { t } from 'i18next'
-import OpenAI, { AzureOpenAI } from 'openai'
-import { ChatCompletionContentPart, ChatCompletionContentPartRefusal, ChatCompletionTool } from 'openai/resources'

 import { GenericChunk } from '../../middleware/schemas'
 import { RequestTransformer, ResponseChunkTransformer, ResponseChunkTransformerContext } from '../types'
diff --git a/src/renderer/src/aiCore/legacy/clients/openai/OpenAIBaseClient.ts b/src/renderer/src/aiCore/legacy/clients/openai/OpenAIBaseClient.ts
index 71ed1c95f..8a0a3fe0f 100644
--- a/src/renderer/src/aiCore/legacy/clients/openai/OpenAIBaseClient.ts
+++ b/src/renderer/src/aiCore/legacy/clients/openai/OpenAIBaseClient.ts
@@ -1,3 +1,4 @@
+import OpenAI, { AzureOpenAI } from '@cherrystudio/openai'
 import { loggerService } from '@logger'
 import { COPILOT_DEFAULT_HEADERS } from '@renderer/aiCore/provider/constants'
 import {
@@ -25,7 +26,6 @@ import {
   ReasoningEffortOptionalParams
 } from '@renderer/types/sdk'
 import { formatApiHost } from '@renderer/utils/api'
-import OpenAI, { AzureOpenAI } from 'openai'

 import { BaseApiClient } from '../BaseApiClient'
diff --git a/src/renderer/src/aiCore/legacy/clients/openai/OpenAIResponseAPIClient.ts b/src/renderer/src/aiCore/legacy/clients/openai/OpenAIResponseAPIClient.ts
index 9e400a9f8..6c3d12bb4 100644
--- a/src/renderer/src/aiCore/legacy/clients/openai/OpenAIResponseAPIClient.ts
+++ b/src/renderer/src/aiCore/legacy/clients/openai/OpenAIResponseAPIClient.ts
@@ -1,3 +1,5 @@
+import OpenAI, { AzureOpenAI } from '@cherrystudio/openai'
+import { ResponseInput } from '@cherrystudio/openai/resources/responses/responses'
 import { loggerService } from '@logger'
 import { GenericChunk } from '@renderer/aiCore/legacy/middleware/schemas'
 import { CompletionsContext } from '@renderer/aiCore/legacy/middleware/types'
@@ -45,8 +47,6 @@ import { findFileBlocks, findImageBlocks } from '@renderer/utils/messageUtils/fi
 import { MB } from '@shared/config/constant'
 import { t } from 'i18next'
 import { isEmpty } from 'lodash'
-import OpenAI, { AzureOpenAI } from 'openai'
-import { ResponseInput } from 'openai/resources/responses/responses'

 import { RequestTransformer, ResponseChunkTransformer } from '../types'
 import { OpenAIAPIClient } from './OpenAIApiClient'
@@ -343,7 +343,27 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
       }
       switch (message.type) {
         case 'function_call_output':
-          sum += estimateTextTokens(message.output)
+          {
+            let str = ''
+            if (typeof message.output === 'string') {
+              str = message.output
+            } else {
+              for (const part of message.output) {
+                switch (part.type) {
+                  case 'input_text':
+                    str += part.text
+                    break
+                  case 'input_image':
+                    str += part.image_url || ''
+                    break
+                  case 'input_file':
+                    str += part.file_data || ''
+                    break
+                }
+              }
+            }
+            sum += estimateTextTokens(str)
+          }
           break
         case 'function_call':
           sum += estimateTextTokens(message.arguments)
diff --git a/src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts b/src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts
index d1fd4ed8b..5dc91550a 100644
--- a/src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts
+++ b/src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts
@@ -1,7 +1,7 @@
+import OpenAI from '@cherrystudio/openai'
 import { loggerService } from '@logger'
 import { isSupportedModel } from '@renderer/config/models'
 import { objectKeys, Provider } from '@renderer/types'
-import OpenAI from 'openai'

 import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
diff --git a/src/renderer/src/aiCore/legacy/clients/ppio/PPIOAPIClient.ts b/src/renderer/src/aiCore/legacy/clients/ppio/PPIOAPIClient.ts
index fd282ac81..57b54b961 100644
--- a/src/renderer/src/aiCore/legacy/clients/ppio/PPIOAPIClient.ts
+++ b/src/renderer/src/aiCore/legacy/clients/ppio/PPIOAPIClient.ts
@@ -1,7 +1,7 @@
+import OpenAI from '@cherrystudio/openai'
 import { loggerService } from '@logger'
 import { isSupportedModel } from '@renderer/config/models'
 import { Model, Provider } from '@renderer/types'
-import OpenAI from 'openai'

 import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
diff --git a/src/renderer/src/aiCore/legacy/clients/types.ts b/src/renderer/src/aiCore/legacy/clients/types.ts
index a6f4adc38..6d10f4285 100644
--- a/src/renderer/src/aiCore/legacy/clients/types.ts
+++ b/src/renderer/src/aiCore/legacy/clients/types.ts
@@ -1,4 +1,5 @@
 import Anthropic from '@anthropic-ai/sdk'
+import OpenAI from '@cherrystudio/openai'
 import { Assistant, MCPTool, MCPToolResponse, Model, ToolCallResponse } from '@renderer/types'
 import { Provider } from '@renderer/types'
 import {
@@ -13,7 +14,6 @@ import {
   SdkTool,
   SdkToolCall
 } from '@renderer/types/sdk'
-import OpenAI from 'openai'

 import { CompletionsParams, GenericChunk } from '../middleware/schemas'
 import { CompletionsContext } from '../middleware/types'
diff --git a/src/renderer/src/aiCore/legacy/clients/zhipu/ZhipuAPIClient.ts b/src/renderer/src/aiCore/legacy/clients/zhipu/ZhipuAPIClient.ts
index c1d7b8f56..c04e08fb7 100644
--- a/src/renderer/src/aiCore/legacy/clients/zhipu/ZhipuAPIClient.ts
+++ b/src/renderer/src/aiCore/legacy/clients/zhipu/ZhipuAPIClient.ts
@@ -1,7 +1,7 @@
+import OpenAI from '@cherrystudio/openai'
 import { loggerService } from '@logger'
 import { Provider } from '@renderer/types'
 import { GenerateImageParams } from '@renderer/types'
-import OpenAI from 'openai'

 import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
diff --git a/src/renderer/src/aiCore/legacy/middleware/feat/ImageGenerationMiddleware.ts b/src/renderer/src/aiCore/legacy/middleware/feat/ImageGenerationMiddleware.ts
index 0f89e8aca..de8034d51 100644
--- a/src/renderer/src/aiCore/legacy/middleware/feat/ImageGenerationMiddleware.ts
+++ b/src/renderer/src/aiCore/legacy/middleware/feat/ImageGenerationMiddleware.ts
@@ -1,10 +1,10 @@
+import OpenAI from '@cherrystudio/openai'
+import { toFile } from '@cherrystudio/openai/uploads'
 import { isDedicatedImageGenerationModel } from '@renderer/config/models'
 import FileManager from '@renderer/services/FileManager'
 import { ChunkType } from '@renderer/types/chunk'
 import { findImageBlocks, getMainTextContent } from '@renderer/utils/messageUtils/find'
 import { defaultTimeout } from '@shared/config/constant'
-import OpenAI from 'openai'
-import { toFile } from 'openai/uploads'

 import { BaseApiClient } from '../../clients/BaseApiClient'
 import { CompletionsParams, CompletionsResult, GenericChunk } from '../schemas'
diff --git a/src/renderer/src/aiCore/prepareParams/fileProcessor.ts b/src/renderer/src/aiCore/prepareParams/fileProcessor.ts
index 9339e61a4..9e46f0c62 100644
--- a/src/renderer/src/aiCore/prepareParams/fileProcessor.ts
+++ b/src/renderer/src/aiCore/prepareParams/fileProcessor.ts
@@ -3,6 +3,7 @@
  * 处理文件内容提取、文件格式转换、文件上传等逻辑
  */

+import type OpenAI from '@cherrystudio/openai'
 import { loggerService } from '@logger'
 import { getProviderByModel } from '@renderer/services/AssistantService'
 import type { FileMetadata, Message, Model } from '@renderer/types'
@@ -10,7 +11,6 @@ import { FileTypes } from '@renderer/types'
 import { FileMessageBlock } from '@renderer/types/newMessage'
 import { findFileBlocks } from '@renderer/utils/messageUtils/find'
 import type { FilePart, TextPart } from 'ai'
-import type OpenAI from 'openai'

 import { getAiSdkProviderId } from '../provider/factory'
 import { getFileSizeLimit, supportsImageInput, supportsLargeFileUpload, supportsPdfInput } from './modelCapabilities'
diff --git a/src/renderer/src/config/models/utils.ts b/src/renderer/src/config/models/utils.ts
index 1759a93d1..5d8be539d 100644
--- a/src/renderer/src/config/models/utils.ts
+++ b/src/renderer/src/config/models/utils.ts
@@ -1,7 +1,7 @@
+import OpenAI from '@cherrystudio/openai'
 import { isEmbeddingModel, isRerankModel } from '@renderer/config/models/embedding'
 import { Model } from '@renderer/types'
 import { getLowerBaseModelName } from '@renderer/utils'
-import OpenAI from 'openai'

 import { WEB_SEARCH_PROMPT_FOR_OPENROUTER } from '../prompts'
 import { getWebSearchTools } from '../tools'
diff --git a/src/renderer/src/config/tools.ts b/src/renderer/src/config/tools.ts
index 18e2d6213..f7c06a4ef 100644
--- a/src/renderer/src/config/tools.ts
+++ b/src/renderer/src/config/tools.ts
@@ -1,5 +1,5 @@
+import { ChatCompletionTool } from '@cherrystudio/openai/resources'
 import { Model } from '@renderer/types'
-import { ChatCompletionTool } from 'openai/resources'

 import { WEB_SEARCH_PROMPT_FOR_ZHIPU } from './prompts'
diff --git a/src/renderer/src/services/ModelMessageService.ts b/src/renderer/src/services/ModelMessageService.ts
index 40609fa39..d5953db7c 100644
--- a/src/renderer/src/services/ModelMessageService.ts
+++ b/src/renderer/src/services/ModelMessageService.ts
@@ -1,6 +1,6 @@
+import { ChatCompletionContentPart, ChatCompletionMessageParam } from '@cherrystudio/openai/resources'
 import { Model } from '@renderer/types'
 import { findLast } from 'lodash'
-import { ChatCompletionContentPart, ChatCompletionMessageParam } from 'openai/resources'

 export function processReqMessages(
   model: Model,
diff --git a/src/renderer/src/services/SpanManagerService.ts b/src/renderer/src/services/SpanManagerService.ts
index bfb59cb25..331db3881 100644
--- a/src/renderer/src/services/SpanManagerService.ts
+++ b/src/renderer/src/services/SpanManagerService.ts
@@ -1,4 +1,5 @@
 import { MessageStream } from '@anthropic-ai/sdk/resources/messages/messages'
+import { Stream } from '@cherrystudio/openai/streaming'
 import { loggerService } from '@logger'
 import { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
 import { cleanContext, endContext, getContext, startContext } from '@mcp-trace/trace-web'
@@ -16,7 +17,6 @@ import { Model, Topic } from '@renderer/types'
 import type { Message } from '@renderer/types/newMessage'
 import { MessageBlockType } from '@renderer/types/newMessage'
 import { SdkRawChunk } from '@renderer/types/sdk'
-import { Stream } from 'openai/streaming'

 const logger = loggerService.withContext('SpanManagerService')
diff --git a/src/renderer/src/services/__tests__/ApiService.test.ts b/src/renderer/src/services/__tests__/ApiService.test.ts
index 5702561aa..a5dabc112 100644
--- a/src/renderer/src/services/__tests__/ApiService.test.ts
+++ b/src/renderer/src/services/__tests__/ApiService.test.ts
@@ -6,6 +6,8 @@ import {
   WebSearchResultBlock,
   WebSearchToolResultError
 } from '@anthropic-ai/sdk/resources/messages'
+import OpenAI from '@cherrystudio/openai'
+import { ChatCompletionChunk } from '@cherrystudio/openai/resources'
 import { FinishReason, MediaModality } from '@google/genai'
 import { FunctionCall } from '@google/genai'
 import AiProvider from '@renderer/aiCore'
@@ -38,8 +40,6 @@ import { mcpToolCallResponseToGeminiMessage } from '@renderer/utils/mcp-tools'
 import * as McpToolsModule from '@renderer/utils/mcp-tools'
 import { cloneDeep } from 'lodash'
-import OpenAI from 'openai'
-import { ChatCompletionChunk } from 'openai/resources'
 import { beforeEach, describe, expect, it, vi } from 'vitest'

 // Mock the ApiClientFactory
 vi.mock('@renderer/aiCore/legacy/clients/ApiClientFactory', () => ({
diff --git a/src/renderer/src/services/__tests__/ModelMessageService.test.ts b/src/renderer/src/services/__tests__/ModelMessageService.test.ts
index 0527e0c3c..4017f028c 100644
--- a/src/renderer/src/services/__tests__/ModelMessageService.test.ts
+++ b/src/renderer/src/services/__tests__/ModelMessageService.test.ts
@@ -1,5 +1,5 @@
+import { ChatCompletionMessageParam } from '@cherrystudio/openai/resources'
 import type { Model } from '@renderer/types'
-import { ChatCompletionMessageParam } from 'openai/resources'
 import { describe, expect, it } from 'vitest'

 import { processReqMessages } from '../ModelMessageService'
diff --git a/src/renderer/src/store/messageBlock.ts b/src/renderer/src/store/messageBlock.ts
index 458956e38..cddb8dcd6 100644
--- a/src/renderer/src/store/messageBlock.ts
+++ b/src/renderer/src/store/messageBlock.ts
@@ -1,10 +1,10 @@
 import { WebSearchResultBlock } from '@anthropic-ai/sdk/resources'
+import type OpenAI from '@cherrystudio/openai'
 import type { GroundingMetadata } from '@google/genai'
 import { createEntityAdapter, createSelector, createSlice, type PayloadAction } from '@reduxjs/toolkit'
 import { AISDKWebSearchResult, Citation, WebSearchProviderResponse, WebSearchSource } from '@renderer/types'
 import type { CitationMessageBlock, MessageBlock } from '@renderer/types/newMessage'
 import { MessageBlockType } from '@renderer/types/newMessage'
-import type OpenAI from 'openai'

 import type { RootState } from './index' // 确认 RootState 从 store/index.ts 导出
diff --git a/src/renderer/src/trace/dataHandler/StreamHandler.ts b/src/renderer/src/trace/dataHandler/StreamHandler.ts
index 5d6fb01c5..c071bdacd 100644
--- a/src/renderer/src/trace/dataHandler/StreamHandler.ts
+++ b/src/renderer/src/trace/dataHandler/StreamHandler.ts
@@ -1,8 +1,8 @@
+import { OpenAI } from '@cherrystudio/openai'
+import { Stream } from '@cherrystudio/openai/streaming'
 import { TokenUsage } from '@mcp-trace/trace-core'
 import { Span } from '@opentelemetry/api'
 import { endSpan } from '@renderer/services/SpanManagerService'
-import { OpenAI } from 'openai'
-import { Stream } from 'openai/streaming'

 export class StreamHandler {
   private topicId: string
diff --git a/src/renderer/src/types/file.ts b/src/renderer/src/types/file.ts
index 76d1c0df1..8591c1811 100644
--- a/src/renderer/src/types/file.ts
+++ b/src/renderer/src/types/file.ts
@@ -1,6 +1,6 @@
+import type OpenAI from '@cherrystudio/openai'
 import type { File } from '@google/genai'
 import type { FileSchema } from '@mistralai/mistralai/models/components'
-import type OpenAI from 'openai'

 export type RemoteFile =
   | {
diff --git a/src/renderer/src/types/index.ts b/src/renderer/src/types/index.ts
index ac57f9d89..e8a8d333f 100644
--- a/src/renderer/src/types/index.ts
+++ b/src/renderer/src/types/index.ts
@@ -1,7 +1,7 @@
 import type { LanguageModelV2Source } from '@ai-sdk/provider'
 import type { WebSearchResultBlock } from '@anthropic-ai/sdk/resources'
+import type OpenAI from '@cherrystudio/openai'
 import type { GenerateImagesConfig, GroundingMetadata, PersonGeneration } from '@google/genai'
-import type OpenAI from 'openai'
 import type { CSSProperties } from 'react'

 export * from './file'
diff --git a/src/renderer/src/types/newMessage.ts b/src/renderer/src/types/newMessage.ts
index a24462b19..4d0aa72f5 100644
--- a/src/renderer/src/types/newMessage.ts
+++ b/src/renderer/src/types/newMessage.ts
@@ -1,5 +1,5 @@
+import type { CompletionUsage } from '@cherrystudio/openai/resources'
 import type { ProviderMetadata } from 'ai'
-import type { CompletionUsage } from 'openai/resources'

 import type {
   Assistant,
diff --git a/src/renderer/src/types/sdk.ts b/src/renderer/src/types/sdk.ts
index e18891f2b..996d725cb 100644
--- a/src/renderer/src/types/sdk.ts
+++ b/src/renderer/src/types/sdk.ts
@@ -11,6 +11,9 @@ import { MessageStream } from '@anthropic-ai/sdk/resources/messages/messages'
 import AnthropicVertex from '@anthropic-ai/vertex-sdk'
 import type { BedrockClient } from '@aws-sdk/client-bedrock'
 import type { BedrockRuntimeClient } from '@aws-sdk/client-bedrock-runtime'
+import OpenAI, { AzureOpenAI } from '@cherrystudio/openai'
+import { ChatCompletionContentPartImage } from '@cherrystudio/openai/resources'
+import { Stream } from '@cherrystudio/openai/streaming'
 import {
   Content,
   CreateChatParameters,
@@ -21,9 +24,6 @@ import {
   SendMessageParameters,
   Tool
 } from '@google/genai'
-import OpenAI, { AzureOpenAI } from 'openai'
-import { ChatCompletionContentPartImage } from 'openai/resources'
-import { Stream } from 'openai/streaming'

 import { EndpointType } from './index'
diff --git a/src/renderer/src/utils/mcp-tools.ts b/src/renderer/src/utils/mcp-tools.ts
index 6914868d4..934d4d882 100644
--- a/src/renderer/src/utils/mcp-tools.ts
+++ b/src/renderer/src/utils/mcp-tools.ts
@@ -1,4 +1,11 @@
 import { ContentBlockParam, MessageParam, ToolUnion, ToolUseBlock } from '@anthropic-ai/sdk/resources'
+import OpenAI from '@cherrystudio/openai'
+import {
+  ChatCompletionContentPart,
+  ChatCompletionMessageParam,
+  ChatCompletionMessageToolCall,
+  ChatCompletionTool
+} from '@cherrystudio/openai/resources'
 import { Content, FunctionCall, Part, Tool, Type as GeminiSchemaType } from '@google/genai'
 import { loggerService } from '@logger'
 import { isFunctionCallingModel, isVisionModel } from '@renderer/config/models'
@@ -21,13 +28,6 @@ import { ChunkType } from '@renderer/types/chunk'
 import { AwsBedrockSdkMessageParam, AwsBedrockSdkTool, AwsBedrockSdkToolCall } from '@renderer/types/sdk'
 import { t } from 'i18next'
 import { nanoid } from 'nanoid'
-import OpenAI from 'openai'
-import {
-  ChatCompletionContentPart,
-  ChatCompletionMessageParam,
-  ChatCompletionMessageToolCall,
-  ChatCompletionTool
-} from 'openai/resources'

 import { isToolUseModeFunction } from './assistant'
 import { convertBase64ImageToAwsBedrockFormat } from './aws-bedrock-utils'
diff --git a/yarn.lock b/yarn.lock
index a634e71da..672c4af35 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2677,6 +2677,23 @@ __metadata:
   languageName: unknown
   linkType: soft

+"@cherrystudio/openai@npm:^6.5.0, openai@npm:@cherrystudio/openai@6.5.0":
+  version: 6.5.0
+  resolution: "@cherrystudio/openai@npm:6.5.0"
+  peerDependencies:
+    ws: ^8.18.0
+    zod: ^3.25 || ^4.0
+  peerDependenciesMeta:
+    ws:
+      optional: true
+    zod:
+      optional: true
+  bin:
+    openai: bin/cli
+  checksum: 10c0/0f6cafb97aec17037d5ddcccc88e4b4a9c8de77a989a35bab2394b682a1a69e8a9343e8ee5eb8107d5c495970dbf3567642f154c033f7afc3bf078078666a92e
+  languageName: node
+  linkType: hard
+
 "@chevrotain/cst-dts-gen@npm:11.0.3":
   version: 11.0.3
   resolution: "@chevrotain/cst-dts-gen@npm:11.0.3"
@@ -13860,6 +13877,7 @@ __metadata:
     "@cherrystudio/embedjs-ollama": "npm:^0.1.31"
     "@cherrystudio/embedjs-openai": "npm:^0.1.31"
     "@cherrystudio/extension-table-plus": "workspace:^"
+    "@cherrystudio/openai": "npm:^6.5.0"
     "@dnd-kit/core": "npm:^6.3.1"
     "@dnd-kit/modifiers": "npm:^9.0.0"
     "@dnd-kit/sortable": "npm:^10.0.0"
@@ -14041,7 +14059,6 @@ __metadata:
     notion-helper: "npm:^1.3.22"
     npx-scope-finder: "npm:^1.2.0"
     officeparser: "npm:^4.2.0"
-    openai: "patch:openai@npm%3A5.12.2#~/.yarn/patches/openai-npm-5.12.2-30b075401c.patch"
     os-proxy-config: "npm:^1.1.2"
     oxlint: "npm:^1.22.0"
     oxlint-tsgolint: "npm:^0.2.0"
@@ -23907,23 +23924,6 @@ __metadata:
   languageName: node
   linkType: hard

-"openai@patch:openai@npm%3A5.12.2#~/.yarn/patches/openai-npm-5.12.2-30b075401c.patch":
-  version: 5.12.2
-  resolution: "openai@patch:openai@npm%3A5.12.2#~/.yarn/patches/openai-npm-5.12.2-30b075401c.patch::version=5.12.2&hash=ad5d10"
-  peerDependencies:
-    ws: ^8.18.0
-    zod: ^3.23.8
-  peerDependenciesMeta:
-    ws:
-      optional: true
-    zod:
-      optional: true
-  bin:
-    openai: bin/cli
-  checksum: 10c0/2964a1c88a98cf169c9b73e8cd6776c03c8f3103fee30961c6953e5d995ad57a697e2179615999356809349186df6496abae105928ff7ce0229e5016dec87cb3
-  languageName: node
-  linkType: hard
-
 "openapi-types@npm:^12.1.3":
   version: 12.1.3
   resolution: "openapi-types@npm:12.1.3"