From 7d8ed3a7375777f23cafc6deeee78540e02a9feb Mon Sep 17 00:00:00 2001
From: lizhixuan
Date: Mon, 23 Jun 2025 23:58:05 +0800
Subject: [PATCH] refactor: simplify AI Core architecture and enhance runtime execution

- Restructured the AI Core documentation to reflect a simplified two-layer architecture, focusing on clear responsibilities between models and runtime layers.
- Removed the orchestration layer and consolidated its functionality into the runtime layer, streamlining the API for users.
- Introduced a new runtime executor for managing plugin-enhanced AI calls, improving the handling of execution and middleware.
- Updated the core modules to enhance type safety and usability, including comprehensive type definitions for model creation and execution configurations.
- Removed obsolete files and refactored existing code to improve organization and maintainability across the SDK.
---
 packages/aiCore/AI_SDK_ARCHITECTURE.md        | 552 +++++++++---------
 packages/aiCore/README.md                     |   1 -
 packages/aiCore/src/core/creation/index.ts    |  15 -
 .../aiCore/src/core/execution/AiExecutor.ts   | 174 ------
 packages/aiCore/src/core/execution/index.ts   |  47 --
 packages/aiCore/src/core/index.ts             |  14 +-
 packages/aiCore/src/core/middleware/index.ts  |   4 +-
 .../{MiddlewareManager.ts => manager.ts}      |   0
 .../{ModelWrapper.ts => wrapper.ts}           |   0
 .../{creation => models}/ConfigManager.ts     |   2 +-
 .../core/{creation => models}/ModelCreator.ts |   0
 .../{creation => models}/ProviderCreator.ts   |   2 -
 packages/aiCore/src/core/models/factory.ts    |  51 ++
 packages/aiCore/src/core/models/index.ts      |  19 +
 .../src/core/{creation => models}/types.ts    |   0
 packages/aiCore/src/core/plugins/README.md    |   2 -
 .../aiCore/src/core/plugins/built-in/index.ts |   7 +
 .../src/core/plugins/built-in/logging.ts      |  86 +++
 .../core/plugins/examples/example-usage.ts    | 255 --------
 packages/aiCore/src/core/plugins/index.ts     |   2 +
 packages/aiCore/src/core/plugins/manager.ts   |   2 +-
 packages/aiCore/src/core/plugins/types.ts     |  25 +-
 packages/aiCore/src/core/runtime/executor.ts  | 192 ++++++
 packages/aiCore/src/core/runtime/index.ts     | 107 ++++
 .../pluginEngine.ts}                          |  77 +--
 .../src/core/{execution => runtime}/types.ts  |  20 +-
 packages/aiCore/src/index.ts                  |  66 +--
 packages/aiCore/src/orchestration/api.ts      | 148 -----
 packages/aiCore/src/orchestration/index.ts    |  13 -
 packages/aiCore/src/orchestration/types.ts    |  26 -
 30 files changed, 784 insertions(+), 1125 deletions(-)
 delete mode 100644 packages/aiCore/src/core/creation/index.ts
 delete mode 100644 packages/aiCore/src/core/execution/AiExecutor.ts
 delete mode 100644 packages/aiCore/src/core/execution/index.ts
 rename packages/aiCore/src/core/middleware/{MiddlewareManager.ts => manager.ts} (100%)
 rename packages/aiCore/src/core/middleware/{ModelWrapper.ts => wrapper.ts} (100%)
 rename packages/aiCore/src/core/{creation => models}/ConfigManager.ts (92%)
 rename packages/aiCore/src/core/{creation => models}/ModelCreator.ts (100%)
 rename packages/aiCore/src/core/{creation => models}/ProviderCreator.ts (99%)
 create mode 100644 packages/aiCore/src/core/models/factory.ts
 create mode 100644 packages/aiCore/src/core/models/index.ts
 rename packages/aiCore/src/core/{creation => models}/types.ts (100%)
 create mode 100644 packages/aiCore/src/core/plugins/built-in/index.ts
 create mode 100644 packages/aiCore/src/core/plugins/built-in/logging.ts
 delete mode 100644 packages/aiCore/src/core/plugins/examples/example-usage.ts
 create mode 100644 packages/aiCore/src/core/runtime/executor.ts
 create mode 100644 packages/aiCore/src/core/runtime/index.ts
rename packages/aiCore/src/core/{clients/PluginEnabledAiClient.ts => runtime/pluginEngine.ts} (70%) rename packages/aiCore/src/core/{execution => runtime}/types.ts (51%) delete mode 100644 packages/aiCore/src/orchestration/api.ts delete mode 100644 packages/aiCore/src/orchestration/index.ts delete mode 100644 packages/aiCore/src/orchestration/types.ts diff --git a/packages/aiCore/AI_SDK_ARCHITECTURE.md b/packages/aiCore/AI_SDK_ARCHITECTURE.md index 6e540265d6..24eca128ff 100644 --- a/packages/aiCore/AI_SDK_ARCHITECTURE.md +++ b/packages/aiCore/AI_SDK_ARCHITECTURE.md @@ -4,7 +4,7 @@ ### 1.1 设计目标 -- **分层架构**:orchestration(编排层)→ core(核心层),职责分离 +- **简化分层**:`models`(模型层)→ `runtime`(运行时层),清晰的职责分离 - **统一接口**:使用 Vercel AI SDK 统一不同 AI Provider 的接口差异 - **动态导入**:通过动态导入实现按需加载,减少打包体积 - **最小包装**:直接使用 AI SDK 的类型和接口,避免重复定义 @@ -12,15 +12,17 @@ - **类型安全**:利用 TypeScript 和 AI SDK 的类型系统确保类型安全 - **轻量级**:专注核心功能,保持包的轻量和高效 - **包级独立**:作为独立包管理,便于复用和维护 +- **Agent就绪**:为将来集成 OpenAI Agents SDK 预留扩展空间 ### 1.2 核心优势 - **标准化**:AI SDK 提供统一的模型接口,减少适配工作 -- **分层设计**:清晰的职责分离,便于维护和扩展 +- **简化设计**:函数式API,避免过度抽象 - **更好的开发体验**:完整的 TypeScript 支持和丰富的生态系统 - **性能优化**:AI SDK 内置优化和最佳实践 - **模块化设计**:独立包结构,支持跨项目复用 - **可扩展插件**:通用的流转换和参数处理插件系统 +- **面向未来**:为 OpenAI Agents SDK 集成做好准备 ## 2. 整体架构图 @@ -32,34 +34,26 @@ graph TD end subgraph "packages/aiCore (AI Core 包)" - subgraph "Orchestration Layer (编排层)" - OrchAPI["api.ts (用户API)"] - OrchTypes["types.ts (编排类型)"] + subgraph "Runtime Layer (运行时层)" + RuntimeExecutor["RuntimeExecutor (运行时执行器)"] + PluginEngine["PluginEngine (插件引擎)"] + RuntimeAPI["Runtime API (便捷函数)"] end - subgraph "Core Layer (核心层)" - subgraph "Creation (创建层)" - ConfigManager["ConfigManager (配置管理)"] - ModelCreator["ModelCreator (模型创建)"] - ProviderCreator["ProviderCreator (提供商创建)"] - end + subgraph "Models Layer (模型层)" + ModelFactory["createModel() (模型工厂)"] + ProviderCreator["ProviderCreator (提供商创建器)"] + end - subgraph "Execution (执行层)" - AiExecutor["AiExecutor (执行器)"] - end - - subgraph "Clients (客户端层)" - PluginClient["PluginEnabledAiClient (插件客户端)"] + subgraph "Core Systems (核心系统)" + subgraph "Plugins (插件)" + PluginManager["PluginManager (插件管理)"] + BuiltInPlugins["Built-in Plugins (内置插件)"] + StreamTransforms["Stream Transforms (流转换)"] end subgraph "Middleware (中间件)" - MiddlewareManager["MiddlewareManager (中间件管理)"] - ModelWrapper["ModelWrapper (模型包装)"] - end - - subgraph "Plugins (插件)" - PluginManager["PluginManager (插件管理)"] - StreamTransforms["Stream Transforms (流转换)"] + MiddlewareWrapper["wrapModelWithMiddlewares() (中间件包装)"] end subgraph "Providers (提供商)" @@ -78,16 +72,19 @@ graph TD Others["其他 19+ Providers"] end - UI --> OrchAPI - Components --> OrchAPI - OrchAPI --> AiExecutor - AiExecutor --> PluginClient - PluginClient --> PluginManager - PluginClient --> ConfigManager - ConfigManager --> ModelCreator - ModelCreator --> ProviderCreator - ModelCreator --> MiddlewareManager - MiddlewareManager --> ModelWrapper + subgraph "Future: OpenAI Agents SDK" + AgentSDK["@openai/agents (未来集成)"] + AgentExtensions["Agent Extensions (预留)"] + end + + UI --> RuntimeAPI + Components --> RuntimeExecutor + RuntimeAPI --> RuntimeExecutor + RuntimeExecutor --> PluginEngine + RuntimeExecutor --> ModelFactory + PluginEngine --> PluginManager + ModelFactory --> ProviderCreator + ModelFactory --> MiddlewareWrapper ProviderCreator --> Registry Registry --> Factory Factory --> OpenAI @@ -96,51 +93,48 @@ graph TD Factory --> XAI Factory --> Others - PluginClient --> AICore + PluginEngine --> AICore AICore --> streamText AICore --> generateText AICore --> 
streamObject AICore --> generateObject PluginManager --> StreamTransforms + PluginManager --> BuiltInPlugins + + %% 未来集成路径 + RuntimeExecutor -.-> AgentSDK + AgentSDK -.-> AgentExtensions ``` ## 3. 包结构设计 -### 3.1 当前架构文件结构 +### 3.1 新架构文件结构 ``` packages/aiCore/ ├── src/ -│ ├── orchestration/ # 编排层 - 用户面向接口 -│ │ ├── api.ts # 主要API函数 ✅ -│ │ ├── types.ts # 编排类型定义 ✅ -│ │ └── index.ts # 编排层导出 ✅ │ ├── core/ # 核心层 - 内部实现 -│ │ ├── creation/ # 创建层 -│ │ │ ├── types.ts # 创建类型定义 ✅ -│ │ │ ├── ConfigManager.ts # 配置管理器 ✅ -│ │ │ ├── ModelCreator.ts # 模型创建器 ✅ +│ │ ├── models/ # 模型层 - 模型创建和配置 +│ │ │ ├── factory.ts # 模型工厂函数 ✅ │ │ │ ├── ProviderCreator.ts # 提供商创建器 ✅ -│ │ │ └── index.ts # 创建层导出 ✅ -│ │ ├── execution/ # 执行层 -│ │ │ ├── types.ts # 执行类型定义 ✅ -│ │ │ ├── AiExecutor.ts # AI执行器 ✅ -│ │ │ └── index.ts # 执行层导出 ✅ -│ │ ├── clients/ # 客户端层 -│ │ │ ├── PluginEnabledAiClient.ts # 插件客户端 ✅ -│ │ │ └── index.ts # 客户端导出 ✅ +│ │ │ ├── types.ts # 模型类型定义 ✅ +│ │ │ └── index.ts # 模型层导出 ✅ +│ │ ├── runtime/ # 运行时层 - 执行和用户API +│ │ │ ├── executor.ts # 运行时执行器 ✅ +│ │ │ ├── plugin-engine.ts # 插件引擎 ✅ +│ │ │ ├── types.ts # 运行时类型定义 ✅ +│ │ │ └── index.ts # 运行时导出 ✅ │ │ ├── middleware/ # 中间件系统 -│ │ │ ├── types.ts # 中间件类型 ✅ -│ │ │ ├── MiddlewareManager.ts # 中间件管理器 ✅ │ │ │ ├── ModelWrapper.ts # 模型包装器 ✅ │ │ │ └── index.ts # 中间件导出 ✅ │ │ ├── plugins/ # 插件系统 │ │ │ ├── types.ts # 插件类型定义 ✅ │ │ │ ├── manager.ts # 插件管理器 ✅ +│ │ │ ├── built-in/ # 内置插件 ✅ +│ │ │ │ ├── logging.ts # 日志插件 ✅ +│ │ │ │ └── index.ts # 内置插件导出 ✅ │ │ │ ├── examples/ # 示例插件 ✅ -│ │ │ │ ├── example-plugins.ts -│ │ │ │ └── example-usage.ts │ │ │ ├── README.md # 插件文档 ✅ │ │ │ └── index.ts # 插件导出 ✅ │ │ ├── providers/ # 提供商管理 @@ -159,143 +153,147 @@ packages/aiCore/ ## 4. 架构分层详解 -### 4.1 Orchestration Layer (编排层) +### 4.1 Models Layer (模型层) -**职责**:面向用户的主要API接口,提供简洁的使用体验 +**职责**:统一的模型创建和配置管理 **核心文件**: -- `api.ts`: 主要API函数 (`streamText`, `generateText`, `streamObject`, `generateObject`) -- `types.ts`: 编排配置类型定义 +- `factory.ts`: 模型工厂函数 (`createModel`, `createModels`) +- `ProviderCreator.ts`: 底层提供商创建和模型实例化 +- `types.ts`: 模型配置类型定义 **设计特点**: -- 支持两种使用方式:配置模式和直接AI SDK模式 -- 统一的函数重载设计 -- 自动处理执行器创建和调用 +- 函数式设计,避免不必要的类抽象 +- 统一的模型配置接口 +- 自动处理中间件应用 +- 支持批量模型创建 **核心API**: ```typescript -// 配置模式 - 推荐使用 -export async function streamText( - config: OrchestrationConfig, - modelId: string, - params: StreamTextParams -): Promise> +// 模型配置接口 +export interface ModelConfig { + providerId: ProviderId + modelId: string + options: ProviderSettingsMap[ProviderId] + middlewares?: LanguageModelV1Middleware[] +} -// 直接AI SDK模式 - 兼容原生使用 -export async function streamText(params: Parameters[0]): Promise> +// 核心模型创建函数 +export async function createModel(config: ModelConfig): Promise +export async function createModels(configs: ModelConfig[]): Promise ``` -### 4.2 Core Layer (核心层) +### 4.2 Runtime Layer (运行时层) -#### 4.2.1 Creation Layer (创建层) - -**职责**:负责配置解析、模型创建和提供商管理 +**职责**:运行时执行器和用户面向的API接口 **核心组件**: -- `ConfigManager`: 配置解析和中间件收集 -- `ModelCreator`: 高级模型创建逻辑 -- `ProviderCreator`: 底层提供商导入和模型创建 - -**关键功能**: - -```typescript -// 配置管理 -export function resolveConfig( - providerId: ProviderId, - modelId: string, - userOptions: ProviderSettingsMap[ProviderId], - plugins: AiPlugin[] = [] -): ResolvedConfig - -// 模型创建 -export async function createModel( - providerId: ProviderId, - modelId: string, - userOptions: ProviderSettingsMap[ProviderId], - plugins: AiPlugin[] = [] -): Promise -``` - -#### 4.2.2 Execution Layer (执行层) - -**职责**:AI执行引擎,封装插件处理逻辑 - -**核心组件**: - -- `AiExecutor`: 主要执行器类 +- `executor.ts`: 运行时执行器类 +- 
`plugin-engine.ts`: 插件引擎(原PluginEnabledAiClient) +- `index.ts`: 便捷函数和工厂方法 **设计特点**: -- 构造时确定插件配置,运行时不可变更 -- 内部使用 `PluginEnabledAiClient` 处理插件 -- 提供类型安全的API接口 +- 提供三种使用方式:类实例、静态工厂、函数式调用 +- 自动集成模型创建和插件处理 +- 完整的类型安全支持 +- 为 OpenAI Agents SDK 预留扩展接口 **核心API**: ```typescript -export class AiExecutor { +// 运行时执行器 +export class RuntimeExecutor { static create( providerId: T, options: ProviderSettingsMap[T], - plugins: AiPlugin[] = [] - ): AiExecutor + plugins?: AiPlugin[] + ): RuntimeExecutor - async streamText(modelId: string, params: StreamTextParams): Promise> - async generateText(modelId: string, params: GenerateTextParams): Promise> - async streamObject(modelId: string, params: StreamObjectParams): Promise> - async generateObject(modelId: string, params: GenerateObjectParams): Promise> + async streamText(modelId: string, params: StreamTextParams): Promise + async generateText(modelId: string, params: GenerateTextParams): Promise + async streamObject(modelId: string, params: StreamObjectParams): Promise + async generateObject(modelId: string, params: GenerateObjectParams): Promise } + +// 便捷函数式API +export async function streamText( + providerId: T, + options: ProviderSettingsMap[T], + modelId: string, + params: StreamTextParams, + plugins?: AiPlugin[] +): Promise ``` -#### 4.2.3 Clients Layer (客户端层) +### 4.3 Plugin System (插件系统) -**职责**:插件处理,连接插件系统和AI SDK +**职责**:可扩展的插件架构 **核心组件**: -- `PluginEnabledAiClient`: 处理插件执行和AI SDK调用 +- `PluginManager`: 插件生命周期管理 +- `built-in/`: 内置插件集合 +- 流转换收集和应用 **设计特点**: -- 使用 core/creation 层创建模型 -- 区分 streaming 和 non-streaming 插件处理 -- 支持 `streamText` 的流转换和其他方法的常规插件处理 +- 借鉴 Rollup 的钩子分类设计 +- 支持流转换 (`experimental_transform`) +- 内置常用插件(日志、计数等) +- 完整的生命周期钩子 -#### 4.2.4 Middleware Layer (中间件层) +**插件接口**: + +```typescript +export interface AiPlugin { + name: string + enforce?: 'pre' | 'post' + + // 【First】首个钩子 - 只执行第一个返回值的插件 + resolveModel?: (modelId: string, context: AiRequestContext) => string | null | Promise + loadTemplate?: (templateName: string, context: AiRequestContext) => any | null | Promise + + // 【Sequential】串行钩子 - 链式执行,支持数据转换 + transformParams?: (params: any, context: AiRequestContext) => any | Promise + transformResult?: (result: any, context: AiRequestContext) => any | Promise + + // 【Parallel】并行钩子 - 不依赖顺序,用于副作用 + onRequestStart?: (context: AiRequestContext) => void | Promise + onRequestEnd?: (context: AiRequestContext, result: any) => void | Promise + onError?: (error: Error, context: AiRequestContext) => void | Promise + + // 【Stream】流处理 + transformStream?: () => TransformStream +} +``` + +### 4.4 Middleware System (中间件系统) **职责**:AI SDK原生中间件支持 **核心组件**: -- `MiddlewareManager`: 中间件管理 (函数式) -- `ModelWrapper`: 模型包装器 (函数式) +- `ModelWrapper.ts`: 模型包装函数 **设计哲学**: -- 使用函数而非类,简化设计 - 直接使用AI SDK的 `wrapLanguageModel` - 与插件系统分离,职责明确 +- 函数式设计,简化使用 -#### 4.2.5 Plugins Layer (插件层) +```typescript +export function wrapModelWithMiddlewares( + model: LanguageModel, + middlewares: LanguageModelV1Middleware[] +): LanguageModel +``` -**职责**:特定的插件功能 - -**核心组件**: - -- `PluginManager`: 插件管理器 -- 流转换收集:`collectStreamTransforms` - -**设计特点**: - -- 支持流转换 (`experimental_transform`) -- 与AI SDK中间件分离 -- 专注于特定需求 - -#### 4.2.6 Providers Layer (提供商层) +### 4.5 Provider System (提供商系统) **职责**:AI Provider注册表和动态导入 @@ -313,192 +311,194 @@ export class AiExecutor { ## 5. 
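作为对 4.3 节插件接口的补充,下面给出一个最小示意:用 `definePlugin` 定义一个自定义插件,并与内置日志插件一起挂到运行时执行器上。示例中的插件名、模型名 `gpt-4o` 以及 `createLoggingPlugin` 的根导出路径均为演示假设,实际以包的导出为准。

```typescript
// createLoggingPlugin 是否从包根导出为演示假设,实际可能需从 built-in 子路径导入
import { createExecutor, createLoggingPlugin, definePlugin } from '@cherrystudio/ai-core'

// 自定义插件:请求前统一注入 system 提示(Sequential 钩子),并记录错误(Parallel 钩子)
const systemPromptPlugin = definePlugin({
  name: 'example:system-prompt',
  transformParams: async (params) => ({
    ...params,
    messages: [{ role: 'system', content: 'You are a concise assistant.' }, ...(params.messages ?? [])]
  }),
  onError: async (error, context) => {
    console.error(`[${context.providerId}/${context.modelId}]`, error.message)
  }
})

// 插件在创建执行器时确定,运行期间保持不变
const executor = createExecutor('openai', { apiKey: process.env.OPENAI_API_KEY! }, [
  createLoggingPlugin({ logPerformance: true }),
  systemPromptPlugin
])

const { text } = await executor.generateText('gpt-4o', {
  messages: [{ role: 'user', content: 'Hello!' }]
})
console.log(text)
```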
使用方式 -### 5.1 推荐使用方式 (Orchestration API) +### 5.1 函数式调用 (推荐 - 简单场景) ```typescript -import { streamText, generateText } from '@cherrystudio/ai-core' +import { streamText, generateText } from '@cherrystudio/ai-core/runtime' -// 配置模式使用 -const config = { - providerId: 'openai', - options: { apiKey: 'your-api-key' }, - plugins: [thinkingPlugin, toolPlugin] -} - -// 流式文本生成 -const stream = await streamText(config, 'gpt-4', { - messages: [{ role: 'user', content: 'Hello!' }] -}) - -// 普通文本生成 -const result = await generateText(config, 'gpt-4', { - messages: [{ role: 'user', content: 'Hello!' }] -}) +// 直接函数调用 +const stream = await streamText( + 'anthropic', + { apiKey: 'your-api-key' }, + 'claude-3', + { messages: [{ role: 'user', content: 'Hello!' }] }, + [loggingPlugin] +) ``` -### 5.2 直接AI SDK模式 (兼容性) +### 5.2 执行器实例 (推荐 - 复杂场景) ```typescript -import { streamText } from '@cherrystudio/ai-core' -import { openai } from '@ai-sdk/openai' +import { createExecutor } from '@cherrystudio/ai-core/runtime' -// 直接使用AI SDK模式 -const stream = await streamText({ - model: openai('gpt-4'), - messages: [{ role: 'user', content: 'Hello!' }] -}) -``` +// 创建可复用的执行器 +const executor = createExecutor('openai', { apiKey: 'your-api-key' }, [plugin1, plugin2]) -### 5.3 执行器模式 (高级用法) - -```typescript -import { AiExecutor } from '@cherrystudio/ai-core' - -// 创建执行器 -const executor = AiExecutor.create('openai', { apiKey: 'your-api-key' }, [plugin1, plugin2]) - -// 使用执行器 +// 多次使用 const stream = await executor.streamText('gpt-4', { messages: [{ role: 'user', content: 'Hello!' }] }) -``` -## 6. 插件系统详解 - -### 6.1 插件接口设计 - -```typescript -export interface AiPlugin { - name: string - collectStreamTransforms?: (context: AiRequestContext) => StreamTransform[] - transformParams?: (params: any, context: AiRequestContext) => Promise - transformResult?: (result: any, context: AiRequestContext) => Promise - onRequest?: (context: AiRequestContext) => Promise - onSuccess?: (result: any, context: AiRequestContext) => Promise - onError?: (error: Error, context: AiRequestContext) => Promise -} -``` - -### 6.2 流转换支持 - -专门针对 `streamText` 的流转换功能: - -```typescript -// 插件收集流转换 -const streamTransforms = pluginManager.collectStreamTransforms(context) - -// 应用到AI SDK -const result = await streamText({ - model, - ...params, - experimental_transform: streamTransforms.length > 0 ? composeTransforms(streamTransforms) : undefined +const result = await executor.generateText('gpt-4', { + messages: [{ role: 'user', content: 'How are you?' }] }) ``` -### 6.3 插件vs中间件 +### 5.3 静态工厂方法 -| 功能 | 插件 (Plugins) | 中间件 (Middleware) | -| -------- | -------------------------------- | ------------------- | -| 用途 | 应用特定功能 | AI SDK原生功能 | -| 流转换 | ✅ 支持 `experimental_transform` | ❌ 不支持 | -| 适用范围 | 所有AI方法 | 所有AI方法 | -| 应用时机 | 运行时 | 创建时 | -| 复杂度 | 简单 | 原生AI SDK | +```typescript +import { RuntimeExecutor } from '@cherrystudio/ai-core/runtime' + +// 静态创建 +const executor = RuntimeExecutor.create('anthropic', { apiKey: 'your-api-key' }) +await executor.streamText('claude-3', { messages: [...] }) +``` + +### 5.4 直接模型创建 (高级用法) + +```typescript +import { createModel } from '@cherrystudio/ai-core/models' +import { streamText } from 'ai' + +// 直接创建模型使用 +const model = await createModel({ + providerId: 'openai', + modelId: 'gpt-4', + options: { apiKey: 'your-api-key' }, + middlewares: [middleware1, middleware2] +}) + +// 直接使用 AI SDK +const result = await streamText({ model, messages: [...] }) +``` + +## 6. 
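5.4 节中传入的 `middlewares` 就是 AI SDK 原生的 `LanguageModelV1Middleware`。下面是一个最小示意(假设所用 AI SDK 版本提供 `wrapGenerate` 钩子;providerId 与模型名仅作演示),展示如何定义一个简单的耗时统计中间件并交给 `createModel`:

```typescript
import { generateText, type LanguageModelV1Middleware } from 'ai'

import { createModel } from '@cherrystudio/ai-core/models'

// 示意:统计一次 generate 调用耗时的中间件
const timingMiddleware: LanguageModelV1Middleware = {
  wrapGenerate: async ({ doGenerate }) => {
    const start = Date.now()
    const result = await doGenerate()
    console.log(`generate took ${Date.now() - start}ms`)
    return result
  }
}

// 中间件在模型创建时被包装进模型
const model = await createModel({
  providerId: 'openai',
  modelId: 'gpt-4o',
  options: { apiKey: process.env.OPENAI_API_KEY! },
  middlewares: [timingMiddleware]
})

// 创建好的模型可直接交给 AI SDK 使用
const { text } = await generateText({ model, messages: [{ role: 'user', content: 'Hello!' }] })
console.log(text)
```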
为 OpenAI Agents SDK 预留的设计 + +### 6.1 架构兼容性 + +当前架构完全兼容 OpenAI Agents SDK 的集成需求: + +```typescript +// 当前的模型创建 +const model = await createModel({ + providerId: 'anthropic', + modelId: 'claude-3', + options: { apiKey: 'xxx' } +}) + +// 将来可以直接用于 OpenAI Agents SDK +import { Agent, run } from '@openai/agents' + +const agent = new Agent({ + model, // ✅ 直接兼容 LanguageModel 接口 + name: 'Assistant', + instructions: '...', + tools: [tool1, tool2] +}) + +const result = await run(agent, 'user input') +``` + +### 6.2 预留的扩展点 + +1. **runtime/agents/** 目录预留 +2. **AgentExecutor** 类预留 +3. **Agent工具转换插件** 预留 +4. **多Agent编排** 预留 + +### 6.3 未来架构扩展 + +``` +packages/aiCore/src/core/ +├── runtime/ +│ ├── agents/ # 🚀 未来添加 +│ │ ├── AgentExecutor.ts +│ │ ├── WorkflowManager.ts +│ │ └── ConversationManager.ts +│ ├── executor.ts +│ └── index.ts +``` ## 7. 架构优势 -### 7.1 分层清晰 +### 7.1 简化设计 -- **Orchestration**: 用户友好的API -- **Core**: 模块化的内部实现 -- **职责分离**: 每层专注自己的职责 +- **移除过度抽象**:删除了orchestration层和creation层的复杂包装 +- **函数式优先**:models层使用函数而非类 +- **直接明了**:runtime层直接提供用户API -### 7.2 函数式设计 +### 7.2 职责清晰 -- 大部分模块使用函数而非类 -- 更简洁的代码和更好的可测试性 -- 避免不必要的面向对象复杂性 +- **Models**: 专注模型创建和配置 +- **Runtime**: 专注执行和用户API +- **Plugins**: 专注扩展功能 +- **Providers**: 专注AI Provider管理 ### 7.3 类型安全 -- 统一使用 `types.ts` 中的类型定义 -- 避免重复定义,提高维护性 - 完整的 TypeScript 支持 +- AI SDK 类型的直接复用 +- 避免类型重复定义 -### 7.4 灵活扩展 +### 7.4 灵活使用 -- 插件系统支持流转换 -- 中间件系统支持AI SDK原生功能 -- 模块化设计便于功能扩展 +- 三种使用模式满足不同需求 +- 从简单函数调用到复杂执行器 +- 支持直接AI SDK使用 -## 8. 迁移状态 +### 7.5 面向未来 -### 8.1 已完成 ✅ +- 为 OpenAI Agents SDK 集成做好准备 +- 清晰的扩展点和架构边界 +- 模块化设计便于功能添加 -1. **架构重构** - 分层设计和职责分离 -2. **类型系统** - 统一类型定义和复用 -3. **函数式设计** - 从类转换为函数 -4. **插件系统** - 流转换和通用插件功能 -5. **Orchestration层** - 用户友好的API接口 -6. **Core层完整实现** - 创建、执行、客户端、中间件、插件、提供商 +## 8. 技术决策记录 -### 8.2 进行中 🔄 +### 8.1 为什么选择简化的两层架构? -1. **集成测试** - 在实际项目中完整测试 -2. **性能优化** - 确保无性能退化 -3. **文档完善** - 使用指南和最佳实践 +- **职责分离**:models专注创建,runtime专注执行 +- **模块化**:每层都有清晰的边界和职责 +- **扩展性**:为Agent功能预留了清晰的扩展空间 -### 8.3 计划中 📋 +### 8.2 为什么选择函数式设计? -1. **生态系统扩展** - 更多通用插件 -2. **优化改进** - 基于使用反馈的持续改进 -3. **社区贡献** - 开源发布和社区生态 +- **简洁性**:避免不必要的类设计 +- **性能**:减少对象创建开销 +- **易用性**:函数调用更直观 -## 9. 技术决策记录 - -### 9.1 为什么选择分层架构? - -- **用户体验**: Orchestration层提供简洁API -- **内部复杂性**: Core层处理复杂逻辑 -- **维护性**: 清晰的职责分离 - -### 9.2 为什么选择函数式设计? - -- **简洁性**: 避免不必要的类设计 -- **可测试性**: 函数更容易测试 -- **性能**: 减少对象创建开销 - -### 9.3 为什么分离插件和中间件? +### 8.3 为什么分离插件和中间件? - **职责明确**: 插件处理应用特定需求 - **原生支持**: 中间件使用AI SDK原生功能 - **灵活性**: 两套系统可以独立演进 -## 10. 总结 +## 9. 
总结 -新的AI Core架构实现了: +AI Core架构实现了: -### 10.1 设计目标 +### 9.1 核心特点 -- ✅ **分层架构**: 清晰的编排层和核心层分离 -- ✅ **函数式设计**: 简洁的函数式API -- ✅ **类型安全**: 统一的类型定义和复用 -- ✅ **插件扩展**: 支持流转换的插件系统 -- ✅ **易用性**: 多种使用模式满足不同需求 +- ✅ **简化架构**: 2层核心架构,职责清晰 +- ✅ **函数式设计**: models层完全函数化 +- ✅ **类型安全**: 统一的类型定义和AI SDK类型复用 +- ✅ **插件扩展**: 强大的插件系统 +- ✅ **多种使用方式**: 满足不同复杂度需求 +- ✅ **Agent就绪**: 为OpenAI Agents SDK集成做好准备 -### 10.2 核心价值 +### 9.2 核心价值 - **统一接口**: 一套API支持19+ AI providers -- **灵活使用**: 配置模式、AI SDK模式、执行器模式 +- **灵活使用**: 函数式、实例式、静态工厂式 - **强类型**: 完整的TypeScript支持 - **可扩展**: 插件和中间件双重扩展能力 - **高性能**: 最小化包装,直接使用AI SDK +- **面向未来**: Agent SDK集成架构就绪 -### 10.3 未来发展 +### 9.3 未来发展 -这个架构提供了坚实的AI基础设施,支持: +这个架构提供了: -- 持续的功能扩展 -- 良好的开发体验 -- 社区生态建设 -- 跨项目复用价值 +- **优秀的开发体验**: 简洁的API和清晰的使用模式 +- **强大的扩展能力**: 为Agent功能预留了完整的架构空间 +- **良好的维护性**: 职责分离明确,代码易于维护 +- **广泛的适用性**: 既适合简单调用也适合复杂应用 diff --git a/packages/aiCore/README.md b/packages/aiCore/README.md index e400bd85d0..ab9e04541f 100644 --- a/packages/aiCore/README.md +++ b/packages/aiCore/README.md @@ -6,7 +6,6 @@ Cherry Studio AI Core 是一个基于 Vercel AI SDK 的统一 AI Provider 接口 - 🚀 统一的 AI Provider 接口 - 🔄 动态导入支持 -- 💾 智能缓存机制 - 🛠️ TypeScript 支持 - 📦 轻量级设计 diff --git a/packages/aiCore/src/core/creation/index.ts b/packages/aiCore/src/core/creation/index.ts deleted file mode 100644 index e3f0b0e4c0..0000000000 --- a/packages/aiCore/src/core/creation/index.ts +++ /dev/null @@ -1,15 +0,0 @@ -/** - * Creation 模块导出 - * 提供配置管理和模型创建能力 - */ - -export { resolveConfig } from './ConfigManager' -export { createModel, createModelFromConfig } from './ModelCreator' -export { - createBaseModel, - createImageModel, - getProviderInfo, - getSupportedProviders, - ProviderCreationError -} from './ProviderCreator' -export type { ModelCreationRequest, ResolvedConfig } from './types' diff --git a/packages/aiCore/src/core/execution/AiExecutor.ts b/packages/aiCore/src/core/execution/AiExecutor.ts deleted file mode 100644 index 21673a7e81..0000000000 --- a/packages/aiCore/src/core/execution/AiExecutor.ts +++ /dev/null @@ -1,174 +0,0 @@ -/** - * AI 执行器 - * 面向用户的主要API入口,专注于AI调用 - */ -import { generateObject, generateText, LanguageModelV1, streamObject, streamText } from 'ai' - -import { type ProviderId } from '../../types' -import { PluginEnabledAiClient } from '../clients/PluginEnabledAiClient' -import { type AiPlugin } from '../plugins' -import { isProviderSupported } from '../providers/registry' -import { type ExecutorConfig, type GenericExecutorConfig } from './types' - -export class AiExecutor { - private pluginClient: PluginEnabledAiClient - - constructor(config: ExecutorConfig) - constructor(config: GenericExecutorConfig) - constructor(config: ExecutorConfig | GenericExecutorConfig) { - if (isProviderSupported(config.providerId)) { - this.pluginClient = new PluginEnabledAiClient(config.providerId as T) - } else { - // 对于未知provider,使用openai-compatible - this.pluginClient = new PluginEnabledAiClient('openai-compatible' as T) - } - } - - /** - * 流式文本生成 - * 用户友好的API,内部使用插件处理能力 - */ - async streamText( - model: LanguageModelV1, - params: Omit[0], 'model'> - ): Promise> - async streamText( - model: LanguageModelV1, - params?: Omit[0], 'model'> - ): Promise> { - // 传统方式:使用插件处理逻辑 - return this.pluginClient.executeStreamWithPlugins( - 'streamText', - model.modelId, - params!, - async (finalModelId, transformedParams, streamTransforms) => { - // const model = await this.pluginClient.createModelWithMiddlewares(finalModelId) - const experimental_transform = - params?.experimental_transform ?? (streamTransforms.length > 0 ? 
streamTransforms : undefined) - - return await streamText({ - model, - ...transformedParams, - experimental_transform - }) - } - ) - } - - /** - * 生成文本 - * 用户友好的API,内部使用插件处理能力 - */ - async generateText( - model: LanguageModelV1, - params: Omit[0], 'model'> - ): Promise> - async generateText( - model: LanguageModelV1, - params?: Omit[0], 'model'> - ): Promise> { - // 传统方式:使用插件处理逻辑 - return this.pluginClient.executeWithPlugins( - 'generateText', - model.modelId, - params!, - async (finalModelId, transformedParams) => { - // const model = await this.pluginClient.createModelWithMiddlewares(finalModelId) - return await generateText({ model, ...transformedParams }) - } - ) - } - - /** - * 生成结构化对象 - * 用户友好的API,内部使用插件处理能力 - */ - async generateObject( - model: LanguageModelV1, - params: Omit[0], 'model'> - ): Promise> - async generateObject( - model: LanguageModelV1, - params?: Omit[0], 'model'> - ): Promise> { - // 传统方式:使用插件处理逻辑 - return this.pluginClient.executeWithPlugins( - 'generateObject', - model.modelId, - params!, - async (finalModelId, transformedParams) => { - // const model = await this.pluginClient.createModelWithMiddlewares(finalModelId) - return await generateObject({ model, ...transformedParams }) - } - ) - } - - /** - * 流式生成结构化对象 - * 用户友好的API,内部使用插件处理能力 - */ - async streamObject( - model: LanguageModelV1, - params: Omit[0], 'model'> - ): Promise> - async streamObject( - model: LanguageModelV1, - params?: Omit[0], 'model'> - ): Promise> { - // 传统方式:使用插件处理逻辑 - return this.pluginClient.executeWithPlugins( - 'streamObject', - model.modelId, - params!, - async (finalModelId, transformedParams) => { - // const model = await this.pluginClient.createModelWithMiddlewares(finalModelId) - return await streamObject({ model, ...transformedParams }) - } - ) - } - - /** - * 获取插件统计信息(只读) - */ - getPluginStats() { - return this.pluginClient.getPluginStats() - } - - /** - * 获取所有插件(只读) - */ - getPlugins() { - return this.pluginClient.getPlugins() - } - - /** - * 获取客户端信息 - */ - getClientInfo() { - return this.pluginClient.getClientInfo() - } - - // === 静态工厂方法 === - - /** - * 创建执行器 - 支持已知provider的类型安全 - */ - static create(providerId: T, plugins?: AiPlugin[]): AiExecutor - static create(providerId: string, plugins?: AiPlugin[]): AiExecutor - static create(providerId: string, plugins: AiPlugin[] = []): AiExecutor { - return new AiExecutor({ - providerId, - plugins - }) - } - - /** - * 创建OpenAI Compatible执行器 - */ - static createOpenAICompatible(plugins: AiPlugin[] = []): AiExecutor<'openai-compatible'> { - return new AiExecutor({ - providerId: 'openai-compatible', - plugins - }) - } -} diff --git a/packages/aiCore/src/core/execution/index.ts b/packages/aiCore/src/core/execution/index.ts deleted file mode 100644 index 901991eb67..0000000000 --- a/packages/aiCore/src/core/execution/index.ts +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Execution 模块导出 - * 提供执行器能力 - */ - -// 主要执行器 -export { AiExecutor } from './AiExecutor' -export type { ExecutionOptions, ExecutorConfig, GenericExecutorConfig } from './types' - -// 便捷工厂函数 -import { type ProviderId, type ProviderSettingsMap } from '../../types' -import { type AiPlugin } from '../plugins' -import { AiExecutor } from './AiExecutor' - -/** - * 创建AI执行器 - 支持类型安全的已知provider - */ -export function createExecutor( - providerId: T, - options: ProviderSettingsMap[T], - plugins?: AiPlugin[] -): AiExecutor - -/** - * 创建AI执行器 - 支持未知provider - */ -export function createExecutor(providerId: string, options: any, plugins?: AiPlugin[]): AiExecutor - -export function 
createExecutor(providerId: string, options: any, plugins: AiPlugin[] = []): AiExecutor { - return AiExecutor.create(providerId, plugins) -} - -/** - * 创建OpenAI Compatible执行器 - */ -export function createOpenAICompatibleExecutor( - options: ProviderSettingsMap['openai-compatible'], - plugins: AiPlugin[] = [] -): AiExecutor<'openai-compatible'> { - return AiExecutor.createOpenAICompatible(plugins) -} - -// 为了未来的agent功能预留目录结构 -// 未来将在 ./agents/ 文件夹中添加: -// - AgentExecutor.ts -// - WorkflowManager.ts -// - ConversationManager.ts diff --git a/packages/aiCore/src/core/index.ts b/packages/aiCore/src/core/index.ts index 51acbf0e4b..a50a5a382d 100644 --- a/packages/aiCore/src/core/index.ts +++ b/packages/aiCore/src/core/index.ts @@ -5,21 +5,19 @@ // 中间件系统 export type { NamedMiddleware } from './middleware' -export { MiddlewareManager, wrapModelWithMiddlewares } from './middleware' +export { createMiddlewares, wrapModelWithMiddlewares } from './middleware' // 创建管理 -export type { ModelCreationRequest, ResolvedConfig } from './creation' +export type { ModelCreationRequest, ResolvedConfig } from './models' export { createBaseModel, createImageModel, createModel, - createModelFromConfig, getProviderInfo, getSupportedProviders, - ProviderCreationError, - resolveConfig -} from './creation' + ProviderCreationError +} from './models' // 执行管理 -export type { ExecutionOptions, ExecutorConfig, GenericExecutorConfig } from './execution' -export { AiExecutor } from './execution' +export type { ExecutionOptions, ExecutorConfig } from './runtime' +export { createExecutor, createOpenAICompatibleExecutor } from './runtime' diff --git a/packages/aiCore/src/core/middleware/index.ts b/packages/aiCore/src/core/middleware/index.ts index 718e17bef6..535b588098 100644 --- a/packages/aiCore/src/core/middleware/index.ts +++ b/packages/aiCore/src/core/middleware/index.ts @@ -3,6 +3,6 @@ * 提供通用的中间件管理能力 */ -export { MiddlewareManager } from './MiddlewareManager' -export { wrapModelWithMiddlewares } from './ModelWrapper' +export { createMiddlewares } from './manager' export type { NamedMiddleware } from './types' +export { wrapModelWithMiddlewares } from './wrapper' diff --git a/packages/aiCore/src/core/middleware/MiddlewareManager.ts b/packages/aiCore/src/core/middleware/manager.ts similarity index 100% rename from packages/aiCore/src/core/middleware/MiddlewareManager.ts rename to packages/aiCore/src/core/middleware/manager.ts diff --git a/packages/aiCore/src/core/middleware/ModelWrapper.ts b/packages/aiCore/src/core/middleware/wrapper.ts similarity index 100% rename from packages/aiCore/src/core/middleware/ModelWrapper.ts rename to packages/aiCore/src/core/middleware/wrapper.ts diff --git a/packages/aiCore/src/core/creation/ConfigManager.ts b/packages/aiCore/src/core/models/ConfigManager.ts similarity index 92% rename from packages/aiCore/src/core/creation/ConfigManager.ts rename to packages/aiCore/src/core/models/ConfigManager.ts index 11f164e775..b3958bf297 100644 --- a/packages/aiCore/src/core/creation/ConfigManager.ts +++ b/packages/aiCore/src/core/models/ConfigManager.ts @@ -5,7 +5,7 @@ import { LanguageModelV1Middleware } from 'ai' import { ProviderId, ProviderSettingsMap } from '../../types' -import { createMiddlewares } from '../middleware/MiddlewareManager' +import { createMiddlewares } from '../middleware/manager' import { AiPlugin } from '../plugins' import { ResolvedConfig } from './types' diff --git a/packages/aiCore/src/core/creation/ModelCreator.ts b/packages/aiCore/src/core/models/ModelCreator.ts similarity index 
100% rename from packages/aiCore/src/core/creation/ModelCreator.ts rename to packages/aiCore/src/core/models/ModelCreator.ts diff --git a/packages/aiCore/src/core/creation/ProviderCreator.ts b/packages/aiCore/src/core/models/ProviderCreator.ts similarity index 99% rename from packages/aiCore/src/core/creation/ProviderCreator.ts rename to packages/aiCore/src/core/models/ProviderCreator.ts index 449cbc56e3..1dd46bad29 100644 --- a/packages/aiCore/src/core/creation/ProviderCreator.ts +++ b/packages/aiCore/src/core/models/ProviderCreator.ts @@ -105,13 +105,11 @@ export async function createImageModel( modelId: string, options: ProviderSettingsMap[T] ): Promise - export async function createImageModel( providerId: string, modelId: string, options: ProviderSettingsMap['openai-compatible'] ): Promise - export async function createImageModel( providerId: string, modelId: string = 'default', diff --git a/packages/aiCore/src/core/models/factory.ts b/packages/aiCore/src/core/models/factory.ts new file mode 100644 index 0000000000..acc539d436 --- /dev/null +++ b/packages/aiCore/src/core/models/factory.ts @@ -0,0 +1,51 @@ +/** + * 模型工厂函数 + * 统一的模型创建和配置管理 + */ +import { LanguageModel, LanguageModelV1Middleware } from 'ai' + +import { type ProviderId, type ProviderSettingsMap } from '../../types' +import { wrapModelWithMiddlewares } from '../middleware' +import { createBaseModel } from './ProviderCreator' + +export interface ModelConfig { + providerId: ProviderId + modelId: string + options: ProviderSettingsMap[ProviderId] + middlewares?: LanguageModelV1Middleware[] +} + +/** + * 创建模型 - 核心函数 + */ +export async function createModel(config: ModelConfig): Promise { + validateModelConfig(config) + + // 1. 创建基础模型 + const baseModel = await createBaseModel(config.providerId, config.modelId, config.options) + + // 2. 应用中间件(如果有) + return config.middlewares?.length ? 
wrapModelWithMiddlewares(baseModel, config.middlewares) : baseModel +} + +/** + * 批量创建模型 + */ +export async function createModels(configs: ModelConfig[]): Promise { + return Promise.all(configs.map((config) => createModel(config))) +} + +/** + * 验证模型配置 + */ +function validateModelConfig(config: ModelConfig): void { + if (!config.providerId) { + throw new Error('ModelConfig: providerId is required') + } + if (!config.modelId) { + throw new Error('ModelConfig: modelId is required') + } + if (!config.options) { + throw new Error('ModelConfig: options is required') + } +} diff --git a/packages/aiCore/src/core/models/index.ts b/packages/aiCore/src/core/models/index.ts new file mode 100644 index 0000000000..cee1eab374 --- /dev/null +++ b/packages/aiCore/src/core/models/index.ts @@ -0,0 +1,19 @@ +/** + * Models 模块导出 + * 提供统一的模型创建和配置管理能力 + */ + +// 主要的模型创建API +export { createModel, createModels, type ModelConfig } from './factory' + +// 底层Provider创建功能(供高级用户使用) +export { + createBaseModel, + createImageModel, + getProviderInfo, + getSupportedProviders, + ProviderCreationError +} from './ProviderCreator' + +// 保留原有类型 +export type { ModelCreationRequest, ResolvedConfig } from './types' diff --git a/packages/aiCore/src/core/creation/types.ts b/packages/aiCore/src/core/models/types.ts similarity index 100% rename from packages/aiCore/src/core/creation/types.ts rename to packages/aiCore/src/core/models/types.ts diff --git a/packages/aiCore/src/core/plugins/README.md b/packages/aiCore/src/core/plugins/README.md index bc0e28954e..266b10c876 100644 --- a/packages/aiCore/src/core/plugins/README.md +++ b/packages/aiCore/src/core/plugins/README.md @@ -4,8 +4,6 @@ ## 🎯 设计理念 -借鉴 Rollup/Vite 的成熟插件思想: - - **语义清晰**:不同钩子有不同的执行语义 - **类型安全**:TypeScript 完整支持 - **性能优化**:First 短路、Parallel 并发、Sequential 链式 diff --git a/packages/aiCore/src/core/plugins/built-in/index.ts b/packages/aiCore/src/core/plugins/built-in/index.ts new file mode 100644 index 0000000000..5de58f2175 --- /dev/null +++ b/packages/aiCore/src/core/plugins/built-in/index.ts @@ -0,0 +1,7 @@ +/** + * 内置插件命名空间 + * 所有内置插件都以 'built-in:' 为前缀 + */ +export const BUILT_IN_PLUGIN_PREFIX = 'built-in:' + +export { createLoggingPlugin } from './logging' diff --git a/packages/aiCore/src/core/plugins/built-in/logging.ts b/packages/aiCore/src/core/plugins/built-in/logging.ts new file mode 100644 index 0000000000..043765784c --- /dev/null +++ b/packages/aiCore/src/core/plugins/built-in/logging.ts @@ -0,0 +1,86 @@ +/** + * 内置插件:日志记录 + * 记录AI调用的关键信息,支持性能监控和调试 + */ +import { definePlugin } from '../index' +import type { AiRequestContext } from '../types' + +export interface LoggingConfig { + // 日志级别 + level?: 'debug' | 'info' | 'warn' | 'error' + // 是否记录参数 + logParams?: boolean + // 是否记录结果 + logResult?: boolean + // 是否记录性能数据 + logPerformance?: boolean + // 自定义日志函数 + logger?: (level: string, message: string, data?: any) => void +} + +/** + * 创建日志插件 + */ +export function createLoggingPlugin(config: LoggingConfig = {}) { + const { level = 'info', logParams = true, logResult = false, logPerformance = true, logger = console.log } = config + + const startTimes = new Map() + + return definePlugin({ + name: 'built-in:logging', + + onRequestStart: (context: AiRequestContext) => { + const requestId = context.requestId + startTimes.set(requestId, Date.now()) + + logger(level, `🚀 AI Request Started`, { + requestId, + providerId: context.providerId, + modelId: context.modelId, + originalParams: logParams ? 
context.originalParams : '[hidden]' + }) + }, + + onRequestEnd: (context: AiRequestContext, result: any) => { + const requestId = context.requestId + const startTime = startTimes.get(requestId) + const duration = startTime ? Date.now() - startTime : undefined + startTimes.delete(requestId) + + const logData: any = { + requestId, + providerId: context.providerId, + modelId: context.modelId + } + + if (logPerformance && duration) { + logData.duration = `${duration}ms` + } + + if (logResult) { + logData.result = result + } + + logger(level, `✅ AI Request Completed`, logData) + }, + + onError: (error: Error, context: AiRequestContext) => { + const requestId = context.requestId + const startTime = startTimes.get(requestId) + const duration = startTime ? Date.now() - startTime : undefined + startTimes.delete(requestId) + + logger('error', `❌ AI Request Failed`, { + requestId, + providerId: context.providerId, + modelId: context.modelId, + duration: duration ? `${duration}ms` : undefined, + error: { + name: error.name, + message: error.message, + stack: error.stack + } + }) + } + }) +} diff --git a/packages/aiCore/src/core/plugins/examples/example-usage.ts b/packages/aiCore/src/core/plugins/examples/example-usage.ts deleted file mode 100644 index 7a4d3ad1bc..0000000000 --- a/packages/aiCore/src/core/plugins/examples/example-usage.ts +++ /dev/null @@ -1,255 +0,0 @@ -import { openai } from '@ai-sdk/openai' -import { streamText } from 'ai' - -import { PluginEnabledAiClient } from '../../clients/PluginEnabledAiClient' -import { createContext, PluginManager } from '..' -import { ContentFilterPlugin, LoggingPlugin } from './example-plugins' - -/** - * 使用 PluginEnabledAiClient 的推荐方式 - * 这是最简单直接的使用方法 - */ -export async function exampleWithPluginEnabledClient() { - console.log('=== 使用 PluginEnabledAiClient 示例 ===') - - // 1. 创建带插件的客户端 - 链式调用方式 - const client = PluginEnabledAiClient.create('openai-compatible', { - name: 'openai', - baseURL: 'https://api.openai.com/v1', - apiKey: process.env.OPENAI_API_KEY || 'sk-test' - }) - .use(LoggingPlugin) - .use(ContentFilterPlugin) - - // 2. 或者在创建时传入插件(也可以这样使用) - // const clientWithPlugins = PluginEnabledAiClient.create( - // 'openai-compatible', - // { - // name: 'openai', - // baseURL: 'https://api.openai.com/v1', - // apiKey: process.env.OPENAI_API_KEY || 'sk-test' - // }, - // [LoggingPlugin, ContentFilterPlugin] - // ) - - // 3. 查看插件统计信息 - console.log('插件统计:', client.getPluginStats()) - - try { - // 4. 使用客户端进行 AI 调用(插件会自动生效) - console.log('开始生成文本...') - const result = await client.generateText('gpt-4', { - messages: [{ role: 'user', content: 'Hello, world!' }], - temperature: 0.7 - }) - - console.log('生成的文本:', result.text) - - // 5. 
流式调用(支持流转换器) - console.log('开始流式生成...') - const streamResult = await client.streamText('gpt-4', { - messages: [{ role: 'user', content: 'Tell me a short story about AI' }] - }) - - console.log('开始流式响应...') - for await (const textPart of streamResult.textStream) { - process.stdout.write(textPart) - } - console.log('\n流式响应完成') - - return result - } catch (error) { - console.error('调用失败:', error) - throw error - } -} - -/** - * 创建 OpenAI Compatible 客户端的示例 - */ -export function exampleOpenAICompatible() { - console.log('=== OpenAI Compatible 示例 ===') - - // Ollama 示例 - const ollama = PluginEnabledAiClient.createOpenAICompatible( - { - name: 'ollama', - baseURL: 'http://localhost:11434/v1' - }, - [LoggingPlugin] - ) - - // LM Studio 示例 - const lmStudio = PluginEnabledAiClient.createOpenAICompatible({ - name: 'lm-studio', - baseURL: 'http://localhost:1234/v1' - }).use(ContentFilterPlugin) - - console.log('Ollama 插件统计:', ollama.getPluginStats()) - console.log('LM Studio 插件统计:', lmStudio.getPluginStats()) - - return { ollama, lmStudio } -} - -/** - * 动态插件管理示例 - */ -export function exampleDynamicPlugins() { - console.log('=== 动态插件管理示例 ===') - - const client = PluginEnabledAiClient.create('openai-compatible', { - name: 'openai', - baseURL: 'https://api.openai.com/v1', - apiKey: 'your-api-key' - }) - - console.log('初始状态:', client.getPluginStats()) - - // 动态添加插件 - client.use(LoggingPlugin) - console.log('添加 LoggingPlugin 后:', client.getPluginStats()) - - client.usePlugins([ContentFilterPlugin]) - console.log('添加 ContentFilterPlugin 后:', client.getPluginStats()) - - // 移除插件 - client.removePlugin('content-filter') - console.log('移除 content-filter 后:', client.getPluginStats()) - - return client -} - -/** - * 完整的低级 API 示例(原有的 example-usage.ts 的方式) - * 这种方式适合需要精细控制插件生命周期的场景 - */ -export async function exampleLowLevelApi() { - console.log('=== 低级 API 示例 ===') - - // 1. 创建插件管理器 - const pluginManager = new PluginManager([LoggingPlugin, ContentFilterPlugin]) - - // 2. 创建请求上下文 - const context = createContext('openai', 'gpt-4', { - messages: [{ role: 'user', content: 'Hello!' }] - }) - - try { - // 3. 触发请求开始事件 - await pluginManager.executeParallel('onRequestStart', context) - - // 4. 解析模型别名 - const resolvedModel = await pluginManager.executeFirst('resolveModel', 'gpt-4', context) - console.log('Resolved model:', resolvedModel || 'gpt-4') - - // 5. 转换请求参数 - const params = { - messages: [{ role: 'user' as const, content: 'Hello, AI!' }], - temperature: 0.7 - } - const transformedParams = await pluginManager.executeSequential('transformParams', params, context) - - // 6. 收集流转换器(关键:AI SDK 原生支持数组!) - const streamTransforms = pluginManager.collectStreamTransforms() - - // 7. 调用 AI SDK,直接传入转换器工厂数组 - const result = await streamText({ - model: openai('gpt-4'), - ...transformedParams, - experimental_transform: streamTransforms // 直接传入工厂函数数组 - }) - - // 8. 处理结果 - let fullText = '' - for await (const textPart of result.textStream) { - fullText += textPart - console.log('Streaming:', textPart) - } - - // 9. 转换最终结果 - const finalResult = { text: fullText, usage: await result.usage } - const transformedResult = await pluginManager.executeSequential('transformResult', finalResult, context) - - // 10. 触发完成事件 - await pluginManager.executeParallel('onRequestEnd', context, transformedResult) - - return transformedResult - } catch (error) { - // 11. 
触发错误事件 - await pluginManager.executeParallel('onError', context, undefined, error as Error) - throw error - } -} - -/** - * 流转换器数组的其他使用方式 - */ -export function demonstrateStreamTransforms() { - console.log('=== 流转换器示例 ===') - - const pluginManager = new PluginManager([ - ContentFilterPlugin, - { - name: 'text-replacer', - transformStream() { - return () => - new TransformStream({ - transform(chunk, controller) { - if (chunk.type === 'text-delta') { - const replaced = chunk.textDelta.replace(/hello/gi, 'hi') - controller.enqueue({ ...chunk, textDelta: replaced }) - } else { - controller.enqueue(chunk) - } - } - }) - } - } - ]) - - // 获取所有流转换器 - const transforms = pluginManager.collectStreamTransforms() - console.log(`收集到 ${transforms.length} 个流转换器`) - - // 可以单独使用每个转换器 - transforms.forEach((factory, index) => { - console.log(`转换器 ${index + 1} 已准备就绪`) - const transform = factory({ stopStream: () => {} }) - console.log('Transform created:', transform) - }) - - return transforms -} - -/** - * 运行所有示例 - */ -export async function runAllExamples() { - console.log('🚀 开始运行所有示例...\n') - - try { - // 1. PluginEnabledAiClient 示例(推荐) - await exampleWithPluginEnabledClient() - console.log('✅ PluginEnabledAiClient 示例完成\n') - - // 2. OpenAI Compatible 示例 - exampleOpenAICompatible() - console.log('✅ OpenAI Compatible 示例完成\n') - - // 3. 动态插件管理示例 - exampleDynamicPlugins() - console.log('✅ 动态插件管理示例完成\n') - - // 4. 流转换器示例 - demonstrateStreamTransforms() - console.log('✅ 流转换器示例完成\n') - - // 5. 低级 API 示例 - // await exampleLowLevelApi() - console.log('✅ 低级 API 示例完成\n') - - console.log('🎉 所有示例运行完成!') - } catch (error) { - console.error('❌ 示例运行失败:', error) - } -} diff --git a/packages/aiCore/src/core/plugins/index.ts b/packages/aiCore/src/core/plugins/index.ts index 6d52d0d513..4c534a6e81 100644 --- a/packages/aiCore/src/core/plugins/index.ts +++ b/packages/aiCore/src/core/plugins/index.ts @@ -21,3 +21,5 @@ export function createContext(providerId: string, modelId: string, originalParam export function definePlugin(plugin: AiPlugin): AiPlugin { return plugin } + +export { createLoggingPlugin } from './built-in/logging' diff --git a/packages/aiCore/src/core/plugins/manager.ts b/packages/aiCore/src/core/plugins/manager.ts index 7a2c0d9369..7d4c64f5a7 100644 --- a/packages/aiCore/src/core/plugins/manager.ts +++ b/packages/aiCore/src/core/plugins/manager.ts @@ -3,7 +3,7 @@ import type { TextStreamPart, ToolSet } from 'ai' import { AiPlugin, AiRequestContext } from './types' /** - * 插件管理器 - 基于 Rollup 钩子分类设计 + * 插件管理器 */ export class PluginManager { private plugins: AiPlugin[] = [] diff --git a/packages/aiCore/src/core/plugins/types.ts b/packages/aiCore/src/core/plugins/types.ts index f1099be14b..f266ff600c 100644 --- a/packages/aiCore/src/core/plugins/types.ts +++ b/packages/aiCore/src/core/plugins/types.ts @@ -1,28 +1,5 @@ import type { TextStreamPart, ToolSet } from 'ai' -/** - * 生命周期阶段定义 - */ -export enum LifecycleStage { - PRE_REQUEST = 'pre-request', // 请求预处理 - REQUEST_EXECUTION = 'execution', // 请求执行 - STREAM_PROCESSING = 'stream', // 流式处理(仅流模式) - POST_RESPONSE = 'post-response', // 响应后处理 - ERROR_HANDLING = 'error' // 错误处理 -} - -/** - * 生命周期上下文 - */ -export interface LifecycleContext { - currentStage: LifecycleStage - startTime: number - stageStartTime: number - completedStages: Set - stageDurations: Map - metadata: Record -} - /** * AI 请求上下文 */ @@ -36,7 +13,7 @@ export interface AiRequestContext { } /** - * 借鉴 Rollup 的钩子分类设计 + * 钩子分类 */ export interface AiPlugin { name: string diff --git 
a/packages/aiCore/src/core/runtime/executor.ts b/packages/aiCore/src/core/runtime/executor.ts new file mode 100644 index 0000000000..d6f1bd03e4 --- /dev/null +++ b/packages/aiCore/src/core/runtime/executor.ts @@ -0,0 +1,192 @@ +/** + * 运行时执行器 + * 专注于插件化的AI调用处理 + */ +import { generateObject, generateText, LanguageModelV1, streamObject, streamText } from 'ai' + +import { type ProviderId, type ProviderSettingsMap } from '../../types' +import { createModel, getProviderInfo } from '../models' +import { type AiPlugin } from '../plugins' +import { isProviderSupported } from '../providers/registry' +import { PluginEngine } from './pluginEngine' +import { type RuntimeConfig } from './types' + +export class RuntimeExecutor { + private pluginClient: PluginEngine + // private options: ProviderSettingsMap[T] + private config: RuntimeConfig + + constructor(config: RuntimeConfig) { + if (!isProviderSupported(config.providerId)) { + throw new Error(`Unsupported provider: ${config.providerId}`) + } + + // 存储options供后续使用 + // this.options = config.options + this.config = config + // 创建插件客户端 + this.pluginClient = new PluginEngine(config.providerId, config.plugins || []) + } + + /** + * 流式文本生成 - 使用modelId自动创建模型 + */ + async streamText( + modelId: string, + params: Omit[0], 'model'> + ): Promise> { + // 1. 使用 createModel 创建模型 + const model = await createModel({ + providerId: this.config.providerId, + modelId, + options: this.config.options + }) + + // 2. 执行插件处理 + return this.pluginClient.executeStreamWithPlugins( + 'streamText', + modelId, + params, + async (finalModelId, transformedParams, streamTransforms) => { + const experimental_transform = + params?.experimental_transform ?? (streamTransforms.length > 0 ? streamTransforms : undefined) + + return await streamText({ + model, + ...transformedParams, + experimental_transform + }) + } + ) + } + + /** + * 流式文本生成 - 直接使用已创建的模型 + */ + async streamTextWithModel( + model: LanguageModelV1, + params: Omit[0], 'model'> + ): Promise> { + return this.pluginClient.executeStreamWithPlugins( + 'streamText', + model.modelId, + params, + async (finalModelId, transformedParams, streamTransforms) => { + const experimental_transform = + params?.experimental_transform ?? (streamTransforms.length > 0 ? 
streamTransforms : undefined) + + return await streamText({ + model, + ...transformedParams, + experimental_transform + }) + } + ) + } + + /** + * 生成文本 + */ + async generateText( + modelId: string, + params: Omit[0], 'model'> + ): Promise> { + const model = await createModel({ + providerId: this.config.providerId, + modelId, + options: this.config.options + }) + + return this.pluginClient.executeWithPlugins( + 'generateText', + modelId, + params, + async (finalModelId, transformedParams) => { + return await generateText({ model, ...transformedParams }) + } + ) + } + + /** + * 生成结构化对象 + */ + async generateObject( + modelId: string, + params: Omit[0], 'model'> + ): Promise> { + const model = await createModel({ + providerId: this.config.providerId, + modelId, + options: this.config.options + }) + + return this.pluginClient.executeWithPlugins( + 'generateObject', + modelId, + params, + async (finalModelId, transformedParams) => { + return await generateObject({ model, ...transformedParams }) + } + ) + } + + /** + * 流式生成结构化对象 + */ + async streamObject( + modelId: string, + params: Omit[0], 'model'> + ): Promise> { + const model = await createModel({ + providerId: this.config.providerId, + modelId, + options: this.config.options + }) + + return this.pluginClient.executeWithPlugins( + 'streamObject', + modelId, + params, + async (finalModelId, transformedParams) => { + return await streamObject({ model, ...transformedParams }) + } + ) + } + /** + * 获取客户端信息 + */ + getClientInfo() { + return getProviderInfo(this.config.providerId) + } + + // === 静态工厂方法 === + + /** + * 创建执行器 - 支持已知provider的类型安全 + */ + static create( + providerId: T, + options: ProviderSettingsMap[T], + plugins?: AiPlugin[] + ): RuntimeExecutor { + return new RuntimeExecutor({ + providerId, + options, + plugins + }) + } + + /** + * 创建OpenAI Compatible执行器 + */ + static createOpenAICompatible( + options: ProviderSettingsMap['openai-compatible'], + plugins: AiPlugin[] = [] + ): RuntimeExecutor<'openai-compatible'> { + return new RuntimeExecutor({ + providerId: 'openai-compatible', + options, + plugins + }) + } +} diff --git a/packages/aiCore/src/core/runtime/index.ts b/packages/aiCore/src/core/runtime/index.ts new file mode 100644 index 0000000000..6a8c9a06ee --- /dev/null +++ b/packages/aiCore/src/core/runtime/index.ts @@ -0,0 +1,107 @@ +/** + * Runtime 模块导出 + * 专注于运行时插件化AI调用处理 + */ + +// 主要的运行时执行器 +export { RuntimeExecutor } from './executor' + +// 导出类型 +export type { + ExecutionOptions, + // 向后兼容的类型别名 + ExecutorConfig, + RuntimeConfig +} from './types' + +// === 便捷工厂函数 === + +import { type ProviderId, type ProviderSettingsMap } from '../../types' +import { type AiPlugin } from '../plugins' +import { RuntimeExecutor } from './executor' + +/** + * 创建运行时执行器 - 支持类型安全的已知provider + */ +export function createExecutor( + providerId: T, + options: ProviderSettingsMap[T], + plugins?: AiPlugin[] +): RuntimeExecutor { + return RuntimeExecutor.create(providerId, options, plugins) +} + +/** + * 创建OpenAI Compatible执行器 + */ +export function createOpenAICompatibleExecutor( + options: ProviderSettingsMap['openai-compatible'], + plugins: AiPlugin[] = [] +): RuntimeExecutor<'openai-compatible'> { + return RuntimeExecutor.createOpenAICompatible(options, plugins) +} + +// === 直接调用API(无需创建executor实例)=== + +/** + * 直接流式文本生成 + */ +export async function streamText( + providerId: T, + options: ProviderSettingsMap[T], + modelId: string, + params: Parameters['streamText']>[1], + plugins?: AiPlugin[] +): Promise['streamText']>> { + const executor = 
createExecutor(providerId, options, plugins) + return executor.streamText(modelId, params) +} + +/** + * 直接生成文本 + */ +export async function generateText( + providerId: T, + options: ProviderSettingsMap[T], + modelId: string, + params: Parameters['generateText']>[1], + plugins?: AiPlugin[] +): Promise['generateText']>> { + const executor = createExecutor(providerId, options, plugins) + return executor.generateText(modelId, params) +} + +/** + * 直接生成结构化对象 + */ +export async function generateObject( + providerId: T, + options: ProviderSettingsMap[T], + modelId: string, + params: Parameters['generateObject']>[1], + plugins?: AiPlugin[] +): Promise['generateObject']>> { + const executor = createExecutor(providerId, options, plugins) + return executor.generateObject(modelId, params) +} + +/** + * 直接流式生成结构化对象 + */ +export async function streamObject( + providerId: T, + options: ProviderSettingsMap[T], + modelId: string, + params: Parameters['streamObject']>[1], + plugins?: AiPlugin[] +): Promise['streamObject']>> { + const executor = createExecutor(providerId, options, plugins) + return executor.streamObject(modelId, params) +} + +// === Agent 功能预留 === +// 未来将在 ../agents/ 文件夹中添加: +// - AgentExecutor.ts +// - WorkflowManager.ts +// - ConversationManager.ts +// 并在此处导出相关API diff --git a/packages/aiCore/src/core/clients/PluginEnabledAiClient.ts b/packages/aiCore/src/core/runtime/pluginEngine.ts similarity index 70% rename from packages/aiCore/src/core/clients/PluginEnabledAiClient.ts rename to packages/aiCore/src/core/runtime/pluginEngine.ts index a72bc7432e..f944522b72 100644 --- a/packages/aiCore/src/core/clients/PluginEnabledAiClient.ts +++ b/packages/aiCore/src/core/runtime/pluginEngine.ts @@ -1,27 +1,4 @@ -/** - * AI Client - Cherry Studio AI Core 的主要客户端接口 - * 默认集成插件系统,提供完整的 AI 调用能力 - * - * ## 使用方式 - * - * ```typescript - * import { AiClient } from '@cherrystudio/ai-core' - * - * // 创建客户端(默认带插件系统) - * const client = AiClient.create('openai', { - * name: 'openai', - * apiKey: process.env.OPENAI_API_KEY - * }, [LoggingPlugin, ContentFilterPlugin]) - * - * // 使用方式与 UniversalAiSdkClient 完全相同 - * const result = await client.generateText('gpt-4', { - * messages: [{ role: 'user', content: 'Hello!' }] - * }) - * ``` - */ - import { type ProviderId, type ProviderSettingsMap } from '../../types' -import { getProviderInfo } from '..' 
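承接上面 runtime/index.ts 暴露的直接调用 API,结构化输出同样可以一步完成。下面是一个最小示意(假设使用 zod 定义 schema;providerId 与模型名仅作演示,其余参数与 AI SDK 的 `generateObject` 保持一致):

```typescript
import { z } from 'zod'

import { generateObject } from '@cherrystudio/ai-core/runtime'

// 示意:通过便捷函数直接生成结构化对象,内部会临时创建一个执行器
const { object } = await generateObject(
  'openai',
  { apiKey: process.env.OPENAI_API_KEY! },
  'gpt-4o',
  {
    schema: z.object({
      title: z.string(),
      tags: z.array(z.string())
    }),
    prompt: 'Generate a title and tags for an article about plugin systems.'
  }
)

console.log(object)
```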
import { type AiPlugin, createContext, PluginManager } from '../plugins' import { isProviderSupported } from '../providers/registry' @@ -29,7 +6,7 @@ import { isProviderSupported } from '../providers/registry' * 插件增强的 AI 客户端 * 专注于插件处理,不暴露用户API */ -export class PluginEnabledAiClient { +export class PluginEngine { private pluginManager: PluginManager constructor( @@ -170,21 +147,6 @@ export class PluginEnabledAiClient { throw error } } - - /** - * 获取客户端信息 - */ - getClientInfo() { - return getProviderInfo(this.providerId) - } - - // /** - // * 获取底层客户端实例(用于高级用法) - // */ - // getBaseClient(): UniversalAiSdkClient { - // return this.baseClient - // } - // === 静态工厂方法 === /** @@ -193,46 +155,23 @@ export class PluginEnabledAiClient { static createOpenAICompatible( config: ProviderSettingsMap['openai-compatible'], plugins: AiPlugin[] = [] - ): PluginEnabledAiClient<'openai-compatible'> { - return new PluginEnabledAiClient('openai-compatible', plugins) + ): PluginEngine<'openai-compatible'> { + return new PluginEngine('openai-compatible', plugins) } /** * 创建标准提供商客户端 */ - static create(providerId: T, plugins?: AiPlugin[]): PluginEnabledAiClient + static create(providerId: T, plugins?: AiPlugin[]): PluginEngine - static create(providerId: string, plugins?: AiPlugin[]): PluginEnabledAiClient<'openai-compatible'> + static create(providerId: string, plugins?: AiPlugin[]): PluginEngine<'openai-compatible'> - static create(providerId: string, plugins: AiPlugin[] = []): PluginEnabledAiClient { + static create(providerId: string, plugins: AiPlugin[] = []): PluginEngine { if (isProviderSupported(providerId)) { - return new PluginEnabledAiClient(providerId as ProviderId, plugins) + return new PluginEngine(providerId as ProviderId, plugins) } else { // 对于未知 provider,使用 openai-compatible - return new PluginEnabledAiClient('openai-compatible', plugins) + return new PluginEngine('openai-compatible', plugins) } } } - -/** - * 创建 AI 客户端的工厂函数(默认带插件系统) - * @deprecated 建议使用 AiExecutor 代替 - */ -export function createClient(providerId: T, plugins?: AiPlugin[]): PluginEnabledAiClient - -export function createClient(providerId: string, plugins?: AiPlugin[]): PluginEnabledAiClient<'openai-compatible'> - -export function createClient(providerId: string, plugins: AiPlugin[] = []): PluginEnabledAiClient { - return PluginEnabledAiClient.create(providerId, plugins) -} - -/** - * 创建 OpenAI Compatible 客户端的便捷函数 - * @deprecated 建议使用 AiExecutor 代替 - */ -export function createCompatibleClient( - config: ProviderSettingsMap['openai-compatible'], - plugins: AiPlugin[] = [] -): PluginEnabledAiClient<'openai-compatible'> { - return PluginEnabledAiClient.createOpenAICompatible(config, plugins) -} diff --git a/packages/aiCore/src/core/execution/types.ts b/packages/aiCore/src/core/runtime/types.ts similarity index 51% rename from packages/aiCore/src/core/execution/types.ts rename to packages/aiCore/src/core/runtime/types.ts index 3a4dd88f1e..e4df242e3a 100644 --- a/packages/aiCore/src/core/execution/types.ts +++ b/packages/aiCore/src/core/runtime/types.ts @@ -1,22 +1,15 @@ /** - * Execution 层类型定义 + * Runtime 层类型定义 */ -import { type ProviderId } from '../../types' +import { type ProviderId, type ProviderSettingsMap } from '../../types' import { type AiPlugin } from '../plugins' /** - * 执行器配置 + * 运行时执行器配置 */ -export interface ExecutorConfig { +export interface RuntimeConfig { providerId: T - plugins?: AiPlugin[] -} - -/** - * 通用执行器配置(用于未知provider) - */ -export interface GenericExecutorConfig { - providerId: string + options: 
ProviderSettingsMap[T] plugins?: AiPlugin[] } @@ -27,3 +20,6 @@ export interface ExecutionOptions { // 未来可以添加执行级别的选项 // 比如:超时设置、重试机制等 } + +// 保留旧类型以保持向后兼容 +export interface ExecutorConfig extends RuntimeConfig {} diff --git a/packages/aiCore/src/index.ts b/packages/aiCore/src/index.ts index 05e00f335e..2c37c8b2e1 100644 --- a/packages/aiCore/src/index.ts +++ b/packages/aiCore/src/index.ts @@ -4,36 +4,21 @@ */ // 导入内部使用的类和函数 -import { createClient } from './core/clients/PluginEnabledAiClient' import { getProviderInfo as factoryGetProviderInfo, getSupportedProviders as factoryGetSupportedProviders -} from './core/creation' -import { AiExecutor } from './core/execution/AiExecutor' +} from './core/models' import { aiProviderRegistry, isProviderSupported } from './core/providers/registry' -import { type ProviderSettingsMap } from './types' +import { createExecutor } from './core/runtime' +import { ProviderId, type ProviderSettingsMap } from './types' // ==================== 主要用户接口 ==================== -// orchestration层 - 面向用户的主要API -export { - AiExecutor, - generateObject, - generateText, - type OrchestrationConfig, - streamObject, - streamText -} from './orchestration' - -// 为了向后兼容,保留AiClient别名(内部使用PluginEnabledAiClient) -export { - PluginEnabledAiClient as AiClient, - createClient, - createCompatibleClient -} from './core/clients/PluginEnabledAiClient' +export { createExecutor, createOpenAICompatibleExecutor } from './core/runtime' // ==================== 插件系统 ==================== export type { AiPlugin, AiRequestContext, HookResult, HookType, PluginManagerConfig } from './core/plugins' export { createContext, definePlugin, PluginManager } from './core/plugins' +export { PluginEngine } from './core/runtime/pluginEngine' // ==================== 低级 API ==================== export { @@ -42,7 +27,7 @@ export { getProviderInfo as getClientInfo, getSupportedProviders, ProviderCreationError -} from './core/creation' +} from './core/models' export { aiProviderRegistry } from './core/providers/registry' // ==================== 类型定义 ==================== @@ -153,13 +138,13 @@ export const AiCore = { name: AI_CORE_NAME, // 创建主要执行器(推荐使用) - create(providerId: string, plugins: any[] = []) { - return AiExecutor.create(providerId, plugins) + create(providerId: ProviderId, options: ProviderSettingsMap[ProviderId], plugins: any[] = []) { + return createExecutor(providerId, options, plugins) }, // 创建底层客户端(高级用法) - createClient(providerId: string, plugins: any[] = []) { - return createClient(providerId, plugins) + createClient(providerId: ProviderId, options: ProviderSettingsMap[ProviderId], plugins: any[] = []) { + return createExecutor(providerId, options, plugins) }, // 获取支持的providers @@ -180,36 +165,19 @@ export const AiCore = { // 推荐使用的执行器创建函数 export const createOpenAIExecutor = (options: ProviderSettingsMap['openai'], plugins?: any[]) => { - return AiExecutor.create('openai', plugins) + return createExecutor('openai', options, plugins) } export const createAnthropicExecutor = (options: ProviderSettingsMap['anthropic'], plugins?: any[]) => { - return AiExecutor.create('anthropic', plugins) + return createExecutor('anthropic', options, plugins) } export const createGoogleExecutor = (options: ProviderSettingsMap['google'], plugins?: any[]) => { - return AiExecutor.create('google', plugins) + return createExecutor('google', options, plugins) } export const createXAIExecutor = (options: ProviderSettingsMap['xai'], plugins?: any[]) => { - return AiExecutor.create('xai', plugins) -} - -// 向后兼容的客户端创建函数 -export 
const createOpenAIClient = (options: ProviderSettingsMap['openai'], plugins?: any[]) => { - return createClient('openai', plugins) -} - -export const createAnthropicClient = (options: ProviderSettingsMap['anthropic'], plugins?: any[]) => { - return createClient('anthropic', plugins) -} - -export const createGoogleClient = (options: ProviderSettingsMap['google'], plugins?: any[]) => { - return createClient('google', plugins) -} - -export const createXAIClient = (options: ProviderSettingsMap['xai'], plugins?: any[]) => { - return createClient('xai', plugins) + return createExecutor('xai', options, plugins) } // ==================== 调试和开发工具 ==================== @@ -223,10 +191,10 @@ export const DevTools = { }, // 测试provider连接 - async testProvider(providerId: string, options: any) { + async testProvider(providerId: ProviderId, options: ProviderSettingsMap[ProviderId]) { try { - const client = createClient(providerId, options) - const info = client.getClientInfo() + const executor = createExecutor(providerId, options) + const info = executor.getClientInfo() return { success: true, providerId: info.id, diff --git a/packages/aiCore/src/orchestration/api.ts b/packages/aiCore/src/orchestration/api.ts deleted file mode 100644 index 4a674fc90b..0000000000 --- a/packages/aiCore/src/orchestration/api.ts +++ /dev/null @@ -1,148 +0,0 @@ -/** - * AI 编排器 - * 编排层 - 面向用户的主要API,串联creation和execution层 - */ -import { - generateObject as aiGenerateObject, - generateText as aiGenerateText, - streamObject as aiStreamObject, - streamText as aiStreamText -} from 'ai' - -import { createModelFromConfig, resolveConfig } from '../core/creation' -import { AiExecutor } from '../core/execution/AiExecutor' -import { - type GenerateObjectParams, - type GenerateTextParams, - type StreamObjectParams, - type StreamTextParams -} from '../types' -import { type OrchestrationConfig } from './types' - -/** - * 流式文本生成 - * 编排:creation层获取model → 执行器执行 - */ -// export async function streamText( -// modelId: string, -// params: StreamTextParams, -// config: OrchestrationConfig -// ): Promise> -export async function streamText( - modelId: string, - params: StreamTextParams, - config: OrchestrationConfig -): Promise> { - // 外部 registry 方式:直接使用用户提供的 model - // if ('model' in configOrParams) { - // return aiStreamText(configOrParams) - // } - - // 1. 使用 creation 层解析配置并创建 model - const resolvedConfig = resolveConfig( - config.providerId, - modelId!, - config.options, - config.plugins || [], - config.middlewares || [] // middlewares - ) - const model = await createModelFromConfig(resolvedConfig) - // const providerOptions = extractProviderOptions(resolvedConfig) - - // 2. 创建执行器并传入 model - const executor = AiExecutor.create(config.providerId, config.plugins) - return executor.streamText(model, params!) -} - -/** - * 生成文本 - * 编排:creation层获取model → 执行器执行 - */ -export async function generateText( - modelId: string, - params: GenerateTextParams, - config: OrchestrationConfig -): Promise> { - // 外部 registry 方式:直接使用用户提供的 model - // if ('model' in configOrParams) { - // return aiGenerateText(configOrParams) - // } - - // 编排方式:1. creation层获取model 2. execution层执行 - // 1. 使用 creation 层解析配置并创建 model - const resolvedConfig = resolveConfig( - config.providerId, - modelId!, - config.options, - config.plugins || [], - config.middlewares || [] // middlewares - ) - const model = await createModelFromConfig(resolvedConfig) - // const providerOptions = extractProviderOptions(resolvedConfig) - - // 2. 
创建执行器并传入 model - const executor = AiExecutor.create(config.providerId, config.plugins) - return executor.generateText(model, params!) -} - -/** - * 生成结构化对象 - * 编排:creation层获取model → 执行器执行 - */ -export async function generateObject( - modelId: string, - params: GenerateObjectParams, - config: OrchestrationConfig -): Promise> { - // 外部 registry 方式:直接使用用户提供的 model - // if ('model' in configOrParams) { - // return aiGenerateObject(configOrParams) - // } - - // 编排方式:1. creation层获取model 2. execution层执行 - // 1. 使用 creation 层解析配置并创建 model - const resolvedConfig = resolveConfig( - config.providerId, - modelId!, - config.options, - config.plugins || [], - config.middlewares || [] // middlewares - ) - const model = await createModelFromConfig(resolvedConfig) - // const providerOptions = extractProviderOptions(resolvedConfig) - - // 2. 创建执行器并传入 model - const executor = AiExecutor.create(config.providerId, config.plugins) - return executor.generateObject(model, params!) -} - -/** - * 流式生成结构化对象 - * 编排:creation层获取model → 执行器执行 - */ -export async function streamObject( - modelId: string, - params: StreamObjectParams, - config: OrchestrationConfig -): Promise> { - // 外部 registry 方式:直接使用用户提供的 model - // if ('model' in configOrParams) { - // return aiStreamObject(configOrParams) - // } - - // 编排方式:1. creation层获取model 2. execution层执行 - // 1. 使用 creation 层解析配置并创建 model - const resolvedConfig = resolveConfig( - config.providerId, - modelId!, - config.options, - config.plugins || [], - config.middlewares || [] // middlewares - ) - const model = await createModelFromConfig(resolvedConfig) - // const providerOptions = extractProviderOptions(resolvedConfig) - - // 2. 创建执行器并传入 model - const executor = AiExecutor.create(config.providerId, config.plugins) - return executor.streamObject(model, params!) -} diff --git a/packages/aiCore/src/orchestration/index.ts b/packages/aiCore/src/orchestration/index.ts deleted file mode 100644 index 8ccd117179..0000000000 --- a/packages/aiCore/src/orchestration/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -/** - * 编排层导出 - * 面向用户的编排层接口 - */ - -// 主要编排函数 -export { generateObject, generateText, streamObject, streamText } from './api' - -// 类型定义 -export type { OrchestrationConfig } from './types' - -// 为了向后兼容,重新导出AiExecutor -export { AiExecutor } from '../core/execution/AiExecutor' diff --git a/packages/aiCore/src/orchestration/types.ts b/packages/aiCore/src/orchestration/types.ts deleted file mode 100644 index f93f89c6af..0000000000 --- a/packages/aiCore/src/orchestration/types.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Orchestration 层类型定义 - * 面向用户的编排层接口 - */ -import { LanguageModelV1Middleware } from 'ai' - -import { type AiPlugin } from '../core/plugins' -import { type ProviderId, type ProviderSettingsMap } from '../types' - -/** - * 编排配置 - */ -export interface OrchestrationConfig { - providerId: T - options: ProviderSettingsMap[T] - plugins?: AiPlugin[] - middlewares?: LanguageModelV1Middleware[] -} - -/** - * 编排选项 - */ -export interface OrchestrationOptions { - // 未来可以添加编排级别的选项 - // 比如:重试机制、超时设置、日志级别等 -}