From 3b123863b531bf34ab5f882e31470c97569a0877 Mon Sep 17 00:00:00 2001 From: alickreborn0 Date: Sun, 20 Jul 2025 06:53:35 +0000 Subject: [PATCH] feat: Support LLM Tracing by Alibaba Cloud EDAS product (#7895) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add tracing modules * Initial commit * fix: problem * fix: update trace web * fix: trace view * fix: trace view * fix: fix some problem * fix: knowledge and mcp trace * feat: save trace to user home dir * feat: open trace with electron browser window * fix: root trace outputs * feat: trace internationalization and add trace icon * feat: add trace title * feat: update * package.json添加windows运行script * feat: update window title * fix: mcp trace param * fix: error show * fix: listTool result * fix: merge error * feat: add stream usage and response * feat: change trace stream * fix: change stream adapter * fix: span detail show problem * fix: process show by time * fix: stream outputs * fix: merge problem * fix: stream outputs * fix: output text * fix: EDAS support text * fix: change trace footer style * fix: topicId is loaded multiple times * fix: span reload problem & attribute with cache * fix: refresh optimization * Change Powered by text. * resolve upstream conflicts * fix: build-time type exception * fix: exceptions not used when building * fix: recend no trace * fix: resend trace list * fix: delete temporary files * feat: trace for resend * fix: trace for resend message with edit * fix: directory structure and construction method of mcp-trace * fix: change CRLF to LF * fix: add function call outputs * Revert "fix: change CRLF to LF" * fix: reorganize multi-model display * fix: append model trace binding topic * fix: some problems * fix: code optimization * fix: delete async * fix: UI optimization * fix: sort import --------- Co-authored-by: 崔顺发 Co-authored-by: 管鑫荣 --- .gitignore | 1 + electron.vite.config.ts | 22 +- package.json | 16 +- .../mcp-trace/trace-core/core/spanConvert.ts | 26 + .../mcp-trace/trace-core/core/traceCache.ts | 7 + .../mcp-trace/trace-core/core/traceMethod.ts | 163 +++++ .../trace-core/exporters/FuncSpanExporter.ts | 26 + packages/mcp-trace/trace-core/index.ts | 8 + .../processors/CacheSpanProcessor.ts | 40 ++ .../processors/EmitterSpanProcessor.ts | 28 + .../processors/FuncSpanProcessor.ts | 42 ++ packages/mcp-trace/trace-core/types/config.ts | 67 ++ packages/mcp-trace/trace-node/nodeTracer.ts | 46 ++ .../trace-web/TopicContextManager.ts | 75 +++ packages/mcp-trace/trace-web/index.ts | 3 + .../trace-web/traceContextPromise.ts | 99 +++ packages/mcp-trace/trace-web/webTracer.ts | 46 ++ packages/shared/IpcChannel.ts | 17 +- src/main/index.ts | 3 + src/main/ipc.ts | 158 +++-- src/main/knowledge/embeddings/Embeddings.ts | 6 + src/main/mcpServers/memory.ts | 11 + src/main/services/FileStorage.ts | 3 + src/main/services/FileSystemService.ts | 2 + src/main/services/KnowledgeService.ts | 20 +- src/main/services/MCPService.ts | 82 +-- src/main/services/NodeTraceService.ts | 122 ++++ src/main/services/SpanCacheService.ts | 409 +++++++++++++ src/preload/index.ts | 58 +- .../aiCore/clients/openai/OpenAIApiClient.ts | 3 +- src/renderer/src/aiCore/index.ts | 24 +- .../src/aiCore/middleware/composer.ts | 26 +- .../middleware/core/McpToolChunkMiddleware.ts | 18 +- src/renderer/src/aiCore/middleware/schemas.ts | 1 + src/renderer/src/databases/index.ts | 1 - .../src/hooks/useMessageOperations.ts | 20 +- src/renderer/src/i18n/locales/en-us.json | 125 +++- 
src/renderer/src/i18n/locales/ja-jp.json | 125 +++- src/renderer/src/i18n/locales/ru-ru.json | 125 +++- src/renderer/src/i18n/locales/zh-cn.json | 123 ++++ src/renderer/src/i18n/locales/zh-tw.json | 125 +++- src/renderer/src/i18n/translate/el-gr.json | 1 + src/renderer/src/i18n/translate/es-es.json | 1 + src/renderer/src/i18n/translate/fr-fr.json | 1 + src/renderer/src/i18n/translate/pt-pt.json | 1 + src/renderer/src/init.ts | 6 + .../src/pages/home/Inputbar/Inputbar.tsx | 11 +- .../pages/home/Messages/MessageMenubar.tsx | 31 +- .../settings/DataSettings/DataSettings.tsx | 1 + .../src/providers/WebSearchProvider/index.ts | 29 +- src/renderer/src/services/ApiService.ts | 109 +++- src/renderer/src/services/KnowledgeService.ts | 100 ++- .../src/services/SpanManagerService.ts | 358 +++++++++++ src/renderer/src/services/WebSearchService.ts | 46 +- src/renderer/src/services/WebTraceService.ts | 34 ++ src/renderer/src/store/thunk/messageThunk.ts | 27 +- .../trace/dataHandler/AsyncIterableHandler.ts | 98 +++ .../trace/dataHandler/CommonResultHandler.ts | 77 +++ .../trace/dataHandler/MessageStreamHandler.ts | 70 +++ .../src/trace/dataHandler/StreamHandler.ts | 110 ++++ src/renderer/src/trace/pages/Component.tsx | 164 +++++ src/renderer/src/trace/pages/ProgressBar.tsx | 33 + src/renderer/src/trace/pages/SpanDetail.tsx | 171 ++++++ src/renderer/src/trace/pages/Trace.css | 234 +++++++ src/renderer/src/trace/pages/TraceModel.tsx | 7 + src/renderer/src/trace/pages/TraceTree.tsx | 133 ++++ src/renderer/src/trace/pages/index.tsx | 200 ++++++ src/renderer/src/trace/traceWindow.tsx | 66 ++ .../src/trace/types/ModelSpanEntity.ts | 80 +++ src/renderer/src/types/index.ts | 3 + src/renderer/src/types/newMessage.ts | 3 + src/renderer/src/utils/mcp-tools.ts | 33 +- src/renderer/src/utils/queue.ts | 7 +- .../action/components/ActionGeneral.tsx | 4 + src/renderer/traceWindow.html | 40 ++ tsconfig.json | 8 +- tsconfig.node.json | 13 +- tsconfig.web.json | 11 +- yarn.lock | 578 +++++++++++++++++- 79 files changed, 4982 insertions(+), 239 deletions(-) create mode 100644 packages/mcp-trace/trace-core/core/spanConvert.ts create mode 100644 packages/mcp-trace/trace-core/core/traceCache.ts create mode 100644 packages/mcp-trace/trace-core/core/traceMethod.ts create mode 100644 packages/mcp-trace/trace-core/exporters/FuncSpanExporter.ts create mode 100644 packages/mcp-trace/trace-core/index.ts create mode 100644 packages/mcp-trace/trace-core/processors/CacheSpanProcessor.ts create mode 100644 packages/mcp-trace/trace-core/processors/EmitterSpanProcessor.ts create mode 100644 packages/mcp-trace/trace-core/processors/FuncSpanProcessor.ts create mode 100644 packages/mcp-trace/trace-core/types/config.ts create mode 100644 packages/mcp-trace/trace-node/nodeTracer.ts create mode 100644 packages/mcp-trace/trace-web/TopicContextManager.ts create mode 100644 packages/mcp-trace/trace-web/index.ts create mode 100644 packages/mcp-trace/trace-web/traceContextPromise.ts create mode 100644 packages/mcp-trace/trace-web/webTracer.ts create mode 100644 src/main/services/NodeTraceService.ts create mode 100644 src/main/services/SpanCacheService.ts create mode 100644 src/renderer/src/services/SpanManagerService.ts create mode 100644 src/renderer/src/services/WebTraceService.ts create mode 100644 src/renderer/src/trace/dataHandler/AsyncIterableHandler.ts create mode 100644 src/renderer/src/trace/dataHandler/CommonResultHandler.ts create mode 100644 src/renderer/src/trace/dataHandler/MessageStreamHandler.ts create mode 100644 
src/renderer/src/trace/dataHandler/StreamHandler.ts create mode 100644 src/renderer/src/trace/pages/Component.tsx create mode 100644 src/renderer/src/trace/pages/ProgressBar.tsx create mode 100644 src/renderer/src/trace/pages/SpanDetail.tsx create mode 100644 src/renderer/src/trace/pages/Trace.css create mode 100644 src/renderer/src/trace/pages/TraceModel.tsx create mode 100644 src/renderer/src/trace/pages/TraceTree.tsx create mode 100644 src/renderer/src/trace/pages/index.tsx create mode 100644 src/renderer/src/trace/traceWindow.tsx create mode 100644 src/renderer/src/trace/types/ModelSpanEntity.ts create mode 100644 src/renderer/traceWindow.html diff --git a/.gitignore b/.gitignore index 7f15a6637..29e36a9fc 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,7 @@ Thumbs.db node_modules dist out +mcp_server stats.html # ENV diff --git a/electron.vite.config.ts b/electron.vite.config.ts index b2fe3449b..bf64d7199 100644 --- a/electron.vite.config.ts +++ b/electron.vite.config.ts @@ -19,7 +19,9 @@ export default defineConfig({ '@main': resolve('src/main'), '@types': resolve('src/renderer/src/types'), '@shared': resolve('packages/shared'), - '@logger': resolve('src/main/services/LoggerService') + '@logger': resolve('src/main/services/LoggerService'), + '@mcp-trace/trace-core': resolve('packages/mcp-trace/trace-core'), + '@mcp-trace/trace-node': resolve('packages/mcp-trace/trace-node') } }, build: { @@ -40,10 +42,16 @@ export default defineConfig({ } }, preload: { - plugins: [externalizeDepsPlugin()], + plugins: [ + react({ + tsDecorators: true + }), + externalizeDepsPlugin() + ], resolve: { alias: { - '@shared': resolve('packages/shared') + '@shared': resolve('packages/shared'), + '@mcp-trace/trace-core': resolve('packages/mcp-trace/trace-core') } }, build: { @@ -53,6 +61,7 @@ export default defineConfig({ renderer: { plugins: [ react({ + tsDecorators: true, plugins: [ [ '@swc/plugin-styled-components', @@ -72,7 +81,9 @@ export default defineConfig({ alias: { '@renderer': resolve('src/renderer/src'), '@shared': resolve('packages/shared'), - '@logger': resolve('src/renderer/src/services/LoggerService') + '@logger': resolve('src/renderer/src/services/LoggerService'), + '@mcp-trace/trace-core': resolve('packages/mcp-trace/trace-core'), + '@mcp-trace/trace-web': resolve('packages/mcp-trace/trace-web') } }, optimizeDeps: { @@ -91,7 +102,8 @@ export default defineConfig({ index: resolve(__dirname, 'src/renderer/index.html'), miniWindow: resolve(__dirname, 'src/renderer/miniWindow.html'), selectionToolbar: resolve(__dirname, 'src/renderer/selectionToolbar.html'), - selectionAction: resolve(__dirname, 'src/renderer/selectionAction.html') + selectionAction: resolve(__dirname, 'src/renderer/selectionAction.html'), + traceWindow: resolve(__dirname, 'src/renderer/traceWindow.html') } } }, diff --git a/package.json b/package.json index c13a65a08..af52be259 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,10 @@ ], "installConfig": { "hoistingLimits": [ - "packages/database" + "packages/database", + "packages/mcp-trace/trace-core", + "packages/mcp-trace/trace-node", + "packages/mcp-trace/trace-web" ] } }, @@ -38,6 +41,7 @@ "publish": "yarn build:check && yarn release patch push", "pulish:artifacts": "cd packages/artifacts && npm publish && cd -", "generate:agents": "yarn workspace @cherry-studio/database agents", + "generate:icons": "electron-icon-builder --input=./build/logo.png --output=build", "analyze:renderer": "VISUALIZER_RENDERER=true yarn build", "analyze:main": "VISUALIZER_MAIN=true 
yarn build", "typecheck": "npm run typecheck:node && npm run typecheck:web", @@ -74,6 +78,7 @@ "notion-helper": "^1.3.22", "os-proxy-config": "^1.1.2", "pdfjs-dist": "4.10.38", + "react-json-view": "^1.21.3", "selection-hook": "^1.0.7", "turndown": "7.2.0" }, @@ -114,6 +119,12 @@ "@modelcontextprotocol/sdk": "^1.12.3", "@mozilla/readability": "^0.6.0", "@notionhq/client": "^2.2.15", + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/core": "2.0.0", + "@opentelemetry/exporter-trace-otlp-http": "^0.200.0", + "@opentelemetry/sdk-trace-base": "^2.0.0", + "@opentelemetry/sdk-trace-node": "^2.0.0", + "@opentelemetry/sdk-trace-web": "^2.0.0", "@playwright/test": "^1.52.0", "@reduxjs/toolkit": "^2.2.5", "@shikijs/markdown-it": "^3.7.0", @@ -195,7 +206,7 @@ "mime": "^4.0.4", "motion": "^12.10.5", "npx-scope-finder": "^1.2.0", - "officeparser": "^4.1.1", + "officeparser": "^4.2.0", "openai": "patch:openai@npm%3A5.1.0#~/.yarn/patches/openai-npm-5.1.0-0e7b3ccb07.patch", "p-queue": "^8.1.0", "playwright": "^1.52.0", @@ -216,6 +227,7 @@ "react-window": "^1.8.11", "redux": "^5.0.1", "redux-persist": "^6.0.0", + "reflect-metadata": "0.2.2", "rehype-katex": "^7.0.1", "rehype-mathjax": "^7.1.0", "rehype-raw": "^7.0.0", diff --git a/packages/mcp-trace/trace-core/core/spanConvert.ts b/packages/mcp-trace/trace-core/core/spanConvert.ts new file mode 100644 index 000000000..a226f5d10 --- /dev/null +++ b/packages/mcp-trace/trace-core/core/spanConvert.ts @@ -0,0 +1,26 @@ +import { SpanKind, SpanStatusCode } from '@opentelemetry/api' +import { ReadableSpan } from '@opentelemetry/sdk-trace-base' + +import { SpanEntity } from '../types/config' + +/** + * convert ReadableSpan to SpanEntity + * @param span ReadableSpan + * @returns SpanEntity + */ +export function convertSpanToSpanEntity(span: ReadableSpan): SpanEntity { + return { + id: span.spanContext().spanId, + traceId: span.spanContext().traceId, + parentId: span.parentSpanContext?.spanId || '', + name: span.name, + startTime: span.startTime[0] * 1e3 + Math.floor(span.startTime[1] / 1e6), // 转为毫秒 + endTime: span.endTime ? 
span.endTime[0] * 1e3 + Math.floor(span.endTime[1] / 1e6) : undefined, // 转为毫秒 + attributes: { ...span.attributes }, + status: SpanStatusCode[span.status.code], + events: span.events, + kind: SpanKind[span.kind], + links: span.links, + modelName: span.attributes?.modelName + } as SpanEntity +} diff --git a/packages/mcp-trace/trace-core/core/traceCache.ts b/packages/mcp-trace/trace-core/core/traceCache.ts new file mode 100644 index 000000000..cc5ba795f --- /dev/null +++ b/packages/mcp-trace/trace-core/core/traceCache.ts @@ -0,0 +1,7 @@ +import { ReadableSpan } from '@opentelemetry/sdk-trace-base' + +export interface TraceCache { + createSpan: (span: ReadableSpan) => void + endSpan: (span: ReadableSpan) => void + clear: () => void +} diff --git a/packages/mcp-trace/trace-core/core/traceMethod.ts b/packages/mcp-trace/trace-core/core/traceMethod.ts new file mode 100644 index 000000000..6349df024 --- /dev/null +++ b/packages/mcp-trace/trace-core/core/traceMethod.ts @@ -0,0 +1,163 @@ +import 'reflect-metadata' + +import { SpanStatusCode, trace } from '@opentelemetry/api' +import { context as traceContext } from '@opentelemetry/api' + +import { defaultConfig } from '../types/config' + +export interface SpanDecoratorOptions { + spanName?: string + traceName?: string + tag?: string +} + +export function TraceMethod(traced: SpanDecoratorOptions) { + return function (target: any, propertyKey?: any, descriptor?: PropertyDescriptor | undefined) { + // 兼容静态方法装饰器只传2个参数的情况 + if (!descriptor) { + descriptor = Object.getOwnPropertyDescriptor(target, propertyKey) + } + if (!descriptor || typeof descriptor.value !== 'function') { + throw new Error('TraceMethod can only be applied to methods.') + } + + const originalMethod = descriptor.value + const traceName = traced.traceName || defaultConfig.defaultTracerName || 'default' + const tracer = trace.getTracer(traceName) + + descriptor.value = function (...args: any[]) { + const name = traced.spanName || propertyKey + return tracer.startActiveSpan(name, async (span) => { + try { + span.setAttribute('inputs', convertToString(args)) + span.setAttribute('tags', traced.tag || '') + const result = await originalMethod.apply(this, args) + span.setAttribute('outputs', convertToString(result)) + span.setStatus({ code: SpanStatusCode.OK }) + return result + } catch (error) { + const err = error instanceof Error ? error : new Error(String(error)) + span.setStatus({ + code: SpanStatusCode.ERROR, + message: err.message + }) + span.recordException(err) + throw error + } finally { + span.end() + } + }) + } + return descriptor + } +} + +export function TraceProperty(traced: SpanDecoratorOptions) { + return (target: any, propertyKey: string, descriptor?: PropertyDescriptor) => { + // 处理箭头函数类属性 + const traceName = traced.traceName || defaultConfig.defaultTracerName || 'default' + const tracer = trace.getTracer(traceName) + const name = traced.spanName || propertyKey + + if (!descriptor) { + const originalValue = target[propertyKey] + + Object.defineProperty(target, propertyKey, { + value: async function (...args: any[]) { + const span = tracer.startSpan(name) + try { + span.setAttribute('inputs', convertToString(args)) + span.setAttribute('tags', traced.tag || '') + const result = await originalValue.apply(this, args) + span.setAttribute('outputs', convertToString(result)) + return result + } catch (error) { + const err = error instanceof Error ? 
error : new Error(String(error))
+            span.recordException(err)
+            span.setStatus({ code: SpanStatusCode.ERROR, message: err.message })
+            throw error
+          } finally {
+            span.end()
+          }
+        },
+        configurable: true,
+        writable: true
+      })
+      return
+    }
+
+    // standard method decorator path
+    const originalMethod = descriptor.value
+
+    descriptor.value = async function (...args: any[]) {
+      const span = tracer.startSpan(name)
+      try {
+        span.setAttribute('inputs', convertToString(args))
+        span.setAttribute('tags', traced.tag || '')
+        const result = await originalMethod.apply(this, args)
+        span.setAttribute('outputs', convertToString(result))
+        return result
+      } catch (error) {
+        const err = error instanceof Error ? error : new Error(String(error))
+        span.recordException(err)
+        span.setStatus({ code: SpanStatusCode.ERROR, message: err.message })
+        throw error
+      } finally {
+        span.end()
+      }
+    }
+  }
+}
+
+export function withSpanFunc<F extends (...args: any[]) => any>(
+  name: string,
+  tag: string,
+  fn: F,
+  args: Parameters<F>
+): ReturnType<F> {
+  const traceName = defaultConfig.defaultTracerName || 'default'
+  const tracer = trace.getTracer(traceName)
+  const _name = name || fn.name || 'anonymousFunction'
+  return traceContext.with(traceContext.active(), () =>
+    tracer.startActiveSpan(
+      _name,
+      {
+        attributes: {
+          tags: tag || '',
+          inputs: JSON.stringify(args)
+        }
+      },
+      (span) => {
+        // invoke the original function here
+        const result = fn(...args)
+        if (result instanceof Promise) {
+          return result
+            .then((res) => {
+              span.setStatus({ code: SpanStatusCode.OK })
+              span.setAttribute('outputs', convertToString(res))
+              return res
+            })
+            .catch((error) => {
+              const err = error instanceof Error ? error : new Error(String(error))
+              span.setStatus({ code: SpanStatusCode.ERROR, message: err.message })
+              span.recordException(err)
+              throw error
+            })
+            .finally(() => span.end())
+        } else {
+          span.setStatus({ code: SpanStatusCode.OK })
+          span.setAttribute('outputs', convertToString(result))
+          span.end()
+        }
+        return result
+      }
+    )
+  )
+}
+
+function convertToString(args: any | any[]): string | boolean | number {
+  if (typeof args === 'string' || typeof args === 'boolean' || typeof args === 'number') {
+    return args
+  }
+  return JSON.stringify(args)
+}
diff --git a/packages/mcp-trace/trace-core/exporters/FuncSpanExporter.ts b/packages/mcp-trace/trace-core/exporters/FuncSpanExporter.ts
new file mode 100644
index 000000000..48d769daf
--- /dev/null
+++ b/packages/mcp-trace/trace-core/exporters/FuncSpanExporter.ts
@@ -0,0 +1,26 @@
+import { ExportResult, ExportResultCode } from '@opentelemetry/core'
+import { ReadableSpan, SpanExporter } from '@opentelemetry/sdk-trace-base'
+
+export type SaveFunction = (spans: ReadableSpan[]) => Promise<void>
+
+export class FunctionSpanExporter implements SpanExporter {
+  private exportFunction: SaveFunction
+
+  constructor(fn: SaveFunction) {
+    this.exportFunction = fn
+  }
+
+  shutdown(): Promise<void> {
+    return Promise.resolve()
+  }
+
+  export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void): void {
+    this.exportFunction(spans)
+      .then(() => {
+        resultCallback({ code: ExportResultCode.SUCCESS })
+      })
+      .catch((error) => {
+        resultCallback({ code: ExportResultCode.FAILED, error: error })
+      })
+  }
+}
diff --git a/packages/mcp-trace/trace-core/index.ts b/packages/mcp-trace/trace-core/index.ts
new file mode 100644
index 000000000..e9c013041
--- /dev/null
+++ b/packages/mcp-trace/trace-core/index.ts
@@ -0,0 +1,8 @@
+export * from './core/spanConvert'
+export * from './core/traceCache'
+export * from './core/traceMethod'
+export * from
'./exporters/FuncSpanExporter' +export * from './processors/CacheSpanProcessor' +export * from './processors/EmitterSpanProcessor' +export * from './processors/FuncSpanProcessor' +export * from './types/config' diff --git a/packages/mcp-trace/trace-core/processors/CacheSpanProcessor.ts b/packages/mcp-trace/trace-core/processors/CacheSpanProcessor.ts new file mode 100644 index 000000000..b20a61de0 --- /dev/null +++ b/packages/mcp-trace/trace-core/processors/CacheSpanProcessor.ts @@ -0,0 +1,40 @@ +import { Context, trace } from '@opentelemetry/api' +import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base' + +import { TraceCache } from '../core/traceCache' + +export class CacheBatchSpanProcessor extends BatchSpanProcessor { + private cache: TraceCache + + constructor(_exporter: SpanExporter, cache: TraceCache, config?: BufferConfig) { + super(_exporter, config) + this.cache = cache + } + + override onEnd(span: ReadableSpan): void { + super.onEnd(span) + this.cache.endSpan(span) + } + + override onStart(span: Span, parentContext: Context): void { + super.onStart(span, parentContext) + this.cache.createSpan({ + name: span.name, + kind: span.kind, + spanContext: () => span.spanContext(), + parentSpanContext: trace.getSpanContext(parentContext), + startTime: span.startTime, + status: span.status, + attributes: span.attributes, + links: span.links, + events: span.events, + duration: span.duration, + ended: span.ended, + resource: span.resource, + instrumentationScope: span.instrumentationScope, + droppedAttributesCount: span.droppedAttributesCount, + droppedEventsCount: span.droppedEventsCount, + droppedLinksCount: span.droppedLinksCount + } as ReadableSpan) + } +} diff --git a/packages/mcp-trace/trace-core/processors/EmitterSpanProcessor.ts b/packages/mcp-trace/trace-core/processors/EmitterSpanProcessor.ts new file mode 100644 index 000000000..41015b208 --- /dev/null +++ b/packages/mcp-trace/trace-core/processors/EmitterSpanProcessor.ts @@ -0,0 +1,28 @@ +import { Context } from '@opentelemetry/api' +import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base' +import { EventEmitter } from 'stream' + +import { convertSpanToSpanEntity } from '../core/spanConvert' + +export const TRACE_DATA_EVENT = 'trace_data_event' +export const ON_START = 'start' +export const ON_END = 'end' + +export class EmitterSpanProcessor extends BatchSpanProcessor { + private emitter: EventEmitter + + constructor(_exporter: SpanExporter, emitter: NodeJS.EventEmitter, config?: BufferConfig) { + super(_exporter, config) + this.emitter = emitter + } + + override onEnd(span: ReadableSpan): void { + super.onEnd(span) + this.emitter.emit(TRACE_DATA_EVENT, ON_END, convertSpanToSpanEntity(span)) + } + + override onStart(span: Span, parentContext: Context): void { + super.onStart(span, parentContext) + this.emitter.emit(TRACE_DATA_EVENT, ON_START, convertSpanToSpanEntity(span)) + } +} diff --git a/packages/mcp-trace/trace-core/processors/FuncSpanProcessor.ts b/packages/mcp-trace/trace-core/processors/FuncSpanProcessor.ts new file mode 100644 index 000000000..8a7281d95 --- /dev/null +++ b/packages/mcp-trace/trace-core/processors/FuncSpanProcessor.ts @@ -0,0 +1,42 @@ +import { Context, trace } from '@opentelemetry/api' +import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base' + +export type SpanFunction = (span: ReadableSpan) => void + +export class 
FunctionSpanProcessor extends BatchSpanProcessor { + private start: SpanFunction + private end: SpanFunction + + constructor(_exporter: SpanExporter, start: SpanFunction, end: SpanFunction, config?: BufferConfig) { + super(_exporter, config) + this.start = start + this.end = end + } + + override onEnd(span: ReadableSpan): void { + super.onEnd(span) + this.end(span) + } + + override onStart(span: Span, parentContext: Context): void { + super.onStart(span, parentContext) + this.start({ + name: span.name, + kind: span.kind, + spanContext: () => span.spanContext(), + parentSpanContext: trace.getSpanContext(parentContext), + startTime: span.startTime, + status: span.status, + attributes: span.attributes, + links: span.links, + events: span.events, + duration: span.duration, + ended: span.ended, + resource: span.resource, + instrumentationScope: span.instrumentationScope, + droppedAttributesCount: span.droppedAttributesCount, + droppedEventsCount: span.droppedEventsCount, + droppedLinksCount: span.droppedLinksCount + } as ReadableSpan) + } +} diff --git a/packages/mcp-trace/trace-core/types/config.ts b/packages/mcp-trace/trace-core/types/config.ts new file mode 100644 index 000000000..abb98ec65 --- /dev/null +++ b/packages/mcp-trace/trace-core/types/config.ts @@ -0,0 +1,67 @@ +import { Link } from '@opentelemetry/api' +import { TimedEvent } from '@opentelemetry/sdk-trace-base' + +export type AttributeValue = + | string + | number + | boolean + | Array + | Array + | Array + | { [key: string]: string | number | boolean } + | Array + +export type Attributes = { + [key: string]: AttributeValue +} + +export interface TelemetryConfig { + serviceName: string + endpoint?: string + headers?: Record + defaultTracerName?: string + isDevModel?: boolean +} + +export interface TraceConfig extends TelemetryConfig { + maxAttributesPerSpan?: number +} + +export interface TraceEntity { + id: string + name: string +} + +export interface TokenUsage { + prompt_tokens: number + completion_tokens: number + total_tokens: number + prompt_tokens_details?: { + [key: string]: number + } +} + +export interface SpanEntity { + id: string + name: string + parentId: string + traceId: string + status: string + kind: string + attributes: Attributes | undefined + isEnd: boolean + events: TimedEvent[] | undefined + startTime: number + endTime: number | null + links: Link[] | undefined + topicId?: string + usage?: TokenUsage + modelName?: string +} + +export const defaultConfig: TelemetryConfig = { + serviceName: 'default', + headers: {}, + defaultTracerName: 'default', + isDevModel: true +} diff --git a/packages/mcp-trace/trace-node/nodeTracer.ts b/packages/mcp-trace/trace-node/nodeTracer.ts new file mode 100644 index 000000000..aee952501 --- /dev/null +++ b/packages/mcp-trace/trace-node/nodeTracer.ts @@ -0,0 +1,46 @@ +import { trace, Tracer } from '@opentelemetry/api' +import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks' +import { W3CTraceContextPropagator } from '@opentelemetry/core' +import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http' +import { BatchSpanProcessor, ConsoleSpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base' +import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node' + +import { defaultConfig, TraceConfig } from '../trace-core/types/config' + +export class NodeTracer { + private static provider: NodeTracerProvider + private static defaultTracer: Tracer + private static spanProcessor: SpanProcessor + + static init(config?: TraceConfig, 
spanProcessor?: SpanProcessor) { + if (config) { + defaultConfig.serviceName = config.serviceName || defaultConfig.serviceName + defaultConfig.endpoint = config.endpoint || defaultConfig.endpoint + defaultConfig.headers = config.headers || defaultConfig.headers + defaultConfig.defaultTracerName = config.defaultTracerName || defaultConfig.defaultTracerName + } + this.spanProcessor = spanProcessor || new BatchSpanProcessor(this.getExporter()) + this.provider = new NodeTracerProvider({ + spanProcessors: [this.spanProcessor] + }) + this.provider.register({ + propagator: new W3CTraceContextPropagator(), + contextManager: new AsyncLocalStorageContextManager() + }) + this.defaultTracer = trace.getTracer(config?.defaultTracerName || 'default') + } + + private static getExporter(config?: TraceConfig) { + if (config && config.endpoint) { + return new OTLPTraceExporter({ + url: `${config.endpoint}/v1/traces`, + headers: config.headers || undefined + }) + } + return new ConsoleSpanExporter() + } + + public static getTracer() { + return this.defaultTracer + } +} diff --git a/packages/mcp-trace/trace-web/TopicContextManager.ts b/packages/mcp-trace/trace-web/TopicContextManager.ts new file mode 100644 index 000000000..a2688fc02 --- /dev/null +++ b/packages/mcp-trace/trace-web/TopicContextManager.ts @@ -0,0 +1,75 @@ +import { Context, ContextManager, ROOT_CONTEXT } from '@opentelemetry/api' + +export class TopicContextManager implements ContextManager { + private topicContextStack: Map + private _topicContexts: Map + + constructor() { + // topicId -> context + this.topicContextStack = new Map() + this._topicContexts = new Map() + } + + // 绑定一个context到topicId + startContextForTopic(topicId, context: Context) { + const currentContext = this.getCurrentContext(topicId) + this._topicContexts.set(topicId, context) + if (!this.topicContextStack.has(topicId) && !this.topicContextStack.get(topicId)) { + this.topicContextStack.set(topicId, [currentContext]) + } else { + this.topicContextStack.get(topicId)?.push(currentContext) + } + } + + // 获取topicId对应的context + getContextForTopic(topicId) { + return this.getCurrentContext(topicId) + } + + endContextForTopic(topicId) { + const context = this.getHistoryContext(topicId) + this._topicContexts.set(topicId, context) + } + + cleanContextForTopic(topicId) { + this.topicContextStack.delete(topicId) + this._topicContexts.delete(topicId) + } + + private getHistoryContext(topicId): Context { + const hasContext = this.topicContextStack.has(topicId) && this.topicContextStack.get(topicId) + const context = hasContext && hasContext.length > 0 && hasContext.pop() + return context ? 
context : ROOT_CONTEXT
+  }
+
+  private getCurrentContext(topicId): Context {
+    const hasContext = this._topicContexts.has(topicId) && this._topicContexts.get(topicId)
+    return hasContext || ROOT_CONTEXT
+  }
+
+  // OpenTelemetry ContextManager interface implementation
+  active() {
+    // a global active context is not supported; the context must be passed explicitly
+    return ROOT_CONTEXT
+  }
+
+  with(_, fn, thisArg, ...args) {
+    // invoke fn directly without switching the global active context
+    return fn.apply(thisArg, args)
+  }
+
+  bind(target, context) {
+    // explicit binding
+    target.__ot_context = context
+    return target
+  }
+
+  enable() {
+    return this
+  }
+
+  disable() {
+    this._topicContexts.clear()
+    return this
+  }
+}
diff --git a/packages/mcp-trace/trace-web/index.ts b/packages/mcp-trace/trace-web/index.ts
new file mode 100644
index 000000000..bb3073241
--- /dev/null
+++ b/packages/mcp-trace/trace-web/index.ts
@@ -0,0 +1,3 @@
+export * from './TopicContextManager'
+export * from './traceContextPromise'
+export * from './webTracer'
diff --git a/packages/mcp-trace/trace-web/traceContextPromise.ts b/packages/mcp-trace/trace-web/traceContextPromise.ts
new file mode 100644
index 000000000..ee99722b7
--- /dev/null
+++ b/packages/mcp-trace/trace-web/traceContextPromise.ts
@@ -0,0 +1,99 @@
+import { Context, context } from '@opentelemetry/api'
+
+const originalPromise = globalThis.Promise
+
+class TraceContextPromise<T> extends Promise<T> {
+  _context: Context
+
+  constructor(
+    executor: (resolve: (value: T | PromiseLike<T>) => void, reject: (reason?: any) => void) => void,
+    ctx?: Context
+  ) {
+    const capturedContext = ctx || context.active()
+    super((resolve, reject) => {
+      context.with(capturedContext, () => {
+        executor(
+          (value) => context.with(capturedContext, () => resolve(value)),
+          (reason) => context.with(capturedContext, () => reject(reason))
+        )
+      })
+    })
+    this._context = capturedContext
+  }
+
+  // keep Promise.resolve/reject compatible
+  static resolve(): Promise<void>
+  static resolve<T>(value: T | PromiseLike<T>): Promise<T>
+  static resolve<T>(value: T | PromiseLike<T>, ctx?: Context): Promise<T>
+  static resolve<T>(value?: T | PromiseLike<T>, ctx?: Context): Promise<T> {
+    return new TraceContextPromise<T>((resolve) => resolve(value as T), ctx)
+  }
+
+  static reject<T = never>(reason?: any): Promise<T>
+  static reject<T = never>(reason?: any, ctx?: Context): Promise<T> {
+    return new TraceContextPromise<T>((_, reject) => reject(reason), ctx)
+  }
+
+  static all<T>(values: (T | PromiseLike<T>)[]): Promise<T[]> {
+    // try to pick up the context from the cache
+    let capturedContext = context.active()
+    const newValues = values.map((v) => {
+      if (v instanceof Promise && !(v instanceof TraceContextPromise)) {
+        return new TraceContextPromise((resolve, reject) => v.then(resolve, reject), capturedContext)
+      } else if (typeof v === 'function') {
+        // if v is a function, use context.with to propagate the trace context
+        return (...args: any[]) => context.with(capturedContext, () => v(...args))
+      } else {
+        return v
+      }
+    })
+    if (Array.isArray(values) && values.length > 0 && values[0] instanceof TraceContextPromise) {
+      capturedContext = (values[0] as TraceContextPromise<T>)._context
+    }
+    return originalPromise.all(newValues) as Promise<T[]>
+  }
+
+  static race<T>(values: (T | PromiseLike<T>)[]): Promise<T> {
+    const capturedContext = context.active()
+    return new TraceContextPromise<T>((resolve, reject) => {
+      originalPromise.race(values).then(
+        (result) => context.with(capturedContext, () => resolve(result)),
+        (err) => context.with(capturedContext, () => reject(err))
+      )
+    }, capturedContext)
+  }
+
+  static allSettled<T>(values: (T | PromiseLike<T>)[]): Promise<PromiseSettledResult<T>[]> {
+    const capturedContext = context.active()
+    return new TraceContextPromise<PromiseSettledResult<T>[]>((resolve, reject) => {
originalPromise.allSettled(values).then(
+        (result) => context.with(capturedContext, () => resolve(result)),
+        (err) => context.with(capturedContext, () => reject(err))
+      )
+    }, capturedContext)
+  }
+
+  static any<T>(values: (T | PromiseLike<T>)[]): Promise<T> {
+    const capturedContext = context.active()
+    return new TraceContextPromise<T>((resolve, reject) => {
+      originalPromise.any(values).then(
+        (result) => context.with(capturedContext, () => resolve(result)),
+        (err) => context.with(capturedContext, () => reject(err))
+      )
+    }, capturedContext)
+  }
+}
+
+/**
+ * Replace the global Promise with TraceContextPromise
+ */
+export function instrumentPromises() {
+  globalThis.Promise = TraceContextPromise as unknown as PromiseConstructor
+}
+
+/**
+ * Restore the native Promise
+ */
+export function uninstrumentPromises() {
+  globalThis.Promise = originalPromise
+}
diff --git a/packages/mcp-trace/trace-web/webTracer.ts b/packages/mcp-trace/trace-web/webTracer.ts
new file mode 100644
index 000000000..0b8af5813
--- /dev/null
+++ b/packages/mcp-trace/trace-web/webTracer.ts
@@ -0,0 +1,46 @@
+import { W3CTraceContextPropagator } from '@opentelemetry/core'
+import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'
+import { BatchSpanProcessor, ConsoleSpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
+import { WebTracerProvider } from '@opentelemetry/sdk-trace-web'
+
+import { defaultConfig, TraceConfig } from '../trace-core/types/config'
+import { TopicContextManager } from './TopicContextManager'
+
+export const contextManager = new TopicContextManager()
+
+export class WebTracer {
+  private static provider: WebTracerProvider
+  private static processor: SpanProcessor
+
+  static init(config?: TraceConfig, spanProcessor?: SpanProcessor) {
+    if (config) {
+      defaultConfig.serviceName = config.serviceName || defaultConfig.serviceName
+      defaultConfig.endpoint = config.endpoint || defaultConfig.endpoint
+      defaultConfig.headers = config.headers || defaultConfig.headers
+      defaultConfig.defaultTracerName = config.defaultTracerName || defaultConfig.defaultTracerName
+    }
+    this.processor = spanProcessor || new BatchSpanProcessor(this.getExporter())
+    this.provider = new WebTracerProvider({
+      spanProcessors: [this.processor]
+    })
+    this.provider.register({
+      propagator: new W3CTraceContextPropagator(),
+      contextManager: contextManager
+    })
+  }
+
+  private static getExporter() {
+    if (defaultConfig.endpoint) {
+      return new OTLPTraceExporter({
+        url: `${defaultConfig.endpoint}/v1/traces`,
+        headers: defaultConfig.headers
+      })
+    }
+    return new ConsoleSpanExporter()
+  }
+}
+
+export const startContext = contextManager.startContextForTopic.bind(contextManager)
+export const getContext = contextManager.getContextForTopic.bind(contextManager)
+export const endContext = contextManager.endContextForTopic.bind(contextManager)
+export const cleanContext = contextManager.cleanContextForTopic.bind(contextManager)
diff --git a/packages/shared/IpcChannel.ts b/packages/shared/IpcChannel.ts
index 4c2e823f3..a9fe5f9ab 100644
--- a/packages/shared/IpcChannel.ts
+++ b/packages/shared/IpcChannel.ts
@@ -257,5 +257,20 @@ export enum IpcChannel {
   Memory_SetConfig = 'memory:set-config',
   Memory_DeleteUser = 'memory:delete-user',
   Memory_DeleteAllMemoriesForUser = 'memory:delete-all-memories-for-user',
-  Memory_GetUsersList = 'memory:get-users-list'
+  Memory_GetUsersList = 'memory:get-users-list',
+
+  // TRACE
+  TRACE_SAVE_DATA = 'trace:saveData',
+  TRACE_GET_DATA = 'trace:getData',
+  TRACE_SAVE_ENTITY = 'trace:saveEntity',
+  TRACE_GET_ENTITY =
'trace:getEntity', + TRACE_BIND_TOPIC = 'trace:bindTopic', + TRACE_CLEAN_TOPIC = 'trace:cleanTopic', + TRACE_TOKEN_USAGE = 'trace:tokenUsage', + TRACE_CLEAN_HISTORY = 'trace:cleanHistory', + TRACE_OPEN_WINDOW = 'trace:openWindow', + TRACE_SET_TITLE = 'trace:setTitle', + TRACE_ADD_END_MESSAGE = 'trace:addEndMessage', + TRACE_CLEAN_LOCAL_DATA = 'trace:cleanLocalData', + TRACE_ADD_STREAM_MESSAGE = 'trace:addStreamMessage' } diff --git a/src/main/index.ts b/src/main/index.ts index 7b5338414..bef5c9ca5 100644 --- a/src/main/index.ts +++ b/src/main/index.ts @@ -15,6 +15,7 @@ import { isDev, isLinux, isWin } from './constant' import { registerIpc } from './ipc' import { configManager } from './services/ConfigManager' import mcpService from './services/MCPService' +import { nodeTraceService } from './services/NodeTraceService' import { CHERRY_STUDIO_PROTOCOL, handleProtocolUrl, @@ -109,6 +110,8 @@ if (!app.requestSingleInstanceLock()) { const mainWindow = windowService.createMainWindow() new TrayService() + nodeTraceService.init() + app.on('activate', function () { const mainWindow = windowService.getMainWindow() if (!mainWindow || mainWindow.isDestroyed()) { diff --git a/src/main/ipc.ts b/src/main/ipc.ts index b5424c81a..da096f131 100644 --- a/src/main/ipc.ts +++ b/src/main/ipc.ts @@ -6,6 +6,7 @@ import { loggerService } from '@logger' import { isLinux, isMac, isPortable, isWin } from '@main/constant' import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process' import { handleZoomFactor } from '@main/utils/zoom' +import { SpanEntity, TokenUsage } from '@mcp-trace/trace-core' import { UpgradeChannel } from '@shared/config/constant' import { IpcChannel } from '@shared/IpcChannel' import { FileMetadata, Provider, Shortcut, ThemeMode } from '@types' @@ -24,6 +25,7 @@ import FileService from './services/FileSystemService' import KnowledgeService from './services/KnowledgeService' import mcpService from './services/MCPService' import MemoryService from './services/memory/MemoryService' +import { openTraceWindow, setTraceWindowTitle } from './services/NodeTraceService' import NotificationService from './services/NotificationService' import * as NutstoreService from './services/NutstoreService' import ObsidianVaultService from './services/ObsidianVaultService' @@ -33,6 +35,19 @@ import { FileServiceManager } from './services/remotefile/FileServiceManager' import { searchService } from './services/SearchService' import { SelectionService } from './services/SelectionService' import { registerShortcuts, unregisterAllShortcuts } from './services/ShortcutService' +import { + addEndMessage, + addStreamMessage, + bindTopic, + cleanHistoryTrace, + cleanLocalData, + cleanTopic, + getEntity, + getSpans, + saveEntity, + saveSpans, + tokenUsage +} from './services/SpanCacheService' import storeSyncService from './services/StoreSyncService' import { themeService } from './services/ThemeService' import VertexAIService from './services/VertexAIService' @@ -371,49 +386,49 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) { }) // backup - ipcMain.handle(IpcChannel.Backup_Backup, backupManager.backup) - ipcMain.handle(IpcChannel.Backup_Restore, backupManager.restore) - ipcMain.handle(IpcChannel.Backup_BackupToWebdav, backupManager.backupToWebdav) - ipcMain.handle(IpcChannel.Backup_RestoreFromWebdav, backupManager.restoreFromWebdav) - ipcMain.handle(IpcChannel.Backup_ListWebdavFiles, backupManager.listWebdavFiles) - ipcMain.handle(IpcChannel.Backup_CheckConnection, 
backupManager.checkConnection) - ipcMain.handle(IpcChannel.Backup_CreateDirectory, backupManager.createDirectory) - ipcMain.handle(IpcChannel.Backup_DeleteWebdavFile, backupManager.deleteWebdavFile) - ipcMain.handle(IpcChannel.Backup_BackupToLocalDir, backupManager.backupToLocalDir) - ipcMain.handle(IpcChannel.Backup_RestoreFromLocalBackup, backupManager.restoreFromLocalBackup) - ipcMain.handle(IpcChannel.Backup_ListLocalBackupFiles, backupManager.listLocalBackupFiles) - ipcMain.handle(IpcChannel.Backup_DeleteLocalBackupFile, backupManager.deleteLocalBackupFile) - ipcMain.handle(IpcChannel.Backup_SetLocalBackupDir, backupManager.setLocalBackupDir) - ipcMain.handle(IpcChannel.Backup_BackupToS3, backupManager.backupToS3) - ipcMain.handle(IpcChannel.Backup_RestoreFromS3, backupManager.restoreFromS3) - ipcMain.handle(IpcChannel.Backup_ListS3Files, backupManager.listS3Files) - ipcMain.handle(IpcChannel.Backup_DeleteS3File, backupManager.deleteS3File) - ipcMain.handle(IpcChannel.Backup_CheckS3Connection, backupManager.checkS3Connection) + ipcMain.handle(IpcChannel.Backup_Backup, backupManager.backup.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_Restore, backupManager.restore.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_BackupToWebdav, backupManager.backupToWebdav.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_RestoreFromWebdav, backupManager.restoreFromWebdav.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_ListWebdavFiles, backupManager.listWebdavFiles.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_CheckConnection, backupManager.checkConnection.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_CreateDirectory, backupManager.createDirectory.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_DeleteWebdavFile, backupManager.deleteWebdavFile.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_BackupToLocalDir, backupManager.backupToLocalDir.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_RestoreFromLocalBackup, backupManager.restoreFromLocalBackup.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_ListLocalBackupFiles, backupManager.listLocalBackupFiles.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_DeleteLocalBackupFile, backupManager.deleteLocalBackupFile.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_SetLocalBackupDir, backupManager.setLocalBackupDir.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_BackupToS3, backupManager.backupToS3.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_RestoreFromS3, backupManager.restoreFromS3.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_ListS3Files, backupManager.listS3Files.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_DeleteS3File, backupManager.deleteS3File.bind(backupManager)) + ipcMain.handle(IpcChannel.Backup_CheckS3Connection, backupManager.checkS3Connection.bind(backupManager)) // file - ipcMain.handle(IpcChannel.File_Open, fileManager.open) - ipcMain.handle(IpcChannel.File_OpenPath, fileManager.openPath) - ipcMain.handle(IpcChannel.File_Save, fileManager.save) - ipcMain.handle(IpcChannel.File_Select, fileManager.selectFile) - ipcMain.handle(IpcChannel.File_Upload, fileManager.uploadFile) - ipcMain.handle(IpcChannel.File_Clear, fileManager.clear) - ipcMain.handle(IpcChannel.File_Read, fileManager.readFile) - ipcMain.handle(IpcChannel.File_Delete, fileManager.deleteFile) - ipcMain.handle('file:deleteDir', fileManager.deleteDir) - ipcMain.handle(IpcChannel.File_Get, fileManager.getFile) - ipcMain.handle(IpcChannel.File_SelectFolder, 
fileManager.selectFolder) - ipcMain.handle(IpcChannel.File_CreateTempFile, fileManager.createTempFile) - ipcMain.handle(IpcChannel.File_Write, fileManager.writeFile) - ipcMain.handle(IpcChannel.File_WriteWithId, fileManager.writeFileWithId) - ipcMain.handle(IpcChannel.File_SaveImage, fileManager.saveImage) - ipcMain.handle(IpcChannel.File_Base64Image, fileManager.base64Image) - ipcMain.handle(IpcChannel.File_SaveBase64Image, fileManager.saveBase64Image) - ipcMain.handle(IpcChannel.File_Base64File, fileManager.base64File) - ipcMain.handle(IpcChannel.File_GetPdfInfo, fileManager.pdfPageCount) - ipcMain.handle(IpcChannel.File_Download, fileManager.downloadFile) - ipcMain.handle(IpcChannel.File_Copy, fileManager.copyFile) - ipcMain.handle(IpcChannel.File_BinaryImage, fileManager.binaryImage) - ipcMain.handle(IpcChannel.File_OpenWithRelativePath, fileManager.openFileWithRelativePath) + ipcMain.handle(IpcChannel.File_Open, fileManager.open.bind(fileManager)) + ipcMain.handle(IpcChannel.File_OpenPath, fileManager.openPath.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Save, fileManager.save.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Select, fileManager.selectFile.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Upload, fileManager.uploadFile.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Clear, fileManager.clear.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Read, fileManager.readFile.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Delete, fileManager.deleteFile.bind(fileManager)) + ipcMain.handle('file:deleteDir', fileManager.deleteDir.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Get, fileManager.getFile.bind(fileManager)) + ipcMain.handle(IpcChannel.File_SelectFolder, fileManager.selectFolder.bind(fileManager)) + ipcMain.handle(IpcChannel.File_CreateTempFile, fileManager.createTempFile.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Write, fileManager.writeFile.bind(fileManager)) + ipcMain.handle(IpcChannel.File_WriteWithId, fileManager.writeFileWithId.bind(fileManager)) + ipcMain.handle(IpcChannel.File_SaveImage, fileManager.saveImage.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Base64Image, fileManager.base64Image.bind(fileManager)) + ipcMain.handle(IpcChannel.File_SaveBase64Image, fileManager.saveBase64Image.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Base64File, fileManager.base64File.bind(fileManager)) + ipcMain.handle(IpcChannel.File_GetPdfInfo, fileManager.pdfPageCount.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Download, fileManager.downloadFile.bind(fileManager)) + ipcMain.handle(IpcChannel.File_Copy, fileManager.copyFile.bind(fileManager)) + ipcMain.handle(IpcChannel.File_BinaryImage, fileManager.binaryImage.bind(fileManager)) + ipcMain.handle(IpcChannel.File_OpenWithRelativePath, fileManager.openFileWithRelativePath.bind(fileManager)) // file service ipcMain.handle(IpcChannel.FileService_Upload, async (_, provider: Provider, file: FileMetadata) => { @@ -437,10 +452,10 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) { }) // fs - ipcMain.handle(IpcChannel.Fs_Read, FileService.readFile) + ipcMain.handle(IpcChannel.Fs_Read, FileService.readFile.bind(FileService)) // export - ipcMain.handle(IpcChannel.Export_Word, exportService.exportToWord) + ipcMain.handle(IpcChannel.Export_Word, exportService.exportToWord.bind(exportService)) // open path ipcMain.handle(IpcChannel.Open_Path, async (_, path: string) => { @@ -458,14 +473,14 @@ export function registerIpc(mainWindow: BrowserWindow, app: 
Electron.App) { }) // knowledge base - ipcMain.handle(IpcChannel.KnowledgeBase_Create, KnowledgeService.create) - ipcMain.handle(IpcChannel.KnowledgeBase_Reset, KnowledgeService.reset) - ipcMain.handle(IpcChannel.KnowledgeBase_Delete, KnowledgeService.delete) - ipcMain.handle(IpcChannel.KnowledgeBase_Add, KnowledgeService.add) - ipcMain.handle(IpcChannel.KnowledgeBase_Remove, KnowledgeService.remove) - ipcMain.handle(IpcChannel.KnowledgeBase_Search, KnowledgeService.search) - ipcMain.handle(IpcChannel.KnowledgeBase_Rerank, KnowledgeService.rerank) - ipcMain.handle(IpcChannel.KnowledgeBase_Check_Quota, KnowledgeService.checkQuota) + ipcMain.handle(IpcChannel.KnowledgeBase_Create, KnowledgeService.create.bind(KnowledgeService)) + ipcMain.handle(IpcChannel.KnowledgeBase_Reset, KnowledgeService.reset.bind(KnowledgeService)) + ipcMain.handle(IpcChannel.KnowledgeBase_Delete, KnowledgeService.delete.bind(KnowledgeService)) + ipcMain.handle(IpcChannel.KnowledgeBase_Add, KnowledgeService.add.bind(KnowledgeService)) + ipcMain.handle(IpcChannel.KnowledgeBase_Remove, KnowledgeService.remove.bind(KnowledgeService)) + ipcMain.handle(IpcChannel.KnowledgeBase_Search, KnowledgeService.search.bind(KnowledgeService)) + ipcMain.handle(IpcChannel.KnowledgeBase_Rerank, KnowledgeService.rerank.bind(KnowledgeService)) + ipcMain.handle(IpcChannel.KnowledgeBase_Check_Quota, KnowledgeService.checkQuota.bind(KnowledgeService)) // memory ipcMain.handle(IpcChannel.Memory_Add, async (_, messages, config) => { @@ -586,12 +601,12 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) { ipcMain.handle(IpcChannel.App_InstallBunBinary, () => runInstallScript('install-bun.js')) //copilot - ipcMain.handle(IpcChannel.Copilot_GetAuthMessage, CopilotService.getAuthMessage) - ipcMain.handle(IpcChannel.Copilot_GetCopilotToken, CopilotService.getCopilotToken) - ipcMain.handle(IpcChannel.Copilot_SaveCopilotToken, CopilotService.saveCopilotToken) - ipcMain.handle(IpcChannel.Copilot_GetToken, CopilotService.getToken) - ipcMain.handle(IpcChannel.Copilot_Logout, CopilotService.logout) - ipcMain.handle(IpcChannel.Copilot_GetUser, CopilotService.getUser) + ipcMain.handle(IpcChannel.Copilot_GetAuthMessage, CopilotService.getAuthMessage.bind(CopilotService)) + ipcMain.handle(IpcChannel.Copilot_GetCopilotToken, CopilotService.getCopilotToken.bind(CopilotService)) + ipcMain.handle(IpcChannel.Copilot_SaveCopilotToken, CopilotService.saveCopilotToken.bind(CopilotService)) + ipcMain.handle(IpcChannel.Copilot_GetToken, CopilotService.getToken.bind(CopilotService)) + ipcMain.handle(IpcChannel.Copilot_Logout, CopilotService.logout.bind(CopilotService)) + ipcMain.handle(IpcChannel.Copilot_GetUser, CopilotService.getUser.bind(CopilotService)) // Obsidian service ipcMain.handle(IpcChannel.Obsidian_GetVaults, () => { @@ -603,7 +618,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) { }) // nutstore - ipcMain.handle(IpcChannel.Nutstore_GetSsoUrl, NutstoreService.getNutstoreSSOUrl) + ipcMain.handle(IpcChannel.Nutstore_GetSsoUrl, NutstoreService.getNutstoreSSOUrl.bind(NutstoreService)) ipcMain.handle(IpcChannel.Nutstore_DecryptToken, (_, token: string) => NutstoreService.decryptToken(token)) ipcMain.handle(IpcChannel.Nutstore_GetDirectoryContents, (_, token: string, path: string) => NutstoreService.getDirectoryContents(token, path) @@ -642,4 +657,31 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) { ipcMain.handle(IpcChannel.App_SetDisableHardwareAcceleration, (_, isDisable: 
boolean) => {
     configManager.setDisableHardwareAcceleration(isDisable)
   })
+  ipcMain.handle(IpcChannel.TRACE_SAVE_DATA, (_, topicId: string) => saveSpans(topicId))
+  ipcMain.handle(IpcChannel.TRACE_GET_DATA, (_, topicId: string, traceId: string, modelName?: string) =>
+    getSpans(topicId, traceId, modelName)
+  )
+  ipcMain.handle(IpcChannel.TRACE_SAVE_ENTITY, (_, entity: SpanEntity) => saveEntity(entity))
+  ipcMain.handle(IpcChannel.TRACE_GET_ENTITY, (_, spanId: string) => getEntity(spanId))
+  ipcMain.handle(IpcChannel.TRACE_BIND_TOPIC, (_, topicId: string, traceId: string) => bindTopic(traceId, topicId))
+  ipcMain.handle(IpcChannel.TRACE_CLEAN_TOPIC, (_, topicId: string, traceId?: string) => cleanTopic(topicId, traceId))
+  ipcMain.handle(IpcChannel.TRACE_TOKEN_USAGE, (_, spanId: string, usage: TokenUsage) => tokenUsage(spanId, usage))
+  ipcMain.handle(IpcChannel.TRACE_CLEAN_HISTORY, (_, topicId: string, traceId: string, modelName?: string) =>
+    cleanHistoryTrace(topicId, traceId, modelName)
+  )
+  ipcMain.handle(
+    IpcChannel.TRACE_OPEN_WINDOW,
+    (_, topicId: string, traceId: string, autoOpen?: boolean, modelName?: string) =>
+      openTraceWindow(topicId, traceId, autoOpen, modelName)
+  )
+  ipcMain.handle(IpcChannel.TRACE_SET_TITLE, (_, title: string) => setTraceWindowTitle(title))
+  ipcMain.handle(IpcChannel.TRACE_ADD_END_MESSAGE, (_, spanId: string, modelName: string, message: string) =>
+    addEndMessage(spanId, modelName, message)
+  )
+  ipcMain.handle(IpcChannel.TRACE_CLEAN_LOCAL_DATA, () => cleanLocalData())
+  ipcMain.handle(
+    IpcChannel.TRACE_ADD_STREAM_MESSAGE,
+    (_, spanId: string, modelName: string, context: string, msg: any) =>
+      addStreamMessage(spanId, modelName, context, msg)
+  )
 }
diff --git a/src/main/knowledge/embeddings/Embeddings.ts b/src/main/knowledge/embeddings/Embeddings.ts
index 0ec17691e..17bb8ff47 100644
--- a/src/main/knowledge/embeddings/Embeddings.ts
+++ b/src/main/knowledge/embeddings/Embeddings.ts
@@ -1,4 +1,5 @@
 import type { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
+import { TraceMethod } from '@mcp-trace/trace-core'
 import { ApiClient } from '@types'
 
 import EmbeddingsFactory from './EmbeddingsFactory'
@@ -14,13 +15,18 @@ export default class Embeddings {
   public async init(): Promise<void> {
     return this.sdk.init()
   }
+
+  @TraceMethod({ spanName: 'dimensions', tag: 'Embeddings' })
   public async getDimensions(): Promise<number> {
     return this.sdk.getDimensions()
   }
+
+  @TraceMethod({ spanName: 'embedDocuments', tag: 'Embeddings' })
   public async embedDocuments(texts: string[]): Promise<number[][]> {
     return this.sdk.embedDocuments(texts)
   }
+  @TraceMethod({ spanName: 'embedQuery', tag: 'Embeddings' })
   public async embedQuery(text: string): Promise<number[]> {
     return this.sdk.embedQuery(text)
   }
diff --git a/src/main/mcpServers/memory.ts b/src/main/mcpServers/memory.ts
index 912999b6d..d69366027 100644
--- a/src/main/mcpServers/memory.ts
+++ b/src/main/mcpServers/memory.ts
@@ -1,5 +1,6 @@
 import { loggerService } from '@logger'
 import { getConfigDir } from '@main/utils/file'
+import { TraceMethod } from '@mcp-trace/trace-core'
 import { Server } from '@modelcontextprotocol/sdk/server/index.js'
 import { CallToolRequestSchema, ErrorCode, ListToolsRequestSchema, McpError } from '@modelcontextprotocol/sdk/types.js'
 import { Mutex } from 'async-mutex' // import Mutex
@@ -45,6 +46,7 @@ class KnowledgeGraphManager {
   }
 
   // Static async factory method for initialization
+  @TraceMethod({ spanName: 'create', tag: 'KnowledgeGraph' })
   public static async create(memoryPath: string): Promise<KnowledgeGraphManager> {
     const manager = new
KnowledgeGraphManager(memoryPath) await manager._ensureMemoryPathExists() @@ -143,6 +145,7 @@ class KnowledgeGraphManager { return JSON.parse(relationStr) as Relation } + @TraceMethod({ spanName: 'createEntities', tag: 'KnowledgeGraph' }) async createEntities(entities: Entity[]): Promise { const newEntities: Entity[] = [] entities.forEach((entity) => { @@ -159,6 +162,7 @@ class KnowledgeGraphManager { return newEntities } + @TraceMethod({ spanName: 'createRelations', tag: 'KnowledgeGraph' }) async createRelations(relations: Relation[]): Promise { const newRelations: Relation[] = [] relations.forEach((relation) => { @@ -179,6 +183,7 @@ class KnowledgeGraphManager { return newRelations } + @TraceMethod({ spanName: 'addObservtions', tag: 'KnowledgeGraph' }) async addObservations( observations: { entityName: string; contents: string[] }[] ): Promise<{ entityName: string; addedObservations: string[] }[]> { @@ -213,6 +218,7 @@ class KnowledgeGraphManager { return results } + @TraceMethod({ spanName: 'deleteEntities', tag: 'KnowledgeGraph' }) async deleteEntities(entityNames: string[]): Promise { let changed = false const namesToDelete = new Set(entityNames) @@ -244,6 +250,7 @@ class KnowledgeGraphManager { } } + @TraceMethod({ spanName: 'deleteObservations', tag: 'KnowledgeGraph' }) async deleteObservations(deletions: { entityName: string; observations: string[] }[]): Promise { let changed = false deletions.forEach((d) => { @@ -262,6 +269,7 @@ class KnowledgeGraphManager { } } + @TraceMethod({ spanName: 'deleteRelations', tag: 'KnowledgeGraph' }) async deleteRelations(relations: Relation[]): Promise { let changed = false relations.forEach((rel) => { @@ -276,6 +284,7 @@ class KnowledgeGraphManager { } // Read the current state from memory + @TraceMethod({ spanName: 'readGraph', tag: 'KnowledgeGraph' }) async readGraph(): Promise { // Return a deep copy to prevent external modification of the internal state return JSON.parse( @@ -287,6 +296,7 @@ class KnowledgeGraphManager { } // Search operates on the in-memory graph + @TraceMethod({ spanName: 'searchNodes', tag: 'KnowledgeGraph' }) async searchNodes(query: string): Promise { const lowerCaseQuery = query.toLowerCase() const filteredEntities = Array.from(this.entities.values()).filter( @@ -309,6 +319,7 @@ class KnowledgeGraphManager { } // Open operates on the in-memory graph + @TraceMethod({ spanName: 'openNodes', tag: 'KnowledgeGraph' }) async openNodes(names: string[]): Promise { const nameSet = new Set(names) const filteredEntities = Array.from(this.entities.values()).filter((e) => nameSet.has(e.name)) diff --git a/src/main/services/FileStorage.ts b/src/main/services/FileStorage.ts index 61e8ed2ce..005fa7523 100644 --- a/src/main/services/FileStorage.ts +++ b/src/main/services/FileStorage.ts @@ -45,6 +45,7 @@ class FileStorage { } } + // @TraceProperty({ spanName: 'getFileHash', tag: 'FileStorage' }) private getFileHash = async (filePath: string): Promise => { return new Promise((resolve, reject) => { const hash = crypto.createHash('md5') @@ -219,6 +220,7 @@ class FileStorage { return fileInfo } + // @TraceProperty({ spanName: 'deleteFile', tag: 'FileStorage' }) public deleteFile = async (_: Electron.IpcMainInvokeEvent, id: string): Promise => { if (!fs.existsSync(path.join(this.storageDir, id))) { return @@ -586,6 +588,7 @@ class FileStorage { return mimeToExtension[mimeType] || '.bin' } + // @TraceProperty({ spanName: 'copyFile', tag: 'FileStorage' }) public copyFile = async (_: Electron.IpcMainInvokeEvent, id: string, destPath: string): 
Promise => { try { const sourcePath = path.join(this.storageDir, id) diff --git a/src/main/services/FileSystemService.ts b/src/main/services/FileSystemService.ts index a964d43a8..47e897e15 100644 --- a/src/main/services/FileSystemService.ts +++ b/src/main/services/FileSystemService.ts @@ -1,6 +1,8 @@ +import { TraceMethod } from '@mcp-trace/trace-core' import fs from 'fs/promises' export default class FileService { + @TraceMethod({ spanName: 'readFile', tag: 'FileService' }) public static async readFile(_: Electron.IpcMainInvokeEvent, pathOrUrl: string, encoding?: BufferEncoding) { const path = pathOrUrl.startsWith('file://') ? new URL(pathOrUrl) : pathOrUrl if (encoding) return fs.readFile(path, { encoding }) diff --git a/src/main/services/KnowledgeService.ts b/src/main/services/KnowledgeService.ts index 0e5858a88..6c783f431 100644 --- a/src/main/services/KnowledgeService.ts +++ b/src/main/services/KnowledgeService.ts @@ -31,6 +31,7 @@ import Reranker from '@main/knowledge/reranker/Reranker' import { windowService } from '@main/services/WindowService' import { getDataPath } from '@main/utils' import { getAllFiles } from '@main/utils/file' +import { TraceMethod } from '@mcp-trace/trace-core' import { MB } from '@shared/config/constant' import type { LoaderReturn } from '@shared/config/types' import { IpcChannel } from '@shared/IpcChannel' @@ -155,7 +156,7 @@ class KnowledgeService { await ragApplication.reset() } - public delete = async (_: Electron.IpcMainInvokeEvent, id: string): Promise => { + public async delete(_: Electron.IpcMainInvokeEvent, id: string): Promise { logger.debug('delete id', id) const dbPath = path.join(this.storageDir, id) if (fs.existsSync(dbPath)) { @@ -473,7 +474,7 @@ class KnowledgeService { }) } - public add = async (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise => { + public add = (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise => { return new Promise((resolve) => { const { base, item, forceReload = false, userId = '' } = options const optionsNonNullableAttribute = { base, item, forceReload, userId } @@ -520,10 +521,11 @@ class KnowledgeService { }) } - public remove = async ( + @TraceMethod({ spanName: 'remove', tag: 'Knowledge' }) + public async remove( _: Electron.IpcMainInvokeEvent, { uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams } - ): Promise => { + ): Promise { const ragApplication = await this.getRagApplication(base) logger.debug(`Remove Item UniqueId: ${uniqueId}`) for (const id of uniqueIds) { @@ -531,18 +533,20 @@ class KnowledgeService { } } - public search = async ( + @TraceMethod({ spanName: 'RagSearch', tag: 'Knowledge' }) + public async search( _: Electron.IpcMainInvokeEvent, { search, base }: { search: string; base: KnowledgeBaseParams } - ): Promise => { + ): Promise { const ragApplication = await this.getRagApplication(base) return await ragApplication.search(search) } - public rerank = async ( + @TraceMethod({ spanName: 'rerank', tag: 'Knowledge' }) + public async rerank( _: Electron.IpcMainInvokeEvent, { search, base, results }: { search: string; base: KnowledgeBaseParams; results: ExtractChunkData[] } - ): Promise => { + ): Promise { if (results.length === 0) { return results } diff --git a/src/main/services/MCPService.ts b/src/main/services/MCPService.ts index 2303e1ed0..347dfff0d 100644 --- a/src/main/services/MCPService.ts +++ b/src/main/services/MCPService.ts @@ -7,6 +7,7 @@ import { createInMemoryMCPServer } from 
'@main/mcpServers/factory' import { makeSureDirExists } from '@main/utils' import { buildFunctionCallToolName } from '@main/utils/mcp' import { getBinaryName, getBinaryPath } from '@main/utils/process' +import { TraceMethod, withSpanFunc } from '@mcp-trace/trace-core' import { Client } from '@modelcontextprotocol/sdk/client/index.js' import { SSEClientTransport, SSEClientTransportOptions } from '@modelcontextprotocol/sdk/client/sse.js' import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js' @@ -26,7 +27,7 @@ import { ToolListChangedNotificationSchema } from '@modelcontextprotocol/sdk/types.js' import { nanoid } from '@reduxjs/toolkit' -import { +import type { GetMCPPromptResponse, GetResourceResponse, MCPCallToolResponse, @@ -49,6 +50,8 @@ import getLoginShellEnvironment from './mcp/shell-env' // Generic type for caching wrapped functions type CachedFunction = (...args: T) => Promise +type CallToolArgs = { server: MCPServer; name: string; args: any; callId?: string } + const logger = loggerService.withContext('MCPService') /** @@ -580,17 +583,22 @@ class McpService { } async listTools(_: Electron.IpcMainInvokeEvent, server: MCPServer) { - const cachedListTools = withCache<[MCPServer], MCPTool[]>( - this.listToolsImpl.bind(this), - (server) => { - const serverKey = this.getServerKey(server) - return `mcp:list_tool:${serverKey}` - }, - 5 * 60 * 1000, // 5 minutes TTL - `[MCP] Tools from ${server.name}` - ) + const listFunc = (server: MCPServer) => { + const cachedListTools = withCache<[MCPServer], MCPTool[]>( + this.listToolsImpl.bind(this), + (server) => { + const serverKey = this.getServerKey(server) + return `mcp:list_tool:${serverKey}` + }, + 5 * 60 * 1000, // 5 minutes TTL + `[MCP] Tools from ${server.name}` + ) - return cachedListTools(server) + const result = cachedListTools(server) + return result + } + + return withSpanFunc(`${server.name}.ListTool`, 'MCP', listFunc, [server]) } /** @@ -598,37 +606,41 @@ class McpService { */ public async callTool( _: Electron.IpcMainInvokeEvent, - { server, name, args, callId }: { server: MCPServer; name: string; args: any; callId?: string } + { server, name, args, callId }: CallToolArgs ): Promise { const toolCallId = callId || uuidv4() const abortController = new AbortController() this.activeToolCalls.set(toolCallId, abortController) - try { - logger.debug('Calling:', server.name, name, args, 'callId:', toolCallId) - if (typeof args === 'string') { - try { - args = JSON.parse(args) - } catch (e) { - logger.error('args parse error', args) + const callToolFunc = async ({ server, name, args }: CallToolArgs) => { + try { + logger.debug('Calling:', server.name, name, args, 'callId:', toolCallId) + if (typeof args === 'string') { + try { + args = JSON.parse(args) + } catch (e) { + logger.error('args parse error', args) + } } + const client = await this.initClient(server) + const result = await client.callTool({ name, arguments: args }, undefined, { + onprogress: (process) => { + logger.debug(`Progress: ${process.progress / (process.total || 1)}`) + window.api.mcp.setProgress(process.progress / (process.total || 1)) + }, + timeout: server.timeout ? 
server.timeout * 1000 : 60000, // Default timeout of 1 minute + signal: this.activeToolCalls.get(toolCallId)?.signal + }) + return result as MCPCallToolResponse + } catch (error) { + logger.error(`Error calling tool ${name} on ${server.name}:`, error) + throw error + } finally { + this.activeToolCalls.delete(toolCallId) } - const client = await this.initClient(server) - const result = await client.callTool({ name, arguments: args }, undefined, { - onprogress: (process) => { - logger.debug(`Progress: ${process.progress / (process.total || 1)}`) - window.api.mcp.setProgress(process.progress / (process.total || 1)) - }, - timeout: server.timeout ? server.timeout * 1000 : 60000, // Default timeout of 1 minute - signal: this.activeToolCalls.get(toolCallId)?.signal - }) - return result as MCPCallToolResponse - } catch (error) { - logger.error(`Error calling tool ${name} on ${server.name}:`, error) - throw error - } finally { - this.activeToolCalls.delete(toolCallId) } + + return await withSpanFunc(`${server.name}.${name}`, `MCP`, callToolFunc, [{ server, name, args }]) } public async getInstallInfo() { @@ -695,6 +707,7 @@ class McpService { /** * Get a specific prompt from an MCP server with caching */ + @TraceMethod({ spanName: 'getPrompt', tag: 'mcp' }) public async getPrompt( _: Electron.IpcMainInvokeEvent, { server, name, args }: { server: MCPServer; name: string; args?: Record } @@ -781,6 +794,7 @@ class McpService { /** * Get a specific resource from an MCP server with caching */ + @TraceMethod({ spanName: 'getResource', tag: 'mcp' }) public async getResource( _: Electron.IpcMainInvokeEvent, { server, uri }: { server: MCPServer; uri: string } diff --git a/src/main/services/NodeTraceService.ts b/src/main/services/NodeTraceService.ts new file mode 100644 index 000000000..a11880949 --- /dev/null +++ b/src/main/services/NodeTraceService.ts @@ -0,0 +1,122 @@ +import { isDev } from '@main/constant' +import { CacheBatchSpanProcessor, defaultConfig, FunctionSpanExporter } from '@mcp-trace/trace-core' +import { NodeTracer as MCPNodeTracer } from '@mcp-trace/trace-node/nodeTracer' +import { context, SpanContext, trace } from '@opentelemetry/api' +import { BrowserWindow, ipcMain } from 'electron' +import * as path from 'path' + +import { ConfigKeys, configManager } from './ConfigManager' +import { spanCacheService } from './SpanCacheService' + +export const TRACER_NAME = 'CherryStudio' + +export class NodeTraceService { + init() { + // TODO get developer mode setting from config + defaultConfig.isDevModel = true + + const exporter = new FunctionSpanExporter(async (spans) => { + console.log(`Spans length:`, spans.length) + }) + + MCPNodeTracer.init( + { + defaultTracerName: TRACER_NAME, + serviceName: TRACER_NAME + }, + new CacheBatchSpanProcessor(exporter, spanCacheService) + ) + } +} + +const originalHandle = ipcMain.handle +ipcMain.handle = (channel: string, handler: (...args: any[]) => Promise) => { + return originalHandle.call(ipcMain, channel, async (event, ...args) => { + const carray = args && args.length > 0 ? 
args[args.length - 1] : {} + let ctx = context.active() + let newArgs = args + if (carray && typeof carray === 'object' && 'type' in carray && carray.type === 'trace') { + const span = trace.wrapSpanContext(carray.context as SpanContext) + ctx = trace.setSpan(context.active(), span) + newArgs = args.slice(0, args.length - 1) + } + return context.with(ctx, () => handler(event, ...newArgs)) + }) } + +export const nodeTraceService = new NodeTraceService() + +let traceWin: BrowserWindow | null = null + +export function openTraceWindow(topicId: string, traceId: string, autoOpen = true, modelName?: string) { + if (traceWin && !traceWin.isDestroyed()) { + traceWin.focus() + traceWin.webContents.send('set-trace', { traceId, topicId, modelName }) + return + } + + if (!traceWin && !autoOpen) { + return + } + + traceWin = new BrowserWindow({ + width: 600, + minWidth: 500, + minHeight: 600, + height: 800, + autoHideMenuBar: true, + closable: true, + focusable: true, + movable: true, + hasShadow: true, + roundedCorners: true, + maximizable: true, + minimizable: true, + resizable: true, + title: 'Call Chain Window', + frame: false, + titleBarStyle: 'hidden', + titleBarOverlay: { height: 40 }, + webPreferences: { + preload: path.join(__dirname, '../preload/index.js'), + contextIsolation: true, + nodeIntegration: false, + sandbox: false, + devTools: isDev ? true : false + } + }) + + if (isDev && process.env['ELECTRON_RENDERER_URL']) { + traceWin.loadURL(process.env['ELECTRON_RENDERER_URL'] + `/traceWindow.html`) + } else { + traceWin.loadFile(path.join(__dirname, '../renderer/traceWindow.html')) + } + traceWin.on('closed', () => { + configManager.unsubscribe(ConfigKeys.Language, setLanguageCallback) + try { + traceWin?.destroy() + } finally { + traceWin = null + } + }) + + traceWin.webContents.on('did-finish-load', () => { + traceWin!.webContents.send('set-trace', { + traceId, + topicId, + modelName + }) + traceWin!.webContents.send('set-language', { lang: configManager.get(ConfigKeys.Language) }) + configManager.subscribe(ConfigKeys.Language, setLanguageCallback) + }) +} + +const setLanguageCallback = (lang: string) => { + traceWin!.webContents.send('set-language', { lang }) +} + +export const setTraceWindowTitle = (title: string) => { + if (traceWin) { + traceWin.title = title + } +} diff --git a/src/main/services/SpanCacheService.ts b/src/main/services/SpanCacheService.ts new file mode 100644 index 000000000..1529815ce --- /dev/null +++ b/src/main/services/SpanCacheService.ts @@ -0,0 +1,409 @@ +import { + Attributes, + convertSpanToSpanEntity, + defaultConfig, + SpanEntity, + TokenUsage, + TraceCache +} from '@mcp-trace/trace-core' +import { SpanStatusCode } from '@opentelemetry/api' +import { ReadableSpan } from '@opentelemetry/sdk-trace-base' +import fs from 'fs/promises' +import * as os from 'os' +import * as path from 'path' + +class SpanCacheService implements TraceCache { + private topicMap: Map = new Map() + private fileDir: string + private cache: Map = new Map() + + constructor() { + this.fileDir = path.join(os.homedir(), '.cherrystudio', 'trace') + } + + createSpan: (span: ReadableSpan) => void = (span: ReadableSpan) => { + if (!defaultConfig.isDevModel) { + return + } + const spanEntity = convertSpanToSpanEntity(span) + spanEntity.topicId = this.topicMap.get(spanEntity.traceId) + this.cache.set(span.spanContext().spanId, spanEntity) + this._updateModelName(spanEntity) + } + + endSpan: (span: ReadableSpan) => void = (span: ReadableSpan) => { + if (!defaultConfig.isDevModel) { + return
+ } + const spanId = span.spanContext().spanId + const spanEntity = this.cache.get(spanId) + if (!spanEntity) { + return + } + + spanEntity.topicId = this.topicMap.get(spanEntity.traceId) + spanEntity.endTime = span.endTime ? span.endTime[0] * 1e3 + Math.floor(span.endTime[1] / 1e6) : null + spanEntity.status = SpanStatusCode[span.status.code] + spanEntity.attributes = span.attributes ? ({ ...span.attributes } as Attributes) : {} + spanEntity.events = span.events + spanEntity.links = span.links + this._updateModelName(spanEntity) + } + + clear: () => void = () => { + this.cache.clear() + } + + async cleanTopic(topicId: string, traceId?: string, modelName?: string) { + const spans = Array.from(this.cache.values().filter((e) => e.topicId === topicId)) + spans.map((e) => e.id).forEach((id) => this.cache.delete(id)) + + await this._checkFolder(path.join(this.fileDir, topicId)) + + if (modelName) { + this.cleanHistoryTrace(topicId, traceId || '', modelName) + this.saveSpans(topicId) + } else if (traceId) { + fs.rm(path.join(this.fileDir, topicId, traceId)) + } else { + fs.readdir(path.join(this.fileDir, topicId)).then((files) => + files.forEach((file) => { + fs.rm(path.join(this.fileDir, topicId, file)) + }) + ) + } + } + + async cleanLocalData() { + this.cache.clear() + fs.readdir(this.fileDir) + .then((files) => + files.forEach((topicId) => { + fs.rm(path.join(this.fileDir, topicId), { recursive: true, force: true }) + }) + ) + .catch((err) => { + console.error('Error cleaning local data:', err) + }) + } + + async saveSpans(topicId: string) { + if (!defaultConfig.isDevModel) { + return + } + let traceId: string | undefined + for (const [key, value] of this.topicMap.entries()) { + if (value === topicId) { + traceId = key + break // 找到后立即退出循环 + } + } + if (!traceId) { + return + } + const spans = Array.from(this.cache.values().filter((e) => e.traceId === traceId || !e.modelName)) + await this._saveToFile(spans, traceId, topicId) + this.topicMap.delete(traceId) + this._cleanCache(traceId) + } + + async getSpans(topicId: string, traceId: string, modelName?: string) { + if (this.topicMap.has(traceId)) { + const spans: SpanEntity[] = [] + this.cache + .values() + .filter((spanEntity) => { + return spanEntity.traceId === traceId && spanEntity.modelName + }) + .filter((spanEntity) => { + return !modelName || spanEntity.modelName === modelName + }) + .forEach((sp) => spans.push(sp)) + return spans + } else { + return this._getHisData(topicId, traceId, modelName) + } + } + + /** + * binding topic id to trace + * @param traceId traceId + * @param topicId topicId + */ + setTopicId(traceId: string, topicId: string): void { + this.topicMap.set(traceId, topicId) + } + + getEntity(spanId: string): SpanEntity | undefined { + return this.cache.get(spanId) + } + + saveEntity(entity: SpanEntity) { + if (!defaultConfig.isDevModel) { + return + } + if (this.cache.has(entity.id)) { + this._updateEntity(entity) + } else { + this._addEntity(entity) + } + this._updateModelName(entity) + } + + updateTokenUsage(spanId: string, usage: TokenUsage) { + const entity = this.cache.get(spanId) + if (entity) { + entity.usage = { ...usage } + } + if (entity?.parentId) { + this._updateParentUsage(entity.parentId, usage) + } + } + + addStreamMessage(spanId: string, modelName: string, context: string, message: any) { + const span = this.cache.get(spanId) + if (!span) { + return + } + const attributes = span.attributes + let msgArray: any[] = [] + if (attributes && attributes['outputs'] && Array.isArray(attributes['outputs'])) { + 
msgArray = attributes['outputs'] || [] + msgArray.push(message) + attributes['outputs'] = msgArray + } else { + msgArray = [message] + span.attributes = { ...attributes, outputs: msgArray } as Attributes + } + this._updateParentOutputs(span.parentId, modelName, context) + } + + setEndMessage(spanId: string, modelName: string, message: string) { + const span = this.cache.get(spanId) + if (span && span.attributes) { + let outputs = span.attributes['outputs'] + if (!outputs || typeof outputs !== 'object') { + outputs = {} + } + if (!(`${modelName}` in outputs) || !outputs[`${modelName}`]) { + outputs[`${modelName}`] = message + span.attributes[`outputs`] = outputs + this.cache.set(spanId, span) + } + } + } + + async cleanHistoryTrace(topicId: string, traceId: string, modelName?: string) { + this._cleanCache(traceId, modelName) + + const filePath = path.join(this.fileDir, topicId, traceId) + const fileExists = await this._existFile(filePath) + + if (!fileExists) { + return + } + + if (!modelName) { + await fs.rm(filePath, { recursive: true }) + } else { + const allSpans = await this._getHisData(topicId, traceId) + allSpans.forEach((span) => { + if (!modelName || modelName !== span.modelName) { + this.cache.set(span.id, span) + } + }) + try { + await fs.rm(filePath, { recursive: true }) + } catch (error) { + console.error(error) + } + } + } + + private _addEntity(entity: SpanEntity): void { + entity.topicId = this.topicMap.get(entity.traceId) + this.cache.set(entity.id, entity) + } + + private _updateModelName(entity: SpanEntity) { + let modelName = entity.modelName || entity.attributes?.modelName?.toString() + if (!modelName && entity.parentId) { + modelName = this.cache.get(entity.parentId)?.modelName + } + entity.modelName = modelName + } + private _updateEntity(entity: SpanEntity): void { + entity.topicId = this.topicMap.get(entity.traceId) + const savedEntity = this.cache.get(entity.id) + if (savedEntity) { + Object.keys(entity).forEach((key) => { + const value = entity[key] + if (value === undefined) { + savedEntity[key] = value + return + } + if (key === 'attributes') { + const savedAttrs = savedEntity.attributes || {} + Object.keys(value).forEach((attrKey) => { + const jsonData = + typeof value[attrKey] === 'string' && value[attrKey].startsWith('{') + ? 
JSON.parse(value[attrKey]) + : value[attrKey] + if ( + savedAttrs[attrKey] !== undefined && + typeof jsonData === 'object' && + typeof savedAttrs[attrKey] === 'object' + ) { + savedAttrs[attrKey] = { ...savedAttrs[attrKey], ...jsonData } + } else { + savedAttrs[attrKey] = value[attrKey] + } + }) + savedEntity.attributes = savedAttrs + } else { + savedEntity[key] = value + } + }) + this.cache.set(entity.id, savedEntity) + } + } + + private _cleanCache(traceId: string, modelName?: string) { + this.cache + .values() + .filter((span) => { + return span && span.traceId === traceId && (!modelName || span.modelName === modelName) + }) + .forEach((span) => this.cache.delete(span.id)) + } + + private _updateParentOutputs(spanId: string, modelName: string, context: string) { + const span = this.cache.get(spanId) + if (!span || !context) { + return + } + const attributes = span.attributes + // 如果含有modelName属性,是具体的某个modalName输出,拼接到streamText下面 + if (attributes && span.modelName) { + const currentValue = attributes['outputs'] + if (currentValue && typeof currentValue === 'object') { + const allContext = (currentValue['streamText'] || '') + context + attributes['outputs'] = { ...currentValue, streamText: allContext } + } else { + attributes['outputs'] = { streamText: context } + } + span.attributes = attributes + } else if (span.modelName) { + span.attributes = { outputs: { [`${modelName}`]: context } } as Attributes + } else { + return + } + this.cache.set(span.id, span) + this._updateParentOutputs(span.parentId, modelName, context) + } + + private _updateParentUsage(spanId: string, usage: TokenUsage) { + const entity = this.cache.get(spanId) + if (!entity) { + return + } + if (!entity.usage) { + entity.usage = { ...usage } + } else { + entity.usage.prompt_tokens = entity.usage.prompt_tokens + usage.prompt_tokens + entity.usage.completion_tokens = entity.usage.completion_tokens + usage.completion_tokens + entity.usage.total_tokens = entity.usage.total_tokens + usage.total_tokens + } + this.cache.set(entity.id, entity) + if (entity?.parentId) { + this._updateParentUsage(entity.parentId, usage) + } + } + + private async _saveToFile(spans: SpanEntity[], traceId: string, topicId: string) { + const dirPath = path.join(this.fileDir, topicId) + await this._checkFolder(dirPath) + + const filePath = path.join(dirPath, traceId) + + const writeOperations = spans + .filter((span) => span.topicId) + .map(async (span) => { + await fs.appendFile(filePath, JSON.stringify(span) + '\n') + }) + + await Promise.all(writeOperations) + } + + private async _getHisData(topicId: string, traceId: string, modelName?: string) { + const filePath = path.join(this.fileDir, topicId, traceId) + + if (!(await this._existFile(filePath))) { + return [] + } + + try { + const fileHandle = await fs.open(filePath, 'r') + const stream = fileHandle.createReadStream() + const chunks: string[] = [] + + for await (const chunk of stream) { + chunks.push(chunk.toString()) + } + await fileHandle.close() + + // 使用生成器逐行处理 + const parseLines = function* (text: string) { + for (const line of text.split('\n')) { + const trimmed = line.trim() + if (trimmed) { + try { + yield JSON.parse(trimmed) as SpanEntity + } catch (e) { + console.error(`JSON解析失败: ${trimmed}`, e) + } + } + } + } + + return Array.from(parseLines(chunks.join(''))) + .filter((span) => span.topicId === topicId && span.traceId === traceId && span.modelName) + .filter((span) => !modelName || span.modelName === modelName) + } catch (err) { + console.error('Error parsing JSON:', err) + throw 
err + } + } + + private async _checkFolder(filePath: string) { + try { + await fs.mkdir(filePath, { recursive: true }) + } catch (err) { + if (typeof err === 'object' && err && 'code' in err && err.code !== 'EEXIST') throw err + } + } + + private async _existFile(filePath: string) { + try { + await fs.access(filePath) + return true + } catch (err) { + console.log('delete trace file error:', err) + return false + } + } +} + +export const spanCacheService = new SpanCacheService() +export const cleanTopic = spanCacheService.cleanTopic.bind(spanCacheService) +export const saveEntity = spanCacheService.saveEntity.bind(spanCacheService) +export const getEntity = spanCacheService.getEntity.bind(spanCacheService) +export const tokenUsage = spanCacheService.updateTokenUsage.bind(spanCacheService) +export const saveSpans = spanCacheService.saveSpans.bind(spanCacheService) +export const getSpans = spanCacheService.getSpans.bind(spanCacheService) +export const addEndMessage = spanCacheService.setEndMessage.bind(spanCacheService) +export const bindTopic = spanCacheService.setTopicId.bind(spanCacheService) +export const addStreamMessage = spanCacheService.addStreamMessage.bind(spanCacheService) +export const cleanHistoryTrace = spanCacheService.cleanHistoryTrace.bind(spanCacheService) +export const cleanLocalData = spanCacheService.cleanLocalData.bind(spanCacheService) diff --git a/src/preload/index.ts b/src/preload/index.ts index 43ec5a255..0aaa6b2c2 100644 --- a/src/preload/index.ts +++ b/src/preload/index.ts @@ -1,5 +1,7 @@ import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces' import { electronAPI } from '@electron-toolkit/preload' +import { SpanEntity, TokenUsage } from '@mcp-trace/trace-core' +import { SpanContext } from '@opentelemetry/api' import { UpgradeChannel } from '@shared/config/constant' import type { LogLevel, LogSourceWithContext } from '@shared/config/types' import { IpcChannel } from '@shared/IpcChannel' @@ -26,6 +28,14 @@ import { Notification } from 'src/renderer/src/types/notification' import { CreateDirectoryOptions } from 'webdav' import type { ActionItem } from '../renderer/src/types/selectionTypes' +export function tracedInvoke(channel: string, spanContext: SpanContext | undefined, ...args: any[]) { + if (spanContext) { + const data = { type: 'trace', context: spanContext } + console.log(`tracedInvoke data`, data) + return ipcRenderer.invoke(channel, ...args, data) + } + return ipcRenderer.invoke(channel, ...args) +} // Custom APIs for renderer const api = { @@ -125,7 +135,7 @@ const api = { deleteDir: (dirPath: string) => ipcRenderer.invoke(IpcChannel.File_DeleteDir, dirPath), read: (fileId: string, detectEncoding?: boolean) => ipcRenderer.invoke(IpcChannel.File_Read, fileId, detectEncoding), - clear: () => ipcRenderer.invoke(IpcChannel.File_Clear), + clear: (spanContext?: SpanContext) => ipcRenderer.invoke(IpcChannel.File_Clear, spanContext), get: (filePath: string) => ipcRenderer.invoke(IpcChannel.File_Get, filePath), /** * 创建一个空的临时文件 @@ -145,7 +155,7 @@ const api = { openPath: (path: string) => ipcRenderer.invoke(IpcChannel.File_OpenPath, path), save: (path: string, content: string | NodeJS.ArrayBufferView, options?: any) => ipcRenderer.invoke(IpcChannel.File_Save, path, content, options), - selectFolder: () => ipcRenderer.invoke(IpcChannel.File_SelectFolder), + selectFolder: (spanContext?: SpanContext) => ipcRenderer.invoke(IpcChannel.File_SelectFolder, spanContext), saveImage: (name: string, data: string) => ipcRenderer.invoke(IpcChannel.File_SaveImage, 
name, data), binaryImage: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_BinaryImage, fileId), base64Image: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Base64Image, fileId), @@ -169,7 +179,8 @@ const api = { update: (shortcuts: Shortcut[]) => ipcRenderer.invoke(IpcChannel.Shortcuts_Update, shortcuts) }, knowledgeBase: { - create: (base: KnowledgeBaseParams) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Create, base), + create: (base: KnowledgeBaseParams, context?: SpanContext) => + tracedInvoke(IpcChannel.KnowledgeBase_Create, context, base), reset: (base: KnowledgeBaseParams) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Reset, base), delete: (id: string) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Delete, id), add: ({ @@ -185,10 +196,12 @@ const api = { }) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Add, { base, item, forceReload, userId }), remove: ({ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Remove, { uniqueId, uniqueIds, base }), - search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) => - ipcRenderer.invoke(IpcChannel.KnowledgeBase_Search, { search, base }), - rerank: ({ search, base, results }: { search: string; base: KnowledgeBaseParams; results: ExtractChunkData[] }) => - ipcRenderer.invoke(IpcChannel.KnowledgeBase_Rerank, { search, base, results }), + search: ({ search, base }: { search: string; base: KnowledgeBaseParams }, context?: SpanContext) => + tracedInvoke(IpcChannel.KnowledgeBase_Search, context, { search, base }), + rerank: ( + { search, base, results }: { search: string; base: KnowledgeBaseParams; results: ExtractChunkData[] }, + context?: SpanContext + ) => tracedInvoke(IpcChannel.KnowledgeBase_Rerank, context, { search, base, results }), checkQuota: ({ base, userId }: { base: KnowledgeBaseParams; userId: string }) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Check_Quota, base, userId) }, @@ -253,9 +266,11 @@ const api = { removeServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_RemoveServer, server), restartServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_RestartServer, server), stopServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_StopServer, server), - listTools: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_ListTools, server), - callTool: ({ server, name, args, callId }: { server: MCPServer; name: string; args: any; callId?: string }) => - ipcRenderer.invoke(IpcChannel.Mcp_CallTool, { server, name, args, callId }), + listTools: (server: MCPServer, context?: SpanContext) => tracedInvoke(IpcChannel.Mcp_ListTools, context, server), + callTool: ( + { server, name, args, callId }: { server: MCPServer; name: string; args: any; callId?: string }, + context?: SpanContext + ) => tracedInvoke(IpcChannel.Mcp_CallTool, context, { server, name, args, callId }), listPrompts: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_ListPrompts, server), getPrompt: ({ server, name, args }: { server: MCPServer; name: string; args?: Record }) => ipcRenderer.invoke(IpcChannel.Mcp_GetPrompt, { server, name, args }), @@ -348,7 +363,28 @@ const api = { }, quoteToMainWindow: (text: string) => ipcRenderer.invoke(IpcChannel.App_QuoteToMain, text), setDisableHardwareAcceleration: (isDisable: boolean) => - ipcRenderer.invoke(IpcChannel.App_SetDisableHardwareAcceleration, isDisable) + ipcRenderer.invoke(IpcChannel.App_SetDisableHardwareAcceleration, isDisable), + trace: { + saveData: 
(topicId: string) => ipcRenderer.invoke(IpcChannel.TRACE_SAVE_DATA, topicId), + getData: (topicId: string, traceId: string, modelName?: string) => + ipcRenderer.invoke(IpcChannel.TRACE_GET_DATA, topicId, traceId, modelName), + saveEntity: (entity: SpanEntity) => ipcRenderer.invoke(IpcChannel.TRACE_SAVE_ENTITY, entity), + getEntity: (spanId: string) => ipcRenderer.invoke(IpcChannel.TRACE_GET_ENTITY, spanId), + bindTopic: (topicId: string, traceId: string) => ipcRenderer.invoke(IpcChannel.TRACE_BIND_TOPIC, topicId, traceId), + tokenUsage: (spanId: string, usage: TokenUsage) => ipcRenderer.invoke(IpcChannel.TRACE_TOKEN_USAGE, spanId, usage), + cleanHistory: (topicId: string, traceId: string, modelName?: string) => + ipcRenderer.invoke(IpcChannel.TRACE_CLEAN_HISTORY, topicId, traceId, modelName), + cleanTopic: (topicId: string, traceId?: string) => + ipcRenderer.invoke(IpcChannel.TRACE_CLEAN_TOPIC, topicId, traceId), + openWindow: (topicId: string, traceId: string, autoOpen?: boolean, modelName?: string) => + ipcRenderer.invoke(IpcChannel.TRACE_OPEN_WINDOW, topicId, traceId, autoOpen, modelName), + setTraceWindowTitle: (title: string) => ipcRenderer.invoke(IpcChannel.TRACE_SET_TITLE, title), + addEndMessage: (spanId: string, modelName: string, context: string) => + ipcRenderer.invoke(IpcChannel.TRACE_ADD_END_MESSAGE, spanId, modelName, context), + cleanLocalData: () => ipcRenderer.invoke(IpcChannel.TRACE_CLEAN_LOCAL_DATA), + addStreamMessage: (spanId: string, modelName: string, context: string, message: any) => + ipcRenderer.invoke(IpcChannel.TRACE_ADD_STREAM_MESSAGE, spanId, modelName, context, message) + } } // Use `contextBridge` APIs to expose Electron APIs to diff --git a/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts b/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts index fc2994d4c..2fa275ca6 100644 --- a/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts +++ b/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts @@ -543,7 +543,8 @@ export class OpenAIAPIClient extends OpenAIBaseClient< const sdkParams: OpenAISdkParams = streamOutput ? 
{ ...commonParams, - stream: true + stream: true, + stream_options: { include_usage: true } } : { ...commonParams, diff --git a/src/renderer/src/aiCore/index.ts b/src/renderer/src/aiCore/index.ts index 9e7a9b731..af300a0f1 100644 --- a/src/renderer/src/aiCore/index.ts +++ b/src/renderer/src/aiCore/index.ts @@ -2,9 +2,11 @@ import { loggerService } from '@logger' import { ApiClientFactory } from '@renderer/aiCore/clients/ApiClientFactory' import { BaseApiClient } from '@renderer/aiCore/clients/BaseApiClient' import { isDedicatedImageGenerationModel, isFunctionCallingModel } from '@renderer/config/models' +import { withSpanResult } from '@renderer/services/SpanManagerService' +import { StartSpanParams } from '@renderer/trace/types/ModelSpanEntity' import { getProviderByModel } from '@renderer/services/AssistantService' import type { GenerateImageParams, Model, Provider } from '@renderer/types' -import { RequestOptions, SdkModel } from '@renderer/types/sdk' +import type { RequestOptions, SdkModel } from '@renderer/types/sdk' import { isEnabledToolUse } from '@renderer/utils/mcp-tools' import { OpenAIAPIClient } from './clients' @@ -25,7 +27,7 @@ import { MIDDLEWARE_NAME as ImageGenerationMiddlewareName } from './middleware/f import { MIDDLEWARE_NAME as ThinkingTagExtractionMiddlewareName } from './middleware/feat/ThinkingTagExtractionMiddleware' import { MIDDLEWARE_NAME as ToolUseExtractionMiddlewareName } from './middleware/feat/ToolUseExtractionMiddleware' import { MiddlewareRegistry } from './middleware/register' -import { CompletionsParams, CompletionsResult } from './middleware/schemas' +import type { CompletionsParams, CompletionsResult } from './middleware/schemas' const logger = loggerService.withContext('AiProvider') @@ -126,7 +128,23 @@ export default class AiProvider { const wrappedCompletionMethod = applyCompletionsMiddlewares(client, client.createCompletions, middlewares) // 4. Execute the wrapped method with the original params - return wrappedCompletionMethod(params, options) + const result = wrappedCompletionMethod(params, options) + return result + } + + public async completionsForTrace(params: CompletionsParams, options?: RequestOptions): Promise { + const traceName = params.assistant.model?.name + ? 
`${params.assistant.model?.name}.${params.callType}` + : `LLM.${params.callType}` + + const traceParams: StartSpanParams = { + name: traceName, + tag: 'LLM', + topicId: params.topicId || '', + modelName: params.assistant.model?.name + } + + return await withSpanResult(this.completions.bind(this), traceParams, params, options) } public async models(): Promise { diff --git a/src/renderer/src/aiCore/middleware/composer.ts b/src/renderer/src/aiCore/middleware/composer.ts index 8b93b8015..82b9fd170 100644 --- a/src/renderer/src/aiCore/middleware/composer.ts +++ b/src/renderer/src/aiCore/middleware/composer.ts @@ -1,3 +1,4 @@ +import { withSpanResult } from '@renderer/services/SpanManagerService' import { RequestOptions, SdkInstance, @@ -252,19 +253,28 @@ export function applyCompletionsMiddlewares< const abortSignal = context._internal.flowControl?.abortSignal const timeout = context._internal.customState?.sdkMetadata?.timeout + const methodCall = async (payload) => { + return await originalCompletionsMethod.call(originalApiClientInstance, payload, { + ...options, + signal: abortSignal, + timeout + }) + } + + const traceParams = { + name: `${params.assistant?.model?.name}.client`, + tag: 'LLM', + topicId: params.topicId || '', + modelName: params.assistant?.model?.name + } + // Call the original SDK method with transformed parameters // 使用转换后的参数调用原始 SDK 方法 - const rawOutput = await originalCompletionsMethod.call(originalApiClientInstance, sdkPayload, { - ...options, - signal: abortSignal, - timeout - }) + const rawOutput = await withSpanResult(methodCall, traceParams, sdkPayload) // Return result wrapped in CompletionsResult format // 以 CompletionsResult 格式返回包装的结果 - return { - rawOutput - } as CompletionsResult + return { rawOutput } as CompletionsResult } const chain = middlewares.map((middleware) => middleware(api)) diff --git a/src/renderer/src/aiCore/middleware/core/McpToolChunkMiddleware.ts b/src/renderer/src/aiCore/middleware/core/McpToolChunkMiddleware.ts index 50609131e..2642d8bef 100644 --- a/src/renderer/src/aiCore/middleware/core/McpToolChunkMiddleware.ts +++ b/src/renderer/src/aiCore/middleware/core/McpToolChunkMiddleware.ts @@ -119,7 +119,8 @@ function createToolHandlingTransform( mcpTools, allToolResponses, currentParams.onChunk, - currentParams.assistant.model! + currentParams.assistant.model!, + currentParams.topicId ) // 缓存执行结果 @@ -147,7 +148,8 @@ function createToolHandlingTransform( mcpTools, allToolResponses, currentParams.onChunk, - currentParams.assistant.model! 
+ currentParams.assistant.model!, + currentParams.topicId ) // 缓存执行结果 @@ -217,7 +219,8 @@ async function executeToolCalls( mcpTools: MCPTool[], allToolResponses: MCPToolResponse[], onChunk: CompletionsParams['onChunk'], - model: Model + model: Model, + topicId?: string ): Promise<{ toolResults: SdkMessageParam[]; confirmedToolCalls: SdkToolCall[] }> { const mcpToolResponses: ToolCallResponse[] = toolCalls .map((toolCall) => { @@ -244,7 +247,8 @@ async function executeToolCalls( }, model, mcpTools, - ctx._internal?.flowControl?.abortSignal + ctx._internal?.flowControl?.abortSignal, + topicId ) // 找出已确认工具对应的原始toolCalls @@ -275,7 +279,8 @@ async function executeToolUseResponses( mcpTools: MCPTool[], allToolResponses: MCPToolResponse[], onChunk: CompletionsParams['onChunk'], - model: Model + model: Model, + topicId?: CompletionsParams['topicId'] ): Promise<{ toolResults: SdkMessageParam[] }> { // 直接使用parseAndCallTools函数处理已经解析好的ToolUseResponse const { toolResults } = await parseAndCallTools( @@ -287,7 +292,8 @@ async function executeToolUseResponses( }, model, mcpTools, - ctx._internal?.flowControl?.abortSignal + ctx._internal?.flowControl?.abortSignal, + topicId ) return { toolResults } diff --git a/src/renderer/src/aiCore/middleware/schemas.ts b/src/renderer/src/aiCore/middleware/schemas.ts index 2e6021462..d429add46 100644 --- a/src/renderer/src/aiCore/middleware/schemas.ts +++ b/src/renderer/src/aiCore/middleware/schemas.ts @@ -55,6 +55,7 @@ export interface CompletionsParams { // 上下文控制 contextCount?: number + topicId?: string // 主题ID,用于关联上下文 _internal?: ProcessingState } diff --git a/src/renderer/src/databases/index.ts b/src/renderer/src/databases/index.ts index 6c23a115a..baadb0d0d 100644 --- a/src/renderer/src/databases/index.ts +++ b/src/renderer/src/databases/index.ts @@ -73,7 +73,6 @@ db.version(7) message_blocks: 'id, messageId, file.id' // Correct syntax with comma separator }) .upgrade((tx) => upgradeToV7(tx)) - db.version(8) .stores({ // Re-declare all tables for the new version diff --git a/src/renderer/src/hooks/useMessageOperations.ts b/src/renderer/src/hooks/useMessageOperations.ts index 331d88706..615b830e6 100644 --- a/src/renderer/src/hooks/useMessageOperations.ts +++ b/src/renderer/src/hooks/useMessageOperations.ts @@ -1,6 +1,7 @@ import { loggerService } from '@logger' import { createSelector } from '@reduxjs/toolkit' import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService' +import { appendTrace, pauseTrace, restartTrace } from '@renderer/services/SpanManagerService' import { estimateUserPromptUsage } from '@renderer/services/TokenService' import store, { type RootState, useAppDispatch, useAppSelector } from '@renderer/store' import { updateOneBlock } from '@renderer/store/messageBlock' @@ -53,8 +54,9 @@ export function useMessageOperations(topic: Topic) { * Dispatches deleteSingleMessageThunk. 
*/ const deleteMessage = useCallback( - async (id: string) => { + async (id: string, traceId?: string, modelName?: string) => { await dispatch(deleteSingleMessageThunk(topic.id, id)) + window.api.trace.cleanHistory(topic.id, traceId || '', modelName) }, [dispatch, topic.id] ) @@ -99,6 +101,7 @@ export function useMessageOperations(topic: Topic) { */ const resendMessage = useCallback( async (message: Message, assistant: Assistant) => { + await restartTrace(message) await dispatch(resendMessageThunk(topic.id, message, assistant)) }, [dispatch, topic.id] @@ -139,6 +142,7 @@ export function useMessageOperations(topic: Topic) { for (const askId of askIds) { abortCompletion(askId) } + pauseTrace(topic.id) dispatch(newMessagesActions.setTopicLoading({ topicId: topic.id, loading: false })) }, [topic.id, dispatch]) @@ -158,6 +162,7 @@ export function useMessageOperations(topic: Topic) { */ const regenerateAssistantMessage = useCallback( async (message: Message, assistant: Assistant) => { + await restartTrace(message) if (message.role !== 'assistant') { logger.warn('regenerateAssistantMessage should only be called for assistant messages.') return @@ -173,6 +178,7 @@ export function useMessageOperations(topic: Topic) { */ const appendAssistantResponse = useCallback( async (existingAssistantMessage: Message, newModel: Model, assistant: Assistant) => { + await appendTrace(existingAssistantMessage, newModel) if (existingAssistantMessage.role !== 'assistant') { logger.error('appendAssistantResponse should only be called for an existing assistant message.') return @@ -181,7 +187,15 @@ export function useMessageOperations(topic: Topic) { logger.error('Cannot append response: The existing assistant message is missing its askId.') return } - await dispatch(appendAssistantResponseThunk(topic.id, existingAssistantMessage.id, newModel, assistant)) + await dispatch( + appendAssistantResponseThunk( + topic.id, + existingAssistantMessage.id, + newModel, + assistant, + existingAssistantMessage.traceId + ) + ) }, [dispatch, topic.id] ) @@ -375,6 +389,8 @@ export function useMessageOperations(topic: Topic) { return } + await restartTrace(message, mainTextBlock.content) + const fileBlocks = editedBlocks.filter( (block) => block.type === MessageBlockType.FILE || block.type === MessageBlockType.IMAGE ) diff --git a/src/renderer/src/i18n/locales/en-us.json b/src/renderer/src/i18n/locales/en-us.json index 1f47f9172..953aaf47c 100644 --- a/src/renderer/src/i18n/locales/en-us.json +++ b/src/renderer/src/i18n/locales/en-us.json @@ -2561,9 +2561,132 @@ }, "words": { "knowledgeGraph": "Knowledge Graph", - "quit": "[to be translated]:退出", + "quit": "Quit", "show_window": "Show Window", "visualization": "Visualization" + }, + "memory": { + "title": "Memories", + "actions": "Actions", + "description": "Memory allows you to store and manage information about your interactions with the assistant. 
You can add, edit, and delete memories, as well as filter and search through them.", + "add_memory": "Add Memory", + "edit_memory": "Edit Memory", + "memory_content": "Memory Content", + "please_enter_memory": "Please enter memory content", + "memory_placeholder": "Enter memory content...", + "user_id": "User ID", + "user_id_placeholder": "Enter user ID (optional)", + "load_failed": "Failed to load memories", + "add_success": "Memory added successfully", + "add_failed": "Failed to add memory", + "update_success": "Memory updated successfully", + "update_failed": "Failed to update memory", + "delete_success": "Memory deleted successfully", + "delete_failed": "Failed to delete memory", + "delete_confirm_title": "Delete Memories", + "delete_confirm_content": "Are you sure you want to delete {{count}} memories?", + "delete_confirm": "Are you sure you want to delete this memory?", + "time": "Time", + "user": "User", + "content": "Content", + "score": "Score", + "memories_description": "Showing {{count}} of {{total}} memories", + "search_placeholder": "Search memories...", + "start_date": "Start Date", + "end_date": "End Date", + "all_users": "All Users", + "users": "users", + "delete_selected": "Delete Selected", + "reset_filters": "Reset Filters", + "pagination_total": "{{start}}-{{end}} of {{total}} items", + "current_user": "Current User", + "select_user": "Select User", + "default_user": "Default User", + "switch_user": "Switch User", + "user_switched": "User context switched to {{user}}", + "switch_user_confirm": "Switch user context to {{user}}?", + "add_user": "Add User", + "add_new_user": "Add New User", + "new_user_id": "New User ID", + "new_user_id_placeholder": "Enter a unique user ID", + "user_id_required": "User ID is required", + "user_id_reserved": "'default-user' is reserved, please use a different ID", + "user_id_exists": "This user ID already exists", + "user_id_too_long": "User ID cannot exceed 50 characters", + "user_id_invalid_chars": "User ID can only contain letters, numbers, hyphens and underscores", + "user_id_rules": "User ID must be unique and contain only letters, numbers, hyphens (-) and underscores (_)", + "user_created": "User {{user}} created and switched successfully", + "add_user_failed": "Failed to add user", + "memory": "memory", + "reset_user_memories": "Reset User Memories", + "reset_memories": "Reset Memories", + "delete_user": "Delete User", + "loading_memories": "Loading memories...", + "no_memories": "No memories yet", + "no_matching_memories": "No matching memories found", + "no_memories_description": "Start by adding your first memory to get started", + "try_different_filters": "Try adjusting your search criteria", + "add_first_memory": "Add Your First Memory", + "user_switch_failed": "Failed to switch user", + "cannot_delete_default_user": "Cannot delete the default user", + "delete_user_confirm_title": "Delete User", + "delete_user_confirm_content": "Are you sure you want to delete user {{user}} and all their memories?", + "user_deleted": "User {{user}} deleted successfully", + "delete_user_failed": "Failed to delete user", + "reset_user_memories_confirm_title": "Reset User Memories", + "reset_user_memories_confirm_content": "Are you sure you want to reset all memories for {{user}}?", + "user_memories_reset": "All memories for {{user}} have been reset", + "reset_user_memories_failed": "Failed to reset user memories", + "reset_memories_confirm_title": "Reset All Memories", + "reset_memories_confirm_content": "Are you sure you want to permanently 
delete all memories for {{user}}? This action cannot be undone.", + "memories_reset_success": "All memories for {{user}} have been reset successfully", + "reset_memories_failed": "Failed to reset memories", + "delete_confirm_single": "Are you sure you want to delete this memory?", + "total_memories": "total memories", + "default": "Default", + "custom": "Custom", + "global_memory_enabled": "Global memory enabled", + "global_memory": "Global Memory", + "enable_global_memory_first": "Please enable global memory first", + "configure_memory_first": "Please configure memory settings first", + "global_memory_disabled_title": "Global Memory Disabled", + "global_memory_disabled_desc": "To use memory features, please enable global memory in assistant settings first.", + "not_configured_title": "Memory Not Configured", + "not_configured_desc": "Please configure embedding and LLM models in memory settings to enable memory functionality.", + "go_to_memory_page": "Go to Memory Page", + "settings": "Settings", + "user_management": "User Management", + "statistics": "Statistics", + "search": "Search", + "initial_memory_content": "Welcome! This is your first memory.", + "loading": "Loading memories...", + "settings_title": "Memory Settings", + "llm_model": "LLM Model", + "please_select_llm_model": "Please select an LLM model", + "select_llm_model_placeholder": "Select LLM Model", + "embedding_model": "Embedding Model", + "please_select_embedding_model": "Please select an embedding model", + "select_embedding_model_placeholder": "Select Embedding Model", + "embedding_dimensions": "Embedding Dimensions", + "stored_memories": "Stored Memories", + "global_memory_description": "To use memory features, please enable global memory in assistant settings." + }, + "trace": { + "label": "Call Chain", + "traceWindow": "Call Chain Window", + "backList": "Back To List", + "spanDetail": "Span Details", + "name": "Node Name", + "tag": "Tag", + "startTime": "Start Time", + "endTime": "End Time", + "tokenUsage": "Token Usage", + "spendTime": "Spend Time", + "parentId": "Parent Id", + "inputs": "Inputs", + "outputs": "Outputs", + "noTraceList": "No trace information found", + "edasSupport": "Powered by Alibaba Cloud EDAS" } } } diff --git a/src/renderer/src/i18n/locales/ja-jp.json b/src/renderer/src/i18n/locales/ja-jp.json index 1486bd29e..b16d9d685 100644 --- a/src/renderer/src/i18n/locales/ja-jp.json +++ b/src/renderer/src/i18n/locales/ja-jp.json @@ -2561,9 +2561,132 @@ }, "words": { "knowledgeGraph": "ナレッジグラフ", - "quit": "[to be translated]:退出", + "quit": "終了", "show_window": "ウィンドウを表示", "visualization": "可視化" + }, + "trace": { + "label": "呼び出しチェーン", + "traceWindow": "呼び出しチェーンウィンドウ", + "backList": "リストに戻る", + "spanDetail": "スパンの詳細", + "name": "ノード名", + "tag": "Tagラベル", + "startTime": "開始時間", + "endTime": "終了時間", + "tokenUsage": "トークンの使用", + "spendTime": "時間を過ごす", + "parentId": "親ID", + "inputs": "入力", + "outputs": "出力", + "noTraceList": "トレース情報が見つかりません", + "edasSupport": "Powered by Alibaba Cloud EDAS" + }, + "memory": { + "title": "グローバルメモリ", + "add_memory": "メモリーを追加", + "edit_memory": "メモリーを編集", + "memory_content": "メモリー内容", + "please_enter_memory": "メモリー内容を入力してください", + "memory_placeholder": "メモリー内容を入力...", + "user_id": "ユーザーID", + "user_id_placeholder": "ユーザーIDを入力(オプション)", + "load_failed": "メモリーの読み込みに失敗しました", + "add_success": "メモリーが正常に追加されました", + "add_failed": "メモリーの追加に失敗しました", + "update_success": "メモリーが正常に更新されました", + "update_failed": "メモリーの更新に失敗しました", + "delete_success": "メモリーが正常に削除されました", + "delete_failed": 
"メモリーの削除に失敗しました", + "delete_confirm_title": "メモリーを削除", + "delete_confirm_content": "{{count}}件のメモリーを削除してもよろしいですか?", + "delete_confirm": "このメモリーを削除してもよろしいですか?", + "time": "時間", + "user": "ユーザー", + "content": "内容", + "score": "スコア", + "memories_description": "{{total}}件中{{count}}件のメモリーを表示", + "search_placeholder": "メモリーを検索...", + "start_date": "開始日", + "end_date": "終了日", + "all_users": "すべてのユーザー", + "users": "ユーザー", + "delete_selected": "選択したものを削除", + "reset_filters": "フィルターをリセット", + "pagination_total": "{{total}}件中{{start}}-{{end}}件", + "current_user": "現在のユーザー", + "select_user": "ユーザーを選択", + "default_user": "デフォルトユーザー", + "switch_user": "ユーザーを切り替え", + "user_switched": "ユーザーコンテキストが{{user}}に切り替わりました", + "switch_user_confirm": "ユーザーコンテキストを{{user}}に切り替えますか?", + "add_user": "ユーザーを追加", + "add_new_user": "新しいユーザーを追加", + "new_user_id": "新しいユーザーID", + "new_user_id_placeholder": "一意のユーザーIDを入力", + "user_id_required": "ユーザーIDは必須です", + "user_id_reserved": "'default-user'は予約済みです。別のIDを使用してください", + "user_id_exists": "このユーザーIDはすでに存在します", + "user_id_too_long": "ユーザーIDは50文字を超えられません", + "user_id_invalid_chars": "ユーザーIDには文字、数字、ハイフン、アンダースコアのみ使用できます", + "user_id_rules": "ユーザーIDは一意であり、文字、数字、ハイフン(-)、アンダースコア(_)のみ含む必要があります", + "user_created": "ユーザー{{user}}が作成され、切り替えが成功しました", + "add_user_failed": "ユーザーの追加に失敗しました", + "memory": "個のメモリ", + "reset_user_memories": "ユーザーメモリをリセット", + "reset_memories": "メモリをリセット", + "delete_user": "ユーザーを削除", + "loading_memories": "メモリを読み込み中...", + "no_memories": "メモリがありません", + "no_matching_memories": "一致するメモリが見つかりません", + "no_memories_description": "最初のメモリを追加してください", + "try_different_filters": "検索条件を調整してください", + "add_first_memory": "最初のメモリを追加", + "user_switch_failed": "ユーザーの切り替えに失敗しました", + "cannot_delete_default_user": "デフォルトユーザーは削除できません", + "delete_user_confirm_title": "ユーザーを削除", + "delete_user_confirm_content": "ユーザー{{user}}とそのすべてのメモリを削除してもよろしいですか?", + "user_deleted": "ユーザー{{user}}が正常に削除されました", + "delete_user_failed": "ユーザーの削除に失敗しました", + "reset_user_memories_confirm_title": "ユーザーメモリをリセット", + "reset_user_memories_confirm_content": "{{user}}のすべてのメモリをリセットしてもよろしいですか?", + "user_memories_reset": "{{user}}のすべてのメモリがリセットされました", + "reset_user_memories_failed": "ユーザーメモリのリセットに失敗しました", + "reset_memories_confirm_title": "すべてのメモリをリセット", + "reset_memories_confirm_content": "{{user}}のすべてのメモリを完全に削除してもよろしいですか?この操作は元に戻せません。", + "memories_reset_success": "{{user}}のすべてのメモリが正常にリセットされました", + "reset_memories_failed": "メモリのリセットに失敗しました", + "delete_confirm_single": "このメモリを削除してもよろしいですか?", + "total_memories": "個のメモリ", + "default": "デフォルト", + "custom": "カスタム", + "description": "メモリは、アシスタントとのやりとりに関する情報を保存・管理する機能です。メモリの追加、編集、削除のほか、フィルタリングや検索を行うことができます。", + "global_memory_enabled": "グローバルメモリが有効化されました", + "global_memory": "グローバルメモリ", + "enable_global_memory_first": "最初にグローバルメモリを有効にしてください", + "configure_memory_first": "最初にメモリ設定を構成してください", + "global_memory_disabled_title": "グローバルメモリが無効です", + "global_memory_disabled_desc": "メモリ機能を使用するには、まずアシスタント設定でグローバルメモリを有効にしてください。", + "not_configured_title": "メモリが設定されていません", + "not_configured_desc": "メモリ機能を有効にするには、メモリ設定で埋め込みとLLMモデルを設定してください。", + "go_to_memory_page": "メモリページに移動", + "settings": "設定", + "statistics": "統計", + "search": "検索", + "actions": "アクション", + "user_management": "ユーザー管理", + "initial_memory_content": "ようこそ!これはあなたの最初の記憶です。", + "loading": "思い出を読み込み中...", + "settings_title": "メモリ設定", + "llm_model": "LLMモデル", + "please_select_llm_model": "LLMモデルを選択してください", + "select_llm_model_placeholder": "LLMモデルを選択", + "embedding_model": "埋め込みモデル", + "please_select_embedding_model": 
"埋め込みモデルを選択してください", + "select_embedding_model_placeholder": "埋め込みモデルを選択", + "embedding_dimensions": "埋め込み次元", + "stored_memories": "保存された記憶", + "global_memory_description": "メモリ機能を使用するには、アシスタント設定でグローバルメモリを有効にしてください。" } } } diff --git a/src/renderer/src/i18n/locales/ru-ru.json b/src/renderer/src/i18n/locales/ru-ru.json index a461a9357..896cbfa0f 100644 --- a/src/renderer/src/i18n/locales/ru-ru.json +++ b/src/renderer/src/i18n/locales/ru-ru.json @@ -2561,9 +2561,132 @@ }, "words": { "knowledgeGraph": "Граф знаний", - "quit": "[to be translated]:退出", + "quit": "Выйти", "show_window": "Показать окно", "visualization": "Визуализация" + }, + "memory": { + "title": "Глобальная память", + "add_memory": "Добавить память", + "edit_memory": "Редактировать память", + "memory_content": "Содержимое памяти", + "please_enter_memory": "Пожалуйста, введите содержимое памяти", + "memory_placeholder": "Введите содержимое памяти...", + "user_id": "ID пользователя", + "user_id_placeholder": "Введите ID пользователя (необязательно)", + "load_failed": "Не удалось загрузить память", + "add_success": "Память успешно добавлена", + "add_failed": "Не удалось добавить память", + "update_success": "Память успешно обновлена", + "update_failed": "Не удалось обновить память", + "delete_success": "Память успешно удалена", + "delete_failed": "Не удалось удалить память", + "delete_confirm_title": "Удалить память", + "delete_confirm_content": "Вы уверены, что хотите удалить {{count}} записей памяти?", + "delete_confirm": "Вы уверены, что хотите удалить эту запись памяти?", + "time": "Время", + "user": "Пользователь", + "content": "Содержимое", + "score": "Оценка", + "memories_description": "Показано {{count}} из {{total}} записей памяти", + "search_placeholder": "Поиск памяти...", + "start_date": "Дата начала", + "end_date": "Дата окончания", + "all_users": "Все пользователи", + "users": "пользователи", + "delete_selected": "Удалить выбранные", + "reset_filters": "Сбросить фильтры", + "pagination_total": "{{start}}-{{end}} из {{total}} элементов", + "current_user": "Текущий пользователь", + "select_user": "Выбрать пользователя", + "default_user": "Пользователь по умолчанию", + "switch_user": "Переключить пользователя", + "user_switched": "Контекст пользователя переключен на {{user}}", + "switch_user_confirm": "Переключить контекст пользователя на {{user}}?", + "add_user": "Добавить пользователя", + "add_new_user": "Добавить нового пользователя", + "new_user_id": "Новый ID пользователя", + "new_user_id_placeholder": "Введите уникальный ID пользователя", + "user_id_required": "ID пользователя обязателен", + "user_id_reserved": "'default-user' зарезервирован, используйте другой ID", + "user_id_exists": "Этот ID пользователя уже существует", + "user_id_too_long": "ID пользователя не может превышать 50 символов", + "user_id_invalid_chars": "ID пользователя может содержать только буквы, цифры, дефисы и подчёркивания", + "user_id_rules": "ID пользователя должен быть уникальным и содержать только буквы, цифры, дефисы (-) и подчёркивания (_)", + "user_created": "Пользователь {{user}} создан и переключен успешно", + "add_user_failed": "Не удалось добавить пользователя", + "memory": "воспоминаний", + "reset_user_memories": "Сбросить воспоминания пользователя", + "reset_memories": "Сбросить воспоминания", + "delete_user": "Удалить пользователя", + "loading_memories": "Загрузка воспоминаний...", + "no_memories": "Нет воспоминаний", + "no_matching_memories": "Подходящие воспоминания не найдены", + "no_memories_description": "Начните с 
добавления вашего первого воспоминания", + "try_different_filters": "Попробуйте изменить критерии поиска", + "add_first_memory": "Добавить первое воспоминание", + "user_switch_failed": "Не удалось переключить пользователя", + "cannot_delete_default_user": "Нельзя удалить пользователя по умолчанию", + "delete_user_confirm_title": "Удалить пользователя", + "delete_user_confirm_content": "Вы уверены, что хотите удалить пользователя {{user}} и все его воспоминания?", + "user_deleted": "Пользователь {{user}} успешно удален", + "delete_user_failed": "Не удалось удалить пользователя", + "reset_user_memories_confirm_title": "Сбросить воспоминания пользователя", + "reset_user_memories_confirm_content": "Вы уверены, что хотите сбросить все воспоминания пользователя {{user}}?", + "user_memories_reset": "Все воспоминания пользователя {{user}} сброшены", + "reset_user_memories_failed": "Не удалось сбросить воспоминания пользователя", + "reset_memories_confirm_title": "Сбросить все воспоминания", + "reset_memories_confirm_content": "Вы уверены, что хотите навсегда удалить все воспоминания пользователя {{user}}? Это действие нельзя отменить.", + "memories_reset_success": "Все воспоминания пользователя {{user}} успешно сброшены", + "reset_memories_failed": "Не удалось сбросить воспоминания", + "delete_confirm_single": "Вы уверены, что хотите удалить это воспоминание?", + "total_memories": "всего воспоминаний", + "default": "По умолчанию", + "custom": "Пользовательский", + "description": "Память позволяет хранить и управлять информацией о ваших взаимодействиях с ассистентом. Вы можете добавлять, редактировать и удалять воспоминания, а также фильтровать и искать их.", + "global_memory_enabled": "Глобальная память включена", + "global_memory": "Глобальная память", + "enable_global_memory_first": "Сначала включите глобальную память", + "configure_memory_first": "Сначала настройте параметры памяти", + "global_memory_disabled_title": "Глобальная память отключена", + "global_memory_disabled_desc": "Чтобы использовать функции памяти, сначала включите глобальную память в настройках ассистента.", + "not_configured_title": "Память не настроена", + "not_configured_desc": "Пожалуйста, настройте модели встраивания и LLM в настройках памяти, чтобы включить функциональность памяти.", + "go_to_memory_page": "Перейти на страницу памяти", + "settings": "Настройки", + "statistics": "Статистика", + "search": "Поиск", + "actions": "Действия", + "user_management": "Управление пользователями", + "initial_memory_content": "Добро пожаловать! Это ваше первое воспоминание.", + "loading": "Загрузка воспоминаний...", + "settings_title": "Настройки памяти", + "llm_model": "Модель LLM", + "please_select_llm_model": "Пожалуйста, выберите модель LLM", + "select_llm_model_placeholder": "Выбор модели LLM", + "embedding_model": "Модель встраивания", + "please_select_embedding_model": "Пожалуйста, выберите модель для внедрения", + "select_embedding_model_placeholder": "Выберите модель внедрения", + "embedding_dimensions": "Размерность вложения", + "stored_memories": "Запасённые воспоминания", + "global_memory_description": "Для использования функций памяти необходимо включить глобальную память в настройках ассистента." 
+ }, + "trace": { + "label": "Цепочка вызовов", + "traceWindow": "Окно цепочки вызовов", + "backList": "Вернуться к списку", + "spanDetail": "Span Подробнее", + "name": "Имя узла", + "tag": "ярлык", + "startTime": "время начала", + "endTime": "время окончания", + "tokenUsage": "Использование токена", + "spendTime": "тратитьВремя", + "parentId": "Родительский идентификатор", + "inputs": "входы", + "outputs": "выходы", + "noTraceList": "Информация о следах не найдена", + "edasSupport": "Powered by Alibaba Cloud EDAS" } } } diff --git a/src/renderer/src/i18n/locales/zh-cn.json b/src/renderer/src/i18n/locales/zh-cn.json index ceaebd274..90dbfd94b 100644 --- a/src/renderer/src/i18n/locales/zh-cn.json +++ b/src/renderer/src/i18n/locales/zh-cn.json @@ -2564,6 +2564,129 @@ "quit": "退出", "show_window": "显示窗口", "visualization": "可视化" + }, + "memory": { + "title": "全局记忆", + "settings": "设置", + "statistics": "统计", + "search": "搜索", + "actions": "操作", + "add_memory": "添加记忆", + "edit_memory": "编辑记忆", + "memory_content": "记忆内容", + "please_enter_memory": "请输入记忆内容", + "memory_placeholder": "输入记忆内容...", + "user_id": "用户 ID", + "user_id_placeholder": "输入用户 ID(可选)", + "load_failed": "加载记忆失败", + "add_success": "记忆添加成功", + "add_failed": "添加记忆失败", + "update_success": "记忆更新成功", + "update_failed": "更新记忆失败", + "delete_success": "记忆删除成功", + "delete_failed": "删除记忆失败", + "delete_confirm_title": "删除记忆", + "delete_confirm_content": "确定要删除 {{count}} 条记忆吗?", + "delete_confirm": "确定要删除这条记忆吗?", + "time": "时间", + "user": "用户", + "content": "内容", + "score": "分数", + "memories_description": "显示 {{count}} / {{total}} 条记忆", + "search_placeholder": "搜索记忆...", + "start_date": "开始日期", + "end_date": "结束日期", + "all_users": "所有用户", + "users": "用户", + "delete_selected": "删除选中", + "reset_filters": "重置筛选", + "pagination_total": "第 {{start}}-{{end}} 项,共 {{total}} 项", + "current_user": "当前用户", + "select_user": "选择用户", + "default_user": "默认用户", + "switch_user": "切换用户", + "user_switched": "用户上下文已切换到 {{user}}", + "switch_user_confirm": "将用户上下文切换到 {{user}}?", + "add_user": "添加用户", + "add_new_user": "添加新用户", + "new_user_id": "新用户ID", + "new_user_id_placeholder": "输入唯一的用户ID", + "user_management": "用户管理", + "user_id_required": "用户ID为必填项", + "user_id_reserved": "'default-user' 为保留字,请使用其他ID", + "user_id_exists": "该用户ID已存在", + "user_id_too_long": "用户ID不能超过50个字符", + "user_id_invalid_chars": "用户ID只能包含字母、数字、连字符和下划线", + "user_id_rules": "用户ID必须唯一,只能包含字母、数字、连字符(-)和下划线(_)", + "user_created": "用户 {{user}} 创建并切换成功", + "add_user_failed": "添加用户失败", + "memory": "条记忆", + "reset_user_memories": "重置用户记忆", + "reset_memories": "重置记忆", + "delete_user": "删除用户", + "loading_memories": "正在加载记忆...", + "no_memories": "暂无记忆", + "no_matching_memories": "未找到匹配的记忆", + "no_memories_description": "开始添加您的第一条记忆吧", + "try_different_filters": "尝试调整搜索条件", + "add_first_memory": "添加您的第一条记忆", + "user_switch_failed": "切换用户失败", + "cannot_delete_default_user": "不能删除默认用户", + "delete_user_confirm_title": "删除用户", + "delete_user_confirm_content": "确定要删除用户 {{user}} 及其所有记忆吗?", + "user_deleted": "用户 {{user}} 删除成功", + "delete_user_failed": "删除用户失败", + "reset_user_memories_confirm_title": "重置用户记忆", + "reset_user_memories_confirm_content": "确定要重置 {{user}} 的所有记忆吗?", + "user_memories_reset": "{{user}} 的所有记忆已重置", + "reset_user_memories_failed": "重置用户记忆失败", + "reset_memories_confirm_title": "重置所有记忆", + "reset_memories_confirm_content": "确定要永久删除 {{user}} 的所有记忆吗?此操作无法撤销。", + "memories_reset_success": "{{user}} 的所有记忆已成功重置", + "reset_memories_failed": "重置记忆失败", + "delete_confirm_single": "确定要删除这条记忆吗?", + 
"total_memories": "条记忆", + "default": "默认", + "custom": "自定义", + "description": "记忆功能允许您存储和管理与助手交互的信息。您可以添加、编辑和删除记忆,也可以对它们进行过滤和搜索。", + "global_memory_enabled": "全局记忆已启用", + "global_memory": "全局记忆", + "enable_global_memory_first": "请先启用全局记忆", + "configure_memory_first": "请先配置记忆设置", + "global_memory_disabled_title": "全局记忆已禁用", + "global_memory_disabled_desc": "要使用记忆功能,请先在助手设置中启用全局记忆。", + "not_configured_title": "记忆未配置", + "not_configured_desc": "请在记忆设置中配置嵌入和LLM模型以启用记忆功能。", + "go_to_memory_page": "前往记忆页面", + "initial_memory_content": "欢迎!这是您的第一条记忆。", + "loading": "正在加载记忆...", + "settings_title": "记忆设置", + "llm_model": "LLM 模型", + "please_select_llm_model": "请选择 LLM 模型", + "select_llm_model_placeholder": "选择 LLM 模型", + "embedding_model": "嵌入模型", + "please_select_embedding_model": "请选择嵌入模型", + "select_embedding_model_placeholder": "选择嵌入模型", + "embedding_dimensions": "嵌入维度", + "stored_memories": "已存储记忆", + "global_memory_description": "需要开启助手设置中的全局记忆才能使用" + }, + "trace": { + "label": "调用链", + "traceWindow": "调用链窗口", + "backList": "返回列表", + "spanDetail": "Span详情", + "name": "节点名称", + "tag": "标签", + "startTime": "开始时间", + "endTime": "结束时间", + "tokenUsage": "Token使用量", + "spendTime": "消耗时间", + "parentId": "上级Id", + "inputs": "输入", + "outputs": "输出", + "noTraceList": "没有找到Trace信息", + "edasSupport": "Powered by Alibaba Cloud EDAS" } } } diff --git a/src/renderer/src/i18n/locales/zh-tw.json b/src/renderer/src/i18n/locales/zh-tw.json index 0a5b5936b..2b9a285b3 100644 --- a/src/renderer/src/i18n/locales/zh-tw.json +++ b/src/renderer/src/i18n/locales/zh-tw.json @@ -2561,9 +2561,132 @@ }, "words": { "knowledgeGraph": "知識圖譜", - "quit": "[to be translated]:退出", + "quit": "結束", "show_window": "顯示視窗", "visualization": "視覺化" + }, + "memory": { + "title": "全域記憶", + "add_memory": "新增記憶", + "edit_memory": "編輯記憶", + "memory_content": "記憶內容", + "please_enter_memory": "請輸入記憶內容", + "memory_placeholder": "輸入記憶內容...", + "user_id": "使用者ID", + "user_id_placeholder": "輸入使用者ID(可選)", + "load_failed": "載入記憶失敗", + "add_success": "記憶新增成功", + "add_failed": "新增記憶失敗", + "update_success": "記憶更新成功", + "update_failed": "更新記憶失敗", + "delete_success": "記憶刪除成功", + "delete_failed": "刪除記憶失敗", + "delete_confirm_title": "刪除記憶", + "delete_confirm_content": "確定要刪除 {{count}} 條記憶嗎?", + "delete_confirm": "確定要刪除這條記憶嗎?", + "time": "時間", + "user": "使用者", + "content": "內容", + "score": "分數", + "memories_description": "顯示 {{count}} / {{total}} 條記憶", + "search_placeholder": "搜尋記憶...", + "start_date": "開始日期", + "end_date": "結束日期", + "all_users": "所有使用者", + "users": "使用者", + "delete_selected": "刪除選取", + "reset_filters": "重設篩選", + "pagination_total": "第 {{start}}-{{end}} 項,共 {{total}} 項", + "current_user": "目前使用者", + "select_user": "選擇使用者", + "default_user": "預設使用者", + "switch_user": "切換使用者", + "user_switched": "使用者內容已切換至 {{user}}", + "switch_user_confirm": "將使用者內容切換至 {{user}}?", + "add_user": "新增使用者", + "add_new_user": "新增新使用者", + "new_user_id": "新使用者ID", + "new_user_id_placeholder": "輸入唯一的使用者ID", + "user_id_required": "使用者ID為必填欄位", + "user_id_reserved": "'default-user' 為保留字,請使用其他ID", + "user_id_exists": "此使用者ID已存在", + "user_id_too_long": "使用者ID不能超過50個字元", + "user_id_invalid_chars": "使用者ID只能包含字母、數字、連字符和底線", + "user_id_rules": "使用者ID必须唯一,只能包含字母、數字、連字符(-)和底線(_)", + "user_created": "使用者 {{user}} 建立並切換成功", + "add_user_failed": "新增使用者失敗", + "memory": "個記憶", + "reset_user_memories": "重置使用者記憶", + "reset_memories": "重置記憶", + "delete_user": "刪除使用者", + "loading_memories": "正在載入記憶...", + "no_memories": "暫無記憶", + "no_matching_memories": "未找到符合的記憶", + 
"no_memories_description": "開始新增您的第一個記憶吧", + "try_different_filters": "嘗試調整搜尋條件", + "add_first_memory": "新增您的第一個記憶", + "user_switch_failed": "切換使用者失敗", + "cannot_delete_default_user": "不能刪除預設使用者", + "delete_user_confirm_title": "刪除使用者", + "delete_user_confirm_content": "確定要刪除使用者 {{user}} 及其所有記憶嗎?", + "user_deleted": "使用者 {{user}} 刪除成功", + "delete_user_failed": "刪除使用者失敗", + "reset_user_memories_confirm_title": "重置使用者記憶", + "reset_user_memories_confirm_content": "確定要重置 {{user}} 的所有記憶嗎?", + "user_memories_reset": "{{user}} 的所有記憶已重置", + "reset_user_memories_failed": "重置使用者記憶失敗", + "reset_memories_confirm_title": "重置所有記憶", + "reset_memories_confirm_content": "確定要永久刪除 {{user}} 的所有記憶嗎?此操作無法復原。", + "memories_reset_success": "{{user}} 的所有記憶已成功重置", + "reset_memories_failed": "重置記憶失敗", + "delete_confirm_single": "確定要刪除這個記憶嗎?", + "total_memories": "個記憶", + "default": "預設", + "custom": "自定義", + "description": "記憶功能讓您儲存和管理與助手互動的資訊。您可以新增、編輯和刪除記憶,也可以對它們進行篩選和搜尋。", + "global_memory_enabled": "全域記憶已啟用", + "global_memory": "全域記憶", + "enable_global_memory_first": "請先啟用全域記憶", + "configure_memory_first": "請先配置記憶設定", + "global_memory_disabled_title": "全域記憶已停用", + "global_memory_disabled_desc": "要使用記憶功能,請先在助手設定中啟用全域記憶。", + "not_configured_title": "記憶未配置", + "not_configured_desc": "請在記憶設定中配置嵌入和LLM模型以啟用記憶功能。", + "go_to_memory_page": "前往記憶頁面", + "settings": "設定", + "statistics": "統計", + "search": "搜尋", + "actions": "操作", + "user_management": "使用者管理", + "initial_memory_content": "歡迎!這是你的第一個記憶。", + "loading": "載入記憶中...", + "settings_title": "記憶體設定", + "llm_model": "LLM 模型", + "please_select_llm_model": "請選擇一個LLM模型", + "select_llm_model_placeholder": "選擇LLM模型", + "embedding_model": "嵌入模型", + "please_select_embedding_model": "請選擇一個嵌入模型", + "select_embedding_model_placeholder": "選擇嵌入模型", + "embedding_dimensions": "嵌入維度", + "stored_memories": "儲存的記憶", + "global_memory_description": "需要開啟助手設定中的全域記憶才能使用" + }, + "trace": { + "label": "呼叫鏈", + "traceWindow": "呼叫鏈視窗", + "backList": "返回清單", + "spanDetail": "Span詳情", + "name": "節點名稱", + "tag": "標籤", + "startTime": "開始時間", + "endTime": "結束時間", + "tokenUsage": "Token使用量", + "spendTime": "消耗時間", + "parentId": "上級Id", + "inputs": "輸入", + "outputs": "輸出", + "noTraceList": "沒有找到Trace資訊", + "edasSupport": "Powered by Alibaba Cloud EDAS" } } } diff --git a/src/renderer/src/i18n/translate/el-gr.json b/src/renderer/src/i18n/translate/el-gr.json index fedf91d3f..94869ef5f 100644 --- a/src/renderer/src/i18n/translate/el-gr.json +++ b/src/renderer/src/i18n/translate/el-gr.json @@ -341,6 +341,7 @@ "provider": "Παρέχων", "reasoning_content": "Έχει σκεφτεί πολύ καλά", "regenerate": "Ξαναπαραγωγή", + "trace": "ίχνος", "rename": "Μετονομασία", "reset": "Επαναφορά", "save": "Αποθήκευση", diff --git a/src/renderer/src/i18n/translate/es-es.json b/src/renderer/src/i18n/translate/es-es.json index c7285d695..a14d35a89 100644 --- a/src/renderer/src/i18n/translate/es-es.json +++ b/src/renderer/src/i18n/translate/es-es.json @@ -342,6 +342,7 @@ "provider": "Proveedor", "reasoning_content": "Pensamiento profundo", "regenerate": "Regenerar", + "trace": "Rastro", "rename": "Renombrar", "reset": "Restablecer", "save": "Guardar", diff --git a/src/renderer/src/i18n/translate/fr-fr.json b/src/renderer/src/i18n/translate/fr-fr.json index a875305a1..1dcd5731f 100644 --- a/src/renderer/src/i18n/translate/fr-fr.json +++ b/src/renderer/src/i18n/translate/fr-fr.json @@ -341,6 +341,7 @@ "provider": "Fournisseur", "reasoning_content": "Réflexion approfondie", "regenerate": "Regénérer", + "trace": "Tracer", "rename": 
"Renommer", "reset": "Réinitialiser", "save": "Enregistrer", diff --git a/src/renderer/src/i18n/translate/pt-pt.json b/src/renderer/src/i18n/translate/pt-pt.json index e81420c30..f062782dd 100644 --- a/src/renderer/src/i18n/translate/pt-pt.json +++ b/src/renderer/src/i18n/translate/pt-pt.json @@ -343,6 +343,7 @@ "provider": "Fornecedor", "reasoning_content": "Pensamento profundo concluído", "regenerate": "Regenerar", + "trace": "Regenerar", "rename": "Renomear", "reset": "Redefinir", "save": "Salvar", diff --git a/src/renderer/src/init.ts b/src/renderer/src/init.ts index de8d6eabc..f9ce442b5 100644 --- a/src/renderer/src/init.ts +++ b/src/renderer/src/init.ts @@ -4,6 +4,7 @@ import { loggerService } from '@logger' import { startAutoSync } from './services/BackupService' import { startNutstoreAutoSync } from './services/NutstoreService' import storeSyncService from './services/StoreSyncService' +import { webTraceService } from './services/WebTraceService' import store from './store' loggerService.initWindowSource('mainWindow') @@ -30,6 +31,11 @@ function initStoreSync() { storeSyncService.subscribe() } +function initWebTrace() { + webTraceService.init() +} + initKeyv() initAutoSync() initStoreSync() +initWebTrace() diff --git a/src/renderer/src/pages/home/Inputbar/Inputbar.tsx b/src/renderer/src/pages/home/Inputbar/Inputbar.tsx index 120f4ae88..7ff3c3ead 100644 --- a/src/renderer/src/pages/home/Inputbar/Inputbar.tsx +++ b/src/renderer/src/pages/home/Inputbar/Inputbar.tsx @@ -26,6 +26,7 @@ import FileManager from '@renderer/services/FileManager' import { checkRateLimit, getUserMessage } from '@renderer/services/MessagesService' import { getModelUniqId } from '@renderer/services/ModelService' import PasteService from '@renderer/services/PasteService' +import { spanManagerService } from '@renderer/services/SpanManagerService' import { estimateTextTokens as estimateTxtTokens, estimateUserPromptUsage } from '@renderer/services/TokenService' import { translateText } from '@renderer/services/TranslateService' import WebSearchService from '@renderer/services/WebSearchService' @@ -209,7 +210,11 @@ const Inputbar: FC = ({ assistant: _assistant, setActiveTopic, topic }) = logger.info('Starting to send message') - EventEmitter.emit(EVENT_NAMES.SEND_MESSAGE) + const parent = spanManagerService.startTrace( + { topicId: topic.id, name: 'sendMessage', inputs: text }, + mentionedModels && mentionedModels.length > 0 ? 
mentionedModels : [assistant.model] + ) + EventEmitter.emit(EVENT_NAMES.SEND_MESSAGE, { topicId: topic.id, traceId: parent?.spanContext().traceId }) try { // Dispatch the sendMessage action with all options @@ -234,6 +239,7 @@ const Inputbar: FC = ({ assistant: _assistant, setActiveTopic, topic }) = baseUserMessage.usage = await estimateUserPromptUsage(baseUserMessage) const { message, blocks } = getUserMessage(baseUserMessage) + message.traceId = parent?.spanContext().traceId currentMessageId.current = message.id dispatch(_sendMessage(message, blocks, assistantWithTopicPrompt, topic.id)) @@ -246,6 +252,7 @@ const Inputbar: FC = ({ assistant: _assistant, setActiveTopic, topic }) = setExpend(false) } catch (error) { logger.warn('Failed to send message:', error) + parent?.recordException(error as Error) } }, [assistant, dispatch, files, inputEmpty, loading, mentionedModels, resizeTextArea, text, topic]) @@ -472,7 +479,7 @@ const Inputbar: FC = ({ assistant: _assistant, setActiveTopic, topic }) = await onPause() await delay(1) } - EventEmitter.emit(EVENT_NAMES.CLEAR_MESSAGES) + EventEmitter.emit(EVENT_NAMES.CLEAR_MESSAGES, topic) } const onNewContext = () => { diff --git a/src/renderer/src/pages/home/Messages/MessageMenubar.tsx b/src/renderer/src/pages/home/Messages/MessageMenubar.tsx index 0f0c1f01b..c241a8cac 100644 --- a/src/renderer/src/pages/home/Messages/MessageMenubar.tsx +++ b/src/renderer/src/pages/home/Messages/MessageMenubar.tsx @@ -1,4 +1,5 @@ import { CheckOutlined, EditOutlined, QuestionCircleOutlined, SyncOutlined } from '@ant-design/icons' +import { defaultConfig } from '@mcp-trace/trace-core' import ObsidianExportPopup from '@renderer/components/Popups/ObsidianExportPopup' import SaveToKnowledgePopup from '@renderer/components/Popups/SaveToKnowledgePopup' import SelectModelPopup from '@renderer/components/Popups/SelectModelPopup' @@ -14,6 +15,7 @@ import { translateText } from '@renderer/services/TranslateService' import store, { RootState } from '@renderer/store' import { messageBlocksSelectors } from '@renderer/store/messageBlock' import { selectMessagesForTopic } from '@renderer/store/newMessage' +import { TraceIcon } from '@renderer/trace/pages/Component' import type { Assistant, Language, Model, Topic } from '@renderer/types' import { type Message, MessageBlockType } from '@renderer/types/newMessage' import { captureScrollableDivAsBlob, captureScrollableDivAsDataURL, classNames } from '@renderer/utils' @@ -45,7 +47,7 @@ import { ThumbsUp, Trash } from 'lucide-react' -import { FC, memo, useCallback, useMemo, useState } from 'react' +import { FC, memo, useCallback, useEffect, useMemo, useState } from 'react' import { useTranslation } from 'react-i18next' import { useSelector } from 'react-redux' import styled from 'styled-components' @@ -177,6 +179,24 @@ const MessageMenubar: FC = (props) => { [isTranslating, message, getTranslationUpdater, mainTextContent] ) + const [isDevelopModel, setIsDevelopModel] = useState(true) + + useEffect(() => { + setIsDevelopModel(defaultConfig.isDevModel || false) + }, []) + + const handleTraceUserMessage = useCallback(async () => { + console.log('current traceId', message.traceId, 'start send') + if (message.traceId) { + window.api.trace.openWindow( + message.topicId, + message.traceId, + true, + message.role === 'user' ? 
undefined : message.model?.name + ) + } + }, [message]) + const isEditable = useMemo(() => { return findMainTextBlocks(message).length > 0 // 使用 MCP Server 后会有大于一段 MatinTextBlock }, [message]) @@ -560,7 +580,7 @@ const MessageMenubar: FC = (props) => { okButtonProps={{ danger: true }} icon={} onOpenChange={(open) => open && setShowDeleteTooltip(false)} - onConfirm={() => deleteMessage(message.id)}> + onConfirm={() => deleteMessage(message.id, message.traceId, message.model?.name)}> e.stopPropagation()} @@ -574,6 +594,13 @@ const MessageMenubar: FC = (props) => { + {isDevelopModel && message.traceId && ( + + handleTraceUserMessage()}> + + + + )} {!isUserMessage && ( e.domEvent.stopPropagation() }} diff --git a/src/renderer/src/pages/settings/DataSettings/DataSettings.tsx b/src/renderer/src/pages/settings/DataSettings/DataSettings.tsx index d5c850955..6f3743bfe 100644 --- a/src/renderer/src/pages/settings/DataSettings/DataSettings.tsx +++ b/src/renderer/src/pages/settings/DataSettings/DataSettings.tsx @@ -161,6 +161,7 @@ const DataSettings: FC = () => { onOk: async () => { try { await window.api.clearCache() + await window.api.trace.cleanLocalData() await window.api.getCacheSize().then(setCacheSize) window.message.success(t('settings.data.clear_cache.success')) } catch (error) { diff --git a/src/renderer/src/providers/WebSearchProvider/index.ts b/src/renderer/src/providers/WebSearchProvider/index.ts index a2d73f57b..e1fe8f185 100644 --- a/src/renderer/src/providers/WebSearchProvider/index.ts +++ b/src/renderer/src/providers/WebSearchProvider/index.ts @@ -1,4 +1,5 @@ -import { WebSearchState } from '@renderer/store/websearch' +import { withSpanResult } from '@renderer/services/SpanManagerService' +import type { WebSearchState } from '@renderer/store/websearch' import { WebSearchProvider, WebSearchProviderResponse } from '@renderer/types' import { filterResultWithBlacklist } from '@renderer/utils/blacklistMatchPattern' @@ -7,16 +8,38 @@ import WebSearchProviderFactory from './WebSearchProviderFactory' export default class WebSearchEngineProvider { private sdk: BaseWebSearchProvider + private providerName: string + private topicId: string | undefined + private parentSpanId: string | undefined + private modelName: string | undefined - constructor(provider: WebSearchProvider) { + constructor(provider: WebSearchProvider, parentSpanId?: string) { this.sdk = WebSearchProviderFactory.create(provider) + this.providerName = provider.name + this.topicId = provider.topicId + this.parentSpanId = parentSpanId + this.modelName = provider.modelName } + public async search( query: string, websearch: WebSearchState, httpOptions?: RequestInit ): Promise { - const result = await this.sdk.search(query, websearch, httpOptions) + const callSearch = async ({ query, websearch }) => { + return await this.sdk.search(query, websearch, httpOptions) + } + + const traceParams = { + name: `${this.providerName}.search`, + tag: 'Web', + topicId: this.topicId || '', + parentSpanId: this.parentSpanId, + modelName: this.modelName + } + + const result = await withSpanResult(callSearch, traceParams, { query, websearch }) + return await filterResultWithBlacklist(result, websearch) } } diff --git a/src/renderer/src/services/ApiService.ts b/src/renderer/src/services/ApiService.ts index 501f9b715..000febadf 100644 --- a/src/renderer/src/services/ApiService.ts +++ b/src/renderer/src/services/ApiService.ts @@ -18,6 +18,7 @@ import { import { getModel } from '@renderer/hooks/useModel' import { getStoreSetting } from 
'@renderer/hooks/useSettings' import i18n from '@renderer/i18n' +import { currentSpan, withSpanResult } from '@renderer/services/SpanManagerService' import store from '@renderer/store' import { selectCurrentUserId, selectGlobalMemoryEnabled, selectMemoryConfig } from '@renderer/store/memory' import { @@ -110,11 +111,24 @@ async function fetchExternalTool( summaryAssistant.model = assistant.model || getDefaultModel() summaryAssistant.prompt = prompt + const callSearchSummary = async (params: { messages: Message[]; assistant: Assistant }) => { + return await fetchSearchSummary(params) + } + + const traceParams = { + name: `${summaryAssistant.model?.name}.Summary`, + tag: 'LLM', + topicId: lastUserMessage.topicId, + modelName: summaryAssistant.model.name + } + + const searchSummaryParams = { + messages: lastAnswer ? [lastAnswer, lastUserMessage] : [lastUserMessage], + assistant: summaryAssistant + } + try { - const result = await fetchSearchSummary({ - messages: lastAnswer ? [lastAnswer, lastUserMessage] : [lastUserMessage], - assistant: summaryAssistant - }) + const result = await withSpanResult(callSearchSummary, traceParams, searchSummaryParams) if (!result) return getFallbackResult() @@ -145,7 +159,10 @@ async function fetchExternalTool( } // --- Web Search Function --- - const searchTheWeb = async (extractResults: ExtractResults | undefined): Promise => { + const searchTheWeb = async ( + extractResults: ExtractResults | undefined, + parentSpanId?: string + ): Promise => { if (!shouldWebSearch) return // Add check for extractResults existence early @@ -165,8 +182,17 @@ async function fetchExternalTool( try { // Use the consolidated processWebsearch function WebSearchService.createAbortSignal(lastUserMessage.id) + let safeWebSearchProvider = webSearchProvider + if (webSearchProvider) { + safeWebSearchProvider = { + ...webSearchProvider, + topicId: lastUserMessage.topicId, + parentSpanId, + modelName: assistant.model.name + } + } const webSearchResponse = await WebSearchService.processWebsearch( - webSearchProvider!, + safeWebSearchProvider!, extractResults, lastUserMessage.id ) @@ -222,7 +248,9 @@ async function fetchExternalTool( // --- Knowledge Base Search Function --- const searchKnowledgeBase = async ( - extractResults: ExtractResults | undefined + extractResults: ExtractResults | undefined, + parentSpanId?: string, + modelName?: string ): Promise => { if (!hasKnowledgeBase) return @@ -253,7 +281,13 @@ async function fetchExternalTool( // const mainTextBlock = mainTextBlocks // ?.map((blockId) => store.getState().messageBlocks.entities[blockId]) // .find((block) => block?.type === MessageBlockType.MAIN_TEXT) as MainTextMessageBlock | undefined - return await processKnowledgeSearch(tempExtractResults, knowledgeBaseIds) + return await processKnowledgeSearch( + tempExtractResults, + knowledgeBaseIds, + lastUserMessage.topicId, + parentSpanId, + modelName + ) } catch (error) { logger.error('Knowledge base search failed:', error) return @@ -274,11 +308,12 @@ async function fetchExternalTool( let knowledgeReferencesFromSearch: KnowledgeReference[] | undefined let memorySearchReferences: MemoryItem[] | undefined + const parentSpanId = currentSpan(lastUserMessage.topicId, assistant.model?.name)?.spanContext().spanId // 并行执行搜索 if (shouldWebSearch || shouldKnowledgeSearch || shouldSearchMemory) { ;[webSearchResponseFromSearch, knowledgeReferencesFromSearch, memorySearchReferences] = await Promise.all([ - searchTheWeb(extractResults), - searchKnowledgeBase(extractResults), + 
searchTheWeb(extractResults, parentSpanId), + searchKnowledgeBase(extractResults, parentSpanId, assistant.model?.name), searchMemory() ]) } @@ -319,9 +354,10 @@ async function fetchExternalTool( if (enabledMCPs && enabledMCPs.length > 0) { try { + const spanContext = currentSpan(lastUserMessage.topicId, assistant.model?.name)?.spanContext() const toolPromises = enabledMCPs.map>(async (mcpServer) => { try { - const tools = await window.api.mcp.listTools(mcpServer) + const tools = await window.api.mcp.listTools(mcpServer, spanContext) return tools.filter((tool: any) => !mcpServer.disabledTools?.includes(tool.name)) } catch (error) { logger.error(`Error fetching tools from MCP server ${mcpServer.name}:`, error) @@ -417,24 +453,27 @@ export async function fetchChatCompletion({ // --- Call AI Completions --- onChunkReceived({ type: ChunkType.LLM_RESPONSE_CREATED }) - await AI.completions( - { - callType: 'chat', - messages: _messages, - assistant, - onChunk: onChunkReceived, - mcpTools: mcpTools, - maxTokens, - streamOutput: assistant.settings?.streamOutput || false, - enableReasoning, - enableWebSearch, - enableUrlContext, - enableGenerateImage - }, - { - streamOutput: assistant.settings?.streamOutput || false - } - ) + + const completionsParams: CompletionsParams = { + callType: 'chat', + messages: _messages, + assistant, + onChunk: onChunkReceived, + mcpTools: mcpTools, + maxTokens, + streamOutput: assistant.settings?.streamOutput || false, + enableReasoning, + enableWebSearch, + enableUrlContext, + enableGenerateImage, + topicId: lastUserMessage.topicId + } + + const requestOptions = { + streamOutput: assistant.settings?.streamOutput || false + } + + return await AI.completionsForTrace(completionsParams, requestOptions) // Post-conversation memory processing const globalMemoryEnabled = selectGlobalMemoryEnabled(store.getState()) @@ -600,6 +639,8 @@ export async function fetchMessagesSummary({ messages, assistant }: { messages: const AI = new AiProvider(provider) + const topicId = messages?.find((message) => message.topicId)?.topicId || undefined + // LLM对多条消息的总结有问题,用单条结构化的消息表示会话内容会更好 const structredMessages = contextMessages.map((message) => { const structredMessage = { @@ -637,11 +678,12 @@ export async function fetchMessagesSummary({ messages, assistant }: { messages: assistant: { ...summaryAssistant, prompt, model }, maxTokens: 1000, streamOutput: false, + topicId, enableReasoning: false } try { - const { getText } = await AI.completions(params) + const { getText } = await AI.completionsForTrace(params) const text = getText() return removeSpecialCharactersForTopicName(text) || null } catch (error: any) { @@ -657,16 +699,19 @@ export async function fetchSearchSummary({ messages, assistant }: { messages: Me return null } + const topicId = messages?.find((message) => message.topicId)?.topicId || undefined + const AI = new AiProvider(provider) const params: CompletionsParams = { callType: 'search', messages: messages, assistant, - streamOutput: false + streamOutput: false, + topicId } - return await AI.completions(params) + return await AI.completionsForTrace(params) } export async function fetchGenerate({ diff --git a/src/renderer/src/services/KnowledgeService.ts b/src/renderer/src/services/KnowledgeService.ts index 6d8b5fd4e..070a91a64 100644 --- a/src/renderer/src/services/KnowledgeService.ts +++ b/src/renderer/src/services/KnowledgeService.ts @@ -1,8 +1,10 @@ import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces' import { loggerService } from '@logger' +import { Span } 
from '@opentelemetry/api' import AiProvider from '@renderer/aiCore' import { DEFAULT_KNOWLEDGE_DOCUMENT_COUNT, DEFAULT_KNOWLEDGE_THRESHOLD } from '@renderer/config/constant' import { getEmbeddingMaxContext } from '@renderer/config/embedings' +import { addSpan, endSpan } from '@renderer/services/SpanManagerService' import store from '@renderer/store' import { FileMetadata, KnowledgeBase, KnowledgeBaseParams, KnowledgeReference } from '@renderer/types' import { ExtractResults } from '@renderer/utils/extract' @@ -102,18 +104,40 @@ export const getKnowledgeSourceUrl = async (item: ExtractChunkData & { file: Fil export const searchKnowledgeBase = async ( query: string, base: KnowledgeBase, - rewrite?: string + rewrite?: string, + topicId?: string, + parentSpanId?: string, + modelName?: string ): Promise> => { + let currentSpan: Span | undefined = undefined try { const baseParams = getKnowledgeBaseParams(base) const documentCount = base.documentCount || DEFAULT_KNOWLEDGE_DOCUMENT_COUNT const threshold = base.threshold || DEFAULT_KNOWLEDGE_THRESHOLD + if (topicId) { + currentSpan = addSpan({ + topicId, + name: `${base.name}-search`, + inputs: { + query, + rewrite, + base: baseParams + }, + tag: 'Knowledge', + parentSpanId, + modelName + }) + } + // 执行搜索 - const searchResults = await window.api.knowledgeBase.search({ - search: rewrite || query, - base: baseParams - }) + const searchResults = await window.api.knowledgeBase.search( + { + search: rewrite || query, + base: baseParams + }, + currentSpan?.spanContext() + ) // 过滤阈值不达标的结果 const filteredResults = searchResults.filter((item) => item.score >= threshold) @@ -121,33 +145,56 @@ export const searchKnowledgeBase = async ( // 如果有rerank模型,执行重排 let rerankResults = filteredResults if (base.rerankModel && filteredResults.length > 0) { - rerankResults = await window.api.knowledgeBase.rerank({ - search: rewrite || query, - base: baseParams, - results: filteredResults - }) + rerankResults = await window.api.knowledgeBase.rerank( + { + search: rewrite || query, + base: baseParams, + results: filteredResults + }, + currentSpan?.spanContext() + ) } // 限制文档数量 const limitedResults = rerankResults.slice(0, documentCount) // 处理文件信息 - return await Promise.all( + const result = await Promise.all( limitedResults.map(async (item) => { const file = await getFileFromUrl(item.metadata.source) logger.debug('Knowledge search item:', item, 'File:', file) return { ...item, file } }) ) + if (topicId) { + endSpan({ + topicId, + outputs: result, + span: currentSpan, + modelName + }) + } + return result } catch (error) { logger.error(`Error searching knowledge base ${base.name}:`, error) + if (topicId) { + endSpan({ + topicId, + error: error instanceof Error ? 
error : new Error(String(error)), + span: currentSpan, + modelName + }) + } throw error } } export const processKnowledgeSearch = async ( extractResults: ExtractResults, - knowledgeBaseIds: string[] | undefined + knowledgeBaseIds: string[] | undefined, + topicId: string, + parentSpanId?: string, + modelName?: string ): Promise => { if ( !extractResults.knowledge?.question || @@ -167,10 +214,27 @@ export const processKnowledgeSearch = async ( return [] } + const span = addSpan({ + topicId, + name: 'knowledgeSearch', + inputs: { + questions, + rewrite, + knowledgeBaseIds: knowledgeBaseIds + }, + tag: 'Knowledge', + parentSpanId, + modelName + }) + // 为每个知识库执行多问题搜索 const baseSearchPromises = bases.map(async (base) => { // 为每个问题搜索并合并结果 - const allResults = await Promise.all(questions.map((question) => searchKnowledgeBase(question, base, rewrite))) + const allResults = await Promise.all( + questions.map((question) => + searchKnowledgeBase(question, base, rewrite, topicId, span?.spanContext().spanId, modelName) + ) + ) // 合并结果并去重 const flatResults = allResults.flat() @@ -179,7 +243,7 @@ export const processKnowledgeSearch = async ( ).sort((a, b) => b.score - a.score) // 转换为引用格式 - return await Promise.all( + const result = await Promise.all( uniqueResults.map( async (item, index) => ({ @@ -190,12 +254,20 @@ export const processKnowledgeSearch = async ( }) as KnowledgeReference ) ) + return result }) // 汇总所有知识库的结果 const resultsPerBase = await Promise.all(baseSearchPromises) const allReferencesRaw = resultsPerBase.flat().filter((ref): ref is KnowledgeReference => !!ref) + endSpan({ + topicId, + outputs: resultsPerBase, + span, + modelName + }) + // 重新为引用分配ID return allReferencesRaw.map((ref, index) => ({ ...ref, diff --git a/src/renderer/src/services/SpanManagerService.ts b/src/renderer/src/services/SpanManagerService.ts new file mode 100644 index 000000000..401ff5810 --- /dev/null +++ b/src/renderer/src/services/SpanManagerService.ts @@ -0,0 +1,358 @@ +import { MessageStream } from '@anthropic-ai/sdk/resources/messages/messages' +import { defaultConfig, SpanEntity, TokenUsage } from '@mcp-trace/trace-core' +import { cleanContext, endContext, getContext, startContext } from '@mcp-trace/trace-web' +import { Context, context, Span, SpanStatusCode, trace } from '@opentelemetry/api' +import { isAsyncIterable } from '@renderer/aiCore/middleware/utils' +import { db } from '@renderer/databases' +import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService' +import { handleAsyncIterable } from '@renderer/trace/dataHandler/AsyncIterableHandler' +import { handleResult } from '@renderer/trace/dataHandler/CommonResultHandler' +import { handleMessageStream } from '@renderer/trace/dataHandler/MessageStreamHandler' +import { handleStream } from '@renderer/trace/dataHandler/StreamHandler' +import { EndSpanParams, ModelSpanEntity, StartSpanParams } from '@renderer/trace/types/ModelSpanEntity' +import { Model, Topic } from '@renderer/types' +import type { Message } from '@renderer/types/newMessage' +import { MessageBlockType } from '@renderer/types/newMessage' +import { SdkRawChunk } from '@renderer/types/sdk' +import { Stream } from 'openai/streaming' + +class SpanManagerService { + private spanMap: Map = new Map() + + getModelSpanEntity(topicId: string, modelName?: string) { + const entities = this.spanMap.get(topicId) + if (!entities) { + const entity = new ModelSpanEntity(modelName) + this.spanMap.set(topicId, [entity]) + return entity + } + let entity = entities.find((e) => e.getModelName() === 
modelName) + if (!entity) { + entity = new ModelSpanEntity(modelName) + entities.push(entity) + } + return entity + } + + startTrace(params: StartSpanParams, models?: Model[]) { + if (!defaultConfig.isDevModel) { + console.warn('Trace is enabled in developer mode.') + return + } + const span = webTracer.startSpan(params.name || 'root', { + root: true, + attributes: { + inputs: JSON.stringify(params.inputs || {}), + models: JSON.stringify(models || []) + } + }) + + const entity = this.getModelSpanEntity(params.topicId) + entity.addSpan(span) + const traceId = span.spanContext().traceId + window.api.trace.bindTopic(params.topicId, traceId) + + const ctx = this._updateContext(span, params.topicId) + models?.forEach((model) => { + this._addModelRootSpan({ ...params, name: `${model.name}.handleMessage`, modelName: model.name }, ctx) + }) + return span + } + + async restartTrace(message: Message, text?: string) { + if (!defaultConfig.isDevModel) { + console.warn('Trace is enabled in developer mode.') + return + } + + if (!message.traceId) { + return + } + + await window.api.trace.bindTopic(message.topicId, message.traceId) + + const input = await this._getContentFromMessage(message, text) + + let _models + if (message.role === 'user') { + await window.api.trace.cleanHistory(message.topicId, message.traceId) + + const topic = await db.topics.get(message.topicId) + _models = topic?.messages.filter((m) => m.role === 'assistant' && m.askId === message.id).map((m) => m.model) + } else { + _models = [message.model] + await window.api.trace.cleanHistory(message.topicId, message.traceId || '', message.model?.name) + } + + _models + ?.filter((m) => !!m) + .forEach((model) => { + this._addModelRootSpan({ ...input, modelName: model.name, name: `${model.name}.resendMessage` }) + }) + + const modelName = message.role !== 'user' ? _models[0]?.name : undefined + window.api.trace.openWindow(message.topicId, message.traceId, false, modelName) + } + + async appendTrace(message: Message, model: Model) { + if (!defaultConfig.isDevModel) { + console.warn('Trace is enabled in developer mode.') + return + } + if (!message.traceId) { + return + } + + await window.api.trace.cleanHistory(message.topicId, message.traceId, model.name) + + const input = await this._getContentFromMessage(message) + await window.api.trace.bindTopic(message.topicId, message.traceId) + this._addModelRootSpan({ ...input, name: `${model.name}.appendMessage`, modelName: model.name }) + window.api.trace.openWindow(message.topicId, message.traceId, false, model.name) + } + + private async _getContentFromMessage(message: Message, content?: string): Promise { + let _content = content + if (!_content) { + const blocks = await Promise.all( + message.blocks.map(async (blockId) => { + return await db.message_blocks.get(blockId) + }) + ) + _content = blocks.find((data) => data?.type === MessageBlockType.MAIN_TEXT)?.content + } + return { + topicId: message.topicId, + inputs: { + messageId: message.id, + content: _content, + askId: message.askId, + traceId: message.traceId, + tag: 'resendMessage' + } + } + } + + private _updateContext(span: Span, topicId: string, traceId?: string) { + window.api.trace.saveEntity({ + id: span.spanContext().spanId, + traceId: traceId ? 
traceId : span.spanContext().traceId, + topicId + } as SpanEntity) + if (traceId) { + span['_spanContext'].traceId = traceId + } + + const ctx = trace.setSpan(context.active(), span) + startContext(topicId, ctx) + return ctx + } + + private _addModelRootSpan(params: StartSpanParams, ctx?: Context) { + const entity = this.getModelSpanEntity(params.topicId, params.modelName) + const rootSpan = webTracer.startSpan( + `${params.name}`, + { + attributes: { + inputs: JSON.stringify(params.inputs || {}), + modelName: params.modelName, + tags: 'ModelHandle' + } + }, + ctx + ) + entity.addSpan(rootSpan, true) + const traceId = params.inputs?.traceId || rootSpan.spanContext().traceId + return this._updateContext(rootSpan, params.topicId, traceId) + } + + endTrace(params: EndSpanParams) { + const entity = this.getModelSpanEntity(params.topicId) + let span = entity.getCurrentSpan() + const code = params.error ? SpanStatusCode.ERROR : SpanStatusCode.OK + const message = params.error ? params.error.message : '' + while (span) { + if (params.outputs) { + span.setAttributes({ outputs: params.outputs }) + } + if (params.error) { + span.recordException(params.error) + } + span.setStatus({ code, message }) + span.end() + entity.removeSpan(span) + span = entity.getCurrentSpan() + } + this.finishModelTrace(params.topicId) + cleanContext(params.topicId) + window.api.trace.saveData(params.topicId) + } + + addSpan(params: StartSpanParams) { + if (!defaultConfig.isDevModel) { + console.warn('Trace is enabled in developer mode.') + return + } + const entity = this.getModelSpanEntity(params.topicId, params.modelName) + let parentSpan = entity.getSpanById(params.parentSpanId) + if (!parentSpan) { + parentSpan = this.getCurrentSpan(params.topicId, params.modelName) + } + + const parentCtx = parentSpan ? trace.setSpan(context.active(), parentSpan) : getContext(params.topicId) + const span = webTracer.startSpan( + params.name || 'root', + { + attributes: { + inputs: JSON.stringify(params.inputs || {}), + tags: params.tag || '', + modelName: params.modelName + } + }, + parentCtx + ) + const ctx = trace.setSpan(getContext(params.topicId), span) + entity.addSpan(span) + startContext(params.topicId, ctx) + return span + } + + endSpan(params: EndSpanParams) { + const entity = this.getModelSpanEntity(params.topicId, params.modelName) + const span = params.span || entity.getCurrentSpan(params.modelName) + if (params.modelEnded && params.modelName && params.outputs) { + const rootEntity = this.getModelSpanEntity(params.topicId) + const span = rootEntity?.getRootSpan() + window.api.trace.addEndMessage(span?.spanContext().spanId || '', params.modelName, params.outputs) + } + if (params.modelEnded && params.error && params.modelName) { + const rootEntity = this.getModelSpanEntity(params.topicId) + rootEntity.addModelError(params.error) + } + if (!span) { + console.info(`No active span found for topicId: ${params.topicId}-modelName: ${params.modelName}.`) + return + } + + // remove span + if (entity.removeSpan(span)) { + this.getModelSpanEntity(params.topicId).removeSpan(span) + } + + const code = params.error ? SpanStatusCode.ERROR : SpanStatusCode.OK + const message = params.error ? 
params.error.message : 'success' + if (params.outputs) { + span.setAttributes({ outputs: JSON.stringify(params.outputs || {}) }) + } + if (params.error) { + span.recordException(params.error) + } + span.setStatus({ code, message }) + span.end() + endContext(params.topicId) + } + + getCurrentSpan(topicId: string, modelName?: string, isRoot = false): Span | undefined { + let entity = this.getModelSpanEntity(topicId, modelName) + let span = isRoot ? entity.getRoot() : entity.getCurrentSpan(modelName) + if (!span && modelName) { + entity = this.getModelSpanEntity(topicId) + span = entity.getCurrentSpan() + } + return span + } + + async addTokenUsage(topicId: string, prompt: number, completion: number) { + const span = this.getCurrentSpan(topicId) + const usage: TokenUsage = { + prompt_tokens: prompt, + completion_tokens: completion, + total_tokens: prompt + completion + } + if (span) { + window.api.trace.tokenUsage(span.spanContext().spanId, usage) + } + } + + async finishModelTrace(topicId: string) { + this.spanMap.get(topicId)?.forEach((entity) => entity.finishSpan()) + this.spanMap.delete(topicId) + } +} + +/** + * Wraps a function and executes it within a span, returning the function's result instead of the wrapped function. + * @param fn The function to execute. + * @param name The span name. + * @param tags The span tags. + * @param getTopicId Function to get topicId from arguments. + * @returns The result of the executed function. + */ +export function withSpanResult any>( + fn: F, + params: StartSpanParams, + ...args: Parameters +): ReturnType { + if (!params.topicId || params.topicId === '') { + return fn(...args) + } + const span = addSpan({ + topicId: params.topicId, + name: params.name, + tag: params.tag, + inputs: args, + parentSpanId: params.parentSpanId, + modelName: params.modelName + }) + try { + const result = fn(...args) + if (result instanceof Promise) { + return result + .then((data) => { + if (!data || typeof data !== 'object') { + endSpan({ topicId: params.topicId, outputs: data, span, modelName: params.modelName }) + return data + } + + if (data instanceof Stream) { + return handleStream(data, span, params.topicId, params.modelName) + } else if (data instanceof MessageStream) { + return handleMessageStream(data, span, params.topicId, params.modelName) + } else if (isAsyncIterable(data)) { + return handleAsyncIterable(data, span, params.topicId, params.modelName) + } else { + return handleResult(data, span, params.topicId, params.modelName) + } + }) + .catch((err) => { + endSpan({ topicId: params.topicId, error: err, span, modelName: params.modelName }) + throw err + }) as ReturnType + } else { + endSpan({ topicId: params.topicId, outputs: result, span, modelName: params.modelName }) + return result + } + } catch (err) { + endSpan({ topicId: params.topicId, error: err as Error, span, modelName: params.modelName }) + throw err + } +} + +export const spanManagerService = new SpanManagerService() +export const webTracer = trace.getTracer('CherryStudio', '1.0.0') +export const addSpan = spanManagerService.addSpan.bind(spanManagerService) +export const startTrace = spanManagerService.startTrace.bind(spanManagerService) +export const endTrace = spanManagerService.endTrace.bind(spanManagerService) +export const endSpan = spanManagerService.endSpan.bind(spanManagerService) +export const currentSpan = spanManagerService.getCurrentSpan.bind(spanManagerService) +export const addTokenUsage = spanManagerService.addTokenUsage.bind(spanManagerService) +export const pauseTrace = 
spanManagerService.finishModelTrace.bind(spanManagerService) +export const appendTrace = spanManagerService.appendTrace.bind(spanManagerService) +export const restartTrace = spanManagerService.restartTrace.bind(spanManagerService) + +EventEmitter.on(EVENT_NAMES.SEND_MESSAGE, ({ topicId, traceId }) => { + window.api.trace.openWindow(topicId, traceId, false) +}) +EventEmitter.on(EVENT_NAMES.CLEAR_MESSAGES, (topic: Topic) => { + window.api.trace.cleanTopic(topic.id) +}) diff --git a/src/renderer/src/services/WebSearchService.ts b/src/renderer/src/services/WebSearchService.ts index 8e851e491..e09aacad0 100644 --- a/src/renderer/src/services/WebSearchService.ts +++ b/src/renderer/src/services/WebSearchService.ts @@ -2,6 +2,7 @@ import { loggerService } from '@logger' import { DEFAULT_WEBSEARCH_RAG_DOCUMENT_COUNT } from '@renderer/config/constant' import i18n from '@renderer/i18n' import WebSearchEngineProvider from '@renderer/providers/WebSearchProvider' +import { addSpan, endSpan } from '@renderer/services/SpanManagerService' import store from '@renderer/store' import { setWebSearchStatus } from '@renderer/store/runtime' import { CompressionConfig, WebSearchState } from '@renderer/store/websearch' @@ -164,10 +165,11 @@ class WebSearchService { public async search( provider: WebSearchProvider, query: string, - httpOptions?: RequestInit + httpOptions?: RequestInit, + spanId?: string ): Promise { const websearch = this.getWebSearchState() - const webSearchEngine = new WebSearchEngineProvider(provider) + const webSearchEngine = new WebSearchEngineProvider(provider, spanId) let formattedQuery = query // FIXME: 有待商榷,效果一般 @@ -440,16 +442,38 @@ class WebSearchService { // 使用请求特定的signal,如果没有则回退到全局signal const signal = this.getRequestState(requestId).signal || this.signal + const span = webSearchProvider.topicId + ? 
addSpan({ + topicId: webSearchProvider.topicId, + name: `WebSearch`, + inputs: { + question: extractResults.websearch.question, + provider: webSearchProvider.id + }, + tag: `Web`, + parentSpanId: webSearchProvider.parentSpanId, + modelName: webSearchProvider.modelName + }) + : undefined const questions = extractResults.websearch.question const links = extractResults.websearch.links // 处理 summarize if (questions[0] === 'summarize' && links && links.length > 0) { const contents = await fetchWebContents(links, undefined, undefined, { signal }) + webSearchProvider.topicId && + endSpan({ + topicId: webSearchProvider.topicId, + outputs: contents, + modelName: webSearchProvider.modelName, + span + }) return { query: 'summaries', results: contents } } - const searchPromises = questions.map((q) => this.search(webSearchProvider, q, { signal })) + const searchPromises = questions.map((q) => + this.search(webSearchProvider, q, { signal }, span?.spanContext().spanId) + ) const searchResults = await Promise.allSettled(searchPromises) // 统计成功完成的搜索数量 @@ -480,6 +504,14 @@ class WebSearchService { // 如果没有搜索结果,直接返回空结果 if (finalResults.length === 0) { await this.setWebSearchStatus(requestId, { phase: 'default' }) + if (webSearchProvider.topicId) { + endSpan({ + topicId: webSearchProvider.topicId, + outputs: finalResults, + modelName: webSearchProvider.modelName, + span + }) + } return { query: questions.join(' | '), results: [] @@ -526,6 +558,14 @@ class WebSearchService { // 重置状态 await this.setWebSearchStatus(requestId, { phase: 'default' }) + if (webSearchProvider.topicId) { + endSpan({ + topicId: webSearchProvider.topicId, + outputs: finalResults, + modelName: webSearchProvider.modelName, + span + }) + } return { query: questions.join(' | '), results: finalResults diff --git a/src/renderer/src/services/WebTraceService.ts b/src/renderer/src/services/WebTraceService.ts new file mode 100644 index 000000000..40f8f97e7 --- /dev/null +++ b/src/renderer/src/services/WebTraceService.ts @@ -0,0 +1,34 @@ +import { convertSpanToSpanEntity, FunctionSpanExporter, FunctionSpanProcessor } from '@mcp-trace/trace-core' +import { WebTracer } from '@mcp-trace/trace-web' +import { ReadableSpan } from '@opentelemetry/sdk-trace-base' + +const TRACER_NAME = 'CherryStudio' + +class WebTraceService { + init() { + const exporter = new FunctionSpanExporter((spans: ReadableSpan[]): Promise => { + // Implement your save logic here if needed + // For now, just resolve immediately + console.log('Saving spans:', spans) + return Promise.resolve() + }) + const processor = new FunctionSpanProcessor( + exporter, + (span: ReadableSpan) => { + window.api.trace.saveEntity(convertSpanToSpanEntity(span)) + }, + (span: ReadableSpan) => { + window.api.trace.saveEntity(convertSpanToSpanEntity(span)) + } + ) + WebTracer.init( + { + defaultTracerName: TRACER_NAME, + serviceName: TRACER_NAME + }, + processor + ) + } +} + +export const webTraceService = new WebTraceService() diff --git a/src/renderer/src/store/thunk/messageThunk.ts b/src/renderer/src/store/thunk/messageThunk.ts index 4a285f51e..989f0bbe1 100644 --- a/src/renderer/src/store/thunk/messageThunk.ts +++ b/src/renderer/src/store/thunk/messageThunk.ts @@ -4,6 +4,7 @@ import { fetchChatCompletion } from '@renderer/services/ApiService' import FileManager from '@renderer/services/FileManager' import { BlockManager } from '@renderer/services/messageStreaming/BlockManager' import { createCallbacks } from '@renderer/services/messageStreaming/callbacks' +import { endSpan } from 
'@renderer/services/SpanManagerService' import { createStreamProcessor, type StreamProcessorCallbacks } from '@renderer/services/StreamProcessingService' import store from '@renderer/store' import { updateTopicUpdatedAt } from '@renderer/store/assistants' @@ -258,7 +259,8 @@ const dispatchMultiModelResponses = async ( const assistantMessage = createAssistantMessage(assistant.id, topicId, { askId: triggeringMessage.id, model: mentionedModel, - modelId: mentionedModel.id + modelId: mentionedModel.id, + traceId: triggeringMessage.traceId }) dispatch(newMessagesActions.addMessage({ topicId, message: assistantMessage })) assistantMessageStubs.push(assistantMessage) @@ -886,13 +888,24 @@ const fetchAndProcessAssistantResponseImpl = async ( const streamProcessorCallbacks = createStreamProcessor(callbacks) // const startTime = Date.now() - await fetchChatCompletion({ + const result = await fetchChatCompletion({ messages: messagesForContext, assistant: assistant, onChunkReceived: streamProcessorCallbacks }) + endSpan({ + topicId, + outputs: result ? result.getText() : '', + modelName: assistant.model?.name, + modelEnded: true + }) } catch (error: any) { logger.error('Error fetching chat completion:', error) + endSpan({ + topicId, + error: error, + modelName: assistant.model?.name + }) if (assistantMessage) { callbacks.onError?.(error) throw error @@ -930,7 +943,8 @@ export const sendMessage = } else { const assistantMessage = createAssistantMessage(assistant.id, topicId, { askId: userMessage.id, - model: assistant.model + model: assistant.model, + traceId: userMessage.traceId }) await saveMessageAndBlocksToDB(assistantMessage, []) dispatch(newMessagesActions.addMessage({ topicId, message: assistantMessage })) @@ -1129,6 +1143,7 @@ export const resendMessageThunk = askId: userMessageToResend.id, model: assistant.model }) + assistantMessage.traceId = userMessageToResend.traceId resetDataList.push(assistantMessage) resetDataList.forEach((message) => { @@ -1427,7 +1442,8 @@ export const appendAssistantResponseThunk = topicId: Topic['id'], existingAssistantMessageId: string, // ID of the assistant message the user interacted with newModel: Model, // The new model selected by the user - assistant: Assistant // Base assistant configuration + assistant: Assistant, // Base assistant configuration + traceId?: string ) => async (dispatch: AppDispatch, getState: () => RootState) => { try { @@ -1474,7 +1490,8 @@ export const appendAssistantResponseThunk = const newAssistantStub = createAssistantMessage(assistant.id, topicId, { askId: askId, // Crucial: Use the original askId model: newModel, - modelId: newModel.id + modelId: newModel.id, + traceId: traceId }) // 3. 
Update Redux Store diff --git a/src/renderer/src/trace/dataHandler/AsyncIterableHandler.ts b/src/renderer/src/trace/dataHandler/AsyncIterableHandler.ts new file mode 100644 index 000000000..1bdcc679e --- /dev/null +++ b/src/renderer/src/trace/dataHandler/AsyncIterableHandler.ts @@ -0,0 +1,98 @@ +import { TokenUsage } from '@mcp-trace/trace-core' +import { Span } from '@opentelemetry/api' +import { endSpan } from '@renderer/services/SpanManagerService' +import { SdkRawChunk } from '@renderer/types/sdk' + +export class AsyncIterableHandler { + private span: Span + private stream: AsyncIterable + private topicId: string + private usageToken: TokenUsage + private modelName?: string + constructor(stream: AsyncIterable, span: Span, topicId: string, modelName?: string) { + this.stream = this.transformStream(stream) + this.span = span + this.topicId = topicId + this.modelName = modelName + this.usageToken = { + completion_tokens: 0, + prompt_tokens: 0, + total_tokens: 0 + } + } + + async handleChunk(chunk: SdkRawChunk) { + let context = 'choices' in chunk ? chunk.choices.map((ch) => ch.delta.context).join() : '' + if (!context && 'candidates' in chunk && chunk.candidates) { + context = chunk.candidates + .map( + (ch) => + ch.content?.parts + ?.map((p) => { + if (p.text) { + return p.text + } else if (p.functionCall) { + return `${p.functionCall.name}(${JSON.stringify(p.functionCall.args || '')})` + } else if (p.codeExecutionResult) { + return p.codeExecutionResult.output || String(p.codeExecutionResult.outcome || '') + } else if (p.executableCode) { + return `'''${p.executableCode.language || ''}\n${p.executableCode.code}\n'''` + } else if (p.fileData) { + return '' + } else if (p.functionResponse) { + return `${p.functionResponse.name}: ${JSON.stringify(p.functionResponse.response)}` + } else if (p.inlineData) { + return '' + } else if (p.videoMetadata) { + return `fps: ${p.videoMetadata.fps}, start:${p.videoMetadata.startOffset}, end:${p.videoMetadata.endOffset}` + } else { + return '' + } + }) + .join() || '' + ) + .join() + } + if (context) { + window.api.trace.addStreamMessage(this.span.spanContext().spanId, this.modelName || '', context, chunk) + } + if ('usageMetadata' in chunk && chunk.usageMetadata) { + this.usageToken.prompt_tokens = chunk.usageMetadata.promptTokenCount || 0 + this.usageToken.total_tokens = chunk.usageMetadata.totalTokenCount || 0 + this.usageToken.completion_tokens = + (chunk.usageMetadata.totalTokenCount || 0) - (chunk.usageMetadata.promptTokenCount || 0) + } + } + + async finish() { + window.api.trace.tokenUsage(this.span.spanContext().spanId, this.usageToken) + endSpan({ topicId: this.topicId, span: this.span, modelName: this.modelName }) + } + + async handleError(err) { + endSpan({ topicId: this.topicId, error: err, span: this.span, modelName: this.modelName }) + } + + async *transformStream(stream: AsyncIterable) { + try { + for await (const chunk of stream) { + this.handleChunk(chunk) + yield chunk + } + } catch (err) { + this.handleError(err) + throw err + } + this.finish() + } + + static handleStream(stream: AsyncIterable, span?: Span, topicId?: string, modelName?: string) { + if (!span || !topicId) { + return stream + } + const handler = new AsyncIterableHandler(stream, span!, topicId, modelName) + return handler.stream + } +} + +export const handleAsyncIterable = AsyncIterableHandler.handleStream diff --git a/src/renderer/src/trace/dataHandler/CommonResultHandler.ts b/src/renderer/src/trace/dataHandler/CommonResultHandler.ts new file mode 100644 index 
000000000..4c5ea553c --- /dev/null +++ b/src/renderer/src/trace/dataHandler/CommonResultHandler.ts @@ -0,0 +1,77 @@ +import { TokenUsage } from '@mcp-trace/trace-core' +import { Span } from '@opentelemetry/api' +import { CompletionsResult } from '@renderer/aiCore/middleware/schemas' +import { endSpan } from '@renderer/services/SpanManagerService' + +export class CompletionsResultHandler { + private data: any + private tokenUsage?: TokenUsage + private span: Span + private topicId: string + private modelName?: string + + constructor(data: any, span: Span, topicId: string, modelName?: string) { + this.data = data && this.isCompletionsResult(data) ? { ...data, finishText: data.getText() } : data + this.span = span + this.topicId = topicId + this.tokenUsage = this.getUsage(data) + this.modelName = modelName + } + + isCompletionsResult(data: any): data is CompletionsResult { + return ( + data !== null && + typeof data === 'object' && + typeof data.getText === 'function' && + (data.rawOutput === undefined || typeof data.rawOutput === 'object') && + (data.stream === undefined || typeof data.stream === 'object') && + (data.controller === undefined || data.controller instanceof AbortController) + ) + } + + getUsage(data?: any): TokenUsage | undefined { + // Replace this with an appropriate property check for CompletionsResult + if (!data || typeof data !== 'object' || !('usage' in data || 'usageMetadata' in data)) { + return undefined + } + const tokens: TokenUsage = { + completion_tokens: 0, + prompt_tokens: 0, + total_tokens: 0 + } + if ('usage' in data) { + const usage = data.usage + tokens.completion_tokens = usage['completion_tokens'] || 0 + tokens.prompt_tokens = usage['prompt_tokens'] || 0 + tokens.total_tokens = usage['total_tokens'] || 0 + // Do something with usage + } else { + const usage = data.usageMetadata + tokens.completion_tokens = usage['thoughtsTokenCount'] || 0 + tokens.prompt_tokens = usage['promptTokenCount'] || 0 + tokens.total_tokens = usage['totalTokenCount'] || 0 + } + return tokens + } + + finish() { + if (this.tokenUsage) { + window.api.trace.tokenUsage(this.span.spanContext().spanId, this.tokenUsage) + } + if (this.data) { + endSpan({ topicId: this.topicId, outputs: this.data, span: this.span, modelName: this.modelName }) + } else { + endSpan({ topicId: this.topicId, span: this.span, modelName: this.modelName }) + } + } + + static handleResult(data?: any, span?: Span, topicId?: string, modelName?: string) { + if (span && topicId) { + const handler = new CompletionsResultHandler(data, span!, topicId, modelName) + handler.finish() + } + return data + } +} + +export const handleResult = CompletionsResultHandler.handleResult diff --git a/src/renderer/src/trace/dataHandler/MessageStreamHandler.ts b/src/renderer/src/trace/dataHandler/MessageStreamHandler.ts new file mode 100644 index 000000000..347194983 --- /dev/null +++ b/src/renderer/src/trace/dataHandler/MessageStreamHandler.ts @@ -0,0 +1,70 @@ +import { Message, MessageStream } from '@anthropic-ai/sdk/resources/messages/messages' +import { TokenUsage } from '@mcp-trace/trace-core' +import { Span } from '@opentelemetry/api' +import { endSpan } from '@renderer/services/SpanManagerService' + +export class MessageStreamHandler { + private span: Span + private stream: MessageStream + private topicId: string + private tokenUsage: TokenUsage + private modelName?: string + + constructor(stream: MessageStream, span: Span, topicId: string, modelName?: string) { + this.stream = stream + this.span = span + this.topicId = topicId + 
this.tokenUsage = { + completion_tokens: 0, + prompt_tokens: 0, + total_tokens: 0 + } + stream.on('error', (err) => { + endSpan({ topicId, error: err, span, modelName: this.modelName }) + }) + stream.on('message', (message) => this.write(message)) + stream.on('end', () => this.finish()) + this.modelName = modelName + } + + async finish() { + window.api.trace.tokenUsage(this.span.spanContext().spanId, this.tokenUsage) + endSpan({ topicId: this.topicId, span: this.span, modelName: this.modelName }) + } + + async write(message: Message) { + if (message.usage) { + this.tokenUsage.completion_tokens += message.usage.output_tokens + this.tokenUsage.prompt_tokens += message.usage.input_tokens + this.tokenUsage.total_tokens += message.usage.output_tokens + message.usage.input_tokens + } + const context = message.content + .map((c) => { + if (c.type === 'text') { + return c.text + } else if (c.type === 'redacted_thinking') { + return c.data + } else if (c.type === 'server_tool_use' || c.type === 'tool_use') { + return `${c.name}: ${c.input}` + } else if (c.type === 'thinking') { + return c.thinking + } else if (c.type === 'web_search_tool_result') { + return c.content + } else { + return JSON.stringify(c) + } + }) + .join() + window.api.trace.addStreamMessage(this.span.spanContext().spanId, this.modelName || '', context, message) + } + + static handleStream(stream: MessageStream, span?: Span, topicId?: string, modelName?: string) { + if (!span || !topicId) { + return stream + } + const handler = new MessageStreamHandler(stream, span!, topicId, modelName) + return handler.stream + } +} + +export const handleMessageStream = MessageStreamHandler.handleStream diff --git a/src/renderer/src/trace/dataHandler/StreamHandler.ts b/src/renderer/src/trace/dataHandler/StreamHandler.ts new file mode 100644 index 000000000..22c1a0c51 --- /dev/null +++ b/src/renderer/src/trace/dataHandler/StreamHandler.ts @@ -0,0 +1,110 @@ +import { TokenUsage } from '@mcp-trace/trace-core' +import { Span } from '@opentelemetry/api' +import { endSpan } from '@renderer/services/SpanManagerService' +import { OpenAI } from 'openai' +import { Stream } from 'openai/streaming' + +export class StreamHandler { + private topicId: string + private span: Span + private modelName?: string + private usage: TokenUsage = { + prompt_tokens: 0, + completion_tokens: 0, + total_tokens: 0 + } + private stream: Stream + + constructor( + topicId: string, + span: Span, + stream: Stream, + modelName?: string + ) { + this.topicId = topicId + this.span = span + this.modelName = modelName + this.stream = stream + } + + async *createStreamAdapter(): AsyncIterable< + OpenAI.Chat.Completions.ChatCompletionChunk | OpenAI.Responses.ResponseStreamEvent + > { + try { + for await (const chunk of this.stream) { + let context: string | undefined + if ('object' in chunk && chunk.object === 'chat.completion.chunk') { + const completionChunk = chunk as OpenAI.Chat.Completions.ChatCompletionChunk + if (completionChunk.usage) { + this.usage.completion_tokens += completionChunk.usage.completion_tokens || 0 + this.usage.prompt_tokens += completionChunk.usage.prompt_tokens || 0 + this.usage.total_tokens += completionChunk.usage.total_tokens || 0 + } + context = chunk.choices + .map((choice) => { + if (!choice.delta) { + return '' + } else if ('reasoning_content' in choice.delta) { + return choice.delta.reasoning_content + } else if (choice.delta.content) { + return choice.delta.content + } else if (choice.delta.refusal) { + return choice.delta.refusal + } else if 
(choice.delta.tool_calls) { + return choice.delta.tool_calls.map((toolCall) => { + return toolCall.function?.name || toolCall.function?.arguments + }) + } + return '' + }) + .join() + } else { + const resp = chunk as OpenAI.Responses.ResponseStreamEvent + if ('response' in resp && resp.response) { + context = resp.response.output_text + if (resp.response.usage) { + this.usage.completion_tokens += resp.response.usage.output_tokens || 0 + this.usage.prompt_tokens += resp.response.usage.input_tokens || 0 + this.usage.total_tokens += (resp.response.usage.input_tokens || 0) + resp.response.usage.output_tokens + } + } else if ('delta' in resp && resp.delta) { + context = typeof resp.delta === 'string' ? resp.delta : JSON.stringify(resp.delta) + } else if ('text' in resp && resp.text) { + context = resp.text + } else if ('partial_image_b64' in resp && resp.partial_image_b64) { + context = '' + } else if ('part' in resp && resp.part) { + context = 'refusal' in resp.part ? resp.part.refusal : resp.part.text + } else { + context = '' + } + } + window.api.trace.addStreamMessage(this.span.spanContext().spanId, this.modelName || '', context, chunk) + yield chunk + } + this.finish() + } catch (err) { + endSpan({ topicId: this.topicId, error: err as Error, span: this.span, modelName: this.modelName }) + throw err + } + } + + async finish() { + window.api.trace.tokenUsage(this.span.spanContext().spanId, this.usage) + endSpan({ topicId: this.topicId, span: this.span, modelName: this.modelName }) + } + + static handleStream( + stream: Stream, + span?: Span, + topicId?: string, + modelName?: string + ) { + if (!span || !topicId) { + return stream + } + return new StreamHandler(topicId, span, stream, modelName).createStreamAdapter() + } +} + +export const handleStream = StreamHandler.handleStream diff --git a/src/renderer/src/trace/pages/Component.tsx b/src/renderer/src/trace/pages/Component.tsx new file mode 100644 index 000000000..2ceb26f89 --- /dev/null +++ b/src/renderer/src/trace/pages/Component.tsx @@ -0,0 +1,164 @@ +import { CaretDownOutlined, CaretRightOutlined } from '@ant-design/icons' +import React from 'react' + +// Box 组件 +export const Box: React.FC< + React.HTMLAttributes & { padding?: number; border?: string; borderStyle?: string; className?: string } +> = ({ padding: p, border, borderStyle, className, style, ...props }) => ( +
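+  // Renders a plain div, forwarding the padding/border props as inline styles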
+  <div className={className} style={{ padding: p, border, borderStyle, ...style }} {...props} />
+)
+
+// SimpleGrid component
+export const SimpleGrid: React.FC<{
+  columns?: number
+  templateColumns?: string
+  children: React.ReactNode
+  leftSpace?: number
+  className?: string
+  style?: React.CSSProperties
+  onClick?: React.MouseEventHandler
+}> = ({ columns, templateColumns, children, leftSpace = 0, style, className, onClick, ...props }) => (
+  <div
+    className={className}
+    onClick={onClick}
+    style={{
+      display: 'grid',
+      gridTemplateColumns: templateColumns || `repeat(${columns || 1}, 1fr)`,
+      paddingLeft: leftSpace,
+      ...style
+    }}
+    {...props}>
+    {children}
+  </div>
+)
+
+// Text component
+export const Text: React.FC<React.HTMLAttributes<HTMLSpanElement>> = ({ style, className, ...props }) => (
+  <span className={className} style={style} {...props} />
+)
+
+// VStack component
+export const VStack: React.FC<{ grap?: number; align?: string; children: React.ReactNode }> = ({
+  grap = 5,
+  align = 'stretch',
+  children,
+  ...props
+}) => (
+  <div style={{ display: 'flex', flexDirection: 'column', gap: grap, alignItems: align }} {...props}>
+    {children}
+  </div>
+)
+
+// GridItem component
+export const GridItem: React.FC<
+  React.HTMLAttributes<HTMLDivElement> & { colSpan?: number; rowSpan?: number; padding?: number }
+> = ({ colSpan, rowSpan, padding, style, ...props }) => (
+  <div
+    style={{
+      gridColumn: colSpan ? `span ${colSpan}` : undefined,
+      gridRow: rowSpan ? `span ${rowSpan}` : undefined,
+      padding,
+      ...style
+    }}
+    {...props}
+  />
+)
+
+// HStack component
+export const HStack: React.FC<{ grap?: number; children: React.ReactNode; style?: React.CSSProperties }> = ({
+  grap,
+  children,
+  style,
+  ...props
+}) => (
+  <div style={{ display: 'flex', flexDirection: 'row', gap: grap, ...style }} {...props}>
+    {children}
+  </div>
+)
+
+// IconButton component
+export const IconButton: React.FC<
+  React.ButtonHTMLAttributes<HTMLButtonElement> & { size?: 'sm' | 'md'; fontSize?: string }
+> = ({ size = 'md', fontSize = '12px', style, onClick, ...props }) => (
+  <button
+    onClick={onClick}
+    style={{ fontSize, padding: size === 'sm' ? '2px 4px' : '4px 8px', ...style }}
+    {...props}
+  />
+)
+
+// Custom Button component
+export const Button: React.FC<React.ButtonHTMLAttributes<HTMLButtonElement>> = ({ style, ...props }) => (