feat: remove faiss database (#10178)

This commit is contained in:
Chen Tao 2025-09-15 17:59:46 +08:00 committed by GitHub
parent 7f9f5514a4
commit e3d2bb2ec6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
45 changed files with 376 additions and 2598 deletions

View File

@ -77,7 +77,6 @@
"@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
"@strongtz/win32-arm64-msvc": "^0.4.7",
"express": "^5.1.0",
"faiss-node": "^0.5.1",
"font-list": "^2.0.0",
"graceful-fs": "^4.2.11",
"jsdom": "26.1.0",
@ -137,9 +136,6 @@
"@heroui/react": "^2.8.3",
"@kangfenmao/keyv-storage": "^0.1.0",
"@langchain/community": "^0.3.50",
"@langchain/core": "^0.3.68",
"@langchain/ollama": "^0.2.1",
"@langchain/openai": "^0.6.7",
"@mistralai/mistralai": "^1.7.5",
"@modelcontextprotocol/sdk": "^1.17.5",
"@mozilla/readability": "^0.6.0",

View File

@ -28,7 +28,7 @@ import DxtService from './services/DxtService'
import { ExportService } from './services/ExportService'
import { fileStorage as fileManager } from './services/FileStorage'
import FileService from './services/FileSystemService'
import KnowledgeService from './services/knowledge/KnowledgeService'
import KnowledgeService from './services/KnowledgeService'
import mcpService from './services/MCPService'
import MemoryService from './services/memory/MemoryService'
import { openTraceWindow, setTraceWindowTitle } from './services/NodeTraceService'

View File

@ -1,63 +0,0 @@
import { VoyageEmbeddings } from '@langchain/community/embeddings/voyage'
import type { Embeddings } from '@langchain/core/embeddings'
import { OllamaEmbeddings } from '@langchain/ollama'
import { AzureOpenAIEmbeddings, OpenAIEmbeddings } from '@langchain/openai'
import { ApiClient, SystemProviderIds } from '@types'
import { isJinaEmbeddingsModel, JinaEmbeddings } from './JinaEmbeddings'
/**
 * Factory that builds a LangChain `Embeddings` instance for the configured
 * embedding provider (Ollama, VoyageAI, Jina, Azure OpenAI, or OpenAI-compatible).
 */
export default class EmbeddingsFactory {
  /**
   * Creates the provider-specific embeddings client.
   * @param embedApiClient Provider/model/credentials of the embedding endpoint.
   * @param dimensions Optional output dimensionality, when the provider supports it.
   * @returns A ready-to-use LangChain Embeddings implementation.
   */
  static create({ embedApiClient, dimensions }: { embedApiClient: ApiClient; dimensions?: number }): Embeddings {
    const batchSize = 10
    const { model, provider, apiKey, apiVersion, baseURL } = embedApiClient
    if (provider === SystemProviderIds.ollama) {
      // Ollama expects its root endpoint, not the OpenAI-compatible `/v1/` path.
      // NOTE(review): this strips the first 'v1/' occurrence anywhere in the URL —
      // assumed to only ever appear as the path suffix; confirm against callers.
      let baseUrl = baseURL
      if (baseURL.includes('v1/')) {
        baseUrl = baseURL.replace('v1/', '')
      }
      const headers = apiKey
        ? {
            Authorization: `Bearer ${apiKey}`
          }
        : undefined
      // FIX: pass auth headers under the `headers` option; the previous code
      // spread them directly into the constructor options (`...headers`), which
      // silently dropped the Authorization header.
      return new OllamaEmbeddings({
        model: model,
        baseUrl,
        headers
      })
    } else if (provider === SystemProviderIds.voyageai) {
      return new VoyageEmbeddings({
        modelName: model,
        apiKey,
        outputDimension: dimensions,
        batchSize
      })
    }
    // Jina models are routed to the dedicated client regardless of provider id.
    if (isJinaEmbeddingsModel(model)) {
      return new JinaEmbeddings({
        model,
        apiKey,
        batchSize,
        dimensions,
        baseUrl: baseURL
      })
    }
    // Presence of an apiVersion marks an Azure OpenAI deployment.
    if (apiVersion !== undefined) {
      return new AzureOpenAIEmbeddings({
        azureOpenAIApiKey: apiKey,
        azureOpenAIApiVersion: apiVersion,
        azureOpenAIApiDeploymentName: model,
        azureOpenAIEndpoint: baseURL,
        dimensions,
        batchSize
      })
    }
    // Default: any OpenAI-compatible endpoint.
    return new OpenAIEmbeddings({
      model,
      apiKey,
      dimensions,
      batchSize,
      configuration: { baseURL }
    })
  }
}

View File

@ -1,199 +0,0 @@
import { Embeddings, type EmbeddingsParams } from '@langchain/core/embeddings'
import { chunkArray } from '@langchain/core/utils/chunk_array'
import { getEnvironmentVariable } from '@langchain/core/utils/env'
import { z } from 'zod'
// All embedding model ids served by the Jina API that this client recognises.
const jinaModelSchema = z.enum([
  'jina-clip-v2',
  'jina-embeddings-v3',
  'jina-colbert-v2',
  'jina-clip-v1',
  'jina-colbert-v1-en',
  'jina-embeddings-v2-base-es',
  'jina-embeddings-v2-base-code',
  'jina-embeddings-v2-base-de',
  'jina-embeddings-v2-base-zh',
  'jina-embeddings-v2-base-en'
])

type JinaModel = z.infer<typeof jinaModelSchema>

/** Type guard: narrows an arbitrary model id string to a supported Jina model. */
export const isJinaEmbeddingsModel = (model: string): model is JinaModel => {
  return jinaModelSchema.safeParse(model).success
}
/** Constructor options for the Jina embeddings client. */
interface JinaEmbeddingsParams extends EmbeddingsParams {
  /** Model name to use */
  model: JinaModel
  /** Base URL of the Jina API; defaults to the public endpoint when omitted. */
  baseUrl?: string
  /**
   * Timeout to use when making requests to Jina.
   */
  timeout?: number
  /**
   * The maximum number of documents to embed in a single request.
   */
  batchSize?: number
  /**
   * Whether to strip new lines from the input text.
   */
  stripNewLines?: boolean
  /**
   * The dimensions of the embedding.
   */
  dimensions?: number
  /**
   * Scales the embedding so its Euclidean (L2) norm becomes 1, preserving direction.
   * Useful when downstream processing involves dot-product, classification, or visualization.
   */
  normalized?: boolean
}
/** A single multimodal input: exactly one of `text` or `image` must be set. */
type JinaMultiModelInput =
  | {
      text: string
      image?: never
    }
  | {
      image: string
      text?: never
    }

/** Accepted input shape: a plain string or a multimodal text/image object. */
type JinaEmbeddingsInput = string | JinaMultiModelInput

/** Request body for the Jina embeddings endpoint. */
interface EmbeddingCreateParams {
  model: JinaEmbeddingsParams['model']
  /**
   * input can be strings or JinaMultiModelInputs; if you want to embed an image,
   * you should use JinaMultiModelInputs
   */
  input: JinaEmbeddingsInput[]
  dimensions: number
  /** Retrieval task hint; omitted for models that do not accept one. */
  task?: 'retrieval.query' | 'retrieval.passage'
}

/** Successful response from the Jina embeddings endpoint. */
interface EmbeddingResponse {
  model: string
  object: string
  usage: {
    total_tokens: number
    prompt_tokens: number
  }
  data: {
    object: string
    index: number
    embedding: number[]
  }[]
}

/** Error response shape: the API reports failures via a `detail` message. */
interface EmbeddingErrorResponse {
  detail: string
}
/**
 * LangChain Embeddings implementation backed by the Jina embeddings HTTP API.
 *
 * Supports plain-text and multimodal (text/image) inputs. Requests are batched
 * by `batchSize` and issued concurrently (maxConcurrency defaults to 2).
 */
export class JinaEmbeddings extends Embeddings implements JinaEmbeddingsParams {
  model: JinaEmbeddingsParams['model'] = 'jina-clip-v2'
  batchSize = 24
  baseUrl = 'https://api.jina.ai/v1/embeddings'
  stripNewLines = true
  dimensions = 1024
  apiKey: string

  /**
   * @param fields Optional overrides; `apiKey` falls back to the JINA_API_KEY
   *               or JINA_AUTH_TOKEN environment variables.
   * @throws If no API key can be resolved.
   */
  constructor(
    fields?: Partial<JinaEmbeddingsParams> & {
      apiKey?: string
    }
  ) {
    const fieldsWithDefaults = { maxConcurrency: 2, ...fields }
    super(fieldsWithDefaults)
    const apiKey =
      fieldsWithDefaults?.apiKey || getEnvironmentVariable('JINA_API_KEY') || getEnvironmentVariable('JINA_AUTH_TOKEN')
    if (!apiKey) throw new Error('Jina API key not found')
    this.apiKey = apiKey
    if (fieldsWithDefaults?.baseUrl) {
      // FIX: the path was previously concatenated naively (`${baseUrl}embeddings`),
      // producing e.g. "https://host/v1embeddings" when the caller's base URL had
      // no trailing slash. Normalize the separator before appending.
      const root = fieldsWithDefaults.baseUrl
      this.baseUrl = root.endsWith('/') ? `${root}embeddings` : `${root}/embeddings`
    }
    this.model = fieldsWithDefaults?.model ?? this.model
    this.dimensions = fieldsWithDefaults?.dimensions ?? this.dimensions
    this.batchSize = fieldsWithDefaults?.batchSize ?? this.batchSize
    this.stripNewLines = fieldsWithDefaults?.stripNewLines ?? this.stripNewLines
  }

  /** Replaces newlines with spaces in text inputs when `stripNewLines` is set. */
  private doStripNewLines(input: JinaEmbeddingsInput[]) {
    if (this.stripNewLines) {
      return input.map((i) => {
        if (typeof i === 'string') {
          return i.replace(/\n/g, ' ')
        }
        if (i.text) {
          return { text: i.text.replace(/\n/g, ' ') }
        }
        // Image-only inputs pass through untouched.
        return i
      })
    }
    return input
  }

  /**
   * Embeds a list of documents, batching by `batchSize` and issuing the
   * batches in parallel.
   */
  async embedDocuments(input: JinaEmbeddingsInput[]): Promise<number[][]> {
    const batches = chunkArray(this.doStripNewLines(input), this.batchSize)
    const batchRequests = batches.map((batch) => {
      const params = this.getParams(batch)
      return this.embeddingWithRetry(params)
    })
    const batchResponses = await Promise.all(batchRequests)
    const embeddings: number[][] = []
    for (let i = 0; i < batchResponses.length; i += 1) {
      const batch = batches[i]
      const batchResponse = batchResponses[i] || []
      for (let j = 0; j < batch.length; j += 1) {
        embeddings.push(batchResponse[j])
      }
    }
    return embeddings
  }

  /** Embeds a single query input, using the `retrieval.query` task where applicable. */
  async embedQuery(input: JinaEmbeddingsInput): Promise<number[]> {
    const params = this.getParams(this.doStripNewLines([input]), true)
    const embeddings = (await this.embeddingWithRetry(params)) || [[]]
    return embeddings[0]
  }

  /** Builds the request payload; clip-v2 takes no retrieval task for passages. */
  private getParams(input: JinaEmbeddingsInput[], query?: boolean): EmbeddingCreateParams {
    return {
      model: this.model,
      input,
      dimensions: this.dimensions,
      task: query ? 'retrieval.query' : this.model === 'jina-clip-v2' ? undefined : 'retrieval.passage'
    }
  }

  /**
   * POSTs one embedding request and returns the embedding vectors.
   * @throws With the API's `detail` message, or the HTTP status when no detail is given.
   * NOTE(review): despite the name, this method performs no retries — confirm intent.
   */
  private async embeddingWithRetry(body: EmbeddingCreateParams) {
    const response = await fetch(this.baseUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(body)
    })
    const embeddingData: EmbeddingResponse | EmbeddingErrorResponse = await response.json()
    if ('detail' in embeddingData && embeddingData.detail) {
      throw new Error(`${embeddingData.detail}`)
    }
    // FIX: surface non-2xx responses that carry no `detail` field instead of
    // failing later with an opaque error when `.data` is missing.
    if (!response.ok) {
      throw new Error(`Jina embeddings request failed with status ${response.status}`)
    }
    return (embeddingData as EmbeddingResponse).data.map(({ embedding }) => embedding)
  }
}

View File

@ -1,25 +0,0 @@
import type { Embeddings as BaseEmbeddings } from '@langchain/core/embeddings'
import { TraceMethod } from '@mcp-trace/trace-core'
import { ApiClient } from '@types'
import EmbeddingsFactory from './EmbeddingsFactory'
/**
 * Thin tracing wrapper around a LangChain Embeddings implementation produced
 * by EmbeddingsFactory; both calls delegate unchanged to the underlying SDK.
 */
export default class TextEmbeddings {
  private sdk: BaseEmbeddings

  /** @param embedApiClient Embedding endpoint configuration; @param dimensions optional output size. */
  constructor({ embedApiClient, dimensions }: { embedApiClient: ApiClient; dimensions?: number }) {
    this.sdk = EmbeddingsFactory.create({ embedApiClient, dimensions })
  }

  /** Embeds a batch of documents (traced under span 'embedDocuments'). */
  @TraceMethod({ spanName: 'embedDocuments', tag: 'Embeddings' })
  public async embedDocuments(texts: string[]): Promise<number[][]> {
    return this.sdk.embedDocuments(texts)
  }

  /** Embeds a single query string (traced under span 'embedQuery'). */
  @TraceMethod({ spanName: 'embedQuery', tag: 'Embeddings' })
  public async embedQuery(text: string): Promise<number[]> {
    return this.sdk.embedQuery(text)
  }
}

View File

@ -1,97 +0,0 @@
import { BaseDocumentLoader } from '@langchain/core/document_loaders/base'
import { Document } from '@langchain/core/documents'
import { readTextFileWithAutoEncoding } from '@main/utils/file'
import MarkdownIt from 'markdown-it'
/**
 * Loads a markdown file and splits it into one Document per heading section.
 * Content before the first heading is collected under the synthetic heading
 * "Introduction".
 */
export class MarkdownLoader extends BaseDocumentLoader {
  private path: string
  private md: MarkdownIt

  /** @param path Path of the markdown file to load. */
  constructor(path: string) {
    super()
    this.path = path
    this.md = new MarkdownIt()
  }

  /** Reads the file (auto-detecting its encoding) and parses it into sections. */
  public async load(): Promise<Document[]> {
    const content = await readTextFileWithAutoEncoding(this.path)
    return this.parseMarkdown(content)
  }

  /** Converts one accumulated section into a Document with heading metadata. */
  private sectionToDocument(section: {
    heading?: string
    level?: number
    content: string
    startLine?: number
  }): Document {
    return new Document({
      pageContent: section.content.trim(),
      metadata: {
        source: this.path,
        heading: section.heading || 'Introduction',
        level: section.level || 0,
        startLine: section.startLine || 0
      }
    })
  }

  /**
   * Walks the markdown-it token stream, starting a new section at every
   * heading and accumulating block content in between.
   */
  private parseMarkdown(content: string): Document[] {
    const tokens = this.md.parse(content, {})
    const documents: Document[] = []
    let currentSection: {
      heading?: string
      level?: number
      content: string
      startLine?: number
    } = { content: '' }
    let i = 0
    while (i < tokens.length) {
      const token = tokens[i]
      if (token.type === 'heading_open') {
        // Flush the previous section if it accumulated any content.
        if (currentSection.content.trim()) {
          documents.push(this.sectionToDocument(currentSection))
        }
        // Start a new section for this heading.
        const level = parseInt(token.tag.slice(1)) // Extract number from h1, h2, etc.
        const headingContent = tokens[i + 1]?.content || ''
        currentSection = {
          heading: headingContent,
          level: level,
          content: '',
          startLine: token.map?.[0] || 0
        }
        // Skip the heading_open, inline, and heading_close tokens.
        i += 3
        continue
      }
      // Accumulate token content into the current section.
      if (token.content) {
        currentSection.content += token.content
      }
      // Block-level tokens terminate a line.
      if (token.block && token.type !== 'heading_close') {
        currentSection.content += '\n'
      }
      i++
    }
    // Flush the last section.
    if (currentSection.content.trim()) {
      documents.push(this.sectionToDocument(currentSection))
    }
    return documents
  }
}

View File

@ -1,50 +0,0 @@
import { BaseDocumentLoader } from '@langchain/core/document_loaders/base'
import { Document } from '@langchain/core/documents'
/**
 * Loads a raw note's text as one or more Documents, tagging each with the
 * note's source URL (or 'note' when none is provided).
 */
export class NoteLoader extends BaseDocumentLoader {
  private text: string
  private sourceUrl?: string

  constructor(
    public _text: string,
    public _sourceUrl?: string
  ) {
    super()
    this.text = _text
    this.sourceUrl = _sourceUrl
  }

  /**
   * A protected method that takes a `raw` string as a parameter and returns
   * a promise that resolves to an array containing the raw text as a single
   * element.
   * @param raw The raw text to be parsed.
   * @returns A promise that resolves to an array containing the raw text as a single element.
   */
  protected async parse(raw: string): Promise<string[]> {
    return [raw]
  }

  /** Parses the note text and wraps each chunk into a Document. */
  public async load(): Promise<Document[]> {
    const metadata = { source: this.sourceUrl || 'note' }
    const chunks = await this.parse(this.text)
    return chunks.map((pageContent, index) => {
      if (typeof pageContent !== 'string') {
        throw new Error(`Expected string, at position ${index} got ${typeof pageContent}`)
      }
      // A multi-chunk note records the 1-based chunk number under `line`.
      const docMetadata = chunks.length === 1 ? metadata : { ...metadata, line: index + 1 }
      return new Document({ pageContent, metadata: docMetadata })
    })
  }
}

View File

@ -1,170 +0,0 @@
import { BaseDocumentLoader } from '@langchain/core/document_loaders/base'
import { Document } from '@langchain/core/documents'
import { Innertube } from 'youtubei.js'
// ... (interface definitions YoutubeConfig and VideoMetadata are unchanged)
/**
 * Configuration options for the YoutubeLoader class. Includes properties
 * such as the videoId, language, and addVideoInfo.
 */
interface YoutubeConfig {
  videoId: string
  language?: string
  addVideoInfo?: boolean
  /** Output format of the transcript: plain text or SRT subtitles. */
  transcriptFormat?: 'text' | 'srt'
}
/**
 * Metadata of a YouTube video. Includes properties such as the source
 * (videoId), description, title, view_count, author, and category.
 */
interface VideoMetadata {
  /** The video id the transcript was loaded from. */
  source: string
  description?: string
  title?: string
  view_count?: number
  author?: string
  category?: string
}
/**
 * A document loader for loading data from YouTube videos. It uses the
 * youtubei.js library to fetch the transcript and video metadata.
 * @example
 * ```typescript
 * const loader = new YoutubeLoader({
 *   videoId: "VIDEO_ID",
 *   language: "en",
 *   addVideoInfo: true,
 *   transcriptFormat: "srt" // fetch the transcript in SRT format
 * });
 * const docs = await loader.load();
 * console.log(docs[0].pageContent);
 * ```
 */
export class YoutubeLoader extends BaseDocumentLoader {
  private videoId: string
  private language?: string
  private addVideoInfo: boolean
  // Controls whether the transcript is emitted as plain text or SRT.
  private transcriptFormat: 'text' | 'srt'

  constructor(config: YoutubeConfig) {
    super()
    this.videoId = config.videoId
    this.language = config?.language
    this.addVideoInfo = config?.addVideoInfo ?? false
    // Defaults to 'text' for backward compatibility.
    this.transcriptFormat = config?.transcriptFormat ?? 'text'
  }

  /**
   * Extracts the videoId from a YouTube video URL.
   * @param url The URL of the YouTube video.
   * @returns The videoId of the YouTube video.
   */
  private static getVideoID(url: string): string {
    const match = url.match(/.*(?:youtu.be\/|v\/|u\/\w\/|embed\/|watch\?v=)([^#&?]*).*/)
    if (match !== null && match[1].length === 11) {
      return match[1]
    } else {
      throw new Error('Failed to get youtube video id from the url')
    }
  }

  /**
   * Creates a new instance of the YoutubeLoader class from a YouTube video
   * URL.
   * @param url The URL of the YouTube video.
   * @param config Optional configuration options for the YoutubeLoader instance, excluding the videoId.
   * @returns A new instance of the YoutubeLoader class.
   */
  static createFromUrl(url: string, config?: Omit<YoutubeConfig, 'videoId'>): YoutubeLoader {
    const videoId = YoutubeLoader.getVideoID(url)
    return new YoutubeLoader({ ...config, videoId })
  }

  /**
   * Formats a millisecond offset as an SRT timestamp (HH:MM:SS,ms).
   * @param ms Offset from the start of the video, in milliseconds.
   * @returns The SRT-formatted timestamp string.
   */
  private static formatTimestamp(ms: number): string {
    const totalSeconds = Math.floor(ms / 1000)
    const hours = Math.floor(totalSeconds / 3600)
      .toString()
      .padStart(2, '0')
    const minutes = Math.floor((totalSeconds % 3600) / 60)
      .toString()
      .padStart(2, '0')
    const seconds = (totalSeconds % 60).toString().padStart(2, '0')
    const milliseconds = (ms % 1000).toString().padStart(3, '0')
    return `${hours}:${minutes}:${seconds},${milliseconds}`
  }

  /**
   * Loads the transcript and video metadata from the specified YouTube
   * video. It can return the transcript as plain text or in SRT format.
   * @returns An array of Documents representing the retrieved data.
   */
  async load(): Promise<Document[]> {
    const metadata: VideoMetadata = {
      source: this.videoId
    }
    try {
      const youtube = await Innertube.create({
        lang: this.language,
        retrieve_player: false
      })
      const info = await youtube.getInfo(this.videoId)
      const transcriptData = await info.getTranscript()
      if (!transcriptData.transcript.content?.body?.initial_segments) {
        throw new Error('Transcript segments not found in the response.')
      }
      const segments = transcriptData.transcript.content.body.initial_segments
      let pageContent: string
      // Decide how to format the transcript based on transcriptFormat.
      if (this.transcriptFormat === 'srt') {
        // Format subtitle segments as numbered SRT blocks.
        pageContent = segments
          .map((segment, index) => {
            const srtIndex = index + 1
            const startTime = YoutubeLoader.formatTimestamp(Number(segment.start_ms))
            const endTime = YoutubeLoader.formatTimestamp(Number(segment.end_ms))
            const text = segment.snippet?.text || '' // the caption text lives under segment.snippet.text
            return `${srtIndex}\n${startTime} --> ${endTime}\n${text}`
          })
          .join('\n\n') // SRT blocks are separated by a blank line
      } else {
        // Plain-text mode: concatenate all segment texts with spaces.
        pageContent = segments.map((segment) => segment.snippet?.text || '').join(' ')
      }
      if (this.addVideoInfo) {
        const basicInfo = info.basic_info
        metadata.description = basicInfo.short_description
        metadata.title = basicInfo.title
        metadata.view_count = basicInfo.view_count
        metadata.author = basicInfo.author
      }
      const document = new Document({
        pageContent,
        metadata
      })
      return [document]
    } catch (e: unknown) {
      throw new Error(`Failed to get YouTube video transcription: ${(e as Error).message}`)
    }
  }
}

View File

@ -1,235 +0,0 @@
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx'
import { EPubLoader } from '@langchain/community/document_loaders/fs/epub'
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf'
import { PPTXLoader } from '@langchain/community/document_loaders/fs/pptx'
import { CheerioWebBaseLoader } from '@langchain/community/document_loaders/web/cheerio'
import { SitemapLoader } from '@langchain/community/document_loaders/web/sitemap'
import { FaissStore } from '@langchain/community/vectorstores/faiss'
import { Document } from '@langchain/core/documents'
import { loggerService } from '@logger'
import { UrlSource } from '@main/utils/knowledge'
import { LoaderReturn } from '@shared/config/types'
import { FileMetadata, FileTypes, KnowledgeBaseParams } from '@types'
import { randomUUID } from 'crypto'
import { JSONLoader } from 'langchain/document_loaders/fs/json'
import { TextLoader } from 'langchain/document_loaders/fs/text'
import { SplitterFactory } from '../splitter'
import { MarkdownLoader } from './MarkdownLoader'
import { NoteLoader } from './NoteLoader'
import { YoutubeLoader } from './YoutubeLoader'
// Scoped logger shared by every loader helper in this module.
const logger = loggerService.withContext('KnowledgeService File Loader')

// Union of every concrete loader this module can instantiate; the shared
// execute/process helpers accept any of them via the common `load()` contract.
type LoaderInstance =
  | TextLoader
  | PDFLoader
  | PPTXLoader
  | DocxLoader
  | JSONLoader
  | EPubLoader
  | CheerioWebBaseLoader
  | YoutubeLoader
  | SitemapLoader
  | NoteLoader
  | MarkdownLoader
/**
* metadata
*/
function formatDocument(docs: Document[], type: string): Document[] {
return docs.map((doc) => ({
...doc,
metadata: {
...doc.metadata,
type: type
}
}))
}
/**
 * Tags, splits, and stores documents in the vector store.
 * @returns The number of chunks added and the generated chunk ids.
 */
async function processDocuments(
  base: KnowledgeBaseParams,
  vectorStore: FaissStore,
  docs: Document[],
  loaderType: string,
  splitterType?: string
): Promise<LoaderReturn> {
  const taggedDocs = formatDocument(docs, loaderType)
  const splitter = SplitterFactory.create({
    chunkSize: base.chunkSize,
    chunkOverlap: base.chunkOverlap,
    ...(splitterType && { type: splitterType })
  })
  const chunks = await splitter.splitDocuments(taggedDocs)
  const ids = chunks.map(() => randomUUID())
  await vectorStore.addDocuments(chunks, { ids })
  return {
    entriesAdded: chunks.length,
    uniqueId: ids[0] ?? '',
    uniqueIds: ids,
    loaderType
  }
}
/**
 * Runs a loader and feeds its documents through processDocuments; any failure
 * is logged and reported as an empty result rather than thrown.
 */
async function executeLoader(
  base: KnowledgeBaseParams,
  vectorStore: FaissStore,
  loaderInstance: LoaderInstance,
  loaderType: string,
  identifier: string,
  splitterType?: string
): Promise<LoaderReturn> {
  try {
    const docs = await loaderInstance.load()
    return await processDocuments(base, vectorStore, docs, loaderType, splitterType)
  } catch (error) {
    logger.error(`Error loading or processing ${identifier} with loader ${loaderType}: ${error}`)
    return {
      entriesAdded: 0,
      uniqueId: '',
      uniqueIds: [],
      loaderType
    }
  }
}
/**
 * Maps a lowercase file extension to the loader class used for it and the
 * loaderType tag stored in the resulting documents' metadata.
 */
const FILE_LOADER_MAP: Record<string, { loader: new (path: string) => LoaderInstance; type: string }> = {
  '.pdf': { loader: PDFLoader, type: 'pdf' },
  '.txt': { loader: TextLoader, type: 'text' },
  '.pptx': { loader: PPTXLoader, type: 'pptx' },
  '.docx': { loader: DocxLoader, type: 'docx' },
  '.doc': { loader: DocxLoader, type: 'doc' },
  '.json': { loader: JSONLoader, type: 'json' },
  '.epub': { loader: EPubLoader, type: 'epub' },
  '.md': { loader: MarkdownLoader, type: 'markdown' }
}
/**
 * Indexes a single file into the vector store, picking a loader by extension
 * and falling back to the plain text loader for unknown extensions.
 */
export async function addFileLoader(
  base: KnowledgeBaseParams,
  vectorStore: FaissStore,
  file: FileMetadata
): Promise<LoaderReturn> {
  const ext = file.ext.toLowerCase()
  const config = FILE_LOADER_MAP[ext]
  if (config) {
    return executeLoader(base, vectorStore, new config.loader(file.path), config.type, file.path)
  }
  // Unknown extensions fall back to the plain text loader, tagged by extension.
  const fallbackType = ext.replace('.', '') || 'unknown'
  return executeLoader(base, vectorStore, new TextLoader(file.path), fallbackType, file.path)
}
/**
 * Indexes a web URL: plain pages go through Cheerio, YouTube URLs are loaded
 * as SRT transcripts and split with the SRT splitter. Unknown sources return
 * an empty result.
 */
export async function addWebLoader(
  base: KnowledgeBaseParams,
  vectorStore: FaissStore,
  url: string,
  source: UrlSource
): Promise<LoaderReturn> {
  if (source === 'normal') {
    return executeLoader(base, vectorStore, new CheerioWebBaseLoader(url), source, url)
  }
  if (source === 'youtube') {
    const loader = YoutubeLoader.createFromUrl(url, {
      addVideoInfo: true,
      transcriptFormat: 'srt'
    })
    return executeLoader(base, vectorStore, loader, source, url, 'srt')
  }
  return {
    entriesAdded: 0,
    uniqueId: '',
    uniqueIds: [],
    loaderType: source
  }
}
/** Indexes every page referenced by a sitemap URL. */
export async function addSitemapLoader(
  base: KnowledgeBaseParams,
  vectorStore: FaissStore,
  url: string
): Promise<LoaderReturn> {
  return executeLoader(base, vectorStore, new SitemapLoader(url), 'sitemap', url)
}
/** Indexes a note's raw text content, tagged with its source URL. */
export async function addNoteLoader(
  base: KnowledgeBaseParams,
  vectorStore: FaissStore,
  content: string,
  sourceUrl: string
): Promise<LoaderReturn> {
  return executeLoader(base, vectorStore, new NoteLoader(content, sourceUrl), 'note', sourceUrl)
}
/**
 * Indexes a video by embedding its SRT subtitle file, attaching the video
 * file's path and original name to every document's metadata. Requires both
 * a TEXT (subtitle) and a VIDEO file; otherwise returns an empty result.
 */
export async function addVideoLoader(
  base: KnowledgeBaseParams,
  vectorStore: FaissStore,
  files: FileMetadata[]
): Promise<LoaderReturn> {
  const emptyResult: LoaderReturn = {
    entriesAdded: 0,
    uniqueId: '',
    uniqueIds: [],
    loaderType: 'video'
  }
  const srtFile = files.find((f) => f.type === FileTypes.TEXT)
  const videoFile = files.find((f) => f.type === FileTypes.VIDEO)
  if (!srtFile || !videoFile) {
    return emptyResult
  }
  try {
    const subtitleDocs = await new TextLoader(srtFile.path).load()
    // Attach the video's location to each subtitle chunk so search hits can
    // link back to the source video.
    const enrichedDocs = subtitleDocs.map(
      (doc) =>
        new Document({
          ...doc,
          metadata: {
            ...doc.metadata,
            video: {
              path: videoFile.path,
              name: videoFile.origin_name
            }
          }
        })
    )
    return await processDocuments(base, vectorStore, enrichedDocs, 'video', 'srt')
  } catch (error) {
    logger.error(`Error loading or processing file ${srtFile.path} with loader video: ${error}`)
    return emptyResult
  }
}

View File

@ -1,55 +0,0 @@
import { BM25Retriever } from '@langchain/community/retrievers/bm25'
import { FaissStore } from '@langchain/community/vectorstores/faiss'
import { BaseRetriever } from '@langchain/core/retrievers'
import { loggerService } from '@main/services/LoggerService'
import { type KnowledgeBaseParams } from '@types'
import { type Document } from 'langchain/document'
import { EnsembleRetriever } from 'langchain/retrievers/ensemble'
const logger = loggerService.withContext('RetrieverFactory')

/**
 * Builds LangChain retrievers (vector, BM25, or a weighted hybrid of both)
 * from a knowledge base configuration.
 */
export class RetrieverFactory {
  /**
   * Creates a retriever for the requested mode.
   * @param base Knowledge base parameters (retriever mode, weight, documentCount).
   * @param vectorStore Vector store backing the vector retriever.
   * @param documents Corpus used to build the BM25 retriever.
   * @returns A BaseRetriever matching `base.retriever.mode` (default: hybrid).
   */
  public createRetriever(base: KnowledgeBaseParams, vectorStore: FaissStore, documents: Document[]): BaseRetriever {
    const retrieverType = base.retriever?.mode ?? 'hybrid'
    const retrieverWeight = base.retriever?.weight ?? 0.5
    const searchK = base.documentCount ?? 5
    logger.info(`Creating retriever of type: ${retrieverType} with k=${searchK}`)

    if (retrieverType === 'bm25') {
      // BM25 is purely lexical and cannot work without a corpus.
      if (documents.length === 0) {
        throw new Error('BM25Retriever requires documents, but none were provided or found.')
      }
      logger.info('Create BM25 Retriever')
      return BM25Retriever.fromDocuments(documents, { k: searchK })
    }

    if (retrieverType === 'hybrid') {
      if (documents.length === 0) {
        logger.warn('No documents provided for BM25 part of hybrid search. Falling back to vector search only.')
        return vectorStore.asRetriever(searchK)
      }
      const vectorstoreRetriever = vectorStore.asRetriever(searchK)
      const bm25Retriever = BM25Retriever.fromDocuments(documents, { k: searchK })
      logger.info('Create Hybrid Retriever')
      return new EnsembleRetriever({
        retrievers: [bm25Retriever, vectorstoreRetriever],
        weights: [retrieverWeight, 1 - retrieverWeight]
      })
    }

    // 'vector' and any unrecognised mode fall back to plain vector search.
    logger.info('Create Vector Retriever')
    return vectorStore.asRetriever(searchK)
  }
}

View File

@ -1,133 +0,0 @@
import { Document } from '@langchain/core/documents'
import { TextSplitter, TextSplitterParams } from 'langchain/text_splitter'
// Represents one parsed subtitle segment from an SRT file.
interface SrtSegment {
  text: string
  startTime: number // in seconds
  endTime: number // in seconds
}
/** Converts an SRT timestamp string ("HH:MM:SS,ms") into seconds. */
function srtTimeToSeconds(time: string): number {
  const [hoursPart, minutesPart, rest] = time.split(':')
  const [secondsPart, msPart] = rest.split(',')
  return (
    parseInt(hoursPart, 10) * 3600 +
    parseInt(minutesPart, 10) * 60 +
    parseInt(secondsPart, 10) +
    parseInt(msPart, 10) / 1000
  )
}
/**
 * Splits SRT subtitle documents into chunks of five subtitle segments each,
 * preserving the chunk's start/end timestamps (in seconds) in its metadata.
 * When the source metadata carries `source_url`, a timestamped deep link is
 * added as `source_url_with_timestamp`.
 */
export class SrtSplitter extends TextSplitter {
  /** Number of subtitle segments merged into one chunk. */
  private static readonly SEGMENTS_PER_CHUNK = 5

  constructor(fields?: Partial<TextSplitterParams>) {
    // chunkSize/chunkOverlap are accepted for interface compatibility, but this
    // splitter actually chunks by segment count, not by characters.
    super(fields)
  }

  /** Not supported: SRT splitting needs timing metadata — use splitDocuments. */
  splitText(): Promise<string[]> {
    throw new Error('Method not implemented.')
  }

  /** Parses each document as SRT and merges its segments into timed chunks. */
  async splitDocuments(documents: Document[]): Promise<Document[]> {
    const allChunks: Document[] = []
    for (const doc of documents) {
      const segments = this.parseSrt(doc.pageContent)
      if (segments.length === 0) continue
      allChunks.push(...this.mergeSegmentsIntoChunks(segments, doc.metadata))
    }
    return allChunks
  }

  /** Parses a whole SRT string into segments; malformed blocks are skipped. */
  private parseSrt(srt: string): SrtSegment[] {
    const segments: SrtSegment[] = []
    const blocks = srt.trim().split(/\n\n/)
    for (const block of blocks) {
      const lines = block.split('\n')
      if (lines.length < 3) continue
      const timeMatch = lines[1].match(/(\d{2}:\d{2}:\d{2},\d{3}) --> (\d{2}:\d{2}:\d{2},\d{3})/)
      if (!timeMatch) continue
      const startTime = srtTimeToSeconds(timeMatch[1])
      const endTime = srtTimeToSeconds(timeMatch[2])
      const text = lines.slice(2).join(' ').trim()
      segments.push({ text, startTime, endTime })
    }
    return segments
  }

  /**
   * Builds one chunk Document; extracted to remove the duplicated
   * metadata/Document construction the original had in two places.
   */
  private buildChunk(
    text: string,
    startTime: number,
    endTime: number,
    baseMetadata: Record<string, any>
  ): Document {
    const metadata: Record<string, any> = {
      ...baseMetadata,
      startTime,
      endTime
    }
    if (baseMetadata.source_url) {
      metadata.source_url_with_timestamp = `${baseMetadata.source_url}?t=${Math.floor(startTime)}s`
    }
    return new Document({ pageContent: text, metadata })
  }

  /** Merges parsed segments into Documents of SEGMENTS_PER_CHUNK segments each. */
  private mergeSegmentsIntoChunks(segments: SrtSegment[], baseMetadata: Record<string, any>): Document[] {
    const chunks: Document[] = []
    let currentChunkText = ''
    let currentChunkStartTime = 0
    let currentChunkEndTime = 0
    let segmentCount = 0
    for (const segment of segments) {
      if (segmentCount === 0) {
        currentChunkStartTime = segment.startTime
      }
      currentChunkText += (currentChunkText ? ' ' : '') + segment.text
      currentChunkEndTime = segment.endTime
      segmentCount++
      if (segmentCount === SrtSplitter.SEGMENTS_PER_CHUNK) {
        chunks.push(this.buildChunk(currentChunkText, currentChunkStartTime, currentChunkEndTime, baseMetadata))
        // Reset the accumulator for the next chunk.
        currentChunkText = ''
        currentChunkStartTime = 0
        currentChunkEndTime = 0
        segmentCount = 0
      }
    }
    // Flush any remaining segments as a final, possibly shorter, chunk.
    if (segmentCount > 0) {
      chunks.push(this.buildChunk(currentChunkText, currentChunkStartTime, currentChunkEndTime, baseMetadata))
    }
    return chunks
  }
}

View File

@ -1,31 +0,0 @@
import { RecursiveCharacterTextSplitter, TextSplitter } from '@langchain/textsplitters'
import { SrtSplitter } from './SrtSplitter'
/** Configuration accepted by SplitterFactory.create. */
export type SplitterConfig = {
  chunkSize?: number
  chunkOverlap?: number
  /** Splitter kind; anything other than 'srt' selects the recursive splitter. */
  type?: 'recursive' | 'srt' | string
}
/** Factory that selects a TextSplitter implementation from a config object. */
export class SplitterFactory {
  /**
   * Creates a TextSplitter instance based on the provided configuration.
   * @param config - The configuration object specifying the splitter type and its parameters.
   * @returns An instance of a TextSplitter.
   */
  public static create(config: SplitterConfig): TextSplitter {
    const options = {
      chunkSize: config.chunkSize,
      chunkOverlap: config.chunkOverlap
    }
    if (config.type === 'srt') {
      return new SrtSplitter(options)
    }
    // 'recursive' and any unrecognised type fall back to the recursive splitter.
    return new RecursiveCharacterTextSplitter(options)
  }
}

View File

@ -1,3 +1,18 @@
/**
* Knowledge Service - Manages knowledge bases using RAG (Retrieval-Augmented Generation)
*
* This service handles creation, management, and querying of knowledge bases from various sources
* including files, directories, URLs, sitemaps, and notes.
*
* Features:
* - Concurrent task processing with workload management
* - Multiple data source support
* - Vector database integration
*
* For detailed documentation, see:
* @see {@link ../../../docs/technical/KnowledgeService.md}
*/
import * as fs from 'node:fs'
import path from 'node:path'
@ -9,32 +24,87 @@ import { loggerService } from '@logger'
import Embeddings from '@main/knowledge/embedjs/embeddings/Embeddings'
import { addFileLoader } from '@main/knowledge/embedjs/loader'
import { NoteLoader } from '@main/knowledge/embedjs/loader/noteLoader'
import { preprocessingService } from '@main/knowledge/preprocess/PreprocessingService'
import PreprocessProvider from '@main/knowledge/preprocess/PreprocessProvider'
import Reranker from '@main/knowledge/reranker/Reranker'
import { fileStorage } from '@main/services/FileStorage'
import { windowService } from '@main/services/WindowService'
import { getDataPath } from '@main/utils'
import { getAllFiles } from '@main/utils/file'
import { TraceMethod } from '@mcp-trace/trace-core'
import { MB } from '@shared/config/constant'
import { LoaderReturn } from '@shared/config/types'
import type { LoaderReturn } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import { FileMetadata, KnowledgeBaseParams, KnowledgeSearchResult } from '@types'
import { FileMetadata, KnowledgeBaseParams, KnowledgeItem, KnowledgeSearchResult } from '@types'
import { v4 as uuidv4 } from 'uuid'
import { windowService } from '../WindowService'
import {
IKnowledgeFramework,
KnowledgeBaseAddItemOptionsNonNullableAttribute,
LoaderDoneReturn,
LoaderTask,
LoaderTaskItem,
LoaderTaskItemState
} from './IKnowledgeFramework'
const logger = loggerService.withContext('MainKnowledgeService')
export class EmbedJsFramework implements IKnowledgeFramework {
private storageDir: string
private ragApplications: Map<string, RAGApplication> = new Map()
private pendingDeleteFile: string
private dbInstances: Map<string, LibSqlDb> = new Map()
/** Options for adding a single item to a knowledge base. */
export interface KnowledgeBaseAddItemOptions {
  base: KnowledgeBaseParams
  item: KnowledgeItem
  /** When true, re-process the item even if it was loaded before. */
  forceReload?: boolean
  userId?: string
}

/** Internal shape of the options after defaults have been applied. */
interface KnowledgeBaseAddItemOptionsNonNullableAttribute {
  base: KnowledgeBaseParams
  item: KnowledgeItem
  forceReload: boolean
  userId: string
}
/** Estimated cost of a task, used to throttle concurrent processing. */
interface EvaluateTaskWorkload {
  workload: number
}

/** Aggregate result of a finished loader task; null until completion. */
type LoaderDoneReturn = LoaderReturn | null

/** Lifecycle states of a single loader sub-task. */
enum LoaderTaskItemState {
  PENDING,
  PROCESSING,
  DONE
}

/** One schedulable unit of loading work plus its estimated workload. */
interface LoaderTaskItem {
  state: LoaderTaskItemState
  task: () => Promise<unknown>
  evaluateTaskWorkload: EvaluateTaskWorkload
}

/** A loader task as submitted: an ordered list of sub-tasks and its result slot. */
interface LoaderTask {
  loaderTasks: LoaderTaskItem[]
  loaderDoneReturn: LoaderDoneReturn
}

/** Queue-internal view of a LoaderTask with sub-tasks held in a Set. */
interface LoaderTaskOfSet {
  loaderTasks: Set<LoaderTaskItem>
  loaderDoneReturn: LoaderDoneReturn
}

/** Entry in the processing queue: the work, its completion callback, and cost. */
interface QueueTaskItem {
  taskPromise: () => Promise<unknown>
  resolve: () => void
  evaluateTaskWorkload: EvaluateTaskWorkload
}
const loaderTaskIntoOfSet = (loaderTask: LoaderTask): LoaderTaskOfSet => {
return {
loaderTasks: new Set(loaderTask.loaderTasks),
loaderDoneReturn: loaderTask.loaderDoneReturn
}
}
class KnowledgeService {
private storageDir = path.join(getDataPath(), 'KnowledgeBase')
private pendingDeleteFile = path.join(this.storageDir, 'knowledge_pending_delete.json')
// Byte based
private workload = 0
private processingItemCount = 0
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
private ragApplications: Map<string, RAGApplication> = new Map()
private dbInstances: Map<string, LibSqlDb> = new Map()
private static MAXIMUM_WORKLOAD = 80 * MB
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
private static ERROR_LOADER_RETURN: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
@ -43,9 +113,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
status: 'failed'
}
constructor(storageDir: string) {
this.storageDir = storageDir
this.pendingDeleteFile = path.join(this.storageDir, 'knowledge_pending_delete.json')
constructor() {
this.initStorageDir()
this.cleanupOnStartup()
}
@ -160,28 +228,33 @@ export class EmbedJsFramework implements IKnowledgeFramework {
logger.info(`Startup cleanup completed: ${deletedCount}/${pendingDeleteIds.length} knowledge bases deleted`)
}
private async getRagApplication(base: KnowledgeBaseParams): Promise<RAGApplication> {
if (this.ragApplications.has(base.id)) {
return this.ragApplications.get(base.id)!
private getRagApplication = async ({
id,
embedApiClient,
dimensions,
documentCount
}: KnowledgeBaseParams): Promise<RAGApplication> => {
if (this.ragApplications.has(id)) {
return this.ragApplications.get(id)!
}
let ragApplication: RAGApplication
const embeddings = new Embeddings({
embedApiClient: base.embedApiClient,
dimensions: base.dimensions
embedApiClient,
dimensions
})
try {
const libSqlDb = new LibSqlDb({ path: path.join(this.storageDir, base.id) })
const libSqlDb = new LibSqlDb({ path: path.join(this.storageDir, id) })
// Save database instance for later closing
this.dbInstances.set(base.id, libSqlDb)
this.dbInstances.set(id, libSqlDb)
ragApplication = await new RAGApplicationBuilder()
.setModel('NO_MODEL')
.setEmbeddingModel(embeddings)
.setVectorDatabase(libSqlDb)
.setSearchResultCount(base.documentCount || 30)
.setSearchResultCount(documentCount || 30)
.build()
this.ragApplications.set(base.id, ragApplication)
this.ragApplications.set(id, ragApplication)
} catch (e) {
logger.error('Failed to create RAGApplication:', e as Error)
throw new Error(`Failed to create RAGApplication: ${e}`)
@ -189,14 +262,17 @@ export class EmbedJsFramework implements IKnowledgeFramework {
return ragApplication
}
async initialize(base: KnowledgeBaseParams): Promise<void> {
public create = async (_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> => {
await this.getRagApplication(base)
}
async reset(base: KnowledgeBaseParams): Promise<void> {
const ragApp = await this.getRagApplication(base)
await ragApp.reset()
public reset = async (_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> => {
const ragApplication = await this.getRagApplication(base)
await ragApplication.reset()
}
async delete(id: string): Promise<void> {
public async delete(_: Electron.IpcMainInvokeEvent, id: string): Promise<void> {
logger.debug(`delete id: ${id}`)
await this.cleanupKnowledgeResources(id)
@ -209,41 +285,15 @@ export class EmbedJsFramework implements IKnowledgeFramework {
this.pendingDeleteManager.add(id)
}
}
getLoaderTask(options: KnowledgeBaseAddItemOptionsNonNullableAttribute): LoaderTask {
const { item } = options
const getRagApplication = () => this.getRagApplication(options.base)
switch (item.type) {
case 'file':
return this.fileTask(getRagApplication, options)
case 'directory':
return this.directoryTask(getRagApplication, options)
case 'url':
return this.urlTask(getRagApplication, options)
case 'sitemap':
return this.sitemapTask(getRagApplication, options)
case 'note':
return this.noteTask(getRagApplication, options)
default:
return {
loaderTasks: [],
loaderDoneReturn: null
}
}
}
async remove(options: { uniqueIds: string[]; base: KnowledgeBaseParams }): Promise<void> {
const ragApp = await this.getRagApplication(options.base)
for (const id of options.uniqueIds) {
await ragApp.deleteLoader(id)
}
private maximumLoad() {
return (
this.processingItemCount >= KnowledgeService.MAXIMUM_PROCESSING_ITEM_COUNT ||
this.workload >= KnowledgeService.MAXIMUM_WORKLOAD
)
}
async search(options: { search: string; base: KnowledgeBaseParams }): Promise<KnowledgeSearchResult[]> {
const ragApp = await this.getRagApplication(options.base)
return await ragApp.search(options.search)
}
private fileTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload, userId } = options
@ -256,8 +306,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
task: async () => {
try {
// Add preprocessing logic
const ragApplication = await getRagApplication()
const fileToProcess: FileMetadata = await preprocessingService.preprocessFile(file, base, item, userId)
const fileToProcess: FileMetadata = await this.preprocessing(file, base, item, userId)
// Use processed file for loading
return addFileLoader(ragApplication, fileToProcess, base, forceReload)
@ -268,7 +317,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((e) => {
logger.error(`Error in addFileLoader for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
@ -278,7 +327,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
} catch (e: any) {
logger.error(`Preprocessing failed for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'preprocess'
}
@ -295,7 +344,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
return loaderTask
}
private directoryTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
@ -322,9 +371,8 @@ export class EmbedJsFramework implements IKnowledgeFramework {
for (const file of files) {
loaderTasks.push({
state: LoaderTaskItemState.PENDING,
task: async () => {
const ragApplication = await getRagApplication()
return addFileLoader(ragApplication, file, base, forceReload)
task: () =>
addFileLoader(ragApplication, file, base, forceReload)
.then((result) => {
loaderDoneReturn.entriesAdded += 1
processedFiles += 1
@ -335,12 +383,11 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((err) => {
logger.error('Failed to add dir loader:', err)
return {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add dir loader: ${err.message}`,
messageSource: 'embedding'
}
})
},
}),
evaluateTaskWorkload: { workload: file.size }
})
}
@ -352,7 +399,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
}
private urlTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
@ -362,8 +409,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
const ragApplication = await getRagApplication()
task: () => {
const loaderReturn = ragApplication.addLoader(
new WebLoader({
urlOrContent: content,
@ -387,7 +433,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((err) => {
logger.error('Failed to add url loader:', err)
return {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add url loader: ${err.message}`,
messageSource: 'embedding'
}
@ -402,7 +448,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
}
private sitemapTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
@ -412,9 +458,8 @@ export class EmbedJsFramework implements IKnowledgeFramework {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
const ragApplication = await getRagApplication()
return ragApplication
task: () =>
ragApplication
.addLoader(
new SitemapLoader({ url: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
forceReload
@ -432,12 +477,11 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((err) => {
logger.error('Failed to add sitemap loader:', err)
return {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add sitemap loader: ${err.message}`,
messageSource: 'embedding'
}
})
},
}),
evaluateTaskWorkload: { workload: 20 * MB }
}
],
@ -447,7 +491,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
}
private noteTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
@ -460,8 +504,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
const ragApplication = await getRagApplication()
task: () => {
const loaderReturn = ragApplication.addLoader(
new NoteLoader({
text: content,
@ -484,7 +527,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((err) => {
logger.error('Failed to add note loader:', err)
return {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add note loader: ${err.message}`,
messageSource: 'embedding'
}
@ -497,4 +540,199 @@ export class EmbedJsFramework implements IKnowledgeFramework {
}
return loaderTask
}
private processingQueueHandle() {
const getSubtasksUntilMaximumLoad = (): QueueTaskItem[] => {
const queueTaskList: QueueTaskItem[] = []
that: for (const [task, resolve] of this.knowledgeItemProcessingQueueMappingPromise) {
for (const item of task.loaderTasks) {
if (this.maximumLoad()) {
break that
}
const { state, task: taskPromise, evaluateTaskWorkload } = item
if (state !== LoaderTaskItemState.PENDING) {
continue
}
const { workload } = evaluateTaskWorkload
this.workload += workload
this.processingItemCount += 1
item.state = LoaderTaskItemState.PROCESSING
queueTaskList.push({
taskPromise: () =>
taskPromise().then(() => {
this.workload -= workload
this.processingItemCount -= 1
task.loaderTasks.delete(item)
if (task.loaderTasks.size === 0) {
this.knowledgeItemProcessingQueueMappingPromise.delete(task)
resolve()
}
this.processingQueueHandle()
}),
resolve: () => {},
evaluateTaskWorkload
})
}
}
return queueTaskList
}
const subTasks = getSubtasksUntilMaximumLoad()
if (subTasks.length > 0) {
const subTaskPromises = subTasks.map(({ taskPromise }) => taskPromise())
Promise.all(subTaskPromises).then(() => {
subTasks.forEach(({ resolve }) => resolve())
})
}
}
private appendProcessingQueue(task: LoaderTask): Promise<LoaderReturn> {
return new Promise((resolve) => {
this.knowledgeItemProcessingQueueMappingPromise.set(loaderTaskIntoOfSet(task), () => {
resolve(task.loaderDoneReturn!)
})
})
}
public add = (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise<LoaderReturn> => {
return new Promise((resolve) => {
const { base, item, forceReload = false, userId = '' } = options
const optionsNonNullableAttribute = { base, item, forceReload, userId }
this.getRagApplication(base)
.then((ragApplication) => {
const task = (() => {
switch (item.type) {
case 'file':
return this.fileTask(ragApplication, optionsNonNullableAttribute)
case 'directory':
return this.directoryTask(ragApplication, optionsNonNullableAttribute)
case 'url':
return this.urlTask(ragApplication, optionsNonNullableAttribute)
case 'sitemap':
return this.sitemapTask(ragApplication, optionsNonNullableAttribute)
case 'note':
return this.noteTask(ragApplication, optionsNonNullableAttribute)
default:
return null
}
})()
if (task) {
this.appendProcessingQueue(task).then(() => {
resolve(task.loaderDoneReturn!)
})
this.processingQueueHandle()
} else {
resolve({
...KnowledgeService.ERROR_LOADER_RETURN,
message: 'Unsupported item type',
messageSource: 'embedding'
})
}
})
.catch((err) => {
logger.error('Failed to add item:', err)
resolve({
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add item: ${err.message}`,
messageSource: 'embedding'
})
})
})
}
@TraceMethod({ spanName: 'remove', tag: 'Knowledge' })
public async remove(
_: Electron.IpcMainInvokeEvent,
{ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }
): Promise<void> {
const ragApplication = await this.getRagApplication(base)
logger.debug(`Remove Item UniqueId: ${uniqueId}`)
for (const id of uniqueIds) {
await ragApplication.deleteLoader(id)
}
}
@TraceMethod({ spanName: 'RagSearch', tag: 'Knowledge' })
public async search(
_: Electron.IpcMainInvokeEvent,
{ search, base }: { search: string; base: KnowledgeBaseParams }
): Promise<KnowledgeSearchResult[]> {
const ragApplication = await this.getRagApplication(base)
return await ragApplication.search(search)
}
@TraceMethod({ spanName: 'rerank', tag: 'Knowledge' })
public async rerank(
_: Electron.IpcMainInvokeEvent,
{ search, base, results }: { search: string; base: KnowledgeBaseParams; results: KnowledgeSearchResult[] }
): Promise<KnowledgeSearchResult[]> {
if (results.length === 0) {
return results
}
return await new Reranker(base).rerank(search, results)
}
public getStorageDir = (): string => {
return this.storageDir
}
private preprocessing = async (
file: FileMetadata,
base: KnowledgeBaseParams,
item: KnowledgeItem,
userId: string
): Promise<FileMetadata> => {
let fileToProcess: FileMetadata = file
if (base.preprocessProvider && file.ext.toLowerCase() === '.pdf') {
try {
const provider = new PreprocessProvider(base.preprocessProvider.provider, userId)
const filePath = fileStorage.getFilePathById(file)
// Check if file has already been preprocessed
const alreadyProcessed = await provider.checkIfAlreadyProcessed(file)
if (alreadyProcessed) {
logger.debug(`File already preprocess processed, using cached result: ${filePath}`)
return alreadyProcessed
}
// Execute preprocessing
logger.debug(`Starting preprocess processing for scanned PDF: ${filePath}`)
const { processedFile, quota } = await provider.parseFile(item.id, file)
fileToProcess = processedFile
const mainWindow = windowService.getMainWindow()
mainWindow?.webContents.send('file-preprocess-finished', {
itemId: item.id,
quota: quota
})
} catch (err) {
logger.error(`Preprocess processing failed: ${err}`)
// If preprocessing fails, use original file
// fileToProcess = file
throw new Error(`Preprocess processing failed: ${err}`)
}
}
return fileToProcess
}
public checkQuota = async (
_: Electron.IpcMainInvokeEvent,
base: KnowledgeBaseParams,
userId: string
): Promise<number> => {
try {
if (base.preprocessProvider && base.preprocessProvider.type === 'preprocess') {
const provider = new PreprocessProvider(base.preprocessProvider.provider, userId)
return await provider.checkQuota()
}
throw new Error('No preprocess provider configured')
} catch (err) {
logger.error(`Failed to check quota: ${err}`)
throw new Error(`Failed to check quota: ${err}`)
}
}
}
export default new KnowledgeService()

View File

@ -1,72 +0,0 @@
import { LoaderReturn } from '@shared/config/types'
import { KnowledgeBaseParams, KnowledgeItem, KnowledgeSearchResult } from '@types'
export interface KnowledgeBaseAddItemOptions {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload?: boolean
userId?: string
}
export interface KnowledgeBaseAddItemOptionsNonNullableAttribute {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload: boolean
userId: string
}
export interface EvaluateTaskWorkload {
workload: number
}
export type LoaderDoneReturn = LoaderReturn | null
export enum LoaderTaskItemState {
PENDING,
PROCESSING,
DONE
}
export interface LoaderTaskItem {
state: LoaderTaskItemState
task: () => Promise<unknown>
evaluateTaskWorkload: EvaluateTaskWorkload
}
export interface LoaderTask {
loaderTasks: LoaderTaskItem[]
loaderDoneReturn: LoaderDoneReturn
}
export interface LoaderTaskOfSet {
loaderTasks: Set<LoaderTaskItem>
loaderDoneReturn: LoaderDoneReturn
}
export interface QueueTaskItem {
taskPromise: () => Promise<unknown>
resolve: () => void
evaluateTaskWorkload: EvaluateTaskWorkload
}
export const loaderTaskIntoOfSet = (loaderTask: LoaderTask): LoaderTaskOfSet => {
return {
loaderTasks: new Set(loaderTask.loaderTasks),
loaderDoneReturn: loaderTask.loaderDoneReturn
}
}
export interface IKnowledgeFramework {
/** 为给定知识库初始化框架资源 */
initialize(base: KnowledgeBaseParams): Promise<void>
/** 重置知识库,删除其所有内容 */
reset(base: KnowledgeBaseParams): Promise<void>
/** 删除与知识库关联的资源,包括文件 */
delete(id: string): Promise<void>
/** 生成用于添加条目的任务对象,由队列处理 */
getLoaderTask(options: KnowledgeBaseAddItemOptionsNonNullableAttribute): LoaderTask
/** 从知识库中删除特定条目 */
remove(options: { uniqueIds: string[]; base: KnowledgeBaseParams }): Promise<void>
/** 搜索知识库 */
search(options: { search: string; base: KnowledgeBaseParams }): Promise<KnowledgeSearchResult[]>
}

View File

@ -1,48 +0,0 @@
import path from 'node:path'
import { KnowledgeBaseParams } from '@types'
import { app } from 'electron'
import { EmbedJsFramework } from './EmbedJsFramework'
import { IKnowledgeFramework } from './IKnowledgeFramework'
import { LangChainFramework } from './LangChainFramework'
class KnowledgeFrameworkFactory {
private static instance: KnowledgeFrameworkFactory
private frameworks: Map<string, IKnowledgeFramework> = new Map()
private storageDir: string
private constructor(storageDir: string) {
this.storageDir = storageDir
}
public static getInstance(storageDir: string): KnowledgeFrameworkFactory {
if (!KnowledgeFrameworkFactory.instance) {
KnowledgeFrameworkFactory.instance = new KnowledgeFrameworkFactory(storageDir)
}
return KnowledgeFrameworkFactory.instance
}
public getFramework(base: KnowledgeBaseParams): IKnowledgeFramework {
const frameworkType = base.framework || 'embedjs' // 如果未指定,默认为 embedjs
if (this.frameworks.has(frameworkType)) {
return this.frameworks.get(frameworkType)!
}
let framework: IKnowledgeFramework
switch (frameworkType) {
case 'langchain':
framework = new LangChainFramework(this.storageDir)
break
case 'embedjs':
default:
framework = new EmbedJsFramework(this.storageDir)
break
}
this.frameworks.set(frameworkType, framework)
return framework
}
}
export const knowledgeFrameworkFactory = KnowledgeFrameworkFactory.getInstance(
path.join(app.getPath('userData'), 'Data', 'KnowledgeBase')
)

View File

@ -1,190 +0,0 @@
import * as fs from 'node:fs'
import path from 'node:path'
import { loggerService } from '@logger'
import { preprocessingService } from '@main/knowledge/preprocess/PreprocessingService'
import Reranker from '@main/knowledge/reranker/Reranker'
import { TraceMethod } from '@mcp-trace/trace-core'
import { MB } from '@shared/config/constant'
import { LoaderReturn } from '@shared/config/types'
import { KnowledgeBaseParams, KnowledgeSearchResult } from '@types'
import { app } from 'electron'
import {
KnowledgeBaseAddItemOptions,
LoaderTask,
loaderTaskIntoOfSet,
LoaderTaskItemState,
LoaderTaskOfSet,
QueueTaskItem
} from './IKnowledgeFramework'
import { knowledgeFrameworkFactory } from './KnowledgeFrameworkFactory'
const logger = loggerService.withContext('MainKnowledgeService')
class KnowledgeService {
private storageDir = path.join(app.getPath('userData'), 'Data', 'KnowledgeBase')
private workload = 0
private processingItemCount = 0
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
private static MAXIMUM_WORKLOAD = 80 * MB
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
private static ERROR_LOADER_RETURN: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
uniqueIds: [''],
loaderType: '',
status: 'failed'
}
constructor() {
this.initStorageDir()
}
private initStorageDir = (): void => {
if (!fs.existsSync(this.storageDir)) {
fs.mkdirSync(this.storageDir, { recursive: true })
}
}
private maximumLoad() {
return (
this.processingItemCount >= KnowledgeService.MAXIMUM_PROCESSING_ITEM_COUNT ||
this.workload >= KnowledgeService.MAXIMUM_WORKLOAD
)
}
private processingQueueHandle() {
const getSubtasksUntilMaximumLoad = (): QueueTaskItem[] => {
const queueTaskList: QueueTaskItem[] = []
that: for (const [task, resolve] of this.knowledgeItemProcessingQueueMappingPromise) {
for (const item of task.loaderTasks) {
if (this.maximumLoad()) {
break that
}
const { state, task: taskPromise, evaluateTaskWorkload } = item
if (state !== LoaderTaskItemState.PENDING) {
continue
}
const { workload } = evaluateTaskWorkload
this.workload += workload
this.processingItemCount += 1
item.state = LoaderTaskItemState.PROCESSING
queueTaskList.push({
taskPromise: () =>
taskPromise().then(() => {
this.workload -= workload
this.processingItemCount -= 1
task.loaderTasks.delete(item)
if (task.loaderTasks.size === 0) {
this.knowledgeItemProcessingQueueMappingPromise.delete(task)
resolve()
}
this.processingQueueHandle()
}),
resolve: () => {},
evaluateTaskWorkload
})
}
}
return queueTaskList
}
const subTasks = getSubtasksUntilMaximumLoad()
if (subTasks.length > 0) {
const subTaskPromises = subTasks.map(({ taskPromise }) => taskPromise())
Promise.all(subTaskPromises).then(() => {
subTasks.forEach(({ resolve }) => resolve())
})
}
}
private appendProcessingQueue(task: LoaderTask): Promise<LoaderReturn> {
return new Promise((resolve) => {
this.knowledgeItemProcessingQueueMappingPromise.set(loaderTaskIntoOfSet(task), () => {
resolve(task.loaderDoneReturn!)
})
})
}
public async create(_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> {
logger.info(`Creating knowledge base: ${JSON.stringify(base)}`)
const framework = knowledgeFrameworkFactory.getFramework(base)
await framework.initialize(base)
}
public async reset(_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> {
const framework = knowledgeFrameworkFactory.getFramework(base)
await framework.reset(base)
}
public async delete(_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams, id: string): Promise<void> {
logger.info(`Deleting knowledge base: ${JSON.stringify(base)}`)
const framework = knowledgeFrameworkFactory.getFramework(base)
await framework.delete(id)
}
public add = async (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise<LoaderReturn> => {
logger.info(`Adding item to knowledge base: ${JSON.stringify(options)}`)
return new Promise((resolve) => {
const { base, item, forceReload = false, userId = '' } = options
const framework = knowledgeFrameworkFactory.getFramework(base)
const task = framework.getLoaderTask({ base, item, forceReload, userId })
if (task) {
this.appendProcessingQueue(task).then(() => {
resolve(task.loaderDoneReturn!)
})
this.processingQueueHandle()
} else {
resolve({
...KnowledgeService.ERROR_LOADER_RETURN,
message: 'Unsupported item type',
messageSource: 'embedding'
})
}
})
}
public async remove(
_: Electron.IpcMainInvokeEvent,
{ uniqueIds, base }: { uniqueIds: string[]; base: KnowledgeBaseParams }
): Promise<void> {
logger.info(`Removing items from knowledge base: ${JSON.stringify({ uniqueIds, base })}`)
const framework = knowledgeFrameworkFactory.getFramework(base)
await framework.remove({ uniqueIds, base })
}
public async search(
_: Electron.IpcMainInvokeEvent,
{ search, base }: { search: string; base: KnowledgeBaseParams }
): Promise<KnowledgeSearchResult[]> {
logger.info(`Searching knowledge base: ${JSON.stringify({ search, base })}`)
const framework = knowledgeFrameworkFactory.getFramework(base)
return framework.search({ search, base })
}
@TraceMethod({ spanName: 'rerank', tag: 'Knowledge' })
public async rerank(
_: Electron.IpcMainInvokeEvent,
{ search, base, results }: { search: string; base: KnowledgeBaseParams; results: KnowledgeSearchResult[] }
): Promise<KnowledgeSearchResult[]> {
logger.info(`Reranking knowledge base: ${JSON.stringify({ search, base, results })}`)
if (results.length === 0) {
return results
}
return await new Reranker(base).rerank(search, results)
}
public getStorageDir = (): string => {
return this.storageDir
}
public async checkQuota(_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams, userId: string): Promise<number> {
return preprocessingService.checkQuota(base, userId)
}
}
export default new KnowledgeService()

View File

@ -1,557 +0,0 @@
import * as fs from 'node:fs'
import path from 'node:path'
import { FaissStore } from '@langchain/community/vectorstores/faiss'
import type { Document } from '@langchain/core/documents'
import { loggerService } from '@logger'
import TextEmbeddings from '@main/knowledge/langchain/embeddings/TextEmbeddings'
import {
addFileLoader,
addNoteLoader,
addSitemapLoader,
addVideoLoader,
addWebLoader
} from '@main/knowledge/langchain/loader'
import { RetrieverFactory } from '@main/knowledge/langchain/retriever'
import { preprocessingService } from '@main/knowledge/preprocess/PreprocessingService'
import { getAllFiles } from '@main/utils/file'
import { getUrlSource } from '@main/utils/knowledge'
import { MB } from '@shared/config/constant'
import { LoaderReturn } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import {
FileMetadata,
isKnowledgeDirectoryItem,
isKnowledgeFileItem,
isKnowledgeNoteItem,
isKnowledgeSitemapItem,
isKnowledgeUrlItem,
isKnowledgeVideoItem,
KnowledgeBaseParams,
KnowledgeSearchResult
} from '@types'
import { uuidv4 } from 'zod'
import { windowService } from '../WindowService'
import {
IKnowledgeFramework,
KnowledgeBaseAddItemOptionsNonNullableAttribute,
LoaderDoneReturn,
LoaderTask,
LoaderTaskItem,
LoaderTaskItemState
} from './IKnowledgeFramework'
const logger = loggerService.withContext('LangChainFramework')
export class LangChainFramework implements IKnowledgeFramework {
private storageDir: string
private static ERROR_LOADER_RETURN: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
uniqueIds: [''],
loaderType: '',
status: 'failed'
}
constructor(storageDir: string) {
this.storageDir = storageDir
this.initStorageDir()
}
private initStorageDir = (): void => {
if (!fs.existsSync(this.storageDir)) {
fs.mkdirSync(this.storageDir, { recursive: true })
}
}
private async createDatabase(base: KnowledgeBaseParams): Promise<void> {
const dbPath = path.join(this.storageDir, base.id)
const embeddings = this.getEmbeddings(base)
const vectorStore = new FaissStore(embeddings, {})
const mockDocument: Document = {
pageContent: 'Create Database Document',
metadata: {}
}
await vectorStore.addDocuments([mockDocument], { ids: ['1'] })
await vectorStore.save(dbPath)
await vectorStore.delete({ ids: ['1'] })
await vectorStore.save(dbPath)
}
private getEmbeddings(base: KnowledgeBaseParams): TextEmbeddings {
return new TextEmbeddings({
embedApiClient: base.embedApiClient,
dimensions: base.dimensions
})
}
private async getVectorStore(base: KnowledgeBaseParams): Promise<FaissStore> {
const embeddings = this.getEmbeddings(base)
const vectorStore = await FaissStore.load(path.join(this.storageDir, base.id), embeddings)
return vectorStore
}
async initialize(base: KnowledgeBaseParams): Promise<void> {
await this.createDatabase(base)
}
async reset(base: KnowledgeBaseParams): Promise<void> {
const dbPath = path.join(this.storageDir, base.id)
if (fs.existsSync(dbPath)) {
fs.rmSync(dbPath, { recursive: true })
}
// 立即重建空索引,避免随后加载时报错
await this.createDatabase(base)
}
async delete(id: string): Promise<void> {
const dbPath = path.join(this.storageDir, id)
if (fs.existsSync(dbPath)) {
fs.rmSync(dbPath, { recursive: true })
}
}
getLoaderTask(options: KnowledgeBaseAddItemOptionsNonNullableAttribute): LoaderTask {
const { item } = options
const getStore = () => this.getVectorStore(options.base)
switch (item.type) {
case 'file':
return this.fileTask(getStore, options)
case 'directory':
return this.directoryTask(getStore, options)
case 'url':
return this.urlTask(getStore, options)
case 'sitemap':
return this.sitemapTask(getStore, options)
case 'note':
return this.noteTask(getStore, options)
case 'video':
return this.videoTask(getStore, options)
default:
return {
loaderTasks: [],
loaderDoneReturn: null
}
}
}
async remove(options: { uniqueIds: string[]; base: KnowledgeBaseParams }): Promise<void> {
const { uniqueIds, base } = options
const vectorStore = await this.getVectorStore(base)
logger.info(`[ KnowledgeService Remove Item UniqueIds: ${uniqueIds}]`)
await vectorStore.delete({ ids: uniqueIds })
await vectorStore.save(path.join(this.storageDir, base.id))
}
async search(options: { search: string; base: KnowledgeBaseParams }): Promise<KnowledgeSearchResult[]> {
const { search, base } = options
logger.info(`search base: ${JSON.stringify(base)}`)
try {
const vectorStore = await this.getVectorStore(base)
// 如果是 bm25 或 hybrid 模式,则从数据库获取所有文档
const documents: Document[] = await this.getAllDocuments(base)
if (documents.length === 0) return []
const retrieverFactory = new RetrieverFactory()
const retriever = retrieverFactory.createRetriever(base, vectorStore, documents)
const results = await retriever.invoke(search)
logger.info(`Search Results: ${JSON.stringify(results)}`)
// VectorStoreRetriever 和 EnsembleRetriever 会将分数附加到 metadata.score
// BM25Retriever 默认不返回分数,所以我们需要处理这种情况
return results.map((item) => {
return {
pageContent: item.pageContent,
metadata: item.metadata,
// 如果 metadata 中没有 score提供一个默认值
score: typeof item.metadata.score === 'number' ? item.metadata.score : 0
}
})
} catch (error: any) {
logger.error(`Error during search in knowledge base ${base.id}: ${error.message}`)
return []
}
}
private fileTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, userId } = options
if (!isKnowledgeFileItem(item)) {
logger.error(`Invalid item type for fileTask: expected 'file', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'file', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const file = item.content
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
try {
const vectorStore = await getVectorStore()
// 添加预处理逻辑
const fileToProcess: FileMetadata = await preprocessingService.preprocessFile(file, base, item, userId)
// 使用处理后的文件进行加载
return addFileLoader(base, vectorStore, fileToProcess)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Error in addFileLoader for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
} catch (e: any) {
logger.error(`Preprocessing failed for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'preprocess'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
}
},
evaluateTaskWorkload: { workload: file.size }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private directoryTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeDirectoryItem(item)) {
logger.error(`Invalid item type for directoryTask: expected 'directory', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'directory', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const directory = item.content
const files = getAllFiles(directory)
const totalFiles = files.length
let processedFiles = 0
const sendDirectoryProcessingPercent = (totalFiles: number, processedFiles: number) => {
const mainWindow = windowService.getMainWindow()
mainWindow?.webContents.send(IpcChannel.DirectoryProcessingPercent, {
itemId: item.id,
percent: (processedFiles / totalFiles) * 100
})
}
const loaderDoneReturn: LoaderDoneReturn = {
entriesAdded: 0,
uniqueId: `DirectoryLoader_${uuidv4()}`,
uniqueIds: [],
loaderType: 'DirectoryLoader'
}
const loaderTasks: LoaderTaskItem[] = []
for (const file of files) {
loaderTasks.push({
state: LoaderTaskItemState.PENDING,
task: async () => {
const vectorStore = await getVectorStore()
return addFileLoader(base, vectorStore, file)
.then((result) => {
loaderDoneReturn.entriesAdded += 1
processedFiles += 1
sendDirectoryProcessingPercent(totalFiles, processedFiles)
loaderDoneReturn.uniqueIds.push(result.uniqueId)
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((err) => {
logger.error(err)
return {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Failed to add dir loader: ${err.message}`,
messageSource: 'embedding'
}
})
},
evaluateTaskWorkload: { workload: file.size }
})
}
return {
loaderTasks,
loaderDoneReturn
}
}
private urlTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeUrlItem(item)) {
logger.error(`Invalid item type for urlTask: expected 'url', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'url', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const url = item.content
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
// 使用处理后的网页进行加载
const vectorStore = await getVectorStore()
return addWebLoader(base, vectorStore, url, getUrlSource(url))
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Error in addWebLoader for ${url}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
},
evaluateTaskWorkload: { workload: 2 * MB }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private sitemapTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeSitemapItem(item)) {
logger.error(`Invalid item type for sitemapTask: expected 'sitemap', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'sitemap', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const url = item.content
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
// 使用处理后的网页进行加载
const vectorStore = await getVectorStore()
return addSitemapLoader(base, vectorStore, url)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Error in addWebLoader for ${url}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
},
evaluateTaskWorkload: { workload: 2 * MB }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private noteTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeNoteItem(item)) {
logger.error(`Invalid item type for noteTask: expected 'note', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'note', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const content = item.content
const sourceUrl = item.sourceUrl ?? ''
logger.info(`noteTask ${content}, ${sourceUrl}`)
const encoder = new TextEncoder()
const contentBytes = encoder.encode(content)
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
// 使用处理后的笔记进行加载
const vectorStore = await getVectorStore()
return addNoteLoader(base, vectorStore, content, sourceUrl)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Error in addNoteLoader for ${sourceUrl}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
},
evaluateTaskWorkload: { workload: contentBytes.length }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private videoTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeVideoItem(item)) {
logger.error(`Invalid item type for videoTask: expected 'video', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'video', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const files = item.content
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
const vectorStore = await getVectorStore()
return addVideoLoader(base, vectorStore, files)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Preprocessing failed for ${files[0].name}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'preprocess'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
},
evaluateTaskWorkload: { workload: files[0].size }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private async getAllDocuments(base: KnowledgeBaseParams): Promise<Document[]> {
logger.info(`Fetching all documents from database for knowledge base: ${base.id}`)
try {
const results = (await this.getVectorStore(base)).docstore._docs
const documents: Document[] = Array.from(results.values())
logger.info(`Fetched ${documents.length} documents for BM25/Hybrid retriever.`)
return documents
} catch (e) {
logger.error(`Could not fetch documents from database for base ${base.id}: ${e}`)
// 如果表不存在或查询失败,返回空数组
return []
}
}
}

View File

@ -24,7 +24,6 @@ import {
KnowledgeBase,
KnowledgeItem,
KnowledgeNoteItem,
MigrationModeEnum,
ProcessingStatus
} from '@renderer/types'
import { runAsyncFunction, uuid } from '@renderer/utils'
@ -231,7 +230,7 @@ export const useKnowledge = (baseId: string) => {
}
// 迁移知识库(保留原知识库)
const migrateBase = async (newBase: KnowledgeBase, mode: MigrationModeEnum) => {
const migrateBase = async (newBase: KnowledgeBase) => {
if (!base) return
const timestamp = dayjs().format('YYMMDDHHmmss')
@ -244,14 +243,9 @@ export const useKnowledge = (baseId: string) => {
name: newName,
created_at: Date.now(),
updated_at: Date.now(),
items: [],
framework: mode === MigrationModeEnum.MigrationToLangChain ? 'langchain' : base.framework
items: []
} satisfies KnowledgeBase
if (mode === MigrationModeEnum.MigrationToLangChain) {
await window.api.knowledgeBase.create(getKnowledgeBaseParams(migratedBase))
}
dispatch(addBase(migratedBase))
const files: FileMetadata[] = []

View File

@ -14,11 +14,7 @@ const createInitialKnowledgeBase = (): KnowledgeBase => ({
items: [],
created_at: Date.now(),
updated_at: Date.now(),
version: 1,
framework: 'langchain',
retriever: {
mode: 'hybrid'
}
version: 1
})
/**

View File

@ -1063,10 +1063,6 @@
"error": {
"failed": "Migration failed"
},
"migrate_to_langchain": {
"content": "The knowledge base migration does not delete the old knowledge base but creates a copy and reprocesses all entries, which may consume a significant number of tokens. Please proceed with caution.",
"info": "The knowledge base architecture has been updated. Click to migrate to the new architecture."
},
"source_dimensions": "Source Dimensions",
"source_model": "Source Model",
"target_dimensions": "Target Dimensions",
@ -1085,20 +1081,6 @@
"quota": "{{name}} Left Quota: {{quota}}",
"quota_infinity": "{{name}} Quota: Unlimited",
"rename": "Rename",
"retriever": "Retrieve mode",
"retriever_bm25": "full-text search",
"retriever_bm25_desc": "Search for documents based on keyword relevance and frequency.",
"retriever_hybrid": "Hybrid Search (Recommended)",
"retriever_hybrid_desc": "Combine keyword search and semantic search to achieve optimal retrieval accuracy.",
"retriever_hybrid_weight": {
"bm25": "full text",
"recommended": "recommend",
"title": "Hybrid Search Weight Adjustment (Full-text/Vector)",
"vector": "vector"
},
"retriever_tooltip": "Using different retrieval methods to search the knowledge base",
"retriever_vector": "vector search",
"retriever_vector_desc": "Retrieve documents based on semantic similarity and meaning.",
"search": "Search knowledge base",
"search_placeholder": "Enter text to search",
"settings": {

View File

@ -1064,10 +1064,6 @@
"error": {
"failed": "迁移失败"
},
"migrate_to_langchain": {
"content": "知识库迁移不会删除旧知识库,而是创建一个副本之后重新处理所有知识库条目,可能消耗大量 tokens请谨慎操作。",
"info": "知识库架构已更新,点击迁移到新架构"
},
"source_dimensions": "源维度",
"source_model": "源模型",
"target_dimensions": "目标维度",
@ -1086,20 +1082,6 @@
"quota": "{{name}} 剩余额度:{{quota}}",
"quota_infinity": "{{name}} 剩余额度:无限制",
"rename": "重命名",
"retriever": "检索模式",
"retriever_bm25": "全文搜索",
"retriever_bm25_desc": "根据关键字的相关性和频率查找文档。",
"retriever_hybrid": "混合搜索 (推荐)",
"retriever_hybrid_desc": "结合关键词搜索和语义搜索,以实现最佳检索准确性。",
"retriever_hybrid_weight": {
"bm25": "全文",
"recommended": "推荐",
"title": "混合搜索权重调整 (全文/向量)",
"vector": "向量"
},
"retriever_tooltip": "使用不同的检索方式检索知识库",
"retriever_vector": "向量搜索",
"retriever_vector_desc": "根据语义相似性和含义查找文档。",
"search": "搜索知识库",
"search_placeholder": "输入查询内容",
"settings": {

View File

@ -1063,10 +1063,6 @@
"error": {
"failed": "遷移失敗"
},
"migrate_to_langchain": {
"content": "知識庫遷移不會刪除舊知識庫,而是建立一個副本後重新處理所有知識庫條目,可能消耗大量 tokens請謹慎操作。",
"info": "知識庫架構已更新,點擊遷移到新架構"
},
"source_dimensions": "源維度",
"source_model": "源模型",
"target_dimensions": "目標維度",
@ -1085,20 +1081,6 @@
"quota": "{{name}} 剩餘配額:{{quota}}",
"quota_infinity": "{{name}} 配額:無限制",
"rename": "重新命名",
"retriever": "搜尋模式",
"retriever_bm25": "全文搜尋",
"retriever_bm25_desc": "根據關鍵字的相關性和頻率查找文件。",
"retriever_hybrid": "混合搜尋(推薦)",
"retriever_hybrid_desc": "結合關鍵字搜索和語義搜索,以實現最佳檢索準確性。",
"retriever_hybrid_weight": {
"bm25": "全文",
"recommended": "推薦",
"title": "混合搜尋權重調整 (全文/向量)",
"vector": "向量"
},
"retriever_tooltip": "使用不同的檢索方式檢索知識庫",
"retriever_vector": "向量搜尋",
"retriever_vector_desc": "根據語意相似性和含義查找文件。",
"search": "搜尋知識庫",
"search_placeholder": "輸入查詢內容",
"settings": {

View File

@ -1063,10 +1063,6 @@
"error": {
"failed": "Αποτυχία μεταφοράς"
},
"migrate_to_langchain": {
"content": "Η μετανάστευση της βάσης γνώσεων δεν διαγράφει την παλιά βάση γνώσεων, αλλά δημιουργεί ένα αντίγραφο και στη συνέχεια επεξεργάζεται ξανά όλες τις εγγραφές της βάσης γνώσεων, κάτι που μπορεί να καταναλώσει μεγάλο αριθμό tokens, οπότε ενεργήστε με προσοχή.",
"info": "Η δομή της βάσης γνώσεων έχει ενημερωθεί, κάντε κλικ για μετεγκατάσταση στη νέα δομή"
},
"source_dimensions": "Πηγαίες διαστάσεις",
"source_model": "Πηγαίο μοντέλο",
"target_dimensions": "Προορισμένες διαστάσεις",
@ -1085,20 +1081,6 @@
"quota": "Διαθέσιμο όριο για {{name}}: {{quota}}",
"quota_infinity": "Διαθέσιμο όριο για {{name}}: Απεριόριστο",
"rename": "Μετονομασία",
"retriever": "Λειτουργία αναζήτησης",
"retriever_bm25": "Πλήρης αναζήτηση κειμένου",
"retriever_bm25_desc": "Αναζήτηση εγγράφων με βάση τη σχετικότητα και τη συχνότητα των λέξεων-κλειδιών.",
"retriever_hybrid": "Μικτή αναζήτηση (συνιστάται)",
"retriever_hybrid_desc": "Συνδυάστε την αναζήτηση με λέξεις-κλειδιά και την σημασιολογική αναζήτηση για την επίτευξη της βέλτιστης ακρίβειας ανάκτησης.",
"retriever_hybrid_weight": {
"bm25": "ολόκληρο το κείμενο",
"recommended": "Προτείνω",
"title": "Προσαρμογή βάρους μικτής αναζήτησης (πλήρες κείμενο/διανυσματικό)",
"vector": "διάνυσμα"
},
"retriever_tooltip": "Χρησιμοποιώντας διαφορετικές μεθόδους αναζήτησης για αναζήτηση στη βάση γνώσης",
"retriever_vector": "Αναζήτηση διανυσμάτων",
"retriever_vector_desc": "Βρείτε έγγραφα βάση της σημασιολογικής ομοιότητας και της έννοιας.",
"search": "Αναζήτηση βάσης γνώσεων",
"search_placeholder": "Εισάγετε την αναζήτηση",
"settings": {

View File

@ -1063,10 +1063,6 @@
"error": {
"failed": "Error en la migración"
},
"migrate_to_langchain": {
"content": "La migración de la base de conocimiento no elimina la base antigua, sino que crea una copia y luego reprocesa todas las entradas, lo que puede consumir una gran cantidad de tokens. Proceda con precaución.",
"info": "La estructura de la base de conocimiento ha sido actualizada. Haz clic para migrar a la nueva estructura."
},
"source_dimensions": "Dimensiones de origen",
"source_model": "Modelo de origen",
"target_dimensions": "Dimensiones de destino",
@ -1085,20 +1081,6 @@
"quota": "Cupo restante de {{name}}: {{quota}}",
"quota_infinity": "Cupo restante de {{name}}: ilimitado",
"rename": "Renombrar",
"retriever": "modo de recuperación",
"retriever_bm25": "búsqueda de texto completo",
"retriever_bm25_desc": "Encontrar documentos basados en la relevancia y frecuencia de las palabras clave.",
"retriever_hybrid": "Búsqueda híbrida (recomendada)",
"retriever_hybrid_desc": "Combinar la búsqueda por palabras clave con la búsqueda semántica para lograr la máxima precisión en la recuperación.",
"retriever_hybrid_weight": {
"bm25": "texto completo",
"recommended": "Recomendado",
"title": "Ajuste de ponderación en búsqueda híbrida (texto completo/vectorial)",
"vector": "vector"
},
"retriever_tooltip": "Usar diferentes métodos de búsqueda para consultar la base de conocimiento",
"retriever_vector": "búsqueda vectorial",
"retriever_vector_desc": "Buscar documentos según similitud semántica y significado.",
"search": "Buscar en la base de conocimientos",
"search_placeholder": "Ingrese el contenido de la consulta",
"settings": {

View File

@ -1063,10 +1063,6 @@
"error": {
"failed": "Erreur lors de la migration"
},
"migrate_to_langchain": {
"content": "La migration de la base de connaissances ne supprime pas l'ancienne base, mais crée une copie avant de retraiter tous les éléments, ce qui peut consommer un grand nombre de tokens. Veuillez agir avec prudence.",
"info": "L'architecture de la base de connaissances a été mise à jour, cliquez pour migrer vers la nouvelle architecture."
},
"source_dimensions": "Dimensions source",
"source_model": "Modèle source",
"target_dimensions": "Dimensions cible",
@ -1085,20 +1081,6 @@
"quota": "Quota restant pour {{name}} : {{quota}}",
"quota_infinity": "Quota restant pour {{name}} : illimité",
"rename": "Renommer",
"retriever": "Mode de recherche",
"retriever_bm25": "Recherche plein texte",
"retriever_bm25_desc": "Rechercher des documents en fonction de la pertinence et de la fréquence des mots-clés.",
"retriever_hybrid": "Recherche hybride (recommandé)",
"retriever_hybrid_desc": "Associez la recherche par mots-clés et la recherche sémantique pour une précision de recherche optimale.",
"retriever_hybrid_weight": {
"bm25": "texte intégral",
"recommended": "Recommandé",
"title": "Ajustement des pondérations de recherche hybride (texte intégral/vecteur)",
"vector": "vecteur"
},
"retriever_tooltip": "Utiliser différentes méthodes de recherche pour interroger la base de connaissances",
"retriever_vector": "Recherche vectorielle",
"retriever_vector_desc": "Rechercher des documents selon la similarité sémantique et le sens.",
"search": "Rechercher dans la base de connaissances",
"search_placeholder": "Entrez votre requête",
"settings": {

View File

@ -1063,10 +1063,6 @@
"error": {
"failed": "移行が失敗しました"
},
"migrate_to_langchain": {
"content": "ナレッジベースの移行は旧ナレッジベースを削除せず、すべてのエントリーを再処理したコピーを作成します。大量のトークンを消費する可能性があるため、操作には十分注意してください。",
"info": "ナレッジベースのアーキテクチャが更新されました、新しいアーキテクチャに移行するにはクリックしてください"
},
"source_dimensions": "ソース次元",
"source_model": "ソースモデル",
"target_dimensions": "ターゲット次元",
@ -1085,20 +1081,6 @@
"quota": "{{name}} 残りクォータ: {{quota}}",
"quota_infinity": "{{name}} クォータ: 無制限",
"rename": "名前を変更",
"retriever": "検索モード",
"retriever_bm25": "全文検索",
"retriever_bm25_desc": "キーワードの関連性と頻度に基づいてドキュメントを検索します。",
"retriever_hybrid": "ハイブリッド検索(おすすめ)",
"retriever_hybrid_desc": "キーワード検索と意味検索を組み合わせて、最高の検索精度を実現します。",
"retriever_hybrid_weight": {
"bm25": "全文(ぜんぶん)",
"recommended": "おすすめ",
"title": "ハイブリッド検索の重み付け調整 (全文/ベクトル)",
"vector": "ベクトル"
},
"retriever_tooltip": "異なる検索方法を使用してナレッジベースを検索する",
"retriever_vector": "ベクトル検索",
"retriever_vector_desc": "意味的な類似性と意味に基づいて文書を検索します。",
"search": "ナレッジベースを検索",
"search_placeholder": "検索するテキストを入力",
"settings": {

View File

@ -1063,10 +1063,6 @@
"error": {
"failed": "Falha na migração"
},
"migrate_to_langchain": {
"content": "A migração da base de conhecimento não elimina a base antiga, mas sim cria uma cópia e reprocessa todas as entradas, o que pode consumir muitos tokens. Por favor, proceda com cautela.",
"info": "A arquitetura da base de conhecimento foi atualizada, clique para migrar para a nova arquitetura."
},
"source_dimensions": "Dimensões de origem",
"source_model": "Modelo de origem",
"target_dimensions": "Dimensões de destino",
@ -1085,20 +1081,6 @@
"quota": "Cota restante de {{name}}: {{quota}}",
"quota_infinity": "Cota restante de {{name}}: ilimitada",
"rename": "Renomear",
"retriever": "Modo de pesquisa",
"retriever_bm25": "pesquisa de texto completo",
"retriever_bm25_desc": "Pesquisar documentos com base na relevância e frequência das palavras-chave.",
"retriever_hybrid": "Pesquisa híbrida (recomendada)",
"retriever_hybrid_desc": "Combine a pesquisa por palavras-chave com a pesquisa semântica para alcançar a melhor precisão de recuperação.",
"retriever_hybrid_weight": {
"bm25": "texto integral",
"recommended": "Recomendar",
"title": "Ajuste de ponderação de pesquisa híbrida (texto completo/vetorial)",
"vector": "vetor"
},
"retriever_tooltip": "Utilize diferentes métodos de pesquisa para consultar a base de conhecimento.",
"retriever_vector": "pesquisa vetorial",
"retriever_vector_desc": "Encontrar documentos com base na similaridade semântica e significado.",
"search": "Pesquisar repositório de conhecimento",
"search_placeholder": "Digite o conteúdo da consulta",
"settings": {

View File

@ -1063,10 +1063,6 @@
"error": {
"failed": "Миграция завершена с ошибками"
},
"migrate_to_langchain": {
"content": "Миграция базы знаний не удаляет старую базу, а создает ее копию с последующей повторной обработкой всех записей, что может потребовать значительного количества токенов. Пожалуйста, действуйте осторожно.",
"info": "Архитектура базы знаний обновлена, нажмите, чтобы перейти на новую архитектуру"
},
"source_dimensions": "Исходная размерность",
"source_model": "Исходная модель",
"target_dimensions": "Целевая размерность",
@ -1085,20 +1081,6 @@
"quota": "{{name}} Остаток квоты: {{quota}}",
"quota_infinity": "{{name}} Квота: Не ограничена",
"rename": "Переименовать",
"retriever": "Режим поиска",
"retriever_bm25": "полнотекстовый поиск",
"retriever_bm25_desc": "Поиск документов на основе релевантности и частоты ключевых слов.",
"retriever_hybrid": "Гибридный поиск (рекомендуется)",
"retriever_hybrid_desc": "Сочетание поиска по ключевым словам и семантического поиска для достижения оптимальной точности поиска.",
"retriever_hybrid_weight": {
"bm25": "Полный текст",
"recommended": "рекомендовать",
"title": "Регулировка весов гибридного поиска (полнотекстовый/векторный)",
"vector": "вектор"
},
"retriever_tooltip": "Использование различных методов поиска в базе знаний",
"retriever_vector": "векторный поиск",
"retriever_vector_desc": "Поиск документов по семантическому сходству и смыслу.",
"search": "Поиск в базе знаний",
"search_placeholder": "Введите текст для поиска",
"settings": {

View File

@ -14,7 +14,6 @@ import styled from 'styled-components'
import EditKnowledgeBasePopup from './components/EditKnowledgeBasePopup'
import KnowledgeSearchPopup from './components/KnowledgeSearchPopup'
import MigrationInfoTag from './components/MigrationInfoTag'
import QuotaTag from './components/QuotaTag'
import KnowledgeDirectories from './items/KnowledgeDirectories'
import KnowledgeFiles from './items/KnowledgeFiles'
@ -109,13 +108,14 @@ const KnowledgeContent: FC<KnowledgeContentProps> = ({ selectedBase }) => {
content: <KnowledgeSitemaps selectedBase={selectedBase} />,
show: true
},
// 暂时不显示,后续实现
{
key: 'videos',
title: t('knowledge.videos'),
icon: activeKey === 'videos' ? <Video size={16} color="var(--color-primary)" /> : <Video size={16} />,
items: videoItems,
content: <KnowledgeVideos selectedBase={selectedBase} />,
show: base?.framework === 'langchain'
show: false
}
]
@ -162,7 +162,6 @@ const KnowledgeContent: FC<KnowledgeContentProps> = ({ selectedBase }) => {
{base.preprocessProvider && base.preprocessProvider.type === 'preprocess' && (
<QuotaTag base={base} providerId={base.preprocessProvider?.provider.id} quota={quota} />
)}
{base.framework !== 'langchain' && <MigrationInfoTag base={base} />}
</div>
</ModelInfo>
<HStack gap={8} alignItems="center">

View File

@ -78,7 +78,6 @@ function createKnowledgeBase(overrides: Partial<KnowledgeBase> = {}): KnowledgeB
chunkSize: 500,
chunkOverlap: 200,
threshold: 0.5,
framework: 'langchain',
...overrides
}
}

View File

@ -137,24 +137,6 @@ vi.mock('antd', () => ({
{children}
</select>
),
Segmented: ({ value, onChange, options, style }: any) => (
<div data-testid="retriever-segmented" style={style}>
{options?.map((option: any) => (
<button
key={option.value}
type="button"
data-testid={`segmented-option-${option.value}`}
onClick={() => onChange?.(option.value)}
data-active={value === option.value}
style={{
backgroundColor: value === option.value ? '#1677ff' : '#fff',
color: value === option.value ? '#fff' : '#000'
}}>
{option.label}
</button>
))}
</div>
),
Slider: ({ value, onChange, min, max, step, marks, style }: any) => {
// Determine test ID based on slider characteristics
const isWeightSlider = min === 0 && max === 1 && step === 0.1
@ -193,14 +175,10 @@ function createKnowledgeBase(overrides: Partial<KnowledgeBase> = {}): KnowledgeB
id: 'test-base-id',
name: 'Test Knowledge Base',
model: defaultModel,
retriever: {
mode: 'hybrid'
},
items: [],
created_at: Date.now(),
updated_at: Date.now(),
version: 1,
framework: 'langchain',
...overrides
}
}
@ -319,42 +297,6 @@ describe('GeneralSettingsPanel', () => {
expect(mockSetNewBase).toHaveBeenCalledWith(expect.any(Function))
})
it('should handle hybrid weight change', async () => {
renderComponent()
const weightSlider = screen.getByTestId('weight-slider')
fireEvent.change(weightSlider, { target: { value: '0.7' } })
expect(mockSetNewBase).toHaveBeenCalledWith({
...mockBase,
retriever: {
...mockBase.retriever,
mode: 'hybrid',
weight: 0.7
}
})
})
it('should handle retriever selection change', async () => {
renderComponent()
// Test clicking on hybrid retriever option
const hybridOption = screen.getByTestId('segmented-option-hybrid')
await user.click(hybridOption)
expect(mockSetNewBase).toHaveBeenCalledWith({
...mockBase,
retriever: { mode: 'hybrid' }
})
})
it('should not render retriever segmented when framework is embedjs', () => {
const baseWithEmbedjs = createKnowledgeBase({ framework: 'embedjs' })
renderComponent({ newBase: baseWithEmbedjs })
expect(screen.queryByTestId('retriever-segmented')).not.toBeInTheDocument()
})
it('should disable dimension input when no model is selected', () => {
const baseWithoutModel = createKnowledgeBase({ model: undefined as any })
renderComponent({ newBase: baseWithoutModel })

View File

@ -170,69 +170,6 @@ exports[`GeneralSettingsPanel > basic rendering > should match snapshot 1`] = `
</option>
</select>
</div>
<div
class="c1"
>
<div
class="settings-label"
>
knowledge.retriever
<span
data-placement="right"
data-testid="info-tooltip"
title="knowledge.retriever_tooltip"
>
</span>
</div>
<div
data-testid="retriever-segmented"
>
<button
data-active="true"
data-testid="segmented-option-hybrid"
style="background-color: rgb(22, 119, 255); color: rgb(255, 255, 255);"
type="button"
>
knowledge.retriever_hybrid
</button>
<button
data-active="false"
data-testid="segmented-option-vector"
style="background-color: rgb(255, 255, 255); color: rgb(0, 0, 0);"
type="button"
>
knowledge.retriever_vector
</button>
<button
data-active="false"
data-testid="segmented-option-bm25"
style="background-color: rgb(255, 255, 255); color: rgb(0, 0, 0);"
type="button"
>
knowledge.retriever_bm25
</button>
</div>
</div>
<div
class="c1"
>
<div
class="settings-label"
>
knowledge.retriever_hybrid_weight.title
</div>
<input
data-marks="{"0":"knowledge.retriever_hybrid_weight.bm25","1":"knowledge.retriever_hybrid_weight.vector","0.5":"knowledge.retriever_hybrid_weight.recommended"}"
data-testid="weight-slider"
max="1"
min="0"
step="0.1"
style="width: 100%;"
type="range"
value="0.5"
/>
</div>
<div
class="c1"
>

View File

@ -65,7 +65,7 @@ exports[`KnowledgeBaseFormModal > basic rendering > should match snapshot 1`] =
data-title="Knowledge Base Settings"
styles="[object Object]"
transitionname="animation-move-down"
width="min(900px, 75vw)"
width="min(900px, 65vw)"
>
<div
data-testid="modal-header"

View File

@ -51,8 +51,7 @@ const PopupContainer: React.FC<PopupContainerProps> = ({ title, resolve }) => {
const _newBase: KnowledgeBase = {
...newBase,
created_at: Date.now(),
updated_at: Date.now(),
framework: 'langchain'
updated_at: Date.now()
}
await window.api.knowledgeBase.create(getKnowledgeBaseParams(_newBase))

View File

@ -4,7 +4,7 @@ import { TopView } from '@renderer/components/TopView'
import { useKnowledge } from '@renderer/hooks/useKnowledge'
import { useKnowledgeBaseForm } from '@renderer/hooks/useKnowledgeBaseForm'
import { getModelUniqId } from '@renderer/services/ModelService'
import { KnowledgeBase, MigrationModeEnum } from '@renderer/types'
import { KnowledgeBase } from '@renderer/types'
import { formatErrorMessage } from '@renderer/utils/error'
import { Flex } from 'antd'
import { useCallback, useMemo, useState } from 'react'
@ -48,7 +48,7 @@ const PopupContainer: React.FC<PopupContainerProps> = ({ base: _base, resolve })
const handleEmbeddingModelChangeMigration = useCallback(async () => {
const migratedBase = { ...newBase, id: nanoid() }
try {
await migrateBase(migratedBase, MigrationModeEnum.EmbeddingModelChange)
await migrateBase(migratedBase)
setOpen(false)
resolve(migratedBase)
} catch (error) {

View File

@ -6,7 +6,7 @@ import { isEmbeddingModel, isRerankModel } from '@renderer/config/models'
import { useProviders } from '@renderer/hooks/useProvider'
import { getModelUniqId } from '@renderer/services/ModelService'
import { KnowledgeBase, PreprocessProvider } from '@renderer/types'
import { Input, Segmented, Select, SelectProps, Slider } from 'antd'
import { Input, Select, SelectProps, Slider } from 'antd'
import { useTranslation } from 'react-i18next'
import { SettingsItem, SettingsPanel } from './styles'
@ -106,55 +106,6 @@ const GeneralSettingsPanel: React.FC<GeneralSettingsPanelProps> = ({
/>
</SettingsItem>
{newBase.framework !== 'embedjs' && (
<>
<SettingsItem>
<div className="settings-label">
{t('knowledge.retriever')}
<InfoTooltip title={t('knowledge.retriever_tooltip')} placement="right" />
</div>
<Segmented
value={newBase.retriever?.mode || 'hybrid'}
onChange={(value) =>
setNewBase({ ...newBase, retriever: { mode: value as 'vector' | 'bm25' | 'hybrid' } })
}
options={[
{ label: t('knowledge.retriever_hybrid'), value: 'hybrid' },
{ label: t('knowledge.retriever_vector'), value: 'vector' },
{ label: t('knowledge.retriever_bm25'), value: 'bm25' }
]}
/>
</SettingsItem>
{newBase.retriever?.mode === 'hybrid' && (
<SettingsItem>
<div className="settings-label">{t('knowledge.retriever_hybrid_weight.title')}</div>
<Slider
style={{ width: '100%' }}
min={0}
max={1}
step={0.1}
value={newBase.retriever?.weight || 0.5}
marks={{
0: t('knowledge.retriever_hybrid_weight.bm25'),
0.5: t('knowledge.retriever_hybrid_weight.recommended'),
1: t('knowledge.retriever_hybrid_weight.vector')
}}
onChange={(value) =>
setNewBase({
...newBase,
retriever: {
...newBase.retriever,
mode: 'hybrid',
weight: value
}
})
}
/>
</SettingsItem>
)}
</>
)}
<SettingsItem>
<div className="settings-label">
{t('knowledge.document_count')}

View File

@ -25,9 +25,9 @@ const KnowledgeBaseFormModal: React.FC<KnowledgeBaseFormModalProps> = ({ panels,
maskClosable={false}
centered
transitionName="animation-move-down"
width="min(900px, 75vw)"
width="min(900px, 65vw)"
styles={{
body: { padding: 0, height: 700 },
body: { padding: 0, height: 550 },
header: {
padding: '10px 15px',
borderBottom: '0.5px solid var(--color-border)',

View File

@ -1,57 +0,0 @@
import { loggerService } from '@logger'
import { nanoid } from '@reduxjs/toolkit'
import { useKnowledge } from '@renderer/hooks/useKnowledge'
import { useKnowledgeBaseForm } from '@renderer/hooks/useKnowledgeBaseForm'
import { KnowledgeBase, MigrationModeEnum } from '@renderer/types'
import { formatErrorMessage } from '@renderer/utils/error'
import { Flex, Tag } from 'antd'
import { FC, useCallback } from 'react'
import { useTranslation } from 'react-i18next'
const logger = loggerService.withContext('MigrationInfoTag')
const MigrationInfoTag: FC<{ base: KnowledgeBase }> = ({ base: _base }) => {
const { t } = useTranslation()
const { migrateBase } = useKnowledge(_base.id)
const { newBase } = useKnowledgeBaseForm(_base)
// 处理嵌入模型更改迁移
const handleMigration = useCallback(async () => {
const migratedBase = { ...newBase, id: nanoid() }
try {
await migrateBase(migratedBase, MigrationModeEnum.MigrationToLangChain)
} catch (error) {
logger.error('KnowledgeBase migration failed:', error as Error)
window.toast.error(t('knowledge.migrate.error.failed') + ': ' + formatErrorMessage(error))
}
}, [newBase, migrateBase, t])
const onClick = async () => {
window.modal.confirm({
title: t('knowledge.migrate.confirm.title'),
content: (
<Flex vertical align="self-start">
<span>{t('knowledge.migrate.migrate_to_langchain.content')}</span>
</Flex>
),
okText: t('knowledge.migrate.confirm.ok'),
centered: true,
onOk: handleMigration
})
}
return (
<Tag
color="blue"
style={{
borderRadius: 20,
margin: 0,
cursor: 'pointer'
}}
onClick={onClick}>
{t('knowledge.migrate.migrate_to_langchain.info')}
</Tag>
)
}
export default MigrationInfoTag

View File

@ -31,18 +31,10 @@ const QuotaTag: FC<{ base: KnowledgeBase; providerId: PreprocessProviderId; quot
const userId = getStoreSetting('userId')
const baseParams = getKnowledgeBaseParams(base)
try {
let response: number
if (base.framework === 'langchain') {
response = await window.api.knowledgeBase.checkQuota({
base: baseParams,
userId: userId as string
})
} else {
response = await window.api.knowledgeBase.checkQuota({
base: baseParams,
userId: userId as string
})
}
const response = await window.api.knowledgeBase.checkQuota({
base: baseParams,
userId: userId as string
})
setQuota(response)
} catch (error) {
logger.error('[KnowledgeContent] Error checking quota:', error as Error)

View File

@ -75,9 +75,7 @@ export const getKnowledgeBaseParams = (base: KnowledgeBase): KnowledgeBaseParams
baseURL: rerankHost
},
documentCount: base.documentCount,
preprocessProvider: updatedPreprocessProvider,
framework: base.framework,
retriever: base.retriever || { mode: 'hybrid' }
preprocessProvider: updatedPreprocessProvider
}
}

View File

@ -244,8 +244,7 @@ class WebSearchService {
items: [],
created_at: Date.now(),
updated_at: Date.now(),
version: 1,
framework: 'langchain'
version: 1
}
// 更新LRU cache

View File

@ -67,7 +67,7 @@ const persistedReducer = persistReducer(
{
key: 'cherry-studio',
storage,
version: 154,
version: 155,
blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs'],
migrate
},

View File

@ -2376,8 +2376,8 @@ const migrateConfig = {
'147': (state: RootState) => {
try {
state.knowledge.bases.forEach((base) => {
if (!base.framework) {
base.framework = 'embedjs'
if ((base as any).framework) {
delete (base as any).framework
}
})
return state
@ -2398,8 +2398,8 @@ const migrateConfig = {
'149': (state: RootState) => {
try {
state.knowledge.bases.forEach((base) => {
if (!base.framework) {
base.framework = 'embedjs'
if ((base as any).framework) {
delete (base as any).framework
}
})
return state
@ -2463,6 +2463,19 @@ const migrateConfig = {
logger.error('migrate 154 error', error as Error)
return state
}
},
'155': (state: RootState) => {
try {
state.knowledge.bases.forEach((base) => {
if ((base as any).framework) {
delete (base as any).framework
}
})
return state
} catch (error) {
logger.error('migrate 155 error', error as Error)
return state
}
}
}

View File

@ -100,12 +100,6 @@ export interface KnowledgeBase {
type: 'preprocess'
provider: PreprocessProvider
}
framework: 'embedjs' | 'langchain'
// default is hybrid
retriever?: {
mode: 'vector' | 'bm25' | 'hybrid'
weight?: number
}
}
export type ProcessingStatus = 'pending' | 'processing' | 'completed' | 'failed'
@ -145,11 +139,6 @@ export type KnowledgeBaseParams = {
type: 'preprocess'
provider: PreprocessProvider
}
framework: 'embedjs' | 'langchain'
retriever?: {
mode: 'vector' | 'bm25' | 'hybrid'
weight?: number
}
}
export type KnowledgeReference = {
@ -166,8 +155,3 @@ export interface KnowledgeSearchResult {
score: number
metadata: Record<string, any>
}
export enum MigrationModeEnum {
EmbeddingModelChange = 'EmbeddingModelChange',
MigrationToLangChain = 'MigrationToLangChain'
}

131
yarn.lock
View File

@ -6186,26 +6186,6 @@ __metadata:
languageName: node
linkType: hard
"@langchain/core@npm:^0.3.68":
version: 0.3.73
resolution: "@langchain/core@npm:0.3.73"
dependencies:
"@cfworker/json-schema": "npm:^4.0.2"
ansi-styles: "npm:^5.0.0"
camelcase: "npm:6"
decamelize: "npm:1.2.0"
js-tiktoken: "npm:^1.0.12"
langsmith: "npm:^0.3.46"
mustache: "npm:^4.2.0"
p-queue: "npm:^6.6.2"
p-retry: "npm:4"
uuid: "npm:^10.0.0"
zod: "npm:^3.25.32"
zod-to-json-schema: "npm:^3.22.3"
checksum: 10c0/c10be034bd6698b276f040a749c9792008b43cfb3d41e9c58d6a11ff09fa1b8cf68ddc1836ca858a4b53a45163b57ff22be033cf415bc976f761b9e81c453c2b
languageName: node
linkType: hard
"@langchain/core@patch:@langchain/core@npm%3A0.3.44#~/.yarn/patches/@langchain-core-npm-0.3.44-41d5c3cb0a.patch":
version: 0.3.44
resolution: "@langchain/core@patch:@langchain/core@npm%3A0.3.44#~/.yarn/patches/@langchain-core-npm-0.3.44-41d5c3cb0a.patch::version=0.3.44&hash=41dd7b"
@ -6240,20 +6220,6 @@ __metadata:
languageName: node
linkType: hard
"@langchain/ollama@npm:^0.2.1":
version: 0.2.1
resolution: "@langchain/ollama@npm:0.2.1"
dependencies:
ollama: "npm:^0.5.12"
uuid: "npm:^10.0.0"
zod: "npm:^3.24.1"
zod-to-json-schema: "npm:^3.24.1"
peerDependencies:
"@langchain/core": ">=0.2.21 <0.4.0"
checksum: 10c0/be3083a15e879f2c19d0a51aafb88a3ba4ea69f9fdd90e6069b84edd92649c00a29a34cb885746e099f5cab0791b55df3776cd582cba7de299b8bd574d32b8c1
languageName: node
linkType: hard
"@langchain/openai@npm:0.3.16":
version: 0.3.16
resolution: "@langchain/openai@npm:0.3.16"
@ -6282,7 +6248,7 @@ __metadata:
languageName: node
linkType: hard
"@langchain/openai@npm:>=0.2.0 <0.7.0, @langchain/openai@npm:^0.6.7":
"@langchain/openai@npm:>=0.2.0 <0.7.0":
version: 0.6.11
resolution: "@langchain/openai@npm:0.6.11"
dependencies:
@ -13074,9 +13040,6 @@ __metadata:
"@heroui/react": "npm:^2.8.3"
"@kangfenmao/keyv-storage": "npm:^0.1.0"
"@langchain/community": "npm:^0.3.50"
"@langchain/core": "npm:^0.3.68"
"@langchain/ollama": "npm:^0.2.1"
"@langchain/openai": "npm:^0.6.7"
"@libsql/client": "npm:0.14.0"
"@libsql/win32-x64-msvc": "npm:^0.4.7"
"@mistralai/mistralai": "npm:^1.7.5"
@ -13194,7 +13157,6 @@ __metadata:
eslint-plugin-simple-import-sort: "npm:^12.1.1"
eslint-plugin-unused-imports: "npm:^4.1.4"
express: "npm:^5.1.0"
faiss-node: "npm:^0.5.1"
fast-diff: "npm:^1.3.0"
fast-xml-parser: "npm:^5.2.0"
fetch-socks: "npm:1.3.2"
@ -14062,15 +14024,6 @@ __metadata:
languageName: node
linkType: hard
"bindings@npm:^1.5.0":
version: 1.5.0
resolution: "bindings@npm:1.5.0"
dependencies:
file-uri-to-path: "npm:1.0.0"
checksum: 10c0/3dab2491b4bb24124252a91e656803eac24292473e56554e35bbfe3cc1875332cfa77600c3bac7564049dc95075bf6fcc63a4609920ff2d64d0fe405fcf0d4ba
languageName: node
linkType: hard
"birecord@npm:^0.1.1":
version: 0.1.1
resolution: "birecord@npm:0.1.1"
@ -16239,13 +16192,6 @@ __metadata:
languageName: node
linkType: hard
"detect-libc@npm:^2.0.0, detect-libc@npm:^2.0.3, detect-libc@npm:^2.0.4":
version: 2.0.4
resolution: "detect-libc@npm:2.0.4"
checksum: 10c0/c15541f836eba4b1f521e4eecc28eefefdbc10a94d3b8cb4c507689f332cc111babb95deda66f2de050b22122113189986d5190be97d51b5a2b23b938415e67c
languageName: node
linkType: hard
"detect-libc@npm:^2.0.1":
version: 2.0.3
resolution: "detect-libc@npm:2.0.3"
@ -16253,6 +16199,13 @@ __metadata:
languageName: node
linkType: hard
"detect-libc@npm:^2.0.3, detect-libc@npm:^2.0.4":
version: 2.0.4
resolution: "detect-libc@npm:2.0.4"
checksum: 10c0/c15541f836eba4b1f521e4eecc28eefefdbc10a94d3b8cb4c507689f332cc111babb95deda66f2de050b22122113189986d5190be97d51b5a2b23b938415e67c
languageName: node
linkType: hard
"detect-node@npm:^2.0.4":
version: 2.1.0
resolution: "detect-node@npm:2.1.0"
@ -17625,18 +17578,6 @@ __metadata:
languageName: node
linkType: hard
"faiss-node@npm:^0.5.1":
version: 0.5.1
resolution: "faiss-node@npm:0.5.1"
dependencies:
bindings: "npm:^1.5.0"
node-addon-api: "npm:^6.0.0"
node-gyp: "npm:latest"
prebuild-install: "npm:^7.1.1"
checksum: 10c0/2045c4db87b39637d56c8228b26a30f4bcf49f5e0f99a7717b89030683aa8ed4d37c2f58d3b7b630fe56128fa7b8b55a0ad375ea651008bff17cb3618abc30be
languageName: node
linkType: hard
"fast-deep-equal@npm:3.1.3, fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3":
version: 3.1.3
resolution: "fast-deep-equal@npm:3.1.3"
@ -17931,13 +17872,6 @@ __metadata:
languageName: node
linkType: hard
"file-uri-to-path@npm:1.0.0":
version: 1.0.0
resolution: "file-uri-to-path@npm:1.0.0"
checksum: 10c0/3b545e3a341d322d368e880e1c204ef55f1d45cdea65f7efc6c6ce9e0c4d22d802d5629320eb779d006fe59624ac17b0e848d83cc5af7cd101f206cb704f5519
languageName: node
linkType: hard
"filelist@npm:^1.0.4":
version: 1.0.4
resolution: "filelist@npm:1.0.4"
@ -22431,13 +22365,6 @@ __metadata:
languageName: node
linkType: hard
"napi-build-utils@npm:^2.0.0":
version: 2.0.0
resolution: "napi-build-utils@npm:2.0.0"
checksum: 10c0/5833aaeb5cc5c173da47a102efa4680a95842c13e0d9cc70428bd3ee8d96bb2172f8860d2811799b5daa5cbeda779933601492a2028a6a5351c6d0fcf6de83db
languageName: node
linkType: hard
"native-promise-only@npm:0.8.1":
version: 0.8.1
resolution: "native-promise-only@npm:0.8.1"
@ -22537,15 +22464,6 @@ __metadata:
languageName: node
linkType: hard
"node-addon-api@npm:^6.0.0":
version: 6.1.0
resolution: "node-addon-api@npm:6.1.0"
dependencies:
node-gyp: "npm:latest"
checksum: 10c0/d2699c4ad15740fd31482a3b6fca789af7723ab9d393adc6ac45250faaee72edad8f0b10b2b9d087df0de93f1bdc16d97afdd179b26b9ebc9ed68b569faa4bac
languageName: node
linkType: hard
"node-addon-api@npm:^8.4.0":
version: 8.4.0
resolution: "node-addon-api@npm:8.4.0"
@ -23683,28 +23601,6 @@ __metadata:
languageName: node
linkType: hard
"prebuild-install@npm:^7.1.1":
version: 7.1.3
resolution: "prebuild-install@npm:7.1.3"
dependencies:
detect-libc: "npm:^2.0.0"
expand-template: "npm:^2.0.3"
github-from-package: "npm:0.0.0"
minimist: "npm:^1.2.3"
mkdirp-classic: "npm:^0.5.3"
napi-build-utils: "npm:^2.0.0"
node-abi: "npm:^3.3.0"
pump: "npm:^3.0.0"
rc: "npm:^1.2.7"
simple-get: "npm:^4.0.0"
tar-fs: "npm:^2.0.0"
tunnel-agent: "npm:^0.6.0"
bin:
prebuild-install: bin.js
checksum: 10c0/25919a42b52734606a4036ab492d37cfe8b601273d8dfb1fa3c84e141a0a475e7bad3ab848c741d2f810cef892fcf6059b8c7fe5b29f98d30e0c29ad009bedff
languageName: node
linkType: hard
"prelude-ls@npm:^1.2.1":
version: 1.2.1
resolution: "prelude-ls@npm:1.2.1"
@ -26316,17 +26212,6 @@ __metadata:
languageName: node
linkType: hard
"simple-get@npm:^4.0.0":
version: 4.0.1
resolution: "simple-get@npm:4.0.1"
dependencies:
decompress-response: "npm:^6.0.0"
once: "npm:^1.3.1"
simple-concat: "npm:^1.0.0"
checksum: 10c0/b0649a581dbca741babb960423248899203165769747142033479a7dc5e77d7b0fced0253c731cd57cf21e31e4d77c9157c3069f4448d558ebc96cf9e1eebcf0
languageName: node
linkType: hard
"simple-swizzle@npm:^0.2.2":
version: 0.2.2
resolution: "simple-swizzle@npm:0.2.2"