diff --git a/src/main/services/KnowledgeService.ts b/src/main/services/KnowledgeService.ts
index 87321b5cf2..d55a1cfbe0 100644
--- a/src/main/services/KnowledgeService.ts
+++ b/src/main/services/KnowledgeService.ts
@@ -95,15 +95,18 @@ class KnowledgeService {
       const files = getAllFiles(directory)
       const totalFiles = files.length
       let processedFiles = 0
+
       const loaderPromises = files.map(async (file) => {
         const result = await addFileLoader(ragApplication, file, base, forceReload)
         processedFiles++
-        sendDirectoryProcessingPercent(totalFiles, processedFiles)
         return result
       })
+
       const loaderResults = await Promise.allSettled(loaderPromises)
-      const uniqueIds = loaderResults.filter(result => result.status === 'fulfilled').map((result) => result.uniqueId)
+      // @ts-ignore uniqueId
+      const uniqueIds = loaderResults.filter((result) => result.status === 'fulfilled').map((result) => result.uniqueId)
+
       return {
         entriesAdded: loaderResults.length,
         uniqueId: `DirectoryLoader_${uuidv4()}`,
diff --git a/src/renderer/src/store/llm.ts b/src/renderer/src/store/llm.ts
index 3f2fc10d79..71dd400c5a 100644
--- a/src/renderer/src/store/llm.ts
+++ b/src/renderer/src/store/llm.ts
@@ -387,16 +387,6 @@ const initialState: LlmState = {
     isSystem: true,
     enabled: false
   },
-  {
-    id: 'lmstudio',
-    name: 'LM Studio',
-    type: 'openai',
-    apiKey: '',
-    apiHost: 'http://localhost:1234',
-    models: SYSTEM_MODELS.lmstudio,
-    isSystem: true,
-    enabled: true
-  },
   {
     id: 'modelscope',
     name: 'ModelScope',
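For context on the `// @ts-ignore uniqueId` line in the first hunk: `Promise.allSettled` yields `PromiseSettledResult` objects, so the loader's return value is normally reached via `.value` after narrowing to the fulfilled case. Below is a minimal, hypothetical sketch of that narrowing with a type guard instead of a suppression comment; `LoaderResult`, `loadFile`, and `collectUniqueIds` are illustrative stand-ins, not names from this codebase.

```ts
// Hypothetical sketch, not the project's code: unwrapping fulfilled results
// from Promise.allSettled with a type guard rather than a @ts-ignore.
type LoaderResult = { uniqueId: string } // stand-in for the loader's return type

const isFulfilled = <T>(r: PromiseSettledResult<T>): r is PromiseFulfilledResult<T> =>
  r.status === 'fulfilled'

async function collectUniqueIds(
  files: string[],
  loadFile: (file: string) => Promise<LoaderResult>
): Promise<string[]> {
  const settled = await Promise.allSettled(files.map((file) => loadFile(file)))
  // Fulfilled entries carry the resolved value on `.value`; rejected ones are dropped.
  return settled.filter(isFulfilled).map((r) => r.value.uniqueId)
}
```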