Mirror of https://github.com/CherryHQ/cherry-studio.git, synced 2025-12-27 04:31:27 +08:00
fix(Anthropic): add base64 file handling to IPC and file management (#5595)
Parent: 66abb416df
Commit: eedbaa965c
@@ -107,7 +107,7 @@ export enum IpcChannel {
   File_Download = 'file:download',
   File_Copy = 'file:copy',
   File_BinaryImage = 'file:binaryImage',
+  File_Base64File = 'file:base64File',
   Fs_Read = 'fs:read',

   Export_Word = 'export:word',
@@ -197,6 +197,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
   ipcMain.handle(IpcChannel.File_Write, fileManager.writeFile)
   ipcMain.handle(IpcChannel.File_SaveImage, fileManager.saveImage)
   ipcMain.handle(IpcChannel.File_Base64Image, fileManager.base64Image)
+  ipcMain.handle(IpcChannel.File_Base64File, fileManager.base64File)
   ipcMain.handle(IpcChannel.File_Download, fileManager.downloadFile)
   ipcMain.handle(IpcChannel.File_Copy, fileManager.copyFile)
   ipcMain.handle(IpcChannel.File_BinaryImage, fileManager.binaryImage)
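
For orientation, ipcMain.handle passes the renderer's invoke arguments after the event object, which is why the FileStorage methods below accept an Electron.IpcMainInvokeEvent as their first parameter. A minimal sketch of the pattern, with an illustrative handler body rather than the project's own:

import { ipcMain } from 'electron'

// Whatever the handler returns (or resolves to) becomes the result of
// ipcRenderer.invoke(channel, id) on the renderer side.
ipcMain.handle('file:base64File', async (_event: Electron.IpcMainInvokeEvent, id: string) => {
  // The real registration above delegates to fileManager.base64File;
  // this stub only shows the shape of the { data, mime } result.
  return { data: Buffer.from(id).toString('base64'), mime: 'application/octet-stream' }
})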
@@ -263,6 +263,14 @@ class FileStorage {
     }
   }

+  public base64File = async (_: Electron.IpcMainInvokeEvent, id: string): Promise<{ data: string; mime: string }> => {
+    const filePath = path.join(this.storageDir, id)
+    const buffer = await fs.promises.readFile(filePath)
+    const base64 = buffer.toString('base64')
+    const mime = `application/${path.extname(filePath).slice(1)}`
+    return { data: base64, mime }
+  }
+
   public binaryImage = async (_: Electron.IpcMainInvokeEvent, id: string): Promise<{ data: Buffer; mime: string }> => {
     const filePath = path.join(this.storageDir, id)
     const data = await fs.promises.readFile(filePath)
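
base64File reads the stored file and guesses the MIME type from the extension alone, so a stored .pdf comes back as application/pdf. A standalone Node sketch of the same encode-and-guess steps (the path is made up), useful for checking that decoding the string restores the original bytes:

import fs from 'node:fs'
import path from 'node:path'

async function fileToBase64(filePath: string): Promise<{ data: string; mime: string }> {
  const buffer = await fs.promises.readFile(filePath)
  // Same extension-based guess as FileStorage.base64File: '.pdf' -> 'application/pdf'
  const mime = `application/${path.extname(filePath).slice(1)}`
  return { data: buffer.toString('base64'), mime }
}

async function roundTrip() {
  const { data, mime } = await fileToBase64('/tmp/example.pdf') // hypothetical path
  const restored = Buffer.from(data, 'base64') // identical to the bytes on disk
  console.log(mime, restored.length)
}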
@@ -65,7 +65,8 @@ const api = {
     base64Image: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Base64Image, fileId),
     download: (url: string) => ipcRenderer.invoke(IpcChannel.File_Download, url),
     copy: (fileId: string, destPath: string) => ipcRenderer.invoke(IpcChannel.File_Copy, fileId, destPath),
-    binaryImage: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_BinaryImage, fileId)
+    binaryImage: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_BinaryImage, fileId),
+    base64File: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Base64File, fileId)
   },
   fs: {
     read: (path: string) => ipcRenderer.invoke(IpcChannel.Fs_Read, path)
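
From the renderer the new bridge method is awaited like the existing ones. A hedged usage sketch, assuming the window.api typings that accompany the preload script and the file.id + file.ext naming used by the providers below:

// Illustrative only: load a stored document as a base64 string.
async function loadStoredFileAsBase64(fileId: string, ext: string): Promise<string> {
  const { data, mime } = await window.api.file.base64File(fileId + ext)
  console.debug(`loaded ${fileId}${ext} as ${mime} (${data.length} base64 chars)`)
  return data
}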
@@ -5,6 +5,7 @@ import { isReasoningModel, isVisionModel } from '@renderer/config/models'
 import { getStoreSetting } from '@renderer/hooks/useSettings'
 import i18n from '@renderer/i18n'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/AssistantService'
+import FileManager from '@renderer/services/FileManager'
 import {
   filterContextMessages,
   filterEmptyMessages,
@@ -76,12 +77,23 @@ export default class AnthropicProvider extends BaseProvider {
         }
       })
     }

     // Get and process file blocks
     const fileBlocks = findFileBlocks(message)
     for (const fileBlock of fileBlocks) {
-      const file = fileBlock.file
+      const { file } = fileBlock
       if ([FileTypes.TEXT, FileTypes.DOCUMENT].includes(file.type)) {
+        if (file.ext === '.pdf' && file.size < 32 * 1024 * 1024) {
+          const base64Data = await FileManager.readBase64File(file)
+          parts.push({
+            type: 'document',
+            source: {
+              type: 'base64',
+              media_type: 'application/pdf',
+              data: base64Data
+            }
+          })
+        } else {
           const fileContent = await (await window.api.file.read(file.id + file.ext)).trim()
           parts.push({
             type: 'text',
@@ -90,6 +102,7 @@ export default class AnthropicProvider extends BaseProvider {
+        }
       }
     }

     return {
       role: message.role === 'system' ? 'user' : message.role,
       content: parts
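
The document branch above emits the base64 PDF source block that Anthropic's Messages API accepts (hence the 32 MB guard, which matches the documented request size limit for PDFs). A rough sketch of the resulting request, with a placeholder model id and prompt:

import Anthropic from '@anthropic-ai/sdk'

async function askAboutPdf(client: Anthropic, base64Data: string) {
  return client.messages.create({
    model: 'claude-3-5-sonnet-latest', // placeholder
    max_tokens: 1024,
    messages: [
      {
        role: 'user',
        content: [
          // same shape the provider pushes into `parts`
          { type: 'document', source: { type: 'base64', media_type: 'application/pdf', data: base64Data } },
          { type: 'text', text: 'Summarize the attached PDF.' }
        ]
      }
    ]
  })
}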
@@ -252,27 +265,25 @@ export default class AnthropicProvider extends BaseProvider {
       onChunk({ type: ChunkType.LLM_RESPONSE_CREATED })
       let hasThinkingContent = false
       this.sdk.messages
-        .stream({ ...body, stream: true }, { signal })
+        .stream({ ...body, stream: true }, { signal, timeout: 5 * 60 * 1000 })
         .on('text', (text) => {
           if (hasThinkingContent && !checkThinkingContent) {
             checkThinkingContent = true
             onChunk({
               type: ChunkType.THINKING_COMPLETE,
               text: thinking_content,
-              thinking_millsec: time_first_content_millsec - time_first_token_millsec
+              thinking_millsec: new Date().getTime() - time_first_content_millsec
             })
+            // FIXME: temporary workaround, reset the timestamps and thinking content
+            time_first_token_millsec = 0
+            time_first_content_millsec = 0
+            thinking_content = ''
+            checkThinkingContent = false
+            hasThinkingContent = false
           }
           if (time_first_token_millsec == 0) {
-            time_first_token_millsec = new Date().getTime() - start_time_millsec
+            time_first_token_millsec = new Date().getTime()
           }

-          if (hasThinkingContent && time_first_content_millsec === 0) {
-            thinking_content = ''
-            checkThinkingContent = false
-            hasThinkingContent = false
+          if (!hasThinkingContent && time_first_content_millsec === 0) {
             time_first_content_millsec = new Date().getTime()
           }
@@ -283,7 +294,7 @@ export default class AnthropicProvider extends BaseProvider {
           const currentTime = new Date().getTime() // Get current time for each chunk

           if (time_first_token_millsec == 0) {
-            time_first_token_millsec = currentTime - start_time_millsec
+            time_first_token_millsec = currentTime
           }

           // Set time_first_content_millsec ONLY when the first content (thinking or text) arrives
@@ -293,7 +304,6 @@ export default class AnthropicProvider extends BaseProvider {

           // Calculate thinking time as time elapsed since start until this chunk
           const thinking_time = currentTime - time_first_content_millsec

           onChunk({
             type: ChunkType.THINKING_DELTA,
             text: thinking,
@@ -340,11 +350,13 @@ export default class AnthropicProvider extends BaseProvider {
               metrics: {
                 completion_tokens: message.usage.output_tokens,
                 time_completion_millsec,
-                time_first_token_millsec
+                time_first_token_millsec: time_first_token_millsec - start_time_millsec
               }
             }
           })

+          // FIXME: temporary workaround, reset the timestamps and thinking content
+          time_first_token_millsec = 0
+          time_first_content_millsec = 0
           resolve()
         })
         .on('error', (error) => reject(error))
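
The timing changes above follow one pattern: time_first_token_millsec now holds an absolute timestamp taken when the first token arrives, and the delta against start_time_millsec is computed once, when the metrics are reported, after which the timestamps are reset for the next turn. A detached sketch of that bookkeeping, with names matching the diff but everything else assumed:

const start_time_millsec = new Date().getTime()
let time_first_token_millsec = 0

function onFirstPossibleToken() {
  if (time_first_token_millsec === 0) {
    time_first_token_millsec = new Date().getTime() // absolute timestamp, no longer a delta
  }
}

function buildMetrics(output_tokens: number) {
  return {
    completion_tokens: output_tokens,
    // the delta is derived only at reporting time
    time_first_token_millsec: time_first_token_millsec - start_time_millsec
  }
}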
@@ -584,9 +584,6 @@ export default class OpenAIProvider extends BaseProvider {
              thinking_millsec: final_time_thinking_millsec_delta
            })

-           // FIXME: temporary workaround, reset the timestamps and thinking content
-           time_first_token_millsec = 0
-           time_first_content_millsec = 0
            thinkingContent = ''
            isFirstThinkingChunk = true
            hasReasoningContent = false
@@ -610,8 +607,11 @@ export default class OpenAIProvider extends BaseProvider {
                 )
               }
             }
-            if (isFirstChunk) {
+            // indicates there was no thinking content before this point
+            if (isFirstChunk && time_first_token_millsec === 0 && time_first_token_millsec_delta === 0) {
               isFirstChunk = false
+              time_first_token_millsec = currentTime
+              time_first_token_millsec_delta = time_first_token_millsec - start_time_millsec
             }
             content += delta.content // Still accumulate for processToolUses
@@ -711,8 +711,9 @@ export default class OpenAIProvider extends BaseProvider {
           }
         })

-        // OpenAI stream typically doesn't provide a final summary chunk easily.
-        // We are sending per-chunk usage if available.
+        // FIXME: temporary workaround, reset the timestamps and thinking content
+        time_first_token_millsec = 0
+        time_first_content_millsec = 0
       }

       console.debug('[completions] reqMessages before processing', model.id, reqMessages)
@@ -1164,8 +1165,7 @@ export default class OpenAIProvider extends BaseProvider {
         validUserFiles.map(async (f) => {
           // f.file is guaranteed to exist here due to the filter above
           const fileInfo = f.file!
-          const binaryData = await FileManager.readFile(fileInfo)
-          console.log('binaryData', binaryData)
+          const binaryData = await FileManager.readBinaryImage(fileInfo)
           const file = await toFile(binaryData, fileInfo.origin_name || 'image.png', {
             type: 'image/png'
           })
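
readBinaryImage resolves to a Buffer, which the openai package's toFile helper wraps into an uploadable file object. A hedged sketch of that step in isolation (the image-edit call and its parameters are illustrative, not necessarily how this provider uses the result):

import OpenAI, { toFile } from 'openai'

async function uploadImageBuffer(client: OpenAI, binaryData: Buffer, name: string) {
  // Wrap the raw bytes so the SDK can send them as a multipart file upload.
  const file = await toFile(binaryData, name || 'image.png', { type: 'image/png' })
  // Example downstream use: feed the uploaded image into an image edit request.
  return client.images.edit({ model: 'gpt-image-1', image: file, prompt: 'Describe or edit this image' })
}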
@@ -28,11 +28,16 @@ class FileManager {
     return Promise.all(files.map((file) => this.addFile(file)))
   }

-  static async readFile(file: FileType): Promise<Buffer> {
+  static async readBinaryImage(file: FileType): Promise<Buffer> {
     const fileData = await window.api.file.binaryImage(file.id + file.ext)
     return fileData.data
   }

+  static async readBase64File(file: FileType): Promise<string> {
+    const fileData = await window.api.file.base64File(file.id + file.ext)
+    return fileData.data
+  }
+
   static async uploadFile(file: FileType): Promise<FileType> {
     console.log(`[FileManager] Uploading file: ${JSON.stringify(file)}`)
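
Taken together, FileManager now exposes two read paths over the same stored file: readBinaryImage resolves to a Buffer via the file:binaryImage channel, and readBase64File resolves to a base64 string via the new file:base64File channel. A usage sketch, assuming a FileType value like the ones the providers pass in:

// Illustrative: the provider-side view of the two helpers.
async function inspectStoredFile(file: FileType) {
  const bytes = await FileManager.readBinaryImage(file) // Buffer, used for image uploads
  const base64 = await FileManager.readBase64File(file) // string, used for Anthropic PDF blocks
  console.debug(file.origin_name, bytes.length, 'bytes,', base64.length, 'base64 chars')
}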