mirror of https://github.com/CherryHQ/cherry-studio.git
synced 2025-12-19 14:41:24 +08:00

💄 style: format code with yarn format

This commit is contained in:
parent b869869e26
commit a09c52424f
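
This commit only reruns the project formatter (yarn format), so every hunk below is a pure style change: double quotes become single quotes, trailing semicolons are dropped, arrow-function parameters gain parentheses, trailing commas before a closing brace are removed, and long lines are re-wrapped around a roughly 120-character width. The changes are characteristic of Prettier; assuming that is what yarn format invokes, a configuration like the sketch below would reproduce them. The repository's actual formatter config is not part of this diff, so the file name and every option here are inferred assumptions, not the project's real file.

    // prettier.config.mjs — hypothetical reconstruction, inferred from the hunks below
    export default {
      semi: false, // new lines carry no trailing semicolons
      singleQuote: true, // "..." string literals become '...'
      printWidth: 120, // lines over ~120 chars are wrapped, shorter multi-line imports are joined
      trailingComma: 'none', // e.g. `max_tokens: request.max_tokens,` loses its comma
      arrowParens: 'always' // `p => String(p)` becomes `(p) => String(p)`
    }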
@@ -8,9 +8,9 @@
  * This shared module can be used by both main and renderer processes.
  */
 
-import Anthropic from "@anthropic-ai/sdk";
-import {TextBlockParam} from "@anthropic-ai/sdk/resources";
-import {Provider} from "@types";
+import Anthropic from '@anthropic-ai/sdk'
+import { TextBlockParam } from '@anthropic-ai/sdk/resources'
+import { Provider } from '@types'
 
 /**
  * Creates and configures an Anthropic SDK client based on the provider configuration.
@@ -54,7 +54,8 @@ export function getSdkClient(provider: Provider, oauthToken?: string | null): An
     defaultHeaders: {
       'Content-Type': 'application/json',
       'anthropic-version': '2023-06-01',
-      'anthropic-beta': 'oauth-2025-04-20,claude-code-20250219,interleaved-thinking-2025-05-14,fine-grained-tool-streaming-2025-05-14',
+      'anthropic-beta':
+        'oauth-2025-04-20,claude-code-20250219,interleaved-thinking-2025-05-14,fine-grained-tool-streaming-2025-05-14',
       'anthropic-dangerous-direct-browser-access': 'true',
       'user-agent': 'claude-cli/1.0.118 (external, sdk-ts)',
       'x-app': 'cli',
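
The hunk above only re-wraps the long 'anthropic-beta' header value; the getSdkClient helper it sits in ("creates and configures an Anthropic SDK client based on the provider configuration") is otherwise untouched. For orientation, here is a minimal usage sketch that is not part of the commit: the Provider value and the model id are placeholder assumptions, while the import paths, getSdkClient(provider), and messages.create(...) are the pieces that do appear in the diff.

    // Hypothetical usage of the shared helper touched above — a sketch, not code from this commit
    import { getSdkClient } from '@shared/anthropic'
    import type { Provider } from '@types'

    async function sendHello(provider: Provider) {
      // provider is assumed to carry whatever credentials/base URL the helper reads; its exact fields are not shown in this diff
      const client = getSdkClient(provider)
      return await client.messages.create({
        model: 'claude-example-model', // placeholder id, not taken from the diff
        max_tokens: 1024,
        messages: [{ role: 'user', content: 'Hello' }]
      })
    }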
@@ -128,7 +128,6 @@ apiRouter.use('/models', modelsRoutes)
 apiRouter.use('/agents', agentsRoutes)
 app.use('/v1', apiRouter)
 
-
 // Setup OpenAPI documentation
 setupOpenAPIDocumentation(app)
 
@@ -3,7 +3,6 @@ import {NextFunction, Request, Response} from 'express'
 
 import { config } from '../config'
 
-
 const isValidToken = (token: string, apiKey: string): boolean => {
   if (token.length !== apiKey.length) {
     return false
@@ -1,9 +1,5 @@
 import { loggerService } from '@logger'
-import {
-  AgentModelValidationError,
-  sessionMessageService,
-  sessionService
-} from '@main/services/agents'
+import { AgentModelValidationError, sessionMessageService, sessionService } from '@main/services/agents'
 import {
   CreateSessionResponse,
   ListAgentSessionsResponse,
@@ -35,7 +35,7 @@ export const createZodValidator = (config: ZodValidationConfig) => {
       type: 'field',
       value: err.input,
       msg: err.message,
-      path: err.path.map(p => String(p)).join('.'),
+      path: err.path.map((p) => String(p)).join('.'),
       location: getLocationFromPath(err.path, config)
     }))
 
@@ -318,7 +318,7 @@ router.post('/', async (req: Request, res: Response) => {
     model: request.model,
     messageCount: request.messages?.length || 0,
     stream: request.stream,
-    max_tokens: request.max_tokens,
+    max_tokens: request.max_tokens
   })
 
   // Validate model ID and get provider
@@ -38,9 +38,10 @@ export type PrepareRequestResult =
     }
 
 export class ChatCompletionService {
-  async resolveProviderContext(model: string): Promise<
-    | { ok: false; error: ModelValidationError }
-    | { ok: true; provider: Provider; modelId: string; client: OpenAI }
+  async resolveProviderContext(
+    model: string
+  ): Promise<
+    { ok: false; error: ModelValidationError } | { ok: true; provider: Provider; modelId: string; client: OpenAI }
   > {
     const modelValidation = await validateModelId(model)
     if (!modelValidation.valid) {
@@ -196,9 +197,7 @@ export class ChatCompletionService {
     }
   }
 
-  async processStreamingCompletion(
-    request: ChatCompletionCreateParams
-  ): Promise<{
+  async processStreamingCompletion(request: ChatCompletionCreateParams): Promise<{
     provider: Provider
     modelId: string
     stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>
@@ -227,9 +226,9 @@ export class ChatCompletionService {
     })
 
     const streamRequest = providerRequest as ChatCompletionCreateParamsStreaming
-    const stream = (await client.chat.completions.create(streamRequest)) as AsyncIterable<
-      OpenAI.Chat.Completions.ChatCompletionChunk
-    >
+    const stream = (await client.chat.completions.create(
+      streamRequest
+    )) as AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>
 
     logger.info('Successfully started streaming chat completion')
     return {
@@ -1,11 +1,10 @@
-import Anthropic from "@anthropic-ai/sdk";
+import Anthropic from '@anthropic-ai/sdk'
 import { Message, MessageCreateParams, RawMessageStreamEvent } from '@anthropic-ai/sdk/resources'
 import { loggerService } from '@logger'
-import anthropicService from "@main/services/AnthropicService";
+import anthropicService from '@main/services/AnthropicService'
 import { buildClaudeCodeSystemMessage, getSdkClient } from '@shared/anthropic'
 import { Provider } from '@types'
 
-
 const logger = loggerService.withContext('MessagesService')
 
 export interface ValidationResult {
@@ -46,7 +45,6 @@ export class MessagesService {
     return getSdkClient(provider)
   }
 
-
   async processMessage(request: MessageCreateParams, provider: Provider): Promise<Message> {
     logger.info('Processing Anthropic message request:', {
       model: request.model,
@@ -20,4 +20,3 @@ export class AgentModelValidationError extends Error {
     this.detail = detail
   }
 }
-
@@ -1,5 +1,10 @@
 import { loggerService } from '@logger'
-import type {AgentSessionMessageEntity, CreateSessionMessageRequest, GetAgentSessionResponse, ListOptions} from '@types'
+import type {
+  AgentSessionMessageEntity,
+  CreateSessionMessageRequest,
+  GetAgentSessionResponse,
+  ListOptions
+} from '@types'
 import { TextStreamPart } from 'ai'
 import { desc, eq } from 'drizzle-orm'
 
@@ -119,7 +119,7 @@ function handleUserOrAssistantMessage(message: Extract<SDKMessage, { type: 'assi
           toolCallId: block.tool_use_id,
           toolName: '',
           input: '',
-          output: block.content,
+          output: block.content
         })
         break
       default:
@@ -244,7 +244,8 @@ function handleStreamEvent(message: Extract<SDKMessage, { type: 'stream_event' }
         }
         break
 
-      case 'content_block_stop': {
+      case 'content_block_stop':
+        {
           const contentBlock = contentBlockState.get(blockKey)
           if (contentBlock?.type === 'text') {
             chunks.push({
@@ -304,9 +305,7 @@ function handleResultMessage(message: Extract<SDKMessage, { type: 'result' }>):
     usage = {
       inputTokens: message.usage.input_tokens ?? 0,
       outputTokens: message.usage.output_tokens ?? 0,
-      totalTokens:
-        (message.usage.input_tokens ?? 0) +
-        (message.usage.output_tokens ?? 0)
+      totalTokens: (message.usage.input_tokens ?? 0) + (message.usage.output_tokens ?? 0)
     }
   }
   if (message.subtype === 'success') {
@@ -54,11 +54,21 @@ import {
   ThinkingStartChunk
 } from '@renderer/types/chunk'
 import { type Message } from '@renderer/types/newMessage'
-import {AnthropicSdkMessageParam, AnthropicSdkParams, AnthropicSdkRawChunk, AnthropicSdkRawOutput} from '@renderer/types/sdk'
+import {
+  AnthropicSdkMessageParam,
+  AnthropicSdkParams,
+  AnthropicSdkRawChunk,
+  AnthropicSdkRawOutput
+} from '@renderer/types/sdk'
 import { addImageFileToContents } from '@renderer/utils/formats'
-import {anthropicToolUseToMcpTool, isSupportedToolUse, mcpToolCallResponseToAnthropicMessage, mcpToolsToAnthropicTools} from '@renderer/utils/mcp-tools'
+import {
+  anthropicToolUseToMcpTool,
+  isSupportedToolUse,
+  mcpToolCallResponseToAnthropicMessage,
+  mcpToolsToAnthropicTools
+} from '@renderer/utils/mcp-tools'
 import { findFileBlocks, findImageBlocks } from '@renderer/utils/messageUtils/find'
-import {buildClaudeCodeSystemMessage, getSdkClient} from "@shared/anthropic";
+import { buildClaudeCodeSystemMessage, getSdkClient } from '@shared/anthropic'
 import { t } from 'i18next'
 
 import { GenericChunk } from '../../middleware/schemas'
@@ -105,7 +115,7 @@ export class AnthropicAPIClient extends BaseApiClient<
     if (payload.stream) {
       return sdk.messages.stream(payload, options)
     }
-    return sdk.messages.create(payload, options);
+    return sdk.messages.create(payload, options)
   }
 
   // @ts-ignore sdk未提供
@@ -1,4 +1,4 @@
-import type {ProviderMetadata} from "ai";
+import type { ProviderMetadata } from 'ai'
 import type { CompletionUsage } from 'openai/resources'
 
 import type {