mirror of
https://github.com/CherryHQ/cherry-studio.git
synced 2025-12-23 18:10:26 +08:00
feat: add huggingface provider (#10966)
* Refactor code structure for improved readability and maintainability
* fix(i18n): Auto update translations for PR #10966
* fix: add empty array for huggingface models in SYSTEM_MODELS
* feat: integrate HuggingFace provider and enhance reasoning options
* fix: remove debug console logs from provider options functions
---------
Co-authored-by: GitHub Action <action@github.com>
This commit is contained in:
parent 44e01e5ad4
commit 82132d479a
131 .yarn/patches/@ai-sdk-huggingface-npm-0.0.4-8080836bc1.patch vendored Normal file

@@ -0,0 +1,131 @@
diff --git a/dist/index.mjs b/dist/index.mjs
index b3f018730a93639aad7c203f15fb1aeb766c73f4..ade2a43d66e9184799d072153df61ef7be4ea110 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -296,7 +296,14 @@ var HuggingFaceResponsesLanguageModel = class {
       metadata: huggingfaceOptions == null ? void 0 : huggingfaceOptions.metadata,
       instructions: huggingfaceOptions == null ? void 0 : huggingfaceOptions.instructions,
       ...preparedTools && { tools: preparedTools },
-      ...preparedToolChoice && { tool_choice: preparedToolChoice }
+      ...preparedToolChoice && { tool_choice: preparedToolChoice },
+      ...(huggingfaceOptions?.reasoningEffort != null && {
+        reasoning: {
+          ...(huggingfaceOptions?.reasoningEffort != null && {
+            effort: huggingfaceOptions.reasoningEffort,
+          }),
+        },
+      }),
     };
     return { args: baseArgs, warnings };
   }
@@ -365,6 +372,20 @@ var HuggingFaceResponsesLanguageModel = class {
           }
           break;
         }
+        case 'reasoning': {
+          for (const contentPart of part.content) {
+            content.push({
+              type: 'reasoning',
+              text: contentPart.text,
+              providerMetadata: {
+                huggingface: {
+                  itemId: part.id,
+                },
+              },
+            });
+          }
+          break;
+        }
         case "mcp_call": {
           content.push({
             type: "tool-call",
@@ -519,6 +540,11 @@ var HuggingFaceResponsesLanguageModel = class {
              id: value.item.call_id,
              toolName: value.item.name
            });
+          } else if (value.item.type === 'reasoning') {
+            controller.enqueue({
+              type: 'reasoning-start',
+              id: value.item.id,
+            });
           }
           return;
         }
@@ -570,6 +596,22 @@ var HuggingFaceResponsesLanguageModel = class {
           });
           return;
         }
+        if (isReasoningDeltaChunk(value)) {
+          controller.enqueue({
+            type: 'reasoning-delta',
+            id: value.item_id,
+            delta: value.delta,
+          });
+          return;
+        }
+
+        if (isReasoningEndChunk(value)) {
+          controller.enqueue({
+            type: 'reasoning-end',
+            id: value.item_id,
+          });
+          return;
+        }
       },
       flush(controller) {
         controller.enqueue({
@@ -593,7 +635,8 @@ var HuggingFaceResponsesLanguageModel = class {
 var huggingfaceResponsesProviderOptionsSchema = z2.object({
   metadata: z2.record(z2.string(), z2.string()).optional(),
   instructions: z2.string().optional(),
-  strictJsonSchema: z2.boolean().optional()
+  strictJsonSchema: z2.boolean().optional(),
+  reasoningEffort: z2.string().optional(),
 });
 var huggingfaceResponsesResponseSchema = z2.object({
   id: z2.string(),
@@ -727,12 +770,31 @@ var responseCreatedChunkSchema = z2.object({
     model: z2.string()
   })
 });
+var reasoningTextDeltaChunkSchema = z2.object({
+  type: z2.literal('response.reasoning_text.delta'),
+  item_id: z2.string(),
+  output_index: z2.number(),
+  content_index: z2.number(),
+  delta: z2.string(),
+  sequence_number: z2.number(),
+});
+
+var reasoningTextEndChunkSchema = z2.object({
+  type: z2.literal('response.reasoning_text.done'),
+  item_id: z2.string(),
+  output_index: z2.number(),
+  content_index: z2.number(),
+  text: z2.string(),
+  sequence_number: z2.number(),
+});
 var huggingfaceResponsesChunkSchema = z2.union([
   responseOutputItemAddedSchema,
   responseOutputItemDoneSchema,
   textDeltaChunkSchema,
   responseCompletedChunkSchema,
   responseCreatedChunkSchema,
+  reasoningTextDeltaChunkSchema,
+  reasoningTextEndChunkSchema,
   z2.object({ type: z2.string() }).loose()
   // fallback for unknown chunks
 ]);
@@ -751,6 +813,12 @@ function isResponseCompletedChunk(chunk) {
 function isResponseCreatedChunk(chunk) {
   return chunk.type === "response.created";
 }
+function isReasoningDeltaChunk(chunk) {
+  return chunk.type === 'response.reasoning_text.delta';
+}
+function isReasoningEndChunk(chunk) {
+  return chunk.type === 'response.reasoning_text.done';
+}
 
 // src/huggingface-provider.ts
 function createHuggingFace(options = {}) {
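The patch above does two things: it forwards a new reasoningEffort provider option into the Responses request body, and it maps the router's response.reasoning_text.delta / response.reasoning_text.done stream chunks onto the AI SDK's reasoning-start / reasoning-delta / reasoning-end parts. A minimal usage sketch against the patched package, assuming AI SDK v5's streamText; the model id and HF_TOKEN environment variable are illustrative placeholders, not taken from this commit:

import { createHuggingFace } from '@ai-sdk/huggingface'
import { streamText } from 'ai'

// apiKey is assumed to be accepted here, as with other AI SDK providers.
const huggingface = createHuggingFace({ apiKey: process.env.HF_TOKEN })

const result = streamText({
  // Placeholder model id served through the Hugging Face router.
  model: huggingface('openai/gpt-oss-120b'),
  prompt: 'Summarize what a yarn patch does.',
  providerOptions: {
    // reasoningEffort is the field added to huggingfaceResponsesProviderOptionsSchema above.
    huggingface: { reasoningEffort: 'high' }
  }
})

for await (const part of result.fullStream) {
  // With the patch applied, reasoning-start / reasoning-delta / reasoning-end parts
  // now appear here alongside the usual text parts.
  console.log(part.type)
}
console.log(await result.text)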
@@ -103,6 +103,7 @@
     "@agentic/tavily": "^7.3.3",
     "@ai-sdk/amazon-bedrock": "^3.0.35",
     "@ai-sdk/google-vertex": "^3.0.40",
+    "@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.4#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.4-8080836bc1.patch",
     "@ai-sdk/mistral": "^2.0.19",
     "@ai-sdk/perplexity": "^2.0.13",
     "@ant-design/v5-patch-for-react-19": "^1.0.3",
@@ -7,6 +7,7 @@ import { createAzure } from '@ai-sdk/azure'
 import { type AzureOpenAIProviderSettings } from '@ai-sdk/azure'
 import { createDeepSeek } from '@ai-sdk/deepseek'
 import { createGoogleGenerativeAI } from '@ai-sdk/google'
+import { createHuggingFace } from '@ai-sdk/huggingface'
 import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai'
 import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
 import { LanguageModelV2 } from '@ai-sdk/provider'
@@ -28,7 +29,8 @@ export const baseProviderIds = [
   'azure',
   'azure-responses',
   'deepseek',
-  'openrouter'
+  'openrouter',
+  'huggingface'
 ] as const
 
 /**
@@ -132,6 +134,12 @@ export const baseProviders = [
     name: 'OpenRouter',
     creator: createOpenRouter,
     supportsImageGeneration: true
+  },
+  {
+    id: 'huggingface',
+    name: 'HuggingFace',
+    creator: createHuggingFace,
+    supportsImageGeneration: true
   }
 ] as const satisfies BaseProvider[]
@@ -49,7 +49,7 @@ class AdapterTracer {
       this.cachedParentContext = undefined
     }
 
-    logger.info('AdapterTracer created with parent context info', {
+    logger.debug('AdapterTracer created with parent context info', {
       topicId,
       modelName,
       parentTraceId: this.parentSpanContext?.traceId,
@@ -62,7 +62,7 @@ class AdapterTracer {
   startActiveSpan<F extends (span: Span) => any>(name: string, options: any, fn: F): ReturnType<F>
   startActiveSpan<F extends (span: Span) => any>(name: string, options: any, context: any, fn: F): ReturnType<F>
   startActiveSpan<F extends (span: Span) => any>(name: string, arg2?: any, arg3?: any, arg4?: any): ReturnType<F> {
-    logger.info('AdapterTracer.startActiveSpan called', {
+    logger.debug('AdapterTracer.startActiveSpan called', {
       spanName: name,
       topicId: this.topicId,
       modelName: this.modelName,
@@ -88,7 +88,7 @@ class AdapterTracer {
       // Wrap the span's end method
       const originalEnd = span.end.bind(span)
       span.end = (endTime?: any) => {
-        logger.info('AI SDK span.end() called in startActiveSpan - about to convert span', {
+        logger.debug('AI SDK span.end() called in startActiveSpan - about to convert span', {
           spanName: name,
           spanId: span.spanContext().spanId,
           traceId: span.spanContext().traceId,
@@ -101,14 +101,14 @@ class AdapterTracer {
 
         // Convert and save the span data
         try {
-          logger.info('Converting AI SDK span to SpanEntity (from startActiveSpan)', {
+          logger.debug('Converting AI SDK span to SpanEntity (from startActiveSpan)', {
            spanName: name,
            spanId: span.spanContext().spanId,
            traceId: span.spanContext().traceId,
            topicId: this.topicId,
            modelName: this.modelName
          })
-          logger.info('span', span)
+          logger.silly('span', span)
           const spanEntity = AiSdkSpanAdapter.convertToSpanEntity({
             span,
             topicId: this.topicId,
@@ -118,7 +118,7 @@ class AdapterTracer {
          // Save the converted data
          window.api.trace.saveEntity(spanEntity)
 
-          logger.info('AI SDK span converted and saved successfully (from startActiveSpan)', {
+          logger.debug('AI SDK span converted and saved successfully (from startActiveSpan)', {
            spanName: name,
            spanId: span.spanContext().spanId,
            traceId: span.spanContext().traceId,
@@ -151,7 +151,7 @@ class AdapterTracer {
     if (this.parentSpanContext) {
       try {
         const ctx = trace.setSpanContext(otelContext.active(), this.parentSpanContext)
-        logger.info('Created active context with parent SpanContext for startActiveSpan', {
+        logger.debug('Created active context with parent SpanContext for startActiveSpan', {
           spanName: name,
           parentTraceId: this.parentSpanContext.traceId,
           parentSpanId: this.parentSpanContext.spanId,
@@ -218,7 +218,7 @@ export function createTelemetryPlugin(config: TelemetryPluginConfig) {
       if (effectiveTopicId) {
         try {
           // Get the current span from SpanManagerService
-          logger.info('Attempting to find parent span', {
+          logger.debug('Attempting to find parent span', {
            topicId: effectiveTopicId,
            requestId: context.requestId,
            modelName: modelName,
@@ -230,7 +230,7 @@ export function createTelemetryPlugin(config: TelemetryPluginConfig) {
          if (parentSpan) {
            // Use the parent span's SpanContext directly to avoid missing fields from manual assembly
            parentSpanContext = parentSpan.spanContext()
-            logger.info('Found active parent span for AI SDK', {
+            logger.debug('Found active parent span for AI SDK', {
              parentSpanId: parentSpanContext.spanId,
              parentTraceId: parentSpanContext.traceId,
              topicId: effectiveTopicId,
@@ -302,7 +302,7 @@ export function createTelemetryPlugin(config: TelemetryPluginConfig) {
            logger.debug('Updated active context with parent span')
          })
 
-          logger.info('Set parent context for AI SDK spans', {
+          logger.debug('Set parent context for AI SDK spans', {
            parentSpanId: parentSpanContext?.spanId,
            parentTraceId: parentSpanContext?.traceId,
            hasActiveContext: !!activeContext,
@@ -313,7 +313,7 @@ export function createTelemetryPlugin(config: TelemetryPluginConfig) {
         }
       }
 
-      logger.info('Injecting AI SDK telemetry config with adapter', {
+      logger.debug('Injecting AI SDK telemetry config with adapter', {
        requestId: context.requestId,
        topicId: effectiveTopicId,
        modelId: context.modelId,
@@ -63,6 +63,14 @@ export const NEW_PROVIDER_CONFIGS: ProviderConfig[] = [
     creatorFunctionName: 'createMistral',
     supportsImageGeneration: false,
     aliases: ['mistral']
+  },
+  {
+    id: 'huggingface',
+    name: 'HuggingFace',
+    import: () => import('@ai-sdk/huggingface'),
+    creatorFunctionName: 'createHuggingFace',
+    supportsImageGeneration: true,
+    aliases: ['hf', 'hugging-face']
   }
 ] as const
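The entry above only declares the provider; it is loaded lazily through the import() thunk and the creatorFunctionName string. A minimal sketch of how such a config entry could be resolved at runtime, purely illustrative and not the repository's actual loader (the ProviderConfig shape is inferred from the fields shown, and resolveProviderCreator is a hypothetical name):

// Shape inferred from the NEW_PROVIDER_CONFIGS entries above; illustrative only.
interface ProviderConfig {
  id: string
  name: string
  import: () => Promise<unknown>
  creatorFunctionName: string
  supportsImageGeneration: boolean
  aliases?: string[]
}

// Hypothetical resolver: find a config by id or alias, lazy-load its package,
// and return the named factory (e.g. createHuggingFace from '@ai-sdk/huggingface').
async function resolveProviderCreator(configs: readonly ProviderConfig[], idOrAlias: string) {
  const config = configs.find((c) => c.id === idOrAlias || c.aliases?.includes(idOrAlias))
  if (!config) throw new Error(`Unknown provider: ${idOrAlias}`)

  const mod = (await config.import()) as Record<string, unknown>
  const creator = mod[config.creatorFunctionName]
  if (typeof creator !== 'function') {
    throw new Error(`${config.creatorFunctionName} is not exported by the package for ${config.id}`)
  }
  return creator as (options?: Record<string, unknown>) => unknown
}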
@@ -90,7 +90,9 @@ export function buildProviderOptions(
         serviceTier: serviceTierSetting
       }
       break
+    case 'huggingface':
+      providerSpecificOptions = buildOpenAIProviderOptions(assistant, model, capabilities)
+      break
     case 'anthropic':
       providerSpecificOptions = buildAnthropicProviderOptions(assistant, model, capabilities)
       break
@@ -10,6 +10,7 @@ import {
   isGrok4FastReasoningModel,
   isGrokReasoningModel,
   isOpenAIDeepResearchModel,
+  isOpenAIModel,
   isOpenAIReasoningModel,
   isQwenAlwaysThinkModel,
   isQwenReasoningModel,
@@ -319,6 +320,20 @@ export function getOpenAIReasoningParams(assistant: Assistant, model: Model): Re
   if (!isReasoningModel(model)) {
     return {}
   }
+
+  let reasoningEffort = assistant?.settings?.reasoning_effort
+
+  if (!reasoningEffort) {
+    return {}
+  }
+
+  // Non-OpenAI model, but the provider type is responses / Azure OpenAI
+  if (!isOpenAIModel(model)) {
+    return {
+      reasoningEffort
+    }
+  }
+
   const openAI = getStoreSetting('openAI') as SettingsState['openAI']
   const summaryText = openAI?.summaryText || 'off'
 
@@ -330,16 +345,10 @@ export function getOpenAIReasoningParams(assistant: Assistant, model: Model): Re
     reasoningSummary = summaryText
   }
 
-  let reasoningEffort = assistant?.settings?.reasoning_effort
-
   if (isOpenAIDeepResearchModel(model)) {
     reasoningEffort = 'medium'
   }
 
-  if (!reasoningEffort) {
-    return {}
-  }
-
   // OpenAI reasoning parameters
   if (isSupportedReasoningEffortOpenAIModel(model)) {
     return {
BIN src/renderer/src/assets/images/providers/huggingface.webp Normal file
Binary file not shown.
After Width: | Height: | Size: 27 KiB
@@ -1837,5 +1837,6 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
     provider: 'longcat',
     group: 'LongCat'
   }
-  ]
+  ],
+  huggingface: []
 }
@@ -22,6 +22,7 @@ import GoogleProviderLogo from '@renderer/assets/images/providers/google.png'
 import GPUStackProviderLogo from '@renderer/assets/images/providers/gpustack.svg'
 import GrokProviderLogo from '@renderer/assets/images/providers/grok.png'
 import GroqProviderLogo from '@renderer/assets/images/providers/groq.png'
+import HuggingfaceProviderLogo from '@renderer/assets/images/providers/huggingface.webp'
 import HyperbolicProviderLogo from '@renderer/assets/images/providers/hyperbolic.png'
 import InfiniProviderLogo from '@renderer/assets/images/providers/infini.png'
 import IntelOvmsLogo from '@renderer/assets/images/providers/intel.png'
@@ -653,6 +654,16 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
     models: SYSTEM_MODELS.longcat,
     isSystem: true,
     enabled: false
+  },
+  huggingface: {
+    id: 'huggingface',
+    name: 'Hugging Face',
+    type: 'openai-response',
+    apiKey: '',
+    apiHost: 'https://router.huggingface.co/v1/',
+    models: [],
+    isSystem: true,
+    enabled: false
   }
 } as const
 
@@ -717,7 +728,8 @@ export const PROVIDER_LOGO_MAP: AtLeast<SystemProviderId, string> = {
   'aws-bedrock': AwsProviderLogo,
   poe: 'poe', // use svg icon component
   aionly: AiOnlyProviderLogo,
-  longcat: LongCatProviderLogo
+  longcat: LongCatProviderLogo,
+  huggingface: HuggingfaceProviderLogo
 } as const
 
 export function getProviderLogo(providerId: string) {
@@ -1344,6 +1356,17 @@ export const PROVIDER_URLS: Record<SystemProviderId, ProviderUrls> = {
       docs: 'https://longcat.chat/platform/docs/zh/',
       models: 'https://longcat.chat/platform/docs/zh/APIDocs.html'
     }
+  },
+  huggingface: {
+    api: {
+      url: 'https://router.huggingface.co/v1/'
+    },
+    websites: {
+      official: 'https://huggingface.co/',
+      apiKey: 'https://huggingface.co/settings/tokens',
+      docs: 'https://huggingface.co/docs',
+      models: 'https://huggingface.co/models'
+    }
   }
 }
@@ -88,7 +88,9 @@ const providerKeyMap = {
   zhinao: 'provider.zhinao',
   zhipu: 'provider.zhipu',
   poe: 'provider.poe',
-  aionly: 'provider.aionly'
+  aionly: 'provider.aionly',
+  longcat: 'provider.longcat',
+  huggingface: 'provider.huggingface'
 } as const
 
 /**
@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "Tencent Hunyuan",
   "hyperbolic": "Hyperbolic",
   "infini": "Infini",
   "jina": "Jina",
   "lanyun": "LANYUN",
   "lmstudio": "LM Studio",
+  "longcat": "LongCat AI",
   "minimax": "MiniMax",
   "mistral": "Mistral",
   "modelscope": "ModelScope",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "腾讯混元",
   "hyperbolic": "Hyperbolic",
   "infini": "无问芯穹",
   "jina": "Jina",
   "lanyun": "蓝耘科技",
   "lmstudio": "LM Studio",
+  "longcat": "龙猫",
   "minimax": "MiniMax",
   "mistral": "Mistral",
   "modelscope": "ModelScope 魔搭",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "騰訊混元",
   "hyperbolic": "Hyperbolic",
   "infini": "無問芯穹",
   "jina": "Jina",
   "lanyun": "藍耘",
   "lmstudio": "LM Studio",
+  "longcat": "龍貓",
   "minimax": "MiniMax",
   "mistral": "Mistral",
   "modelscope": "ModelScope 魔搭",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "Tencent Hunyuan",
   "hyperbolic": "Hyperbolic",
   "infini": "Infini-AI",
   "jina": "Jina",
   "lanyun": "Lanyun Technologie",
   "lmstudio": "LM Studio",
+  "longcat": "Meißner Riesenhamster",
   "minimax": "MiniMax",
   "mistral": "Mistral",
   "modelscope": "ModelScope",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "Tencent Hunyuan",
   "hyperbolic": "Υπερβολικός",
   "infini": "Χωρίς Ερώτημα Xin Qiong",
   "jina": "Jina",
   "lanyun": "Λανιούν Τεχνολογία",
   "lmstudio": "LM Studio",
+  "longcat": "Τσίρο",
   "minimax": "MiniMax",
   "mistral": "Mistral",
   "modelscope": "ModelScope Magpie",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "Tencent Hùnyuán",
   "hyperbolic": "Hiperbólico",
   "infini": "Infini",
   "jina": "Jina",
   "lanyun": "Tecnología Lanyun",
   "lmstudio": "Estudio LM",
+  "longcat": "Totoro",
   "minimax": "Minimax",
   "mistral": "Mistral",
   "modelscope": "ModelScope Módulo",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "Tencent HunYuan",
   "hyperbolic": "Hyperbolique",
   "infini": "Sans Frontières Céleste",
   "jina": "Jina",
   "lanyun": "Technologie Lan Yun",
   "lmstudio": "Studio LM",
+  "longcat": "Mon voisin Totoro",
   "minimax": "MiniMax",
   "mistral": "Mistral",
   "modelscope": "ModelScope MoDa",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "ハギングフェイス",
   "hunyuan": "腾讯混元",
   "hyperbolic": "Hyperbolic",
   "infini": "Infini",
   "jina": "Jina",
   "lanyun": "LANYUN",
   "lmstudio": "LM Studio",
+  "longcat": "トトロ",
   "minimax": "MiniMax",
   "mistral": "Mistral",
   "modelscope": "ModelScope",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Compreender",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "Tencent Hún Yuán",
   "hyperbolic": "Hiperbólico",
   "infini": "Infinito",
   "jina": "Jina",
   "lanyun": "Lanyun Tecnologia",
   "lmstudio": "Estúdio LM",
+  "longcat": "Totoro",
   "minimax": "Minimax",
   "mistral": "Mistral",
   "modelscope": "ModelScope MôDá",

@@ -2345,12 +2345,14 @@
   "gpustack": "GPUStack",
   "grok": "Grok",
   "groq": "Groq",
+  "huggingface": "Hugging Face",
   "hunyuan": "Tencent Hunyuan",
   "hyperbolic": "Hyperbolic",
   "infini": "Infini",
   "jina": "Jina",
   "lanyun": "LANYUN",
   "lmstudio": "LM Studio",
+  "longcat": "Тоторо",
   "minimax": "MiniMax",
   "mistral": "Mistral",
   "modelscope": "ModelScope",
@@ -65,7 +65,7 @@ const persistedReducer = persistReducer(
   {
     key: 'cherry-studio',
     storage,
-    version: 166,
+    version: 167,
     blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs'],
     migrate
   },
|
|||||||
logger.error('migrate 166 error', error as Error)
|
logger.error('migrate 166 error', error as Error)
|
||||||
return state
|
return state
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
'167': (state: RootState) => {
|
||||||
|
try {
|
||||||
|
addProvider(state, 'huggingface')
|
||||||
|
return state
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('migrate 167 error', error as Error)
|
||||||
|
return state
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
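Migration '167' above calls an addProvider helper whose implementation is not part of this diff. A generic, dependency-free sketch of one safe way such a step could add the new entry to a list of providers; ensureProvider and MinimalProvider are hypothetical names, not the repository's addProvider, and the real persisted state shape may differ:

// Hypothetical sketch: insert a provider entry exactly once.
interface MinimalProvider {
  id: string
  enabled: boolean
}

function ensureProvider(providers: MinimalProvider[], candidate: MinimalProvider): MinimalProvider[] {
  // Guard against duplicates so re-running the step never adds a second 'huggingface' entry.
  if (providers.some((p) => p.id === candidate.id)) return providers
  return [...providers, candidate]
}

// e.g. ensureProvider(existingProviders, { id: 'huggingface', enabled: false })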
@@ -162,7 +162,8 @@ export const SystemProviderIds = {
   'aws-bedrock': 'aws-bedrock',
   poe: 'poe',
   aionly: 'aionly',
-  longcat: 'longcat'
+  longcat: 'longcat',
+  huggingface: 'huggingface'
 } as const
 
 export type SystemProviderId = keyof typeof SystemProviderIds
27 yarn.lock

@@ -180,6 +180,32 @@ __metadata:
   languageName: node
   linkType: hard
 
+"@ai-sdk/huggingface@npm:0.0.4":
+  version: 0.0.4
+  resolution: "@ai-sdk/huggingface@npm:0.0.4"
+  dependencies:
+    "@ai-sdk/openai-compatible": "npm:1.0.22"
+    "@ai-sdk/provider": "npm:2.0.0"
+    "@ai-sdk/provider-utils": "npm:3.0.12"
+  peerDependencies:
+    zod: ^3.25.76 || ^4
+  checksum: 10c0/756b8f820b89bf9550c9281dfe2a1a813477dec82be5557e236e8b5eaaf0204b65a65925ad486b7576c687f33c709f6d99fd4fc87a46b1add210435b08834986
+  languageName: node
+  linkType: hard
+
+"@ai-sdk/huggingface@patch:@ai-sdk/huggingface@npm%3A0.0.4#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.4-8080836bc1.patch":
+  version: 0.0.4
+  resolution: "@ai-sdk/huggingface@patch:@ai-sdk/huggingface@npm%3A0.0.4#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.4-8080836bc1.patch::version=0.0.4&hash=ceb48e"
+  dependencies:
+    "@ai-sdk/openai-compatible": "npm:1.0.22"
+    "@ai-sdk/provider": "npm:2.0.0"
+    "@ai-sdk/provider-utils": "npm:3.0.12"
+  peerDependencies:
+    zod: ^3.25.76 || ^4
+  checksum: 10c0/4726a10de7a6fd554b58d62f79cd6514c2cc5166052e035ba1517e224a310ddb355a5d2922ee8507fb8d928d6d5b2b102d3d221af5a44b181e436e6b64382087
+  languageName: node
+  linkType: hard
+
 "@ai-sdk/mistral@npm:^2.0.19":
   version: 2.0.19
   resolution: "@ai-sdk/mistral@npm:2.0.19"
@@ -13853,6 +13879,7 @@ __metadata:
     "@agentic/tavily": "npm:^7.3.3"
     "@ai-sdk/amazon-bedrock": "npm:^3.0.35"
     "@ai-sdk/google-vertex": "npm:^3.0.40"
+    "@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.4#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.4-8080836bc1.patch"
     "@ai-sdk/mistral": "npm:^2.0.19"
     "@ai-sdk/perplexity": "npm:^2.0.13"
     "@ant-design/v5-patch-for-react-19": "npm:^1.0.3"