feat: change default provider

commit 5cb67e00a6
parent 350f13e97c
Author: kangfenmao
Date:   2024-09-03 20:03:09 +08:00
3 changed files with 62 additions and 62 deletions
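
Summary of the change: the default LLM provider switches from OpenAI to Silicon (SiliconFlow), the silicon model list moves ahead of openai in SYSTEM_MODELS, the aihubmix provider entry moves to the end of the provider list, and the chat error Alert gets smaller text and padding. Below is a minimal sketch of the resulting defaults, reconstructed from the llm store diff further down; the newDefaults and firstProvider names are only for illustration and do not appear in the code.

// Sketch only: effective defaults after this commit, per the llm store diff below.
// SYSTEM_MODELS.silicon[0] is Qwen/Qwen2-7B-Instruct, so it becomes the default,
// topic-naming and translate model; the Silicon provider ships enabled by default.
const newDefaults = {
  defaultModel: SYSTEM_MODELS.silicon[0], // Qwen2-7B-Instruct
  topicNamingModel: SYSTEM_MODELS.silicon[0],
  translateModel: SYSTEM_MODELS.silicon[0],
  firstProvider: {
    id: 'silicon',
    name: 'Silicon',
    apiHost: 'https://api.siliconflow.cn',
    enabled: true
  }
}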

File 1 of 3

@@ -5,6 +5,38 @@ const EMBEDDING_REGEX = /embedding/i
 export const SYSTEM_MODELS: Record<string, Model[]> = {
   ollama: [],
+  silicon: [
+    {
+      id: 'Qwen/Qwen2-7B-Instruct',
+      provider: 'silicon',
+      name: 'Qwen2-7B-Instruct',
+      group: 'Qwen2'
+    },
+    {
+      id: 'Qwen/Qwen2-72B-Instruct',
+      provider: 'silicon',
+      name: 'Qwen2-72B-Instruct',
+      group: 'Qwen2'
+    },
+    {
+      id: 'THUDM/glm-4-9b-chat',
+      provider: 'silicon',
+      name: 'GLM-4-9B-Chat',
+      group: 'GLM'
+    },
+    {
+      id: 'deepseek-ai/DeepSeek-V2-Chat',
+      provider: 'silicon',
+      name: 'DeepSeek-V2-Chat',
+      group: 'DeepSeek'
+    },
+    {
+      id: 'deepseek-ai/DeepSeek-Coder-V2-Instruct',
+      provider: 'silicon',
+      name: 'DeepSeek-Coder-V2-Instruct',
+      group: 'DeepSeek'
+    }
+  ],
   openai: [
     {
       id: 'gpt-4o',
@@ -71,38 +103,6 @@ export const SYSTEM_MODELS: Record<string, Model[]> = {
       group: 'Claude 3'
     }
   ],
-  silicon: [
-    {
-      id: 'Qwen/Qwen2-7B-Instruct',
-      provider: 'silicon',
-      name: 'Qwen2-7B-Instruct',
-      group: 'Qwen2'
-    },
-    {
-      id: 'Qwen/Qwen2-72B-Instruct',
-      provider: 'silicon',
-      name: 'Qwen2-72B-Instruct',
-      group: 'Qwen2'
-    },
-    {
-      id: 'THUDM/glm-4-9b-chat',
-      provider: 'silicon',
-      name: 'GLM-4-9B-Chat',
-      group: 'GLM'
-    },
-    {
-      id: 'deepseek-ai/DeepSeek-V2-Chat',
-      provider: 'silicon',
-      name: 'DeepSeek-V2-Chat',
-      group: 'DeepSeek'
-    },
-    {
-      id: 'deepseek-ai/DeepSeek-Coder-V2-Instruct',
-      provider: 'silicon',
-      name: 'DeepSeek-Coder-V2-Instruct',
-      group: 'DeepSeek'
-    }
-  ],
   deepseek: [
     {
       id: 'deepseek-chat',

File 2 of 3

@@ -110,10 +110,10 @@ const MessageItem: FC<Props> = ({ message, index, showMenu, onDeleteMessage }) =
   if (message.status === 'error') {
     return (
       <Alert
-        message={t('error.chat.response')}
+        message={<div style={{ fontSize: 14 }}>{t('error.chat.response')}</div>}
         description={<Markdown message={message} />}
         type="error"
-        style={{ marginBottom: 15 }}
+        style={{ marginBottom: 15, padding: 10, fontSize: 12 }}
       />
     )
   }

File 3 of 3

@@ -19,25 +19,25 @@ export interface LlmState {
 }
 const initialState: LlmState = {
-  defaultModel: SYSTEM_MODELS.openai[0],
-  topicNamingModel: SYSTEM_MODELS.openai[0],
-  translateModel: SYSTEM_MODELS.openai[0],
+  defaultModel: SYSTEM_MODELS.silicon[0],
+  topicNamingModel: SYSTEM_MODELS.silicon[0],
+  translateModel: SYSTEM_MODELS.silicon[0],
   providers: [
     {
-      id: 'openai',
-      name: 'OpenAI',
+      id: 'silicon',
+      name: 'Silicon',
       apiKey: '',
-      apiHost: 'https://api.openai.com',
-      models: SYSTEM_MODELS.openai,
+      apiHost: 'https://api.siliconflow.cn',
+      models: SYSTEM_MODELS.silicon,
       isSystem: true,
       enabled: true
     },
     {
-      id: 'gemini',
-      name: 'Gemini',
+      id: 'ollama',
+      name: 'Ollama',
       apiKey: '',
-      apiHost: 'https://generativelanguage.googleapis.com',
-      models: SYSTEM_MODELS.gemini,
+      apiHost: 'http://localhost:11434/v1/',
+      models: SYSTEM_MODELS.ollama,
       isSystem: true,
       enabled: false
     },
@@ -51,20 +51,20 @@ const initialState: LlmState = {
       enabled: false
     },
     {
-      id: 'ollama',
-      name: 'Ollama',
+      id: 'openai',
+      name: 'OpenAI',
       apiKey: '',
-      apiHost: 'http://localhost:11434/v1/',
-      models: SYSTEM_MODELS.ollama,
+      apiHost: 'https://api.openai.com',
+      models: SYSTEM_MODELS.openai,
       isSystem: true,
       enabled: false
     },
     {
-      id: 'silicon',
-      name: 'Silicon',
+      id: 'gemini',
+      name: 'Gemini',
       apiKey: '',
-      apiHost: 'https://api.siliconflow.cn',
-      models: SYSTEM_MODELS.silicon,
+      apiHost: 'https://generativelanguage.googleapis.com',
+      models: SYSTEM_MODELS.gemini,
       isSystem: true,
       enabled: false
     },
@@ -149,15 +149,6 @@ const initialState: LlmState = {
       isSystem: true,
       enabled: false
     },
-    {
-      id: 'aihubmix',
-      name: 'AiHubMix',
-      apiKey: '',
-      apiHost: 'https://aihubmix.com',
-      models: SYSTEM_MODELS.aihubmix,
-      isSystem: true,
-      enabled: false
-    },
     {
       id: 'graphrag-kylin-mountain',
       name: 'GraphRAG',
@@ -184,6 +175,15 @@ const initialState: LlmState = {
       models: SYSTEM_MODELS.groq,
       isSystem: true,
       enabled: false
+    },
+    {
+      id: 'aihubmix',
+      name: 'AiHubMix',
+      apiKey: '',
+      apiHost: 'https://aihubmix.com',
+      models: SYSTEM_MODELS.aihubmix,
+      isSystem: true,
+      enabled: false
     }
   ],
   settings: {