fix(aichat): adapt to Bailian (百炼)

源文雨 2025-09-10 10:32:30 +08:00
parent fb090839d6
commit 08e02ab730
4 changed files with 10 additions and 6 deletions

data

@@ -1 +1 @@
-Subproject commit 4f751a1cda692b5b8cb909ce1376a3a0ea0d57cc
+Subproject commit 6bcac0faaba9dc8664a84ae4de46154cfd9e740c

go.mod

@@ -22,7 +22,7 @@ require (
github.com/disintegration/imaging v1.6.2
github.com/fumiama/ahsai v0.1.0
github.com/fumiama/cron v1.3.0
-github.com/fumiama/deepinfra v0.0.0-20250812083039-f1b27f21d8c9
+github.com/fumiama/deepinfra v0.0.0-20250910022828-8cde75e137f4
github.com/fumiama/go-base16384 v1.7.0
github.com/fumiama/go-registry v0.2.7
github.com/fumiama/gotracemoe v0.0.3
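
Note (not part of the commit): a dependency bump like this is usually produced with something along the lines of "go get github.com/fumiama/deepinfra@8cde75e137f4" followed by "go mod tidy", which resolves the commit hash to the pseudo-version above and rewrites the matching go.sum entries shown below.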

go.sum

@@ -59,8 +59,8 @@ github.com/fumiama/ahsai v0.1.0 h1:LXD61Kaj6kJHa3AEGsLIfKNzcgaVxg7JB72OR4yNNZ4=
github.com/fumiama/ahsai v0.1.0/go.mod h1:fFeNnqgo44i8FIaguK659aQryuZeFy+4klYLQu/rfdk=
github.com/fumiama/cron v1.3.0 h1:ZWlwuexF+HQHl3cYytEE5HNwD99q+3vNZF1GrEiXCFo=
github.com/fumiama/cron v1.3.0/go.mod h1:bz5Izvgi/xEUI8tlBN8BI2jr9Moo8N4or0KV8xXuPDY=
-github.com/fumiama/deepinfra v0.0.0-20250812083039-f1b27f21d8c9 h1:X2h8RnCgC04LmwBoizYbFawXh/h6CouXmhYtaVuUn7k=
-github.com/fumiama/deepinfra v0.0.0-20250812083039-f1b27f21d8c9/go.mod h1:wW05PQSn8mo1mZIoa6LBUE+3xIBjkoONvnfPTV5ZOhY=
+github.com/fumiama/deepinfra v0.0.0-20250910022828-8cde75e137f4 h1:cV3HXXLNudIL9rIEYt1RCgl6H4703nE3+jL4pJNsRtc=
+github.com/fumiama/deepinfra v0.0.0-20250910022828-8cde75e137f4/go.mod h1:wW05PQSn8mo1mZIoa6LBUE+3xIBjkoONvnfPTV5ZOhY=
github.com/fumiama/go-base16384 v1.7.0 h1:6fep7XPQWxRlh4Hu+KsdH+6+YdUp+w6CwRXtMWSsXCA=
github.com/fumiama/go-base16384 v1.7.0/go.mod h1:OEn+947GV5gsbTAnyuUW/SrfxJYUdYupSIQXOuGOcXM=
github.com/fumiama/go-registry v0.2.7 h1:tLEqgEpsiybQMqBv0dLHm5leia/z1DhajMupwnOHeNs=

aichat plugin Go source (file path not shown)

@@ -126,7 +126,9 @@ func init() {
mod = model.NewOpenAI(
cfg.ModelName, cfg.Separator,
temperature, topp, maxn,
-)
+).SetExtra(&map[string]bool{
+	"enable_thinking": false,
+})
case 1:
mod = model.NewOLLaMA(
cfg.ModelName, cfg.Separator,
@@ -516,7 +518,9 @@ func llmchat(prompt string, temp int64) (string, error) {
mod = model.NewOpenAI(
cfg.ModelName, cfg.Separator,
temperature, topp, maxn,
-)
+).SetExtra(&map[string]bool{
+	"enable_thinking": false,
+})
case 1:
mod = model.NewOLLaMA(
cfg.ModelName, cfg.Separator,
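
Background note: Bailian (百炼, Alibaba Cloud Model Studio) exposes an OpenAI-compatible endpoint, and for Qwen thinking-capable models it expects enable_thinking to be explicitly disabled on ordinary non-streaming chat completions, which is presumably why both construction sites now attach the flag via SetExtra. Below is a minimal, self-contained sketch of the effect, not the deepinfra API: it only shows how such an extra flag ends up merged into an otherwise standard chat-completions request body. The buildBody helper, the qwen-plus model name, and the prompt are illustrative assumptions.

// Illustrative sketch only (not deepinfra's API): merge an extra
// "enable_thinking": false flag into a standard OpenAI-style chat
// completion request body.
package main

import (
	"encoding/json"
	"fmt"
)

// buildBody (hypothetical helper) assembles the usual model/messages
// fields and then copies the extra flags on top of them.
func buildBody(model, prompt string, extra map[string]bool) ([]byte, error) {
	body := map[string]any{
		"model": model,
		"messages": []map[string]string{
			{"role": "user", "content": prompt},
		},
	}
	for k, v := range extra {
		body[k] = v // e.g. enable_thinking: false
	}
	return json.MarshalIndent(body, "", "  ")
}

func main() {
	b, err := buildBody("qwen-plus", "hello", map[string]bool{"enable_thinking": false})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}

Running the sketch prints a JSON body with the usual model/messages fields plus "enable_thinking": false at the top level, which is the shape the compatible-mode endpoint would receive.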