feat(aichat): add agent memory

源文雨
2026-01-03 23:36:49 +08:00
parent 91d512498d
commit 57c41a7db2
11 changed files with 512 additions and 922 deletions


@@ -1,302 +0,0 @@
package aichat
import (
"errors"
"fmt"
"strconv"
"strings"
ctrl "github.com/FloatTech/zbpctrl"
"github.com/FloatTech/zbputils/chat"
"github.com/fumiama/deepinfra"
"github.com/fumiama/deepinfra/model"
"github.com/sirupsen/logrus"
zero "github.com/wdvxdr1123/ZeroBot"
"github.com/wdvxdr1123/ZeroBot/message"
)
var (
cfg = newconfig()
)
var (
apitypes = map[string]uint8{
"OpenAI": 0,
"OLLaMA": 1,
"GenAI": 2,
}
apilist = [3]string{"OpenAI", "OLLaMA", "GenAI"}
)
// ModelType 支持打印 string 并生成 protocol
type ModelType int
func newModelType(typ string) (ModelType, error) {
t, ok := apitypes[typ]
if !ok {
return 0, errors.New("未知类型 " + typ)
}
return ModelType(t), nil
}
func (mt ModelType) String() string {
return apilist[mt]
}
func (mt ModelType) protocol(modn string, temp float32, topp float32, maxn uint) (mod model.Protocol, err error) {
switch mt {
case 0:
mod = model.NewOpenAI(
modn, cfg.Separator,
temp, topp, maxn,
)
case 1:
mod = model.NewOLLaMA(
modn, cfg.Separator,
temp, topp, maxn,
)
case 2:
mod = model.NewGenAI(
modn,
temp, topp, maxn,
)
default:
err = errors.New("unsupported model type " + strconv.Itoa(int(mt)))
}
return
}
// ModelBool 支持打印成 "是/否"
type ModelBool bool
func (mb ModelBool) String() string {
if mb {
return "是"
}
return "否"
}
// ModelKey 支持隐藏密钥
type ModelKey string
func (mk ModelKey) String() string {
if len(mk) == 0 {
return "未设置"
}
if len(mk) <= 4 {
return "****"
}
key := string(mk)
return key[:2] + strings.Repeat("*", len(key)-4) + key[len(key)-2:]
}
type config struct {
ModelName string
ImageModelName string
AgentModelName string
Type ModelType
ImageType ModelType
AgentType ModelType
MaxN uint
TopP float32
SystemP string
API string
ImageAPI string
AgentAPI string
Key ModelKey
ImageKey ModelKey
AgentKey ModelKey
Separator string
NoSystemP ModelBool
}
func newconfig() config {
return config{
ModelName: model.ModelDeepDeek,
SystemP: chat.SystemPrompt,
API: deepinfra.OpenAIDeepInfra,
}
}
func (c *config) String() string {
topp, maxn := c.mparams()
sb := strings.Builder{}
sb.WriteString(fmt.Sprintf("• 模型名:%s\n", c.ModelName))
sb.WriteString(fmt.Sprintf("• 图像模型名:%s\n", c.ImageModelName))
sb.WriteString(fmt.Sprintf("• Agent模型名:%s\n", c.AgentModelName))
sb.WriteString(fmt.Sprintf("• 接口类型:%v\n", c.Type))
sb.WriteString(fmt.Sprintf("• 图像接口类型:%v\n", c.ImageType))
sb.WriteString(fmt.Sprintf("• Agent接口类型:%v\n", c.AgentType))
sb.WriteString(fmt.Sprintf("• 最大长度:%d\n", maxn))
sb.WriteString(fmt.Sprintf("• TopP:%.1f\n", topp))
sb.WriteString(fmt.Sprintf("• 系统提示词:%s\n", c.SystemP))
sb.WriteString(fmt.Sprintf("• 接口地址:%s\n", c.API))
sb.WriteString(fmt.Sprintf("• 图像接口地址:%s\n", c.ImageAPI))
sb.WriteString(fmt.Sprintf("• Agent接口地址:%s\n", c.AgentAPI))
sb.WriteString(fmt.Sprintf("• 密钥:%v\n", c.Key))
sb.WriteString(fmt.Sprintf("• 图像密钥:%v\n", c.ImageKey))
sb.WriteString(fmt.Sprintf("• Agent密钥:%v\n", c.AgentKey))
sb.WriteString(fmt.Sprintf("• 分隔符:%s\n", c.Separator))
sb.WriteString(fmt.Sprintf("• 支持系统提示词:%v\n", !c.NoSystemP))
return sb.String()
}
func (c *config) isvalid() bool {
return c.ModelName != "" && c.API != "" && c.Key != ""
}
// 获取全局模型参数TopP和最大长度
func (c *config) mparams() (topp float32, maxn uint) {
// 处理TopP参数
topp = c.TopP
if topp == 0 {
topp = 0.9
}
// 处理最大长度参数
maxn = c.MaxN
if maxn == 0 {
maxn = 4096
}
return topp, maxn
}
func ensureconfig(ctx *zero.Ctx) bool {
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
return false
}
if !cfg.isvalid() {
err := c.GetExtra(&cfg)
if err != nil {
logrus.Warnln("ERROR: get extra err:", err)
}
if !cfg.isvalid() {
cfg = newconfig()
}
}
return true
}
func newextrasetstr[T ~string](ptr *T) func(ctx *zero.Ctx) {
return func(ctx *zero.Ctx) {
args := strings.TrimSpace(ctx.State["args"].(string))
if args == "" {
ctx.SendChain(message.Text("ERROR: empty args"))
return
}
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
ctx.SendChain(message.Text("ERROR: no such plugin"))
return
}
*ptr = T(args)
err := c.SetExtra(&cfg)
if err != nil {
ctx.SendChain(message.Text("ERROR: set extra err: ", err))
return
}
ctx.SendChain(message.Text("成功"))
}
}
func newextrasetbool[T ~bool](ptr *T) func(ctx *zero.Ctx) {
return func(ctx *zero.Ctx) {
args := ctx.State["regex_matched"].([]string)
isno := args[1] == "不"
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
ctx.SendChain(message.Text("ERROR: no such plugin"))
return
}
*ptr = T(isno)
err := c.SetExtra(&cfg)
if err != nil {
ctx.SendChain(message.Text("ERROR: set extra err: ", err))
return
}
ctx.SendChain(message.Text("成功"))
}
}
func newextrasetuint(ptr *uint) func(ctx *zero.Ctx) {
return func(ctx *zero.Ctx) {
args := strings.TrimSpace(ctx.State["args"].(string))
if args == "" {
ctx.SendChain(message.Text("ERROR: empty args"))
return
}
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
ctx.SendChain(message.Text("ERROR: no such plugin"))
return
}
n, err := strconv.ParseUint(args, 10, 64)
if err != nil {
ctx.SendChain(message.Text("ERROR: parse args err: ", err))
return
}
*ptr = uint(n)
err = c.SetExtra(&cfg)
if err != nil {
ctx.SendChain(message.Text("ERROR: set extra err: ", err))
return
}
ctx.SendChain(message.Text("成功"))
}
}
func newextrasetfloat32(ptr *float32) func(ctx *zero.Ctx) {
return func(ctx *zero.Ctx) {
args := strings.TrimSpace(ctx.State["args"].(string))
if args == "" {
ctx.SendChain(message.Text("ERROR: empty args"))
return
}
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
ctx.SendChain(message.Text("ERROR: no such plugin"))
return
}
n, err := strconv.ParseFloat(args, 32)
if err != nil {
ctx.SendChain(message.Text("ERROR: parse args err: ", err))
return
}
*ptr = float32(n)
err = c.SetExtra(&cfg)
if err != nil {
ctx.SendChain(message.Text("ERROR: set extra err: ", err))
return
}
ctx.SendChain(message.Text("成功"))
}
}
func newextrasetmodeltype(ptr *ModelType) func(ctx *zero.Ctx) {
return func(ctx *zero.Ctx) {
args := strings.TrimSpace(ctx.State["args"].(string))
if args == "" {
ctx.SendChain(message.Text("ERROR: empty args"))
return
}
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
ctx.SendChain(message.Text("ERROR: no such plugin"))
return
}
typ, err := newModelType(args)
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
*ptr = typ
err = c.SetExtra(&cfg)
if err != nil {
ctx.SendChain(message.Text("ERROR: set extra err: ", err))
return
}
ctx.SendChain(message.Text("成功"))
}
}
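
The ModelKey.String method above hides credentials by keeping only the first and last two characters of a long key; here is a minimal standalone sketch of that rule (the maskKey helper and the sample keys are illustrative, not part of this commit):

package main

import (
	"fmt"
	"strings"
)

// maskKey mirrors ModelKey.String above: empty keys report "未设置",
// keys of four bytes or fewer are fully starred, and longer keys keep
// only the first and last two characters.
func maskKey(key string) string {
	if len(key) == 0 {
		return "未设置"
	}
	if len(key) <= 4 {
		return "****"
	}
	return key[:2] + strings.Repeat("*", len(key)-4) + key[len(key)-2:]
}

func main() {
	fmt.Println(maskKey(""))                // 未设置
	fmt.Println(maskKey("abcd"))            // ****
	fmt.Println(maskKey("sk-1234567890ab")) // starts with "sk", ends with "ab", middle starred
}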


@@ -1,18 +1,14 @@
// Package aichat OpenAI聊天和群聊总结
// Package aichat 大模型聊天和Agent
package aichat
import (
"encoding/json"
"math/rand"
"strconv"
"strings"
"time"
"github.com/fumiama/deepinfra"
"github.com/fumiama/deepinfra/model"
goba "github.com/fumiama/go-onebot-agent"
"github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
zero "github.com/wdvxdr1123/ZeroBot"
"github.com/wdvxdr1123/ZeroBot/extension/single"
@@ -23,7 +19,6 @@ import (
ctrl "github.com/FloatTech/zbpctrl"
"github.com/FloatTech/zbputils/chat"
"github.com/FloatTech/zbputils/control"
"github.com/FloatTech/zbputils/ctxext"
)
var (
@@ -31,27 +26,8 @@ var (
en = control.AutoRegister(&ctrl.Options[*zero.Ctx]{
DisableOnDefault: false,
Extra: control.ExtraFromString("aichat"),
Brief: "OpenAI聊天",
Help: "- 设置AI聊天触发概率10\n" +
"- 设置AI聊天温度80\n" +
"- 设置AI聊天(|识图|Agent)接口类型[OpenAI|OLLaMA|GenAI]\n" +
"- 设置AI聊天(不)使用Agent模式\n" +
"- 设置AI聊天(不)支持系统提示词\n" +
"- 设置AI聊天(|识图|Agent)接口地址https://api.siliconflow.cn/v1/chat/completions\n" +
"- 设置AI聊天(|识图|Agent)密钥xxx\n" +
"- 设置AI聊天(|识图|Agent)模型名Qwen/Qwen3-8B\n" +
"- 查看AI聊天系统提示词\n" +
"- 重置AI聊天系统提示词\n" +
"- 设置AI聊天系统提示词xxx\n" +
"- 设置AI聊天分隔符</think>(留空则清除)\n" +
"- 设置AI聊天(不)响应AT\n" +
"- 设置AI聊天最大长度4096\n" +
"- 设置AI聊天TopP 0.9\n" +
"- 设置AI聊天(不)以AI语音输出\n" +
"- 查看AI聊天配置\n" +
"- 重置AI聊天\n" +
"- 群聊总结 [消息数目]|群聊总结 1000\n" +
"- /gpt [内容] (使用大模型聊天)\n",
Brief: "大模型聊天和Agent",
Help: "- (随意聊天, 概率匹配)",
PrivateDataFolder: "aichat",
}).ApplySingle(single.New(
@@ -66,46 +42,50 @@ var (
)
var (
limit = ctxext.NewLimiterManager(time.Second*30, 1)
fastfailnorecord = false
)
func init() {
en.OnMessage(ensureconfig, func(ctx *zero.Ctx) bool {
en.OnMessage(chat.EnsureConfig, func(ctx *zero.Ctx) bool {
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
stor, err := newstorage(ctx, gid)
if err != nil {
logrus.Warnln("ERROR: ", err)
stor, ok := ctx.State[zero.StateKeyPrefixKeep+"aichatcfg_stor__"].(chat.Storage)
if !ok {
logrus.Warnln("ERROR: cannot get stor")
return false
}
ctx.State["__aichat_stor__"] = stor
return ctx.ExtractPlainText() != "" &&
(!stor.noreplyat() || (stor.noreplyat() && !ctx.Event.IsToMe))
if !(ctx.ExtractPlainText() != "" &&
(!stor.NoReplyAt() || (stor.NoReplyAt() && !ctx.Event.IsToMe))) {
return false
}
rate := stor.Rate()
if !ctx.Event.IsToMe && rand.Intn(100) >= int(rate) {
return false
}
if chat.AC.Key == "" {
logrus.Warnln("ERROR: get extra err: empty key")
return false
}
if ctx.Event.IsToMe {
ctx.Block()
}
return true
}).SetBlock(false).Handle(func(ctx *zero.Ctx) {
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
stor := ctx.State["__aichat_stor__"].(storage)
rate := stor.rate()
if !ctx.Event.IsToMe && rand.Intn(100) >= int(rate) {
return
}
if ctx.Event.IsToMe {
ctx.Block()
}
if cfg.Key == "" {
logrus.Warnln("ERROR: get extra err: empty key")
return
}
temperature := stor.temp()
topp, maxn := cfg.mparams()
stor := ctx.State[zero.StateKeyPrefixKeep+"aichatcfg_stor__"].(chat.Storage)
temperature := stor.Temp()
topp, maxn := chat.AC.MParams()
if !stor.noagent() && cfg.AgentAPI != "" && cfg.AgentModelName != "" {
x := deepinfra.NewAPI(cfg.AgentAPI, string(cfg.AgentKey))
mod, err := cfg.Type.protocol(cfg.AgentModelName, temperature, topp, maxn)
logrus.Debugln("[aichat] agent mode test: noagent", stor.NoAgent(), "hasapi", chat.AC.AgentAPI != "", "hasmodel", chat.AC.AgentModelName != "")
if !stor.NoAgent() && chat.AC.AgentAPI != "" && chat.AC.AgentModelName != "" {
logrus.Debugln("[aichat] enter agent mode")
x := deepinfra.NewAPI(chat.AC.AgentAPI, string(chat.AC.AgentKey))
mod, err := chat.AC.Type.Protocol(chat.AC.AgentModelName, temperature, topp, maxn)
if err != nil {
logrus.Warnln("ERROR: ", err)
return
@@ -117,24 +97,35 @@ func init() {
role = goba.PermRoleOwner
}
}
ag := chat.AgentOf(ctx.Event.SelfID)
if cfg.ImageAPI != "" && !ag.CanViewImage() {
mod, err := cfg.ImageType.protocol(cfg.ImageModelName, temperature, topp, maxn)
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
logrus.Warnln("ERROR: cannot get ctrl manager")
}
ag := chat.AgentOf(ctx.Event.SelfID, c.Service)
logrus.Debugln("[aichat] got agent")
if chat.AC.ImageAPI != "" && !ag.CanViewImage() {
mod, err := chat.AC.ImageType.Protocol(chat.AC.ImageModelName, temperature, topp, maxn)
if err != nil {
logrus.Warnln("ERROR: ", err)
return
}
ag.SetViewImageAPI(deepinfra.NewAPI(cfg.ImageAPI, string(cfg.ImageKey)), mod)
ag.SetViewImageAPI(deepinfra.NewAPI(chat.AC.ImageAPI, string(chat.AC.ImageKey)), mod)
logrus.Debugln("[aichat] agent set img")
}
ctx.NoTimeout()
logrus.Debugln("[aichat] agent set no timeout")
hasresp := false
for i := 0; i < 8; i++ { // 最大运行 8 轮,因为问答上下文只有 16
reqs := chat.CallAgent(ag, zero.SuperUserPermission(ctx), x, mod, gid, role)
if len(reqs) == 0 {
logrus.Debugln("[aichat] agent call got empty response")
break
}
hasresp = true
for _, req := range reqs {
if req.Action == goba.SVM { // is a fake action
continue
}
resp := ctx.CallAction(req.Action, req.Params)
logrus.Infoln("[aichat] agent get resp:", reqs)
ag.AddResponse(gid, &goba.APIResponse{
@@ -147,19 +138,19 @@ func init() {
}
}
if hasresp {
ag.AddTerminus(gid)
return
}
// no response, fall back to normal chat
logrus.Debugln("[aichat] agent fell back to normal chat")
}
x := deepinfra.NewAPI(cfg.API, string(cfg.Key))
mod, err := cfg.Type.protocol(cfg.ModelName, temperature, topp, maxn)
x := deepinfra.NewAPI(chat.AC.API, string(chat.AC.Key))
mod, err := chat.AC.Type.Protocol(chat.AC.ModelName, temperature, topp, maxn)
if err != nil {
logrus.Warnln("ERROR: ", err)
return
}
data, err := x.Request(chat.GetChatContext(mod, gid, cfg.SystemP, bool(cfg.NoSystemP)))
data, err := x.Request(chat.GetChatContext(mod, gid, chat.AC.SystemP, bool(chat.AC.NoSystemP)))
if err != nil {
logrus.Warnln("[aichat] post err:", err)
return
@@ -182,7 +173,7 @@ func init() {
logrus.Infoln("[aichat] 回复内容:", t)
recCfg := airecord.GetConfig()
record := ""
if !fastfailnorecord && !stor.norecord() {
if !fastfailnorecord && !stor.NoRecord() {
record = ctx.GetAIRecord(recCfg.ModelID, recCfg.Customgid, t)
if record != "" {
ctx.SendChain(message.Record(record))
@@ -199,293 +190,4 @@ func init() {
}
}
})
en.OnPrefix("设置AI聊天触发概率", zero.AdminPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBitmapHandler(bitmaprate, 0, 100))
en.OnPrefix("设置AI聊天温度", zero.AdminPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBitmapHandler(bitmaptemp, 0, 100))
en.OnPrefix("设置AI聊天接口类型", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetmodeltype(&cfg.Type))
en.OnPrefix("设置AI聊天识图接口类型", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetmodeltype(&cfg.ImageType))
en.OnPrefix("设置AI聊天Agent接口类型", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetmodeltype(&cfg.AgentType))
en.OnPrefix("设置AI聊天接口地址", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.API))
en.OnPrefix("设置AI聊天识图接口地址", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.ImageAPI))
en.OnPrefix("设置AI聊天Agent接口地址", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.AgentAPI))
en.OnPrefix("设置AI聊天密钥", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.Key))
en.OnPrefix("设置AI聊天识图密钥", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.ImageKey))
en.OnPrefix("设置AI聊天Agent密钥", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.AgentKey))
en.OnPrefix("设置AI聊天模型名", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.ModelName))
en.OnPrefix("设置AI聊天识图模型名", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.ImageModelName))
en.OnPrefix("设置AI聊天Agent模型名", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.AgentModelName))
en.OnPrefix("设置AI聊天系统提示词", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.SystemP))
en.OnFullMatch("查看AI聊天系统提示词", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).Handle(func(ctx *zero.Ctx) {
ctx.SendChain(message.Text(cfg.SystemP))
})
en.OnFullMatch("重置AI聊天系统提示词", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).Handle(func(ctx *zero.Ctx) {
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
ctx.SendChain(message.Text("ERROR: no such plugin"))
return
}
cfg.SystemP = chat.SystemPrompt
err := c.SetExtra(&cfg)
if err != nil {
ctx.SendChain(message.Text("ERROR: set extra err: ", err))
return
}
ctx.SendChain(message.Text("成功"))
})
en.OnPrefix("设置AI聊天分隔符", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetstr(&cfg.Separator))
en.OnRegex("^设置AI聊天(不)?响应AT$", ensureconfig, zero.SuperUserPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBoolHandler(bitmapnrat))
en.OnRegex("^设置AI聊天(不)?支持系统提示词$", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetbool(&cfg.NoSystemP))
en.OnRegex("^设置AI聊天(不)?使用Agent模式$", ensureconfig, zero.SuperUserPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBoolHandler(bitmapnagt))
en.OnPrefix("设置AI聊天最大长度", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetuint(&cfg.MaxN))
en.OnPrefix("设置AI聊天TopP", ensureconfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(newextrasetfloat32(&cfg.TopP))
en.OnRegex("^设置AI聊天(不)?以AI语音输出$", ensureconfig, zero.AdminPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBoolHandler(bitmapnrec))
en.OnFullMatch("查看AI聊天配置", ensureconfig, zero.SuperUserPermission).SetBlock(true).
Handle(func(ctx *zero.Ctx) {
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
stor, err := newstorage(ctx, gid)
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
ctx.SendChain(
message.Text(
"【当前AI聊天本群配置】\n",
"• 触发概率:", int(stor.rate()), "\n",
"• 温度:", stor.temp(), "\n",
"• 以AI语音输出:", ModelBool(!stor.norecord()), "\n",
"• 使用Agent:", ModelBool(!stor.noagent()), "\n",
"• 响应@:", ModelBool(!stor.noreplyat()), "\n",
),
message.Text("【当前AI聊天全局配置】\n", &cfg),
)
})
en.OnFullMatch("重置AI聊天", ensureconfig, zero.SuperUserPermission).SetBlock(true).Handle(func(ctx *zero.Ctx) {
chat.ResetChat()
ctx.SendChain(message.Text("成功"))
})
// 添加群聊总结功能
en.OnRegex(`^群聊总结\s?(\d*)$`, ensureconfig, zero.OnlyGroup, zero.AdminPermission).SetBlock(true).Limit(limit.LimitByGroup).Handle(func(ctx *zero.Ctx) {
ctx.SendChain(message.Text("少女思考中..."))
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
p, _ := strconv.ParseInt(ctx.State["regex_matched"].([]string)[1], 10, 64)
if p > 1000 {
p = 1000
}
if p == 0 {
p = 200
}
group := ctx.GetGroupInfo(gid, false)
if group.MemberCount == 0 {
ctx.SendChain(message.Text(zero.BotConfig.NickName[0], "未加入", group.Name, "(", gid, "),无法获取总结"))
return
}
var messages []string
h := ctx.GetGroupMessageHistory(gid, 0, p, false)
h.Get("messages").ForEach(func(_, msgObj gjson.Result) bool {
nickname := msgObj.Get("sender.nickname").Str
text := strings.TrimSpace(message.ParseMessageFromString(msgObj.Get("raw_message").Str).ExtractPlainText())
if text != "" {
messages = append(messages, nickname+": "+text)
}
return true
})
if len(messages) == 0 {
ctx.SendChain(message.Text("ERROR: 历史消息为空或者无法获得历史消息"))
return
}
// 构造总结请求提示 (使用通用版省流提示词)
// 使用反引号定义多行字符串,更清晰
promptTemplate := `请对以下群聊对话进行【极简总结】。
要求:
1. 剔除客套与废话,直击主题。
2. 使用 Markdown 列表格式。
3. 按以下结构输出:
- 🎯 核心议题:(一句话概括)
- 💡 关键观点/结论:(提取3-5个重点)
- ✅ 下一步/待办:(如果有,明确谁做什么)
群聊对话内容如下:
`
summaryPrompt := promptTemplate + strings.Join(messages, "\n")
stor, err := newstorage(ctx, gid)
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
// 调用大模型API进行总结
summary, err := llmchat(summaryPrompt, stor.temp())
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
var b strings.Builder
b.WriteString("群 ")
b.WriteString(group.Name)
b.WriteByte('(')
b.WriteString(strconv.FormatInt(gid, 10))
b.WriteString(") 的 ")
b.WriteString(strconv.FormatInt(p, 10))
b.WriteString(" 条消息总结:\n\n")
b.WriteString(summary)
// 分割总结内容为多段按1000字符长度切割
summaryText := b.String()
msg := make(message.Message, 0)
for len(summaryText) > 0 {
if len(summaryText) <= 1000 {
msg = append(msg, ctxext.FakeSenderForwardNode(ctx, message.Text(summaryText)))
break
}
// 查找1000字符内的最后一个换行符尽量在换行处分割
chunk := summaryText[:1000]
lastNewline := strings.LastIndex(chunk, "\n")
if lastNewline > 0 {
chunk = summaryText[:lastNewline+1]
}
msg = append(msg, ctxext.FakeSenderForwardNode(ctx, message.Text(chunk)))
summaryText = summaryText[len(chunk):]
}
if len(msg) > 0 {
ctx.Send(msg)
}
})
// 添加 /gpt 命令处理(同时支持回复消息和直接使用)
en.OnKeyword("/gpt", ensureconfig).SetBlock(true).Handle(func(ctx *zero.Ctx) {
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
text := ctx.MessageString()
var query string
var replyContent string
// 检查是否是回复消息 (使用MessageElement检查而不是CQ码)
for _, elem := range ctx.Event.Message {
if elem.Type == "reply" {
// 提取被回复的消息ID
replyIDStr := elem.Data["id"]
replyID, err := strconv.ParseInt(replyIDStr, 10, 64)
if err == nil {
// 获取被回复的消息内容
replyMsg := ctx.GetMessage(replyID)
if replyMsg.Elements != nil {
replyContent = replyMsg.Elements.ExtractPlainText()
}
}
break // 找到回复元素后退出循环
}
}
// 提取 /gpt 后面的内容
parts := strings.SplitN(text, "/gpt", 2)
var gContent string
if len(parts) > 1 {
gContent = strings.TrimSpace(parts[1])
}
// 组合内容:优先使用回复内容,如果同时有/gpt内容则拼接
switch {
case replyContent != "" && gContent != "":
query = replyContent + "\n" + gContent
case replyContent != "":
query = replyContent
case gContent != "":
query = gContent
default:
return
}
stor, err := newstorage(ctx, gid)
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
// 调用大模型API进行聊天
reply, err := llmchat(query, stor.temp())
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
// 分割总结内容为多段按1000字符长度切割
msg := make(message.Message, 0)
for len(reply) > 0 {
if len(reply) <= 1000 {
msg = append(msg, ctxext.FakeSenderForwardNode(ctx, message.Text(reply)))
break
}
// 查找1000字符内的最后一个换行符尽量在换行处分割
chunk := reply[:1000]
lastNewline := strings.LastIndex(chunk, "\n")
if lastNewline > 0 {
chunk = reply[:lastNewline+1]
}
msg = append(msg, ctxext.FakeSenderForwardNode(ctx, message.Text(chunk)))
reply = reply[len(chunk):]
}
if len(msg) > 0 {
ctx.Send(msg)
}
})
}
// llmchat 调用大模型API包装
func llmchat(prompt string, temp float32) (string, error) {
topp, maxn := cfg.mparams()
x := deepinfra.NewAPI(cfg.API, string(cfg.Key))
mod, err := cfg.Type.protocol(cfg.ModelName, temp, topp, maxn)
if err != nil {
return "", err
}
data, err := x.Request(mod.User(model.NewContentText(prompt)))
if err != nil {
return "", err
}
return strings.TrimSpace(data), nil
}
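
The heart of this commit is the agent round loop added to the handler above: call the agent, execute each returned OneBot action, feed the result back into the agent's memory, and mark a terminus once at least one round produced actions; if no round did, fall back to plain chat. Below is a toy sketch of that control flow with stand-in types (the agent, request and helper names are placeholders, not the real go-onebot-agent or zbputils/chat APIs):

package main

import "fmt"

// request is a stand-in for the action requests returned by the agent.
type request struct {
	Action string
	Params map[string]any
}

// agent is a stand-in that remembers executed actions, loosely mirroring
// how the handler above stores API responses back into the agent.
type agent struct{ memory []string }

func (a *agent) call(round int) []request {
	if round >= 2 { // pretend the agent has nothing more to do after two rounds
		return nil
	}
	return []request{{Action: "send_group_msg", Params: map[string]any{"round": round}}}
}

func (a *agent) addResponse(resp string) { a.memory = append(a.memory, resp) }
func (a *agent) addTerminus()            { a.memory = append(a.memory, "<terminus>") }

func main() {
	ag := &agent{}
	hasresp := false
	for i := 0; i < 8; i++ { // at most 8 rounds, as in the handler above
		reqs := ag.call(i)
		if len(reqs) == 0 {
			break
		}
		hasresp = true
		for _, req := range reqs {
			// in the plugin this is ctx.CallAction(req.Action, req.Params)
			ag.addResponse(fmt.Sprintf("executed %s %v", req.Action, req.Params))
		}
	}
	if hasresp {
		ag.addTerminus()
	} else {
		fmt.Println("no actions, fall back to normal chat")
	}
	fmt.Println(ag.memory)
}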


@@ -1,53 +0,0 @@
package aichat
import (
"github.com/FloatTech/zbputils/ctxext"
zero "github.com/wdvxdr1123/ZeroBot"
)
const (
bitmaprate = 0x0000ff
bitmaptemp = 0x00ff00
bitmapnagt = 0x010000
bitmapnrec = 0x020000
bitmapnrat = 0x040000
)
var (
fastfailnorecord = false
)
type storage ctxext.Storage
func newstorage(ctx *zero.Ctx, gid int64) (storage, error) {
s, err := ctxext.NewStorage(ctx, gid)
return storage(s), err
}
func (s storage) rate() uint8 {
return uint8((ctxext.Storage)(s).Get(bitmaprate))
}
func (s storage) temp() float32 {
temp := int8((ctxext.Storage)(s).Get(bitmaptemp))
// 处理温度参数
if temp <= 0 {
temp = 70 // default setting
}
if temp > 100 {
temp = 100
}
return float32(temp) / 100
}
func (s storage) noagent() bool {
return (ctxext.Storage)(s).GetBool(bitmapnagt)
}
func (s storage) norecord() bool {
return (ctxext.Storage)(s).GetBool(bitmapnrec)
}
func (s storage) noreplyat() bool {
return (ctxext.Storage)(s).GetBool(bitmapnrat)
}
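
The deleted storage.go above packs all per-group switches into a single int64 (the "[8 temp] [8 rate] LSB" comment that survives in the new plugins). A minimal sketch of the layout those bitmasks imply, with explicit shifts for illustration only; the real ctxext.Storage Get/Set appear to handle the mask and shift internally, as the deleted tests below suggest:

package main

import "fmt"

// Layout implied by the masks above (LSB first):
// bits 0-7 rate, bits 8-15 temp, bit 16 noagent, bit 17 norecord, bit 18 noreplyat.
const (
	maskRate    = 0x0000ff
	maskTemp    = 0x00ff00
	maskNoAgent = 0x010000
)

func main() {
	var data int64
	data |= 75          // rate = 75 in the low byte
	data |= 85 << 8     // temp = 85 in the second byte
	data |= maskNoAgent // noagent flag

	fmt.Println("rate:", data&maskRate)            // 75
	fmt.Println("temp:", (data&maskTemp)>>8)       // 85
	fmt.Println("noagent:", data&maskNoAgent != 0) // true
}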


@@ -1,149 +0,0 @@
package aichat
import (
"testing"
"github.com/FloatTech/zbputils/ctxext"
)
func TestStorage_rate(t *testing.T) {
s := storage(ctxext.Storage(0))
// 测试默认值
if rate := s.rate(); rate != 0 {
t.Errorf("default rate() = %v, want 0", rate)
}
// 设置值并测试
s = storage((ctxext.Storage)(s).Set(int64(100), bitmaprate))
if rate := s.rate(); rate != 100 {
t.Errorf("rate() after set = %v, want 100", rate)
}
}
func TestStorage_temp(t *testing.T) {
s := storage(ctxext.Storage(0))
tests := []struct {
name string
setValue int64
expected float32
}{
{"default temp (0)", 0, 0.70}, // 默认值 70/100
{"valid temp 50", 50, 0.50}, // 50/100 = 0.50
{"valid temp 80", 80, 0.80}, // 80/100 = 0.80
{"max temp 100", 100, 1.00}, // 100/100 = 1.00
{"over max temp", 127, 1.00}, // 限制为 100/100 = 1.00
{"negative temp", -10, 0.70}, // 默认值 70/100
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s = storage((ctxext.Storage)(s).Set(tt.setValue, bitmaptemp))
result := s.temp()
if result != tt.expected {
t.Errorf("temp() = %v, want %v", result, tt.expected)
}
})
}
}
func TestStorage_noagent(t *testing.T) {
s := storage(ctxext.Storage(0))
// 测试默认值
if noagent := s.noagent(); noagent != false {
t.Errorf("default noagent() = %v, want false", noagent)
}
// 设置为 true 并测试
s = storage((ctxext.Storage)(s).Set(1, bitmapnagt))
if noagent := s.noagent(); noagent != true {
t.Errorf("noagent() after set true = %v, want true", noagent)
}
}
func TestStorage_norecord(t *testing.T) {
s := storage(ctxext.Storage(0))
// 测试默认值
if norecord := s.norecord(); norecord != false {
t.Errorf("default norecord() = %v, want false", norecord)
}
// 设置为 true 并测试
s = storage((ctxext.Storage)(s).Set(1, bitmapnrec))
if norecord := s.norecord(); norecord != true {
t.Errorf("norecord() after set true = %v, want true", norecord)
}
}
func TestStorage_noreplyat(t *testing.T) {
s := storage(ctxext.Storage(0))
// 测试默认值
if noreplyat := s.noreplyat(); noreplyat != false {
t.Errorf("default noreplyat() = %v, want false", noreplyat)
}
// 设置为 true 并测试
s = storage((ctxext.Storage)(s).Set(1, bitmapnrat))
if noreplyat := s.noreplyat(); noreplyat != true {
t.Errorf("noreplyat() after set true = %v, want true", noreplyat)
}
}
func TestStorage_Integration(t *testing.T) {
s := storage(ctxext.Storage(0))
// 设置各种值
s = storage((ctxext.Storage)(s).Set(int64(75), bitmaprate))
s = storage((ctxext.Storage)(s).Set(int64(85), bitmaptemp))
s = storage((ctxext.Storage)(s).Set(1, bitmapnagt))
s = storage((ctxext.Storage)(s).Set(0, bitmapnrec))
s = storage((ctxext.Storage)(s).Set(1, bitmapnrat))
// 验证所有方法
if rate := s.rate(); rate != 75 {
t.Errorf("rate() = %v, want 75", rate)
}
if temp := s.temp(); temp != 0.85 {
t.Errorf("temp() = %v, want 0.85", temp)
}
if noagent := s.noagent(); !noagent {
t.Errorf("noagent() = %v, want true", noagent)
}
if norecord := s.norecord(); norecord {
t.Errorf("norecord() = %v, want false", norecord)
}
if noreplyat := s.noreplyat(); !noreplyat {
t.Errorf("noreplyat() = %v, want true", noreplyat)
}
}
func BenchmarkStorage_rate(b *testing.B) {
s := storage(ctxext.Storage(0))
s = storage((ctxext.Storage)(s).Set(int64(100), bitmaprate))
b.ResetTimer()
for i := 0; i < b.N; i++ {
s.rate()
}
}
func BenchmarkStorage_temp(b *testing.B) {
s := storage(ctxext.Storage(0))
s = storage((ctxext.Storage)(s).Set(int64(80), bitmaptemp))
b.ResetTimer()
for i := 0; i < b.N; i++ {
s.temp()
}
}

plugin/aichatcfg/main.go Normal file

@@ -0,0 +1,144 @@
// Package aichatcfg aichat 的配置, 优先级要比 aichat 高
package aichatcfg
import (
"github.com/sirupsen/logrus"
zero "github.com/wdvxdr1123/ZeroBot"
"github.com/wdvxdr1123/ZeroBot/message"
ctrl "github.com/FloatTech/zbpctrl"
"github.com/FloatTech/zbputils/chat"
"github.com/FloatTech/zbputils/control"
"github.com/FloatTech/zbputils/ctxext"
)
var (
// en data [8 temp] [8 rate] LSB
en = control.AutoRegister(&ctrl.Options[*zero.Ctx]{
DisableOnDefault: false,
Extra: control.ExtraFromString("aichat"),
Brief: "aichat 的配置",
Help: "- 设置AI聊天触发概率10\n" +
"- 设置AI聊天温度80\n" +
"- 设置AI聊天(|识图|Agent)接口类型[OpenAI|OLLaMA|GenAI]\n" +
"- 设置AI聊天(不)使用Agent模式\n" +
"- 设置AI聊天(不)支持系统提示词\n" +
"- 设置AI聊天(|识图|Agent)接口地址https://api.siliconflow.cn/v1/chat/completions\n" +
"- 设置AI聊天(|识图|Agent)密钥xxx\n" +
"- 设置AI聊天(|识图|Agent)模型名Qwen/Qwen3-8B\n" +
"- 查看AI聊天系统提示词\n" +
"- 重置AI聊天系统提示词\n" +
"- 设置AI聊天系统提示词xxx\n" +
"- 设置AI聊天分隔符</think>(留空则清除)\n" +
"- 设置AI聊天(不)响应AT\n" +
"- 设置AI聊天最大长度4096\n" +
"- 设置AI聊天TopP 0.9\n" +
"- 设置AI聊天(不)以AI语音输出\n" +
"- 查看AI聊天配置\n" +
"- 重置AI聊天\n",
})
)
func init() {
en.UsePreHandler(func(ctx *zero.Ctx) bool {
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
stor, err := chat.NewStorage(ctx, gid)
if err != nil {
logrus.Warnln("ERROR: ", err)
return false
}
ctx.State[zero.StateKeyPrefixKeep+"aichatcfg_stor__"] = stor
return true
})
en.OnPrefix("设置AI聊天触发概率", zero.AdminPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBitmapHandler(chat.BitmapRate, 0, 100))
en.OnPrefix("设置AI聊天温度", zero.AdminPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBitmapHandler(chat.BitmapTemp, 0, 100))
en.OnPrefix("设置AI聊天接口类型", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetModelType(&chat.AC.Type))
en.OnPrefix("设置AI聊天识图接口类型", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetModelType(&chat.AC.ImageType))
en.OnPrefix("设置AI聊天Agent接口类型", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetModelType(&chat.AC.AgentType))
en.OnPrefix("设置AI聊天接口地址", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.API))
en.OnPrefix("设置AI聊天识图接口地址", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.ImageAPI))
en.OnPrefix("设置AI聊天Agent接口地址", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.AgentAPI))
en.OnPrefix("设置AI聊天密钥", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.Key))
en.OnPrefix("设置AI聊天识图密钥", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.ImageKey))
en.OnPrefix("设置AI聊天Agent密钥", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.AgentKey))
en.OnPrefix("设置AI聊天模型名", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.ModelName))
en.OnPrefix("设置AI聊天识图模型名", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.ImageModelName))
en.OnPrefix("设置AI聊天Agent模型名", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.AgentModelName))
en.OnPrefix("设置AI聊天系统提示词", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.SystemP))
en.OnFullMatch("查看AI聊天系统提示词", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).Handle(func(ctx *zero.Ctx) {
ctx.SendChain(message.Text(chat.AC.SystemP))
})
en.OnFullMatch("重置AI聊天系统提示词", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).Handle(func(ctx *zero.Ctx) {
c, ok := ctx.State["manager"].(*ctrl.Control[*zero.Ctx])
if !ok {
ctx.SendChain(message.Text("ERROR: no such plugin"))
return
}
chat.AC.SystemP = chat.SystemPrompt
err := c.SetExtra(&chat.AC)
if err != nil {
ctx.SendChain(message.Text("ERROR: set extra err: ", err))
return
}
ctx.SendChain(message.Text("成功"))
})
en.OnPrefix("设置AI聊天分隔符", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetStr(&chat.AC.Separator))
en.OnRegex("^设置AI聊天(不)?响应AT$", chat.EnsureConfig, zero.SuperUserPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBoolHandler(chat.BitmapNrat))
en.OnRegex("^设置AI聊天(不)?支持系统提示词$", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetBool(&chat.AC.NoSystemP))
en.OnRegex("^设置AI聊天(不)?使用Agent模式$", chat.EnsureConfig, zero.SuperUserPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBoolHandler(chat.BitmapNagt))
en.OnPrefix("设置AI聊天最大长度", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetUint(&chat.AC.MaxN))
en.OnPrefix("设置AI聊天TopP", chat.EnsureConfig, zero.OnlyPrivate, zero.SuperUserPermission).SetBlock(true).
Handle(chat.NewExtraSetFloat32(&chat.AC.TopP))
en.OnRegex("^设置AI聊天(不)?以AI语音输出$", chat.EnsureConfig, zero.AdminPermission).SetBlock(true).
Handle(ctxext.NewStorageSaveBoolHandler(chat.BitmapNrec))
en.OnFullMatch("查看AI聊天配置", chat.EnsureConfig, zero.SuperUserPermission).SetBlock(true).
Handle(func(ctx *zero.Ctx) {
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
stor, err := chat.NewStorage(ctx, gid)
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
ctx.SendChain(
message.Text(
"【当前AI聊天本群配置】\n",
"• 触发概率:", int(stor.Rate()), "\n",
"• 温度:", stor.Temp(), "\n",
"• 以AI语音输出:", chat.ModelBool(!stor.NoRecord()), "\n",
"• 使用Agent:", chat.ModelBool(!stor.NoAgent()), "\n",
"• 响应@:", chat.ModelBool(!stor.NoReplyAt()), "\n",
),
message.Text("【当前AI聊天全局配置】\n", &chat.AC),
)
})
en.OnFullMatch("重置AI聊天", chat.EnsureConfig, zero.SuperUserPermission).SetBlock(true).Handle(func(ctx *zero.Ctx) {
chat.ResetChat()
ctx.SendChain(message.Text("成功"))
})
}
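
The chat.NewExtraSetStr / NewExtraSetBool / NewExtraSetUint handlers wired up above presumably follow the same generic-setter shape as the deleted newextrasetstr family at the top of this commit. A simplified standalone sketch of that pattern (newStringSetter, save and the sample config are placeholders, not the actual zbputils/chat signatures):

package main

import (
	"fmt"
	"strings"
)

// newStringSetter returns a handler that trims the argument, writes it
// through the given pointer and then persists the whole config via save,
// mirroring the deleted newextrasetstr above.
func newStringSetter[T ~string](ptr *T, save func() error) func(args string) string {
	return func(args string) string {
		args = strings.TrimSpace(args)
		if args == "" {
			return "ERROR: empty args"
		}
		*ptr = T(args)
		if err := save(); err != nil {
			return "ERROR: set extra err: " + err.Error()
		}
		return "成功"
	}
}

type config struct{ ModelName string }

func main() {
	cfg := config{}
	set := newStringSetter(&cfg.ModelName, func() error { return nil })
	fmt.Println(set(" Qwen/Qwen3-8B "), cfg.ModelName)
}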

plugin/llm/main.go Normal file

@@ -0,0 +1,246 @@
// Package llm 大模型聊天和群聊总结
package llm
import (
"strconv"
"strings"
"time"
"github.com/fumiama/deepinfra"
"github.com/fumiama/deepinfra/model"
"github.com/tidwall/gjson"
zero "github.com/wdvxdr1123/ZeroBot"
"github.com/wdvxdr1123/ZeroBot/extension/single"
"github.com/wdvxdr1123/ZeroBot/message"
ctrl "github.com/FloatTech/zbpctrl"
"github.com/FloatTech/zbputils/chat"
"github.com/FloatTech/zbputils/control"
"github.com/FloatTech/zbputils/ctxext"
)
var (
// en data [8 temp] [8 rate] LSB
en = control.AutoRegister(&ctrl.Options[*zero.Ctx]{
DisableOnDefault: false,
Brief: "大模型聊天和群聊总结",
Help: "- 群聊总结 [消息数目]|群聊总结 1000\n" +
"- /gpt [内容] (使用大模型聊天)\n",
}).ApplySingle(single.New(
single.WithKeyFn(func(ctx *zero.Ctx) int64 {
if ctx.Event.GroupID == 0 {
return -ctx.Event.UserID
}
return ctx.Event.GroupID
}),
// no post option, silently quit
))
)
var (
limit = ctxext.NewLimiterManager(time.Second*30, 1)
)
func init() {
// 添加群聊总结功能
en.OnRegex(`^群聊总结\s?(\d*)$`, chat.EnsureConfig, zero.OnlyGroup, zero.AdminPermission).SetBlock(true).Limit(limit.LimitByGroup).Handle(func(ctx *zero.Ctx) {
ctx.SendChain(message.Text("少女思考中..."))
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
p, _ := strconv.ParseInt(ctx.State["regex_matched"].([]string)[1], 10, 64)
if p > 1000 {
p = 1000
}
if p == 0 {
p = 200
}
group := ctx.GetGroupInfo(gid, false)
if group.MemberCount == 0 {
ctx.SendChain(message.Text(zero.BotConfig.NickName[0], "未加入", group.Name, "(", gid, "),无法获取总结"))
return
}
var messages []string
h := ctx.GetGroupMessageHistory(gid, 0, p, false)
h.Get("messages").ForEach(func(_, msgObj gjson.Result) bool {
nickname := msgObj.Get("sender.nickname").Str
text := strings.TrimSpace(message.ParseMessageFromString(msgObj.Get("raw_message").Str).ExtractPlainText())
if text != "" {
messages = append(messages, nickname+": "+text)
}
return true
})
if len(messages) == 0 {
ctx.SendChain(message.Text("ERROR: 历史消息为空或者无法获得历史消息"))
return
}
// 构造总结请求提示 (使用通用版省流提示词)
// 使用反引号定义多行字符串,更清晰
promptTemplate := `请对以下群聊对话进行【极简总结】。
要求:
1. 剔除客套与废话,直击主题。
2. 使用 Markdown 列表格式。
3. 按以下结构输出:
- 🎯 核心议题:(一句话概括)
- 💡 关键观点/结论:(提取3-5个重点)
- ✅ 下一步/待办:(如果有,明确谁做什么)
群聊对话内容如下:
`
summaryPrompt := promptTemplate + strings.Join(messages, "\n")
stor, err := chat.NewStorage(ctx, gid)
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
// 调用大模型API进行总结
summary, err := llmchat(summaryPrompt, stor.Temp())
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
var b strings.Builder
b.WriteString("群 ")
b.WriteString(group.Name)
b.WriteByte('(')
b.WriteString(strconv.FormatInt(gid, 10))
b.WriteString(") 的 ")
b.WriteString(strconv.FormatInt(p, 10))
b.WriteString(" 条消息总结:\n\n")
b.WriteString(summary)
// 分割总结内容为多段,按1000字符长度切割
summaryText := b.String()
msg := make(message.Message, 0)
for len(summaryText) > 0 {
if len(summaryText) <= 1000 {
msg = append(msg, ctxext.FakeSenderForwardNode(ctx, message.Text(summaryText)))
break
}
// 查找1000字符内的最后一个换行符,尽量在换行处分割
chunk := summaryText[:1000]
lastNewline := strings.LastIndex(chunk, "\n")
if lastNewline > 0 {
chunk = summaryText[:lastNewline+1]
}
msg = append(msg, ctxext.FakeSenderForwardNode(ctx, message.Text(chunk)))
summaryText = summaryText[len(chunk):]
}
if len(msg) > 0 {
ctx.Send(msg)
}
})
// 添加 /gpt 命令处理(同时支持回复消息和直接使用)
en.OnKeyword("/gpt", chat.EnsureConfig).SetBlock(true).Handle(func(ctx *zero.Ctx) {
gid := ctx.Event.GroupID
if gid == 0 {
gid = -ctx.Event.UserID
}
text := ctx.MessageString()
var query string
var replyContent string
// 检查是否是回复消息 (使用MessageElement检查而不是CQ码)
for _, elem := range ctx.Event.Message {
if elem.Type == "reply" {
// 提取被回复的消息ID
replyIDStr := elem.Data["id"]
replyID, err := strconv.ParseInt(replyIDStr, 10, 64)
if err == nil {
// 获取被回复的消息内容
replyMsg := ctx.GetMessage(replyID)
if replyMsg.Elements != nil {
replyContent = replyMsg.Elements.ExtractPlainText()
}
}
break // 找到回复元素后退出循环
}
}
// 提取 /gpt 后面的内容
parts := strings.SplitN(text, "/gpt", 2)
var gContent string
if len(parts) > 1 {
gContent = strings.TrimSpace(parts[1])
}
// 组合内容:优先使用回复内容,如果同时有/gpt内容则拼接
switch {
case replyContent != "" && gContent != "":
query = replyContent + "\n" + gContent
case replyContent != "":
query = replyContent
case gContent != "":
query = gContent
default:
return
}
stor, err := chat.NewStorage(ctx, gid)
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
// 调用大模型API进行聊天
reply, err := llmchat(query, stor.Temp())
if err != nil {
ctx.SendChain(message.Text("ERROR: ", err))
return
}
// 分割回复内容为多段,按1000字符长度切割
msg := make(message.Message, 0)
for len(reply) > 0 {
if len(reply) <= 1000 {
msg = append(msg, ctxext.FakeSenderForwardNode(ctx, message.Text(reply)))
break
}
// 查找1000字符内的最后一个换行符,尽量在换行处分割
chunk := reply[:1000]
lastNewline := strings.LastIndex(chunk, "\n")
if lastNewline > 0 {
chunk = reply[:lastNewline+1]
}
msg = append(msg, ctxext.FakeSenderForwardNode(ctx, message.Text(chunk)))
reply = reply[len(chunk):]
}
if len(msg) > 0 {
ctx.Send(msg)
}
})
}
// llmchat 封装对大模型 API 的一次调用
func llmchat(prompt string, temp float32) (string, error) {
topp, maxn := chat.AC.MParams()
x := deepinfra.NewAPI(chat.AC.API, string(chat.AC.Key))
mod, err := chat.AC.Type.Protocol(chat.AC.ModelName, temp, topp, maxn)
if err != nil {
return "", err
}
data, err := x.Request(mod.User(model.NewContentText(prompt)))
if err != nil {
return "", err
}
return strings.TrimSpace(data), nil
}
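
Both the 群聊总结 and /gpt handlers split long replies into forward-message nodes of at most 1000 bytes, cutting at the last newline inside the window when possible. A minimal standalone sketch of that loop follows (the splitChunks helper is illustrative, not part of the plugin); note the split counts bytes, so a chunk without any newline can cut through a multi-byte character:

package main

import (
	"fmt"
	"strings"
)

// splitChunks mirrors the forward-message splitting above: emit at most
// limit bytes per chunk, preferring to break at the last newline inside
// the window.
func splitChunks(s string, limit int) []string {
	var chunks []string
	for len(s) > 0 {
		if len(s) <= limit {
			chunks = append(chunks, s)
			break
		}
		chunk := s[:limit]
		if i := strings.LastIndex(chunk, "\n"); i > 0 {
			chunk = s[:i+1]
		}
		chunks = append(chunks, chunk)
		s = s[len(chunk):]
	}
	return chunks
}

func main() {
	text := strings.Repeat("line one\nline two\n", 100)
	for i, c := range splitChunks(text, 1000) {
		fmt.Printf("chunk %d: %d bytes\n", i, len(c))
	}
}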