mirror of
https://github.com/NapNeko/NapCatQQ.git
synced 2025-12-26 10:41:20 +08:00
feat: Add group album upload utilities and refactor API && close #1116
Introduces src/core/data/webapi.ts with utilities for chunked group album uploads, including session creation and chunk management. Refactors NTQQWebApi in webapi.ts to use these utilities, adds getAlbumList and uploadImageToQunAlbum methods, and improves upload logic for efficiency and maintainability.
This commit is contained in:
parent
722c3554e9
commit
b1e77b1658
@ -8,10 +8,10 @@ import {
|
||||
WebHonorType,
|
||||
} from '@/core';
|
||||
import { NapCatCore } from '..';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { createReadStream, readFileSync } from 'node:fs';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { basename } from 'node:path';
|
||||
|
||||
import { createStreamUploadChunk, qunAlbumControl } from '../data/webapi';
|
||||
export class NTQQWebApi {
|
||||
context: InstanceContext;
|
||||
core: NapCatCore;
|
||||
@ -323,64 +323,105 @@ export class NTQQWebApi {
|
||||
}
|
||||
return (hash & 0x7FFFFFFF).toString();
|
||||
}
|
||||
async createQunAlbumSession(gc: string, sAlbumID: string, sAlbumName: string, path: string, skey: string, pskey: string, uin: string) {
|
||||
|
||||
/**
 * Fetch the list of Qzone albums that belong to a group (qun).
 *
 * @param gc group id
 * @returns array of `{ id, title }` album descriptors
 */
async getAlbumList(gc: string) {
    // Response shape of the qunGetAlbumList CGI (only the fields we read).
    type AlbumListResp = { data: { album: Array<{ id: string, title: string }> } };
    // The qzone.qq.com p_skey is needed to derive the g_tk (bkn) checksum.
    const pskey = (await this.core.apis.UserApi.getPSkey(['qzone.qq.com'])).domainPskeyMap.get('qzone.qq.com') || '';
    const bkn = this.getBknFromSKey(pskey);
    const uin = this.core.selfInfo.uin || '10001';
    const api = `https://h5.qzone.qq.com/proxy/domain/u.photo.qzone.qq.com/cgi-bin/upp/qun_list_album_v2?random=7570&g_tk=${bkn}&format=json&inCharset=utf-8&outCharset=utf-8&qua=V1_IPH_SQ_6.2.0_0_HDBM_T&cmd=qunGetAlbumList&qunId=${gc}&start=0&num=1000&uin=${uin}&getMemberRole=0`;
    const { data } = await RequestUtil.HttpGetJson<AlbumListResp>(api);
    return data.album;
}
|
||||
|
||||
/**
 * Upload a local image file into a group (qun) album via Qzone's
 * chunked slice-upload API.
 *
 * Flow: create an upload session, split the file into 16 KiB slices,
 * then upload the slices over three parallel queues, each queue sending
 * its own slices strictly in order.
 *
 * @param gc group id
 * @param sAlbumID target album id
 * @param sAlbumName target album name
 * @param path local filesystem path of the image
 * @returns per-slice upload results, sorted by slice sequence number
 * @throws Error when the session cannot be created or a slice upload fails
 */
async uploadImageToQunAlbum(gc: string, sAlbumID: string, sAlbumName: string, path: string) {
    const skey = await this.core.apis.UserApi.getSKey() || '';
    const pskey = (await this.core.apis.UserApi.getPSkey(['qzone.qq.com'])).domainPskeyMap.get('qzone.qq.com') || '';
    const session = (await this.createQunAlbumSession(gc, sAlbumID, sAlbumName, path, skey, pskey)).data.session;
    if (!session) throw new Error('创建群相册会话失败');

    const uin = this.core.selfInfo.uin || '10001';
    // Slice reader over the file stream; 16384 = 16 KiB per slice.
    const chunkReader = createStreamUploadChunk(createReadStream(path), uin, session, 16384);

    // Upload auth: g_tk checksum plus the Qzone cookie set.
    const GTK = this.getBknFromSKey(pskey);
    const cookie = `p_uin=${uin}; p_skey=${pskey}; skey=${skey}; uin=${uin}`;

    // Drain the stream into a list of ready-to-send slices.
    const allChunks: NonNullable<Awaited<ReturnType<typeof chunkReader.getNextChunk>>>[] = [];
    let chunked = await chunkReader.getNextChunk();
    while (chunked) {
        allChunks.push(chunked);
        chunked = await chunkReader.getNextChunk();
    }

    // Total file size equals the end offset of the last slice. The previous
    // implementation re-read the entire file with readFileSync just to take
    // its length; deriving it from the slices avoids that second full read.
    const total = allChunks.length > 0 ? allChunks[allChunks.length - 1]!.end : 0;

    // Round-robin the slices into 3 groups: groups run in parallel, while
    // each group uploads its own slices sequentially.
    const chunkGroups: typeof allChunks[] = [[], [], []];
    allChunks.forEach((slice, index) => {
        chunkGroups[index % 3]!.push(slice);
    });

    // Upload a single slice; throws when the server reports a non-zero ret.
    const uploadChunk = async (chunkData: typeof allChunks[0]) => {
        const api = `https://h5.qzone.qq.com/webapp/json/sliceUpload/FileUpload?seq=${chunkData.seq}&retry=0&offset=${chunkData.offset}&end=${chunkData.end}&total=${total}&type=json&g_tk=${GTK}`;
        const post = await RequestUtil.HttpGetJson<{ data: { offset: string }, ret: number, msg: string }>(api, 'POST', chunkData, {
            'Cookie': cookie,
            'Content-Type': 'application/json',
            'origin': 'https://h5.qzone.qq.com',
        });
        if (post.ret !== 0) throw new Error(`分片 ${chunkData.seq} 上传失败: ${post.msg}`);
        return { seq: chunkData.seq, offset: chunkData.offset, success: true };
    };

    // Sequential upload within one group.
    const uploadGroupSequentially = async (group: typeof allChunks) => {
        const groupResults = [];
        for (const slice of group) {
            groupResults.push(await uploadChunk(slice));
        }
        return groupResults;
    };

    // 3 queues in parallel; each resolves to its group's ordered results.
    const groupResults = await Promise.all(chunkGroups.map(group => uploadGroupSequentially(group)));

    // Flatten and restore global slice order.
    const results = groupResults.flat();
    results.sort((a, b) => a.seq - b.seq);
    return results;
}
|
||||
|
||||
/**
 * Create a slice-upload session for a group-album image via Qzone's
 * FileBatchControl endpoint.
 *
 * @param gc group id
 * @param sAlbumID target album id
 * @param sAlbumName target album name
 * @param path local filesystem path of the image (read fully to hash it)
 * @param skey account skey (cookie auth)
 * @param pskey qzone.qq.com p_skey (token + g_tk derivation)
 * @returns the raw FileBatchControl response; `data.session` is the upload
 *          session id consumed by uploadImageToQunAlbum
 */
async createQunAlbumSession(gc: string, sAlbumID: string, sAlbumName: string, path: string, skey: string, pskey: string) {
    // The whole file is read here because the control request needs its
    // MD5 checksum and byte length up front.
    const img = readFileSync(path);
    const uin = this.core.selfInfo.uin || '10001';
    const img_md5 = createHash('md5').update(img).digest('hex');
    const img_size = img.length;
    const img_name = basename(path);
    const GTK = this.getBknFromSKey(pskey);
    const cookie = `p_uin=${uin}; p_skey=${pskey}; skey=${skey}; uin=${uin}`;
    // Request body construction is delegated to the shared helper
    // (src/core/data/webapi.ts) instead of an inline literal.
    const body = qunAlbumControl({
        uin,
        group_id: gc,
        pskey,
        pic_md5: img_md5,
        img_size,
        img_name,
        sAlbumName: sAlbumName,
        sAlbumID: sAlbumID
    });
    const api = `https://h5.qzone.qq.com/webapp/json/sliceUpload/FileBatchControl/${img_md5}?g_tk=${GTK}`;
    const post = await RequestUtil.HttpGetJson<{ data: { session: string }, ret: number, msg: string }>(api, 'POST', body, {
        'Cookie': cookie,
        'Content-Type': 'application/json'
    });
    return post;
}
|
||||
}
|
||||
|
||||
302
src/core/data/webapi.ts
Normal file
302
src/core/data/webapi.ts
Normal file
@ -0,0 +1,302 @@
|
||||
import { ReadStream } from "node:fs";
|
||||
export interface ControlReq {
|
||||
appid?: string;
|
||||
asy_upload?: number;
|
||||
biz_req?: BizReq;
|
||||
check_type?: number;
|
||||
checksum?: string;
|
||||
cmd?: string;
|
||||
env?: Env;
|
||||
file_len?: number;
|
||||
model?: number;
|
||||
session?: string;
|
||||
token?: Token;
|
||||
uin?: string;
|
||||
[property: string]: any;
|
||||
}
|
||||
|
||||
export interface BizReq {
|
||||
iAlbumTypeID: number;
|
||||
iBatchID: number;
|
||||
iBitmap: number;
|
||||
iDistinctUse: number;
|
||||
iNeedFeeds: number;
|
||||
iPicHight: number;
|
||||
iPicWidth: number;
|
||||
iUploadTime: number;
|
||||
iUploadType: number;
|
||||
iUpPicType: number;
|
||||
iWaterType: number;
|
||||
mapExt: MapExt;
|
||||
sAlbumID: string;
|
||||
sAlbumName: string;
|
||||
sExif_CameraMaker: string;
|
||||
sExif_CameraModel: string;
|
||||
sExif_Latitude: string;
|
||||
sExif_LatitudeRef: string;
|
||||
sExif_Longitude: string;
|
||||
sExif_LongitudeRef: string;
|
||||
sExif_Time: string;
|
||||
sPicDesc: string;
|
||||
sPicPath: string;
|
||||
sPicTitle: string;
|
||||
[property: string]: any;
|
||||
}
|
||||
|
||||
export interface MapExt {
|
||||
appid: string;
|
||||
userid: string;
|
||||
[property: string]: any;
|
||||
}
|
||||
|
||||
export interface Env {
|
||||
deviceInfo: string;
|
||||
refer: string;
|
||||
[property: string]: any;
|
||||
}
|
||||
|
||||
export interface Token {
|
||||
appid: number;
|
||||
data: string;
|
||||
type: number;
|
||||
[property: string]: any;
|
||||
}
|
||||
|
||||
export function qunAlbumControl({
|
||||
uin,
|
||||
group_id,
|
||||
pskey,
|
||||
pic_md5,
|
||||
img_size,
|
||||
img_name,
|
||||
sAlbumName,
|
||||
sAlbumID
|
||||
}: {
|
||||
uin: string,
|
||||
group_id: string,
|
||||
pskey: string,
|
||||
pic_md5: string,
|
||||
img_size: number,
|
||||
img_name: string,
|
||||
sAlbumName: string,
|
||||
sAlbumID: string,
|
||||
}
|
||||
): {
|
||||
control_req: ControlReq[]
|
||||
} {
|
||||
return {
|
||||
control_req: [
|
||||
{
|
||||
uin: uin,
|
||||
token: {
|
||||
type: 4,
|
||||
data: pskey,
|
||||
appid: 5
|
||||
},
|
||||
appid: "qun",
|
||||
checksum: pic_md5,
|
||||
check_type: 0,
|
||||
file_len: img_size,
|
||||
env: {
|
||||
refer: "qzone",
|
||||
deviceInfo: "h5"
|
||||
},
|
||||
model: 0,
|
||||
biz_req: {
|
||||
sPicTitle: img_name,
|
||||
sPicDesc: "",
|
||||
sAlbumName: sAlbumName,
|
||||
sAlbumID: sAlbumID,
|
||||
iAlbumTypeID: 0,
|
||||
iBitmap: 0,
|
||||
iUploadType: 3,
|
||||
iUpPicType: 0,
|
||||
iBatchID: +(Date.now().toString() + '4000'),//17位时间戳
|
||||
sPicPath: "",
|
||||
iPicWidth: 0,
|
||||
iPicHight: 0,
|
||||
iWaterType: 0,
|
||||
iDistinctUse: 0,
|
||||
iNeedFeeds: 1,
|
||||
iUploadTime: +(Math.floor(Date.now() / 1000).toString()),
|
||||
mapExt: {
|
||||
appid: "qun",
|
||||
userid: group_id
|
||||
},
|
||||
sExif_CameraMaker: "",
|
||||
sExif_CameraModel: "",
|
||||
sExif_Time: "",
|
||||
sExif_LatitudeRef: "",
|
||||
sExif_Latitude: "",
|
||||
sExif_LongitudeRef: "",
|
||||
sExif_Longitude: ""
|
||||
},
|
||||
session: "",
|
||||
asy_upload: 0,
|
||||
cmd: "FileUpload"
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
export function createStreamUpload(
|
||||
{
|
||||
uin,
|
||||
session,
|
||||
offset,
|
||||
seq,
|
||||
end,
|
||||
slice_size,
|
||||
data
|
||||
|
||||
}: { uin: string, session: string, offset: number, seq: number, end: number, slice_size: number, data: string }
|
||||
) {
|
||||
return {
|
||||
uin: uin,
|
||||
appid: "qun",
|
||||
session: session,
|
||||
offset: offset,//分片起始位置
|
||||
data: data,//base64编码数据
|
||||
checksum: "",
|
||||
check_type: 0,
|
||||
retry: 0,//重试次数
|
||||
seq: seq,//分片序号
|
||||
end: end,//分片结束位置 文件总大小
|
||||
cmd: "FileUpload",
|
||||
slice_size: slice_size,//分片大小16KB 16384
|
||||
biz_req: {
|
||||
iUploadType: 3
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
class ChunkData {
|
||||
private reader: ReadStream;
|
||||
private uin: string;
|
||||
private chunkSize: number;
|
||||
private offset: number = 0;
|
||||
private seq: number = 0;
|
||||
private buffer: Uint8Array = new Uint8Array(0);
|
||||
private isCompleted: boolean = false;
|
||||
private session: string;
|
||||
|
||||
constructor(file: ReadStream, uin: string, chunkSize: number = 16384, session: string = '') {
|
||||
this.reader = file;
|
||||
this.uin = uin;
|
||||
this.chunkSize = chunkSize;
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
async getNextChunk(): Promise<ReturnType<typeof createStreamUpload> | null> {
|
||||
if (this.isCompleted && this.buffer.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
return new Promise((resolve, reject) => {
|
||||
const processChunk = () => {
|
||||
// 如果没有数据了,返回 null
|
||||
if (this.buffer.length === 0) {
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
|
||||
// 准备当前块数据
|
||||
const chunkToSend = this.buffer.slice(0, Math.min(this.chunkSize, this.buffer.length));
|
||||
this.buffer = this.buffer.slice(chunkToSend.length);
|
||||
|
||||
// 计算位置信息
|
||||
const start = this.offset;
|
||||
this.offset += chunkToSend.length;
|
||||
const end = this.offset;
|
||||
|
||||
// 转换为 Base64
|
||||
const base64Data = Buffer.from(chunkToSend).toString('base64');
|
||||
|
||||
// 创建上传数据对象
|
||||
const uploadData = createStreamUpload({
|
||||
uin: this.uin,
|
||||
session: this.session,
|
||||
offset: start,
|
||||
seq: this.seq,
|
||||
end: end,
|
||||
slice_size: this.chunkSize,
|
||||
data: base64Data
|
||||
});
|
||||
|
||||
this.seq++;
|
||||
|
||||
resolve(uploadData);
|
||||
};
|
||||
|
||||
// 如果缓冲区已经有足够数据,直接处理
|
||||
if (this.buffer.length >= this.chunkSize) {
|
||||
processChunk();
|
||||
return;
|
||||
}
|
||||
|
||||
// 否则,从流中读取更多数据
|
||||
const dataHandler = (chunk: string | Buffer) => {
|
||||
// 确保处理的是 Buffer
|
||||
const bufferChunk = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
|
||||
|
||||
// 合并缓冲区
|
||||
const newBuffer = new Uint8Array(this.buffer.length + bufferChunk.length);
|
||||
newBuffer.set(this.buffer);
|
||||
newBuffer.set(new Uint8Array(bufferChunk), this.buffer.length);
|
||||
this.buffer = newBuffer;
|
||||
|
||||
// 如果有足够的数据,处理并返回
|
||||
if (this.buffer.length >= this.chunkSize) {
|
||||
this.reader.removeListener('data', dataHandler);
|
||||
this.reader.removeListener('end', endHandler);
|
||||
this.reader.removeListener('error', errorHandler);
|
||||
processChunk();
|
||||
}
|
||||
};
|
||||
|
||||
const endHandler = () => {
|
||||
this.isCompleted = true;
|
||||
this.reader.removeListener('data', dataHandler);
|
||||
this.reader.removeListener('end', endHandler);
|
||||
this.reader.removeListener('error', errorHandler);
|
||||
|
||||
// 处理剩余数据
|
||||
processChunk();
|
||||
};
|
||||
|
||||
const errorHandler = (err: Error) => {
|
||||
this.reader.removeListener('data', dataHandler);
|
||||
this.reader.removeListener('end', endHandler);
|
||||
this.reader.removeListener('error', errorHandler);
|
||||
reject(err);
|
||||
};
|
||||
|
||||
// 添加事件监听器
|
||||
this.reader.on('data', dataHandler);
|
||||
this.reader.on('end', endHandler);
|
||||
this.reader.on('error', errorHandler);
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error getting next chunk:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
setSession(session: string): void {
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
getProgress(): number {
|
||||
return this.offset;
|
||||
}
|
||||
|
||||
isFinished(): boolean {
|
||||
return this.isCompleted && this.buffer.length === 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// 根据文件流 按chunk持续函数
|
||||
export function createStreamUploadChunk(file: ReadStream, uin: string, session: string, chunk: number = 16384): ChunkData {
|
||||
return new ChunkData(file, uin, chunk, session);
|
||||
}
|
||||
18
src/onebot/action/extends/GetQunAlbumList.ts
Normal file
18
src/onebot/action/extends/GetQunAlbumList.ts
Normal file
@ -0,0 +1,18 @@
|
||||
|
||||
import { OneBotAction } from '@/onebot/action/OneBotAction';
|
||||
import { ActionName } from '@/onebot/action/router';
|
||||
import { Static, Type } from '@sinclair/typebox';
|
||||
const SchemaData = Type.Object({
|
||||
group_id: Type.String()
|
||||
});
|
||||
|
||||
type Payload = Static<typeof SchemaData>;
|
||||
|
||||
export class GetQunAlbumList extends OneBotAction<Payload, unknown> {
|
||||
override actionName = ActionName.GetQunAlbumList;
|
||||
override payloadSchema = SchemaData;
|
||||
|
||||
async _handle(payload: Payload) {
|
||||
return await this.core.apis.WebApi.getAlbumList(payload.group_id);
|
||||
}
|
||||
}
|
||||
31
src/onebot/action/extends/UploadImageToQunAlbum.ts
Normal file
31
src/onebot/action/extends/UploadImageToQunAlbum.ts
Normal file
@ -0,0 +1,31 @@
|
||||
import { uriToLocalFile } from '@/common/file';
|
||||
import { OneBotAction } from '@/onebot/action/OneBotAction';
|
||||
import { ActionName } from '@/onebot/action/router';
|
||||
import { Static, Type } from '@sinclair/typebox';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { unlink } from 'node:fs/promises';
|
||||
|
||||
const SchemaData = Type.Object({
|
||||
group_id: Type.String(),
|
||||
album_id: Type.String(),
|
||||
album_name: Type.String(),
|
||||
file: Type.String()
|
||||
});
|
||||
|
||||
type Payload = Static<typeof SchemaData>;
|
||||
|
||||
export class UploadImageToQunAlbum extends OneBotAction<Payload, unknown> {
|
||||
override actionName = ActionName.UploadImageToQunAlbum;
|
||||
override payloadSchema = SchemaData;
|
||||
|
||||
async _handle(payload: Payload) {
|
||||
const downloadResult = await uriToLocalFile(this.core.NapCatTempPath, payload.file);
|
||||
try {
|
||||
return await this.core.apis.WebApi.uploadImageToQunAlbum(payload.group_id, payload.album_id, payload.album_name, downloadResult.path);
|
||||
} finally {
|
||||
if (downloadResult.path && existsSync(downloadResult.path)) {
|
||||
await unlink(downloadResult.path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -124,10 +124,14 @@ import { GetGroupDetailInfo } from './group/GetGroupDetailInfo';
|
||||
import GetGroupAddRequest from './extends/GetGroupAddRequest';
|
||||
import { GetCollectionList } from './extends/GetCollectionList';
|
||||
import { SetGroupTodo } from './packet/SetGroupTodo';
|
||||
import { GetQunAlbumList } from './extends/GetQunAlbumList';
|
||||
import { UploadImageToQunAlbum } from './extends/UploadImageToQunAlbum';
|
||||
|
||||
export function createActionMap(obContext: NapCatOneBot11Adapter, core: NapCatCore) {
|
||||
|
||||
const actionHandlers = [
|
||||
new GetQunAlbumList(obContext, core),
|
||||
new UploadImageToQunAlbum(obContext, core),
|
||||
new SetGroupTodo(obContext, core),
|
||||
new GetGroupDetailInfo(obContext, core),
|
||||
new SetGroupKickMembers(obContext, core),
|
||||
|
||||
@ -10,6 +10,8 @@ export interface InvalidCheckResult {
|
||||
}
|
||||
|
||||
export const ActionName = {
|
||||
UploadImageToQunAlbum: 'upload_image_to_qun_album',
|
||||
GetQunAlbumList: 'get_qun_album_list',
|
||||
SetGroupTodo: 'set_group_todo',
|
||||
SetGroupKickMembers: 'set_group_kick_members',
|
||||
SetGroupRobotAddOption: 'set_group_robot_add_option',
|
||||
|
||||
Loading…
Reference in New Issue
Block a user