You've already forked DataMate
refactor(upload): 重构切片上传逻辑支持动态请求ID解析
- 移除预先批量获取reqId的方式,改为按需解析
- 新增resolveReqId函数支持动态获取请求ID
- 添加onReqIdResolved回调处理ID解析完成事件
- 改进文件按行切片上传,每行作为独立文件处理
- 优化空行跳过逻辑,统计跳过的空行数量
- 修复fileNo和chunkNo的对应关系
- 更新streamSplitAndUpload参数结构
This commit is contained in:
@@ -251,18 +251,6 @@ export function useFileSliceUpload(
      const file = files[i];
      console.log(`[useSliceUpload] Processing file ${i + 1}/${files.length}: ${file.name}`);

      // 为每个文件单独调用 preUpload,获取独立的 reqId
      const { data: reqId } = await preUpload(task.key, {
        totalFileNum: 1,
        totalSize: file.size,
        datasetId: task.key,
        hasArchive: task.hasArchive,
        prefix: task.prefix,
      });

      console.log(`[useSliceUpload] File ${file.name} preUpload response reqId:`, reqId);
      reqIds.push(reqId);

      const result = await streamSplitAndUpload(
        file,
        (formData, config) => uploadChunk(task.key, formData, {
@@ -292,10 +280,21 @@ export function useFileSliceUpload(
            },
          };
          updateTaskList(updatedTask);
        },
      },
      1024 * 1024, // 1MB chunk size
      {
        reqId,
        resolveReqId: async ({ totalFileNum, totalSize }) => {
          const { data: reqId } = await preUpload(task.key, {
            totalFileNum,
            totalSize,
            datasetId: task.key,
            hasArchive: task.hasArchive,
            prefix: task.prefix,
          });
          console.log(`[useSliceUpload] File ${file.name} preUpload response reqId:`, reqId);
          reqIds.push(reqId);
          return reqId;
        },
        hasArchive: newTask.hasArchive,
        prefix: newTask.prefix,
        signal: newTask.controller.signal,
||||
Reference in New Issue
Block a user