mirror of
https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-08-03 05:20:39 +08:00
Feat: add partition of file uploads (#5248)
### What problem does this PR solve? This PR partitions document uploads into batches of 20 files to avoid the request-size limit error, allowing hundreds of documents to be uploaded in a single interaction. ### Type of change - [X] New Feature (non-breaking change which adds functionality)
This commit is contained in:
parent
4f2816c01c
commit
3d605a23fe
@ -248,27 +248,60 @@ export const useUploadNextDocument = () => {
|
|||||||
} = useMutation({
    mutationKey: ['uploadDocument'],
    /**
     * Uploads `fileList` to the knowledge base in batches of
     * `BATCH_SIZE` files so a single request never exceeds the
     * server's payload-size limit.
     *
     * Returns an aggregate `{ code, data, message }` object:
     *  - `code` 500 if any batch failed (network error or server 500),
     *  - `code` 0 if every batch succeeded,
     *  - otherwise the first non-zero code the server returned;
     *  - `data` is `true` only when all batches succeeded;
     *  - `message` joins every batch's server message with newlines.
     */
    mutationFn: async (fileList: UploadFile[]) => {
      // Uploads 20 documents at a time to stay under the size limit.
      const BATCH_SIZE = 20;

      // Chunk the flat file list into BATCH_SIZE-sized partitions.
      const partitionedFileList = fileList.reduce<UploadFile[][]>(
        (acc, cur, index) => {
          const partIndex = Math.floor(index / BATCH_SIZE);
          if (!acc[partIndex]) {
            acc[partIndex] = [];
          }
          acc[partIndex].push(cur);
          return acc;
        },
        [],
      );

      // One entry per batch: either the server response or a
      // synthetic { data: { code: 500 } } recording a hard failure.
      const allRet = [];
      for (const listPart of partitionedFileList) {
        const formData = new FormData();
        formData.append('kb_id', knowledgeId);
        listPart.forEach((file: any) => {
          formData.append('file', file);
        });

        try {
          const ret = await kbService.document_upload(formData);
          allRet.push(ret);
        } catch (error) {
          // Record a synthetic 500 so the aggregate result reflects
          // this batch's failure instead of silently dropping it.
          allRet.push({ data: { code: 500 } });
          const filenames = listPart.map((file: any) => file.name).join(', ');
          console.warn(error);
          console.warn('Error uploading files:', filenames);
        }
      }

      const succeed = allRet.every((ret) => get(ret, 'data.code') === 0);
      const any500 = allRet.some((ret) => get(ret, 'data.code') === 500);

      if (succeed) {
        message.success(i18n.t('message.uploaded'));
      }

      // Refresh the document list whenever anything may have landed
      // server-side (full success, or a mix that included failures).
      if (succeed || any500) {
        queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
      }

      const allData = {
        code: any500
          ? 500
          : succeed
            ? 0
            : // First non-zero server code (find short-circuits,
              // unlike the previous filter(...)[0]).
              allRet.find((ret) => get(ret, 'data.code') !== 0)?.data?.code,
        data: succeed,
        // BUGFIX: separator was the literal two characters '/n';
        // use a real newline so messages render on separate lines.
        message: allRet.map((ret) => get(ret, 'data.message')).join('\n'),
      };
      return allData;
    },
  });
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user