/**
 * Split a file into fixed-size chunks for multipart upload
 * (special handling for Aliyun OSS large-file upload).
 *
 * Note: `fileKey` is a required parameter that follows a defaulted one,
 * so callers must always pass all three arguments; the order is kept
 * for backward compatibility.
 *
 * @param file - The file to slice.
 * @param pieceSizes - Chunk size in megabytes (defaults to 50 MB).
 * @param fileKey - Unique identifier for the file.
 * @returns Chunk descriptors with a 1-based `index`, ready for upload.
 */
const getSliceFile = async (file: File, pieceSizes = 50, fileKey: string) => {
  // Chunk size in bytes.
  const piece = 1024 * 1024 * pieceSizes;
  // Total file size in bytes.
  const totalSize = file.size;
  const fileName = file.name;
  // Explicit element type: without it, strict mode infers `any[]`.
  const chunks: Array<{
    file: Blob;
    size: number;
    index: number;
    fileSizeInByte: number;
    name: string;
    fileName: string;
    hash: string;
    sliceSizeInByte: number;
    fileKey: string;
  }> = [];
  let index = 1;
  for (let start = 0; start < totalSize; start += piece) {
    // End byte of the current chunk, clamped to the file size.
    const end = Math.min(start + piece, totalSize);
    // File inherits slice() from Blob, so each chunk is cut lazily
    // without reading the file contents into memory.
    const blob = file.slice(start, end);
    // Placeholder: a stable content hash (e.g. via spark-md5) could be
    // computed here; currently always the empty string.
    const hash = '';
    chunks.push({
      file: blob,
      size: totalSize,
      index,
      fileSizeInByte: totalSize,
      name: fileName,
      fileName,
      hash,
      sliceSizeInByte: blob.size,
      fileKey,
    });
    index += 1;
  }
  return chunks;
};
![](https://img.haomeiwen.com/i15842714/8bfefe4ec487f542.png)
Reader comments