使用JQuery文件上载在上载文件之前进行md5校验和
我正在使用lib将文件上载到服务器。但在此之前,我想对md5进行校验和,以发送一个AJAX请求来检查重复的文件。那么,有没有什么方法可以在上传之前对文件的MD5进行校验和呢。谢谢并致以最诚挚的问候。总结:使用JQuery文件上载在上载文件之前进行md5校验和,jquery,file,upload,jquery-file-upload,blueimp,Jquery,File,Upload,Jquery File Upload,Blueimp,我正在使用lib将文件上载到服务器。但在此之前,我想对md5进行校验和,以发送一个AJAX请求来检查重复的文件。那么,有没有什么方法可以在上传之前对文件的MD5进行校验和呢。谢谢并致以最诚挚的问候。总结: 在调用启动上载的data.submit()之前,在'add'部分添加spark-md5代码 您还可以执行其他操作,如文件大小检查等 我的堆栈: mynode.js中的Multer(您可以使用其他东西): 蓝精灵: 用于md5检查的spark-md5: 演示页面中的代码: 控制台日志结果
- 在调用启动上载的 data.submit() 之前,在 'add' 回调部分添加 spark-md5 代码
- 您还可以执行其他操作,如文件大小检查等
- mynode.js中的Multer(您可以使用其他东西):
- 蓝精灵:
- 用于md5检查的spark-md5:
- 演示页面中的代码:
uploading adobe_flash_setup_0906278883.exe 4522ae4ce9ee143b5b18dfa4a51b01b6
file name: adobe_flash_setup_0906278883.exe (1,518,959 bytes)
read chunk number 1 of 1
finished loading :)
computed hash: 3f38a0468b52a38c34385201de4746b0
placeholder call for data.submit();
在我的
标记之前:
<script src="https://unpkg.com/jquery@3.2.1/dist/jquery.min.js"></script>
<script src="https://unpkg.com/blueimp-file-upload@9.19.1/js/vendor/jquery.ui.widget.js"></script>
<script src="https://unpkg.com/blueimp-file-upload@9.19.1/js/jquery.iframe-transport.js"></script>
<script src="https://unpkg.com/blueimp-file-upload@9.19.1/js/jquery.fileupload.js"></script>
<script src="https://unpkg.com/spark-md5@3.0.0/spark-md5.min.js"></script>
// Configure blueimp jQuery File Upload: when a file is added, read it in
// 2 MB chunks with FileReader and feed each chunk to SparkMD5. The upload
// (data.submit()) starts only after the full MD5 is computed, so the hash
// can first be used (e.g. in an AJAX request) to detect duplicate files.
$('#fileupload').fileupload({
url: 'https://mywebsite/blahblahblahblahblah',
paramName: '_file',
dataType: 'json',
type: 'POST',
autoUpload: true,
add: function(e, data) {
// NOTE(review): the original logged an undefined `_hashID` here, which
// would throw a ReferenceError and abort this handler; removed.
console.log('uploading', data.files[0].name);
// Blob slicing with vendor-prefixed fallbacks for older browsers.
var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
file = data.files[0],
chunkSize = 2097152, // read in chunks of 2MB
chunks = Math.ceil(file.size / chunkSize),
currentChunk = 0,
spark = new SparkMD5.ArrayBuffer(),
frOnload = function(e) {
console.log("\nread chunk number " + (currentChunk + 1) + " of " + chunks);
spark.append(e.target.result); // append array buffer
currentChunk++;
if (currentChunk < chunks) {
loadNext();
} else {
// BUG FIX: without braces the original ran data.submit() after
// EVERY chunk; it must run exactly once, after the hash is done.
console.log("\nfinished loading :)\n\ncomputed hash:\n" + spark.end());
console.log("placeholder call for data.submit();");
data.submit();
}
},
frOnerror = function() {
console.log("\noops, something went wrong.");
};
// Kick off an async read of the next chunk; frOnload continues the loop.
function loadNext() {
var fileReader = new FileReader();
fileReader.onload = frOnload;
fileReader.onerror = frOnerror;
var start = currentChunk * chunkSize,
end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
// Log the file name with a thousands-separated byte count.
console.log("file name: " + file.name + " (" + file.size.toString().replace(/\B(?=(?:\d{3})+(?!\d))/g, ',') + " bytes)\n");
loadNext();
},
progress: function (e, data) {
// usual stuff
},
done: function (e, data) {
// usual stuff
}
}); // FIX: closing of the fileupload({...}) call was missing in the snippet
在我的
标签上:
<script src="https://unpkg.com/jquery@3.2.1/dist/jquery.min.js"></script>
<script src="https://unpkg.com/blueimp-file-upload@9.19.1/js/vendor/jquery.ui.widget.js"></script>
<script src="https://unpkg.com/blueimp-file-upload@9.19.1/js/jquery.iframe-transport.js"></script>
<script src="https://unpkg.com/blueimp-file-upload@9.19.1/js/jquery.fileupload.js"></script>
<script src="https://unpkg.com/spark-md5@3.0.0/spark-md5.min.js"></script>
// Configure blueimp jQuery File Upload: when a file is added, read it in
// 2 MB chunks with FileReader and feed each chunk to SparkMD5. The upload
// (data.submit()) starts only after the full MD5 is computed, so the hash
// can first be used (e.g. in an AJAX request) to detect duplicate files.
$('#fileupload').fileupload({
url: 'https://mywebsite/blahblahblahblahblah',
paramName: '_file',
dataType: 'json',
type: 'POST',
autoUpload: true,
add: function(e, data) {
// NOTE(review): the original logged an undefined `_hashID` here, which
// would throw a ReferenceError and abort this handler; removed.
console.log('uploading', data.files[0].name);
// Blob slicing with vendor-prefixed fallbacks for older browsers.
var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
file = data.files[0],
chunkSize = 2097152, // read in chunks of 2MB
chunks = Math.ceil(file.size / chunkSize),
currentChunk = 0,
spark = new SparkMD5.ArrayBuffer(),
frOnload = function(e) {
console.log("\nread chunk number " + (currentChunk + 1) + " of " + chunks);
spark.append(e.target.result); // append array buffer
currentChunk++;
if (currentChunk < chunks) {
loadNext();
} else {
// BUG FIX: without braces the original ran data.submit() after
// EVERY chunk; it must run exactly once, after the hash is done.
console.log("\nfinished loading :)\n\ncomputed hash:\n" + spark.end());
console.log("placeholder call for data.submit();");
data.submit();
}
},
frOnerror = function() {
console.log("\noops, something went wrong.");
};
// Kick off an async read of the next chunk; frOnload continues the loop.
function loadNext() {
var fileReader = new FileReader();
fileReader.onload = frOnload;
fileReader.onerror = frOnerror;
var start = currentChunk * chunkSize,
end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
// Log the file name with a thousands-separated byte count.
console.log("file name: " + file.name + " (" + file.size.toString().replace(/\B(?=(?:\d{3})+(?!\d))/g, ',') + " bytes)\n");
loadNext();
},
progress: function (e, data) {
// usual stuff
},
done: function (e, data) {
// usual stuff
}
}); // FIX: closing of the fileupload({...}) call was missing in the snippet
$('#fileupload').fileupload({
url: 'https://mywebsite/blahblahblahblahblah',
paramName: '_file',
dataType: 'json',
type: 'POST',
autoUpload: true,
add: function(e, data) {
console.log('uploading', data.files[0].name, _hashID);
var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
file = data.files[0],
chunkSize = 2097152, // 读取 2MB 的块
chunks = Math.ceil(file.size / chunkSize),
currentChunk = 0,
spark = new SparkMD5.ArrayBuffer(),
frOnload = function(e) {
console.log("\nread chunk number " + parseInt(currentChunk + 1) + " of " + chunks);
spark.append(e.target.result); // 追加数组缓冲区
currentChunk++;
if (currentChunk < chunks)
loadNext();
else
console.log("\nfinished loading :)\n\ncomputed hash:\n" + spark.end());
console.log("placeholder call for data.submit();")
data.submit();
},
frOnerror = function() {
console.log("\noops, something went wrong.");
};
function loadNext() {
var fileReader = new FileReader();
fileReader.onload = frOnload;
fileReader.onerror = frOnerror;
var start = currentChunk * chunkSize,
end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
};
console.log("file name: " + file.name + " (" + file.size.toString().replace(/\B(?=(?:\d{3})+(?!\d))/g, ',') + " bytes)\n");
loadNext();
},
progress: function (e, data) {
// 平常的东西
},
done: function (e, data) {
// 平常的东西
}