Mirror of https://github.com/ascribe/onion.git, synced 2025-01-03 10:25:08 +01:00
Fix ESLint errors with computeHashOfFile
commit 63310c5717 (parent f814a6ceb5)
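The diff below replaces `let`/`var` declarations that are never reassigned with `const`, drops an unnecessary `.bind(this)`, converts the reader callbacks to arrow functions, and wraps an over-long string. The repository's actual ESLint configuration is not part of this commit; the following `.eslintrc.js` is only a hypothetical sketch of standard ESLint rules that would flag these patterns (the severities and the `max-len` limit are assumptions):

// Hypothetical .eslintrc.js -- the repository's real configuration is not part of
// this commit; these are simply standard ESLint rules that flag the patterns fixed below.
module.exports = {
    parserOptions: { ecmaVersion: 6, sourceType: 'module' },
    env: { browser: true },
    rules: {
        'no-var': 'error',          // flags `var start = ...` in loadNext()
        'prefer-const': 'error',    // flags `let` bindings that are never reassigned
        'no-extra-bind': 'error',   // flags `.bind(this)` on a handler that never uses `this`
        'max-len': ['error', { code: 100 }]  // limit assumed; would motivate wrapping the long getLangText() string
    }
};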
@@ -1,5 +1,3 @@
-'use strict';
-
 import Q from 'q';
 import SparkMD5 from 'spark-md5';
 import Moment from 'moment';
@@ -17,18 +15,17 @@ export { extractFileExtensionFromString, extractFileExtensionFromUrl } from 'js-
  */
 export function computeHashOfFile(file) {
     return Q.Promise((resolve, reject, notify) => {
-        let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
-        let chunkSize = 2097152; // Read in chunks of 2MB
-        let chunks = Math.ceil(file.size / chunkSize);
+        const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
+        const chunkSize = 2097152; // Read in chunks of 2MB
+        const chunks = Math.ceil(file.size / chunkSize);
+        const spark = new SparkMD5.ArrayBuffer();
+        const fileReader = new FileReader();
+        const startTime = new Moment();
         let currentChunk = 0;
-        let spark = new SparkMD5.ArrayBuffer();
-        let fileReader = new FileReader();
-
-        let startTime = new Moment();
 
         // comment: We should convert this to es6 at some point, however if so please consider that
         // an arrow function will get rid of the function's scope...
-        fileReader.onload = function(e) {
+        fileReader.onload = (e) => {
             // console.log('read chunk nr', currentChunk + 1, 'of', chunks);
             spark.append(e.target.result); // Append array buffer
             currentChunk++;
@@ -36,24 +33,24 @@ export function computeHashOfFile(file) {
             if (currentChunk < chunks) {
                 loadNext();
             } else {
-                let fileHash = spark.end();
+                const fileHash = spark.end();
 
                 console.info('computed hash %s (took %d s)',
                     fileHash,
                     Math.round(((new Moment() - startTime) / 1000) % 60)); // Compute hash
 
-                let blobTextFile = makeTextFile(fileHash, file);
-                resolve(blobTextFile);
+                resolve(makeTextFile(fileHash, file));
             }
-        }.bind(this);
+        };
 
-        fileReader.onerror = function () {
-            reject(new Error(getLangText('We weren\'t able to hash your file locally. Try to upload it manually or consider contact us.')));
+        fileReader.onerror = () => {
+            reject(new Error(getLangText("We weren't able to hash your file locally. Try to " +
+                                         'upload it manually or consider contact us.')));
         };
 
         function loadNext() {
-            var start = currentChunk * chunkSize,
-                end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
+            const start = currentChunk * chunkSize;
+            const end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
 
             // send progress
             // Due to the fact that progressHandler and notify are going to be removed in v2
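For context, this is the pattern the patched function implements: the file is read in 2MB slices with a FileReader, each slice is appended to an incremental SparkMD5 hasher, and the hex digest is produced once every chunk has been consumed. The sketch below is a condensed, self-contained version and not the project's code: it returns a native Promise and takes a hypothetical onProgress callback instead of using Q.Promise's notify (which, as the comment in the diff notes, is slated for removal in Q v2), and it omits the project helpers makeTextFile and getLangText.

// Condensed sketch only -- not the project's code. Same chunked FileReader +
// SparkMD5 pattern as the diff, but with a native Promise and a hypothetical
// onProgress callback in place of Q's notify().
import SparkMD5 from 'spark-md5';

function hashFileInChunks(file, onProgress) {
    return new Promise((resolve, reject) => {
        const blobSlice = File.prototype.slice ||
                          File.prototype.mozSlice ||
                          File.prototype.webkitSlice;
        const chunkSize = 2097152;                       // 2MB per read, as in the diff
        const chunks = Math.ceil(file.size / chunkSize);
        const spark = new SparkMD5.ArrayBuffer();
        const fileReader = new FileReader();
        let currentChunk = 0;

        fileReader.onload = (e) => {
            spark.append(e.target.result);               // fold this chunk into the running MD5
            currentChunk += 1;

            if (currentChunk < chunks) {
                loadNext();                              // keep reading until the file is exhausted
            } else {
                resolve(spark.end());                    // final hex digest
            }
        };

        fileReader.onerror = () => {
            reject(new Error('Could not hash the file locally.'));
        };

        function loadNext() {
            const start = currentChunk * chunkSize;
            const end = Math.min(start + chunkSize, file.size);

            if (typeof onProgress === 'function') {
                onProgress(currentChunk / chunks);       // rough progress in [0, 1)
            }
            fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
        }

        loadNext();
    });
}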
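A possible call site for the patched function, shown only for illustration: the import path and the shape of the progress payload are assumptions, not taken from this commit. Q promises do expose .progress(), .then(), and .catch() as used here.

import { computeHashOfFile } from './utils/file_utils';  // path assumed, not shown in this commit

computeHashOfFile(file)
    .progress((update) => console.log('hashing...', update))        // Q's notify channel; payload shape assumed
    .then((textFileBlob) => console.log('hash file ready:', textFileBlob))
    .catch((err) => console.error(err.message));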