Mirror of https://github.com/ascribe/onion.git

convert computeHashOfFile to a promise-based solution

Tim Daubenschütz 2015-07-23 15:38:52 +02:00
parent 71fcdff874
commit 33a179cfe2
3 changed files with 43 additions and 37 deletions


@@ -34,7 +34,8 @@
     "globals": {
         "Intercom": true,
         "fetch": true,
-        "require": true
+        "require": true,
+        "File": true
     },
     "plugins": [
         "react"


@@ -171,7 +171,8 @@ let FileUploader = React.createClass({
                         }
                     }}
                     areAssetsDownloadable={true}
-                    areAssetsEditable={this.props.editable}/>
+                    areAssetsEditable={this.props.editable}
+                    localHashing={true}/>
                 </Property>
                 <hr />
             </Form>


@@ -8,7 +8,7 @@ import SparkMD5 from 'spark-md5';
  * @param {string} text regular javascript string
  * @return {string} regular javascript string
  */
-export function makeTextFile(text) {
+function makeTextFile(text) {
     let data = new Blob([text], {type: 'text/plain'});
     return window.URL.createObjectURL(data);
 }
@@ -20,42 +20,46 @@ export function makeTextFile(text) {
  * @return {string} regular javascript string
  */
 export function computeHashOfFile(file) {
-    let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
-        chunkSize = 2097152, // Read in chunks of 2MB
-        chunks = Math.ceil(file.size / chunkSize),
-        currentChunk = 0,
-        spark = new SparkMD5.ArrayBuffer(),
-        fileReader = new FileReader();
-
-    let startTime = new Date();
-
-    fileReader.onload = function (e) {
-        //console.log('read chunk nr', currentChunk + 1, 'of', chunks);
-        spark.append(e.target.result); // Append array buffer
-        currentChunk++;
-
-        if (currentChunk < chunks) {
-            loadNext();
-        } else {
-            let fileHash = spark.end();
-            console.info('computed hash %s (took %d s)',
-                         fileHash,
-                         Math.round(((new Date() - startTime) / 1000) % 60)); // Compute hash
-            let hashFile = this.makeTextFile(fileHash);
-            console.info('hash: ', hashFile);
-            return hashFile;
-        }
-    }.bind(this);
-
-    fileReader.onerror = function () {
-        console.warn('oops, something went wrong.');
-    };
-
-    function loadNext() {
-        var start = currentChunk * chunkSize,
-            end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
-
-        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
-    }
-
-    loadNext();
+    return new Promise((resolve, reject) => {
+        let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
+        let chunkSize = 2097152; // Read in chunks of 2MB
+        let chunks = Math.ceil(file.size / chunkSize);
+        let currentChunk = 0;
+        let spark = new SparkMD5.ArrayBuffer();
+        let fileReader = new FileReader();
+        let startTime = new Date();
+
+        fileReader.onload = function(e) {
+            //console.log('read chunk nr', currentChunk + 1, 'of', chunks);
+            spark.append(e.target.result); // Append array buffer
+            currentChunk++;
+
+            if (currentChunk < chunks) {
+                loadNext();
+            } else {
+                let fileHash = spark.end();
+
+                console.info('computed hash %s (took %d s)',
+                             fileHash,
+                             Math.round(((new Date() - startTime) / 1000) % 60)); // Compute hash
+
+                let hashFile = makeTextFile(fileHash);
+                console.info('hash: ', hashFile);
+                resolve(hashFile);
+            }
+        }.bind(this);
+
+        fileReader.onerror = function () {
+            reject(new Error('We weren\'t able to hash your file locally. Try to upload it manually or consider contacting us.'));
+        };
+
+        function loadNext() {
+            var start = currentChunk * chunkSize,
+                end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
+
+            fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
+        }
+
+        loadNext();
+    });
 }
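
With this change, callers of computeHashOfFile receive a Promise instead of a synchronous return value: it resolves with the object URL produced by makeTextFile and rejects with the error message set in fileReader.onerror. A minimal sketch of how a consumer might use it is shown below; the import path and the handler name are assumptions for illustration, not part of this commit.

// Hypothetical consumer of the new promise-based computeHashOfFile.
// The import path is assumed; adjust to wherever the utility lives in the repo.
import { computeHashOfFile } from '../utils/file_utils';

function handleFileSelected(file) {
    computeHashOfFile(file)
        .then((hashFileUrl) => {
            // hashFileUrl is an object URL pointing at a text file
            // containing the MD5 hash (see makeTextFile above).
            console.info('local hash ready:', hashFileUrl);
        })
        .catch((err) => {
            // The rejection carries the user-facing message from fileReader.onerror.
            console.warn(err.message);
        });
}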