// Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

'use strict';

/**
 * A class that takes care of communication with NaCl and creates an archive.
 * One instance of this class is created for each pack request. Since multiple
 * compression requests can be in progress at the same time, each instance has
 * a unique compressor id, which is a positive integer. Every message
 * exchanged with NaCl must carry this compressor id.
 * @constructor
 * @param {!Object} naclModule The NaCl module.
 * @param {!Array} items The items to be packed.
 */
unpacker.Compressor = function(naclModule, items) {
  /**
   * @private {!Object}
   * @const
   */
  this.naclModule_ = naclModule;

  /**
   * @private {!Array}
   * @const
   */
  this.items_ = items;

  /**
   * @private {!unpacker.types.CompressorId}
   * @const
   */
  this.compressorId_ = unpacker.Compressor.compressorIdCounter++;

  /**
   * @private {string}
   * @const
   */
  this.archiveName_ = this.getArchiveName_();

  /**
   * The counter used to assign a unique id to each entry.
   * @type {number}
   */
  this.entryIdCounter_ = 1;

  /**
   * The set of entry ids waiting for metadata from the FileSystem API.
   * These requests need to be tracked here to tell whether the whole pack
   * process has finished or not.
   * @type {!Set}
   */
  this.metadataRequestsInProgress_ = new Set();

  /**
   * The queue containing entry ids that have already obtained metadata from
   * the FileSystem API and are waiting to be added to the archive.
   * @type {!Array}
   */
  this.pendingAddToArchiveRequests_ = [];

  /**
   * The id of the entry that is being compressed and written into the
   * archive. Note that, unlike unpacking, entries are packed one by one, so
   * at most one entry is processed at a time.
   * @type {!unpacker.types.EntryId}
   */
  this.entryIdInProgress_ = 0;

  /**
   * Map from entry ids to entries.
   * @const {!Object<!unpacker.types.EntryId, !FileEntry|!DirectoryEntry>}
   */
  this.entries_ = {};

  /**
   * Map from entry ids to their metadata.
   * @const {!Object<!unpacker.types.EntryId, !Metadata>}
   */
  this.metadata_ = {};

  /**
   * The offset from which the entry in progress should be read.
   * @type {number}
   */
  this.offset_ = 0;
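
  // The following members are assigned after construction; they are declared
  // here so that all of the instance's state is documented in one place.

  /**
   * Callback invoked when the whole pack request succeeds. Assigned in
   * compress().
   * @type {?function(!unpacker.types.CompressorId)}
   */
  this.onSuccess_ = null;

  /**
   * Callback invoked when the pack request fails. Assigned in compress().
   * @type {?function(!unpacker.types.CompressorId)}
   */
  this.onError_ = null;

  /**
   * The File object of the entry currently being packed. Cached on the first
   * read file chunk request and cleared in onAddToArchiveDone_().
   * @type {?File}
   */
  this.file_ = null;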
};
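
// Typical usage (a sketch with hypothetical callback names; the actual caller
// lives elsewhere in the extension):
//   var compressor = new unpacker.Compressor(naclModule, items);
//   compressor.compress(onPackSuccess, onPackError);
//   // Responses from the NaCl module addressed to this compressor id are
//   // then forwarded to compressor.processMessage(data, operation).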

/**
 * The counter used to assign compressor ids. It is incremented every time a
 * new compressor instance is created.
 * @type {number}
 */
unpacker.Compressor.compressorIdCounter = 1;

/**
 * The queue containing ids of compressors that are waiting for the foreground
 * page to be loaded. Once this extension becomes a component extension, we
 * will no longer need to create the archive file on the foreground page and
 * this queue will also become unnecessary.
 * @type {!Array}
 */
unpacker.Compressor.CompressorIdQueue = [];

/**
 * The default archive name.
 * @type {string}
 */
unpacker.Compressor.DEFAULT_ARCHIVE_NAME = 'Archive.zip';

/**
 * The getter function for the compressor id.
 * @return {!unpacker.types.CompressorId}
 */
unpacker.Compressor.prototype.getCompressorId = function() {
  return this.compressorId_;
};

/**
 * Returns the archive file name.
 * @private
 * @return {string}
 */
unpacker.Compressor.prototype.getArchiveName_ = function() {
  // When multiple entries are selected.
  if (this.items_.length !== 1)
    return unpacker.Compressor.DEFAULT_ARCHIVE_NAME;

  var name = this.items_[0].entry.name;
  var idx = name.lastIndexOf('.');
  // When the name does not have an extension.
  // TODO(takise): This converts file.tar.gz to file.tar.zip.
  if (idx === -1)
    return name + '.zip';
  // When the name has an extension.
  return name.substring(0, idx) + '.zip';
};
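
// For illustration, the resulting archive names look like this:
//   a single file 'notes.txt'      -> 'notes.zip'
//   a single directory 'photos'    -> 'photos.zip'
//   a single file 'backup.tar.gz'  -> 'backup.tar.zip' (see the TODO above)
//   multiple selected items        -> 'Archive.zip'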

/**
 * Starts the actual compression process.
 * Creates an archive file and requests libarchive to create an archive object.
 * @param {function(!unpacker.types.CompressorId)} onSuccess
 * @param {function(!unpacker.types.CompressorId)} onError
 */
unpacker.Compressor.prototype.compress = function(onSuccess, onError) {
  this.onSuccess_ = onSuccess;
  this.onError_ = onError;

  this.getArchiveFile_();
};

/**
 * Gets an archive file with write permission. Currently, this extension does
 * not have permission to create files from the background page. Thus, this
 * function first creates a foreground page and then creates an archive file in
 * it. Once this extension becomes a component extension, this process will be
 * simpler.
 * @private
 */
unpacker.Compressor.prototype.getArchiveFile_ = function() {
  // If the foreground page already exists, create an archive file.
  if (this.createArchiveFileForeground_) {
    this.createArchiveFileForeground_(this.compressorId_);
  } else {
    // If the foreground page does not exist, push the id of this compressor
    // onto the queue so that we can resume later, and create the foreground
    // page. We need this queue because multiple compressors can be waiting
    // for the foreground page to be loaded.
    var queue = unpacker.Compressor.CompressorIdQueue;
    queue.push(this.compressorId_);
    if (queue.length === 1) {
      chrome.app.window.create('../html/compressor.html', {hidden: true});
    }
  }
};
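
// Note: createArchiveFileForeground_ and archiveFileEntry_ are not assigned
// anywhere in this file; they are presumably set on each compressor instance
// by the foreground page (compressor.html) once it has created the output
// archive file, after which sendCreateArchiveRequest_() can be called to
// continue the packing flow.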

/**
 * Sends a create archive request to NaCl.
 * @private
 */
unpacker.Compressor.prototype.sendCreateArchiveRequest_ = function() {
  var request = unpacker.request.createCreateArchiveRequest(
      this.compressorId_);
  this.naclModule_.postMessage(request);
};

/**
 * A handler of the create archive done response.
 * Enumerates the entries and requests their metadata from the FileSystem API.
 * @private
 */
unpacker.Compressor.prototype.createArchiveDone_ = function() {
  this.items_.forEach(function(item) {
    this.getEntryMetadata_(item.entry);
  }.bind(this));
};

/**
 * Gets metadata of a file or directory.
 * @param {!FileEntry|!DirectoryEntry} entry FileEntry or DirectoryEntry.
 * @private
 */
unpacker.Compressor.prototype.getEntryMetadata_ = function(entry) {
  if (entry.isFile)
    this.getSingleMetadata_(entry);
  else
    this.getDirectoryEntryMetadata_(/** @type {!DirectoryEntry} */ (entry));
};

/**
 * Requests metadata of an entry non-recursively.
 * @param {!FileEntry|!DirectoryEntry} entry FileEntry or DirectoryEntry.
 * @private
 */
unpacker.Compressor.prototype.getSingleMetadata_ = function(entry) {
  var entryId = this.entryIdCounter_++;
  this.metadataRequestsInProgress_.add(entryId);
  this.entries_[entryId] = entry;

  entry.getMetadata(function(metadata) {
    this.metadataRequestsInProgress_.delete(entryId);
    this.pendingAddToArchiveRequests_.push(entryId);
    this.metadata_[entryId] = metadata;
    this.sendAddToArchiveRequest_();
  }.bind(this), function(error) {
    console.error('Failed to get metadata: ' + error.message + '.');
    this.onError_(this.compressorId_);
  }.bind(this));
};

/**
 * Requests metadata of an entry recursively.
 * @param {!DirectoryEntry} dir DirectoryEntry.
 * @private
 */
unpacker.Compressor.prototype.getDirectoryEntryMetadata_ = function(dir) {
  // Read entries in dir and call getEntryMetadata_ for them recursively.
  var dirReader = dir.createReader();

  // Recursive function.
  var getEntries = function() {
    dirReader.readEntries(function(results) {
      // readEntries must be called until it returns nothing, because it does
      // not necessarily return all entries in the directory at once.
      if (results.length) {
        results.forEach(this.getEntryMetadata_.bind(this));
        getEntries();
      }
    }.bind(this), function(error) {
      console.error('Failed to get directory entries: ' +
          error.message + '.');
      this.onError_(this.compressorId_);
    }.bind(this));
  }.bind(this);

  getEntries();

  // Get the metadata of this dir itself.
  this.getSingleMetadata_(dir);
};

/**
 * Pops an entry from the queue and adds it to the archive.
 * If another entry is in progress, this function does nothing. If there is no
 * entry in the queue, it moves on to the close archive process. Otherwise, it
 * sends an add to archive request for the popped entry, together with its
 * metadata, to libarchive.
 * @private
 */
unpacker.Compressor.prototype.sendAddToArchiveRequest_ = function() {
  // Another entry is already in progress.
  if (this.entryIdInProgress_ != 0)
    return;

  // No entry is waiting to be added to the archive. If no metadata request is
  // in progress either, all entries have been archived, so close the archive.
  if (this.pendingAddToArchiveRequests_.length === 0) {
    if (this.metadataRequestsInProgress_.size === 0)
      this.sendCloseArchiveRequest(false /* hasError */);
    return;
  }

  var entryId = this.pendingAddToArchiveRequests_.shift();
  this.entryIdInProgress_ = entryId;

  // Convert the absolute path on the virtual filesystem to a path relative to
  // the archive root by removing the leading '/' if it exists.
  var fullPath = this.entries_[entryId].fullPath;
  if (fullPath.length && fullPath[0] == '/')
    fullPath = fullPath.substring(1);

  // The modification time is sent as a string of the form
  // 'month/day/year hour:minute:second' (fields are not zero-padded).
  var mt = this.metadata_[entryId].modificationTime;
  var formattedTime = (mt.getMonth() + 1) + '/' + mt.getDate() + '/' +
      mt.getFullYear() + ' ' + mt.getHours() + ':' +
      mt.getMinutes() + ':' + mt.getSeconds();
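  // For example, a modification time of March 5, 2017 14:07:09 is sent as
  // '3/5/2017 14:7:9'.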

  var request = unpacker.request.createAddToArchiveRequest(
      this.compressorId_, entryId, fullPath,
      this.metadata_[entryId].size, formattedTime,
      this.entries_[entryId].isDirectory);
  this.naclModule_.postMessage(request);
};

/**
 * Sends a close archive request to libarchive. libarchive writes the
 * metadata of the archive itself into the archive and releases the objects
 * obtained during the packing process.
 * @param {boolean} hasError Whether an error occurred during packing.
 */
unpacker.Compressor.prototype.sendCloseArchiveRequest = function(hasError) {
  var request = unpacker.request.createCloseArchiveRequest(
      this.compressorId_, hasError);
  this.naclModule_.postMessage(request);
};

/**
 * Sends a read file chunk done response.
 * @param {number} length The number of bytes read from the entry.
 * @param {!ArrayBuffer} buffer A buffer containing the data that was read.
 * @private
 */
unpacker.Compressor.prototype.sendReadFileChunkDone_ =
    function(length, buffer) {
  var request = unpacker.request.createReadFileChunkDoneResponse(
      this.compressorId_, length, buffer);
  this.naclModule_.postMessage(request);
};

/**
 * A handler of read file chunk requests.
 * Reads 'length' bytes from the entry currently in progress.
 * @param {!Object} data
 * @private
 */
unpacker.Compressor.prototype.onReadFileChunk_ = function(data) {
  var entryId = this.entryIdInProgress_;
  var entry = this.entries_[entryId];
  var length = Number(data[unpacker.request.Key.LENGTH]);

  // A function to create a reader and read bytes.
  var readFileChunk = function() {
    var file = this.file_.slice(this.offset_, this.offset_ + length);
    var reader = new FileReader();

    reader.onloadend = function(event) {
      var buffer = event.target.result;

      // The buffer must have 'length' bytes because the byte length which can
      // be read from the file has already been calculated on the NaCl side.
      if (buffer.byteLength !== length) {
        console.error('Tried to read a chunk of length ' + length +
            ', but a buffer of byte length ' + buffer.byteLength +
            ' was returned.');

        // If the first argument (length) is negative, it means that an error
        // occurred while reading a chunk.
        this.sendReadFileChunkDone_(-1, buffer);
        this.onError_(this.compressorId_);
        return;
      }

      this.offset_ += length;
      this.sendReadFileChunkDone_(length, buffer);
    }.bind(this);

    reader.onerror = function(event) {
      console.error('Failed to read file chunk. Name: ' + this.file_.name +
          ', offset: ' + this.offset_ + ', length: ' + length + '.');

      // If the first argument (length) is negative, it means that an error
      // occurred while reading a chunk.
      this.sendReadFileChunkDone_(-1, new ArrayBuffer(0));
      this.onError_(this.compressorId_);
    }.bind(this);

    reader.readAsArrayBuffer(file);
  }.bind(this);

  // When the entry is read for the first time, obtain its File object and
  // cache it.
  if (!this.file_) {
    entry.file(function(file) {
      this.file_ = file;
      readFileChunk();
    }.bind(this));
    return;
  }

  // From the second time onward.
  readFileChunk();
};

/**
 * A handler of write chunk requests.
 * Writes the data in the given buffer onto the archive file.
 * @param {!Object} data
 * @private
 */
unpacker.Compressor.prototype.onWriteChunk_ = function(data) {
  var length = Number(data[unpacker.request.Key.LENGTH]);
  var buffer = data[unpacker.request.Key.CHUNK_BUFFER];
  this.writeChunk_(length, buffer, this.sendWriteChunkDone_.bind(this));
};

/**
 * Writes a buffer into the archive file (archiveFileEntry_).
 * @param {number} length The number of bytes in the buffer to write.
 * @param {!ArrayBuffer} buffer The buffer to write into the archive.
 * @param {function(number)} callback Callback to execute at the end of this
 *     function. It takes one parameter: length, which represents the number
 *     of bytes written to the archive. If writing a chunk fails, a negative
 *     value is passed instead.
 * @private
 */
unpacker.Compressor.prototype.writeChunk_ = function(length, buffer,
                                                     callback) {
  // TODO(takise): Use the same instance of FileWriter over multiple calls of
  // this function instead of creating new ones.
  this.archiveFileEntry_.createWriter(function(fileWriter) {
    fileWriter.onwriteend = function(event) {
      callback(length);
    };

    fileWriter.onerror = function(event) {
      console.error('Failed to write chunk to ' + this.archiveFileEntry_ +
          '.');

      // If the first argument (length) is negative, it means that an error
      // occurred while writing a chunk.
      callback(-1 /* length */);
      this.onError_(this.compressorId_);
    }.bind(this);

    // Create a new Blob and append it to the archive file.
    var blob = new Blob([buffer], {});
    fileWriter.seek(fileWriter.length);
    fileWriter.write(blob);
  }.bind(this), function(event) {
    console.error('Failed to create writer for ' + this.archiveFileEntry_ +
        '.');
    this.onError_(this.compressorId_);
  }.bind(this));
};

/**
 * Sends a write chunk done response.
 * @param {number} length The number of bytes written to the archive file.
 * @private
 */
unpacker.Compressor.prototype.sendWriteChunkDone_ = function(length) {
  var request = unpacker.request.createWriteChunkDoneResponse(
      this.compressorId_, length);
  this.naclModule_.postMessage(request);
};

/**
 * A handler of the add to archive done response.
 * Resets information on the current entry and starts processing another entry.
 * @private
 */
unpacker.Compressor.prototype.onAddToArchiveDone_ = function() {
  // Reset information on the current entry.
  this.entryIdInProgress_ = 0;
  this.file_ = null;
  this.offset_ = 0;

  // Start processing another entry.
  this.sendAddToArchiveRequest_();
};

/**
 * A handler of the close archive done response.
 * Receiving this response means the entire packing process has finished.
 * @private
 */
unpacker.Compressor.prototype.onCloseArchiveDone_ = function() {
  this.onSuccess_(this.compressorId_);
};
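
// Overall packing message flow, as implemented by the handlers dispatched
// below:
//   1. JS sends CREATE_ARCHIVE and waits for CREATE_ARCHIVE_DONE.
//   2. For each entry, JS sends ADD_TO_ARCHIVE; NaCl then issues
//      READ_FILE_CHUNK and WRITE_CHUNK requests as needed and finishes the
//      entry with ADD_TO_ARCHIVE_DONE.
//   3. When no entries remain, JS sends CLOSE_ARCHIVE and waits for
//      CLOSE_ARCHIVE_DONE, at which point onSuccess_ is invoked.
// COMPRESSOR_ERROR may arrive at any point and is reported via onError_.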

/**
 * Processes messages from the NaCl module.
 * @param {!Object} data The data contained in the message from NaCl. Its
 *     contents depend on the operation of the request.
 * @param {!unpacker.request.Operation} operation An operation from request.js.
 */
unpacker.Compressor.prototype.processMessage = function(data, operation) {
  switch (operation) {
    case unpacker.request.Operation.CREATE_ARCHIVE_DONE:
      this.createArchiveDone_();
      break;

    case unpacker.request.Operation.READ_FILE_CHUNK:
      this.onReadFileChunk_(data);
      break;

    case unpacker.request.Operation.WRITE_CHUNK:
      this.onWriteChunk_(data);
      break;

    case unpacker.request.Operation.ADD_TO_ARCHIVE_DONE:
      this.onAddToArchiveDone_();
      break;

    case unpacker.request.Operation.CLOSE_ARCHIVE_DONE:
      this.onCloseArchiveDone_();
      break;

    case unpacker.request.Operation.COMPRESSOR_ERROR:
      // The error message already contains a '.' at the end.
      console.error('Compressor error for compressor id ' +
          this.compressorId_ + ': ' + data[unpacker.request.Key.ERROR]);
      this.onError_(this.compressorId_);
      break;

    default:
      console.error('Invalid NaCl operation: ' + operation + '.');
      this.onError_(this.compressorId_);
  }
};