| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 library crypto.hash_base; | 5 library crypto.hash_base; |
| 6 | 6 |
| 7 import 'dart:math' as math; | 7 import 'dart:math' as math; |
| 8 import 'dart:typed_data'; | 8 import 'dart:typed_data'; |
| 9 | 9 |
| 10 import 'hash.dart'; | 10 import 'hash.dart'; |
| 11 import 'utils.dart'; | 11 import 'utils.dart'; |
| 12 | 12 |
| 13 // Base class encapsulating common behavior for cryptographic hash | 13 /// A base class for [Hash] implementations. |
| 14 // functions. | 14 /// |
| 15 /// Subclasses should override [updateHash], and define it to update [h] with |
| 16 /// the results of the hash function. |
| 15 abstract class HashBase implements Hash { | 17 abstract class HashBase implements Hash { |
| 18 /// The size (in 32-bit words) of the chunks of input data that the hash |
| 19 /// function consumes at once. |
| 16 final int _chunkSizeInWords; | 20 final int _chunkSizeInWords; |
| 21 |
| 22 /// The size (in 32-bit words) of the digest that the hash function emits. |
| 17 final int _digestSizeInWords; | 23 final int _digestSizeInWords; |
| 24 |
| 25 /// Whether the hash function operates on big-endian words. |
| 18 final bool _bigEndianWords; | 26 final bool _bigEndianWords; |
| 27 |
| 28 /// The words in the current chunk. |
| 19 final Uint32List _currentChunk; | 29 final Uint32List _currentChunk; |
| 30 |
| 31 /// The words in the current digest. |
| 32 /// |
| 33 /// The size of this buffer is given by the `digestSizeInWords` constructor |
| 34 /// parameter. |
| 20 final Uint32List h; | 35 final Uint32List h; |
| 36 |
| 37 /// The length of the input data so far, in bytes. |
| 21 int _lengthInBytes = 0; | 38 int _lengthInBytes = 0; |
| 39 |
| 40 /// Data that has yet to be processed by the hash function. |
| 22 List<int> _pendingData; | 41 List<int> _pendingData; |
| 42 |
| 43 /// Whether [close] has been called. |
| 23 bool _digestCalled = false; | 44 bool _digestCalled = false; |
| 24 | 45 |
| 46 /// Creates a new hash. |
| 47 /// |
| 48 /// [chunkSizeInWords] represents the size of the input chunks processed by |
| 49 /// the algorithm. [digestSizeInWords] represents the size of the algorithm's |
| 50 /// output digest. Both are in terms of 32-bit words. |
| 25 HashBase( | 51 HashBase( |
| 26 int chunkSizeInWords, int digestSizeInWords, bool this._bigEndianWords) | 52 int chunkSizeInWords, int digestSizeInWords, bool this._bigEndianWords) |
| 27 : _pendingData = [], | 53 : _pendingData = [], |
| 28 _currentChunk = new Uint32List(chunkSizeInWords), | 54 _currentChunk = new Uint32List(chunkSizeInWords), |
| 29 h = new Uint32List(digestSizeInWords), | 55 h = new Uint32List(digestSizeInWords), |
| 30 _chunkSizeInWords = chunkSizeInWords, | 56 _chunkSizeInWords = chunkSizeInWords, |
| 31 _digestSizeInWords = digestSizeInWords; | 57 _digestSizeInWords = digestSizeInWords; |
| 32 | 58 |
| 33 // Update the hasher with more data. | |
| 34 void add(List<int> data) { | 59 void add(List<int> data) { |
| 35 if (_digestCalled) { | 60 if (_digestCalled) { |
| 36 throw new StateError( | 61 throw new StateError( |
| 37 'Hash update method called after digest was retrieved'); | 62 'Hash update method called after digest was retrieved'); |
| 38 } | 63 } |
| 39 _lengthInBytes += data.length; | 64 _lengthInBytes += data.length; |
| 40 _pendingData.addAll(data); | 65 _pendingData.addAll(data); |
| 41 _iterate(); | 66 _iterate(); |
| 42 } | 67 } |
| 43 | 68 |
| 44 // Finish the hash computation and return the digest string. | |
| 45 List<int> close() { | 69 List<int> close() { |
| 46 if (_digestCalled) { | 70 if (_digestCalled) { |
| 47 return _resultAsBytes(); | 71 return _resultAsBytes(); |
| 48 } | 72 } |
| 49 _digestCalled = true; | 73 _digestCalled = true; |
| 50 _finalizeData(); | 74 _finalizeData(); |
| 51 _iterate(); | 75 _iterate(); |
| 52 assert(_pendingData.length == 0); | 76 assert(_pendingData.length == 0); |
| 53 return _resultAsBytes(); | 77 return _resultAsBytes(); |
| 54 } | 78 } |
| 55 | 79 |
| 56 // Returns the block size of the hash in bytes. | |
| 57 int get blockSize { | 80 int get blockSize { |
| 58 return _chunkSizeInWords * BYTES_PER_WORD; | 81 return _chunkSizeInWords * BYTES_PER_WORD; |
| 59 } | 82 } |
| 60 | 83 |
| 61 // One round of the hash computation. | 84 /// Runs a single iteration of the hash computation, updating [h] with the |
| 85 /// result. |
| 86 /// |
| 87 /// [m] is the current chunk, whose size is given by the `chunkSizeInWords` |
| 88 /// parameter passed to the constructor. |
| 62 void updateHash(Uint32List m); | 89 void updateHash(Uint32List m); |
| 63 | 90 |
| 64 // Compute the final result as a list of bytes from the hash words. | 91 /// Computes the final result of the hash as a list of bytes from the hash |
| 92 /// words. |
| 65 List<int> _resultAsBytes() { | 93 List<int> _resultAsBytes() { |
| 66 var result = []; | 94 var result = []; |
| 67 for (var i = 0; i < h.length; i++) { | 95 for (var i = 0; i < h.length; i++) { |
| 68 result.addAll(_wordToBytes(h[i])); | 96 result.addAll(_wordToBytes(h[i])); |
| 69 } | 97 } |
| 70 return result; | 98 return result; |
| 71 } | 99 } |
| 72 | 100 |
| 73 // Converts a list of bytes to a chunk of 32-bit words. | 101 /// Converts a list of bytes to a chunk of 32-bit words. |
| 102 /// |
| 103 /// Stores the result in [_currentChunk]. |
| 74 void _bytesToChunk(List<int> data, int dataIndex) { | 104 void _bytesToChunk(List<int> data, int dataIndex) { |
| 75 assert((data.length - dataIndex) >= (_chunkSizeInWords * BYTES_PER_WORD)); | 105 assert((data.length - dataIndex) >= (_chunkSizeInWords * BYTES_PER_WORD)); |
| 76 | 106 |
| 77 for (var wordIndex = 0; wordIndex < _chunkSizeInWords; wordIndex++) { | 107 for (var wordIndex = 0; wordIndex < _chunkSizeInWords; wordIndex++) { |
| 78 var w3 = _bigEndianWords ? data[dataIndex] : data[dataIndex + 3]; | 108 var w3 = _bigEndianWords ? data[dataIndex] : data[dataIndex + 3]; |
| 79 var w2 = _bigEndianWords ? data[dataIndex + 1] : data[dataIndex + 2]; | 109 var w2 = _bigEndianWords ? data[dataIndex + 1] : data[dataIndex + 2]; |
| 80 var w1 = _bigEndianWords ? data[dataIndex + 2] : data[dataIndex + 1]; | 110 var w1 = _bigEndianWords ? data[dataIndex + 2] : data[dataIndex + 1]; |
| 81 var w0 = _bigEndianWords ? data[dataIndex + 3] : data[dataIndex]; | 111 var w0 = _bigEndianWords ? data[dataIndex + 3] : data[dataIndex]; |
| 82 dataIndex += 4; | 112 dataIndex += 4; |
| 83 var word = (w3 & 0xff) << 24; | 113 var word = (w3 & 0xff) << 24; |
| 84 word |= (w2 & MASK_8) << 16; | 114 word |= (w2 & MASK_8) << 16; |
| 85 word |= (w1 & MASK_8) << 8; | 115 word |= (w1 & MASK_8) << 8; |
| 86 word |= (w0 & MASK_8); | 116 word |= (w0 & MASK_8); |
| 87 _currentChunk[wordIndex] = word; | 117 _currentChunk[wordIndex] = word; |
| 88 } | 118 } |
| 89 } | 119 } |
| 90 | 120 |
| 91 // Convert a 32-bit word to four bytes. | 121 /// Converts a 32-bit word to four bytes. |
| 92 List<int> _wordToBytes(int word) { | 122 List<int> _wordToBytes(int word) { |
| 93 List bytes = new List<int>(BYTES_PER_WORD); | 123 List bytes = new List<int>(BYTES_PER_WORD); |
| 94 bytes[0] = (word >> (_bigEndianWords ? 24 : 0)) & MASK_8; | 124 bytes[0] = (word >> (_bigEndianWords ? 24 : 0)) & MASK_8; |
| 95 bytes[1] = (word >> (_bigEndianWords ? 16 : 8)) & MASK_8; | 125 bytes[1] = (word >> (_bigEndianWords ? 16 : 8)) & MASK_8; |
| 96 bytes[2] = (word >> (_bigEndianWords ? 8 : 16)) & MASK_8; | 126 bytes[2] = (word >> (_bigEndianWords ? 8 : 16)) & MASK_8; |
| 97 bytes[3] = (word >> (_bigEndianWords ? 0 : 24)) & MASK_8; | 127 bytes[3] = (word >> (_bigEndianWords ? 0 : 24)) & MASK_8; |
| 98 return bytes; | 128 return bytes; |
| 99 } | 129 } |
| 100 | 130 |
| 101 // Iterate through data updating the hash computation for each | 131 /// Iterates through [_pendingData], updating the hash computation for each |
| 102 // chunk. | 132 /// chunk. |
| 103 void _iterate() { | 133 void _iterate() { |
| 104 var len = _pendingData.length; | 134 var len = _pendingData.length; |
| 105 var chunkSizeInBytes = _chunkSizeInWords * BYTES_PER_WORD; | 135 var chunkSizeInBytes = _chunkSizeInWords * BYTES_PER_WORD; |
| 106 if (len >= chunkSizeInBytes) { | 136 if (len >= chunkSizeInBytes) { |
| 107 var index = 0; | 137 var index = 0; |
| 108 for (; (len - index) >= chunkSizeInBytes; index += chunkSizeInBytes) { | 138 for (; (len - index) >= chunkSizeInBytes; index += chunkSizeInBytes) { |
| 109 _bytesToChunk(_pendingData, index); | 139 _bytesToChunk(_pendingData, index); |
| 110 updateHash(_currentChunk); | 140 updateHash(_currentChunk); |
| 111 } | 141 } |
| 112 _pendingData = _pendingData.sublist(index, len); | 142 _pendingData = _pendingData.sublist(index, len); |
| 113 } | 143 } |
| 114 } | 144 } |
| 115 | 145 |
| 116 // Finalize the data. Add a 1 bit to the end of the message. Expand with | 146 /// Finalizes [_pendingData]. |
| 117 // 0 bits and add the length of the message. | 147 /// |
| 148 /// This adds a 1 bit to the end of the message, and expands it with 0 bits to |
| | 149 /// pad it out, then appends the message length in bits. |
| 118 void _finalizeData() { | 150 void _finalizeData() { |
| 119 _pendingData.add(0x80); | 151 _pendingData.add(0x80); |
| 120 var contentsLength = _lengthInBytes + 9; | 152 var contentsLength = _lengthInBytes + 9; |
| 121 var chunkSizeInBytes = _chunkSizeInWords * BYTES_PER_WORD; | 153 var chunkSizeInBytes = _chunkSizeInWords * BYTES_PER_WORD; |
| 122 var finalizedLength = roundUp(contentsLength, chunkSizeInBytes); | 154 var finalizedLength = _roundUp(contentsLength, chunkSizeInBytes); |
| 123 var zeroPadding = finalizedLength - contentsLength; | 155 var zeroPadding = finalizedLength - contentsLength; |
| 124 for (var i = 0; i < zeroPadding; i++) { | 156 for (var i = 0; i < zeroPadding; i++) { |
| 125 _pendingData.add(0); | 157 _pendingData.add(0); |
| 126 } | 158 } |
| 127 var lengthInBits = _lengthInBytes * BITS_PER_BYTE; | 159 var lengthInBits = _lengthInBytes * BITS_PER_BYTE; |
| 128 assert(lengthInBits < math.pow(2, 32)); | 160 assert(lengthInBits < math.pow(2, 32)); |
| 129 if (_bigEndianWords) { | 161 if (_bigEndianWords) { |
| 130 _pendingData.addAll(_wordToBytes(0)); | 162 _pendingData.addAll(_wordToBytes(0)); |
| 131 _pendingData.addAll(_wordToBytes(lengthInBits & MASK_32)); | 163 _pendingData.addAll(_wordToBytes(lengthInBits & MASK_32)); |
| 132 } else { | 164 } else { |
| 133 _pendingData.addAll(_wordToBytes(lengthInBits & MASK_32)); | 165 _pendingData.addAll(_wordToBytes(lengthInBits & MASK_32)); |
| 134 _pendingData.addAll(_wordToBytes(0)); | 166 _pendingData.addAll(_wordToBytes(0)); |
| 135 } | 167 } |
| 136 } | 168 } |
| 169 |
| | 170 /// Rounds [val] up to the next multiple of [n], which must be a power of two. |
| | 171 int _roundUp(val, n) => (val + n - 1) & -n; |
| 137 } | 172 } |
| OLD | NEW |