Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 library crypto.hash_base; | 5 library crypto.hash_base; |
| 6 | 6 |
| 7 import 'dart:math' as math; | |
| 8 import 'dart:typed_data'; | 7 import 'dart:typed_data'; |
| 9 | 8 |
| | 9 import 'package:typed_data/typed_data.dart'; |
| | 10 |
| 10 import 'hash.dart'; | 11 import 'hash.dart'; |
| 11 import 'utils.dart'; | 12 import 'utils.dart'; |
| 12 | 13 |
| 13 /// A base class for [Hash] implementations. | 14 /// A base class for [Hash] implementations. |
| 14 /// | 15 /// |
| 15 /// Subclasses should override [updateHash], and define it to update [h] with | 16 /// Subclasses should override [updateHash] and [digest]. |
| 16 /// the results of the hash function. | |
| 17 abstract class HashBase implements Hash { | 17 abstract class HashBase implements Hash { |
| 18 /// The size (in 32-bit words) of the chunks of input data that the hash | |
| 19 /// function consumes at once. | |
| 20 final int _chunkSizeInWords; | |
| 21 | |
| 22 /// The size (in 32-bit words) of the digest that the hash function emits. | |
| 23 final int _digestSizeInWords; | |
| 24 | |
| 25 /// Whether the hash function operates on big-endian words. | 18 /// Whether the hash function operates on big-endian words. |
| 26 final bool _bigEndianWords; | 19 final Endianness _endian; |
| 27 | 20 |
| 28 /// The words in the current chunk. | 21 /// The words in the current chunk. |
| | 22 /// |
| | 23 /// This is an instance variable to avoid re-allocating, but its data isn't |
| | 24 /// used across invocations of [_iterate]. |
| 29 final Uint32List _currentChunk; | 25 final Uint32List _currentChunk; |
| 30 | 26 |
| 31 /// The words in the current digest. | |
| 32 /// | |
| 33 /// The size of this buffer is given by the `digestSizeInWords` constructor | |
| 34 /// parameter. | |
| 35 final Uint32List h; | |
| 36 | |
| 37 /// The length of the input data so far, in bytes. | 27 /// The length of the input data so far, in bytes. |
| 38 int _lengthInBytes = 0; | 28 int _lengthInBytes = 0; |
| 39 | 29 |
| 40 /// Data that has yet to be processed by the hash function. | 30 /// Data that has yet to be processed by the hash function. |
| 41 List<int> _pendingData; | 31 final _pendingData = new Uint8Buffer(); |
| 42 | 32 |
| 43 /// Whether [close] has been called. | 33 /// Whether [close] has been called. |
| 44 bool _digestCalled = false; | 34 bool _isClosed = false; |
| | 35 |
| | 36 /// The words in the current digest. |
| | 37 /// |
| | 38 /// This should be updated each time [updateHash] is called. |
| | 39 Uint32List get digest; |
| | 40 |
| | 41 int get blockSize => _currentChunk.lengthInBytes; |
| 45 | 42 |
| 46 /// Creates a new hash. | 43 /// Creates a new hash. |
| 47 /// | 44 /// |
| 48 /// [chunkSizeInWords] represents the size of the input chunks processed by | 45 /// [chunkSizeInWords] represents the size of the input chunks processed by |
| 49 /// the algorithm. [digestSizeInWords] represents the size of the algorithm's | 46 /// the algorithm, in terms of 32-bit words. |
| 50 /// output digest. Both are in terms of 32-bit words. | 47 HashBase(int chunkSizeInWords, {Endianness endian: Endianness.BIG_ENDIAN}) |
| 51 HashBase( | 48 : _endian = endian, |
| 52 int chunkSizeInWords, int digestSizeInWords, bool this._bigEndianWords) | 49 _currentChunk = new Uint32List(chunkSizeInWords); |
| 53 : _pendingData = [], | 50 |
| 54 _currentChunk = new Uint32List(chunkSizeInWords), | 51 /// Runs a single iteration of the hash computation, updating [digest] with |
| 55 h = new Uint32List(digestSizeInWords), | 52 /// the result. |
| 56 _chunkSizeInWords = chunkSizeInWords, | 53 /// |
| 57 _digestSizeInWords = digestSizeInWords; | 54 /// [m] is the current chunk, whose size is given by the `chunkSizeInWords` |
| | 55 /// parameter passed to the constructor. |
| | 56 void updateHash(Uint32List m); |
Bob Nystrom (2015/09/18 17:15:48): "m" -> "chunk"?
nweiz (2015/09/18 19:37:21): Done.
| 58 | 57 |
| 59 void add(List<int> data) { | 58 void add(List<int> data) { |
| 60 if (_digestCalled) { | 59 if (_isClosed) throw new StateError('Hash.add() called after close().'); |
| 61 throw new StateError( | |
| 62 'Hash update method called after digest was retrieved'); | |
| 63 } | |
| 64 _lengthInBytes += data.length; | 60 _lengthInBytes += data.length; |
| 65 _pendingData.addAll(data); | 61 _pendingData.addAll(data); |
| 66 _iterate(); | 62 _iterate(); |
| 67 } | 63 } |
| 68 | 64 |
| 69 List<int> close() { | 65 List<int> close() { |
| 70 if (_digestCalled) { | 66 if (_isClosed) return _byteDigest(); |
| 71 return _resultAsBytes(); | 67 _isClosed = true; |
| 72 } | 68 |
| 73 _digestCalled = true; | |
| 74 _finalizeData(); | 69 _finalizeData(); |
| 75 _iterate(); | 70 _iterate(); |
| 76 assert(_pendingData.length == 0); | 71 assert(_pendingData.isEmpty); |
| 77 return _resultAsBytes(); | 72 return _byteDigest(); |
| 78 } | 73 } |
| 79 | 74 |
| 80 int get blockSize { | 75 Uint8List _byteDigest() { |
| 81 return _chunkSizeInWords * BYTES_PER_WORD; | 76 if (_endian == Endianness.HOST_ENDIAN) return digest.buffer.asUint8List(); |
| 82 } | |
| 83 | 77 |
| 84 /// Runs a single iteration of the hash computation, updating [h] with the | 78 var byteDigest = new Uint8List(digest.lengthInBytes); |
| 85 /// result. | 79 var byteData = byteDigest.buffer.asByteData(); |
| 86 /// | 80 for (var i = 0; i < digest.length; i++) { |
| 87 /// [m] is the current chunk, whose size is given by the `chunkSizeInWords` | 81 byteData.setUint32(i * bytesPerWord, digest[i]); |
| 88 /// parameter passed to the constructor. | |
| 89 void updateHash(Uint32List m); | |
| 90 | |
| 91 /// Computes the final result of the hash as a list of bytes from the hash | |
| 92 /// words. | |
| 93 List<int> _resultAsBytes() { | |
| 94 var result = []; | |
| 95 for (var i = 0; i < h.length; i++) { | |
| 96 result.addAll(_wordToBytes(h[i])); | |
| 97 } | 82 } |
| 98 return result; | 83 return byteDigest; |
| 99 } | |
| 100 | |
| 101 /// Converts a list of bytes to a chunk of 32-bit words. | |
| 102 /// | |
| 103 /// Stores the result in [_currentChunk]. | |
| 104 void _bytesToChunk(List<int> data, int dataIndex) { | |
| 105 assert((data.length - dataIndex) >= (_chunkSizeInWords * BYTES_PER_WORD)); | |
| 106 | |
| 107 for (var wordIndex = 0; wordIndex < _chunkSizeInWords; wordIndex++) { | |
| 108 var w3 = _bigEndianWords ? data[dataIndex] : data[dataIndex + 3]; | |
| 109 var w2 = _bigEndianWords ? data[dataIndex + 1] : data[dataIndex + 2]; | |
| 110 var w1 = _bigEndianWords ? data[dataIndex + 2] : data[dataIndex + 1]; | |
| 111 var w0 = _bigEndianWords ? data[dataIndex + 3] : data[dataIndex]; | |
| 112 dataIndex += 4; | |
| 113 var word = (w3 & 0xff) << 24; | |
| 114 word |= (w2 & MASK_8) << 16; | |
| 115 word |= (w1 & MASK_8) << 8; | |
| 116 word |= (w0 & MASK_8); | |
| 117 _currentChunk[wordIndex] = word; | |
| 118 } | |
| 119 } | |
| 120 | |
| 121 /// Converts a 32-bit word to four bytes. | |
| 122 List<int> _wordToBytes(int word) { | |
| 123 List bytes = new List<int>(BYTES_PER_WORD); | |
| 124 bytes[0] = (word >> (_bigEndianWords ? 24 : 0)) & MASK_8; | |
| 125 bytes[1] = (word >> (_bigEndianWords ? 16 : 8)) & MASK_8; | |
| 126 bytes[2] = (word >> (_bigEndianWords ? 8 : 16)) & MASK_8; | |
| 127 bytes[3] = (word >> (_bigEndianWords ? 0 : 24)) & MASK_8; | |
| 128 return bytes; | |
| 129 } | 84 } |
| 130 | 85 |
| 131 /// Iterates through [_pendingData], updating the hash computation for each | 86 /// Iterates through [_pendingData], updating the hash computation for each |
| 132 /// chunk. | 87 /// chunk. |
| 133 void _iterate() { | 88 void _iterate() { |
| 134 var len = _pendingData.length; | 89 var pendingDataBytes = _pendingData.buffer.asByteData(); |
| 135 var chunkSizeInBytes = _chunkSizeInWords * BYTES_PER_WORD; | 90 var pendingDataChunks = _pendingData.length ~/ _currentChunk.lengthInBytes; |
| 136 if (len >= chunkSizeInBytes) { | 91 for (var i = 0; i < pendingDataChunks; i++) { |
| 137 var index = 0; | 92 // Copy words from the pending data buffer into the current chunk buffer. |
| 138 for (; (len - index) >= chunkSizeInBytes; index += chunkSizeInBytes) { | 93 for (var j = 0; j < _currentChunk.length; j++) { |
| 139 _bytesToChunk(_pendingData, index); | 94 _currentChunk[j] = pendingDataBytes.getUint32( |
| 140 updateHash(_currentChunk); | 95 i * _currentChunk.lengthInBytes + j * bytesPerWord, _endian); |
| 141 } | 96 } |
| 142 _pendingData = _pendingData.sublist(index, len); | 97 |
| | 98 // Run the hash function on the current chunk. |
| | 99 updateHash(_currentChunk); |
| 143 } | 100 } |
| | 101 |
| | 102 // Remove all pending data up to the last clean chunk break. |
| | 103 _pendingData.removeRange( |
| | 104 0, pendingDataChunks * _currentChunk.lengthInBytes); |
| 144 } | 105 } |
| 145 | 106 |
| 146 /// Finalizes [_pendingData]. | 107 /// Finalizes [_pendingData]. |
| 147 /// | 108 /// |
| 148 /// This adds a 1 bit to the end of the message, and expands it with 0 bits to | 109 /// This adds a 1 bit to the end of the message, and expands it with 0 bits to |
| 149 /// pad it out. | 110 /// pad it out. |
| 150 void _finalizeData() { | 111 void _finalizeData() { |
| | 112 // Pad out the data with 0x80, eight 0s, and as many more 0s as we need to |
| | 113 // land cleanly on a chunk boundary. |
| 151 _pendingData.add(0x80); | 114 _pendingData.add(0x80); |
| 152 var contentsLength = _lengthInBytes + 9; | 115 var contentsLength = _lengthInBytes + 9; |
| 153 var chunkSizeInBytes = _chunkSizeInWords * BYTES_PER_WORD; | 116 var finalizedLength = _roundUp(contentsLength, _currentChunk.lengthInBytes); |
| 154 var finalizedLength = _roundUp(contentsLength, chunkSizeInBytes); | 117 for (var i = 0; i < finalizedLength - contentsLength; i++) { |
| 155 var zeroPadding = finalizedLength - contentsLength; | |
| 156 for (var i = 0; i < zeroPadding; i++) { | |
| 157 _pendingData.add(0); | 118 _pendingData.add(0); |
| 158 } | 119 } |
| 159 var lengthInBits = _lengthInBytes * BITS_PER_BYTE; | 120 |
| 160 const MAX_UINT64 = 0xFFFFFFFFFFFFFFFF; | 121 var lengthInBits = _lengthInBytes * bitsPerByte; |
| 161 if (lengthInBits > MAX_UINT64) { | 122 if (lengthInBits > maxUint64) { |
| 162 throw new UnsupportedError( | 123 throw new UnsupportedError( |
| 163 "Hash undefined for message bit lengths larger than 64 bits"); | 124 "Hashing is unsupported for messages with more than 2^64 bits."); |
| 164 } | 125 } |
| 165 if (_bigEndianWords) { | 126 |
| 166 _pendingData.addAll(_wordToBytes((lengthInBits >> 32) & MASK_32)); | 127 // Add the full length of the input data as a 64-bit value at the end of the |
| 167 _pendingData.addAll(_wordToBytes(lengthInBits & MASK_32)); | 128 // hash. |
| 168 } else { | 129 var offset = _pendingData.length; |
| 169 _pendingData.addAll(_wordToBytes(lengthInBits & MASK_32)); | 130 _pendingData.addAll(new Uint8List(8)); |
| 170 _pendingData.addAll(_wordToBytes((lengthInBits >> 32) & MASK_32)); | 131 _pendingData.buffer.asByteData().setUint64(offset, lengthInBits, _endian); |
| 171 } | |
| 172 } | 132 } |
| 173 | 133 |
| 174 /// Rounds [val] to the nearest multiple of [n]. | 134 /// Rounds [val] up to the next multiple of [n], as long as [n] is a power of |
| 175 int _roundUp(val, n) => (val + n - 1) & -n; | 135 /// two. |
| | 136 int _roundUp(int val, int n) => (val + n - 1) & -n; |
| 176 } | 137 } |
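
A note for readers following the refactor: after this change a concrete hash class supplies only a [digest] buffer and an [updateHash] body, while `HashBase` owns buffering, padding, and the final byte conversion. The sketch below is illustrative only; `XorHash` is a made-up class, not part of this CL or the crypto package, and it ignores any further members the [Hash] interface may require.

```dart
import 'dart:typed_data';

// Illustrative toy subclass (not part of this CL). HashBase comes from this
// library; any additional members required by the Hash interface are omitted.
class XorHash extends HashBase {
  final Uint32List _digest = new Uint32List(4);

  // 16 words = 64-byte chunks; big-endian words by default.
  XorHash() : super(16);

  // The running digest words; HashBase reads these in _byteDigest().
  Uint32List get digest => _digest;

  // A real hash would run its compression function here. This toy version
  // just XORs each chunk word into the digest so the data flow stays visible.
  void updateHash(Uint32List chunk) {
    for (var i = 0; i < chunk.length; i++) {
      _digest[i % _digest.length] ^= chunk[i];
    }
  }
}
```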
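The new code also drops the hand-rolled `_bytesToChunk` and `_wordToBytes` helpers in favor of `ByteData` views parameterized by `Endianness`. The standalone snippet below is not from the CL; it uses the Dart 1-era `dart:typed_data` API to show the same read/write pattern that `_iterate` and `_byteDigest` now rely on.

```dart
import 'dart:typed_data';

// Standalone illustration (not from the CL) of the ByteData-based conversion
// that replaces _bytesToChunk/_wordToBytes: 32-bit words are read from and
// written to byte buffers with the endianness chosen in one place.
void main() {
  var bytes = new Uint8List.fromList([1, 2, 3, 4]);
  var data = bytes.buffer.asByteData();

  // Reading input words, as _iterate() now does.
  var big = data.getUint32(0, Endianness.BIG_ENDIAN);       // 0x01020304
  var little = data.getUint32(0, Endianness.LITTLE_ENDIAN); // 0x04030201

  // Writing a digest word back out as bytes, as _byteDigest() now does.
  var out = new Uint8List(4);
  out.buffer.asByteData().setUint32(0, big, Endianness.BIG_ENDIAN);

  print(big.toRadixString(16));    // 1020304
  print(little.toRadixString(16)); // 4030201
  print(out);                      // [1, 2, 3, 4]
}
```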
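The finalization logic is easier to check with concrete numbers: the padded message is the original data, one 0x80 byte, enough zero bytes to leave exactly eight bytes free at the end of a chunk, and then the 64-bit bit length, with `_roundUp` using the usual power-of-two trick. A small sketch of the arithmetic, with assumed example lengths rather than anything taken from the CL:

```dart
// Illustrative check (not from the CL) of the padding arithmetic in
// _finalizeData: data + 0x80 + an 8-byte length must land exactly on a chunk
// boundary, so the contents length is rounded up with (val + n - 1) & -n,
// which works whenever n is a power of two.
int roundUp(int val, int n) => (val + n - 1) & -n;

void main() {
  const chunkSizeInBytes = 64; // e.g. 16 words * 4 bytes, as in MD5/SHA-1.
  for (var messageLength in [0, 55, 56, 64, 119]) {
    var contentsLength = messageLength + 1 + 8; // data + 0x80 + 64-bit length
    var finalizedLength = roundUp(contentsLength, chunkSizeInBytes);
    var zeroPadding = finalizedLength - contentsLength;
    print('$messageLength-byte message: $zeroPadding zero bytes of padding, '
        '$finalizedLength bytes total');
  }
}
```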