| Index: lib/src/hash_base.dart
|
| diff --git a/lib/src/hash_base.dart b/lib/src/hash_base.dart
|
| index b77b26d5982e8d614b3bb86f079af4873519c7b3..8cd2ad5e502155288488f58164d6c1d8412715b5 100644
|
| --- a/lib/src/hash_base.dart
|
| +++ b/lib/src/hash_base.dart
|
| @@ -4,143 +4,104 @@
|
|
|
| library crypto.hash_base;
|
|
|
| -import 'dart:math' as math;
|
| import 'dart:typed_data';
|
|
|
| +import 'package:typed_data/typed_data.dart';
|
| +
|
| import 'hash.dart';
|
| import 'utils.dart';
|
|
|
| /// A base class for [Hash] implementations.
|
| ///
|
| -/// Subclasses should override [updateHash], and define it to update [h] with
|
| -/// the results of the hash function.
|
| +/// Subclasses should override [updateHash] and [digest].
|
| abstract class HashBase implements Hash {
|
| - /// The size (in 32-bit words) of the chunks of input data that the hash
|
| - /// function consumes at once.
|
| - final int _chunkSizeInWords;
|
| -
|
| - /// The size (in 32-bit words) of the digest that the hash function emits.
|
| - final int _digestSizeInWords;
|
| -
|
| /// Whether the hash function operates on big-endian words.
|
| - final bool _bigEndianWords;
|
| + final Endianness _endian;
|
|
|
| /// The words in the current chunk.
|
| - final Uint32List _currentChunk;
|
| -
|
| - /// The words in the current digest.
|
| ///
|
| - /// The size of this buffer is given by the `digestSizeInWords` constructor
|
| - /// parameter.
|
| - final Uint32List h;
|
| + /// This is an instance variable to avoid re-allocating, but its data isn't
|
| + /// used across invocations of [_iterate].
|
| + final Uint32List _currentChunk;
|
|
|
| /// The length of the input data so far, in bytes.
|
| int _lengthInBytes = 0;
|
|
|
| /// Data that has yet to be processed by the hash function.
|
| - List<int> _pendingData;
|
| + final _pendingData = new Uint8Buffer();
|
|
|
| /// Whether [close] has been called.
|
| - bool _digestCalled = false;
|
| + bool _isClosed = false;
|
| +
|
| + /// The words in the current digest.
|
| + ///
|
| + /// This should be updated each time [updateHash] is called.
|
| + Uint32List get digest;
|
| +
|
| + int get blockSize => _currentChunk.lengthInBytes;
|
|
|
| /// Creates a new hash.
|
| ///
|
| /// [chunkSizeInWords] represents the size of the input chunks processed by
|
| - /// the algorithm. [digestSizeInWords] represents the size of the algorithm's
|
| - /// output digest. Both are in terms of 32-bit words.
|
| - HashBase(
|
| - int chunkSizeInWords, int digestSizeInWords, bool this._bigEndianWords)
|
| - : _pendingData = [],
|
| - _currentChunk = new Uint32List(chunkSizeInWords),
|
| - h = new Uint32List(digestSizeInWords),
|
| - _chunkSizeInWords = chunkSizeInWords,
|
| - _digestSizeInWords = digestSizeInWords;
|
| + /// the algorithm, in terms of 32-bit words.
|
| + HashBase(int chunkSizeInWords, {Endianness endian: Endianness.BIG_ENDIAN})
|
| + : _endian = endian,
|
| + _currentChunk = new Uint32List(chunkSizeInWords);
|
| +
|
| + /// Runs a single iteration of the hash computation, updating [digest] with
|
| + /// the result.
|
| + ///
|
| + /// [chunk] is the current chunk, whose size is given by the `chunkSizeInWords`
|
| + /// parameter passed to the constructor.
|
| + void updateHash(Uint32List chunk);
|
|
|
| void add(List<int> data) {
|
| - if (_digestCalled) {
|
| - throw new StateError(
|
| - 'Hash update method called after digest was retrieved');
|
| - }
|
| + if (_isClosed) throw new StateError('Hash.add() called after close().');
|
| _lengthInBytes += data.length;
|
| _pendingData.addAll(data);
|
| _iterate();
|
| }
|
|
|
| List<int> close() {
|
| - if (_digestCalled) {
|
| - return _resultAsBytes();
|
| - }
|
| - _digestCalled = true;
|
| + if (_isClosed) return _byteDigest();
|
| + _isClosed = true;
|
| +
|
| _finalizeData();
|
| _iterate();
|
| - assert(_pendingData.length == 0);
|
| - return _resultAsBytes();
|
| - }
|
| -
|
| - int get blockSize {
|
| - return _chunkSizeInWords * BYTES_PER_WORD;
|
| + assert(_pendingData.isEmpty);
|
| + return _byteDigest();
|
| }
|
|
|
| - /// Runs a single iteration of the hash computation, updating [h] with the
|
| - /// result.
|
| - ///
|
| - /// [m] is the current chunk, whose size is given by the `chunkSizeInWords`
|
| - /// parameter passed to the constructor.
|
| - void updateHash(Uint32List m);
|
| -
|
| - /// Computes the final result of the hash as a list of bytes from the hash
|
| - /// words.
|
| - List<int> _resultAsBytes() {
|
| - var result = [];
|
| - for (var i = 0; i < h.length; i++) {
|
| - result.addAll(_wordToBytes(h[i]));
|
| - }
|
| - return result;
|
| - }
|
| + Uint8List _byteDigest() {
|
| + if (_endian == Endianness.HOST_ENDIAN) return digest.buffer.asUint8List();
|
|
|
| - /// Converts a list of bytes to a chunk of 32-bit words.
|
| - ///
|
| - /// Stores the result in [_currentChunk].
|
| - void _bytesToChunk(List<int> data, int dataIndex) {
|
| - assert((data.length - dataIndex) >= (_chunkSizeInWords * BYTES_PER_WORD));
|
| -
|
| - for (var wordIndex = 0; wordIndex < _chunkSizeInWords; wordIndex++) {
|
| - var w3 = _bigEndianWords ? data[dataIndex] : data[dataIndex + 3];
|
| - var w2 = _bigEndianWords ? data[dataIndex + 1] : data[dataIndex + 2];
|
| - var w1 = _bigEndianWords ? data[dataIndex + 2] : data[dataIndex + 1];
|
| - var w0 = _bigEndianWords ? data[dataIndex + 3] : data[dataIndex];
|
| - dataIndex += 4;
|
| - var word = (w3 & 0xff) << 24;
|
| - word |= (w2 & MASK_8) << 16;
|
| - word |= (w1 & MASK_8) << 8;
|
| - word |= (w0 & MASK_8);
|
| - _currentChunk[wordIndex] = word;
|
| + var byteDigest = new Uint8List(digest.lengthInBytes);
|
| + var byteData = byteDigest.buffer.asByteData();
|
| + for (var i = 0; i < digest.length; i++) {
|
| + byteData.setUint32(i * bytesPerWord, digest[i], _endian);
|
| }
|
| - }
|
| -
|
| - /// Converts a 32-bit word to four bytes.
|
| - List<int> _wordToBytes(int word) {
|
| - List bytes = new List<int>(BYTES_PER_WORD);
|
| - bytes[0] = (word >> (_bigEndianWords ? 24 : 0)) & MASK_8;
|
| - bytes[1] = (word >> (_bigEndianWords ? 16 : 8)) & MASK_8;
|
| - bytes[2] = (word >> (_bigEndianWords ? 8 : 16)) & MASK_8;
|
| - bytes[3] = (word >> (_bigEndianWords ? 0 : 24)) & MASK_8;
|
| - return bytes;
|
| + return byteDigest;
|
| }
|
|
|
| /// Iterates through [_pendingData], updating the hash computation for each
|
| /// chunk.
|
| void _iterate() {
|
| - var len = _pendingData.length;
|
| - var chunkSizeInBytes = _chunkSizeInWords * BYTES_PER_WORD;
|
| - if (len >= chunkSizeInBytes) {
|
| - var index = 0;
|
| - for (; (len - index) >= chunkSizeInBytes; index += chunkSizeInBytes) {
|
| - _bytesToChunk(_pendingData, index);
|
| - updateHash(_currentChunk);
|
| + var pendingDataBytes = _pendingData.buffer.asByteData();
|
| + var pendingDataChunks = _pendingData.length ~/ _currentChunk.lengthInBytes;
|
| + for (var i = 0; i < pendingDataChunks; i++) {
|
| + // Copy words from the pending data buffer into the current chunk buffer.
|
| + for (var j = 0; j < _currentChunk.length; j++) {
|
| + _currentChunk[j] = pendingDataBytes.getUint32(
|
| + i * _currentChunk.lengthInBytes + j * bytesPerWord, _endian);
|
| }
|
| - _pendingData = _pendingData.sublist(index, len);
|
| +
|
| + // Run the hash function on the current chunk.
|
| + updateHash(_currentChunk);
|
| }
|
| +
|
| + // Remove all pending data up to the last clean chunk break.
|
| + _pendingData.removeRange(
|
| + 0, pendingDataChunks * _currentChunk.lengthInBytes);
|
| }
|
|
|
| /// Finalizes [_pendingData].
|
| @@ -148,29 +109,29 @@ abstract class HashBase implements Hash {
|
| /// This adds a 1 bit to the end of the message, and expands it with 0 bits to
|
| /// pad it out.
|
| void _finalizeData() {
|
| + // Pad the data with 0x80, then with enough zeros that an eight-byte
|
| + // length field will land the message exactly on a chunk boundary.
|
| _pendingData.add(0x80);
|
| var contentsLength = _lengthInBytes + 9;
|
| - var chunkSizeInBytes = _chunkSizeInWords * BYTES_PER_WORD;
|
| - var finalizedLength = _roundUp(contentsLength, chunkSizeInBytes);
|
| - var zeroPadding = finalizedLength - contentsLength;
|
| - for (var i = 0; i < zeroPadding; i++) {
|
| + var finalizedLength = _roundUp(contentsLength, _currentChunk.lengthInBytes);
|
| + for (var i = 0; i < finalizedLength - contentsLength; i++) {
|
| _pendingData.add(0);
|
| }
|
| - var lengthInBits = _lengthInBytes * BITS_PER_BYTE;
|
| - const MAX_UINT64 = 0xFFFFFFFFFFFFFFFF;
|
| - if (lengthInBits > MAX_UINT64) {
|
| +
|
| + var lengthInBits = _lengthInBytes * bitsPerByte;
|
| + if (lengthInBits > maxUint64) {
|
| throw new UnsupportedError(
|
| - "Hash undefined for message bit lengths larger than 64 bits");
|
| - }
|
| - if (_bigEndianWords) {
|
| - _pendingData.addAll(_wordToBytes((lengthInBits >> 32) & MASK_32));
|
| - _pendingData.addAll(_wordToBytes(lengthInBits & MASK_32));
|
| - } else {
|
| - _pendingData.addAll(_wordToBytes(lengthInBits & MASK_32));
|
| - _pendingData.addAll(_wordToBytes((lengthInBits >> 32) & MASK_32));
|
| + "Hashing is unsupported for messages with more than 2^64 bits.");
|
| }
|
| +
|
| + // Append the length of the input data, in bits, as a 64-bit value at the
|
| + // end of the padded message.
|
| + var offset = _pendingData.length;
|
| + _pendingData.addAll(new Uint8List(8));
|
| + _pendingData.buffer.asByteData().setUint64(offset, lengthInBits, _endian);
|
| }
|
|
|
| - /// Rounds [val] to the nearest multiple of [n].
|
| - int _roundUp(val, n) => (val + n - 1) & -n;
|
| + /// Rounds [val] up to the next multiple of [n], as long as [n] is a power of
|
| + /// two.
|
| + int _roundUp(int val, int n) => (val + n - 1) & -n;
|
| }
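The new doc comments spell out the subclassing contract: implement [digest] as the hash's running state words, and [updateHash] to fold each complete chunk into them. Here is a minimal sketch of that contract; `ToyHash`, its mixing step, and the `package:crypto/src/...` import path are invented for illustration and are not part of this patch:

```dart
import 'dart:typed_data';

import 'package:crypto/src/hash_base.dart';

/// A toy hash showing the overriding contract. The mixing step below is
/// invented for illustration; it is not a real or secure hash function.
class ToyHash extends HashBase {
  /// The running digest words; [HashBase.close] serializes these to bytes.
  final Uint32List _digest = new Uint32List(4);

  /// Four 32-bit words (16 bytes) per input chunk; big-endian is the default.
  ToyHash() : super(4);

  Uint32List get digest => _digest;

  /// Called once per complete chunk of (padded) input data.
  void updateHash(Uint32List chunk) {
    for (var i = 0; i < chunk.length; i++) {
      _digest[i % _digest.length] ^= chunk[i];
    }
  }

  /// Assumed here: the [Hash] interface also requires a [newInstance] factory.
  ToyHash newInstance() => new ToyHash();
}
```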
|
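Driving it through the caller-facing API that HashBase itself provides, with `ToyHash` from the sketch above:

```dart
import 'dart:convert';

void main() {
  var hash = new ToyHash();

  // Input may be split across any number of add() calls; _iterate() consumes
  // whole chunks as they arrive and buffers the remainder in _pendingData.
  hash.add(UTF8.encode('hello, '));
  hash.add(UTF8.encode('world'));

  // close() pads the buffered data, hashes the final chunk(s), and returns
  // the digest bytes. Any add() after this throws a StateError.
  print(hash.close().length); // 16: four 32-bit digest words.
}
```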
|
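The padding arithmetic in `_finalizeData` is easy to sanity-check by hand: `contentsLength` reserves one byte for the 0x80 marker plus eight for the length field, and `_roundUp` (which needs [n] to be a power of two, as chunk sizes always are) carries that total to the next chunk boundary. A standalone sketch, with `roundUp` named here for illustration:

```dart
// The same bit trick as the private _roundUp; n must be a power of two.
int roundUp(int val, int n) => (val + n - 1) & -n;

void main() {
  var chunkSize = 64; // e.g. 16 words of 4 bytes, as in SHA-1.

  // A 3-byte message: 3 data bytes + 0x80 + 52 zeros + 8 length bytes = 64.
  print(roundUp(3 + 9, chunkSize)); // 64

  // 56 data bytes leave no room for the 9-byte trailer, so the padding
  // spills into a second chunk: 56 + 1 + 63 + 8 = 128.
  print(roundUp(56 + 9, chunkSize)); // 128
}
```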
|
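Finally, the endianness handling in `_iterate` and `_byteDigest` rests entirely on ByteData's explicit-endian accessors: words are written to and read from byte buffers in whatever order `_endian` names. A self-contained round trip:

```dart
import 'dart:typed_data';

void main() {
  var word = 0x01020304;

  // Serialize one word little-endian, as _byteDigest does when _endian
  // doesn't match the host byte order.
  var bytes = new Uint8List(4);
  bytes.buffer.asByteData().setUint32(0, word, Endianness.LITTLE_ENDIAN);
  print(bytes); // [4, 3, 2, 1]

  // Read it back the way _iterate fills _currentChunk from _pendingData.
  var readBack =
      bytes.buffer.asByteData().getUint32(0, Endianness.LITTLE_ENDIAN);
  print(readBack == word); // true
}
```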