OLD | NEW |
(Empty) | |
| 1 /* NOLINT(build/header_guard) */ |
| 2 /* Copyright 2010 Google Inc. All Rights Reserved. |
| 3 |
| 4 Distributed under MIT license. |
| 5 See file LICENSE for detail or copy at https://opensource.org/licenses/MIT |
| 6 */ |
| 7 |
| 8 /* template parameters: FN, BUCKET_BITS, BLOCK_BITS, |
| 9 NUM_LAST_DISTANCES_TO_CHECK */ |
| 10 |
| 11 /* A (forgetful) hash table to the data seen by the compressor, to |
| 12 help create backward references to previous data. |
| 13 |
| 14 This is a hash map of fixed size (BUCKET_SIZE) to a ring buffer of |
| 15 fixed size (BLOCK_SIZE). The ring buffer contains the last BLOCK_SIZE |
| 16 index positions of the given hash key in the compressed data. */ |
| 17 |
#define HashLongestMatch HASHER()

/* Number of hash buckets. */
#define BUCKET_SIZE (1 << BUCKET_BITS)

/* Only BLOCK_SIZE newest backward references are kept,
   and the older are forgotten. */
#define BLOCK_SIZE (1u << BLOCK_BITS)

/* Mask for accessing entries in a block (in a ring-buffer manner).
   Unsigned literal keeps the shift well-defined for any BLOCK_BITS up to 31
   and matches the BLOCK_SIZE definition above. */
#define BLOCK_MASK ((1u << BLOCK_BITS) - 1)

#define HASH_MAP_SIZE (2 << BUCKET_BITS)
| 31 |
/* Number of input bytes that HashBytes reads to form a bucket key. */
static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 4; }
/* Number of bytes that must be readable at a position for Store to hash it. */
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 4; }
| 34 |
| 35 /* HashBytes is the function that chooses the bucket to place |
| 36 the address in. The HashLongestMatch and HashLongestMatchQuickly |
| 37 classes have separate, different implementations of hashing. */ |
| 38 static uint32_t FN(HashBytes)(const uint8_t *data) { |
| 39 uint32_t h = BROTLI_UNALIGNED_LOAD32(data) * kHashMul32; |
| 40 /* The higher bits contain more mixture from the multiplication, |
| 41 so we take our results from there. */ |
| 42 return h >> (32 - BUCKET_BITS); |
| 43 } |
| 44 |
typedef struct HashLongestMatch {
  /* Number of entries in a particular bucket. Incremented on every Store
     without wrapping; only the low BLOCK_BITS bits index the ring buffer,
     so values above BLOCK_SIZE mean older entries have been overwritten. */
  uint16_t num_[BUCKET_SIZE];

  /* Buckets containing BLOCK_SIZE of backward references.
     Bucket |key| occupies slots [key << BLOCK_BITS, (key + 1) << BLOCK_BITS). */
  uint32_t buckets_[BLOCK_SIZE << BUCKET_BITS];

  /* True if num_ array needs to be initialized. */
  BROTLI_BOOL is_dirty_;

  /* State/statistics for SearchInStaticDictionary. */
  DictionarySearchStatictics dict_search_stats_;
} HashLongestMatch;
| 57 |
/* Marks the table as invalid and resets the dictionary-search state.
   The num_ array itself is cleared lazily by the next Init*() call. */
static void FN(Reset)(HashLongestMatch* self) {
  self->is_dirty_ = BROTLI_TRUE;
  DictionarySearchStaticticsReset(&self->dict_search_stats_);
}
| 62 |
| 63 static void FN(InitEmpty)(HashLongestMatch* self) { |
| 64 if (self->is_dirty_) { |
| 65 memset(self->num_, 0, sizeof(self->num_)); |
| 66 self->is_dirty_ = BROTLI_FALSE; |
| 67 } |
| 68 } |
| 69 |
| 70 static void FN(InitForData)(HashLongestMatch* self, const uint8_t* data, |
| 71 size_t num) { |
| 72 size_t i; |
| 73 for (i = 0; i < num; ++i) { |
| 74 const uint32_t key = FN(HashBytes)(&data[i]); |
| 75 self->num_[key] = 0; |
| 76 } |
| 77 if (num != 0) { |
| 78 self->is_dirty_ = BROTLI_FALSE; |
| 79 } |
| 80 } |
| 81 |
| 82 static void FN(Init)( |
| 83 MemoryManager* m, HashLongestMatch* self, const uint8_t* data, |
| 84 const BrotliEncoderParams* params, size_t position, size_t bytes, |
| 85 BROTLI_BOOL is_last) { |
| 86 /* Choose which initialization method is faster. |
| 87 Init() is about 100 times faster than InitForData(). */ |
| 88 const size_t kMaxBytesForPartialHashInit = HASH_MAP_SIZE >> 7; |
| 89 BROTLI_UNUSED(m); |
| 90 BROTLI_UNUSED(params); |
| 91 if (position == 0 && is_last && bytes <= kMaxBytesForPartialHashInit) { |
| 92 FN(InitForData)(self, data, bytes); |
| 93 } else { |
| 94 FN(InitEmpty)(self); |
| 95 } |
| 96 } |
| 97 |
| 98 /* Look at 4 bytes at &data[ix & mask]. |
| 99 Compute a hash from these, and store the value of ix at that position. */ |
| 100 static BROTLI_INLINE void FN(Store)(HashLongestMatch* self, const uint8_t *data, |
| 101 const size_t mask, const size_t ix) { |
| 102 const uint32_t key = FN(HashBytes)(&data[ix & mask]); |
| 103 const size_t minor_ix = self->num_[key] & BLOCK_MASK; |
| 104 self->buckets_[minor_ix + (key << BLOCK_BITS)] = (uint32_t)ix; |
| 105 ++self->num_[key]; |
| 106 } |
| 107 |
| 108 static BROTLI_INLINE void FN(StoreRange)(HashLongestMatch* self, |
| 109 const uint8_t *data, const size_t mask, const size_t ix_start, |
| 110 const size_t ix_end) { |
| 111 size_t i; |
| 112 for (i = ix_start; i < ix_end; ++i) { |
| 113 FN(Store)(self, data, mask, i); |
| 114 } |
| 115 } |
| 116 |
| 117 static BROTLI_INLINE void FN(StitchToPreviousBlock)(HashLongestMatch* self, |
| 118 size_t num_bytes, size_t position, const uint8_t* ringbuffer, |
| 119 size_t ringbuffer_mask) { |
| 120 if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) { |
| 121 /* Prepare the hashes for three last bytes of the last write. |
| 122 These could not be calculated before, since they require knowledge |
| 123 of both the previous and the current block. */ |
| 124 FN(Store)(self, ringbuffer, ringbuffer_mask, position - 3); |
| 125 FN(Store)(self, ringbuffer, ringbuffer_mask, position - 2); |
| 126 FN(Store)(self, ringbuffer, ringbuffer_mask, position - 1); |
| 127 } |
| 128 } |
| 129 |
/* Find a longest backward match of &data[cur_ix] up to the length of
   max_length and stores the position cur_ix in the hash table.

   Does not look for matches longer than max_length.
   Does not look for matches further away than max_backward.
   Writes the best match into |out|.
   Returns true when match is found, otherwise false.

   The search runs in two phases: first the recently-used distances in
   |distance_cache| are probed, then the hash bucket of the current
   position is scanned from newest to oldest entry. A candidate only
   replaces the current best when its score beats the incoming
   |out->score|. If nothing is found, the static dictionary is tried. */
static BROTLI_INLINE BROTLI_BOOL FN(FindLongestMatch)(HashLongestMatch* self,
    const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
    const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix,
    const size_t max_length, const size_t max_backward,
    HasherSearchResult* BROTLI_RESTRICT out) {
  const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
  BROTLI_BOOL is_match_found = BROTLI_FALSE;
  /* Don't accept a short copy from far away. */
  score_t best_score = out->score;
  size_t best_len = out->len;
  size_t i;
  out->len = 0;
  out->len_x_code = 0;
  /* Try last distance first. */
  for (i = 0; i < NUM_LAST_DISTANCES_TO_CHECK; ++i) {
    const size_t idx = kDistanceCacheIndex[i];
    const size_t backward =
        (size_t)(distance_cache[idx] + kDistanceCacheOffset[i]);
    size_t prev_ix = (size_t)(cur_ix - backward);
    /* Subtraction wrapped around (backward == 0 or backward > cur_ix):
       no valid previous position, skip this cache entry. */
    if (prev_ix >= cur_ix) {
      continue;
    }
    if (BROTLI_PREDICT_FALSE(backward > max_backward)) {
      continue;
    }
    prev_ix &= ring_buffer_mask;

    /* Cheap rejection: for the candidate to beat best_len, the byte at
       offset best_len must match; also guard the ring-buffer bounds. */
    if (cur_ix_masked + best_len > ring_buffer_mask ||
        prev_ix + best_len > ring_buffer_mask ||
        data[cur_ix_masked + best_len] != data[prev_ix + best_len]) {
      continue;
    }
    {
      const size_t len = FindMatchLengthWithLimit(&data[prev_ix],
                                                  &data[cur_ix_masked],
                                                  max_length);
      if (len >= 3 || (len == 2 && i < 2)) {
        /* Comparing for >= 2 does not change the semantics, but just saves for
           a few unnecessary binary logarithms in backward reference score,
           since we are not interested in such short matches. */
        score_t score = BackwardReferenceScoreUsingLastDistance(len, i);
        if (best_score < score) {
          best_score = score;
          best_len = len;
          out->len = best_len;
          out->distance = backward;
          out->score = best_score;
          is_match_found = BROTLI_TRUE;
        }
      }
    }
  }
  /* Second phase: scan the bucket selected by hashing the current bytes,
     newest entry first. */
  {
    const uint32_t key = FN(HashBytes)(&data[cur_ix_masked]);
    uint32_t* BROTLI_RESTRICT bucket = &self->buckets_[key << BLOCK_BITS];
    /* Only the last BLOCK_SIZE stores are still present in the ring. */
    const size_t down =
        (self->num_[key] > BLOCK_SIZE) ? (self->num_[key] - BLOCK_SIZE) : 0u;
    for (i = self->num_[key]; i > down;) {
      size_t prev_ix = bucket[--i & BLOCK_MASK];
      const size_t backward = cur_ix - prev_ix;
      /* Positions are stored in increasing order, so iterating newest to
         oldest the distance only grows; once too far, all the rest are. */
      if (BROTLI_PREDICT_FALSE(backward > max_backward)) {
        break;
      }
      prev_ix &= ring_buffer_mask;
      if (cur_ix_masked + best_len > ring_buffer_mask ||
          prev_ix + best_len > ring_buffer_mask ||
          data[cur_ix_masked + best_len] != data[prev_ix + best_len]) {
        continue;
      }
      {
        const size_t len = FindMatchLengthWithLimit(&data[prev_ix],
                                                    &data[cur_ix_masked],
                                                    max_length);
        if (len >= 4) {
          /* Comparing for >= 3 does not change the semantics, but just saves
             for a few unnecessary binary logarithms in backward reference
             score, since we are not interested in such short matches. */
          score_t score = BackwardReferenceScore(len, backward);
          if (best_score < score) {
            best_score = score;
            best_len = len;
            out->len = best_len;
            out->distance = backward;
            out->score = best_score;
            is_match_found = BROTLI_TRUE;
          }
        }
      }
    }
    /* Register cur_ix in the table so later searches can find it. */
    bucket[self->num_[key] & BLOCK_MASK] = (uint32_t)cur_ix;
    ++self->num_[key];
  }
  if (!is_match_found) {
    /* No backward match: fall back to the static dictionary. */
    is_match_found = SearchInStaticDictionary(&self->dict_search_stats_,
        &data[cur_ix_masked], max_length, max_backward, out, BROTLI_FALSE);
  }
  return is_match_found;
}
| 235 |
/* Undo the helper macros so this template file can be included again
   with different BUCKET_BITS / BLOCK_BITS parameters. */
#undef HASH_MAP_SIZE
#undef BLOCK_MASK
#undef BLOCK_SIZE
#undef BUCKET_SIZE

#undef HashLongestMatch
OLD | NEW |