OLD | NEW |
(Empty) | |
| 1 /* Copyright 2013 Google Inc. All Rights Reserved. |
| 2 |
| 3 Distributed under MIT license. |
| 4 See file LICENSE for detail or copy at https://opensource.org/licenses/MIT |
| 5 */ |
| 6 |
| 7 /* Implementation of Brotli compressor. */ |
| 8 |
| 9 #include <brotli/encode.h> |
| 10 |
| 11 #include <stdlib.h> /* free, malloc */ |
| 12 #include <string.h> /* memcpy, memset */ |
| 13 |
| 14 #include "../common/version.h" |
| 15 #include "./backward_references.h" |
| 16 #include "./bit_cost.h" |
| 17 #include "./brotli_bit_stream.h" |
| 18 #include "./compress_fragment.h" |
| 19 #include "./compress_fragment_two_pass.h" |
| 20 #include "./context.h" |
| 21 #include "./entropy_encode.h" |
| 22 #include "./fast_log.h" |
| 23 #include "./hash.h" |
| 24 #include "./histogram.h" |
| 25 #include "./memory.h" |
| 26 #include "./metablock.h" |
| 27 #include "./port.h" |
| 28 #include "./prefix.h" |
| 29 #include "./quality.h" |
| 30 #include "./ringbuffer.h" |
| 31 #include "./utf8_util.h" |
| 32 #include "./write_bits.h" |
| 33 |
| 34 #if defined(__cplusplus) || defined(c_plusplus) |
| 35 extern "C" { |
| 36 #endif |
| 37 |
| 38 #define COPY_ARRAY(dst, src) memcpy(dst, src, sizeof(src)); |
| 39 |
| 40 typedef enum BrotliEncoderStreamState { |
| 41 /* Default state. */ |
| 42 BROTLI_STREAM_PROCESSING = 0, |
| 43 /* Intermediate state; after next block is emitted, byte-padding should be |
| 44 performed before getting back to default state. */ |
| 45 BROTLI_STREAM_FLUSH_REQUESTED = 1, |
| 46 /* Last metablock was produced; no more input is acceptable. */ |
| 47 BROTLI_STREAM_FINISHED = 2, |
| 48 /* Flushing compressed block and writing meta-data block header. */ |
| 49 BROTLI_STREAM_METADATA_HEAD = 3, |
| 50 /* Writing metadata block body. */ |
| 51 BROTLI_STREAM_METADATA_BODY = 4 |
| 52 } BrotliEncoderStreamState; |
| 53 |
| 54 typedef struct BrotliEncoderStateStruct { |
| 55 BrotliEncoderParams params; |
| 56 |
| 57 MemoryManager memory_manager_; |
| 58 |
| 59 Hashers hashers_; |
| 60 uint64_t input_pos_; |
| 61 RingBuffer ringbuffer_; |
| 62 size_t cmd_alloc_size_; |
| 63 Command* commands_; |
| 64 size_t num_commands_; |
| 65 size_t num_literals_; |
| 66 size_t last_insert_len_; |
| 67 uint64_t last_flush_pos_; |
| 68 uint64_t last_processed_pos_; |
| 69 int dist_cache_[4]; |
| 70 int saved_dist_cache_[4]; |
| 71 uint8_t last_byte_; |
| 72 uint8_t last_byte_bits_; |
| 73 uint8_t prev_byte_; |
| 74 uint8_t prev_byte2_; |
| 75 size_t storage_size_; |
| 76 uint8_t* storage_; |
| 77 /* Hash table for FAST_ONE_PASS_COMPRESSION_QUALITY mode. */ |
| 78 int small_table_[1 << 10]; /* 4KiB */ |
| 79 int* large_table_; /* Allocated only when needed */ |
| 80 size_t large_table_size_; |
| 81 /* Command and distance prefix codes (each 64 symbols, stored back-to-back) |
| 82 used for the next block in FAST_ONE_PASS_COMPRESSION_QUALITY. The command |
| 83 prefix code is over a smaller alphabet with the following 64 symbols: |
| 84 0 - 15: insert length code 0, copy length code 0 - 15, same distance |
| 85 16 - 39: insert length code 0, copy length code 0 - 23 |
| 86 40 - 63: insert length code 0 - 23, copy length code 0 |
| 87 Note that symbols 16 and 40 represent the same code in the full alphabet, |
| 88 but we do not use either of them in FAST_ONE_PASS_COMPRESSION_QUALITY. */ |
| 89 uint8_t cmd_depths_[128]; |
| 90 uint16_t cmd_bits_[128]; |
| 91 /* The compressed form of the command and distance prefix codes for the next |
| 92 block in FAST_ONE_PASS_COMPRESSION_QUALITY. */ |
| 93 uint8_t cmd_code_[512]; |
| 94 size_t cmd_code_numbits_; |
| 95 /* Command and literal buffers for FAST_TWO_PASS_COMPRESSION_QUALITY. */ |
| 96 uint32_t* command_buf_; |
| 97 uint8_t* literal_buf_; |
| 98 |
| 99 uint8_t* next_out_; |
| 100 size_t available_out_; |
| 101 size_t total_out_; |
| 102 /* Temporary buffer for padding flush bits or metadata block header / body. */ |
| 103 union { |
| 104 uint64_t u64[2]; |
| 105 uint8_t u8[16]; |
| 106 } tiny_buf_; |
| 107 uint32_t remaining_metadata_bytes_; |
| 108 BrotliEncoderStreamState stream_state_; |
| 109 |
| 110 BROTLI_BOOL is_last_block_emitted_; |
| 111 BROTLI_BOOL is_initialized_; |
| 112 } BrotliEncoderStateStruct; |
| 113 |
| 114 static BROTLI_BOOL EnsureInitialized(BrotliEncoderState* s); |
| 115 |
| 116 static size_t InputBlockSize(BrotliEncoderState* s) { |
| 117 if (!EnsureInitialized(s)) return 0; |
| 118 return (size_t)1 << s->params.lgblock; |
| 119 } |
| 120 |
| 121 static uint64_t UnprocessedInputSize(BrotliEncoderState* s) { |
| 122 return s->input_pos_ - s->last_processed_pos_; |
| 123 } |
| 124 |
| 125 static size_t RemainingInputBlockSize(BrotliEncoderState* s) { |
| 126 const uint64_t delta = UnprocessedInputSize(s); |
| 127 size_t block_size = InputBlockSize(s); |
| 128 if (delta >= block_size) return 0; |
| 129 return block_size - (size_t)delta; |
| 130 } |
| 131 |
| 132 BROTLI_BOOL BrotliEncoderSetParameter( |
| 133 BrotliEncoderState* state, BrotliEncoderParameter p, uint32_t value) { |
| 134 /* Changing parameters on the fly is not implemented yet. */ |
| 135 if (state->is_initialized_) return BROTLI_FALSE; |
| 136 /* TODO: Validate/clamp parameters here. */ |
| 137 switch (p) { |
| 138 case BROTLI_PARAM_MODE: |
| 139 state->params.mode = (BrotliEncoderMode)value; |
| 140 return BROTLI_TRUE; |
| 141 |
| 142 case BROTLI_PARAM_QUALITY: |
| 143 state->params.quality = (int)value; |
| 144 return BROTLI_TRUE; |
| 145 |
| 146 case BROTLI_PARAM_LGWIN: |
| 147 state->params.lgwin = (int)value; |
| 148 return BROTLI_TRUE; |
| 149 |
| 150 case BROTLI_PARAM_LGBLOCK: |
| 151 state->params.lgblock = (int)value; |
| 152 return BROTLI_TRUE; |
| 153 |
| 154 default: return BROTLI_FALSE; |
| 155 } |
| 156 } |
| 157 |
| 158 static void RecomputeDistancePrefixes(Command* cmds, |
| 159 size_t num_commands, |
| 160 uint32_t num_direct_distance_codes, |
| 161 uint32_t distance_postfix_bits) { |
| 162 size_t i; |
| 163 if (num_direct_distance_codes == 0 && distance_postfix_bits == 0) { |
| 164 return; |
| 165 } |
| 166 for (i = 0; i < num_commands; ++i) { |
| 167 Command* cmd = &cmds[i]; |
| 168 if (CommandCopyLen(cmd) && cmd->cmd_prefix_ >= 128) { |
| 169 PrefixEncodeCopyDistance(CommandRestoreDistanceCode(cmd), |
| 170 num_direct_distance_codes, |
| 171 distance_postfix_bits, |
| 172 &cmd->dist_prefix_, |
| 173 &cmd->dist_extra_); |
| 174 } |
| 175 } |
| 176 } |
| 177 |
| 178 /* Wraps 64-bit input position to 32-bit ring-buffer position preserving |
| 179 "not-a-first-lap" feature. */ |
| 180 static uint32_t WrapPosition(uint64_t position) { |
| 181 uint32_t result = (uint32_t)position; |
| 182 uint64_t gb = position >> 30; |
| 183 if (gb > 2) { |
| 184 /* Wrap every 2GiB; The first 3GB are continuous. */ |
| 185 result = (result & ((1u << 30) - 1)) | ((uint32_t)((gb - 1) & 1) + 1) << 30; |
| 186 } |
| 187 return result; |
| 188 } |
| 189 |
| 190 static uint8_t* GetBrotliStorage(BrotliEncoderState* s, size_t size) { |
| 191 MemoryManager* m = &s->memory_manager_; |
| 192 if (s->storage_size_ < size) { |
| 193 BROTLI_FREE(m, s->storage_); |
| 194 s->storage_ = BROTLI_ALLOC(m, uint8_t, size); |
| 195 if (BROTLI_IS_OOM(m)) return NULL; |
| 196 s->storage_size_ = size; |
| 197 } |
| 198 return s->storage_; |
| 199 } |
| 200 |
| 201 static size_t HashTableSize(size_t max_table_size, size_t input_size) { |
| 202 size_t htsize = 256; |
| 203 while (htsize < max_table_size && htsize < input_size) { |
| 204 htsize <<= 1; |
| 205 } |
| 206 return htsize; |
| 207 } |
| 208 |
| 209 static int* GetHashTable(BrotliEncoderState* s, int quality, |
| 210 size_t input_size, size_t* table_size) { |
| 211 /* Use smaller hash table when input.size() is smaller, since we |
| 212 fill the table, incurring O(hash table size) overhead for |
| 213 compression, and if the input is short, we won't need that |
| 214 many hash table entries anyway. */ |
| 215 MemoryManager* m = &s->memory_manager_; |
| 216 const size_t max_table_size = MaxHashTableSize(quality); |
| 217 size_t htsize = HashTableSize(max_table_size, input_size); |
| 218 int* table; |
| 219 assert(max_table_size >= 256); |
| 220 if (quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
| 221 /* Only odd shifts are supported by fast-one-pass. */ |
| 222 if ((htsize & 0xAAAAA) == 0) { |
| 223 htsize <<= 1; |
| 224 } |
| 225 } |
| 226 |
| 227 if (htsize <= sizeof(s->small_table_) / sizeof(s->small_table_[0])) { |
| 228 table = s->small_table_; |
| 229 } else { |
| 230 if (htsize > s->large_table_size_) { |
| 231 s->large_table_size_ = htsize; |
| 232 BROTLI_FREE(m, s->large_table_); |
| 233 s->large_table_ = BROTLI_ALLOC(m, int, htsize); |
| 234 if (BROTLI_IS_OOM(m)) return 0; |
| 235 } |
| 236 table = s->large_table_; |
| 237 } |
| 238 |
| 239 *table_size = htsize; |
| 240 memset(table, 0, htsize * sizeof(*table)); |
| 241 return table; |
| 242 } |
| 243 |
| 244 static void EncodeWindowBits(int lgwin, uint8_t* last_byte, |
| 245 uint8_t* last_byte_bits) { |
| 246 if (lgwin == 16) { |
| 247 *last_byte = 0; |
| 248 *last_byte_bits = 1; |
| 249 } else if (lgwin == 17) { |
| 250 *last_byte = 1; |
| 251 *last_byte_bits = 7; |
| 252 } else if (lgwin > 17) { |
| 253 *last_byte = (uint8_t)(((lgwin - 17) << 1) | 1); |
| 254 *last_byte_bits = 4; |
| 255 } else { |
| 256 *last_byte = (uint8_t)(((lgwin - 8) << 4) | 1); |
| 257 *last_byte_bits = 7; |
| 258 } |
| 259 } |
| 260 |
| 261 /* Initializes the command and distance prefix codes for the first block. */ |
| 262 static void InitCommandPrefixCodes(uint8_t cmd_depths[128], |
| 263 uint16_t cmd_bits[128], |
| 264 uint8_t cmd_code[512], |
| 265 size_t* cmd_code_numbits) { |
| 266 static const uint8_t kDefaultCommandDepths[128] = { |
| 267 0, 4, 4, 5, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, |
| 268 0, 0, 0, 4, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, |
| 269 7, 7, 10, 10, 10, 10, 10, 10, 0, 4, 4, 5, 5, 5, 6, 6, |
| 270 7, 8, 8, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, |
| 271 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 272 6, 6, 6, 6, 6, 6, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4, |
| 273 4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 7, 7, 7, 8, 10, |
| 274 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, |
| 275 }; |
| 276 static const uint16_t kDefaultCommandBits[128] = { |
| 277 0, 0, 8, 9, 3, 35, 7, 71, |
| 278 39, 103, 23, 47, 175, 111, 239, 31, |
| 279 0, 0, 0, 4, 12, 2, 10, 6, |
| 280 13, 29, 11, 43, 27, 59, 87, 55, |
| 281 15, 79, 319, 831, 191, 703, 447, 959, |
| 282 0, 14, 1, 25, 5, 21, 19, 51, |
| 283 119, 159, 95, 223, 479, 991, 63, 575, |
| 284 127, 639, 383, 895, 255, 767, 511, 1023, |
| 285 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 286 27, 59, 7, 39, 23, 55, 30, 1, 17, 9, 25, 5, 0, 8, 4, 12, |
| 287 2, 10, 6, 21, 13, 29, 3, 19, 11, 15, 47, 31, 95, 63, 127, 255, |
| 288 767, 2815, 1791, 3839, 511, 2559, 1535, 3583, 1023, 3071, 2047, 4095, |
| 289 }; |
| 290 static const uint8_t kDefaultCommandCode[] = { |
| 291 0xff, 0x77, 0xd5, 0xbf, 0xe7, 0xde, 0xea, 0x9e, 0x51, 0x5d, 0xde, 0xc6, |
| 292 0x70, 0x57, 0xbc, 0x58, 0x58, 0x58, 0xd8, 0xd8, 0x58, 0xd5, 0xcb, 0x8c, |
| 293 0xea, 0xe0, 0xc3, 0x87, 0x1f, 0x83, 0xc1, 0x60, 0x1c, 0x67, 0xb2, 0xaa, |
| 294 0x06, 0x83, 0xc1, 0x60, 0x30, 0x18, 0xcc, 0xa1, 0xce, 0x88, 0x54, 0x94, |
| 295 0x46, 0xe1, 0xb0, 0xd0, 0x4e, 0xb2, 0xf7, 0x04, 0x00, |
| 296 }; |
| 297 static const size_t kDefaultCommandCodeNumBits = 448; |
| 298 COPY_ARRAY(cmd_depths, kDefaultCommandDepths); |
| 299 COPY_ARRAY(cmd_bits, kDefaultCommandBits); |
| 300 |
| 301 /* Initialize the pre-compressed form of the command and distance prefix |
| 302 codes. */ |
| 303 COPY_ARRAY(cmd_code, kDefaultCommandCode); |
| 304 *cmd_code_numbits = kDefaultCommandCodeNumBits; |
| 305 } |
| 306 |
| 307 /* Decide about the context map based on the ability of the prediction |
| 308 ability of the previous byte UTF8-prefix on the next byte. The |
| 309 prediction ability is calculated as Shannon entropy. Here we need |
| 310 Shannon entropy instead of 'BitsEntropy' since the prefix will be |
| 311 encoded with the remaining 6 bits of the following byte, and |
| 312 BitsEntropy will assume that symbol to be stored alone using Huffman |
| 313 coding. */ |
| 314 static void ChooseContextMap(int quality, |
| 315 uint32_t* bigram_histo, |
| 316 size_t* num_literal_contexts, |
| 317 const uint32_t** literal_context_map) { |
| 318 static const uint32_t kStaticContextMapContinuation[64] = { |
| 319 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 320 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 321 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 322 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 323 }; |
| 324 static const uint32_t kStaticContextMapSimpleUTF8[64] = { |
| 325 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 326 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 327 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 328 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, |
| 329 }; |
| 330 |
| 331 uint32_t monogram_histo[3] = { 0 }; |
| 332 uint32_t two_prefix_histo[6] = { 0 }; |
| 333 size_t total = 0; |
| 334 size_t i; |
| 335 size_t dummy; |
| 336 double entropy[4]; |
| 337 for (i = 0; i < 9; ++i) { |
| 338 size_t j = i; |
| 339 total += bigram_histo[i]; |
| 340 monogram_histo[i % 3] += bigram_histo[i]; |
| 341 if (j >= 6) { |
| 342 j -= 6; |
| 343 } |
| 344 two_prefix_histo[j] += bigram_histo[i]; |
| 345 } |
| 346 entropy[1] = ShannonEntropy(monogram_histo, 3, &dummy); |
| 347 entropy[2] = (ShannonEntropy(two_prefix_histo, 3, &dummy) + |
| 348 ShannonEntropy(two_prefix_histo + 3, 3, &dummy)); |
| 349 entropy[3] = 0; |
| 350 for (i = 0; i < 3; ++i) { |
| 351 entropy[3] += ShannonEntropy(bigram_histo + 3 * i, 3, &dummy); |
| 352 } |
| 353 |
| 354 assert(total != 0); |
| 355 entropy[0] = 1.0 / (double)total; |
| 356 entropy[1] *= entropy[0]; |
| 357 entropy[2] *= entropy[0]; |
| 358 entropy[3] *= entropy[0]; |
| 359 |
| 360 if (quality < MIN_QUALITY_FOR_HQ_CONTEXT_MODELING) { |
| 361 /* 3 context models is a bit slower, don't use it at lower qualities. */ |
| 362 entropy[3] = entropy[1] * 10; |
| 363 } |
| 364 /* If expected savings by symbol are less than 0.2 bits, skip the |
| 365 context modeling -- in exchange for faster decoding speed. */ |
| 366 if (entropy[1] - entropy[2] < 0.2 && |
| 367 entropy[1] - entropy[3] < 0.2) { |
| 368 *num_literal_contexts = 1; |
| 369 } else if (entropy[2] - entropy[3] < 0.02) { |
| 370 *num_literal_contexts = 2; |
| 371 *literal_context_map = kStaticContextMapSimpleUTF8; |
| 372 } else { |
| 373 *num_literal_contexts = 3; |
| 374 *literal_context_map = kStaticContextMapContinuation; |
| 375 } |
| 376 } |
| 377 |
| 378 static void DecideOverLiteralContextModeling(const uint8_t* input, |
| 379 size_t start_pos, size_t length, size_t mask, int quality, |
| 380 ContextType* literal_context_mode, size_t* num_literal_contexts, |
| 381 const uint32_t** literal_context_map) { |
| 382 if (quality < MIN_QUALITY_FOR_CONTEXT_MODELING || length < 64) { |
| 383 return; |
| 384 } else { |
| 385 /* Gather bi-gram data of the UTF8 byte prefixes. To make the analysis of |
| 386 UTF8 data faster we only examine 64 byte long strides at every 4kB |
| 387 intervals. */ |
| 388 const size_t end_pos = start_pos + length; |
| 389 uint32_t bigram_prefix_histo[9] = { 0 }; |
| 390 for (; start_pos + 64 <= end_pos; start_pos += 4096) { |
| 391 static const int lut[4] = { 0, 0, 1, 2 }; |
| 392 const size_t stride_end_pos = start_pos + 64; |
| 393 int prev = lut[input[start_pos & mask] >> 6] * 3; |
| 394 size_t pos; |
| 395 for (pos = start_pos + 1; pos < stride_end_pos; ++pos) { |
| 396 const uint8_t literal = input[pos & mask]; |
| 397 ++bigram_prefix_histo[prev + lut[literal >> 6]]; |
| 398 prev = lut[literal >> 6] * 3; |
| 399 } |
| 400 } |
| 401 *literal_context_mode = CONTEXT_UTF8; |
| 402 ChooseContextMap(quality, &bigram_prefix_histo[0], num_literal_contexts, |
| 403 literal_context_map); |
| 404 } |
| 405 } |
| 406 |
| 407 static BROTLI_BOOL ShouldCompress( |
| 408 const uint8_t* data, const size_t mask, const uint64_t last_flush_pos, |
| 409 const size_t bytes, const size_t num_literals, const size_t num_commands) { |
| 410 if (num_commands < (bytes >> 8) + 2) { |
| 411 if (num_literals > 0.99 * (double)bytes) { |
| 412 uint32_t literal_histo[256] = { 0 }; |
| 413 static const uint32_t kSampleRate = 13; |
| 414 static const double kMinEntropy = 7.92; |
| 415 const double bit_cost_threshold = |
| 416 (double)bytes * kMinEntropy / kSampleRate; |
| 417 size_t t = (bytes + kSampleRate - 1) / kSampleRate; |
| 418 uint32_t pos = (uint32_t)last_flush_pos; |
| 419 size_t i; |
| 420 for (i = 0; i < t; i++) { |
| 421 ++literal_histo[data[pos & mask]]; |
| 422 pos += kSampleRate; |
| 423 } |
| 424 if (BitsEntropy(literal_histo, 256) > bit_cost_threshold) { |
| 425 return BROTLI_FALSE; |
| 426 } |
| 427 } |
| 428 } |
| 429 return BROTLI_TRUE; |
| 430 } |
| 431 |
| 432 static void WriteMetaBlockInternal(MemoryManager* m, |
| 433 const uint8_t* data, |
| 434 const size_t mask, |
| 435 const uint64_t last_flush_pos, |
| 436 const size_t bytes, |
| 437 const BROTLI_BOOL is_last, |
| 438 const BrotliEncoderParams* params, |
| 439 const uint8_t prev_byte, |
| 440 const uint8_t prev_byte2, |
| 441 const size_t num_literals, |
| 442 const size_t num_commands, |
| 443 Command* commands, |
| 444 const int* saved_dist_cache, |
| 445 int* dist_cache, |
| 446 size_t* storage_ix, |
| 447 uint8_t* storage) { |
| 448 const uint32_t wrapped_last_flush_pos = WrapPosition(last_flush_pos); |
| 449 uint8_t last_byte; |
| 450 uint8_t last_byte_bits; |
| 451 uint32_t num_direct_distance_codes = 0; |
| 452 uint32_t distance_postfix_bits = 0; |
| 453 |
| 454 if (bytes == 0) { |
| 455 /* Write the ISLAST and ISEMPTY bits. */ |
| 456 BrotliWriteBits(2, 3, storage_ix, storage); |
| 457 *storage_ix = (*storage_ix + 7u) & ~7u; |
| 458 return; |
| 459 } |
| 460 |
| 461 if (!ShouldCompress(data, mask, last_flush_pos, bytes, |
| 462 num_literals, num_commands)) { |
| 463 /* Restore the distance cache, as its last update by |
| 464 CreateBackwardReferences is now unused. */ |
| 465 memcpy(dist_cache, saved_dist_cache, 4 * sizeof(dist_cache[0])); |
| 466 BrotliStoreUncompressedMetaBlock(is_last, data, |
| 467 wrapped_last_flush_pos, mask, bytes, |
| 468 storage_ix, storage); |
| 469 return; |
| 470 } |
| 471 |
| 472 last_byte = storage[0]; |
| 473 last_byte_bits = (uint8_t)(*storage_ix & 0xff); |
| 474 if (params->quality >= MIN_QUALITY_FOR_RECOMPUTE_DISTANCE_PREFIXES && |
| 475 params->mode == BROTLI_MODE_FONT) { |
| 476 num_direct_distance_codes = 12; |
| 477 distance_postfix_bits = 1; |
| 478 RecomputeDistancePrefixes(commands, |
| 479 num_commands, |
| 480 num_direct_distance_codes, |
| 481 distance_postfix_bits); |
| 482 } |
| 483 if (params->quality <= MAX_QUALITY_FOR_STATIC_ENRTOPY_CODES) { |
| 484 BrotliStoreMetaBlockFast(m, data, wrapped_last_flush_pos, |
| 485 bytes, mask, is_last, |
| 486 commands, num_commands, |
| 487 storage_ix, storage); |
| 488 if (BROTLI_IS_OOM(m)) return; |
| 489 } else if (params->quality < MIN_QUALITY_FOR_BLOCK_SPLIT) { |
| 490 BrotliStoreMetaBlockTrivial(m, data, wrapped_last_flush_pos, |
| 491 bytes, mask, is_last, |
| 492 commands, num_commands, |
| 493 storage_ix, storage); |
| 494 if (BROTLI_IS_OOM(m)) return; |
| 495 } else { |
| 496 ContextType literal_context_mode = CONTEXT_UTF8; |
| 497 MetaBlockSplit mb; |
| 498 InitMetaBlockSplit(&mb); |
| 499 if (params->quality < MIN_QUALITY_FOR_HQ_BLOCK_SPLITTING) { |
| 500 size_t num_literal_contexts = 1; |
| 501 const uint32_t* literal_context_map = NULL; |
| 502 DecideOverLiteralContextModeling(data, wrapped_last_flush_pos, |
| 503 bytes, mask, |
| 504 params->quality, |
| 505 &literal_context_mode, |
| 506 &num_literal_contexts, |
| 507 &literal_context_map); |
| 508 if (literal_context_map == NULL) { |
| 509 BrotliBuildMetaBlockGreedy(m, data, wrapped_last_flush_pos, mask, |
| 510 commands, num_commands, &mb); |
| 511 if (BROTLI_IS_OOM(m)) return; |
| 512 } else { |
| 513 BrotliBuildMetaBlockGreedyWithContexts(m, data, |
| 514 wrapped_last_flush_pos, |
| 515 mask, |
| 516 prev_byte, prev_byte2, |
| 517 literal_context_mode, |
| 518 num_literal_contexts, |
| 519 literal_context_map, |
| 520 commands, num_commands, |
| 521 &mb); |
| 522 if (BROTLI_IS_OOM(m)) return; |
| 523 } |
| 524 } else { |
| 525 if (!BrotliIsMostlyUTF8(data, wrapped_last_flush_pos, mask, bytes, |
| 526 kMinUTF8Ratio)) { |
| 527 literal_context_mode = CONTEXT_SIGNED; |
| 528 } |
| 529 BrotliBuildMetaBlock(m, data, wrapped_last_flush_pos, mask, params, |
| 530 prev_byte, prev_byte2, |
| 531 commands, num_commands, |
| 532 literal_context_mode, |
| 533 &mb); |
| 534 if (BROTLI_IS_OOM(m)) return; |
| 535 } |
| 536 if (params->quality >= MIN_QUALITY_FOR_OPTIMIZE_HISTOGRAMS) { |
| 537 BrotliOptimizeHistograms(num_direct_distance_codes, |
| 538 distance_postfix_bits, |
| 539 &mb); |
| 540 } |
| 541 BrotliStoreMetaBlock(m, data, wrapped_last_flush_pos, bytes, mask, |
| 542 prev_byte, prev_byte2, |
| 543 is_last, |
| 544 num_direct_distance_codes, |
| 545 distance_postfix_bits, |
| 546 literal_context_mode, |
| 547 commands, num_commands, |
| 548 &mb, |
| 549 storage_ix, storage); |
| 550 if (BROTLI_IS_OOM(m)) return; |
| 551 DestroyMetaBlockSplit(m, &mb); |
| 552 } |
| 553 if (bytes + 4 < (*storage_ix >> 3)) { |
| 554 /* Restore the distance cache and last byte. */ |
| 555 memcpy(dist_cache, saved_dist_cache, 4 * sizeof(dist_cache[0])); |
| 556 storage[0] = last_byte; |
| 557 *storage_ix = last_byte_bits; |
| 558 BrotliStoreUncompressedMetaBlock(is_last, data, |
| 559 wrapped_last_flush_pos, mask, |
| 560 bytes, storage_ix, storage); |
| 561 } |
| 562 } |
| 563 |
| 564 static BROTLI_BOOL EnsureInitialized(BrotliEncoderState* s) { |
| 565 if (BROTLI_IS_OOM(&s->memory_manager_)) return BROTLI_FALSE; |
| 566 if (s->is_initialized_) return BROTLI_TRUE; |
| 567 |
| 568 SanitizeParams(&s->params); |
| 569 s->params.lgblock = ComputeLgBlock(&s->params); |
| 570 |
| 571 s->remaining_metadata_bytes_ = BROTLI_UINT32_MAX; |
| 572 |
| 573 RingBufferSetup(&s->params, &s->ringbuffer_); |
| 574 |
| 575 /* Initialize last byte with stream header. */ |
| 576 { |
| 577 int lgwin = s->params.lgwin; |
| 578 if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY || |
| 579 s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
| 580 lgwin = BROTLI_MAX(int, lgwin, 18); |
| 581 } |
| 582 EncodeWindowBits(lgwin, &s->last_byte_, &s->last_byte_bits_); |
| 583 } |
| 584 |
| 585 if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
| 586 InitCommandPrefixCodes(s->cmd_depths_, s->cmd_bits_, |
| 587 s->cmd_code_, &s->cmd_code_numbits_); |
| 588 } |
| 589 |
| 590 /* Initialize hashers. */ |
| 591 HashersSetup(&s->memory_manager_, &s->hashers_, ChooseHasher(&s->params)); |
| 592 if (BROTLI_IS_OOM(&s->memory_manager_)) return BROTLI_FALSE; |
| 593 |
| 594 s->is_initialized_ = BROTLI_TRUE; |
| 595 return BROTLI_TRUE; |
| 596 } |
| 597 |
| 598 static void BrotliEncoderInitState(BrotliEncoderState* s) { |
| 599 s->params.mode = BROTLI_DEFAULT_MODE; |
| 600 s->params.quality = BROTLI_DEFAULT_QUALITY; |
| 601 s->params.lgwin = BROTLI_DEFAULT_WINDOW; |
| 602 s->params.lgblock = 0; |
| 603 |
| 604 s->input_pos_ = 0; |
| 605 s->num_commands_ = 0; |
| 606 s->num_literals_ = 0; |
| 607 s->last_insert_len_ = 0; |
| 608 s->last_flush_pos_ = 0; |
| 609 s->last_processed_pos_ = 0; |
| 610 s->prev_byte_ = 0; |
| 611 s->prev_byte2_ = 0; |
| 612 s->storage_size_ = 0; |
| 613 s->storage_ = 0; |
| 614 s->large_table_ = NULL; |
| 615 s->large_table_size_ = 0; |
| 616 s->cmd_code_numbits_ = 0; |
| 617 s->command_buf_ = NULL; |
| 618 s->literal_buf_ = NULL; |
| 619 s->next_out_ = NULL; |
| 620 s->available_out_ = 0; |
| 621 s->total_out_ = 0; |
| 622 s->stream_state_ = BROTLI_STREAM_PROCESSING; |
| 623 s->is_last_block_emitted_ = BROTLI_FALSE; |
| 624 s->is_initialized_ = BROTLI_FALSE; |
| 625 |
| 626 InitHashers(&s->hashers_); |
| 627 |
| 628 RingBufferInit(&s->ringbuffer_); |
| 629 |
| 630 s->commands_ = 0; |
| 631 s->cmd_alloc_size_ = 0; |
| 632 |
| 633 /* Initialize distance cache. */ |
| 634 s->dist_cache_[0] = 4; |
| 635 s->dist_cache_[1] = 11; |
| 636 s->dist_cache_[2] = 15; |
| 637 s->dist_cache_[3] = 16; |
| 638 /* Save the state of the distance cache in case we need to restore it for |
| 639 emitting an uncompressed block. */ |
| 640 memcpy(s->saved_dist_cache_, s->dist_cache_, sizeof(s->dist_cache_)); |
| 641 } |
| 642 |
| 643 BrotliEncoderState* BrotliEncoderCreateInstance(brotli_alloc_func alloc_func, |
| 644 brotli_free_func free_func, |
| 645 void* opaque) { |
| 646 BrotliEncoderState* state = 0; |
| 647 if (!alloc_func && !free_func) { |
| 648 state = (BrotliEncoderState*)malloc(sizeof(BrotliEncoderState)); |
| 649 } else if (alloc_func && free_func) { |
| 650 state = (BrotliEncoderState*)alloc_func(opaque, sizeof(BrotliEncoderState)); |
| 651 } |
| 652 if (state == 0) { |
| 653 /* BROTLI_DUMP(); */ |
| 654 return 0; |
| 655 } |
| 656 BrotliInitMemoryManager( |
| 657 &state->memory_manager_, alloc_func, free_func, opaque); |
| 658 BrotliEncoderInitState(state); |
| 659 return state; |
| 660 } |
| 661 |
| 662 static void BrotliEncoderCleanupState(BrotliEncoderState* s) { |
| 663 MemoryManager* m = &s->memory_manager_; |
| 664 if (BROTLI_IS_OOM(m)) { |
| 665 BrotliWipeOutMemoryManager(m); |
| 666 return; |
| 667 } |
| 668 BROTLI_FREE(m, s->storage_); |
| 669 BROTLI_FREE(m, s->commands_); |
| 670 RingBufferFree(m, &s->ringbuffer_); |
| 671 DestroyHashers(m, &s->hashers_); |
| 672 BROTLI_FREE(m, s->large_table_); |
| 673 BROTLI_FREE(m, s->command_buf_); |
| 674 BROTLI_FREE(m, s->literal_buf_); |
| 675 } |
| 676 |
| 677 /* Deinitializes and frees BrotliEncoderState instance. */ |
| 678 void BrotliEncoderDestroyInstance(BrotliEncoderState* state) { |
| 679 if (!state) { |
| 680 return; |
| 681 } else { |
| 682 MemoryManager* m = &state->memory_manager_; |
| 683 brotli_free_func free_func = m->free_func; |
| 684 void* opaque = m->opaque; |
| 685 BrotliEncoderCleanupState(state); |
| 686 free_func(opaque, state); |
| 687 } |
| 688 } |
| 689 |
| 690 /* |
| 691 Copies the given input data to the internal ring buffer of the compressor. |
| 692 No processing of the data occurs at this time and this function can be |
| 693 called multiple times before calling WriteBrotliData() to process the |
| 694 accumulated input. At most input_block_size() bytes of input data can be |
| 695 copied to the ring buffer, otherwise the next WriteBrotliData() will fail. |
| 696 */ |
| 697 static void CopyInputToRingBuffer(BrotliEncoderState* s, |
| 698 const size_t input_size, |
| 699 const uint8_t* input_buffer) { |
| 700 RingBuffer* ringbuffer_ = &s->ringbuffer_; |
| 701 MemoryManager* m = &s->memory_manager_; |
| 702 if (!EnsureInitialized(s)) return; |
| 703 RingBufferWrite(m, input_buffer, input_size, ringbuffer_); |
| 704 if (BROTLI_IS_OOM(m)) return; |
| 705 s->input_pos_ += input_size; |
| 706 |
| 707 /* TL;DR: If needed, initialize 7 more bytes in the ring buffer to make the |
| 708 hashing not depend on uninitialized data. This makes compression |
| 709 deterministic and it prevents uninitialized memory warnings in Valgrind. |
| 710 Even without erasing, the output would be valid (but nondeterministic). |
| 711 |
| 712 Background information: The compressor stores short (at most 8 bytes) |
| 713 substrings of the input already read in a hash table, and detects |
| 714 repetitions by looking up such substrings in the hash table. If it |
| 715 can find a substring, it checks whether the substring is really there |
| 716 in the ring buffer (or it's just a hash collision). Should the hash |
| 717 table become corrupt, this check makes sure that the output is |
| 718 still valid, albeit the compression ratio would be bad. |
| 719 |
| 720 The compressor populates the hash table from the ring buffer as it's |
| 721 reading new bytes from the input. However, at the last few indexes of |
| 722 the ring buffer, there are not enough bytes to build full-length |
| 723 substrings from. Since the hash table always contains full-length |
| 724 substrings, we erase with dummy zeros here to make sure that those |
| 725 substrings will contain zeros at the end instead of uninitialized |
| 726 data. |
| 727 |
| 728 Please note that erasing is not necessary (because the |
| 729 memory region is already initialized since he ring buffer |
| 730 has a `tail' that holds a copy of the beginning,) so we |
| 731 skip erasing if we have already gone around at least once in |
| 732 the ring buffer. |
| 733 |
| 734 Only clear during the first round of ring-buffer writes. On |
| 735 subsequent rounds data in the ring-buffer would be affected. */ |
| 736 if (ringbuffer_->pos_ <= ringbuffer_->mask_) { |
| 737 /* This is the first time when the ring buffer is being written. |
| 738 We clear 7 bytes just after the bytes that have been copied from |
| 739 the input buffer. |
| 740 |
| 741 The ring-buffer has a "tail" that holds a copy of the beginning, |
| 742 but only once the ring buffer has been fully written once, i.e., |
| 743 pos <= mask. For the first time, we need to write values |
| 744 in this tail (where index may be larger than mask), so that |
| 745 we have exactly defined behavior and don't read uninitialized |
| 746 memory. Due to performance reasons, hashing reads data using a |
| 747 LOAD64, which can go 7 bytes beyond the bytes written in the |
| 748 ring-buffer. */ |
| 749 memset(ringbuffer_->buffer_ + ringbuffer_->pos_, 0, 7); |
| 750 } |
| 751 } |
| 752 |
| 753 void BrotliEncoderSetCustomDictionary(BrotliEncoderState* s, size_t size, |
| 754 const uint8_t* dict) { |
| 755 size_t max_dict_size = MaxBackwardLimit(s->params.lgwin); |
| 756 size_t dict_size = size; |
| 757 MemoryManager* m = &s->memory_manager_; |
| 758 |
| 759 if (!EnsureInitialized(s)) return; |
| 760 |
| 761 if (dict_size == 0 || |
| 762 s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY || |
| 763 s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
| 764 return; |
| 765 } |
| 766 if (size > max_dict_size) { |
| 767 dict += size - max_dict_size; |
| 768 dict_size = max_dict_size; |
| 769 } |
| 770 CopyInputToRingBuffer(s, dict_size, dict); |
| 771 s->last_flush_pos_ = dict_size; |
| 772 s->last_processed_pos_ = dict_size; |
| 773 if (dict_size > 0) { |
| 774 s->prev_byte_ = dict[dict_size - 1]; |
| 775 } |
| 776 if (dict_size > 1) { |
| 777 s->prev_byte2_ = dict[dict_size - 2]; |
| 778 } |
| 779 HashersPrependCustomDictionary(m, &s->hashers_, &s->params, dict_size, dict); |
| 780 if (BROTLI_IS_OOM(m)) return; |
| 781 } |
| 782 |
| 783 /* Marks all input as processed. |
| 784 Returns true if position wrapping occurs. */ |
| 785 static BROTLI_BOOL UpdateLastProcessedPos(BrotliEncoderState* s) { |
| 786 uint32_t wrapped_last_processed_pos = WrapPosition(s->last_processed_pos_); |
| 787 uint32_t wrapped_input_pos = WrapPosition(s->input_pos_); |
| 788 s->last_processed_pos_ = s->input_pos_; |
| 789 return TO_BROTLI_BOOL(wrapped_input_pos < wrapped_last_processed_pos); |
| 790 } |
| 791 |
| 792 /* |
| 793 Processes the accumulated input data and sets |*out_size| to the length of |
| 794 the new output meta-block, or to zero if no new output meta-block has been |
| 795 created (in this case the processed input data is buffered internally). |
| 796 If |*out_size| is positive, |*output| points to the start of the output |
| 797 data. If |is_last| or |force_flush| is BROTLI_TRUE, an output meta-block is |
| 798 always created. However, until |is_last| is BROTLI_TRUE encoder may retain up |
| 799 to 7 bits of the last byte of output. To force encoder to dump the remaining |
| 800 bits use WriteMetadata() to append an empty meta-data block. |
| 801 Returns BROTLI_FALSE if the size of the input data is larger than |
| 802 input_block_size(). |
| 803 */ |
| 804 static BROTLI_BOOL EncodeData( |
| 805 BrotliEncoderState* s, const BROTLI_BOOL is_last, |
| 806 const BROTLI_BOOL force_flush, size_t* out_size, uint8_t** output) { |
| 807 const uint64_t delta = UnprocessedInputSize(s); |
| 808 const uint32_t bytes = (uint32_t)delta; |
| 809 const uint32_t wrapped_last_processed_pos = |
| 810 WrapPosition(s->last_processed_pos_); |
| 811 uint8_t* data; |
| 812 uint32_t mask; |
| 813 MemoryManager* m = &s->memory_manager_; |
| 814 |
| 815 if (!EnsureInitialized(s)) return BROTLI_FALSE; |
| 816 data = s->ringbuffer_.buffer_; |
| 817 mask = s->ringbuffer_.mask_; |
| 818 |
| 819 /* Adding more blocks after "last" block is forbidden. */ |
| 820 if (s->is_last_block_emitted_) return BROTLI_FALSE; |
| 821 if (is_last) s->is_last_block_emitted_ = BROTLI_TRUE; |
| 822 |
| 823 if (delta > InputBlockSize(s)) { |
| 824 return BROTLI_FALSE; |
| 825 } |
| 826 if (s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY && |
| 827 !s->command_buf_) { |
| 828 s->command_buf_ = |
| 829 BROTLI_ALLOC(m, uint32_t, kCompressFragmentTwoPassBlockSize); |
| 830 s->literal_buf_ = |
| 831 BROTLI_ALLOC(m, uint8_t, kCompressFragmentTwoPassBlockSize); |
| 832 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 833 } |
| 834 |
| 835 if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY || |
| 836 s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
| 837 uint8_t* storage; |
| 838 size_t storage_ix = s->last_byte_bits_; |
| 839 size_t table_size; |
| 840 int* table; |
| 841 |
| 842 if (delta == 0 && !is_last) { |
| 843 /* We have no new input data and we don't have to finish the stream, so |
| 844 nothing to do. */ |
| 845 *out_size = 0; |
| 846 return BROTLI_TRUE; |
| 847 } |
| 848 storage = GetBrotliStorage(s, 2 * bytes + 502); |
| 849 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 850 storage[0] = s->last_byte_; |
| 851 table = GetHashTable(s, s->params.quality, bytes, &table_size); |
| 852 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 853 if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
| 854 BrotliCompressFragmentFast( |
| 855 m, &data[wrapped_last_processed_pos & mask], |
| 856 bytes, is_last, |
| 857 table, table_size, |
| 858 s->cmd_depths_, s->cmd_bits_, |
| 859 &s->cmd_code_numbits_, s->cmd_code_, |
| 860 &storage_ix, storage); |
| 861 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 862 } else { |
| 863 BrotliCompressFragmentTwoPass( |
| 864 m, &data[wrapped_last_processed_pos & mask], |
| 865 bytes, is_last, |
| 866 s->command_buf_, s->literal_buf_, |
| 867 table, table_size, |
| 868 &storage_ix, storage); |
| 869 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 870 } |
| 871 s->last_byte_ = storage[storage_ix >> 3]; |
| 872 s->last_byte_bits_ = storage_ix & 7u; |
| 873 UpdateLastProcessedPos(s); |
| 874 *output = &storage[0]; |
| 875 *out_size = storage_ix >> 3; |
| 876 return BROTLI_TRUE; |
| 877 } |
| 878 |
| 879 { |
| 880 /* Theoretical max number of commands is 1 per 2 bytes. */ |
| 881 size_t newsize = s->num_commands_ + bytes / 2 + 1; |
| 882 if (newsize > s->cmd_alloc_size_) { |
| 883 Command* new_commands; |
| 884 /* Reserve a bit more memory to allow merging with a next block |
| 885 without reallocation: that would impact speed. */ |
| 886 newsize += (bytes / 4) + 16; |
| 887 s->cmd_alloc_size_ = newsize; |
| 888 new_commands = BROTLI_ALLOC(m, Command, newsize); |
| 889 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 890 if (s->commands_) { |
| 891 memcpy(new_commands, s->commands_, sizeof(Command) * s->num_commands_); |
| 892 BROTLI_FREE(m, s->commands_); |
| 893 } |
| 894 s->commands_ = new_commands; |
| 895 } |
| 896 } |
| 897 |
| 898 BrotliCreateBackwardReferences(m, bytes, wrapped_last_processed_pos, |
| 899 is_last, data, mask, |
| 900 &s->params, |
| 901 &s->hashers_, |
| 902 s->dist_cache_, |
| 903 &s->last_insert_len_, |
| 904 &s->commands_[s->num_commands_], |
| 905 &s->num_commands_, |
| 906 &s->num_literals_); |
| 907 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 908 |
| 909 { |
| 910 const size_t max_length = MaxMetablockSize(&s->params); |
| 911 const size_t max_literals = max_length / 8; |
| 912 const size_t max_commands = max_length / 8; |
| 913 const size_t processed_bytes = (size_t)(s->input_pos_ - s->last_flush_pos_); |
| 914 /* If maximal possible additional block doesn't fit metablock, flush now. */ |
| 915 /* TODO: Postpone decision until next block arrives? */ |
| 916 const BROTLI_BOOL next_input_fits_metablock = TO_BROTLI_BOOL( |
| 917 processed_bytes + InputBlockSize(s) <= max_length); |
| 918 /* If block splitting is not used, then flush as soon as there is some |
| 919 amount of commands / literals produced. */ |
| 920 const BROTLI_BOOL should_flush = TO_BROTLI_BOOL( |
| 921 s->params.quality < MIN_QUALITY_FOR_BLOCK_SPLIT && |
| 922 s->num_literals_ + s->num_commands_ >= MAX_NUM_DELAYED_SYMBOLS); |
| 923 if (!is_last && !force_flush && !should_flush && |
| 924 next_input_fits_metablock && |
| 925 s->num_literals_ < max_literals && |
| 926 s->num_commands_ < max_commands) { |
| 927 /* Merge with next input block. Everything will happen later. */ |
| 928 if (UpdateLastProcessedPos(s)) { |
| 929 HashersReset(&s->hashers_, ChooseHasher(&s->params)); |
| 930 } |
| 931 *out_size = 0; |
| 932 return BROTLI_TRUE; |
| 933 } |
| 934 } |
| 935 |
| 936 /* Create the last insert-only command. */ |
| 937 if (s->last_insert_len_ > 0) { |
| 938 InitInsertCommand(&s->commands_[s->num_commands_++], s->last_insert_len_); |
| 939 s->num_literals_ += s->last_insert_len_; |
| 940 s->last_insert_len_ = 0; |
| 941 } |
| 942 |
| 943 if (!is_last && s->input_pos_ == s->last_flush_pos_) { |
| 944 /* We have no new input data and we don't have to finish the stream, so |
| 945 nothing to do. */ |
| 946 *out_size = 0; |
| 947 return BROTLI_TRUE; |
| 948 } |
| 949 assert(s->input_pos_ >= s->last_flush_pos_); |
| 950 assert(s->input_pos_ > s->last_flush_pos_ || is_last); |
| 951 assert(s->input_pos_ - s->last_flush_pos_ <= 1u << 24); |
| 952 { |
| 953 const uint32_t metablock_size = |
| 954 (uint32_t)(s->input_pos_ - s->last_flush_pos_); |
| 955 uint8_t* storage = GetBrotliStorage(s, 2 * metablock_size + 502); |
| 956 size_t storage_ix = s->last_byte_bits_; |
| 957 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 958 storage[0] = s->last_byte_; |
| 959 WriteMetaBlockInternal( |
| 960 m, data, mask, s->last_flush_pos_, metablock_size, is_last, |
| 961 &s->params, s->prev_byte_, s->prev_byte2_, |
| 962 s->num_literals_, s->num_commands_, s->commands_, s->saved_dist_cache_, |
| 963 s->dist_cache_, &storage_ix, storage); |
| 964 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 965 s->last_byte_ = storage[storage_ix >> 3]; |
| 966 s->last_byte_bits_ = storage_ix & 7u; |
| 967 s->last_flush_pos_ = s->input_pos_; |
| 968 if (UpdateLastProcessedPos(s)) { |
| 969 HashersReset(&s->hashers_, ChooseHasher(&s->params)); |
| 970 } |
| 971 if (s->last_flush_pos_ > 0) { |
| 972 s->prev_byte_ = data[((uint32_t)s->last_flush_pos_ - 1) & mask]; |
| 973 } |
| 974 if (s->last_flush_pos_ > 1) { |
| 975 s->prev_byte2_ = data[(uint32_t)(s->last_flush_pos_ - 2) & mask]; |
| 976 } |
| 977 s->num_commands_ = 0; |
| 978 s->num_literals_ = 0; |
| 979 /* Save the state of the distance cache in case we need to restore it for |
| 980 emitting an uncompressed block. */ |
| 981 memcpy(s->saved_dist_cache_, s->dist_cache_, sizeof(s->dist_cache_)); |
| 982 *output = &storage[0]; |
| 983 *out_size = storage_ix >> 3; |
| 984 return BROTLI_TRUE; |
| 985 } |
| 986 } |
| 987 |
| 988 /* Dumps remaining output bits and metadata header to |header|. |
| 989 Returns number of produced bytes. |
| 990 REQUIRED: |header| should be 8-byte aligned and at least 16 bytes long. |
| 991 REQUIRED: |block_size| <= (1 << 24). */ |
| 992 static size_t WriteMetadataHeader( |
| 993 BrotliEncoderState* s, const size_t block_size, uint8_t* header) { |
| 994 size_t storage_ix; |
| 995 storage_ix = s->last_byte_bits_; |
| 996 header[0] = s->last_byte_; |
| 997 s->last_byte_ = 0; |
| 998 s->last_byte_bits_ = 0; |
| 999 |
| 1000 BrotliWriteBits(1, 0, &storage_ix, header); |
| 1001 BrotliWriteBits(2, 3, &storage_ix, header); |
| 1002 BrotliWriteBits(1, 0, &storage_ix, header); |
| 1003 if (block_size == 0) { |
| 1004 BrotliWriteBits(2, 0, &storage_ix, header); |
| 1005 } else { |
| 1006 uint32_t nbits = (block_size == 1) ? 0 : |
| 1007 (Log2FloorNonZero((uint32_t)block_size - 1) + 1); |
| 1008 uint32_t nbytes = (nbits + 7) / 8; |
| 1009 BrotliWriteBits(2, nbytes, &storage_ix, header); |
| 1010 BrotliWriteBits(8 * nbytes, block_size - 1, &storage_ix, header); |
| 1011 } |
| 1012 return (storage_ix + 7u) >> 3; |
| 1013 } |
| 1014 |
| 1015 static BROTLI_BOOL BrotliCompressBufferQuality10( |
| 1016 int lgwin, size_t input_size, const uint8_t* input_buffer, |
| 1017 size_t* encoded_size, uint8_t* encoded_buffer) { |
| 1018 MemoryManager memory_manager; |
| 1019 MemoryManager* m = &memory_manager; |
| 1020 |
| 1021 const size_t mask = BROTLI_SIZE_MAX >> 1; |
| 1022 const size_t max_backward_limit = MaxBackwardLimit(lgwin); |
| 1023 int dist_cache[4] = { 4, 11, 15, 16 }; |
| 1024 int saved_dist_cache[4] = { 4, 11, 15, 16 }; |
| 1025 BROTLI_BOOL ok = BROTLI_TRUE; |
| 1026 const size_t max_out_size = *encoded_size; |
| 1027 size_t total_out_size = 0; |
| 1028 uint8_t last_byte; |
| 1029 uint8_t last_byte_bits; |
| 1030 H10* hasher; |
| 1031 |
| 1032 const size_t hasher_eff_size = |
| 1033 BROTLI_MIN(size_t, input_size, max_backward_limit + BROTLI_WINDOW_GAP); |
| 1034 |
| 1035 BrotliEncoderParams params; |
| 1036 |
| 1037 const int lgmetablock = BROTLI_MIN(int, 24, lgwin + 1); |
| 1038 size_t max_block_size; |
| 1039 const size_t max_metablock_size = (size_t)1 << lgmetablock; |
| 1040 const size_t max_literals_per_metablock = max_metablock_size / 8; |
| 1041 const size_t max_commands_per_metablock = max_metablock_size / 8; |
| 1042 size_t metablock_start = 0; |
| 1043 uint8_t prev_byte = 0; |
| 1044 uint8_t prev_byte2 = 0; |
| 1045 |
| 1046 params.mode = BROTLI_DEFAULT_MODE; |
| 1047 params.quality = 10; |
| 1048 params.lgwin = lgwin; |
| 1049 params.lgblock = 0; |
| 1050 SanitizeParams(¶ms); |
| 1051 params.lgblock = ComputeLgBlock(¶ms); |
| 1052 max_block_size = (size_t)1 << params.lgblock; |
| 1053 |
| 1054 BrotliInitMemoryManager(m, 0, 0, 0); |
| 1055 |
| 1056 assert(input_size <= mask + 1); |
| 1057 EncodeWindowBits(lgwin, &last_byte, &last_byte_bits); |
| 1058 hasher = BROTLI_ALLOC(m, H10, 1); |
| 1059 if (BROTLI_IS_OOM(m)) goto oom; |
| 1060 InitializeH10(hasher); |
| 1061 InitH10(m, hasher, input_buffer, ¶ms, 0, hasher_eff_size, 1); |
| 1062 if (BROTLI_IS_OOM(m)) goto oom; |
| 1063 |
| 1064 while (ok && metablock_start < input_size) { |
| 1065 const size_t metablock_end = |
| 1066 BROTLI_MIN(size_t, input_size, metablock_start + max_metablock_size); |
| 1067 const size_t expected_num_commands = |
| 1068 (metablock_end - metablock_start) / 12 + 16; |
| 1069 Command* commands = 0; |
| 1070 size_t num_commands = 0; |
| 1071 size_t last_insert_len = 0; |
| 1072 size_t num_literals = 0; |
| 1073 size_t metablock_size = 0; |
| 1074 size_t cmd_alloc_size = 0; |
| 1075 BROTLI_BOOL is_last; |
| 1076 uint8_t* storage; |
| 1077 size_t storage_ix; |
| 1078 |
| 1079 size_t block_start; |
| 1080 for (block_start = metablock_start; block_start < metablock_end; ) { |
| 1081 size_t block_size = |
| 1082 BROTLI_MIN(size_t, metablock_end - block_start, max_block_size); |
| 1083 ZopfliNode* nodes = BROTLI_ALLOC(m, ZopfliNode, block_size + 1); |
| 1084 size_t path_size; |
| 1085 size_t new_cmd_alloc_size; |
| 1086 if (BROTLI_IS_OOM(m)) goto oom; |
| 1087 BrotliInitZopfliNodes(nodes, block_size + 1); |
| 1088 StitchToPreviousBlockH10(hasher, block_size, block_start, |
| 1089 input_buffer, mask); |
| 1090 path_size = BrotliZopfliComputeShortestPath( |
| 1091 m, block_size, block_start, input_buffer, mask, ¶ms, |
| 1092 max_backward_limit, dist_cache, hasher, nodes); |
| 1093 if (BROTLI_IS_OOM(m)) goto oom; |
| 1094 /* We allocate a command buffer in the first iteration of this loop that |
| 1095 will be likely big enough for the whole metablock, so that for most |
| 1096 inputs we will not have to reallocate in later iterations. We do the |
| 1097 allocation here and not before the loop, because if the input is small, |
| 1098 this will be allocated after the Zopfli cost model is freed, so this |
| 1099 will not increase peak memory usage. |
| 1100 TODO: If the first allocation is too small, increase command |
| 1101 buffer size exponentially. */ |
| 1102 new_cmd_alloc_size = BROTLI_MAX(size_t, expected_num_commands, |
| 1103 num_commands + path_size + 1); |
| 1104 if (cmd_alloc_size != new_cmd_alloc_size) { |
| 1105 Command* new_commands = BROTLI_ALLOC(m, Command, new_cmd_alloc_size); |
| 1106 if (BROTLI_IS_OOM(m)) goto oom; |
| 1107 cmd_alloc_size = new_cmd_alloc_size; |
| 1108 if (commands) { |
| 1109 memcpy(new_commands, commands, sizeof(Command) * num_commands); |
| 1110 BROTLI_FREE(m, commands); |
| 1111 } |
| 1112 commands = new_commands; |
| 1113 } |
| 1114 BrotliZopfliCreateCommands(block_size, block_start, max_backward_limit, |
| 1115 &nodes[0], dist_cache, &last_insert_len, |
| 1116 &commands[num_commands], &num_literals); |
| 1117 num_commands += path_size; |
| 1118 block_start += block_size; |
| 1119 metablock_size += block_size; |
| 1120 BROTLI_FREE(m, nodes); |
| 1121 if (num_literals > max_literals_per_metablock || |
| 1122 num_commands > max_commands_per_metablock) { |
| 1123 break; |
| 1124 } |
| 1125 } |
| 1126 |
| 1127 if (last_insert_len > 0) { |
| 1128 InitInsertCommand(&commands[num_commands++], last_insert_len); |
| 1129 num_literals += last_insert_len; |
| 1130 } |
| 1131 |
| 1132 is_last = TO_BROTLI_BOOL(metablock_start + metablock_size == input_size); |
| 1133 storage = NULL; |
| 1134 storage_ix = last_byte_bits; |
| 1135 |
| 1136 if (metablock_size == 0) { |
| 1137 /* Write the ISLAST and ISEMPTY bits. */ |
| 1138 storage = BROTLI_ALLOC(m, uint8_t, 16); |
| 1139 if (BROTLI_IS_OOM(m)) goto oom; |
| 1140 storage[0] = last_byte; |
| 1141 BrotliWriteBits(2, 3, &storage_ix, storage); |
| 1142 storage_ix = (storage_ix + 7u) & ~7u; |
| 1143 } else if (!ShouldCompress(input_buffer, mask, metablock_start, |
| 1144 metablock_size, num_literals, num_commands)) { |
| 1145 /* Restore the distance cache, as its last update by |
| 1146 CreateBackwardReferences is now unused. */ |
| 1147 memcpy(dist_cache, saved_dist_cache, 4 * sizeof(dist_cache[0])); |
| 1148 storage = BROTLI_ALLOC(m, uint8_t, metablock_size + 16); |
| 1149 if (BROTLI_IS_OOM(m)) goto oom; |
| 1150 storage[0] = last_byte; |
| 1151 BrotliStoreUncompressedMetaBlock(is_last, input_buffer, |
| 1152 metablock_start, mask, metablock_size, |
| 1153 &storage_ix, storage); |
| 1154 } else { |
| 1155 uint32_t num_direct_distance_codes = 0; |
| 1156 uint32_t distance_postfix_bits = 0; |
| 1157 ContextType literal_context_mode = CONTEXT_UTF8; |
| 1158 MetaBlockSplit mb; |
| 1159 InitMetaBlockSplit(&mb); |
| 1160 if (!BrotliIsMostlyUTF8(input_buffer, metablock_start, mask, |
| 1161 metablock_size, kMinUTF8Ratio)) { |
| 1162 literal_context_mode = CONTEXT_SIGNED; |
| 1163 } |
| 1164 BrotliBuildMetaBlock(m, input_buffer, metablock_start, mask, ¶ms, |
| 1165 prev_byte, prev_byte2, |
| 1166 commands, num_commands, |
| 1167 literal_context_mode, |
| 1168 &mb); |
| 1169 if (BROTLI_IS_OOM(m)) goto oom; |
| 1170 BrotliOptimizeHistograms(num_direct_distance_codes, |
| 1171 distance_postfix_bits, |
| 1172 &mb); |
| 1173 storage = BROTLI_ALLOC(m, uint8_t, 2 * metablock_size + 502); |
| 1174 if (BROTLI_IS_OOM(m)) goto oom; |
| 1175 storage[0] = last_byte; |
| 1176 BrotliStoreMetaBlock(m, input_buffer, metablock_start, metablock_size, |
| 1177 mask, prev_byte, prev_byte2, |
| 1178 is_last, |
| 1179 num_direct_distance_codes, |
| 1180 distance_postfix_bits, |
| 1181 literal_context_mode, |
| 1182 commands, num_commands, |
| 1183 &mb, |
| 1184 &storage_ix, storage); |
| 1185 if (BROTLI_IS_OOM(m)) goto oom; |
| 1186 if (metablock_size + 4 < (storage_ix >> 3)) { |
| 1187 /* Restore the distance cache and last byte. */ |
| 1188 memcpy(dist_cache, saved_dist_cache, 4 * sizeof(dist_cache[0])); |
| 1189 storage[0] = last_byte; |
| 1190 storage_ix = last_byte_bits; |
| 1191 BrotliStoreUncompressedMetaBlock(is_last, input_buffer, |
| 1192 metablock_start, mask, |
| 1193 metablock_size, &storage_ix, storage); |
| 1194 } |
| 1195 DestroyMetaBlockSplit(m, &mb); |
| 1196 } |
| 1197 last_byte = storage[storage_ix >> 3]; |
| 1198 last_byte_bits = storage_ix & 7u; |
| 1199 metablock_start += metablock_size; |
| 1200 prev_byte = input_buffer[metablock_start - 1]; |
| 1201 prev_byte2 = input_buffer[metablock_start - 2]; |
| 1202 /* Save the state of the distance cache in case we need to restore it for |
| 1203 emitting an uncompressed block. */ |
| 1204 memcpy(saved_dist_cache, dist_cache, 4 * sizeof(dist_cache[0])); |
| 1205 |
| 1206 { |
| 1207 const size_t out_size = storage_ix >> 3; |
| 1208 total_out_size += out_size; |
| 1209 if (total_out_size <= max_out_size) { |
| 1210 memcpy(encoded_buffer, storage, out_size); |
| 1211 encoded_buffer += out_size; |
| 1212 } else { |
| 1213 ok = BROTLI_FALSE; |
| 1214 } |
| 1215 } |
| 1216 BROTLI_FREE(m, storage); |
| 1217 BROTLI_FREE(m, commands); |
| 1218 } |
| 1219 |
| 1220 *encoded_size = total_out_size; |
| 1221 CleanupH10(m, hasher); |
| 1222 BROTLI_FREE(m, hasher); |
| 1223 return ok; |
| 1224 |
| 1225 oom: |
| 1226 BrotliWipeOutMemoryManager(m); |
| 1227 return BROTLI_FALSE; |
| 1228 } |
| 1229 |
| 1230 size_t BrotliEncoderMaxCompressedSize(size_t input_size) { |
| 1231 /* [window bits / empty metadata] + N * [uncompressed] + [last empty] */ |
| 1232 size_t num_large_blocks = input_size >> 24; |
| 1233 size_t tail = input_size - (num_large_blocks << 24); |
| 1234 size_t tail_overhead = (tail > (1 << 20)) ? 4 : 3; |
| 1235 size_t overhead = 2 + (4 * num_large_blocks) + tail_overhead + 1; |
| 1236 size_t result = input_size + overhead; |
| 1237 if (input_size == 0) return 1; |
| 1238 return (result < input_size) ? 0 : result; |
| 1239 } |
| 1240 |
| 1241 /* Wraps data to uncompressed brotli stream with minimal window size. |
| 1242 |output| should point at region with at least BrotliEncoderMaxCompressedSize |
| 1243 addressable bytes. |
| 1244 Returns the length of stream. */ |
| 1245 static size_t MakeUncompressedStream( |
| 1246 const uint8_t* input, size_t input_size, uint8_t* output) { |
| 1247 size_t size = input_size; |
| 1248 size_t result = 0; |
| 1249 size_t offset = 0; |
| 1250 if (input_size == 0) { |
| 1251 output[0] = 6; |
| 1252 return 1; |
| 1253 } |
| 1254 output[result++] = 0x21; /* window bits = 10, is_last = false */ |
| 1255 output[result++] = 0x03; /* empty metadata, padding */ |
| 1256 while (size > 0) { |
| 1257 uint32_t nibbles = 0; |
| 1258 uint32_t chunk_size; |
| 1259 uint32_t bits; |
| 1260 chunk_size = (size > (1u << 24)) ? (1u << 24) : (uint32_t)size; |
| 1261 if (chunk_size > (1u << 16)) nibbles = (chunk_size > (1u << 20)) ? 2 : 1; |
| 1262 bits = |
| 1263 (nibbles << 1) | ((chunk_size - 1) << 3) | (1u << (19 + 4 * nibbles)); |
| 1264 output[result++] = (uint8_t)bits; |
| 1265 output[result++] = (uint8_t)(bits >> 8); |
| 1266 output[result++] = (uint8_t)(bits >> 16); |
| 1267 if (nibbles == 2) output[result++] = (uint8_t)(bits >> 24); |
| 1268 memcpy(&output[result], &input[offset], chunk_size); |
| 1269 result += chunk_size; |
| 1270 offset += chunk_size; |
| 1271 size -= chunk_size; |
| 1272 } |
| 1273 output[result++] = 3; |
| 1274 return result; |
| 1275 } |
| 1276 |
| 1277 BROTLI_BOOL BrotliEncoderCompress( |
| 1278 int quality, int lgwin, BrotliEncoderMode mode, size_t input_size, |
| 1279 const uint8_t* input_buffer, size_t* encoded_size, |
| 1280 uint8_t* encoded_buffer) { |
| 1281 BrotliEncoderState* s; |
| 1282 size_t out_size = *encoded_size; |
| 1283 const uint8_t* input_start = input_buffer; |
| 1284 uint8_t* output_start = encoded_buffer; |
| 1285 size_t max_out_size = BrotliEncoderMaxCompressedSize(input_size); |
| 1286 if (out_size == 0) { |
| 1287 /* Output buffer needs at least one byte. */ |
| 1288 return BROTLI_FALSE; |
| 1289 } |
| 1290 if (input_size == 0) { |
| 1291 /* Handle the special case of empty input. */ |
| 1292 *encoded_size = 1; |
| 1293 *encoded_buffer = 6; |
| 1294 return BROTLI_TRUE; |
| 1295 } |
| 1296 if (quality == 10) { |
| 1297 /* TODO: Implement this direct path for all quality levels. */ |
| 1298 const int lg_win = BROTLI_MIN(int, BROTLI_MAX_WINDOW_BITS, |
| 1299 BROTLI_MAX(int, 16, lgwin)); |
| 1300 int ok = BrotliCompressBufferQuality10(lg_win, input_size, input_buffer, |
| 1301 encoded_size, encoded_buffer); |
| 1302 if (!ok || (max_out_size && *encoded_size > max_out_size)) { |
| 1303 goto fallback; |
| 1304 } |
| 1305 return BROTLI_TRUE; |
| 1306 } |
| 1307 |
| 1308 s = BrotliEncoderCreateInstance(0, 0, 0); |
| 1309 if (!s) { |
| 1310 return BROTLI_FALSE; |
| 1311 } else { |
| 1312 size_t available_in = input_size; |
| 1313 const uint8_t* next_in = input_buffer; |
| 1314 size_t available_out = *encoded_size; |
| 1315 uint8_t* next_out = encoded_buffer; |
| 1316 size_t total_out = 0; |
| 1317 BROTLI_BOOL result = BROTLI_FALSE; |
| 1318 BrotliEncoderSetParameter(s, BROTLI_PARAM_QUALITY, (uint32_t)quality); |
| 1319 BrotliEncoderSetParameter(s, BROTLI_PARAM_LGWIN, (uint32_t)lgwin); |
| 1320 BrotliEncoderSetParameter(s, BROTLI_PARAM_MODE, (uint32_t)mode); |
| 1321 result = BrotliEncoderCompressStream(s, BROTLI_OPERATION_FINISH, |
| 1322 &available_in, &next_in, &available_out, &next_out, &total_out); |
| 1323 if (!BrotliEncoderIsFinished(s)) result = 0; |
| 1324 *encoded_size = total_out; |
| 1325 BrotliEncoderDestroyInstance(s); |
| 1326 if (!result || (max_out_size && *encoded_size > max_out_size)) { |
| 1327 goto fallback; |
| 1328 } |
| 1329 return BROTLI_TRUE; |
| 1330 } |
| 1331 fallback: |
| 1332 *encoded_size = 0; |
| 1333 if (!max_out_size) return BROTLI_FALSE; |
| 1334 if (out_size >= max_out_size) { |
| 1335 *encoded_size = |
| 1336 MakeUncompressedStream(input_start, input_size, output_start); |
| 1337 return BROTLI_TRUE; |
| 1338 } |
| 1339 return BROTLI_FALSE; |
| 1340 } |
| 1341 |
| 1342 static void InjectBytePaddingBlock(BrotliEncoderState* s) { |
| 1343 uint32_t seal = s->last_byte_; |
| 1344 size_t seal_bits = s->last_byte_bits_; |
| 1345 uint8_t* destination; |
| 1346 s->last_byte_ = 0; |
| 1347 s->last_byte_bits_ = 0; |
| 1348 /* is_last = 0, data_nibbles = 11, reserved = 0, meta_nibbles = 00 */ |
| 1349 seal |= 0x6u << seal_bits; |
| 1350 seal_bits += 6; |
| 1351 /* If we have already created storage, then append to it. |
| 1352 Storage is valid until next block is being compressed. */ |
| 1353 if (s->next_out_) { |
| 1354 destination = s->next_out_ + s->available_out_; |
| 1355 } else { |
| 1356 destination = s->tiny_buf_.u8; |
| 1357 s->next_out_ = destination; |
| 1358 } |
| 1359 destination[0] = (uint8_t)seal; |
| 1360 if (seal_bits > 8) destination[1] = (uint8_t)(seal >> 8); |
| 1361 s->available_out_ += (seal_bits + 7) >> 3; |
| 1362 } |
| 1363 |
| 1364 /* Injects padding bits or pushes compressed data to output. |
| 1365 Returns false if nothing is done. */ |
| 1366 static BROTLI_BOOL InjectFlushOrPushOutput(BrotliEncoderState* s, |
| 1367 size_t* available_out, uint8_t** next_out, size_t* total_out) { |
| 1368 if (s->stream_state_ == BROTLI_STREAM_FLUSH_REQUESTED && |
| 1369 s->last_byte_bits_ != 0) { |
| 1370 InjectBytePaddingBlock(s); |
| 1371 return BROTLI_TRUE; |
| 1372 } |
| 1373 |
| 1374 if (s->available_out_ != 0 && *available_out != 0) { |
| 1375 size_t copy_output_size = |
| 1376 BROTLI_MIN(size_t, s->available_out_, *available_out); |
| 1377 memcpy(*next_out, s->next_out_, copy_output_size); |
| 1378 *next_out += copy_output_size; |
| 1379 *available_out -= copy_output_size; |
| 1380 s->next_out_ += copy_output_size; |
| 1381 s->available_out_ -= copy_output_size; |
| 1382 s->total_out_ += copy_output_size; |
| 1383 if (total_out) *total_out = s->total_out_; |
| 1384 return BROTLI_TRUE; |
| 1385 } |
| 1386 |
| 1387 return BROTLI_FALSE; |
| 1388 } |
| 1389 |
| 1390 static void CheckFlushComplete(BrotliEncoderState* s) { |
| 1391 if (s->stream_state_ == BROTLI_STREAM_FLUSH_REQUESTED && |
| 1392 s->available_out_ == 0) { |
| 1393 s->stream_state_ = BROTLI_STREAM_PROCESSING; |
| 1394 s->next_out_ = 0; |
| 1395 } |
| 1396 } |
| 1397 |
| 1398 static BROTLI_BOOL BrotliEncoderCompressStreamFast( |
| 1399 BrotliEncoderState* s, BrotliEncoderOperation op, size_t* available_in, |
| 1400 const uint8_t** next_in, size_t* available_out, uint8_t** next_out, |
| 1401 size_t* total_out) { |
| 1402 const size_t block_size_limit = (size_t)1 << s->params.lgwin; |
| 1403 const size_t buf_size = BROTLI_MIN(size_t, kCompressFragmentTwoPassBlockSize, |
| 1404 BROTLI_MIN(size_t, *available_in, block_size_limit)); |
| 1405 uint32_t* tmp_command_buf = NULL; |
| 1406 uint32_t* command_buf = NULL; |
| 1407 uint8_t* tmp_literal_buf = NULL; |
| 1408 uint8_t* literal_buf = NULL; |
| 1409 MemoryManager* m = &s->memory_manager_; |
| 1410 if (s->params.quality != FAST_ONE_PASS_COMPRESSION_QUALITY && |
| 1411 s->params.quality != FAST_TWO_PASS_COMPRESSION_QUALITY) { |
| 1412 return BROTLI_FALSE; |
| 1413 } |
| 1414 if (s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
| 1415 if (!s->command_buf_ && buf_size == kCompressFragmentTwoPassBlockSize) { |
| 1416 s->command_buf_ = |
| 1417 BROTLI_ALLOC(m, uint32_t, kCompressFragmentTwoPassBlockSize); |
| 1418 s->literal_buf_ = |
| 1419 BROTLI_ALLOC(m, uint8_t, kCompressFragmentTwoPassBlockSize); |
| 1420 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 1421 } |
| 1422 if (s->command_buf_) { |
| 1423 command_buf = s->command_buf_; |
| 1424 literal_buf = s->literal_buf_; |
| 1425 } else { |
| 1426 tmp_command_buf = BROTLI_ALLOC(m, uint32_t, buf_size); |
| 1427 tmp_literal_buf = BROTLI_ALLOC(m, uint8_t, buf_size); |
| 1428 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 1429 command_buf = tmp_command_buf; |
| 1430 literal_buf = tmp_literal_buf; |
| 1431 } |
| 1432 } |
| 1433 |
| 1434 while (BROTLI_TRUE) { |
| 1435 if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) { |
| 1436 continue; |
| 1437 } |
| 1438 |
| 1439 /* Compress block only when internal output buffer is empty, stream is not |
| 1440 finished, there is no pending flush request, and there is either |
| 1441 additional input or pending operation. */ |
| 1442 if (s->available_out_ == 0 && |
| 1443 s->stream_state_ == BROTLI_STREAM_PROCESSING && |
| 1444 (*available_in != 0 || op != BROTLI_OPERATION_PROCESS)) { |
| 1445 size_t block_size = BROTLI_MIN(size_t, block_size_limit, *available_in); |
| 1446 BROTLI_BOOL is_last = |
| 1447 (*available_in == block_size) && (op == BROTLI_OPERATION_FINISH); |
| 1448 BROTLI_BOOL force_flush = |
| 1449 (*available_in == block_size) && (op == BROTLI_OPERATION_FLUSH); |
| 1450 size_t max_out_size = 2 * block_size + 502; |
| 1451 BROTLI_BOOL inplace = BROTLI_TRUE; |
| 1452 uint8_t* storage = NULL; |
| 1453 size_t storage_ix = s->last_byte_bits_; |
| 1454 size_t table_size; |
| 1455 int* table; |
| 1456 |
| 1457 if (force_flush && block_size == 0) { |
| 1458 s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED; |
| 1459 continue; |
| 1460 } |
| 1461 if (max_out_size <= *available_out) { |
| 1462 storage = *next_out; |
| 1463 } else { |
| 1464 inplace = BROTLI_FALSE; |
| 1465 storage = GetBrotliStorage(s, max_out_size); |
| 1466 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 1467 } |
| 1468 storage[0] = s->last_byte_; |
| 1469 table = GetHashTable(s, s->params.quality, block_size, &table_size); |
| 1470 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 1471 |
| 1472 if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) { |
| 1473 BrotliCompressFragmentFast(m, *next_in, block_size, is_last, table, |
| 1474 table_size, s->cmd_depths_, s->cmd_bits_, &s->cmd_code_numbits_, |
| 1475 s->cmd_code_, &storage_ix, storage); |
| 1476 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 1477 } else { |
| 1478 BrotliCompressFragmentTwoPass(m, *next_in, block_size, is_last, |
| 1479 command_buf, literal_buf, table, table_size, |
| 1480 &storage_ix, storage); |
| 1481 if (BROTLI_IS_OOM(m)) return BROTLI_FALSE; |
| 1482 } |
| 1483 *next_in += block_size; |
| 1484 *available_in -= block_size; |
| 1485 if (inplace) { |
| 1486 size_t out_bytes = storage_ix >> 3; |
| 1487 assert(out_bytes <= *available_out); |
| 1488 assert((storage_ix & 7) == 0 || out_bytes < *available_out); |
| 1489 *next_out += out_bytes; |
| 1490 *available_out -= out_bytes; |
| 1491 s->total_out_ += out_bytes; |
| 1492 if (total_out) *total_out = s->total_out_; |
| 1493 } else { |
| 1494 size_t out_bytes = storage_ix >> 3; |
| 1495 s->next_out_ = storage; |
| 1496 s->available_out_ = out_bytes; |
| 1497 } |
| 1498 s->last_byte_ = storage[storage_ix >> 3]; |
| 1499 s->last_byte_bits_ = storage_ix & 7u; |
| 1500 |
| 1501 if (force_flush) s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED; |
| 1502 if (is_last) s->stream_state_ = BROTLI_STREAM_FINISHED; |
| 1503 continue; |
| 1504 } |
| 1505 break; |
| 1506 } |
| 1507 BROTLI_FREE(m, tmp_command_buf); |
| 1508 BROTLI_FREE(m, tmp_literal_buf); |
| 1509 CheckFlushComplete(s); |
| 1510 return BROTLI_TRUE; |
| 1511 } |
| 1512 |
| 1513 static BROTLI_BOOL ProcessMetadata( |
| 1514 BrotliEncoderState* s, size_t* available_in, const uint8_t** next_in, |
| 1515 size_t* available_out, uint8_t** next_out, size_t* total_out) { |
| 1516 if (*available_in > (1u << 24)) return BROTLI_FALSE; |
| 1517 /* Switch to metadata block workflow, if required. */ |
| 1518 if (s->stream_state_ == BROTLI_STREAM_PROCESSING) { |
| 1519 s->remaining_metadata_bytes_ = (uint32_t)*available_in; |
| 1520 s->stream_state_ = BROTLI_STREAM_METADATA_HEAD; |
| 1521 } |
| 1522 if (s->stream_state_ != BROTLI_STREAM_METADATA_HEAD && |
| 1523 s->stream_state_ != BROTLI_STREAM_METADATA_BODY) { |
| 1524 return BROTLI_FALSE; |
| 1525 } |
| 1526 |
| 1527 while (BROTLI_TRUE) { |
| 1528 if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) { |
| 1529 continue; |
| 1530 } |
| 1531 if (s->available_out_ != 0) break; |
| 1532 |
| 1533 if (s->input_pos_ != s->last_flush_pos_) { |
| 1534 BROTLI_BOOL result = EncodeData(s, BROTLI_FALSE, BROTLI_TRUE, |
| 1535 &s->available_out_, &s->next_out_); |
| 1536 if (!result) return BROTLI_FALSE; |
| 1537 continue; |
| 1538 } |
| 1539 |
| 1540 if (s->stream_state_ == BROTLI_STREAM_METADATA_HEAD) { |
| 1541 s->next_out_ = s->tiny_buf_.u8; |
| 1542 s->available_out_ = |
| 1543 WriteMetadataHeader(s, s->remaining_metadata_bytes_, s->next_out_); |
| 1544 s->stream_state_ = BROTLI_STREAM_METADATA_BODY; |
| 1545 continue; |
| 1546 } else { |
| 1547 /* Exit workflow only when there is no more input and no more output. |
| 1548 Otherwise client may continue producing empty metadata blocks. */ |
| 1549 if (s->remaining_metadata_bytes_ == 0) { |
| 1550 s->remaining_metadata_bytes_ = BROTLI_UINT32_MAX; |
| 1551 s->stream_state_ = BROTLI_STREAM_PROCESSING; |
| 1552 break; |
| 1553 } |
| 1554 if (*available_out) { |
| 1555 /* Directly copy input to output. */ |
| 1556 uint32_t copy = (uint32_t)BROTLI_MIN( |
| 1557 size_t, s->remaining_metadata_bytes_, *available_out); |
| 1558 memcpy(*next_out, *next_in, copy); |
| 1559 *next_in += copy; |
| 1560 *available_in -= copy; |
| 1561 s->remaining_metadata_bytes_ -= copy; |
| 1562 *next_out += copy; |
| 1563 *available_out -= copy; |
| 1564 } else { |
| 1565 /* This guarantees progress in "TakeOutput" workflow. */ |
| 1566 uint32_t copy = BROTLI_MIN(uint32_t, s->remaining_metadata_bytes_, 16); |
| 1567 s->next_out_ = s->tiny_buf_.u8; |
| 1568 memcpy(s->next_out_, *next_in, copy); |
| 1569 *next_in += copy; |
| 1570 *available_in -= copy; |
| 1571 s->remaining_metadata_bytes_ -= copy; |
| 1572 s->available_out_ = copy; |
| 1573 } |
| 1574 continue; |
| 1575 } |
| 1576 } |
| 1577 |
| 1578 return BROTLI_TRUE; |
| 1579 } |
| 1580 |
| 1581 BROTLI_BOOL BrotliEncoderCompressStream( |
| 1582 BrotliEncoderState* s, BrotliEncoderOperation op, size_t* available_in, |
| 1583 const uint8_t** next_in, size_t* available_out,uint8_t** next_out, |
| 1584 size_t* total_out) { |
| 1585 if (!EnsureInitialized(s)) return BROTLI_FALSE; |
| 1586 |
| 1587 /* Unfinished metadata block; check requirements. */ |
| 1588 if (s->remaining_metadata_bytes_ != BROTLI_UINT32_MAX) { |
| 1589 if (*available_in != s->remaining_metadata_bytes_) return BROTLI_FALSE; |
| 1590 if (op != BROTLI_OPERATION_EMIT_METADATA) return BROTLI_FALSE; |
| 1591 } |
| 1592 |
| 1593 if (op == BROTLI_OPERATION_EMIT_METADATA) { |
| 1594 return ProcessMetadata( |
| 1595 s, available_in, next_in, available_out, next_out, total_out); |
| 1596 } |
| 1597 |
| 1598 if (s->stream_state_ == BROTLI_STREAM_METADATA_HEAD || |
| 1599 s->stream_state_ == BROTLI_STREAM_METADATA_BODY) { |
| 1600 return BROTLI_FALSE; |
| 1601 } |
| 1602 |
| 1603 if (s->stream_state_ != BROTLI_STREAM_PROCESSING && *available_in != 0) { |
| 1604 return BROTLI_FALSE; |
| 1605 } |
| 1606 if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY || |
| 1607 s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) { |
| 1608 return BrotliEncoderCompressStreamFast(s, op, available_in, next_in, |
| 1609 available_out, next_out, total_out); |
| 1610 } |
| 1611 while (BROTLI_TRUE) { |
| 1612 size_t remaining_block_size = RemainingInputBlockSize(s); |
| 1613 |
| 1614 if (remaining_block_size != 0 && *available_in != 0) { |
| 1615 size_t copy_input_size = |
| 1616 BROTLI_MIN(size_t, remaining_block_size, *available_in); |
| 1617 CopyInputToRingBuffer(s, copy_input_size, *next_in); |
| 1618 *next_in += copy_input_size; |
| 1619 *available_in -= copy_input_size; |
| 1620 continue; |
| 1621 } |
| 1622 |
| 1623 if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) { |
| 1624 continue; |
| 1625 } |
| 1626 |
| 1627 /* Compress data only when internal output buffer is empty, stream is not |
| 1628 finished and there is no pending flush request. */ |
| 1629 if (s->available_out_ == 0 && |
| 1630 s->stream_state_ == BROTLI_STREAM_PROCESSING) { |
| 1631 if (remaining_block_size == 0 || op != BROTLI_OPERATION_PROCESS) { |
| 1632 BROTLI_BOOL is_last = TO_BROTLI_BOOL( |
| 1633 (*available_in == 0) && op == BROTLI_OPERATION_FINISH); |
| 1634 BROTLI_BOOL force_flush = TO_BROTLI_BOOL( |
| 1635 (*available_in == 0) && op == BROTLI_OPERATION_FLUSH); |
| 1636 BROTLI_BOOL result = EncodeData(s, is_last, force_flush, |
| 1637 &s->available_out_, &s->next_out_); |
| 1638 if (!result) return BROTLI_FALSE; |
| 1639 if (force_flush) s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED; |
| 1640 if (is_last) s->stream_state_ = BROTLI_STREAM_FINISHED; |
| 1641 continue; |
| 1642 } |
| 1643 } |
| 1644 break; |
| 1645 } |
| 1646 CheckFlushComplete(s); |
| 1647 return BROTLI_TRUE; |
| 1648 } |
| 1649 |
| 1650 BROTLI_BOOL BrotliEncoderIsFinished(BrotliEncoderState* s) { |
| 1651 return TO_BROTLI_BOOL(s->stream_state_ == BROTLI_STREAM_FINISHED && |
| 1652 !BrotliEncoderHasMoreOutput(s)); |
| 1653 } |
| 1654 |
| 1655 BROTLI_BOOL BrotliEncoderHasMoreOutput(BrotliEncoderState* s) { |
| 1656 return TO_BROTLI_BOOL(s->available_out_ != 0); |
| 1657 } |
| 1658 |
| 1659 const uint8_t* BrotliEncoderTakeOutput(BrotliEncoderState* s, size_t* size) { |
| 1660 size_t consumed_size = s->available_out_; |
| 1661 uint8_t* result = s->next_out_; |
| 1662 if (*size) { |
| 1663 consumed_size = BROTLI_MIN(size_t, *size, s->available_out_); |
| 1664 } |
| 1665 if (consumed_size) { |
| 1666 s->next_out_ += consumed_size; |
| 1667 s->available_out_ -= consumed_size; |
| 1668 s->total_out_ += consumed_size; |
| 1669 CheckFlushComplete(s); |
| 1670 *size = consumed_size; |
| 1671 } else { |
| 1672 *size = 0; |
| 1673 result = 0; |
| 1674 } |
| 1675 return result; |
| 1676 } |
| 1677 |
| 1678 uint32_t BrotliEncoderVersion(void) { |
| 1679 return BROTLI_VERSION; |
| 1680 } |
| 1681 |
| 1682 |
| 1683 /* DEPRECATED >>> */ |
| 1684 size_t BrotliEncoderInputBlockSize(BrotliEncoderState* s) { |
| 1685 return InputBlockSize(s); |
| 1686 } |
| 1687 void BrotliEncoderCopyInputToRingBuffer(BrotliEncoderState* s, |
| 1688 const size_t input_size, |
| 1689 const uint8_t* input_buffer) { |
| 1690 CopyInputToRingBuffer(s, input_size, input_buffer); |
| 1691 } |
| 1692 BROTLI_BOOL BrotliEncoderWriteData( |
| 1693 BrotliEncoderState* s, const BROTLI_BOOL is_last, |
| 1694 const BROTLI_BOOL force_flush, size_t* out_size, uint8_t** output) { |
| 1695 return EncodeData(s, is_last, force_flush, out_size, output); |
| 1696 } |
| 1697 /* <<< DEPRECATED */ |
| 1698 |
| 1699 #if defined(__cplusplus) || defined(c_plusplus) |
| 1700 } /* extern "C" */ |
| 1701 #endif |
OLD | NEW |