| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. | 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 | 11 |
| 12 /* This code is in the public domain. | 12 /* This code is in the public domain. |
| 13 ** Version: 1.1 Author: Walt Karas | 13 ** Version: 1.1 Author: Walt Karas |
| 14 */ | 14 */ |
| 15 | 15 |
| 16 #include "hmm_intrnl.h" | 16 #include "hmm_intrnl.h" |
| 17 | 17 |
| 18 void U(init)(U(descriptor) *desc) | 18 void U(init)(U(descriptor) *desc) { |
| 19 { | 19 desc->avl_tree_root = 0; |
| 20 desc->avl_tree_root = 0; | 20 desc->last_freed = 0; |
| 21 desc->last_freed = 0; | |
| 22 } | 21 } |
| 23 | 22 |
| 24 /* Remove a free block from a bin's doubly-linked list when it is not | 23 /* Remove a free block from a bin's doubly-linked list when it is not |
| 25 ** the first block in the bin. | 24 ** the first block in the bin. |
| 26 */ | 25 */ |
| 27 void U(dll_remove)( | 26 void U(dll_remove)( |
| 28 /* Pointer to pointer record in the block to be removed. */ | 27 /* Pointer to pointer record in the block to be removed. */ |
| 29 ptr_record *to_remove) | 28 ptr_record *to_remove) { |
| 30 { | 29 to_remove->prev->next = to_remove->next; |
| 31 to_remove->prev->next = to_remove->next; | |
| 32 | 30 |
| 33 if (to_remove->next) | 31 if (to_remove->next) |
| 34 to_remove->next->prev = to_remove->prev; | 32 to_remove->next->prev = to_remove->prev; |
| 35 } | 33 } |
| 36 | 34 |
| 37 /* Put a block into the free collection of a heap. | 35 /* Put a block into the free collection of a heap. |
| 38 */ | 36 */ |
| 39 void U(into_free_collection)( | 37 void U(into_free_collection)( |
| 40 /* Pointer to heap descriptor. */ | 38 /* Pointer to heap descriptor. */ |
| 41 U(descriptor) *desc, | 39 U(descriptor) *desc, |
| 42 /* Pointer to head record of block. */ | 40 /* Pointer to head record of block. */ |
| 43 head_record *head_ptr) | 41 head_record *head_ptr) { |
| 44 { | 42 ptr_record *ptr_rec_ptr = HEAD_TO_PTR_REC(head_ptr); |
| 45 ptr_record *ptr_rec_ptr = HEAD_TO_PTR_REC(head_ptr); | |
| 46 | 43 |
| 47 ptr_record *bin_front_ptr = | 44 ptr_record *bin_front_ptr = |
| 48 U(avl_insert)((U(avl_avl) *) & (desc->avl_tree_root), ptr_rec_ptr); | 45 U(avl_insert)((U(avl_avl) *) & (desc->avl_tree_root), ptr_rec_ptr); |
| 49 | 46 |
| 50 if (bin_front_ptr != ptr_rec_ptr) | 47 if (bin_front_ptr != ptr_rec_ptr) { |
| 51 { | 48 /* The block was not inserted into the AVL tree because there is |
| 52 /* The block was not inserted into the AVL tree because there is | 49 ** already a bin for the size of the block. */ |
| 53 ** already a bin for the size of the block. */ | |
| 54 | 50 |
| 55 MARK_SUCCESSIVE_BLOCK_IN_FREE_BIN(head_ptr) | 51 MARK_SUCCESSIVE_BLOCK_IN_FREE_BIN(head_ptr) |
| 56 ptr_rec_ptr->self = ptr_rec_ptr; | 52 ptr_rec_ptr->self = ptr_rec_ptr; |
| 57 | 53 |
| 58 /* Make the block the new second block in the bin's doubly-linked | 54 /* Make the block the new second block in the bin's doubly-linked |
| 59 ** list. */ | 55 ** list. */ |
| 60 ptr_rec_ptr->prev = bin_front_ptr; | 56 ptr_rec_ptr->prev = bin_front_ptr; |
| 61 ptr_rec_ptr->next = bin_front_ptr->next; | 57 ptr_rec_ptr->next = bin_front_ptr->next; |
| 62 bin_front_ptr->next = ptr_rec_ptr; | 58 bin_front_ptr->next = ptr_rec_ptr; |
| 63 | 59 |
| 64 if (ptr_rec_ptr->next) | 60 if (ptr_rec_ptr->next) |
| 65 ptr_rec_ptr->next->prev = ptr_rec_ptr; | 61 ptr_rec_ptr->next->prev = ptr_rec_ptr; |
| 66 } | 62 } else |
| 67 else | 63 /* Block is first block in new bin. */ |
| 68 /* Block is first block in new bin. */ | 64 ptr_rec_ptr->next = 0; |
| 69 ptr_rec_ptr->next = 0; | |
| 70 } | 65 } |
| 71 | 66 |
| 72 /* Allocate a block from a given bin. Returns a pointer to the payload | 67 /* Allocate a block from a given bin. Returns a pointer to the payload |
| 73 ** of the removed block. The "last freed" pointer must be null prior | 68 ** of the removed block. The "last freed" pointer must be null prior |
| 74 ** to calling this function. | 69 ** to calling this function. |
| 75 */ | 70 */ |
| 76 void *U(alloc_from_bin)( | 71 void *U(alloc_from_bin)( |
| 77 /* Pointer to heap descriptor. */ | 72 /* Pointer to heap descriptor. */ |
| 78 U(descriptor) *desc, | 73 U(descriptor) *desc, |
| 79 /* Pointer to pointer record of first block in bin. */ | 74 /* Pointer to pointer record of first block in bin. */ |
| 80 ptr_record *bin_front_ptr, | 75 ptr_record *bin_front_ptr, |
| 81 /* Number of BAUs needed in the allocated block. If the block taken | 76 /* Number of BAUs needed in the allocated block. If the block taken |
| 82 ** from the bin is significantly larger than the number of BAUs needed, | 77 ** from the bin is significantly larger than the number of BAUs needed, |
| 83 ** the "extra" BAUs are split off to form a new free block. */ | 78 ** the "extra" BAUs are split off to form a new free block. */ |
| 84 U(size_bau) n_baus) | 79 U(size_bau) n_baus) { |
| 85 { | 80 head_record *head_ptr; |
| 86 head_record *head_ptr; | 81 U(size_bau) rem_baus; |
| 87 U(size_bau) rem_baus; | 82 |
| 88 | 83 if (bin_front_ptr->next) { |
| 89 if (bin_front_ptr->next) | 84 /* There are multiple blocks in this bin. Use the 2nd block in |
| 90 { | 85 ** the bin to avoid needless change to the AVL tree. |
| 91 /* There are multiple blocks in this bin. Use the 2nd block in | 86 */ |
| 92 ** the bin to avoid needless change to the AVL tree. | 87 |
| 93 */ | 88 ptr_record *ptr_rec_ptr = bin_front_ptr->next; |
| 94 | 89 head_ptr = PTR_REC_TO_HEAD(ptr_rec_ptr); |
| 95 ptr_record *ptr_rec_ptr = bin_front_ptr->next; | |
| 96 head_ptr = PTR_REC_TO_HEAD(ptr_rec_ptr); | |
| 97 | 90 |
| 98 #ifdef AUDIT_FAIL | 91 #ifdef AUDIT_FAIL |
| 99 AUDIT_BLOCK(head_ptr) | 92 AUDIT_BLOCK(head_ptr) |
| 100 #endif | 93 #endif |
| 101 | 94 |
| 102 U(dll_remove)(ptr_rec_ptr); | 95 U(dll_remove)(ptr_rec_ptr); |
| 103 } | 96 } else { |
| 104 else | 97 /* There is only one block in the bin, so it has to be removed |
| 105 { | 98 ** from the AVL tree. |
| 106 /* There is only one block in the bin, so it has to be removed | 99 */ |
| 107 ** from the AVL tree. | 100 |
| 108 */ | 101 head_ptr = PTR_REC_TO_HEAD(bin_front_ptr); |
| 109 | 102 |
| 110 head_ptr = PTR_REC_TO_HEAD(bin_front_ptr); | 103 U(avl_remove)( |
| 111 | 104 (U(avl_avl) *) & (desc->avl_tree_root), BLOCK_BAUS(head_ptr)); |
| 112 U(avl_remove)( | 105 } |
| 113 (U(avl_avl) *) &(desc->avl_tree_root), BLOCK_BAUS(head_ptr)); | 106 |
| 114 } | 107 MARK_BLOCK_ALLOCATED(head_ptr) |
| 115 | 108 |
| 116 MARK_BLOCK_ALLOCATED(head_ptr) | 109 rem_baus = BLOCK_BAUS(head_ptr) - n_baus; |
| 117 | 110 |
| 118 rem_baus = BLOCK_BAUS(head_ptr) - n_baus; | 111 if (rem_baus >= MIN_BLOCK_BAUS) { |
| 119 | 112 /* Since there are enough "extra" BAUs, split them off to form |
| 120 if (rem_baus >= MIN_BLOCK_BAUS) | 113 ** a new free block. |
| 121 { | 114 */ |
| 122 /* Since there are enough "extra" BAUs, split them off to form | 115 |
| 123 ** a new free block. | 116 head_record *rem_head_ptr = |
| 124 */ | 117 (head_record *) BAUS_FORWARD(head_ptr, n_baus); |
| 125 | 118 |
| 126 head_record *rem_head_ptr = | 119 /* Change the next block's header to reflect the fact that the |
| 127 (head_record *) BAUS_FORWARD(head_ptr, n_baus); | 120 ** block preceeding it is now smaller. |
| 128 | 121 */ |
| 129 /* Change the next block's header to reflect the fact that the | 122 SET_PREV_BLOCK_BAUS( |
| 130 ** block preceding it is now smaller. | 120 ** block preceding it is now smaller. |
| 131 */ | 124 |
| 132 SET_PREV_BLOCK_BAUS( | 125 head_ptr->block_size = n_baus; |
| 133 BAUS_FORWARD(head_ptr, head_ptr->block_size), rem_baus) | 126 |
| 134 | 127 rem_head_ptr->previous_block_size = n_baus; |
| 135 head_ptr->block_size = n_baus; | 128 rem_head_ptr->block_size = rem_baus; |
| 136 | 129 |
| 137 rem_head_ptr->previous_block_size = n_baus; | 130 desc->last_freed = rem_head_ptr; |
| 138 rem_head_ptr->block_size = rem_baus; | 131 } |
| 139 | 132 |
| 140 desc->last_freed = rem_head_ptr; | 133 return(HEAD_TO_PTR_REC(head_ptr)); |
| 141 } | |
| 142 | |
| 143 return(HEAD_TO_PTR_REC(head_ptr)); | |
| 144 } | 134 } |
| 145 | 135 |
| 146 /* Take a block out of the free collection. | 136 /* Take a block out of the free collection. |
| 147 */ | 137 */ |
| 148 void U(out_of_free_collection)( | 138 void U(out_of_free_collection)( |
| 149 /* Descriptor of heap that block is in. */ | 139 /* Descriptor of heap that block is in. */ |
| 150 U(descriptor) *desc, | 140 U(descriptor) *desc, |
| 151 /* Pointer to head of block to take out of free collection. */ | 141 /* Pointer to head of block to take out of free collection. */ |
| 152 head_record *head_ptr) | 142 head_record *head_ptr) { |
| 153 { | 143 ptr_record *ptr_rec_ptr = HEAD_TO_PTR_REC(head_ptr); |
| 154 ptr_record *ptr_rec_ptr = HEAD_TO_PTR_REC(head_ptr); | 144 |
| 155 | 145 if (ptr_rec_ptr->self == ptr_rec_ptr) |
| 156 if (ptr_rec_ptr->self == ptr_rec_ptr) | 146 /* Block is not the front block in its bin, so all we have to |
| 157 /* Block is not the front block in its bin, so all we have to | 147 ** do is take it out of the bin's doubly-linked list. */ |
| 158 ** do is take it out of the bin's doubly-linked list. */ | 148 U(dll_remove)(ptr_rec_ptr); |
| 159 U(dll_remove)(ptr_rec_ptr); | 149 else { |
| 150 ptr_record *next = ptr_rec_ptr->next; |
| 151 |
| 152 if (next) |
| 153 /* Block is the front block in its bin, and there is at least |
| 154 ** one other block in the bin. Substitute the next block for |
| 155 ** the front block. */ |
| 156 U(avl_subst)((U(avl_avl) *) & (desc->avl_tree_root), next); |
| 160 else | 157 else |
| 161 { | 158 /* Block is the front block in its bin, but there is no other |
| 162 ptr_record *next = ptr_rec_ptr->next; | 159 ** block in the bin. Eliminate the bin. */ |
| 163 | 160 U(avl_remove)( |
| 164 if (next) | 161 (U(avl_avl) *) & (desc->avl_tree_root), BLOCK_BAUS(head_ptr)); |
| 165 /* Block is the front block in its bin, and there is at least | 162 } |
| 166 ** one other block in the bin. Substitute the next block for | 163 } |
| 167 ** the front block. */ | 164 |
| 168 U(avl_subst)((U(avl_avl) *) &(desc->avl_tree_root), next); | 165 void U(free)(U(descriptor) *desc, void *payload_ptr) { |
| 169 else | 166 /* Flags if coalesce with adjacent block. */ |
| 170 /* Block is the front block in its bin, but there is no other | 167 int coalesce; |
| 171 ** block in the bin. Eliminate the bin. */ | 168 |
| 172 U(avl_remove)( | 169 head_record *fwd_head_ptr; |
| 173 (U(avl_avl) *) &(desc->avl_tree_root), BLOCK_BAUS(head_ptr)); | 170 head_record *free_head_ptr = PTR_REC_TO_HEAD(payload_ptr); |
| 174 } | 171 |
| 175 } | 172 desc->num_baus_can_shrink = 0; |
| 176 | 173 |
| 177 void U(free)(U(descriptor) *desc, void *payload_ptr) | 174 #ifdef HMM_AUDIT_FAIL |
| 178 { | 175 |
| 179 /* Flags if coalesce with adjacent block. */ | 176 AUDIT_BLOCK(free_head_ptr) |
| 180 int coalesce; | 177 |
| 181 | 178 /* Make sure not freeing an already free block. */ |
| 182 head_record *fwd_head_ptr; | 179 if (!IS_BLOCK_ALLOCATED(free_head_ptr)) |
| 183 head_record *free_head_ptr = PTR_REC_TO_HEAD(payload_ptr); | 180 HMM_AUDIT_FAIL |
| 184 | 181 |
| 185 desc->num_baus_can_shrink = 0; | 182 if (desc->avl_tree_root) |
| 186 | 183 /* Audit root block in AVL tree. */ |
| 187 #ifdef HMM_AUDIT_FAIL | 184 AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root)) |
| 188 | 185 |
| 189 AUDIT_BLOCK(free_head_ptr) | 186 #endif |
| 190 | 187 |
| 191 /* Make sure not freeing an already free block. */ | 188 fwd_head_ptr = |
| 192 if (!IS_BLOCK_ALLOCATED(free_head_ptr)) | 189 (head_record *) BAUS_FORWARD(free_head_ptr, free_head_ptr->block_size); |
| 193 HMM_AUDIT_FAIL | 190 |
| 194 | 191 if (free_head_ptr->previous_block_size) { |
| 195 if (desc->avl_tree_root) | 192 /* Coalesce with backward block if possible. */ |
| 196 /* Audit root block in AVL tree. */ | 193 |
| 197 AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root)) | 194 head_record *bkwd_head_ptr = |
| 198 | 195 (head_record *) BAUS_BACKWARD( |
| 199 #endif | 196 free_head_ptr, free_head_ptr->previous_block_size); |
| 200 | 197 |
| 201 fwd_head_ptr = | 198 #ifdef HMM_AUDIT_FAIL |
| 202 (head_record *) BAUS_FORWARD(free_head_ptr, free_head_ptr->block_size); | 199 AUDIT_BLOCK(bkwd_head_ptr) |
| 203 | 200 #endif |
| 204 if (free_head_ptr->previous_block_size) | 201 |
| 205 { | 202 if (bkwd_head_ptr == (head_record *)(desc->last_freed)) { |
| 206 /* Coalesce with backward block if possible. */ | 203 desc->last_freed = 0; |
| 207 | 204 coalesce = 1; |
| 208 head_record *bkwd_head_ptr = | 205 } else if (IS_BLOCK_ALLOCATED(bkwd_head_ptr)) |
| 209 (head_record *) BAUS_BACKWARD( | 206 coalesce = 0; |
| 210 free_head_ptr, free_head_ptr->previous_block_size); | 207 else { |
| 211 | 208 U(out_of_free_collection)(desc, bkwd_head_ptr); |
| 212 #ifdef HMM_AUDIT_FAIL | 209 coalesce = 1; |
| 213 AUDIT_BLOCK(bkwd_head_ptr) | 210 } |
| 214 #endif | 211 |
| 215 | 212 if (coalesce) { |
| 216 if (bkwd_head_ptr == (head_record *)(desc->last_freed)) | 213 bkwd_head_ptr->block_size += free_head_ptr->block_size; |
| 217 { | 214 SET_PREV_BLOCK_BAUS(fwd_head_ptr, BLOCK_BAUS(bkwd_head_ptr)) |
| 218 desc->last_freed = 0; | 215 free_head_ptr = bkwd_head_ptr; |
| 219 coalesce = 1; | 216 } |
| 220 } | 217 } |
| 221 else if (IS_BLOCK_ALLOCATED(bkwd_head_ptr)) | 218 |
| 222 coalesce = 0; | 219 if (fwd_head_ptr->block_size == 0) { |
| 223 else | 220 /* Block to be freed is last block before dummy end-of-chunk block. */ |
| 224 { | 221 desc->end_of_shrinkable_chunk = |
| 225 U(out_of_free_collection)(desc, bkwd_head_ptr); | 222 BAUS_FORWARD(fwd_head_ptr, DUMMY_END_BLOCK_BAUS); |
| 226 coalesce = 1; | 223 desc->num_baus_can_shrink = BLOCK_BAUS(free_head_ptr); |
| 227 } | 224 |
| 228 | 225 if (PREV_BLOCK_BAUS(free_head_ptr) == 0) |
| 229 if (coalesce) | 226 /* Free block is the entire chunk, so shrinking can eliminate |
| 230 { | 227 ** entire chunk including dummy end block. */ |
| 231 bkwd_head_ptr->block_size += free_head_ptr->block_size; | 228 desc->num_baus_can_shrink += DUMMY_END_BLOCK_BAUS; |
| 232 SET_PREV_BLOCK_BAUS(fwd_head_ptr, BLOCK_BAUS(bkwd_head_ptr)) | 229 } else { |
| 233 free_head_ptr = bkwd_head_ptr; | 230 /* Coalesce with forward block if possible. */ |
| 234 } | 231 |
| 235 } | 232 #ifdef HMM_AUDIT_FAIL |
| 236 | 233 AUDIT_BLOCK(fwd_head_ptr) |
| 237 if (fwd_head_ptr->block_size == 0) | 234 #endif |
| 238 { | 235 |
| 239 /* Block to be freed is last block before dummy end-of-chunk block. */ | 236 if (fwd_head_ptr == (head_record *)(desc->last_freed)) { |
| 237 desc->last_freed = 0; |
| 238 coalesce = 1; |
| 239 } else if (IS_BLOCK_ALLOCATED(fwd_head_ptr)) |
| 240 coalesce = 0; |
| 241 else { |
| 242 U(out_of_free_collection)(desc, fwd_head_ptr); |
| 243 coalesce = 1; |
| 244 } |
| 245 |
| 246 if (coalesce) { |
| 247 free_head_ptr->block_size += fwd_head_ptr->block_size; |
| 248 |
| 249 fwd_head_ptr = |
| 250 (head_record *) BAUS_FORWARD( |
| 251 fwd_head_ptr, BLOCK_BAUS(fwd_head_ptr)); |
| 252 |
| 253 SET_PREV_BLOCK_BAUS(fwd_head_ptr, BLOCK_BAUS(free_head_ptr)) |
| 254 |
| 255 if (fwd_head_ptr->block_size == 0) { |
| 256 /* Coalesced block to be freed is last block before dummy |
| 257 ** end-of-chunk block. */ |
| 240 desc->end_of_shrinkable_chunk = | 258 desc->end_of_shrinkable_chunk = |
| 241 BAUS_FORWARD(fwd_head_ptr, DUMMY_END_BLOCK_BAUS); | 259 BAUS_FORWARD(fwd_head_ptr, DUMMY_END_BLOCK_BAUS); |
| 242 desc->num_baus_can_shrink = BLOCK_BAUS(free_head_ptr); | 260 desc->num_baus_can_shrink = BLOCK_BAUS(free_head_ptr); |
| 243 | 261 |
| 244 if (PREV_BLOCK_BAUS(free_head_ptr) == 0) | 262 if (PREV_BLOCK_BAUS(free_head_ptr) == 0) |
| 245 /* Free block is the entire chunk, so shrinking can eliminate | 263 /* Free block is the entire chunk, so shrinking can |
| 246 ** entire chunk including dummy end block. */ | 264 ** eliminate entire chunk including dummy end block. */ |
| 247 desc->num_baus_can_shrink += DUMMY_END_BLOCK_BAUS; | 265 desc->num_baus_can_shrink += DUMMY_END_BLOCK_BAUS; |
| 248 } | 266 } |
| 249 else | 267 } |
| 250 { | 268 } |
| 251 /* Coalesce with forward block if possible. */ | 269 |
| 252 | 270 if (desc->last_freed) { |
| 253 #ifdef HMM_AUDIT_FAIL | 271 /* There is a last freed block, but it is not adjacent to the |
| 254 AUDIT_BLOCK(fwd_head_ptr) | 272 ** block being freed by this call to free, so put the last |
| 255 #endif | 273 ** freed block into the free collection. |
| 256 | 274 */ |
| 257 if (fwd_head_ptr == (head_record *)(desc->last_freed)) | 275 |
| 258 { | 276 #ifdef HMM_AUDIT_FAIL |
| 259 desc->last_freed = 0; | 277 AUDIT_BLOCK(desc->last_freed) |
| 260 coalesce = 1; | 278 #endif |
| 261 } | 279 |
| 262 else if (IS_BLOCK_ALLOCATED(fwd_head_ptr)) | 280 U(into_free_collection)(desc, (head_record *)(desc->last_freed)); |
| 263 coalesce = 0; | 281 } |
| 264 else | 282 |
| 265 { | 283 desc->last_freed = free_head_ptr; |
| 266 U(out_of_free_collection)(desc, fwd_head_ptr); | 284 } |
| 267 coalesce = 1; | 285 |
| 268 } | 286 void U(new_chunk)(U(descriptor) *desc, void *start, U(size_bau) n_baus) { |
| 269 | 287 #ifdef HMM_AUDIT_FAIL |
| 270 if (coalesce) | 288 |
| 271 { | 289 if (desc->avl_tree_root) |
| 272 free_head_ptr->block_size += fwd_head_ptr->block_size; | 290 /* Audit root block in AVL tree. */ |
| 273 | 291 AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root)) |
| 274 fwd_head_ptr = | |
| 275 (head_record *) BAUS_FORWARD( | |
| 276 fwd_head_ptr, BLOCK_BAUS(fwd_head_ptr)); | |
| 277 | |
| 278 SET_PREV_BLOCK_BAUS(fwd_head_ptr, BLOCK_BAUS(free_head_ptr)) | |
| 279 | |
| 280 if (fwd_head_ptr->block_size == 0) | |
| 281 { | |
| 282 /* Coalesced block to be freed is last block before dummy | |
| 283 ** end-of-chunk block. */ | |
| 284 desc->end_of_shrinkable_chunk = | |
| 285 BAUS_FORWARD(fwd_head_ptr, DUMMY_END_BLOCK_BAUS); | |
| 286 desc->num_baus_can_shrink = BLOCK_BAUS(free_head_ptr); | |
| 287 | |
| 288 if (PREV_BLOCK_BAUS(free_head_ptr) == 0) | |
| 289 /* Free block is the entire chunk, so shrinking can | |
| 290 ** eliminate entire chunk including dummy end block. */ | |
| 291 desc->num_baus_can_shrink += DUMMY_END_BLOCK_BAUS; | |
| 292 } | |
| 293 } | |
| 294 } | |
| 295 | |
| 296 if (desc->last_freed) | |
| 297 { | |
| 298 /* There is a last freed block, but it is not adjacent to the | |
| 299 ** block being freed by this call to free, so put the last | |
| 300 ** freed block into the free collection. | |
| 301 */ | |
| 302 | |
| 303 #ifdef HMM_AUDIT_FAIL | |
| 304 AUDIT_BLOCK(desc->last_freed) | |
| 305 #endif | |
| 306 | |
| 307 U(into_free_collection)(desc, (head_record *)(desc->last_freed)); | |
| 308 } | |
| 309 | |
| 310 desc->last_freed = free_head_ptr; | |
| 311 } | |
| 312 | |
| 313 void U(new_chunk)(U(descriptor) *desc, void *start, U(size_bau) n_baus) | |
| 314 { | |
| 315 #ifdef HMM_AUDIT_FAIL | |
| 316 | |
| 317 if (desc->avl_tree_root) | |
| 318 /* Audit root block in AVL tree. */ | |
| 319 AUDIT_BLOCK(PTR_REC_TO_HEAD(desc->avl_tree_root)) | |
| 320 #endif | 292 #endif |
| 321 | 293 |
| 322 #undef HEAD_PTR | 294 #undef HEAD_PTR |
| 323 #define HEAD_PTR ((head_record *) start) | 295 #define HEAD_PTR ((head_record *) start) |
| 324 | 296 |
| 325 /* Make the chunk one big free block followed by a dummy end block. | 297 /* Make the chunk one big free block followed by a dummy end block. |
| 326 */ | 298 */ |
| 327 | 299 |
| 328 n_baus -= DUMMY_END_BLOCK_BAUS; | 300 n_baus -= DUMMY_END_BLOCK_BAUS; |
| 329 | 301 |
| 330 HEAD_PTR->previous_block_size = 0; | 302 HEAD_PTR->previous_block_size = 0; |
| 331 HEAD_PTR->block_size = n_baus; | 303 HEAD_PTR->block_size = n_baus; |
| 332 | 304 |
| 333 U(into_free_collection)(desc, HEAD_PTR); | 305 U(into_free_collection)(desc, HEAD_PTR); |
| 334 | 306 |
| 335 /* Set up the dummy end block. */ | 307 /* Set up the dummy end block. */ |
| 336 start = BAUS_FORWARD(start, n_baus); | 308 start = BAUS_FORWARD(start, n_baus); |
| 337 HEAD_PTR->previous_block_size = n_baus; | 309 HEAD_PTR->previous_block_size = n_baus; |
| 338 HEAD_PTR->block_size = 0; | 310 HEAD_PTR->block_size = 0; |
| 339 | 311 |
| 340 #undef HEAD_PTR | 312 #undef HEAD_PTR |
| 341 } | 313 } |
| 342 | 314 |
| 343 #ifdef HMM_AUDIT_FAIL | 315 #ifdef HMM_AUDIT_FAIL |
| 344 | 316 |
| 345 /* Function that does audit fail actions defined by preprocessor symbol, | 317 /* Function that does audit fail actions defined by preprocessor symbol, |
| 346 ** and returns a dummy integer value. | 318 ** and returns a dummy integer value. |
| 347 */ | 319 */ |
| 348 int U(audit_block_fail_dummy_return)(void) | 320 int U(audit_block_fail_dummy_return)(void) { |
| 349 { | 321 HMM_AUDIT_FAIL |
| 350 HMM_AUDIT_FAIL | |
| 351 | 322 |
| 352 /* Dummy return. */ | 323 /* Dummy return. */ |
| 353 return(0); | 324 return(0); |
| 354 } | 325 } |
| 355 | 326 |
| 356 #endif | 327 #endif |
| 357 | 328 |
| 358 /* AVL Tree instantiation. */ | 329 /* AVL Tree instantiation. */ |
| 359 | 330 |
| 360 #ifdef HMM_AUDIT_FAIL | 331 #ifdef HMM_AUDIT_FAIL |
| 361 | 332 |
| 362 /* The AVL tree generic package passes an ACCESS of 1 when it "touches" | 333 /* The AVL tree generic package passes an ACCESS of 1 when it "touches" |
| 363 ** a child node for the first time during a particular operation. I use | 334 ** a child node for the first time during a particular operation. I use |
| 364 ** this feature to audit only one time (per operation) the free blocks | 335 ** this feature to audit only one time (per operation) the free blocks |
| 365 ** that are tree nodes. Since the root node is not a child node, it has | 336 ** that are tree nodes. Since the root node is not a child node, it has |
| 366 ** to be audited directly. | 337 ** to be audited directly. |
| 367 */ | 338 */ |
| 368 | 339 |
| 369 /* The pain you feel while reading these macros will not be in vain. It | 340 /* The pain you feel while reading these macros will not be in vain. It |
| 370 ** will remove all doubt from your mind that C++ inline functions are | 341 ** will remove all doubt from your mind that C++ inline functions are |
| 371 ** a very good thing. | 342 ** a very good thing. |
| 372 */ | 343 */ |
| 373 | 344 |
| 374 #define AVL_GET_LESS(H, ACCESS) \ | 345 #define AVL_GET_LESS(H, ACCESS) \ |
| 375 (((ACCESS) ? AUDIT_BLOCK_AS_EXPR(PTR_REC_TO_HEAD(H)) : 0), (H)->self) | 346 (((ACCESS) ? AUDIT_BLOCK_AS_EXPR(PTR_REC_TO_HEAD(H)) : 0), (H)->self) |
| 376 #define AVL_GET_GREATER(H, ACCESS) \ | 347 #define AVL_GET_GREATER(H, ACCESS) \ |
| 377 (((ACCESS) ? AUDIT_BLOCK_AS_EXPR(PTR_REC_TO_HEAD(H)) : 0), (H)->prev) | 348 (((ACCESS) ? AUDIT_BLOCK_AS_EXPR(PTR_REC_TO_HEAD(H)) : 0), (H)->prev) |
| 378 | 349 |
| 379 #else | 350 #else |
| 380 | 351 |
| 381 #define AVL_GET_LESS(H, ACCESS) ((H)->self) | 352 #define AVL_GET_LESS(H, ACCESS) ((H)->self) |
| 382 #define AVL_GET_GREATER(H, ACCESS) ((H)->prev) | 353 #define AVL_GET_GREATER(H, ACCESS) ((H)->prev) |
| 383 | 354 |
| 384 #endif | 355 #endif |
| 385 | 356 |
| 386 #define AVL_SET_LESS(H, LH) (H)->self = (LH); | 357 #define AVL_SET_LESS(H, LH) (H)->self = (LH); |
| 387 #define AVL_SET_GREATER(H, GH) (H)->prev = (GH); | 358 #define AVL_SET_GREATER(H, GH) (H)->prev = (GH); |
| 388 | 359 |
| 389 /* high bit of high bit of | 360 /* high bit of high bit of |
| 390 ** block_size previous_block_size balance factor | 361 ** block_size previous_block_size balance factor |
| 391 ** ----------- ------------------- -------------- | 362 ** ----------- ------------------- -------------- |
| 392 ** 0 0 n/a (block allocated) | 363 ** 0 0 n/a (block allocated) |
| 393 ** 0 1 1 | 364 ** 0 1 1 |
| 394 ** 1 0 -1 | 365 ** 1 0 -1 |
| 395 ** 1 1 0 | 366 ** 1 1 0 |
| 396 */ | 367 */ |
| 397 | 368 |
| 398 #define AVL_GET_BALANCE_FACTOR(H) \ | 369 #define AVL_GET_BALANCE_FACTOR(H) \ |
| 399 ((((head_record *) (PTR_REC_TO_HEAD(H)))->block_size & \ | 370 ((((head_record *) (PTR_REC_TO_HEAD(H)))->block_size & \ |
| 400 HIGH_BIT_BAU_SIZE) ? \ | 371 HIGH_BIT_BAU_SIZE) ? \ |
| 401 (((head_record *) (PTR_REC_TO_HEAD(H)))->previous_block_size & \ | 372 (((head_record *) (PTR_REC_TO_HEAD(H)))->previous_block_size & \ |
| 402 HIGH_BIT_BAU_SIZE ? 0 : -1) : 1) | 373 HIGH_BIT_BAU_SIZE ? 0 : -1) : 1) |
| 403 | 374 |
| 404 #define AVL_SET_BALANCE_FACTOR(H, BF) \ | 375 #define AVL_SET_BALANCE_FACTOR(H, BF) \ |
| 405 { \ | 376 { \ |
| 406 register head_record *p = \ | 377 register head_record *p = \ |
| 407 (head_record *) PTR_REC_TO_HEAD(H); \ | 378 (head_record *) PTR_REC_TO_HEAD(H); \ |
| 408 register int bal_f = (BF); \ | 379 register int bal_f = (BF); \ |
| 409 \ | 380 \ |
| 410 if (bal_f <= 0) \ | 381 if (bal_f <= 0) \ |
| 411 p->block_size |= HIGH_BIT_BAU_SIZE; \ | 382 p->block_size |= HIGH_BIT_BAU_SIZE; \ |
| 412 else \ | 383 else \ |
| 413 p->block_size &= ~HIGH_BIT_BAU_SIZE; \ | 384 p->block_size &= ~HIGH_BIT_BAU_SIZE; \ |
| 414 if (bal_f >= 0) \ | 385 if (bal_f >= 0) \ |
| 415 p->previous_block_size |= HIGH_BIT_BAU_SIZE; \ | 386 p->previous_block_size |= HIGH_BIT_BAU_SIZE; \ |
| 416 else \ | 387 else \ |
| 417 p->previous_block_size &= ~HIGH_BIT_BAU_SIZE; \ | 388 p->previous_block_size &= ~HIGH_BIT_BAU_SIZE; \ |
| 418 } | 389 } |
| 419 | 390 |
| 420 #define COMPARE_KEY_KEY(K1, K2) ((K1) == (K2) ? 0 : ((K1) > (K2) ? 1 : -1)) | 391 #define COMPARE_KEY_KEY(K1, K2) ((K1) == (K2) ? 0 : ((K1) > (K2) ? 1 : -1)) |
| 421 | 392 |
| 422 #define AVL_COMPARE_KEY_NODE(K, H) \ | 393 #define AVL_COMPARE_KEY_NODE(K, H) \ |
| 423 COMPARE_KEY_KEY(K, BLOCK_BAUS(PTR_REC_TO_HEAD(H))) | 394 COMPARE_KEY_KEY(K, BLOCK_BAUS(PTR_REC_TO_HEAD(H))) |
| 424 | 395 |
| 425 #define AVL_COMPARE_NODE_NODE(H1, H2) \ | 396 #define AVL_COMPARE_NODE_NODE(H1, H2) \ |
| 426 COMPARE_KEY_KEY(BLOCK_BAUS(PTR_REC_TO_HEAD(H1)), \ | 397 COMPARE_KEY_KEY(BLOCK_BAUS(PTR_REC_TO_HEAD(H1)), \ |
| 427 BLOCK_BAUS(PTR_REC_TO_HEAD(H2))) | 398 BLOCK_BAUS(PTR_REC_TO_HEAD(H2))) |
| 428 | 399 |
| 429 #define AVL_NULL ((ptr_record *) 0) | 400 #define AVL_NULL ((ptr_record *) 0) |
| 430 | 401 |
| 431 #define AVL_IMPL_MASK \ | 402 #define AVL_IMPL_MASK \ |
| 432 ( AVL_IMPL_INSERT | AVL_IMPL_SEARCH | AVL_IMPL_REMOVE | AVL_IMPL_SUBST ) | 403 ( AVL_IMPL_INSERT | AVL_IMPL_SEARCH | AVL_IMPL_REMOVE | AVL_IMPL_SUBST ) |
| 433 | 404 |
| 434 #include "cavl_impl.h" | 405 #include "cavl_impl.h" |
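
The bit-packing table in the comments above (the AVL balance factor of a free block stored in the high bits of block_size and previous_block_size) can be illustrated with a small standalone sketch. The names below (bau_size, HIGH_BIT, fake_head) are stand-ins invented for this example; the real code uses U(size_bau), HIGH_BIT_BAU_SIZE and head_record from hmm_intrnl.h, which are not reproduced here.

```c
/* Standalone sketch of the balance-factor encoding, assuming 16-bit
** size fields and a high-bit mask of 0x8000 (stand-in for
** HIGH_BIT_BAU_SIZE).
*/
#include <stdio.h>

typedef unsigned short bau_size;         /* stand-in for U(size_bau) */
#define HIGH_BIT ((bau_size) 0x8000u)    /* stand-in for HIGH_BIT_BAU_SIZE */

typedef struct {
  bau_size previous_block_size;
  bau_size block_size;
} fake_head;                             /* stand-in for head_record */

/* Store balance factor bf (-1, 0, or +1) in the two high bits,
** mirroring AVL_SET_BALANCE_FACTOR. */
static void set_balance(fake_head *h, int bf) {
  if (bf <= 0)
    h->block_size |= HIGH_BIT;
  else
    h->block_size &= (bau_size) ~HIGH_BIT;

  if (bf >= 0)
    h->previous_block_size |= HIGH_BIT;
  else
    h->previous_block_size &= (bau_size) ~HIGH_BIT;
}

/* Recover the balance factor from the two high bits,
** mirroring AVL_GET_BALANCE_FACTOR. */
static int get_balance(const fake_head *h) {
  return (h->block_size & HIGH_BIT)
             ? ((h->previous_block_size & HIGH_BIT) ? 0 : -1)
             : 1;
}

int main(void) {
  fake_head h = { 4, 12 };               /* block sizes in BAUs (low bits) */
  int bf;

  for (bf = -1; bf <= 1; ++bf) {
    set_balance(&h, bf);
    printf("stored %2d, read back %2d, sizes still %u/%u\n",
           bf, get_balance(&h),
           (unsigned)(h.block_size & (bau_size) ~HIGH_BIT),
           (unsigned)(h.previous_block_size & (bau_size) ~HIGH_BIT));
  }

  return 0;
}
```

The round trip shows that a free block's balance factor can be stored and read back without disturbing the low bits of the two size fields, which is what lets the heap reuse the block header as an AVL node for free blocks only (the 0/0 high-bit combination is reserved for allocated blocks).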