| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 264 matching lines...) |
| 275 | 275 |
| 276 #ifdef DEBUG | 276 #ifdef DEBUG |
| 277 class HeapDebugUtils; | 277 class HeapDebugUtils; |
| 278 #endif | 278 #endif |
| 279 | 279 |
| 280 | 280 |
| 281 // A queue of objects promoted during scavenge. Each object is accompanied | 281 // A queue of objects promoted during scavenge. Each object is accompanied |
| 282 // by its size to avoid dereferencing a map pointer for scanning. | 282 // by its size to avoid dereferencing a map pointer for scanning. |
| 283 class PromotionQueue { | 283 class PromotionQueue { |
| 284 public: | 284 public: |
| 285 PromotionQueue() : front_(NULL), rear_(NULL) { } | 285 PromotionQueue(Heap* heap) |
| 286 : front_(NULL), |
| 287 rear_(NULL), |
| 288 limit_(NULL), |
| 289 emergency_stack_(0), |
| 290 heap_(heap) { } |
| 286 | 291 |
| 287 void Initialize(Address start_address) { | 292 void Initialize(); |
| 288 // Assumes that a NewSpacePage exactly fits a number of promotion queue | 293 |
| 289 // entries (where each is a pair of intptr_t). This allows us to simplify | 294 void Destroy() { |
| 290 // the test for when to switch pages. | 295 ASSERT(is_empty()); |
| 291 ASSERT((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) | 296 delete emergency_stack_; |
| 292 == 0); | 297 emergency_stack_ = NULL; |
| 293 ASSERT(NewSpacePage::IsAtEnd(start_address)); | |
| 294 front_ = rear_ = reinterpret_cast<intptr_t*>(start_address); | |
| 295 } | 298 } |
| 296 | 299 |
| 297 bool is_empty() { return front_ == rear_; } | 300 inline void ActivateGuardIfOnTheSamePage(); |
| 301 |
| 302 Page* GetHeadPage() { |
| 303 return Page::FromAllocationTop(reinterpret_cast<Address>(rear_)); |
| 304 } |
| 305 |
| 306 void SetNewLimit(Address limit) { |
| 307 if (!guard_) { |
| 308 return; |
| 309 } |
| 310 |
| 311 ASSERT(GetHeadPage() == Page::FromAllocationTop(limit)); |
| 312 limit_ = reinterpret_cast<intptr_t*>(limit); |
| 313 |
| 314 if (limit_ <= rear_) { |
| 315 return; |
| 316 } |
| 317 |
| 318 RelocateQueueHead(); |
| 319 } |
| 320 |
| 321 bool is_empty() { |
| 322 return (front_ == rear_) && |
| 323 (emergency_stack_ == NULL || emergency_stack_->length() == 0); |
| 324 } |
| 298 | 325 |
| 299 inline void insert(HeapObject* target, int size); | 326 inline void insert(HeapObject* target, int size); |
| 300 | 327 |
| 301 void remove(HeapObject** target, int* size) { | 328 void remove(HeapObject** target, int* size) { |
| 302 ASSERT(!is_empty()); | 329 ASSERT(!is_empty()); |
| 330 if (front_ == rear_) { |
| 331 Entry e = emergency_stack_->RemoveLast(); |
| 332 *target = e.obj_; |
| 333 *size = e.size_; |
| 334 return; |
| 335 } |
| 336 |
| 303 if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) { | 337 if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) { |
| 304 NewSpacePage* front_page = | 338 NewSpacePage* front_page = |
| 305 NewSpacePage::FromAddress(reinterpret_cast<Address>(front_)); | 339 NewSpacePage::FromAddress(reinterpret_cast<Address>(front_)); |
| 306 ASSERT(!front_page->prev_page()->is_anchor()); | 340 ASSERT(!front_page->prev_page()->is_anchor()); |
| 307 front_ = | 341 front_ = |
| 308 reinterpret_cast<intptr_t*>(front_page->prev_page()->body_limit()); | 342 reinterpret_cast<intptr_t*>(front_page->prev_page()->body_limit()); |
| 309 } | 343 } |
| 310 *target = reinterpret_cast<HeapObject*>(*(--front_)); | 344 *target = reinterpret_cast<HeapObject*>(*(--front_)); |
| 311 *size = static_cast<int>(*(--front_)); | 345 *size = static_cast<int>(*(--front_)); |
| 312 // Assert no underflow. | 346 // Assert no underflow. |
| 313 SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_), | 347 SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_), |
| 314 reinterpret_cast<Address>(front_)); | 348 reinterpret_cast<Address>(front_)); |
| 315 } | 349 } |
| 316 | 350 |
| 317 private: | 351 private: |
| 318 // The front of the queue is higher in the memory page chain than the rear. | 352 // The front of the queue is higher in the memory page chain than the rear. |
| 319 intptr_t* front_; | 353 intptr_t* front_; |
| 320 intptr_t* rear_; | 354 intptr_t* rear_; |
| 355 intptr_t* limit_; |
| 356 |
| 357 bool guard_; |
| 358 |
| 359 static const int kEntrySizeInWords = 2; |
| 360 |
| 361 struct Entry { |
| 362 Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { } |
| 363 |
| 364 HeapObject* obj_; |
| 365 int size_; |
| 366 }; |
| 367 List<Entry>* emergency_stack_; |
| 368 |
| 369 Heap* heap_; |
| 370 |
| 371 void RelocateQueueHead(); |
| 321 | 372 |
| 322 DISALLOW_COPY_AND_ASSIGN(PromotionQueue); | 373 DISALLOW_COPY_AND_ASSIGN(PromotionQueue); |
| 323 }; | 374 }; |
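
Reviewer note: the class comment and the (removed) Initialize() comment both describe the queue as storing each entry as two machine words, the object pointer plus its size, laid out downward from the end of a new-space page (kEntrySizeInWords == 2). Only remove() is visible in this hunk; insert() is declared inline and presumably defined in heap-inl.h, so the sketch below is a minimal, self-contained illustration of that two-word discipline under the assumption that insert() mirrors remove()'s read order. ToyPromotionQueue and its fixed buffer are illustrative names, not V8 code, and the guard/limit, page-switching, and emergency-stack logic is deliberately omitted.

    #include <cassert>
    #include <cstdint>

    // Illustrative only: a fixed-buffer queue using the same two-words-per-entry
    // layout. front_ and rear_ both walk toward lower addresses; rear_ leads on
    // insert, front_ chases on remove, so the queue stays FIFO.
    class ToyPromotionQueue {
     public:
      void Initialize(intptr_t* end_of_buffer) { front_ = rear_ = end_of_buffer; }
      bool is_empty() const { return front_ == rear_; }

      void insert(void* obj, int size) {
        // Assumed mirror image of remove(): pointer word first, then size word.
        *(--rear_) = reinterpret_cast<intptr_t>(obj);
        *(--rear_) = static_cast<intptr_t>(size);
      }

      void remove(void** obj, int* size) {
        assert(!is_empty());
        // Same read order as the remove() shown above.
        *obj = reinterpret_cast<void*>(*(--front_));
        *size = static_cast<int>(*(--front_));
      }

     private:
      intptr_t* front_ = nullptr;
      intptr_t* rear_ = nullptr;
    };

    // Usage: FIFO order is preserved even though both cursors decrement.
    //   intptr_t buffer[16];
    //   ToyPromotionQueue q;
    //   q.Initialize(buffer + 16);
    //   q.insert(&a, 24);  q.insert(&b, 40);
    //   q.remove(&obj, &sz);   // yields &a, 24; next call yields &b, 40
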
| 324 | 375 |
| 325 | 376 |
| 326 typedef void (*ScavengingCallback)(Map* map, | 377 typedef void (*ScavengingCallback)(Map* map, |
| 327 HeapObject** slot, | 378 HeapObject** slot, |
| 328 HeapObject* object); | 379 HeapObject* object); |
| 329 | 380 |
| 330 | 381 |
| (...skipping 2147 matching lines...) |
| 2478 | 2529 |
| 2479 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2530 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2480 }; | 2531 }; |
| 2481 #endif // DEBUG || LIVE_OBJECT_LIST | 2532 #endif // DEBUG || LIVE_OBJECT_LIST |
| 2482 | 2533 |
| 2483 } } // namespace v8::internal | 2534 } } // namespace v8::internal |
| 2484 | 2535 |
| 2485 #undef HEAP | 2536 #undef HEAP |
| 2486 | 2537 |
| 2487 #endif // V8_HEAP_H_ | 2538 #endif // V8_HEAP_H_ |