| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2122 matching lines...) | |
| 2133 } | 2133 } |
| 2134 | 2134 |
| 2135 INLINE(Address MarkbitIndexToAddress(uint32_t index)) { | 2135 INLINE(Address MarkbitIndexToAddress(uint32_t index)) { |
| 2136 return reinterpret_cast<Address>(index << kPointerSizeLog2); | 2136 return reinterpret_cast<Address>(index << kPointerSizeLog2); |
| 2137 } | 2137 } |
| 2138 | 2138 |
| 2139 // The allocation top and limit addresses. | 2139 // The allocation top and limit addresses. |
| 2140 Address* allocation_top_address() { return &allocation_info_.top; } | 2140 Address* allocation_top_address() { return &allocation_info_.top; } |
| 2141 Address* allocation_limit_address() { return &allocation_info_.limit; } | 2141 Address* allocation_limit_address() { return &allocation_info_.limit; } |
| 2142 | 2142 |
| 2143 MUST_USE_RESULT MaybeObject* AllocateRaw(int size_in_bytes) { | 2143 MUST_USE_RESULT INLINE(MaybeObject* AllocateRaw(int size_in_bytes)); |
| 2144 return AllocateRawInternal(size_in_bytes); | |
| 2145 } | |
| 2146 | 2144 |
| 2147 // Reset the allocation pointer to the beginning of the active semispace. | 2145 // Reset the allocation pointer to the beginning of the active semispace. |
| 2148 void ResetAllocationInfo(); | 2146 void ResetAllocationInfo(); |
| 2149 | 2147 |
| 2150 void LowerInlineAllocationLimit(intptr_t step) { | 2148 void LowerInlineAllocationLimit(intptr_t step) { |
| 2151 inline_allocation_limit_step_ = step; | 2149 inline_allocation_limit_step_ = step; |
| 2152 if (step == 0) { | 2150 if (step == 0) { |
| 2153 allocation_info_.limit = to_space_.page_high(); | 2151 allocation_info_.limit = to_space_.page_high(); |
| 2154 } else { | 2152 } else { |
| 2155 allocation_info_.limit = Min( | 2153 allocation_info_.limit = Min( |
| (...skipping 105 matching lines...) | |
| 2261 // to be lower than actual limit and then will gradually increase it | 2259 // to be lower than actual limit and then will gradually increase it |
| 2262 // in steps to guarantee that we do incremental marking steps even | 2260 // in steps to guarantee that we do incremental marking steps even |
| 2263 // when all allocation is performed from inlined generated code. | 2261 // when all allocation is performed from inlined generated code. |
| 2264 intptr_t inline_allocation_limit_step_; | 2262 intptr_t inline_allocation_limit_step_; |
| 2265 | 2263 |
| 2266 Address top_on_previous_step_; | 2264 Address top_on_previous_step_; |
| 2267 | 2265 |
| 2268 HistogramInfo* allocated_histogram_; | 2266 HistogramInfo* allocated_histogram_; |
| 2269 HistogramInfo* promoted_histogram_; | 2267 HistogramInfo* promoted_histogram_; |
| 2270 | 2268 |
| 2271 // Implementation of AllocateRaw. | 2269 MUST_USE_RESULT MaybeObject* SlowAllocateRaw(int size_in_bytes); |
| 2272 MUST_USE_RESULT inline MaybeObject* AllocateRawInternal(int size_in_bytes); | |
| 2273 | 2270 |
| 2274 friend class SemiSpaceIterator; | 2271 friend class SemiSpaceIterator; |
| 2275 | 2272 |
| 2276 public: | 2273 public: |
| 2277 TRACK_MEMORY("NewSpace") | 2274 TRACK_MEMORY("NewSpace") |
| 2278 }; | 2275 }; |
| 2279 | 2276 |
| 2280 | 2277 |
| 2281 // ----------------------------------------------------------------------------- | 2278 // ----------------------------------------------------------------------------- |
| 2282 // Old object space (excluding map objects) | 2279 // Old object space (excluding map objects) |
| (...skipping 326 matching lines...) | |
| 2609 } | 2606 } |
| 2610 // Must be small, since an iteration is used for lookup. | 2607 // Must be small, since an iteration is used for lookup. |
| 2611 static const int kMaxComments = 64; | 2608 static const int kMaxComments = 64; |
| 2612 }; | 2609 }; |
| 2613 #endif | 2610 #endif |
| 2614 | 2611 |
| 2615 | 2612 |
| 2616 } } // namespace v8::internal | 2613 } } // namespace v8::internal |
| 2617 | 2614 |
| 2618 #endif // V8_SPACES_H_ | 2615 #endif // V8_SPACES_H_ |
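
The visible part of this patch splits `NewSpace` allocation into an inlined fast path and an out-of-line slow path: the old `AllocateRaw` body that forwarded to `AllocateRawInternal` becomes a `MUST_USE_RESULT INLINE(...)` declaration, and `AllocateRawInternal` is replaced by `SlowAllocateRaw`. Combined with `LowerInlineAllocationLimit`, which can pull `allocation_info_.limit` below the real page end, this lets the allocator fall into the slow path periodically so incremental marking steps run even when all allocation happens from inlined generated code. The sketch below illustrates that general bump-pointer fast-path/slow-path pattern only; the class name, member bodies, and the failure behaviour of the slow path are assumptions for illustration, not the actual V8 implementation in this patch.

```cpp
// Minimal sketch of a bump-pointer fast path with an out-of-line slow path.
// Names mirror the patch (allocation_info_, SlowAllocateRaw); bodies are
// illustrative assumptions, not V8's real code.
#include <cstdint>

typedef uint8_t* Address;

struct AllocationInfo {
  Address top;    // next free byte in the current allocation area
  Address limit;  // may sit below the real page end to force slow-path hits
};

class NewSpaceSketch {
 public:
  // Fast path: a pointer bump, cheap enough to inline (or to emit directly
  // in generated code). Falls back to the slow path when the request does
  // not fit before the (possibly lowered) limit.
  void* AllocateRaw(int size_in_bytes) {
    Address old_top = allocation_info_.top;
    if (allocation_info_.limit - old_top < size_in_bytes) {
      return SlowAllocateRaw(size_in_bytes);
    }
    allocation_info_.top = old_top + size_in_bytes;
    return old_top;
  }

 private:
  // Slow path, deliberately out of line. In the real collector this is where
  // an incremental marking step would run and the inline allocation limit
  // would be advanced before retrying; this sketch just reports failure.
  void* SlowAllocateRaw(int /*size_in_bytes*/) {
    return nullptr;  // placeholder for "do a GC step, advance limit, retry"
  }

  AllocationInfo allocation_info_;
};
```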