Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 2311203002: Move kMaxRegularHeapObjectSize into globals (Closed)
Patch Set: Saving the file helps... Created 4 years, 3 months ago
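
The change in this file is mechanical: every STATIC_ASSERT and DCHECK that previously referred to Page::kMaxRegularHeapObjectSize now uses a namespace-scope kMaxRegularHeapObjectSize from src/globals.h (the globals.h side of this CL). As a minimal sketch of the shape of that move, assuming a plain namespace-scope constant in globals.h; the value shown is illustrative and not taken from this CL:

    // Sketch only -- not the exact V8 declarations.
    //
    // Before: the limit was a static member of Page (src/heap/spaces.h),
    // so callers had to spell Page::kMaxRegularHeapObjectSize and see the
    // Page definition:
    //
    //   class Page : public MemoryChunk {
    //    public:
    //     static const int kMaxRegularHeapObjectSize = ...;
    //   };
    //
    // After: a namespace-scope constant in src/globals.h.
    namespace v8 {
    namespace internal {

    const int kMaxRegularHeapObjectSize = 512 * 1024;  // illustrative value only

    }  // namespace internal
    }  // namespace v8

With the constant at namespace scope, the assertions in this file can be written as, for example, STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize) without naming Page. The unified diff for src/heap/heap.cc follows.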
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/heap.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/ast/context-slot-cache.h"
 #include "src/base/bits.h"
@@ -2446,98 +2446,98 @@
   DCHECK(!InNewSpace(empty_fixed_array()));
   return true;
 }


 AllocationResult Heap::AllocateHeapNumber(double value, MutableMode mode,
                                           PretenureFlag pretenure) {
   // Statically ensure that it is safe to allocate heap numbers in paged
   // spaces.
   int size = HeapNumber::kSize;
-  STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize);

   AllocationSpace space = SelectSpace(pretenure);

   HeapObject* result = nullptr;
   {
     AllocationResult allocation = AllocateRaw(size, space, kDoubleUnaligned);
     if (!allocation.To(&result)) return allocation;
   }

   Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map();
   HeapObject::cast(result)->set_map_no_write_barrier(map);
   HeapNumber::cast(result)->set_value(value);
   return result;
 }

 #define SIMD_ALLOCATE_DEFINITION(TYPE, Type, type, lane_count, lane_type) \
   AllocationResult Heap::Allocate##Type(lane_type lanes[lane_count],      \
                                         PretenureFlag pretenure) {        \
     int size = Type::kSize;                                               \
-    STATIC_ASSERT(Type::kSize <= Page::kMaxRegularHeapObjectSize);        \
+    STATIC_ASSERT(Type::kSize <= kMaxRegularHeapObjectSize);              \
                                                                           \
     AllocationSpace space = SelectSpace(pretenure);                       \
                                                                           \
     HeapObject* result = nullptr;                                         \
     {                                                                     \
       AllocationResult allocation =                                       \
           AllocateRaw(size, space, kSimd128Unaligned);                    \
       if (!allocation.To(&result)) return allocation;                     \
     }                                                                     \
                                                                           \
     result->set_map_no_write_barrier(type##_map());                       \
     Type* instance = Type::cast(result);                                  \
     for (int i = 0; i < lane_count; i++) {                                \
       instance->set_lane(i, lanes[i]);                                    \
     }                                                                     \
     return result;                                                        \
   }
 SIMD128_TYPES(SIMD_ALLOCATE_DEFINITION)
 #undef SIMD_ALLOCATE_DEFINITION


 AllocationResult Heap::AllocateCell(Object* value) {
   int size = Cell::kSize;
-  STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);

   HeapObject* result = nullptr;
   {
     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(cell_map());
   Cell::cast(result)->set_value(value);
   return result;
 }


 AllocationResult Heap::AllocatePropertyCell() {
   int size = PropertyCell::kSize;
-  STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize);

   HeapObject* result = nullptr;
   AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
   if (!allocation.To(&result)) return allocation;

   result->set_map_no_write_barrier(global_property_cell_map());
   PropertyCell* cell = PropertyCell::cast(result);
   cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
                            SKIP_WRITE_BARRIER);
   cell->set_property_details(PropertyDetails(Smi::FromInt(0)));
   cell->set_value(the_hole_value());
   return result;
 }


 AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   int size = WeakCell::kSize;
-  STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize);
   HeapObject* result = nullptr;
   {
     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(weak_cell_map());
   WeakCell::cast(result)->initialize(value);
   WeakCell::cast(result)->clear_next(the_hole_value());
   return result;
 }
@@ -3023,21 +3023,21 @@

 FixedTypedArrayBase* Heap::EmptyFixedTypedArrayForMap(Map* map) {
   return FixedTypedArrayBase::cast(
       roots_[RootIndexForEmptyFixedTypedArray(map->elements_kind())]);
 }


 AllocationResult Heap::AllocateForeign(Address address,
                                        PretenureFlag pretenure) {
   // Statically ensure that it is safe to allocate foreigns in paged spaces.
-  STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
   AllocationSpace space = (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
   Foreign* result = nullptr;
   AllocationResult allocation = Allocate(foreign_map(), space);
   if (!allocation.To(&result)) return allocation;
   result->set_foreign_address(address);
   return result;
 }


 AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
@@ -4002,21 +4002,21 @@
     AllocationResult allocation = AllocateRaw(size, space, kDoubleAligned);
     if (!allocation.To(&object)) return allocation;
   }

   return object;
 }


 AllocationResult Heap::AllocateSymbol() {
   // Statically ensure that it is safe to allocate symbols in paged spaces.
-  STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);

   HeapObject* result = nullptr;
   AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_SPACE);
   if (!allocation.To(&result)) return allocation;

   result->set_map_no_write_barrier(symbol_map());

   // Generate a random hash value.
   int hash;
   int attempts = 0;
@@ -5039,21 +5039,21 @@

   if (FLAG_initial_old_space_size > 0) {
     initial_old_generation_size_ = FLAG_initial_old_space_size * MB;
   } else {
     initial_old_generation_size_ =
         max_old_generation_size_ / kInitalOldGenerationLimitFactor;
   }
   old_generation_allocation_limit_ = initial_old_generation_size_;

   // We rely on being able to allocate new arrays in paged spaces.
-  DCHECK(Page::kMaxRegularHeapObjectSize >=
+  DCHECK(kMaxRegularHeapObjectSize >=
          (JSArray::kSize +
           FixedArray::SizeFor(JSArray::kInitialMaxFastElementArray) +
           AllocationMemento::kSize));

   code_range_size_ = code_range_size * MB;

   configured_ = true;
   return true;
 }

@@ -6464,10 +6464,10 @@
 }


 // static
 int Heap::GetStaticVisitorIdForMap(Map* map) {
   return StaticVisitorBase::GetVisitorId(map);
 }

 }  // namespace internal
 }  // namespace v8
