Index: src/heap/heap.h
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 728637873255c2710cf17398b6d0f8e970113f91..a020ab2deec9c7c44ff8a891ecd4291cec2b9ca1 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -895,32 +895,28 @@ class Heap {
   // You can't use type::cast during GC because the assert fails.
   // TODO(1490): Try removing the unchecked accessors, now that GC marking does
   // not corrupt the map.
-#define ROOT_ACCESSOR(type, name, camel_name) \
-  type* name() { return type::cast(roots_[k##camel_name##RootIndex]); } \
-  type* raw_unchecked_##name() { \
-    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
+#define ROOT_ACCESSOR(type, name, camel_name) \
+  inline type* name(); \
+  type* raw_unchecked_##name() { \
+    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
   }
   ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR
 
   // Utility type maps
-#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
-  Map* name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
+#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
   STRUCT_LIST(STRUCT_MAP_ACCESSOR)
 #undef STRUCT_MAP_ACCESSOR
 
-#define STRING_ACCESSOR(name, str) \
-  String* name() { return String::cast(roots_[k##name##RootIndex]); }
+#define STRING_ACCESSOR(name, str) inline String* name();
   INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
 #undef STRING_ACCESSOR
 
-#define SYMBOL_ACCESSOR(name) \
-  Symbol* name() { return Symbol::cast(roots_[k##name##RootIndex]); }
+#define SYMBOL_ACCESSOR(name) inline Symbol* name();
   PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
 #undef SYMBOL_ACCESSOR
 
-#define SYMBOL_ACCESSOR(name, varname, description) \
-  Symbol* name() { return Symbol::cast(roots_[k##name##RootIndex]); }
+#define SYMBOL_ACCESSOR(name, varname, description) inline Symbol* name();
   PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
 #undef SYMBOL_ACCESSOR
 
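Aside (not part of the patch): the accessor bodies deleted above become plain declarations in heap.h, and the definitions presumably move to an -inl header (src/heap/heap-inl.h in V8's layout, an assumption here). A minimal sketch of what the out-of-class definitions could look like there, reusing the root-index expressions from the deleted macro bodies; the STRUCT_MAP and SYMBOL variants follow the same shape:

// Sketch for heap-inl.h (assumed destination); bodies mirror the deleted macros.
#define ROOT_ACCESSOR(type, name, camel_name) \
  type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  String* Heap::name() { return String::cast(roots_[k##name##RootIndex]); }
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR
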
@@ -1102,6 +1098,9 @@ class Heap {
   static inline void ScavengePointer(HeapObject** p);
   static inline void ScavengeObject(HeapObject** p, HeapObject* object);
 
+  // Slow part of scavenge object.
+  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
+
   enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };
 
   // If an object has an AllocationMemento trailing it, return it, otherwise
@@ -1433,10 +1432,6 @@ class Heap {
 
   inline bool OldGenerationAllocationLimitReached();
 
-  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
-    scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
-  }
-
   void QueueMemoryChunkForFree(MemoryChunk* chunk);
   void FreeQueuedChunks();
 
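Aside (not part of the patch): ScavengeObjectSlow, given a regular declaration above, is the out-of-line half of the inline ScavengeObject fast path. A sketch of the usual fast-path/slow-path split, assuming the inline part lives in heap-inl.h; this is illustrative only and not taken from this patch:

// Sketch (assumed shape, not the patch's code).
void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  // If the object was already copied during this scavenge, its map word holds
  // a forwarding address; just update the slot and return.
  MapWord first_word = object->map_word();
  if (first_word.IsForwardingAddress()) {
    *p = first_word.ToForwardingAddress();
    return;
  }
  // Otherwise fall through to the out-of-line copying path.
  ScavengeObjectSlow(p, object);
}
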
@@ -1451,39 +1446,14 @@ class Heap {
   // The roots that have an index less than this are always in old space.
   static const int kOldSpaceRoots = 0x20;
 
-  uint32_t HashSeed() {
-    uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
-    DCHECK(FLAG_randomize_hashes || seed == 0);
-    return seed;
-  }
-
-  Smi* NextScriptId() {
-    int next_id = last_script_id()->value() + 1;
-    if (!Smi::IsValid(next_id) || next_id < 0) next_id = 1;
-    Smi* next_id_smi = Smi::FromInt(next_id);
-    set_last_script_id(next_id_smi);
-    return next_id_smi;
-  }
-
-  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
-    DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
-    set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
-  }
+  inline uint32_t HashSeed();
 
-  void SetConstructStubDeoptPCOffset(int pc_offset) {
-    DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
-    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
-  }
+  inline Smi* NextScriptId();
 
-  void SetGetterStubDeoptPCOffset(int pc_offset) {
-    DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
-    set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
-  }
-
-  void SetSetterStubDeoptPCOffset(int pc_offset) {
-    DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
-    set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
-  }
+  inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
+  inline void SetConstructStubDeoptPCOffset(int pc_offset);
+  inline void SetGetterStubDeoptPCOffset(int pc_offset);
+  inline void SetSetterStubDeoptPCOffset(int pc_offset);
 
   // For post mortem debugging.
   void RememberUnmappedPage(Address page, bool compacted);
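Aside (not part of the patch): the helper bodies removed above are unchanged; they simply move out of the class definition. Assuming heap-inl.h is the destination, the definitions would read as below; the remaining deopt-PC-offset setters follow the SetConstructStubDeoptPCOffset pattern:

// Sketch for heap-inl.h (assumed destination); bodies copied from the deleted lines.
uint32_t Heap::HashSeed() {
  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
  DCHECK(FLAG_randomize_hashes || seed == 0);
  return seed;
}

Smi* Heap::NextScriptId() {
  int next_id = last_script_id()->value() + 1;
  if (!Smi::IsValid(next_id) || next_id < 0) next_id = 1;
  Smi* next_id_smi = Smi::FromInt(next_id);
  set_last_script_id(next_id_smi);
  return next_id_smi;
}

void Heap::SetConstructStubDeoptPCOffset(int pc_offset) {
  DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}
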
@@ -1780,15 +1750,8 @@ class Heap {
   // Total length of the strings we failed to flatten since the last GC.
   int unflattened_strings_length_;
 
-#define ROOT_ACCESSOR(type, name, camel_name) \
-  inline void set_##name(type* value) { \
-    /* The deserializer makes use of the fact that these common roots are */ \
-    /* never in new space and never on a page that is being compacted. */ \
-    DCHECK(!deserialization_complete() || \
-           RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex)); \
-    DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
-    roots_[k##camel_name##RootIndex] = value; \
-  }
+#define ROOT_ACCESSOR(type, name, camel_name) \
+  inline void set_##name(type* value);
   ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR
 
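Aside (not part of the patch): likewise, the root setter keeps its body, including both DCHECKs, and only moves. Assuming heap-inl.h as the destination, the emitted definitions would look like:

// Sketch for heap-inl.h (assumed destination); body copied from the deleted macro.
#define ROOT_ACCESSOR(type, name, camel_name) \
  void Heap::set_##name(type* value) { \
    /* The deserializer makes use of the fact that these common roots are */ \
    /* never in new space and never on a page that is being compacted. */ \
    DCHECK(!deserialization_complete() || \
           RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex)); \
    DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value; \
  }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
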
@@ -2156,9 +2119,6 @@ class Heap {
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();
 
-  // Slow part of scavenge object.
-  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
-
   // Total RegExp code ever generated
   double total_regexp_code_generated_;
 