Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 20 matching lines...) Expand all Loading... | |
| 31 #include "api.h" | 31 #include "api.h" |
| 32 #include "execution.h" | 32 #include "execution.h" |
| 33 #include "global-handles.h" | 33 #include "global-handles.h" |
| 34 #include "ic-inl.h" | 34 #include "ic-inl.h" |
| 35 #include "natives.h" | 35 #include "natives.h" |
| 36 #include "platform.h" | 36 #include "platform.h" |
| 37 #include "runtime.h" | 37 #include "runtime.h" |
| 38 #include "serialize.h" | 38 #include "serialize.h" |
| 39 #include "stub-cache.h" | 39 #include "stub-cache.h" |
| 40 #include "v8threads.h" | 40 #include "v8threads.h" |
| 41 #include "top.h" | |
| 42 #include "bootstrapper.h" | 41 #include "bootstrapper.h" |
| 43 | 42 |
| 44 namespace v8 { | 43 namespace v8 { |
| 45 namespace internal { | 44 namespace internal { |
| 46 | 45 |
| 47 | 46 |
| 48 // ----------------------------------------------------------------------------- | 47 // ----------------------------------------------------------------------------- |
| 49 // Coding of external references. | 48 // Coding of external references. |
| 50 | 49 |
| 51 // The encoding of an external reference. The type is in the high word. | 50 // The encoding of an external reference. The type is in the high word. |
| 52 // The id is in the low word. | 51 // The id is in the low word. |
| 53 static uint32_t EncodeExternal(TypeCode type, uint16_t id) { | 52 static uint32_t EncodeExternal(TypeCode type, uint16_t id) { |
| 54 return static_cast<uint32_t>(type) << 16 | id; | 53 return static_cast<uint32_t>(type) << 16 | id; |
| 55 } | 54 } |
| 56 | 55 |
| 57 | 56 |
| 58 static int* GetInternalPointer(StatsCounter* counter) { | 57 static int* GetInternalPointer(StatsCounter* counter) { |
| 59 // All counters refer to dummy_counter, if deserializing happens without | 58 // All counters refer to dummy_counter, if deserializing happens without |
| 60 // setting up counters. | 59 // setting up counters. |
| 61 static int dummy_counter = 0; | 60 static int dummy_counter = 0; |
| 62 return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter; | 61 return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter; |
| 63 } | 62 } |
| 64 | 63 |
| 65 | 64 |
| 66 // ExternalReferenceTable is a helper class that defines the relationship | 65 // ExternalReferenceTable is a helper class that defines the relationship |
| 67 // between external references and their encodings. It is used to build | 66 // between external references and their encodings. It is used to build |
| 68 // hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder. | 67 // hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder. |
| 69 class ExternalReferenceTable { | 68 class ExternalReferenceTable { |
| 70 public: | 69 public: |
| 71 static ExternalReferenceTable* instance() { | 70 static ExternalReferenceTable* instance(Isolate* isolate) { |
| 72 if (!instance_) instance_ = new ExternalReferenceTable(); | 71 ExternalReferenceTable* external_reference_table = |
| 73 return instance_; | 72 isolate->external_reference_table(); |
| 73 if (external_reference_table == NULL) { | |
| 74 external_reference_table = new ExternalReferenceTable(isolate); | |
| 75 isolate->set_external_reference_table(external_reference_table); | |
| 76 } | |
| 77 return external_reference_table; | |
| 74 } | 78 } |
| 75 | 79 |
| 76 int size() const { return refs_.length(); } | 80 int size() const { return refs_.length(); } |
| 77 | 81 |
| 78 Address address(int i) { return refs_[i].address; } | 82 Address address(int i) { return refs_[i].address; } |
| 79 | 83 |
| 80 uint32_t code(int i) { return refs_[i].code; } | 84 uint32_t code(int i) { return refs_[i].code; } |
| 81 | 85 |
| 82 const char* name(int i) { return refs_[i].name; } | 86 const char* name(int i) { return refs_[i].name; } |
| 83 | 87 |
| 84 int max_id(int code) { return max_id_[code]; } | 88 int max_id(int code) { return max_id_[code]; } |
| 85 | 89 |
| 86 private: | 90 private: |
| 87 static ExternalReferenceTable* instance_; | 91 explicit ExternalReferenceTable(Isolate* isolate) : refs_(64) { |
| 88 | 92 PopulateTable(isolate); |
| 89 ExternalReferenceTable() : refs_(64) { PopulateTable(); } | 93 } |
| 90 ~ExternalReferenceTable() { } | 94 ~ExternalReferenceTable() { } |
| 91 | 95 |
| 92 struct ExternalReferenceEntry { | 96 struct ExternalReferenceEntry { |
| 93 Address address; | 97 Address address; |
| 94 uint32_t code; | 98 uint32_t code; |
| 95 const char* name; | 99 const char* name; |
| 96 }; | 100 }; |
| 97 | 101 |
| 98 void PopulateTable(); | 102 void PopulateTable(Isolate* isolate); |
| 99 | 103 |
| 100 // For a few types of references, we can get their address from their id. | 104 // For a few types of references, we can get their address from their id. |
| 101 void AddFromId(TypeCode type, uint16_t id, const char* name); | 105 void AddFromId(TypeCode type, uint16_t id, const char* name); |
| 102 | 106 |
| 103 // For other types of references, the caller will figure out the address. | 107 // For other types of references, the caller will figure out the address. |
| 104 void Add(Address address, TypeCode type, uint16_t id, const char* name); | 108 void Add(Address address, TypeCode type, uint16_t id, const char* name); |
| 105 | 109 |
| 106 List<ExternalReferenceEntry> refs_; | 110 List<ExternalReferenceEntry> refs_; |
| 107 int max_id_[kTypeCodeCount]; | 111 int max_id_[kTypeCodeCount]; |
| 108 }; | 112 }; |
| 109 | 113 |
| 110 | 114 |
| 111 ExternalReferenceTable* ExternalReferenceTable::instance_ = NULL; | |
| 112 | |
| 113 | |
| 114 void ExternalReferenceTable::AddFromId(TypeCode type, | 115 void ExternalReferenceTable::AddFromId(TypeCode type, |
| 115 uint16_t id, | 116 uint16_t id, |
| 116 const char* name) { | 117 const char* name) { |
| 117 Address address; | 118 Address address; |
| 118 switch (type) { | 119 switch (type) { |
| 119 case C_BUILTIN: { | 120 case C_BUILTIN: { |
| 120 ExternalReference ref(static_cast<Builtins::CFunctionId>(id)); | 121 ExternalReference ref(static_cast<Builtins::CFunctionId>(id)); |
| 121 address = ref.address(); | 122 address = ref.address(); |
| 122 break; | 123 break; |
| 123 } | 124 } |
| (...skipping 28 matching lines...) Expand all Loading... | |
| 152 ExternalReferenceEntry entry; | 153 ExternalReferenceEntry entry; |
| 153 entry.address = address; | 154 entry.address = address; |
| 154 entry.code = EncodeExternal(type, id); | 155 entry.code = EncodeExternal(type, id); |
| 155 entry.name = name; | 156 entry.name = name; |
| 156 ASSERT_NE(0, entry.code); | 157 ASSERT_NE(0, entry.code); |
| 157 refs_.Add(entry); | 158 refs_.Add(entry); |
| 158 if (id > max_id_[type]) max_id_[type] = id; | 159 if (id > max_id_[type]) max_id_[type] = id; |
| 159 } | 160 } |
| 160 | 161 |
| 161 | 162 |
| 162 void ExternalReferenceTable::PopulateTable() { | 163 void ExternalReferenceTable::PopulateTable(Isolate* isolate) { |
| 163 for (int type_code = 0; type_code < kTypeCodeCount; type_code++) { | 164 for (int type_code = 0; type_code < kTypeCodeCount; type_code++) { |
| 164 max_id_[type_code] = 0; | 165 max_id_[type_code] = 0; |
| 165 } | 166 } |
| 166 | 167 |
| 167 // The following populates all of the different type of external references | 168 // The following populates all of the different type of external references |
| 168 // into the ExternalReferenceTable. | 169 // into the ExternalReferenceTable. |
| 169 // | 170 // |
| 170 // NOTE: This function was originally 100k of code. It has since been | 171 // NOTE: This function was originally 100k of code. It has since been |
| 171 // rewritten to be mostly table driven, as the callback macro style tends to | 172 // rewritten to be mostly table driven, as the callback macro style tends to |
| 172 // very easily cause code bloat. Please be careful in the future when adding | 173 // very easily cause code bloat. Please be careful in the future when adding |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 218 IC_UTIL_LIST(IC_ENTRY) | 219 IC_UTIL_LIST(IC_ENTRY) |
| 219 #undef IC_ENTRY | 220 #undef IC_ENTRY |
| 220 }; // end of ref_table[]. | 221 }; // end of ref_table[]. |
| 221 | 222 |
| 222 for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) { | 223 for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) { |
| 223 AddFromId(ref_table[i].type, ref_table[i].id, ref_table[i].name); | 224 AddFromId(ref_table[i].type, ref_table[i].id, ref_table[i].name); |
| 224 } | 225 } |
| 225 | 226 |
| 226 #ifdef ENABLE_DEBUGGER_SUPPORT | 227 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 227 // Debug addresses | 228 // Debug addresses |
| 228 Add(Debug_Address(Debug::k_after_break_target_address).address(), | 229 Add(Debug_Address(Debug::k_after_break_target_address).address(isolate), |
| 229 DEBUG_ADDRESS, | 230 DEBUG_ADDRESS, |
| 230 Debug::k_after_break_target_address << kDebugIdShift, | 231 Debug::k_after_break_target_address << kDebugIdShift, |
| 231 "Debug::after_break_target_address()"); | 232 "Debug::after_break_target_address()"); |
| 232 Add(Debug_Address(Debug::k_debug_break_slot_address).address(), | 233 Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate), |
| 233 DEBUG_ADDRESS, | 234 DEBUG_ADDRESS, |
| 234 Debug::k_debug_break_slot_address << kDebugIdShift, | 235 Debug::k_debug_break_slot_address << kDebugIdShift, |
| 235 "Debug::debug_break_slot_address()"); | 236 "Debug::debug_break_slot_address()"); |
| 236 Add(Debug_Address(Debug::k_debug_break_return_address).address(), | 237 Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate), |
| 237 DEBUG_ADDRESS, | 238 DEBUG_ADDRESS, |
| 238 Debug::k_debug_break_return_address << kDebugIdShift, | 239 Debug::k_debug_break_return_address << kDebugIdShift, |
| 239 "Debug::debug_break_return_address()"); | 240 "Debug::debug_break_return_address()"); |
| 240 Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(), | 241 Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate), |
| 241 DEBUG_ADDRESS, | 242 DEBUG_ADDRESS, |
| 242 Debug::k_restarter_frame_function_pointer << kDebugIdShift, | 243 Debug::k_restarter_frame_function_pointer << kDebugIdShift, |
| 243 "Debug::restarter_frame_function_pointer_address()"); | 244 "Debug::restarter_frame_function_pointer_address()"); |
| 244 #endif | 245 #endif |
| 245 | 246 |
| 246 // Stat counters | 247 // Stat counters |
| 247 struct StatsRefTableEntry { | 248 struct StatsRefTableEntry { |
| 248 StatsCounter* counter; | 249 StatsCounter* (Counters::*counter)(); |
| 249 uint16_t id; | 250 uint16_t id; |
| 250 const char* name; | 251 const char* name; |
| 251 }; | 252 }; |
| 252 | 253 |
| 253 static const StatsRefTableEntry stats_ref_table[] = { | 254 const StatsRefTableEntry stats_ref_table[] = { |
| 254 #define COUNTER_ENTRY(name, caption) \ | 255 #define COUNTER_ENTRY(name, caption) \ |
| 255 { &Counters::name, \ | 256 { &Counters::name, \ |
| 256 Counters::k_##name, \ | 257 Counters::k_##name, \ |
| 257 "Counters::" #name }, | 258 "Counters::" #name }, |
| 258 | 259 |
| 259 STATS_COUNTER_LIST_1(COUNTER_ENTRY) | 260 STATS_COUNTER_LIST_1(COUNTER_ENTRY) |
| 260 STATS_COUNTER_LIST_2(COUNTER_ENTRY) | 261 STATS_COUNTER_LIST_2(COUNTER_ENTRY) |
| 261 #undef COUNTER_ENTRY | 262 #undef COUNTER_ENTRY |
| 262 }; // end of stats_ref_table[]. | 263 }; // end of stats_ref_table[]. |
| 263 | 264 |
| 265 Counters* counters = isolate->counters(); | |
| 264 for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) { | 266 for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) { |
| 265 Add(reinterpret_cast<Address>( | 267 Add(reinterpret_cast<Address>(GetInternalPointer( |
| 266 GetInternalPointer(stats_ref_table[i].counter)), | 268 (counters->*(stats_ref_table[i].counter))())), |
| 267 STATS_COUNTER, | 269 STATS_COUNTER, |
| 268 stats_ref_table[i].id, | 270 stats_ref_table[i].id, |
| 269 stats_ref_table[i].name); | 271 stats_ref_table[i].name); |
| 270 } | 272 } |
| 271 | 273 |
| 272 // Top addresses | 274 // Top addresses |
| 273 const char* top_address_format = "Top::%s"; | |
| 274 | 275 |
| 275 const char* AddressNames[] = { | 276 const char* AddressNames[] = { |
| 276 #define C(name) #name, | 277 #define C(name) "Isolate::" #name, |
| 277 TOP_ADDRESS_LIST(C) | 278 ISOLATE_ADDRESS_LIST(C) |
| 278 TOP_ADDRESS_LIST_PROF(C) | 279 ISOLATE_ADDRESS_LIST_PROF(C) |
| 279 NULL | 280 NULL |
| 280 #undef C | 281 #undef C |
| 281 }; | 282 }; |
| 282 | 283 |
| 283 int top_format_length = StrLength(top_address_format) - 2; | 284 for (uint16_t i = 0; i < Isolate::k_isolate_address_count; ++i) { |
| 284 for (uint16_t i = 0; i < Top::k_top_address_count; ++i) { | 285 Add(isolate->get_address_from_id((Isolate::AddressId)i), |
| 285 const char* address_name = AddressNames[i]; | 286 TOP_ADDRESS, i, AddressNames[i]); |
| 286 Vector<char> name = | |
| 287 Vector<char>::New(top_format_length + StrLength(address_name) + 1); | |
| 288 const char* chars = name.start(); | |
| 289 OS::SNPrintF(name, top_address_format, address_name); | |
| 290 Add(Top::get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars); | |
| 291 } | 287 } |
| 292 | 288 |
| 293 // Accessors | 289 // Accessors |
| 294 #define ACCESSOR_DESCRIPTOR_DECLARATION(name) \ | 290 #define ACCESSOR_DESCRIPTOR_DECLARATION(name) \ |
| 295 Add((Address)&Accessors::name, \ | 291 Add((Address)&Accessors::name, \ |
| 296 ACCESSOR, \ | 292 ACCESSOR, \ |
| 297 Accessors::k##name, \ | 293 Accessors::k##name, \ |
| 298 "Accessors::" #name); | 294 "Accessors::" #name); |
| 299 | 295 |
| 300 ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION) | 296 ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION) |
| 301 #undef ACCESSOR_DESCRIPTOR_DECLARATION | 297 #undef ACCESSOR_DESCRIPTOR_DECLARATION |
| 302 | 298 |
| 299 StubCache* stub_cache = isolate->stub_cache(); | |
| 300 | |
| 303 // Stub cache tables | 301 // Stub cache tables |
| 304 Add(SCTableReference::keyReference(StubCache::kPrimary).address(), | 302 Add(stub_cache->key_reference(StubCache::kPrimary).address(), |
| 305 STUB_CACHE_TABLE, | 303 STUB_CACHE_TABLE, |
| 306 1, | 304 1, |
| 307 "StubCache::primary_->key"); | 305 "StubCache::primary_->key"); |
| 308 Add(SCTableReference::valueReference(StubCache::kPrimary).address(), | 306 Add(stub_cache->value_reference(StubCache::kPrimary).address(), |
| 309 STUB_CACHE_TABLE, | 307 STUB_CACHE_TABLE, |
| 310 2, | 308 2, |
| 311 "StubCache::primary_->value"); | 309 "StubCache::primary_->value"); |
| 312 Add(SCTableReference::keyReference(StubCache::kSecondary).address(), | 310 Add(stub_cache->key_reference(StubCache::kSecondary).address(), |
| 313 STUB_CACHE_TABLE, | 311 STUB_CACHE_TABLE, |
| 314 3, | 312 3, |
| 315 "StubCache::secondary_->key"); | 313 "StubCache::secondary_->key"); |
| 316 Add(SCTableReference::valueReference(StubCache::kSecondary).address(), | 314 Add(stub_cache->value_reference(StubCache::kSecondary).address(), |
| 317 STUB_CACHE_TABLE, | 315 STUB_CACHE_TABLE, |
| 318 4, | 316 4, |
| 319 "StubCache::secondary_->value"); | 317 "StubCache::secondary_->value"); |
| 320 | 318 |
| 321 // Runtime entries | 319 // Runtime entries |
| 322 Add(ExternalReference::perform_gc_function().address(), | 320 Add(ExternalReference::perform_gc_function().address(), |
| 323 RUNTIME_ENTRY, | 321 RUNTIME_ENTRY, |
| 324 1, | 322 1, |
| 325 "Runtime::PerformGC"); | 323 "Runtime::PerformGC"); |
| 326 Add(ExternalReference::fill_heap_number_with_random_function().address(), | 324 Add(ExternalReference::fill_heap_number_with_random_function().address(), |
| 327 RUNTIME_ENTRY, | 325 RUNTIME_ENTRY, |
| 328 2, | 326 2, |
| 329 "V8::FillHeapNumberWithRandom"); | 327 "V8::FillHeapNumberWithRandom"); |
| 330 Add(ExternalReference::random_uint32_function().address(), | 328 Add(ExternalReference::random_uint32_function().address(), |
| 331 RUNTIME_ENTRY, | 329 RUNTIME_ENTRY, |
| 332 3, | 330 3, |
| 333 "V8::Random"); | 331 "V8::Random"); |
| 334 Add(ExternalReference::delete_handle_scope_extensions().address(), | 332 Add(ExternalReference::delete_handle_scope_extensions().address(), |
| 335 RUNTIME_ENTRY, | 333 RUNTIME_ENTRY, |
| 336 4, | 334 4, |
| 337 "HandleScope::DeleteExtensions"); | 335 "HandleScope::DeleteExtensions"); |
| 338 Add(ExternalReference::incremental_marking_record_write_function().address(), | 336 Add(ExternalReference::incremental_marking_record_write_function().address(), |
| 339 RUNTIME_ENTRY, | 337 RUNTIME_ENTRY, |
| 340 5, | 338 5, |
| 341 "IncrementalMarking::RecordWrite"); | 339 "IncrementalMarking::RecordWrite"); |
| 340 Add(ExternalReference::store_buffer_overflow_function().address(), | |
| 341 RUNTIME_ENTRY, | |
| 342 6, | |
| 343 "StoreBuffer::StoreBufferOverflow"); | |
| 342 | 344 |
| 343 | 345 |
| 344 // Miscellaneous | 346 // Miscellaneous |
| 345 Add(ExternalReference::the_hole_value_location().address(), | 347 Add(ExternalReference::the_hole_value_location().address(), |
| 346 UNCLASSIFIED, | 348 UNCLASSIFIED, |
| 347 2, | 349 2, |
| 348 "Factory::the_hole_value().location()"); | 350 "Factory::the_hole_value().location()"); |
| 349 Add(ExternalReference::roots_address().address(), | 351 Add(ExternalReference::roots_address().address(), |
| 350 UNCLASSIFIED, | 352 UNCLASSIFIED, |
| 351 3, | 353 3, |
| (...skipping 130 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 482 34, | 484 34, |
| 483 "Deoptimizer::ComputeOutputFrames()"); | 485 "Deoptimizer::ComputeOutputFrames()"); |
| 484 Add(ExternalReference::address_of_min_int().address(), | 486 Add(ExternalReference::address_of_min_int().address(), |
| 485 UNCLASSIFIED, | 487 UNCLASSIFIED, |
| 486 35, | 488 35, |
| 487 "LDoubleConstant::min_int"); | 489 "LDoubleConstant::min_int"); |
| 488 Add(ExternalReference::address_of_one_half().address(), | 490 Add(ExternalReference::address_of_one_half().address(), |
| 489 UNCLASSIFIED, | 491 UNCLASSIFIED, |
| 490 36, | 492 36, |
| 491 "LDoubleConstant::one_half"); | 493 "LDoubleConstant::one_half"); |
| 494 Add(ExternalReference::isolate_address().address(), | |
| 495 UNCLASSIFIED, | |
| 496 37, | |
| 497 "isolate"); | |
| 492 Add(ExternalReference::address_of_minus_zero().address(), | 498 Add(ExternalReference::address_of_minus_zero().address(), |
| 493 UNCLASSIFIED, | 499 UNCLASSIFIED, |
| 494 37, | 500 38, |
| 495 "LDoubleConstant::minus_zero"); | 501 "LDoubleConstant::minus_zero"); |
| 496 Add(ExternalReference::address_of_negative_infinity().address(), | 502 Add(ExternalReference::address_of_negative_infinity().address(), |
| 497 UNCLASSIFIED, | 503 UNCLASSIFIED, |
| 498 38, | 504 39, |
| 499 "LDoubleConstant::negative_infinity"); | 505 "LDoubleConstant::negative_infinity"); |
| 500 Add(ExternalReference::power_double_double_function().address(), | 506 Add(ExternalReference::power_double_double_function().address(), |
| 501 UNCLASSIFIED, | 507 UNCLASSIFIED, |
| 502 39, | 508 40, |
| 503 "power_double_double_function"); | 509 "power_double_double_function"); |
| 504 Add(ExternalReference::power_double_int_function().address(), | 510 Add(ExternalReference::power_double_int_function().address(), |
| 505 UNCLASSIFIED, | 511 UNCLASSIFIED, |
| 506 40, | 512 41, |
| 507 "power_double_int_function"); | 513 "power_double_int_function"); |
| 508 Add(ExternalReference::store_buffer_top().address(), | 514 Add(ExternalReference::store_buffer_top().address(), |
| 509 UNCLASSIFIED, | 515 UNCLASSIFIED, |
| 510 41, | 516 41, |
|
Erik Corry
2011/04/20 20:07:40
oooops
Vyacheslav Egorov (Chromium)
2011/04/24 11:24:08
Done.
| |
| 511 "store_buffer_top"); | 517 "store_buffer_top"); |
| 512 Add(ExternalReference::arguments_marker_location().address(), | 518 Add(ExternalReference::arguments_marker_location().address(), |
| 513 UNCLASSIFIED, | 519 UNCLASSIFIED, |
| 514 42, | 520 42, |
| 515 "Factory::arguments_marker().location()"); | 521 "Factory::arguments_marker().location()"); |
| 516 } | 522 } |
| 517 | 523 |
| 518 | 524 |
| 519 ExternalReferenceEncoder::ExternalReferenceEncoder() | 525 ExternalReferenceEncoder::ExternalReferenceEncoder() |
| 520 : encodings_(Match) { | 526 : encodings_(Match), |
| 527 isolate_(Isolate::Current()) { | |
| 521 ExternalReferenceTable* external_references = | 528 ExternalReferenceTable* external_references = |
| 522 ExternalReferenceTable::instance(); | 529 ExternalReferenceTable::instance(isolate_); |
| 523 for (int i = 0; i < external_references->size(); ++i) { | 530 for (int i = 0; i < external_references->size(); ++i) { |
| 524 Put(external_references->address(i), i); | 531 Put(external_references->address(i), i); |
| 525 } | 532 } |
| 526 } | 533 } |
| 527 | 534 |
| 528 | 535 |
| 529 uint32_t ExternalReferenceEncoder::Encode(Address key) const { | 536 uint32_t ExternalReferenceEncoder::Encode(Address key) const { |
| 530 int index = IndexOf(key); | 537 int index = IndexOf(key); |
| 531 ASSERT(key == NULL || index >= 0); | 538 ASSERT(key == NULL || index >= 0); |
| 532 return index >=0 ? ExternalReferenceTable::instance()->code(index) : 0; | 539 return index >=0 ? |
| 540 ExternalReferenceTable::instance(isolate_)->code(index) : 0; | |
| 533 } | 541 } |
| 534 | 542 |
| 535 | 543 |
| 536 const char* ExternalReferenceEncoder::NameOfAddress(Address key) const { | 544 const char* ExternalReferenceEncoder::NameOfAddress(Address key) const { |
| 537 int index = IndexOf(key); | 545 int index = IndexOf(key); |
| 538 return index >=0 ? ExternalReferenceTable::instance()->name(index) : NULL; | 546 return index >= 0 ? |
| 547 ExternalReferenceTable::instance(isolate_)->name(index) : NULL; | |
| 539 } | 548 } |
| 540 | 549 |
| 541 | 550 |
| 542 int ExternalReferenceEncoder::IndexOf(Address key) const { | 551 int ExternalReferenceEncoder::IndexOf(Address key) const { |
| 543 if (key == NULL) return -1; | 552 if (key == NULL) return -1; |
| 544 HashMap::Entry* entry = | 553 HashMap::Entry* entry = |
| 545 const_cast<HashMap &>(encodings_).Lookup(key, Hash(key), false); | 554 const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false); |
| 546 return entry == NULL | 555 return entry == NULL |
| 547 ? -1 | 556 ? -1 |
| 548 : static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); | 557 : static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); |
| 549 } | 558 } |
| 550 | 559 |
| 551 | 560 |
| 552 void ExternalReferenceEncoder::Put(Address key, int index) { | 561 void ExternalReferenceEncoder::Put(Address key, int index) { |
| 553 HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true); | 562 HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true); |
| 554 entry->value = reinterpret_cast<void*>(index); | 563 entry->value = reinterpret_cast<void*>(index); |
| 555 } | 564 } |
| 556 | 565 |
| 557 | 566 |
| 558 ExternalReferenceDecoder::ExternalReferenceDecoder() | 567 ExternalReferenceDecoder::ExternalReferenceDecoder() |
| 559 : encodings_(NewArray<Address*>(kTypeCodeCount)) { | 568 : encodings_(NewArray<Address*>(kTypeCodeCount)), |
| 569 isolate_(Isolate::Current()) { | |
| 560 ExternalReferenceTable* external_references = | 570 ExternalReferenceTable* external_references = |
| 561 ExternalReferenceTable::instance(); | 571 ExternalReferenceTable::instance(isolate_); |
| 562 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) { | 572 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) { |
| 563 int max = external_references->max_id(type) + 1; | 573 int max = external_references->max_id(type) + 1; |
| 564 encodings_[type] = NewArray<Address>(max + 1); | 574 encodings_[type] = NewArray<Address>(max + 1); |
| 565 } | 575 } |
| 566 for (int i = 0; i < external_references->size(); ++i) { | 576 for (int i = 0; i < external_references->size(); ++i) { |
| 567 Put(external_references->code(i), external_references->address(i)); | 577 Put(external_references->code(i), external_references->address(i)); |
| 568 } | 578 } |
| 569 } | 579 } |
| 570 | 580 |
| 571 | 581 |
| 572 ExternalReferenceDecoder::~ExternalReferenceDecoder() { | 582 ExternalReferenceDecoder::~ExternalReferenceDecoder() { |
| 573 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) { | 583 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) { |
| 574 DeleteArray(encodings_[type]); | 584 DeleteArray(encodings_[type]); |
| 575 } | 585 } |
| 576 DeleteArray(encodings_); | 586 DeleteArray(encodings_); |
| 577 } | 587 } |
| 578 | 588 |
| 579 | 589 |
| 580 bool Serializer::serialization_enabled_ = false; | 590 bool Serializer::serialization_enabled_ = false; |
| 581 bool Serializer::too_late_to_enable_now_ = false; | 591 bool Serializer::too_late_to_enable_now_ = false; |
| 582 ExternalReferenceDecoder* Deserializer::external_reference_decoder_ = NULL; | |
| 583 | 592 |
| 584 | 593 |
| 585 Deserializer::Deserializer(SnapshotByteSource* source) : source_(source) { | 594 Deserializer::Deserializer(SnapshotByteSource* source) |
| 595 : isolate_(NULL), | |
| 596 source_(source), | |
| 597 external_reference_decoder_(NULL) { | |
| 586 } | 598 } |
| 587 | 599 |
| 588 | 600 |
| 589 // This routine both allocates a new object, and also keeps | 601 // This routine both allocates a new object, and also keeps |
| 590 // track of where objects have been allocated so that we can | 602 // track of where objects have been allocated so that we can |
| 591 // fix back references when deserializing. | 603 // fix back references when deserializing. |
| 592 Address Deserializer::Allocate(int space_index, Space* space, int size) { | 604 Address Deserializer::Allocate(int space_index, Space* space, int size) { |
| 593 Address address; | 605 Address address; |
| 594 if (!SpaceIsLarge(space_index)) { | 606 if (!SpaceIsLarge(space_index)) { |
| 595 ASSERT(!SpaceIsPaged(space_index) || | 607 ASSERT(!SpaceIsPaged(space_index) || |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 654 } | 666 } |
| 655 ASSERT(SpaceIsPaged(space)); | 667 ASSERT(SpaceIsPaged(space)); |
| 656 int page_of_pointee = offset >> kPageSizeBits; | 668 int page_of_pointee = offset >> kPageSizeBits; |
| 657 Address object_address = pages_[space][page_of_pointee] + | 669 Address object_address = pages_[space][page_of_pointee] + |
| 658 (offset & Page::kPageAlignmentMask); | 670 (offset & Page::kPageAlignmentMask); |
| 659 return HeapObject::FromAddress(object_address); | 671 return HeapObject::FromAddress(object_address); |
| 660 } | 672 } |
| 661 | 673 |
| 662 | 674 |
| 663 void Deserializer::Deserialize() { | 675 void Deserializer::Deserialize() { |
| 676 isolate_ = Isolate::Current(); | |
| 664 // Don't GC while deserializing - just expand the heap. | 677 // Don't GC while deserializing - just expand the heap. |
| 665 Address* store_buffer_top = | 678 Address* store_buffer_top = |
| 666 reinterpret_cast<Address*>(Heap::store_buffer_top()); | 679 reinterpret_cast<Address*>(isolate_->heap()->store_buffer_top()); |
| 667 AlwaysAllocateScope always_allocate; | 680 AlwaysAllocateScope always_allocate; |
| 668 // Don't use the free lists while deserializing. | 681 // Don't use the free lists while deserializing. |
| 669 LinearAllocationScope allocate_linearly; | 682 LinearAllocationScope allocate_linearly; |
| 670 // No active threads. | 683 // No active threads. |
| 671 ASSERT_EQ(NULL, ThreadState::FirstInUse()); | 684 ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse()); |
| 672 // No active handles. | 685 // No active handles. |
| 673 ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty()); | 686 ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty()); |
| 674 // Make sure the entire partial snapshot cache is traversed, filling it with | 687 // Make sure the entire partial snapshot cache is traversed, filling it with |
| 675 // valid object pointers. | 688 // valid object pointers. |
| 676 partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity; | 689 isolate_->set_serialize_partial_snapshot_cache_length( |
| 690 Isolate::kPartialSnapshotCacheCapacity); | |
| 677 ASSERT_EQ(NULL, external_reference_decoder_); | 691 ASSERT_EQ(NULL, external_reference_decoder_); |
| 678 external_reference_decoder_ = new ExternalReferenceDecoder(); | 692 external_reference_decoder_ = new ExternalReferenceDecoder(); |
| 679 Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG); | 693 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG); |
| 680 Heap::IterateWeakRoots(this, VISIT_ALL); | 694 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL); |
| 681 | 695 |
| 682 Heap::set_global_contexts_list(Heap::undefined_value()); | 696 isolate_->heap()->set_global_contexts_list( |
| 683 Heap::public_set_store_buffer_top(store_buffer_top); | 697 isolate_->heap()->undefined_value()); |
| 698 isolate_->heap()->public_set_store_buffer_top(store_buffer_top); | |
| 684 } | 699 } |
| 685 | 700 |
| 686 | 701 |
| 687 void Deserializer::DeserializePartial(Object** root) { | 702 void Deserializer::DeserializePartial(Object** root) { |
| 703 isolate_ = Isolate::Current(); | |
| 688 // Don't GC while deserializing - just expand the heap. | 704 // Don't GC while deserializing - just expand the heap. |
| 689 AlwaysAllocateScope always_allocate; | 705 AlwaysAllocateScope always_allocate; |
| 690 // Don't use the free lists while deserializing. | 706 // Don't use the free lists while deserializing. |
| 691 LinearAllocationScope allocate_linearly; | 707 LinearAllocationScope allocate_linearly; |
| 692 if (external_reference_decoder_ == NULL) { | 708 if (external_reference_decoder_ == NULL) { |
| 693 external_reference_decoder_ = new ExternalReferenceDecoder(); | 709 external_reference_decoder_ = new ExternalReferenceDecoder(); |
| 694 } | 710 } |
| 695 VisitPointer(root); | 711 VisitPointer(root); |
| 696 } | 712 } |
| 697 | 713 |
| 698 | 714 |
| 699 Deserializer::~Deserializer() { | 715 Deserializer::~Deserializer() { |
| 700 ASSERT(source_->AtEOF()); | 716 ASSERT(source_->AtEOF()); |
| 701 if (external_reference_decoder_ != NULL) { | 717 if (external_reference_decoder_) { |
| 702 delete external_reference_decoder_; | 718 delete external_reference_decoder_; |
| 703 external_reference_decoder_ = NULL; | 719 external_reference_decoder_ = NULL; |
| 704 } | 720 } |
| 705 } | 721 } |
| 706 | 722 |
| 707 | 723 |
| 708 // This is called on the roots. It is the driver of the deserialization | 724 // This is called on the roots. It is the driver of the deserialization |
| 709 // process. It is also called on the body of each function. | 725 // process. It is also called on the body of each function. |
| 710 void Deserializer::VisitPointers(Object** start, Object** end) { | 726 void Deserializer::VisitPointers(Object** start, Object** end) { |
| 711 // The space must be new space. Any other space would cause ReadChunk to try | 727 // The space must be new space. Any other space would cause ReadChunk to try |
| 712 // to update the remembered set using NULL as the address. | 728 // to update the remembered set using NULL as the address. |
| 713 ReadChunk(start, end, NEW_SPACE, NULL); | 729 ReadChunk(start, end, NEW_SPACE, NULL); |
| 714 } | 730 } |
| 715 | 731 |
| 716 | 732 |
| 717 // This routine writes the new object into the pointer provided and then | 733 // This routine writes the new object into the pointer provided and then |
| 718 // returns true if the new object was in young space and false otherwise. | 734 // returns true if the new object was in young space and false otherwise. |
| 719 // The reason for this strange interface is that otherwise the object is | 735 // The reason for this strange interface is that otherwise the object is |
| 720 // written very late, which means the FreeSpace map is not set up by the | 736 // written very late, which means the FreeSpace map is not set up by the |
| 721 // time we need to use it to mark the space at the end of a page free. | 737 // time we need to use it to mark the space at the end of a page free. |
| 722 void Deserializer::ReadObject(int space_number, | 738 void Deserializer::ReadObject(int space_number, |
| 723 Space* space, | 739 Space* space, |
| 724 Object** write_back) { | 740 Object** write_back) { |
| 725 int size = source_->GetInt() << kObjectAlignmentBits; | 741 int size = source_->GetInt() << kObjectAlignmentBits; |
| 726 Address address = Allocate(space_number, space, size); | 742 Address address = Allocate(space_number, space, size); |
| 727 *write_back = HeapObject::FromAddress(address); | 743 *write_back = HeapObject::FromAddress(address); |
| 728 Object** current = reinterpret_cast<Object**>(address); | 744 Object** current = reinterpret_cast<Object**>(address); |
| 729 Object** limit = current + (size >> kPointerSizeLog2); | 745 Object** limit = current + (size >> kPointerSizeLog2); |
| 730 if (FLAG_log_snapshot_positions) { | 746 if (FLAG_log_snapshot_positions) { |
| 731 LOG(SnapshotPositionEvent(address, source_->position())); | 747 LOG(isolate_, SnapshotPositionEvent(address, source_->position())); |
| 732 } | 748 } |
| 733 ReadChunk(current, limit, space_number, address); | 749 ReadChunk(current, limit, space_number, address); |
| 734 #ifdef DEBUG | 750 #ifdef DEBUG |
| 735 bool is_codespace = (space == Heap::code_space()) || | 751 bool is_codespace = (space == HEAP->code_space()) || |
| 736 ((space == Heap::lo_space()) && (space_number == kLargeCode)); | 752 ((space == HEAP->lo_space()) && (space_number == kLargeCode)); |
| 737 ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace); | 753 ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace); |
| 738 #endif | 754 #endif |
| 739 } | 755 } |
| 740 | 756 |
| 741 | 757 |
| 742 // This macro is always used with a constant argument so it should all fold | 758 // This macro is always used with a constant argument so it should all fold |
| 743 // away to almost nothing in the generated code. It might be nicer to do this | 759 // away to almost nothing in the generated code. It might be nicer to do this |
| 744 // with the ternary operator but there are type issues with that. | 760 // with the ternary operator but there are type issues with that. |
| 745 #define ASSIGN_DEST_SPACE(space_number) \ | 761 #define ASSIGN_DEST_SPACE(space_number) \ |
| 746 Space* dest_space; \ | 762 Space* dest_space; \ |
| 747 if (space_number == NEW_SPACE) { \ | 763 if (space_number == NEW_SPACE) { \ |
| 748 dest_space = Heap::new_space(); \ | 764 dest_space = isolate->heap()->new_space(); \ |
| 749 } else if (space_number == OLD_POINTER_SPACE) { \ | 765 } else if (space_number == OLD_POINTER_SPACE) { \ |
| 750 dest_space = Heap::old_pointer_space(); \ | 766 dest_space = isolate->heap()->old_pointer_space(); \ |
| 751 } else if (space_number == OLD_DATA_SPACE) { \ | 767 } else if (space_number == OLD_DATA_SPACE) { \ |
| 752 dest_space = Heap::old_data_space(); \ | 768 dest_space = isolate->heap()->old_data_space(); \ |
| 753 } else if (space_number == CODE_SPACE) { \ | 769 } else if (space_number == CODE_SPACE) { \ |
| 754 dest_space = Heap::code_space(); \ | 770 dest_space = isolate->heap()->code_space(); \ |
| 755 } else if (space_number == MAP_SPACE) { \ | 771 } else if (space_number == MAP_SPACE) { \ |
| 756 dest_space = Heap::map_space(); \ | 772 dest_space = isolate->heap()->map_space(); \ |
| 757 } else if (space_number == CELL_SPACE) { \ | 773 } else if (space_number == CELL_SPACE) { \ |
| 758 dest_space = Heap::cell_space(); \ | 774 dest_space = isolate->heap()->cell_space(); \ |
| 759 } else { \ | 775 } else { \ |
| 760 ASSERT(space_number >= LO_SPACE); \ | 776 ASSERT(space_number >= LO_SPACE); \ |
| 761 dest_space = Heap::lo_space(); \ | 777 dest_space = isolate->heap()->lo_space(); \ |
| 762 } | 778 } |
| 763 | 779 |
| 764 | 780 |
| 765 static const int kUnknownOffsetFromStart = -1; | 781 static const int kUnknownOffsetFromStart = -1; |
| 766 | 782 |
| 767 | 783 |
| 768 void Deserializer::ReadChunk(Object** current, | 784 void Deserializer::ReadChunk(Object** current, |
| 769 Object** limit, | 785 Object** limit, |
| 770 int source_space, | 786 int source_space, |
| 771 Address address) { | 787 Address address) { |
| 788 Isolate* const isolate = isolate_; | |
| 772 while (current < limit) { | 789 while (current < limit) { |
| 773 int data = source_->Get(); | 790 int data = source_->Get(); |
| 774 switch (data) { | 791 switch (data) { |
| 775 #define CASE_STATEMENT(where, how, within, space_number) \ | 792 #define CASE_STATEMENT(where, how, within, space_number) \ |
| 776 case where + how + within + space_number: \ | 793 case where + how + within + space_number: \ |
| 777 ASSERT((where & ~kPointedToMask) == 0); \ | 794 ASSERT((where & ~kPointedToMask) == 0); \ |
| 778 ASSERT((how & ~kHowToCodeMask) == 0); \ | 795 ASSERT((how & ~kHowToCodeMask) == 0); \ |
| 779 ASSERT((within & ~kWhereToPointMask) == 0); \ | 796 ASSERT((within & ~kWhereToPointMask) == 0); \ |
| 780 ASSERT((space_number & ~kSpaceMask) == 0); | 797 ASSERT((space_number & ~kSpaceMask) == 0); |
| 781 | 798 |
| 782 #define CASE_BODY(where, how, within, space_number_if_any, offset_from_start) \ | 799 #define CASE_BODY(where, how, within, space_number_if_any, offset_from_start) \ |
| 783 { \ | 800 { \ |
| 784 bool emit_write_barrier = false; \ | 801 bool emit_write_barrier = false; \ |
| 785 bool current_was_incremented = false; \ | 802 bool current_was_incremented = false; \ |
| 786 int space_number = space_number_if_any == kAnyOldSpace ? \ | 803 int space_number = space_number_if_any == kAnyOldSpace ? \ |
| 787 (data & kSpaceMask) : space_number_if_any; \ | 804 (data & kSpaceMask) : space_number_if_any; \ |
| 788 if (where == kNewObject && how == kPlain && within == kStartOfObject) {\ | 805 if (where == kNewObject && how == kPlain && within == kStartOfObject) {\ |
| 789 ASSIGN_DEST_SPACE(space_number) \ | 806 ASSIGN_DEST_SPACE(space_number) \ |
| 790 ReadObject(space_number, dest_space, current); \ | 807 ReadObject(space_number, dest_space, current); \ |
| 791 emit_write_barrier = \ | 808 emit_write_barrier = \ |
| 792 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ | 809 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ |
| 793 } else { \ | 810 } else { \ |
| 794 Object* new_object = NULL; /* May not be a real Object pointer. */ \ | 811 Object* new_object = NULL; /* May not be a real Object pointer. */ \ |
| 795 if (where == kNewObject) { \ | 812 if (where == kNewObject) { \ |
| 796 ASSIGN_DEST_SPACE(space_number) \ | 813 ASSIGN_DEST_SPACE(space_number) \ |
| 797 ReadObject(space_number, dest_space, &new_object); \ | 814 ReadObject(space_number, dest_space, &new_object); \ |
| 798 } else if (where == kRootArray) { \ | 815 } else if (where == kRootArray) { \ |
| 799 int root_id = source_->GetInt(); \ | 816 int root_id = source_->GetInt(); \ |
| 800 new_object = Heap::roots_address()[root_id]; \ | 817 new_object = isolate->heap()->roots_address()[root_id]; \ |
| 801 } else if (where == kPartialSnapshotCache) { \ | 818 } else if (where == kPartialSnapshotCache) { \ |
| 802 int cache_index = source_->GetInt(); \ | 819 int cache_index = source_->GetInt(); \ |
| 803 new_object = partial_snapshot_cache_[cache_index]; \ | 820 new_object = isolate->serialize_partial_snapshot_cache() \ |
| 821 [cache_index]; \ | |
| 804 } else if (where == kExternalReference) { \ | 822 } else if (where == kExternalReference) { \ |
| 805 int reference_id = source_->GetInt(); \ | 823 int reference_id = source_->GetInt(); \ |
| 806 Address address = \ | 824 Address address = external_reference_decoder_-> \ |
| 807 external_reference_decoder_->Decode(reference_id); \ | 825 Decode(reference_id); \ |
| 808 new_object = reinterpret_cast<Object*>(address); \ | 826 new_object = reinterpret_cast<Object*>(address); \ |
| 809 } else if (where == kBackref) { \ | 827 } else if (where == kBackref) { \ |
| 810 emit_write_barrier = \ | 828 emit_write_barrier = \ |
| 811 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ | 829 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ |
| 812 new_object = GetAddressFromEnd(data & kSpaceMask); \ | 830 new_object = GetAddressFromEnd(data & kSpaceMask); \ |
| 813 } else { \ | 831 } else { \ |
| 814 ASSERT(where == kFromStart); \ | 832 ASSERT(where == kFromStart); \ |
| 815 if (offset_from_start == kUnknownOffsetFromStart) { \ | 833 if (offset_from_start == kUnknownOffsetFromStart) { \ |
| 816 emit_write_barrier = \ | 834 emit_write_barrier = \ |
| 817 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ | 835 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ |
| (...skipping 17 matching lines...) Expand all Loading... | |
| 835 if (within == kFirstInstruction) { \ | 853 if (within == kFirstInstruction) { \ |
| 836 location_of_branch_data += Assembler::kCallTargetSize; \ | 854 location_of_branch_data += Assembler::kCallTargetSize; \ |
| 837 current = reinterpret_cast<Object**>(location_of_branch_data); \ | 855 current = reinterpret_cast<Object**>(location_of_branch_data); \ |
| 838 current_was_incremented = true; \ | 856 current_was_incremented = true; \ |
| 839 } \ | 857 } \ |
| 840 } else { \ | 858 } else { \ |
| 841 *current = new_object; \ | 859 *current = new_object; \ |
| 842 } \ | 860 } \ |
| 843 } \ | 861 } \ |
| 844 if (emit_write_barrier) { \ | 862 if (emit_write_barrier) { \ |
| 845 Heap::RecordWrite(address, static_cast<int>( \ | 863 isolate->heap()->RecordWrite(address, static_cast<int>( \ |
| 846 reinterpret_cast<Address>(current) - address)); \ | 864 reinterpret_cast<Address>(current) - address)); \ |
| 847 } \ | 865 } \ |
| 848 if (!current_was_incremented) { \ | 866 if (!current_was_incremented) { \ |
| 849 current++; /* Increment current if it wasn't done above. */ \ | 867 current++; /* Increment current if it wasn't done above. */ \ |
| 850 } \ | 868 } \ |
| 851 break; \ | 869 break; \ |
| 852 } \ | 870 } \ |
| 853 | 871 |
| 854 // This generates a case and a body for each space. The large object spaces are | 872 // This generates a case and a body for each space. The large object spaces are |
| 855 // very rare in snapshots so they are grouped in one body. | 873 // very rare in snapshots so they are grouped in one body. |
| (...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1004 | 1022 |
| 1005 case kSkip: { | 1023 case kSkip: { |
| 1006 current++; | 1024 current++; |
| 1007 break; | 1025 break; |
| 1008 } | 1026 } |
| 1009 | 1027 |
| 1010 case kNativesStringResource: { | 1028 case kNativesStringResource: { |
| 1011 int index = source_->Get(); | 1029 int index = source_->Get(); |
| 1012 Vector<const char> source_vector = Natives::GetScriptSource(index); | 1030 Vector<const char> source_vector = Natives::GetScriptSource(index); |
| 1013 NativesExternalStringResource* resource = | 1031 NativesExternalStringResource* resource = |
| 1014 new NativesExternalStringResource(source_vector.start()); | 1032 new NativesExternalStringResource( |
| 1033 isolate->bootstrapper(), source_vector.start()); | |
| 1015 *current++ = reinterpret_cast<Object*>(resource); | 1034 *current++ = reinterpret_cast<Object*>(resource); |
| 1016 break; | 1035 break; |
| 1017 } | 1036 } |
| 1018 | 1037 |
| 1019 case kSynchronize: { | 1038 case kSynchronize: { |
| 1020 // If we get here then that indicates that you have a mismatch between | 1039 // If we get here then that indicates that you have a mismatch between |
| 1021 // the number of GC roots when serializing and deserializing. | 1040 // the number of GC roots when serializing and deserializing. |
| 1022 UNREACHABLE(); | 1041 UNREACHABLE(); |
| 1023 } | 1042 } |
| 1024 | 1043 |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1069 } while (character != 0); | 1088 } while (character != 0); |
| 1070 } | 1089 } |
| 1071 | 1090 |
| 1072 #endif | 1091 #endif |
| 1073 | 1092 |
| 1074 Serializer::Serializer(SnapshotByteSink* sink) | 1093 Serializer::Serializer(SnapshotByteSink* sink) |
| 1075 : sink_(sink), | 1094 : sink_(sink), |
| 1076 current_root_index_(0), | 1095 current_root_index_(0), |
| 1077 external_reference_encoder_(new ExternalReferenceEncoder), | 1096 external_reference_encoder_(new ExternalReferenceEncoder), |
| 1078 large_object_total_(0) { | 1097 large_object_total_(0) { |
| 1098 // The serializer is meant to be used only to generate initial heap images | |
| 1099 // from a context in which there is only one isolate. | |
| 1100 ASSERT(Isolate::Current()->IsDefaultIsolate()); | |
| 1079 for (int i = 0; i <= LAST_SPACE; i++) { | 1101 for (int i = 0; i <= LAST_SPACE; i++) { |
| 1080 fullness_[i] = 0; | 1102 fullness_[i] = 0; |
| 1081 } | 1103 } |
| 1082 } | 1104 } |
| 1083 | 1105 |
| 1084 | 1106 |
| 1085 Serializer::~Serializer() { | 1107 Serializer::~Serializer() { |
| 1086 delete external_reference_encoder_; | 1108 delete external_reference_encoder_; |
| 1087 } | 1109 } |
| 1088 | 1110 |
| 1089 | 1111 |
| 1090 void StartupSerializer::SerializeStrongReferences() { | 1112 void StartupSerializer::SerializeStrongReferences() { |
| 1113 Isolate* isolate = Isolate::Current(); | |
| 1091 // No active threads. | 1114 // No active threads. |
| 1092 CHECK_EQ(NULL, ThreadState::FirstInUse()); | 1115 CHECK_EQ(NULL, Isolate::Current()->thread_manager()->FirstThreadStateInUse()); |
| 1093 // No active or weak handles. | 1116 // No active or weak handles. |
| 1094 CHECK(HandleScopeImplementer::instance()->blocks()->is_empty()); | 1117 CHECK(isolate->handle_scope_implementer()->blocks()->is_empty()); |
| 1095 CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles()); | 1118 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles()); |
| 1096 // We don't support serializing installed extensions. | 1119 // We don't support serializing installed extensions. |
| 1097 for (RegisteredExtension* ext = RegisteredExtension::first_extension(); | 1120 for (RegisteredExtension* ext = v8::RegisteredExtension::first_extension(); |
| 1098 ext != NULL; | 1121 ext != NULL; |
| 1099 ext = ext->next()) { | 1122 ext = ext->next()) { |
| 1100 CHECK_NE(v8::INSTALLED, ext->state()); | 1123 CHECK_NE(v8::INSTALLED, ext->state()); |
| 1101 } | 1124 } |
| 1102 Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG); | 1125 HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG); |
| 1103 } | 1126 } |
| 1104 | 1127 |
| 1105 | 1128 |
| 1106 void PartialSerializer::Serialize(Object** object) { | 1129 void PartialSerializer::Serialize(Object** object) { |
| 1107 this->VisitPointer(object); | 1130 this->VisitPointer(object); |
| 1131 Isolate* isolate = Isolate::Current(); | |
| 1108 | 1132 |
| 1109 // After we have done the partial serialization the partial snapshot cache | 1133 // After we have done the partial serialization the partial snapshot cache |
| 1110 // will contain some references needed to decode the partial snapshot. We | 1134 // will contain some references needed to decode the partial snapshot. We |
| 1111 // fill it up with undefineds so it has a predictable length so the | 1135 // fill it up with undefineds so it has a predictable length so the |
| 1112 // deserialization code doesn't need to know the length. | 1136 // deserialization code doesn't need to know the length. |
| 1113 for (int index = partial_snapshot_cache_length_; | 1137 for (int index = isolate->serialize_partial_snapshot_cache_length(); |
| 1114 index < kPartialSnapshotCacheCapacity; | 1138 index < Isolate::kPartialSnapshotCacheCapacity; |
| 1115 index++) { | 1139 index++) { |
| 1116 partial_snapshot_cache_[index] = Heap::undefined_value(); | 1140 isolate->serialize_partial_snapshot_cache()[index] = |
| 1117 startup_serializer_->VisitPointer(&partial_snapshot_cache_[index]); | 1141 isolate->heap()->undefined_value(); |
| 1142 startup_serializer_->VisitPointer( | |
| 1143 &isolate->serialize_partial_snapshot_cache()[index]); | |
| 1118 } | 1144 } |
| 1119 partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity; | 1145 isolate->set_serialize_partial_snapshot_cache_length( |
| 1146 Isolate::kPartialSnapshotCacheCapacity); | |
| 1120 } | 1147 } |
| 1121 | 1148 |
| 1122 | 1149 |
| 1123 void Serializer::VisitPointers(Object** start, Object** end) { | 1150 void Serializer::VisitPointers(Object** start, Object** end) { |
| 1151 Isolate* isolate = Isolate::Current(); | |
| 1152 | |
| 1124 for (Object** current = start; current < end; current++) { | 1153 for (Object** current = start; current < end; current++) { |
| 1125 if (reinterpret_cast<Address>(current) == StoreBuffer::TopAddress()) { | 1154 if (reinterpret_cast<Address>(current) == |
| 1155 isolate->heap()->store_buffer()->TopAddress()) { | |
| 1126 sink_->Put(kSkip, "Skip"); | 1156 sink_->Put(kSkip, "Skip"); |
| 1127 } else if ((*current)->IsSmi()) { | 1157 } else if ((*current)->IsSmi()) { |
| 1128 sink_->Put(kRawData, "RawData"); | 1158 sink_->Put(kRawData, "RawData"); |
| 1129 sink_->PutInt(kPointerSize, "length"); | 1159 sink_->PutInt(kPointerSize, "length"); |
| 1130 for (int i = 0; i < kPointerSize; i++) { | 1160 for (int i = 0; i < kPointerSize; i++) { |
| 1131 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); | 1161 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); |
| 1132 } | 1162 } |
| 1133 } else { | 1163 } else { |
| 1134 SerializeObject(*current, kPlain, kStartOfObject); | 1164 SerializeObject(*current, kPlain, kStartOfObject); |
| 1135 } | 1165 } |
| 1136 } | 1166 } |
| 1137 } | 1167 } |
| 1138 | 1168 |
| 1139 | 1169 |
| 1140 Object* SerializerDeserializer::partial_snapshot_cache_[ | |
| 1141 kPartialSnapshotCacheCapacity]; | |
| 1142 int SerializerDeserializer::partial_snapshot_cache_length_ = 0; | |
| 1143 | |
| 1144 | |
| 1145 // This ensures that the partial snapshot cache keeps things alive during GC and | 1170 // This ensures that the partial snapshot cache keeps things alive during GC and |
| 1146 // tracks their movement. When it is called during serialization of the startup | 1171 // tracks their movement. When it is called during serialization of the startup |
| 1147 // snapshot the partial snapshot is empty, so nothing happens. When the partial | 1172 // snapshot the partial snapshot is empty, so nothing happens. When the partial |
| 1148 // (context) snapshot is created, this array is populated with the pointers that | 1173 // (context) snapshot is created, this array is populated with the pointers that |
| 1149 // the partial snapshot will need. As that happens we emit serialized objects to | 1174 // the partial snapshot will need. As that happens we emit serialized objects to |
| 1150 // the startup snapshot that correspond to the elements of this cache array. On | 1175 // the startup snapshot that correspond to the elements of this cache array. On |
| 1151 // deserialization we therefore need to visit the cache array. This fills it up | 1176 // deserialization we therefore need to visit the cache array. This fills it up |
| 1152 // with pointers to deserialized objects. | 1177 // with pointers to deserialized objects. |
| 1153 void SerializerDeserializer::Iterate(ObjectVisitor* visitor) { | 1178 void SerializerDeserializer::Iterate(ObjectVisitor* visitor) { |
| 1179 Isolate* isolate = Isolate::Current(); | |
| 1154 visitor->VisitPointers( | 1180 visitor->VisitPointers( |
| 1155 &partial_snapshot_cache_[0], | 1181 isolate->serialize_partial_snapshot_cache(), |
| 1156 &partial_snapshot_cache_[partial_snapshot_cache_length_]); | 1182 &isolate->serialize_partial_snapshot_cache()[ |
| 1183 isolate->serialize_partial_snapshot_cache_length()]); | |
| 1157 } | 1184 } |
| 1158 | 1185 |
| 1159 | 1186 |
| 1160 // When deserializing we need to set the size of the snapshot cache. This means | 1187 // When deserializing we need to set the size of the snapshot cache. This means |
| 1161 // the root iteration code (above) will iterate over array elements, writing the | 1188 // the root iteration code (above) will iterate over array elements, writing the |
| 1162 // references to deserialized objects in them. | 1189 // references to deserialized objects in them. |
| 1163 void SerializerDeserializer::SetSnapshotCacheSize(int size) { | 1190 void SerializerDeserializer::SetSnapshotCacheSize(int size) { |
| 1164 partial_snapshot_cache_length_ = size; | 1191 Isolate::Current()->set_serialize_partial_snapshot_cache_length(size); |
| 1165 } | 1192 } |
| 1166 | 1193 |
| 1167 | 1194 |
| 1168 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) { | 1195 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) { |
| 1169 for (int i = 0; i < partial_snapshot_cache_length_; i++) { | 1196 Isolate* isolate = Isolate::Current(); |
| 1170 Object* entry = partial_snapshot_cache_[i]; | 1197 |
| 1198 for (int i = 0; | |
| 1199 i < isolate->serialize_partial_snapshot_cache_length(); | |
| 1200 i++) { | |
| 1201 Object* entry = isolate->serialize_partial_snapshot_cache()[i]; | |
| 1171 if (entry == heap_object) return i; | 1202 if (entry == heap_object) return i; |
| 1172 } | 1203 } |
| 1173 | 1204 |
| 1174 // We didn't find the object in the cache. So we add it to the cache and | 1205 // We didn't find the object in the cache. So we add it to the cache and |
| 1175 // then visit the pointer so that it becomes part of the startup snapshot | 1206 // then visit the pointer so that it becomes part of the startup snapshot |
| 1176 // and we can refer to it from the partial snapshot. | 1207 // and we can refer to it from the partial snapshot. |
| 1177 int length = partial_snapshot_cache_length_; | 1208 int length = isolate->serialize_partial_snapshot_cache_length(); |
| 1178 CHECK(length < kPartialSnapshotCacheCapacity); | 1209 CHECK(length < Isolate::kPartialSnapshotCacheCapacity); |
| 1179 partial_snapshot_cache_[length] = heap_object; | 1210 isolate->serialize_partial_snapshot_cache()[length] = heap_object; |
| 1180 startup_serializer_->VisitPointer(&partial_snapshot_cache_[length]); | 1211 startup_serializer_->VisitPointer( |
| 1212 &isolate->serialize_partial_snapshot_cache()[length]); | |
| 1181 // We don't recurse from the startup snapshot generator into the partial | 1213 // We don't recurse from the startup snapshot generator into the partial |
| 1182 // snapshot generator. | 1214 // snapshot generator. |
| 1183 ASSERT(length == partial_snapshot_cache_length_); | 1215 ASSERT(length == isolate->serialize_partial_snapshot_cache_length()); |
| 1184 return partial_snapshot_cache_length_++; | 1216 isolate->set_serialize_partial_snapshot_cache_length(length + 1); |
| 1217 return length; | |
| 1185 } | 1218 } |
| 1186 | 1219 |
| 1187 | 1220 |
| 1188 int PartialSerializer::RootIndex(HeapObject* heap_object) { | 1221 int PartialSerializer::RootIndex(HeapObject* heap_object) { |
| 1189 for (int i = 0; i < Heap::kRootListLength; i++) { | 1222 for (int i = 0; i < Heap::kRootListLength; i++) { |
| 1190 Object* root = Heap::roots_address()[i]; | 1223 Object* root = HEAP->roots_address()[i]; |
| 1191 if (root == heap_object) return i; | 1224 if (root == heap_object) return i; |
| 1192 } | 1225 } |
| 1193 return kInvalidRootIndex; | 1226 return kInvalidRootIndex; |
| 1194 } | 1227 } |
| 1195 | 1228 |
| 1196 | 1229 |
| 1197 // Encode the location of an already deserialized object in order to write its | 1230 // Encode the location of an already deserialized object in order to write its |
| 1198 // location into a later object. We can encode the location as an offset from | 1231 // location into a later object. We can encode the location as an offset from |
| 1199 // the start of the deserialized objects or as an offset backwards from the | 1232 // the start of the deserialized objects or as an offset backwards from the |
| 1200 // current allocation pointer. | 1233 // current allocation pointer. |
| (...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1263 heap_object, | 1296 heap_object, |
| 1264 sink_, | 1297 sink_, |
| 1265 how_to_code, | 1298 how_to_code, |
| 1266 where_to_point); | 1299 where_to_point); |
| 1267 object_serializer.Serialize(); | 1300 object_serializer.Serialize(); |
| 1268 } | 1301 } |
| 1269 } | 1302 } |
| 1270 | 1303 |
| 1271 | 1304 |
| 1272 void StartupSerializer::SerializeWeakReferences() { | 1305 void StartupSerializer::SerializeWeakReferences() { |
| 1273 for (int i = partial_snapshot_cache_length_; | 1306 for (int i = Isolate::Current()->serialize_partial_snapshot_cache_length(); |
| 1274 i < kPartialSnapshotCacheCapacity; | 1307 i < Isolate::kPartialSnapshotCacheCapacity; |
| 1275 i++) { | 1308 i++) { |
| 1276 sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization"); | 1309 sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization"); |
| 1277 sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index"); | 1310 sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index"); |
| 1278 } | 1311 } |
| 1279 Heap::IterateWeakRoots(this, VISIT_ALL); | 1312 HEAP->IterateWeakRoots(this, VISIT_ALL); |
| 1280 } | 1313 } |
| 1281 | 1314 |
| 1282 | 1315 |
| 1283 void PartialSerializer::SerializeObject( | 1316 void PartialSerializer::SerializeObject( |
| 1284 Object* o, | 1317 Object* o, |
| 1285 HowToCode how_to_code, | 1318 HowToCode how_to_code, |
| 1286 WhereToPoint where_to_point) { | 1319 WhereToPoint where_to_point) { |
| 1287 CHECK(o->IsHeapObject()); | 1320 CHECK(o->IsHeapObject()); |
| 1288 HeapObject* heap_object = HeapObject::cast(o); | 1321 HeapObject* heap_object = HeapObject::cast(o); |
| 1289 | 1322 |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1330 | 1363 |
| 1331 | 1364 |
| 1332 void Serializer::ObjectSerializer::Serialize() { | 1365 void Serializer::ObjectSerializer::Serialize() { |
| 1333 int space = Serializer::SpaceOfObject(object_); | 1366 int space = Serializer::SpaceOfObject(object_); |
| 1334 int size = object_->Size(); | 1367 int size = object_->Size(); |
| 1335 | 1368 |
| 1336 sink_->Put(kNewObject + reference_representation_ + space, | 1369 sink_->Put(kNewObject + reference_representation_ + space, |
| 1337 "ObjectSerialization"); | 1370 "ObjectSerialization"); |
| 1338 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words"); | 1371 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words"); |
| 1339 | 1372 |
| 1340 LOG(SnapshotPositionEvent(object_->address(), sink_->Position())); | 1373 LOG(i::Isolate::Current(), |
| 1374 SnapshotPositionEvent(object_->address(), sink_->Position())); | |
| 1341 | 1375 |
| 1342 // Mark this object as already serialized. | 1376 // Mark this object as already serialized. |
| 1343 bool start_new_page; | 1377 bool start_new_page; |
| 1344 int offset = serializer_->Allocate(space, size, &start_new_page); | 1378 int offset = serializer_->Allocate(space, size, &start_new_page); |
| 1345 serializer_->address_mapper()->AddMapping(object_, offset); | 1379 serializer_->address_mapper()->AddMapping(object_, offset); |
| 1346 if (start_new_page) { | 1380 if (start_new_page) { |
| 1347 sink_->Put(kNewPage, "NewPage"); | 1381 sink_->Put(kNewPage, "NewPage"); |
| 1348 sink_->PutSection(space, "NewPageSpace"); | 1382 sink_->PutSection(space, "NewPageSpace"); |
| 1349 } | 1383 } |
| 1350 | 1384 |
| (...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1431 // objects in the snapshot. | 1465 // objects in the snapshot. |
| 1432 UNREACHABLE(); | 1466 UNREACHABLE(); |
| 1433 } | 1467 } |
| 1434 | 1468 |
| 1435 | 1469 |
| 1436 void Serializer::ObjectSerializer::VisitExternalAsciiString( | 1470 void Serializer::ObjectSerializer::VisitExternalAsciiString( |
| 1437 v8::String::ExternalAsciiStringResource** resource_pointer) { | 1471 v8::String::ExternalAsciiStringResource** resource_pointer) { |
| 1438 Address references_start = reinterpret_cast<Address>(resource_pointer); | 1472 Address references_start = reinterpret_cast<Address>(resource_pointer); |
| 1439 OutputRawData(references_start); | 1473 OutputRawData(references_start); |
| 1440 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { | 1474 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { |
| 1441 Object* source = Heap::natives_source_cache()->get(i); | 1475 Object* source = HEAP->natives_source_cache()->get(i); |
| 1442 if (!source->IsUndefined()) { | 1476 if (!source->IsUndefined()) { |
| 1443 ExternalAsciiString* string = ExternalAsciiString::cast(source); | 1477 ExternalAsciiString* string = ExternalAsciiString::cast(source); |
| 1444 typedef v8::String::ExternalAsciiStringResource Resource; | 1478 typedef v8::String::ExternalAsciiStringResource Resource; |
| 1445 Resource* resource = string->resource(); | 1479 Resource* resource = string->resource(); |
| 1446 if (resource == *resource_pointer) { | 1480 if (resource == *resource_pointer) { |
| 1447 sink_->Put(kNativesStringResource, "NativesStringResource"); | 1481 sink_->Put(kNativesStringResource, "NativesStringResource"); |
| 1448 sink_->PutSection(i, "NativesStringResourceEnd"); | 1482 sink_->PutSection(i, "NativesStringResourceEnd"); |
| 1449 bytes_processed_so_far_ += sizeof(resource); | 1483 bytes_processed_so_far_ += sizeof(resource); |
| 1450 return; | 1484 return; |
| 1451 } | 1485 } |
| (...skipping 29 matching lines...) Expand all Loading... | |
| 1481 sink_->PutSection(data, "Byte"); | 1515 sink_->PutSection(data, "Byte"); |
| 1482 } | 1516 } |
| 1483 bytes_processed_so_far_ += skipped; | 1517 bytes_processed_so_far_ += skipped; |
| 1484 } | 1518 } |
| 1485 } | 1519 } |
| 1486 | 1520 |
| 1487 | 1521 |
| 1488 int Serializer::SpaceOfObject(HeapObject* object) { | 1522 int Serializer::SpaceOfObject(HeapObject* object) { |
| 1489 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { | 1523 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { |
| 1490 AllocationSpace s = static_cast<AllocationSpace>(i); | 1524 AllocationSpace s = static_cast<AllocationSpace>(i); |
| 1491 if (Heap::InSpace(object, s)) { | 1525 if (HEAP->InSpace(object, s)) { |
| 1492 if (i == LO_SPACE) { | 1526 if (i == LO_SPACE) { |
| 1493 if (object->IsCode()) { | 1527 if (object->IsCode()) { |
| 1494 return kLargeCode; | 1528 return kLargeCode; |
| 1495 } else if (object->IsFixedArray()) { | 1529 } else if (object->IsFixedArray()) { |
| 1496 return kLargeFixedArray; | 1530 return kLargeFixedArray; |
| 1497 } else { | 1531 } else { |
| 1498 return kLargeData; | 1532 return kLargeData; |
| 1499 } | 1533 } |
| 1500 } | 1534 } |
| 1501 return i; | 1535 return i; |
| 1502 } | 1536 } |
| 1503 } | 1537 } |
| 1504 UNREACHABLE(); | 1538 UNREACHABLE(); |
| 1505 return 0; | 1539 return 0; |
| 1506 } | 1540 } |
| 1507 | 1541 |
| 1508 | 1542 |
| 1509 int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) { | 1543 int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) { |
| 1510 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { | 1544 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { |
| 1511 AllocationSpace s = static_cast<AllocationSpace>(i); | 1545 AllocationSpace s = static_cast<AllocationSpace>(i); |
| 1512 if (Heap::InSpace(object, s)) { | 1546 if (HEAP->InSpace(object, s)) { |
| 1513 return i; | 1547 return i; |
| 1514 } | 1548 } |
| 1515 } | 1549 } |
| 1516 UNREACHABLE(); | 1550 UNREACHABLE(); |
| 1517 return 0; | 1551 return 0; |
| 1518 } | 1552 } |
| 1519 | 1553 |
| 1520 | 1554 |
| 1521 int Serializer::Allocate(int space, int size, bool* new_page) { | 1555 int Serializer::Allocate(int space, int size, bool* new_page) { |
| 1522 CHECK(space >= 0 && space < kNumberOfSpaces); | 1556 CHECK(space >= 0 && space < kNumberOfSpaces); |
| (...skipping 23 matching lines...) Expand all Loading... | |
| 1546 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); | 1580 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); |
| 1547 } | 1581 } |
| 1548 } | 1582 } |
| 1549 int allocation_address = fullness_[space]; | 1583 int allocation_address = fullness_[space]; |
| 1550 fullness_[space] = allocation_address + size; | 1584 fullness_[space] = allocation_address + size; |
| 1551 return allocation_address; | 1585 return allocation_address; |
| 1552 } | 1586 } |
| 1553 | 1587 |
| 1554 | 1588 |
| 1555 } } // namespace v8::internal | 1589 } } // namespace v8::internal |
| OLD | NEW |