Chromium Code Reviews

Side by Side Diff: src/serialize.cc

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. 1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 20 matching lines...)
31 #include "api.h" 31 #include "api.h"
32 #include "execution.h" 32 #include "execution.h"
33 #include "global-handles.h" 33 #include "global-handles.h"
34 #include "ic-inl.h" 34 #include "ic-inl.h"
35 #include "natives.h" 35 #include "natives.h"
36 #include "platform.h" 36 #include "platform.h"
37 #include "runtime.h" 37 #include "runtime.h"
38 #include "serialize.h" 38 #include "serialize.h"
39 #include "stub-cache.h" 39 #include "stub-cache.h"
40 #include "v8threads.h" 40 #include "v8threads.h"
41 #include "top.h"
42 #include "bootstrapper.h" 41 #include "bootstrapper.h"
43 42
44 namespace v8 { 43 namespace v8 {
45 namespace internal { 44 namespace internal {
46 45
47 46
48 // ----------------------------------------------------------------------------- 47 // -----------------------------------------------------------------------------
49 // Coding of external references. 48 // Coding of external references.
50 49
51 // The encoding of an external reference. The type is in the high word. 50 // The encoding of an external reference. The type is in the high word.
52 // The id is in the low word. 51 // The id is in the low word.
53 static uint32_t EncodeExternal(TypeCode type, uint16_t id) { 52 static uint32_t EncodeExternal(TypeCode type, uint16_t id) {
54 return static_cast<uint32_t>(type) << 16 | id; 53 return static_cast<uint32_t>(type) << 16 | id;
55 } 54 }
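The comment above pins down the layout: the TypeCode occupies the high 16 bits of the code and the id the low 16 bits. For reference, a minimal sketch of the inverse mapping a decoder would apply (the helper names DecodeTypeOf/DecodeIdOf are illustrative only, not part of this file):

// Illustrative inverse of EncodeExternal: recover the two halves of a code.
static TypeCode DecodeTypeOf(uint32_t code) {
  return static_cast<TypeCode>(code >> 16);     // high word: the TypeCode
}
static uint16_t DecodeIdOf(uint32_t code) {
  return static_cast<uint16_t>(code & 0xFFFF);  // low word: the id
}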
56 55
57 56
58 static int* GetInternalPointer(StatsCounter* counter) { 57 static int* GetInternalPointer(StatsCounter* counter) {
59 // All counters refer to dummy_counter, if deserializing happens without 58 // All counters refer to dummy_counter, if deserializing happens without
60 // setting up counters. 59 // setting up counters.
61 static int dummy_counter = 0; 60 static int dummy_counter = 0;
62 return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter; 61 return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter;
63 } 62 }
64 63
65 64
66 // ExternalReferenceTable is a helper class that defines the relationship 65 // ExternalReferenceTable is a helper class that defines the relationship
67 // between external references and their encodings. It is used to build 66 // between external references and their encodings. It is used to build
68 // hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder. 67 // hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder.
69 class ExternalReferenceTable { 68 class ExternalReferenceTable {
70 public: 69 public:
71 static ExternalReferenceTable* instance() { 70 static ExternalReferenceTable* instance(Isolate* isolate) {
72 if (!instance_) instance_ = new ExternalReferenceTable(); 71 ExternalReferenceTable* external_reference_table =
73 return instance_; 72 isolate->external_reference_table();
73 if (external_reference_table == NULL) {
74 external_reference_table = new ExternalReferenceTable(isolate);
75 isolate->set_external_reference_table(external_reference_table);
76 }
77 return external_reference_table;
74 } 78 }
75 79
76 int size() const { return refs_.length(); } 80 int size() const { return refs_.length(); }
77 81
78 Address address(int i) { return refs_[i].address; } 82 Address address(int i) { return refs_[i].address; }
79 83
80 uint32_t code(int i) { return refs_[i].code; } 84 uint32_t code(int i) { return refs_[i].code; }
81 85
82 const char* name(int i) { return refs_[i].name; } 86 const char* name(int i) { return refs_[i].name; }
83 87
84 int max_id(int code) { return max_id_[code]; } 88 int max_id(int code) { return max_id_[code]; }
85 89
86 private: 90 private:
87 static ExternalReferenceTable* instance_; 91 explicit ExternalReferenceTable(Isolate* isolate) : refs_(64) {
88 92 PopulateTable(isolate);
89 ExternalReferenceTable() : refs_(64) { PopulateTable(); } 93 }
90 ~ExternalReferenceTable() { } 94 ~ExternalReferenceTable() { }
91 95
92 struct ExternalReferenceEntry { 96 struct ExternalReferenceEntry {
93 Address address; 97 Address address;
94 uint32_t code; 98 uint32_t code;
95 const char* name; 99 const char* name;
96 }; 100 };
97 101
98 void PopulateTable(); 102 void PopulateTable(Isolate* isolate);
99 103
100 // For a few types of references, we can get their address from their id. 104 // For a few types of references, we can get their address from their id.
101 void AddFromId(TypeCode type, uint16_t id, const char* name); 105 void AddFromId(TypeCode type, uint16_t id, const char* name);
102 106
103 // For other types of references, the caller will figure out the address. 107 // For other types of references, the caller will figure out the address.
104 void Add(Address address, TypeCode type, uint16_t id, const char* name); 108 void Add(Address address, TypeCode type, uint16_t id, const char* name);
105 109
106 List<ExternalReferenceEntry> refs_; 110 List<ExternalReferenceEntry> refs_;
107 int max_id_[kTypeCodeCount]; 111 int max_id_[kTypeCodeCount];
108 }; 112 };
109 113
110 114
111 ExternalReferenceTable* ExternalReferenceTable::instance_ = NULL;
112
113
114 void ExternalReferenceTable::AddFromId(TypeCode type, 115 void ExternalReferenceTable::AddFromId(TypeCode type,
115 uint16_t id, 116 uint16_t id,
116 const char* name) { 117 const char* name) {
117 Address address; 118 Address address;
118 switch (type) { 119 switch (type) {
119 case C_BUILTIN: { 120 case C_BUILTIN: {
120 ExternalReference ref(static_cast<Builtins::CFunctionId>(id)); 121 ExternalReference ref(static_cast<Builtins::CFunctionId>(id));
121 address = ref.address(); 122 address = ref.address();
122 break; 123 break;
123 } 124 }
(...skipping 28 matching lines...)
152 ExternalReferenceEntry entry; 153 ExternalReferenceEntry entry;
153 entry.address = address; 154 entry.address = address;
154 entry.code = EncodeExternal(type, id); 155 entry.code = EncodeExternal(type, id);
155 entry.name = name; 156 entry.name = name;
156 ASSERT_NE(0, entry.code); 157 ASSERT_NE(0, entry.code);
157 refs_.Add(entry); 158 refs_.Add(entry);
158 if (id > max_id_[type]) max_id_[type] = id; 159 if (id > max_id_[type]) max_id_[type] = id;
159 } 160 }
160 161
161 162
162 void ExternalReferenceTable::PopulateTable() { 163 void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
163 for (int type_code = 0; type_code < kTypeCodeCount; type_code++) { 164 for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
164 max_id_[type_code] = 0; 165 max_id_[type_code] = 0;
165 } 166 }
166 167
167 // The following populates all of the different type of external references 168 // The following populates all of the different type of external references
168 // into the ExternalReferenceTable. 169 // into the ExternalReferenceTable.
169 // 170 //
170 // NOTE: This function was originally 100k of code. It has since been 171 // NOTE: This function was originally 100k of code. It has since been
171 // rewritten to be mostly table driven, as the callback macro style tends to 172 // rewritten to be mostly table driven, as the callback macro style tends to
172 // very easily cause code bloat. Please be careful in the future when adding 173 // very easily cause code bloat. Please be careful in the future when adding
(...skipping 45 matching lines...)
218 IC_UTIL_LIST(IC_ENTRY) 219 IC_UTIL_LIST(IC_ENTRY)
219 #undef IC_ENTRY 220 #undef IC_ENTRY
220 }; // end of ref_table[]. 221 }; // end of ref_table[].
221 222
222 for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) { 223 for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) {
223 AddFromId(ref_table[i].type, ref_table[i].id, ref_table[i].name); 224 AddFromId(ref_table[i].type, ref_table[i].id, ref_table[i].name);
224 } 225 }
225 226
226 #ifdef ENABLE_DEBUGGER_SUPPORT 227 #ifdef ENABLE_DEBUGGER_SUPPORT
227 // Debug addresses 228 // Debug addresses
228 Add(Debug_Address(Debug::k_after_break_target_address).address(), 229 Add(Debug_Address(Debug::k_after_break_target_address).address(isolate),
229 DEBUG_ADDRESS, 230 DEBUG_ADDRESS,
230 Debug::k_after_break_target_address << kDebugIdShift, 231 Debug::k_after_break_target_address << kDebugIdShift,
231 "Debug::after_break_target_address()"); 232 "Debug::after_break_target_address()");
232 Add(Debug_Address(Debug::k_debug_break_slot_address).address(), 233 Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate),
233 DEBUG_ADDRESS, 234 DEBUG_ADDRESS,
234 Debug::k_debug_break_slot_address << kDebugIdShift, 235 Debug::k_debug_break_slot_address << kDebugIdShift,
235 "Debug::debug_break_slot_address()"); 236 "Debug::debug_break_slot_address()");
236 Add(Debug_Address(Debug::k_debug_break_return_address).address(), 237 Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate),
237 DEBUG_ADDRESS, 238 DEBUG_ADDRESS,
238 Debug::k_debug_break_return_address << kDebugIdShift, 239 Debug::k_debug_break_return_address << kDebugIdShift,
239 "Debug::debug_break_return_address()"); 240 "Debug::debug_break_return_address()");
240 Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(), 241 Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate),
241 DEBUG_ADDRESS, 242 DEBUG_ADDRESS,
242 Debug::k_restarter_frame_function_pointer << kDebugIdShift, 243 Debug::k_restarter_frame_function_pointer << kDebugIdShift,
243 "Debug::restarter_frame_function_pointer_address()"); 244 "Debug::restarter_frame_function_pointer_address()");
244 #endif 245 #endif
245 246
246 // Stat counters 247 // Stat counters
247 struct StatsRefTableEntry { 248 struct StatsRefTableEntry {
248 StatsCounter* counter; 249 StatsCounter* (Counters::*counter)();
249 uint16_t id; 250 uint16_t id;
250 const char* name; 251 const char* name;
251 }; 252 };
252 253
253 static const StatsRefTableEntry stats_ref_table[] = { 254 const StatsRefTableEntry stats_ref_table[] = {
254 #define COUNTER_ENTRY(name, caption) \ 255 #define COUNTER_ENTRY(name, caption) \
255 { &Counters::name, \ 256 { &Counters::name, \
256 Counters::k_##name, \ 257 Counters::k_##name, \
257 "Counters::" #name }, 258 "Counters::" #name },
258 259
259 STATS_COUNTER_LIST_1(COUNTER_ENTRY) 260 STATS_COUNTER_LIST_1(COUNTER_ENTRY)
260 STATS_COUNTER_LIST_2(COUNTER_ENTRY) 261 STATS_COUNTER_LIST_2(COUNTER_ENTRY)
261 #undef COUNTER_ENTRY 262 #undef COUNTER_ENTRY
262 }; // end of stats_ref_table[]. 263 }; // end of stats_ref_table[].
263 264
265 Counters* counters = isolate->counters();
264 for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) { 266 for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
265 Add(reinterpret_cast<Address>( 267 Add(reinterpret_cast<Address>(GetInternalPointer(
266 GetInternalPointer(stats_ref_table[i].counter)), 268 (counters->*(stats_ref_table[i].counter))())),
267 STATS_COUNTER, 269 STATS_COUNTER,
268 stats_ref_table[i].id, 270 stats_ref_table[i].id,
269 stats_ref_table[i].name); 271 stats_ref_table[i].name);
270 } 272 }
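On the new side, stats_ref_table no longer stores StatsCounter pointers directly; each entry holds a pointer to a Counters member function, which is invoked on the isolate's Counters object via (counters->*(stats_ref_table[i].counter))(). A self-contained sketch of that pointer-to-member-function idiom, with made-up Registry/slot names standing in for Counters and its accessors:

#include <cstdio>

// Hypothetical stand-in for Counters: an accessor returning a counter slot.
struct Registry {
  int* slot_a() { return &a_; }
  int a_ = 0;
};

int main() {
  // Store a pointer to a member function, as the table stores &Counters::name...
  int* (Registry::*getter)() = &Registry::slot_a;
  Registry registry;
  // ...then bind it to a concrete object with .* or ->*, mirroring
  // (counters->*(stats_ref_table[i].counter))() in the loop above.
  int* slot = (registry.*getter)();
  *slot = 42;
  std::printf("%d\n", registry.a_);  // prints 42
  return 0;
}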
271 273
272 // Top addresses 274 // Top addresses
273 const char* top_address_format = "Top::%s";
274 275
275 const char* AddressNames[] = { 276 const char* AddressNames[] = {
276 #define C(name) #name, 277 #define C(name) "Isolate::" #name,
277 TOP_ADDRESS_LIST(C) 278 ISOLATE_ADDRESS_LIST(C)
278 TOP_ADDRESS_LIST_PROF(C) 279 ISOLATE_ADDRESS_LIST_PROF(C)
279 NULL 280 NULL
280 #undef C 281 #undef C
281 }; 282 };
282 283
283 int top_format_length = StrLength(top_address_format) - 2; 284 for (uint16_t i = 0; i < Isolate::k_isolate_address_count; ++i) {
284 for (uint16_t i = 0; i < Top::k_top_address_count; ++i) { 285 Add(isolate->get_address_from_id((Isolate::AddressId)i),
285 const char* address_name = AddressNames[i]; 286 TOP_ADDRESS, i, AddressNames[i]);
286 Vector<char> name =
287 Vector<char>::New(top_format_length + StrLength(address_name) + 1);
288 const char* chars = name.start();
289 OS::SNPrintF(name, top_address_format, address_name);
290 Add(Top::get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars);
291 } 287 }
292 288
293 // Accessors 289 // Accessors
294 #define ACCESSOR_DESCRIPTOR_DECLARATION(name) \ 290 #define ACCESSOR_DESCRIPTOR_DECLARATION(name) \
295 Add((Address)&Accessors::name, \ 291 Add((Address)&Accessors::name, \
296 ACCESSOR, \ 292 ACCESSOR, \
297 Accessors::k##name, \ 293 Accessors::k##name, \
298 "Accessors::" #name); 294 "Accessors::" #name);
299 295
300 ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION) 296 ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
301 #undef ACCESSOR_DESCRIPTOR_DECLARATION 297 #undef ACCESSOR_DESCRIPTOR_DECLARATION
302 298
299 StubCache* stub_cache = isolate->stub_cache();
300
303 // Stub cache tables 301 // Stub cache tables
304 Add(SCTableReference::keyReference(StubCache::kPrimary).address(), 302 Add(stub_cache->key_reference(StubCache::kPrimary).address(),
305 STUB_CACHE_TABLE, 303 STUB_CACHE_TABLE,
306 1, 304 1,
307 "StubCache::primary_->key"); 305 "StubCache::primary_->key");
308 Add(SCTableReference::valueReference(StubCache::kPrimary).address(), 306 Add(stub_cache->value_reference(StubCache::kPrimary).address(),
309 STUB_CACHE_TABLE, 307 STUB_CACHE_TABLE,
310 2, 308 2,
311 "StubCache::primary_->value"); 309 "StubCache::primary_->value");
312 Add(SCTableReference::keyReference(StubCache::kSecondary).address(), 310 Add(stub_cache->key_reference(StubCache::kSecondary).address(),
313 STUB_CACHE_TABLE, 311 STUB_CACHE_TABLE,
314 3, 312 3,
315 "StubCache::secondary_->key"); 313 "StubCache::secondary_->key");
316 Add(SCTableReference::valueReference(StubCache::kSecondary).address(), 314 Add(stub_cache->value_reference(StubCache::kSecondary).address(),
317 STUB_CACHE_TABLE, 315 STUB_CACHE_TABLE,
318 4, 316 4,
319 "StubCache::secondary_->value"); 317 "StubCache::secondary_->value");
320 318
321 // Runtime entries 319 // Runtime entries
322 Add(ExternalReference::perform_gc_function().address(), 320 Add(ExternalReference::perform_gc_function().address(),
323 RUNTIME_ENTRY, 321 RUNTIME_ENTRY,
324 1, 322 1,
325 "Runtime::PerformGC"); 323 "Runtime::PerformGC");
326 Add(ExternalReference::fill_heap_number_with_random_function().address(), 324 Add(ExternalReference::fill_heap_number_with_random_function().address(),
327 RUNTIME_ENTRY, 325 RUNTIME_ENTRY,
328 2, 326 2,
329 "V8::FillHeapNumberWithRandom"); 327 "V8::FillHeapNumberWithRandom");
330
331 Add(ExternalReference::random_uint32_function().address(), 328 Add(ExternalReference::random_uint32_function().address(),
332 RUNTIME_ENTRY, 329 RUNTIME_ENTRY,
333 3, 330 3,
334 "V8::Random"); 331 "V8::Random");
335
336 Add(ExternalReference::delete_handle_scope_extensions().address(), 332 Add(ExternalReference::delete_handle_scope_extensions().address(),
337 RUNTIME_ENTRY, 333 RUNTIME_ENTRY,
338 4, 334 4,
339 "HandleScope::DeleteExtensions"); 335 "HandleScope::DeleteExtensions");
340 336
341 // Miscellaneous 337 // Miscellaneous
342 Add(ExternalReference::the_hole_value_location().address(), 338 Add(ExternalReference::the_hole_value_location().address(),
343 UNCLASSIFIED, 339 UNCLASSIFIED,
344 2, 340 2,
345 "Factory::the_hole_value().location()"); 341 "Factory::the_hole_value().location()");
(...skipping 133 matching lines...)
479 34, 475 34,
480 "Deoptimizer::ComputeOutputFrames()"); 476 "Deoptimizer::ComputeOutputFrames()");
481 Add(ExternalReference::address_of_min_int().address(), 477 Add(ExternalReference::address_of_min_int().address(),
482 UNCLASSIFIED, 478 UNCLASSIFIED,
483 35, 479 35,
484 "LDoubleConstant::min_int"); 480 "LDoubleConstant::min_int");
485 Add(ExternalReference::address_of_one_half().address(), 481 Add(ExternalReference::address_of_one_half().address(),
486 UNCLASSIFIED, 482 UNCLASSIFIED,
487 36, 483 36,
488 "LDoubleConstant::one_half"); 484 "LDoubleConstant::one_half");
485 Add(ExternalReference::isolate_address().address(),
486 UNCLASSIFIED,
487 37,
488 "isolate");
489 Add(ExternalReference::address_of_minus_zero().address(), 489 Add(ExternalReference::address_of_minus_zero().address(),
490 UNCLASSIFIED, 490 UNCLASSIFIED,
491 37, 491 38,
492 "LDoubleConstant::minus_zero"); 492 "LDoubleConstant::minus_zero");
493 Add(ExternalReference::address_of_negative_infinity().address(), 493 Add(ExternalReference::address_of_negative_infinity().address(),
494 UNCLASSIFIED, 494 UNCLASSIFIED,
495 38, 495 39,
496 "LDoubleConstant::negative_infinity"); 496 "LDoubleConstant::negative_infinity");
497 Add(ExternalReference::power_double_double_function().address(), 497 Add(ExternalReference::power_double_double_function().address(),
498 UNCLASSIFIED, 498 UNCLASSIFIED,
499 39, 499 40,
500 "power_double_double_function"); 500 "power_double_double_function");
501 Add(ExternalReference::power_double_int_function().address(), 501 Add(ExternalReference::power_double_int_function().address(),
502 UNCLASSIFIED, 502 UNCLASSIFIED,
503 40, 503 41,
504 "power_double_int_function"); 504 "power_double_int_function");
505 Add(ExternalReference::arguments_marker_location().address(), 505 Add(ExternalReference::arguments_marker_location().address(),
506 UNCLASSIFIED, 506 UNCLASSIFIED,
507 41, 507 42,
508 "Factory::arguments_marker().location()"); 508 "Factory::arguments_marker().location()");
509 } 509 }
510 510
511 511
512 ExternalReferenceEncoder::ExternalReferenceEncoder() 512 ExternalReferenceEncoder::ExternalReferenceEncoder()
513 : encodings_(Match) { 513 : encodings_(Match),
514 isolate_(Isolate::Current()) {
514 ExternalReferenceTable* external_references = 515 ExternalReferenceTable* external_references =
515 ExternalReferenceTable::instance(); 516 ExternalReferenceTable::instance(isolate_);
516 for (int i = 0; i < external_references->size(); ++i) { 517 for (int i = 0; i < external_references->size(); ++i) {
517 Put(external_references->address(i), i); 518 Put(external_references->address(i), i);
518 } 519 }
519 } 520 }
520 521
521 522
522 uint32_t ExternalReferenceEncoder::Encode(Address key) const { 523 uint32_t ExternalReferenceEncoder::Encode(Address key) const {
523 int index = IndexOf(key); 524 int index = IndexOf(key);
524 ASSERT(key == NULL || index >= 0); 525 ASSERT(key == NULL || index >= 0);
525 return index >=0 ? ExternalReferenceTable::instance()->code(index) : 0; 526 return index >=0 ?
527 ExternalReferenceTable::instance(isolate_)->code(index) : 0;
526 } 528 }
527 529
528 530
529 const char* ExternalReferenceEncoder::NameOfAddress(Address key) const { 531 const char* ExternalReferenceEncoder::NameOfAddress(Address key) const {
530 int index = IndexOf(key); 532 int index = IndexOf(key);
531 return index >=0 ? ExternalReferenceTable::instance()->name(index) : NULL; 533 return index >= 0 ?
534 ExternalReferenceTable::instance(isolate_)->name(index) : NULL;
532 } 535 }
533 536
534 537
535 int ExternalReferenceEncoder::IndexOf(Address key) const { 538 int ExternalReferenceEncoder::IndexOf(Address key) const {
536 if (key == NULL) return -1; 539 if (key == NULL) return -1;
537 HashMap::Entry* entry = 540 HashMap::Entry* entry =
538 const_cast<HashMap &>(encodings_).Lookup(key, Hash(key), false); 541 const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false);
539 return entry == NULL 542 return entry == NULL
540 ? -1 543 ? -1
541 : static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); 544 : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
542 } 545 }
543 546
544 547
545 void ExternalReferenceEncoder::Put(Address key, int index) { 548 void ExternalReferenceEncoder::Put(Address key, int index) {
546 HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true); 549 HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true);
547 entry->value = reinterpret_cast<void*>(index); 550 entry->value = reinterpret_cast<void*>(index);
548 } 551 }
549 552
550 553
551 ExternalReferenceDecoder::ExternalReferenceDecoder() 554 ExternalReferenceDecoder::ExternalReferenceDecoder()
552 : encodings_(NewArray<Address*>(kTypeCodeCount)) { 555 : encodings_(NewArray<Address*>(kTypeCodeCount)),
556 isolate_(Isolate::Current()) {
553 ExternalReferenceTable* external_references = 557 ExternalReferenceTable* external_references =
554 ExternalReferenceTable::instance(); 558 ExternalReferenceTable::instance(isolate_);
555 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) { 559 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
556 int max = external_references->max_id(type) + 1; 560 int max = external_references->max_id(type) + 1;
557 encodings_[type] = NewArray<Address>(max + 1); 561 encodings_[type] = NewArray<Address>(max + 1);
558 } 562 }
559 for (int i = 0; i < external_references->size(); ++i) { 563 for (int i = 0; i < external_references->size(); ++i) {
560 Put(external_references->code(i), external_references->address(i)); 564 Put(external_references->code(i), external_references->address(i));
561 } 565 }
562 } 566 }
563 567
564 568
565 ExternalReferenceDecoder::~ExternalReferenceDecoder() { 569 ExternalReferenceDecoder::~ExternalReferenceDecoder() {
566 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) { 570 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
567 DeleteArray(encodings_[type]); 571 DeleteArray(encodings_[type]);
568 } 572 }
569 DeleteArray(encodings_); 573 DeleteArray(encodings_);
570 } 574 }
571 575
572 576
573 bool Serializer::serialization_enabled_ = false; 577 bool Serializer::serialization_enabled_ = false;
574 bool Serializer::too_late_to_enable_now_ = false; 578 bool Serializer::too_late_to_enable_now_ = false;
575 ExternalReferenceDecoder* Deserializer::external_reference_decoder_ = NULL;
576 579
577 580
578 Deserializer::Deserializer(SnapshotByteSource* source) : source_(source) { 581 Deserializer::Deserializer(SnapshotByteSource* source)
582 : isolate_(NULL),
583 source_(source),
584 external_reference_decoder_(NULL) {
579 } 585 }
580 586
581 587
582 // This routine both allocates a new object, and also keeps 588 // This routine both allocates a new object, and also keeps
583 // track of where objects have been allocated so that we can 589 // track of where objects have been allocated so that we can
584 // fix back references when deserializing. 590 // fix back references when deserializing.
585 Address Deserializer::Allocate(int space_index, Space* space, int size) { 591 Address Deserializer::Allocate(int space_index, Space* space, int size) {
586 Address address; 592 Address address;
587 if (!SpaceIsLarge(space_index)) { 593 if (!SpaceIsLarge(space_index)) {
588 ASSERT(!SpaceIsPaged(space_index) || 594 ASSERT(!SpaceIsPaged(space_index) ||
(...skipping 58 matching lines...)
647 } 653 }
648 ASSERT(SpaceIsPaged(space)); 654 ASSERT(SpaceIsPaged(space));
649 int page_of_pointee = offset >> kPageSizeBits; 655 int page_of_pointee = offset >> kPageSizeBits;
650 Address object_address = pages_[space][page_of_pointee] + 656 Address object_address = pages_[space][page_of_pointee] +
651 (offset & Page::kPageAlignmentMask); 657 (offset & Page::kPageAlignmentMask);
652 return HeapObject::FromAddress(object_address); 658 return HeapObject::FromAddress(object_address);
653 } 659 }
654 660
655 661
656 void Deserializer::Deserialize() { 662 void Deserializer::Deserialize() {
663 isolate_ = Isolate::Current();
657 // Don't GC while deserializing - just expand the heap. 664 // Don't GC while deserializing - just expand the heap.
658 AlwaysAllocateScope always_allocate; 665 AlwaysAllocateScope always_allocate;
659 // Don't use the free lists while deserializing. 666 // Don't use the free lists while deserializing.
660 LinearAllocationScope allocate_linearly; 667 LinearAllocationScope allocate_linearly;
661 // No active threads. 668 // No active threads.
662 ASSERT_EQ(NULL, ThreadState::FirstInUse()); 669 ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
663 // No active handles. 670 // No active handles.
664 ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty()); 671 ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
665 // Make sure the entire partial snapshot cache is traversed, filling it with 672 // Make sure the entire partial snapshot cache is traversed, filling it with
666 // valid object pointers. 673 // valid object pointers.
667 partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity; 674 isolate_->set_serialize_partial_snapshot_cache_length(
675 Isolate::kPartialSnapshotCacheCapacity);
668 ASSERT_EQ(NULL, external_reference_decoder_); 676 ASSERT_EQ(NULL, external_reference_decoder_);
669 external_reference_decoder_ = new ExternalReferenceDecoder(); 677 external_reference_decoder_ = new ExternalReferenceDecoder();
670 Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG); 678 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
671 Heap::IterateWeakRoots(this, VISIT_ALL); 679 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
672 680
673 Heap::set_global_contexts_list(Heap::undefined_value()); 681 isolate_->heap()->set_global_contexts_list(
682 isolate_->heap()->undefined_value());
674 } 683 }
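For context, Deserialize() now binds isolate_ to Isolate::Current() on entry instead of relying on process-wide statics. A sketch of how a caller might drive it, based only on the constructor and Deserialize() shown in this file (the SnapshotByteSource constructor arguments are an assumption, and DeserializeInto is a hypothetical helper, not part of this CL):

// Assumed caller sketch: wrap raw snapshot bytes in a SnapshotByteSource and
// let Deserialize() rebuild the current isolate's heap from them.
static void DeserializeInto(const byte* data, int length) {
  SnapshotByteSource source(data, length);
  Deserializer deserializer(&source);  // isolate_ starts out NULL...
  deserializer.Deserialize();          // ...and is bound to Isolate::Current() here.
}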
675 684
676 685
677 void Deserializer::DeserializePartial(Object** root) { 686 void Deserializer::DeserializePartial(Object** root) {
687 isolate_ = Isolate::Current();
678 // Don't GC while deserializing - just expand the heap. 688 // Don't GC while deserializing - just expand the heap.
679 AlwaysAllocateScope always_allocate; 689 AlwaysAllocateScope always_allocate;
680 // Don't use the free lists while deserializing. 690 // Don't use the free lists while deserializing.
681 LinearAllocationScope allocate_linearly; 691 LinearAllocationScope allocate_linearly;
682 if (external_reference_decoder_ == NULL) { 692 if (external_reference_decoder_ == NULL) {
683 external_reference_decoder_ = new ExternalReferenceDecoder(); 693 external_reference_decoder_ = new ExternalReferenceDecoder();
684 } 694 }
685 VisitPointer(root); 695 VisitPointer(root);
686 } 696 }
687 697
688 698
689 Deserializer::~Deserializer() { 699 Deserializer::~Deserializer() {
690 ASSERT(source_->AtEOF()); 700 ASSERT(source_->AtEOF());
691 if (external_reference_decoder_ != NULL) { 701 if (external_reference_decoder_) {
692 delete external_reference_decoder_; 702 delete external_reference_decoder_;
693 external_reference_decoder_ = NULL; 703 external_reference_decoder_ = NULL;
694 } 704 }
695 } 705 }
696 706
697 707
698 // This is called on the roots. It is the driver of the deserialization 708 // This is called on the roots. It is the driver of the deserialization
699 // process. It is also called on the body of each function. 709 // process. It is also called on the body of each function.
700 void Deserializer::VisitPointers(Object** start, Object** end) { 710 void Deserializer::VisitPointers(Object** start, Object** end) {
701 // The space must be new space. Any other space would cause ReadChunk to try 711 // The space must be new space. Any other space would cause ReadChunk to try
(...skipping 10 matching lines...)
712 // making it into a byte array). 722 // making it into a byte array).
713 void Deserializer::ReadObject(int space_number, 723 void Deserializer::ReadObject(int space_number,
714 Space* space, 724 Space* space,
715 Object** write_back) { 725 Object** write_back) {
716 int size = source_->GetInt() << kObjectAlignmentBits; 726 int size = source_->GetInt() << kObjectAlignmentBits;
717 Address address = Allocate(space_number, space, size); 727 Address address = Allocate(space_number, space, size);
718 *write_back = HeapObject::FromAddress(address); 728 *write_back = HeapObject::FromAddress(address);
719 Object** current = reinterpret_cast<Object**>(address); 729 Object** current = reinterpret_cast<Object**>(address);
720 Object** limit = current + (size >> kPointerSizeLog2); 730 Object** limit = current + (size >> kPointerSizeLog2);
721 if (FLAG_log_snapshot_positions) { 731 if (FLAG_log_snapshot_positions) {
722 LOG(SnapshotPositionEvent(address, source_->position())); 732 LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
723 } 733 }
724 ReadChunk(current, limit, space_number, address); 734 ReadChunk(current, limit, space_number, address);
725 #ifdef DEBUG 735 #ifdef DEBUG
726 bool is_codespace = (space == Heap::code_space()) || 736 bool is_codespace = (space == HEAP->code_space()) ||
727 ((space == Heap::lo_space()) && (space_number == kLargeCode)); 737 ((space == HEAP->lo_space()) && (space_number == kLargeCode));
728 ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace); 738 ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace);
729 #endif 739 #endif
730 } 740 }
731 741
732 742
733 // This macro is always used with a constant argument so it should all fold 743 // This macro is always used with a constant argument so it should all fold
734 // away to almost nothing in the generated code. It might be nicer to do this 744 // away to almost nothing in the generated code. It might be nicer to do this
735 // with the ternary operator but there are type issues with that. 745 // with the ternary operator but there are type issues with that.
736 #define ASSIGN_DEST_SPACE(space_number) \ 746 #define ASSIGN_DEST_SPACE(space_number) \
737 Space* dest_space; \ 747 Space* dest_space; \
738 if (space_number == NEW_SPACE) { \ 748 if (space_number == NEW_SPACE) { \
739 dest_space = Heap::new_space(); \ 749 dest_space = isolate->heap()->new_space(); \
740 } else if (space_number == OLD_POINTER_SPACE) { \ 750 } else if (space_number == OLD_POINTER_SPACE) { \
741 dest_space = Heap::old_pointer_space(); \ 751 dest_space = isolate->heap()->old_pointer_space(); \
742 } else if (space_number == OLD_DATA_SPACE) { \ 752 } else if (space_number == OLD_DATA_SPACE) { \
743 dest_space = Heap::old_data_space(); \ 753 dest_space = isolate->heap()->old_data_space(); \
744 } else if (space_number == CODE_SPACE) { \ 754 } else if (space_number == CODE_SPACE) { \
745 dest_space = Heap::code_space(); \ 755 dest_space = isolate->heap()->code_space(); \
746 } else if (space_number == MAP_SPACE) { \ 756 } else if (space_number == MAP_SPACE) { \
747 dest_space = Heap::map_space(); \ 757 dest_space = isolate->heap()->map_space(); \
748 } else if (space_number == CELL_SPACE) { \ 758 } else if (space_number == CELL_SPACE) { \
749 dest_space = Heap::cell_space(); \ 759 dest_space = isolate->heap()->cell_space(); \
750 } else { \ 760 } else { \
751 ASSERT(space_number >= LO_SPACE); \ 761 ASSERT(space_number >= LO_SPACE); \
752 dest_space = Heap::lo_space(); \ 762 dest_space = isolate->heap()->lo_space(); \
753 } 763 }
754 764
755 765
756 static const int kUnknownOffsetFromStart = -1; 766 static const int kUnknownOffsetFromStart = -1;
757 767
758 768
759 void Deserializer::ReadChunk(Object** current, 769 void Deserializer::ReadChunk(Object** current,
760 Object** limit, 770 Object** limit,
761 int source_space, 771 int source_space,
762 Address address) { 772 Address address) {
773 Isolate* const isolate = isolate_;
763 while (current < limit) { 774 while (current < limit) {
764 int data = source_->Get(); 775 int data = source_->Get();
765 switch (data) { 776 switch (data) {
766 #define CASE_STATEMENT(where, how, within, space_number) \ 777 #define CASE_STATEMENT(where, how, within, space_number) \
767 case where + how + within + space_number: \ 778 case where + how + within + space_number: \
768 ASSERT((where & ~kPointedToMask) == 0); \ 779 ASSERT((where & ~kPointedToMask) == 0); \
769 ASSERT((how & ~kHowToCodeMask) == 0); \ 780 ASSERT((how & ~kHowToCodeMask) == 0); \
770 ASSERT((within & ~kWhereToPointMask) == 0); \ 781 ASSERT((within & ~kWhereToPointMask) == 0); \
771 ASSERT((space_number & ~kSpaceMask) == 0); 782 ASSERT((space_number & ~kSpaceMask) == 0);
772 783
773 #define CASE_BODY(where, how, within, space_number_if_any, offset_from_start) \ 784 #define CASE_BODY(where, how, within, space_number_if_any, offset_from_start) \
774 { \ 785 { \
775 bool emit_write_barrier = false; \ 786 bool emit_write_barrier = false; \
776 bool current_was_incremented = false; \ 787 bool current_was_incremented = false; \
777 int space_number = space_number_if_any == kAnyOldSpace ? \ 788 int space_number = space_number_if_any == kAnyOldSpace ? \
778 (data & kSpaceMask) : space_number_if_any; \ 789 (data & kSpaceMask) : space_number_if_any; \
779 if (where == kNewObject && how == kPlain && within == kStartOfObject) {\ 790 if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
780 ASSIGN_DEST_SPACE(space_number) \ 791 ASSIGN_DEST_SPACE(space_number) \
781 ReadObject(space_number, dest_space, current); \ 792 ReadObject(space_number, dest_space, current); \
782 emit_write_barrier = \ 793 emit_write_barrier = \
783 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ 794 (space_number == NEW_SPACE && source_space != NEW_SPACE); \
784 } else { \ 795 } else { \
785 Object* new_object = NULL; /* May not be a real Object pointer. */ \ 796 Object* new_object = NULL; /* May not be a real Object pointer. */ \
786 if (where == kNewObject) { \ 797 if (where == kNewObject) { \
787 ASSIGN_DEST_SPACE(space_number) \ 798 ASSIGN_DEST_SPACE(space_number) \
788 ReadObject(space_number, dest_space, &new_object); \ 799 ReadObject(space_number, dest_space, &new_object); \
789 } else if (where == kRootArray) { \ 800 } else if (where == kRootArray) { \
790 int root_id = source_->GetInt(); \ 801 int root_id = source_->GetInt(); \
791 new_object = Heap::roots_address()[root_id]; \ 802 new_object = isolate->heap()->roots_address()[root_id]; \
792 } else if (where == kPartialSnapshotCache) { \ 803 } else if (where == kPartialSnapshotCache) { \
793 int cache_index = source_->GetInt(); \ 804 int cache_index = source_->GetInt(); \
794 new_object = partial_snapshot_cache_[cache_index]; \ 805 new_object = isolate->serialize_partial_snapshot_cache() \
806 [cache_index]; \
795 } else if (where == kExternalReference) { \ 807 } else if (where == kExternalReference) { \
796 int reference_id = source_->GetInt(); \ 808 int reference_id = source_->GetInt(); \
797 Address address = \ 809 Address address = external_reference_decoder_-> \
798 external_reference_decoder_->Decode(reference_id); \ 810 Decode(reference_id); \
799 new_object = reinterpret_cast<Object*>(address); \ 811 new_object = reinterpret_cast<Object*>(address); \
800 } else if (where == kBackref) { \ 812 } else if (where == kBackref) { \
801 emit_write_barrier = \ 813 emit_write_barrier = \
802 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ 814 (space_number == NEW_SPACE && source_space != NEW_SPACE); \
803 new_object = GetAddressFromEnd(data & kSpaceMask); \ 815 new_object = GetAddressFromEnd(data & kSpaceMask); \
804 } else { \ 816 } else { \
805 ASSERT(where == kFromStart); \ 817 ASSERT(where == kFromStart); \
806 if (offset_from_start == kUnknownOffsetFromStart) { \ 818 if (offset_from_start == kUnknownOffsetFromStart) { \
807 emit_write_barrier = \ 819 emit_write_barrier = \
808 (space_number == NEW_SPACE && source_space != NEW_SPACE); \ 820 (space_number == NEW_SPACE && source_space != NEW_SPACE); \
(...skipping 17 matching lines...)
826 if (within == kFirstInstruction) { \ 838 if (within == kFirstInstruction) { \
827 location_of_branch_data += Assembler::kCallTargetSize; \ 839 location_of_branch_data += Assembler::kCallTargetSize; \
828 current = reinterpret_cast<Object**>(location_of_branch_data); \ 840 current = reinterpret_cast<Object**>(location_of_branch_data); \
829 current_was_incremented = true; \ 841 current_was_incremented = true; \
830 } \ 842 } \
831 } else { \ 843 } else { \
832 *current = new_object; \ 844 *current = new_object; \
833 } \ 845 } \
834 } \ 846 } \
835 if (emit_write_barrier) { \ 847 if (emit_write_barrier) { \
836 Heap::RecordWrite(address, static_cast<int>( \ 848 isolate->heap()->RecordWrite(address, static_cast<int>( \
837 reinterpret_cast<Address>(current) - address)); \ 849 reinterpret_cast<Address>(current) - address)); \
838 } \ 850 } \
839 if (!current_was_incremented) { \ 851 if (!current_was_incremented) { \
840 current++; /* Increment current if it wasn't done above. */ \ 852 current++; /* Increment current if it wasn't done above. */ \
841 } \ 853 } \
842 break; \ 854 break; \
843 } \ 855 } \
844 856
845 // This generates a case and a body for each space. The large object spaces are 857 // This generates a case and a body for each space. The large object spaces are
846 // very rare in snapshots so they are grouped in one body. 858 // very rare in snapshots so they are grouped in one body.
(...skipping 143 matching lines...)
990 if (space == CODE_SPACE) { 1002 if (space == CODE_SPACE) {
991 CPU::FlushICache(last_object_address_, Page::kPageSize); 1003 CPU::FlushICache(last_object_address_, Page::kPageSize);
992 } 1004 }
993 break; 1005 break;
994 } 1006 }
995 1007
996 case kNativesStringResource: { 1008 case kNativesStringResource: {
997 int index = source_->Get(); 1009 int index = source_->Get();
998 Vector<const char> source_vector = Natives::GetScriptSource(index); 1010 Vector<const char> source_vector = Natives::GetScriptSource(index);
999 NativesExternalStringResource* resource = 1011 NativesExternalStringResource* resource =
1000 new NativesExternalStringResource(source_vector.start()); 1012 new NativesExternalStringResource(
1013 isolate->bootstrapper(), source_vector.start());
1001 *current++ = reinterpret_cast<Object*>(resource); 1014 *current++ = reinterpret_cast<Object*>(resource);
1002 break; 1015 break;
1003 } 1016 }
1004 1017
1005 case kSynchronize: { 1018 case kSynchronize: {
1006 // If we get here then that indicates that you have a mismatch between 1019 // If we get here then that indicates that you have a mismatch between
1007 // the number of GC roots when serializing and deserializing. 1020 // the number of GC roots when serializing and deserializing.
1008 UNREACHABLE(); 1021 UNREACHABLE();
1009 } 1022 }
1010 1023
(...skipping 44 matching lines...)
1055 } while (character != 0); 1068 } while (character != 0);
1056 } 1069 }
1057 1070
1058 #endif 1071 #endif
1059 1072
1060 Serializer::Serializer(SnapshotByteSink* sink) 1073 Serializer::Serializer(SnapshotByteSink* sink)
1061 : sink_(sink), 1074 : sink_(sink),
1062 current_root_index_(0), 1075 current_root_index_(0),
1063 external_reference_encoder_(new ExternalReferenceEncoder), 1076 external_reference_encoder_(new ExternalReferenceEncoder),
1064 large_object_total_(0) { 1077 large_object_total_(0) {
1078 // The serializer is meant to be used only to generate initial heap images
1079 // from a context in which there is only one isolate.
1080 ASSERT(Isolate::Current()->IsDefaultIsolate());
1065 for (int i = 0; i <= LAST_SPACE; i++) { 1081 for (int i = 0; i <= LAST_SPACE; i++) {
1066 fullness_[i] = 0; 1082 fullness_[i] = 0;
1067 } 1083 }
1068 } 1084 }
1069 1085
1070 1086
1071 Serializer::~Serializer() { 1087 Serializer::~Serializer() {
1072 delete external_reference_encoder_; 1088 delete external_reference_encoder_;
1073 } 1089 }
1074 1090
1075 1091
1076 void StartupSerializer::SerializeStrongReferences() { 1092 void StartupSerializer::SerializeStrongReferences() {
1093 Isolate* isolate = Isolate::Current();
1077 // No active threads. 1094 // No active threads.
1078 CHECK_EQ(NULL, ThreadState::FirstInUse()); 1095 CHECK_EQ(NULL, Isolate::Current()->thread_manager()->FirstThreadStateInUse());
1079 // No active or weak handles. 1096 // No active or weak handles.
1080 CHECK(HandleScopeImplementer::instance()->blocks()->is_empty()); 1097 CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
1081 CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles()); 1098 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
1082 // We don't support serializing installed extensions. 1099 // We don't support serializing installed extensions.
1083 for (RegisteredExtension* ext = RegisteredExtension::first_extension(); 1100 for (RegisteredExtension* ext = v8::RegisteredExtension::first_extension();
1084 ext != NULL; 1101 ext != NULL;
1085 ext = ext->next()) { 1102 ext = ext->next()) {
1086 CHECK_NE(v8::INSTALLED, ext->state()); 1103 CHECK_NE(v8::INSTALLED, ext->state());
1087 } 1104 }
1088 Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG); 1105 HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
1089 } 1106 }
1090 1107
1091 1108
1092 void PartialSerializer::Serialize(Object** object) { 1109 void PartialSerializer::Serialize(Object** object) {
1093 this->VisitPointer(object); 1110 this->VisitPointer(object);
1111 Isolate* isolate = Isolate::Current();
1094 1112
1095 // After we have done the partial serialization the partial snapshot cache 1113 // After we have done the partial serialization the partial snapshot cache
1096 // will contain some references needed to decode the partial snapshot. We 1114 // will contain some references needed to decode the partial snapshot. We
1097 // fill it up with undefineds so it has a predictable length so the 1115 // fill it up with undefineds so it has a predictable length so the
1098 // deserialization code doesn't need to know the length. 1116 // deserialization code doesn't need to know the length.
1099 for (int index = partial_snapshot_cache_length_; 1117 for (int index = isolate->serialize_partial_snapshot_cache_length();
1100 index < kPartialSnapshotCacheCapacity; 1118 index < Isolate::kPartialSnapshotCacheCapacity;
1101 index++) { 1119 index++) {
1102 partial_snapshot_cache_[index] = Heap::undefined_value(); 1120 isolate->serialize_partial_snapshot_cache()[index] =
1103 startup_serializer_->VisitPointer(&partial_snapshot_cache_[index]); 1121 isolate->heap()->undefined_value();
1122 startup_serializer_->VisitPointer(
1123 &isolate->serialize_partial_snapshot_cache()[index]);
1104 } 1124 }
1105 partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity; 1125 isolate->set_serialize_partial_snapshot_cache_length(
1126 Isolate::kPartialSnapshotCacheCapacity);
1106 } 1127 }
1107 1128
1108 1129
1109 void Serializer::VisitPointers(Object** start, Object** end) { 1130 void Serializer::VisitPointers(Object** start, Object** end) {
1110 for (Object** current = start; current < end; current++) { 1131 for (Object** current = start; current < end; current++) {
1111 if ((*current)->IsSmi()) { 1132 if ((*current)->IsSmi()) {
1112 sink_->Put(kRawData, "RawData"); 1133 sink_->Put(kRawData, "RawData");
1113 sink_->PutInt(kPointerSize, "length"); 1134 sink_->PutInt(kPointerSize, "length");
1114 for (int i = 0; i < kPointerSize; i++) { 1135 for (int i = 0; i < kPointerSize; i++) {
1115 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); 1136 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
1116 } 1137 }
1117 } else { 1138 } else {
1118 SerializeObject(*current, kPlain, kStartOfObject); 1139 SerializeObject(*current, kPlain, kStartOfObject);
1119 } 1140 }
1120 } 1141 }
1121 } 1142 }
1122 1143
1123 1144
1124 Object* SerializerDeserializer::partial_snapshot_cache_[
1125 kPartialSnapshotCacheCapacity];
1126 int SerializerDeserializer::partial_snapshot_cache_length_ = 0;
1127
1128
1129 // This ensures that the partial snapshot cache keeps things alive during GC and 1145 // This ensures that the partial snapshot cache keeps things alive during GC and
1130 // tracks their movement. When it is called during serialization of the startup 1146 // tracks their movement. When it is called during serialization of the startup
1131 // snapshot the partial snapshot is empty, so nothing happens. When the partial 1147 // snapshot the partial snapshot is empty, so nothing happens. When the partial
1132 // (context) snapshot is created, this array is populated with the pointers that 1148 // (context) snapshot is created, this array is populated with the pointers that
1133 // the partial snapshot will need. As that happens we emit serialized objects to 1149 // the partial snapshot will need. As that happens we emit serialized objects to
1134 // the startup snapshot that correspond to the elements of this cache array. On 1150 // the startup snapshot that correspond to the elements of this cache array. On
1135 // deserialization we therefore need to visit the cache array. This fills it up 1151 // deserialization we therefore need to visit the cache array. This fills it up
1136 // with pointers to deserialized objects. 1152 // with pointers to deserialized objects.
1137 void SerializerDeserializer::Iterate(ObjectVisitor* visitor) { 1153 void SerializerDeserializer::Iterate(ObjectVisitor* visitor) {
1154 Isolate* isolate = Isolate::Current();
1138 visitor->VisitPointers( 1155 visitor->VisitPointers(
1139 &partial_snapshot_cache_[0], 1156 isolate->serialize_partial_snapshot_cache(),
1140 &partial_snapshot_cache_[partial_snapshot_cache_length_]); 1157 &isolate->serialize_partial_snapshot_cache()[
1158 isolate->serialize_partial_snapshot_cache_length()]);
1141 } 1159 }
1142 1160
1143 1161
1144 // When deserializing we need to set the size of the snapshot cache. This means 1162 // When deserializing we need to set the size of the snapshot cache. This means
1145 // the root iteration code (above) will iterate over array elements, writing the 1163 // the root iteration code (above) will iterate over array elements, writing the
1146 // references to deserialized objects in them. 1164 // references to deserialized objects in them.
1147 void SerializerDeserializer::SetSnapshotCacheSize(int size) { 1165 void SerializerDeserializer::SetSnapshotCacheSize(int size) {
1148 partial_snapshot_cache_length_ = size; 1166 Isolate::Current()->set_serialize_partial_snapshot_cache_length(size);
1149 } 1167 }
1150 1168
1151 1169
1152 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) { 1170 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
1153 for (int i = 0; i < partial_snapshot_cache_length_; i++) { 1171 Isolate* isolate = Isolate::Current();
1154 Object* entry = partial_snapshot_cache_[i]; 1172
1173 for (int i = 0;
1174 i < isolate->serialize_partial_snapshot_cache_length();
1175 i++) {
1176 Object* entry = isolate->serialize_partial_snapshot_cache()[i];
1155 if (entry == heap_object) return i; 1177 if (entry == heap_object) return i;
1156 } 1178 }
1157 1179
1158 // We didn't find the object in the cache. So we add it to the cache and 1180 // We didn't find the object in the cache. So we add it to the cache and
1159 // then visit the pointer so that it becomes part of the startup snapshot 1181 // then visit the pointer so that it becomes part of the startup snapshot
1160 // and we can refer to it from the partial snapshot. 1182 // and we can refer to it from the partial snapshot.
1161 int length = partial_snapshot_cache_length_; 1183 int length = isolate->serialize_partial_snapshot_cache_length();
1162 CHECK(length < kPartialSnapshotCacheCapacity); 1184 CHECK(length < Isolate::kPartialSnapshotCacheCapacity);
1163 partial_snapshot_cache_[length] = heap_object; 1185 isolate->serialize_partial_snapshot_cache()[length] = heap_object;
1164 startup_serializer_->VisitPointer(&partial_snapshot_cache_[length]); 1186 startup_serializer_->VisitPointer(
1187 &isolate->serialize_partial_snapshot_cache()[length]);
1165 // We don't recurse from the startup snapshot generator into the partial 1188 // We don't recurse from the startup snapshot generator into the partial
1166 // snapshot generator. 1189 // snapshot generator.
1167 ASSERT(length == partial_snapshot_cache_length_); 1190 ASSERT(length == isolate->serialize_partial_snapshot_cache_length());
1168 return partial_snapshot_cache_length_++; 1191 isolate->set_serialize_partial_snapshot_cache_length(length + 1);
1192 return length;
1169 } 1193 }
1170 1194
1171 1195
1172 int PartialSerializer::RootIndex(HeapObject* heap_object) { 1196 int PartialSerializer::RootIndex(HeapObject* heap_object) {
1173 for (int i = 0; i < Heap::kRootListLength; i++) { 1197 for (int i = 0; i < Heap::kRootListLength; i++) {
1174 Object* root = Heap::roots_address()[i]; 1198 Object* root = HEAP->roots_address()[i];
1175 if (root == heap_object) return i; 1199 if (root == heap_object) return i;
1176 } 1200 }
1177 return kInvalidRootIndex; 1201 return kInvalidRootIndex;
1178 } 1202 }
1179 1203
1180 1204
1181 // Encode the location of an already deserialized object in order to write its 1205 // Encode the location of an already deserialized object in order to write its
1182 // location into a later object. We can encode the location as an offset from 1206 // location into a later object. We can encode the location as an offset from
1183 // the start of the deserialized objects or as an offset backwards from the 1207 // the start of the deserialized objects or as an offset backwards from the
1184 // current allocation pointer. 1208 // current allocation pointer.
(...skipping 62 matching lines...)
1247 heap_object, 1271 heap_object,
1248 sink_, 1272 sink_,
1249 how_to_code, 1273 how_to_code,
1250 where_to_point); 1274 where_to_point);
1251 object_serializer.Serialize(); 1275 object_serializer.Serialize();
1252 } 1276 }
1253 } 1277 }
1254 1278
1255 1279
1256 void StartupSerializer::SerializeWeakReferences() { 1280 void StartupSerializer::SerializeWeakReferences() {
1257 for (int i = partial_snapshot_cache_length_; 1281 for (int i = Isolate::Current()->serialize_partial_snapshot_cache_length();
1258 i < kPartialSnapshotCacheCapacity; 1282 i < Isolate::kPartialSnapshotCacheCapacity;
1259 i++) { 1283 i++) {
1260 sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization"); 1284 sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization");
1261 sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index"); 1285 sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index");
1262 } 1286 }
1263 Heap::IterateWeakRoots(this, VISIT_ALL); 1287 HEAP->IterateWeakRoots(this, VISIT_ALL);
1264 } 1288 }
1265 1289
1266 1290
1267 void PartialSerializer::SerializeObject( 1291 void PartialSerializer::SerializeObject(
1268 Object* o, 1292 Object* o,
1269 HowToCode how_to_code, 1293 HowToCode how_to_code,
1270 WhereToPoint where_to_point) { 1294 WhereToPoint where_to_point) {
1271 CHECK(o->IsHeapObject()); 1295 CHECK(o->IsHeapObject());
1272 HeapObject* heap_object = HeapObject::cast(o); 1296 HeapObject* heap_object = HeapObject::cast(o);
1273 1297
(...skipping 40 matching lines...)
1314 1338
1315 1339
1316 void Serializer::ObjectSerializer::Serialize() { 1340 void Serializer::ObjectSerializer::Serialize() {
1317 int space = Serializer::SpaceOfObject(object_); 1341 int space = Serializer::SpaceOfObject(object_);
1318 int size = object_->Size(); 1342 int size = object_->Size();
1319 1343
1320 sink_->Put(kNewObject + reference_representation_ + space, 1344 sink_->Put(kNewObject + reference_representation_ + space,
1321 "ObjectSerialization"); 1345 "ObjectSerialization");
1322 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words"); 1346 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");
1323 1347
1324 LOG(SnapshotPositionEvent(object_->address(), sink_->Position())); 1348 LOG(i::Isolate::Current(),
1349 SnapshotPositionEvent(object_->address(), sink_->Position()));
1325 1350
1326 // Mark this object as already serialized. 1351 // Mark this object as already serialized.
1327 bool start_new_page; 1352 bool start_new_page;
1328 int offset = serializer_->Allocate(space, size, &start_new_page); 1353 int offset = serializer_->Allocate(space, size, &start_new_page);
1329 serializer_->address_mapper()->AddMapping(object_, offset); 1354 serializer_->address_mapper()->AddMapping(object_, offset);
1330 if (start_new_page) { 1355 if (start_new_page) {
1331 sink_->Put(kNewPage, "NewPage"); 1356 sink_->Put(kNewPage, "NewPage");
1332 sink_->PutSection(space, "NewPageSpace"); 1357 sink_->PutSection(space, "NewPageSpace");
1333 } 1358 }
1334 1359
(...skipping 80 matching lines...)
1415 // objects in the snapshot. 1440 // objects in the snapshot.
1416 UNREACHABLE(); 1441 UNREACHABLE();
1417 } 1442 }
1418 1443
1419 1444
1420 void Serializer::ObjectSerializer::VisitExternalAsciiString( 1445 void Serializer::ObjectSerializer::VisitExternalAsciiString(
1421 v8::String::ExternalAsciiStringResource** resource_pointer) { 1446 v8::String::ExternalAsciiStringResource** resource_pointer) {
1422 Address references_start = reinterpret_cast<Address>(resource_pointer); 1447 Address references_start = reinterpret_cast<Address>(resource_pointer);
1423 OutputRawData(references_start); 1448 OutputRawData(references_start);
1424 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { 1449 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
1425 Object* source = Heap::natives_source_cache()->get(i); 1450 Object* source = HEAP->natives_source_cache()->get(i);
1426 if (!source->IsUndefined()) { 1451 if (!source->IsUndefined()) {
1427 ExternalAsciiString* string = ExternalAsciiString::cast(source); 1452 ExternalAsciiString* string = ExternalAsciiString::cast(source);
1428 typedef v8::String::ExternalAsciiStringResource Resource; 1453 typedef v8::String::ExternalAsciiStringResource Resource;
1429 Resource* resource = string->resource(); 1454 Resource* resource = string->resource();
1430 if (resource == *resource_pointer) { 1455 if (resource == *resource_pointer) {
1431 sink_->Put(kNativesStringResource, "NativesStringResource"); 1456 sink_->Put(kNativesStringResource, "NativesStringResource");
1432 sink_->PutSection(i, "NativesStringResourceEnd"); 1457 sink_->PutSection(i, "NativesStringResourceEnd");
1433 bytes_processed_so_far_ += sizeof(resource); 1458 bytes_processed_so_far_ += sizeof(resource);
1434 return; 1459 return;
1435 } 1460 }
(...skipping 29 matching lines...)
1465 sink_->PutSection(data, "Byte"); 1490 sink_->PutSection(data, "Byte");
1466 } 1491 }
1467 bytes_processed_so_far_ += skipped; 1492 bytes_processed_so_far_ += skipped;
1468 } 1493 }
1469 } 1494 }
1470 1495
1471 1496
1472 int Serializer::SpaceOfObject(HeapObject* object) { 1497 int Serializer::SpaceOfObject(HeapObject* object) {
1473 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { 1498 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
1474 AllocationSpace s = static_cast<AllocationSpace>(i); 1499 AllocationSpace s = static_cast<AllocationSpace>(i);
1475 if (Heap::InSpace(object, s)) { 1500 if (HEAP->InSpace(object, s)) {
1476 if (i == LO_SPACE) { 1501 if (i == LO_SPACE) {
1477 if (object->IsCode()) { 1502 if (object->IsCode()) {
1478 return kLargeCode; 1503 return kLargeCode;
1479 } else if (object->IsFixedArray()) { 1504 } else if (object->IsFixedArray()) {
1480 return kLargeFixedArray; 1505 return kLargeFixedArray;
1481 } else { 1506 } else {
1482 return kLargeData; 1507 return kLargeData;
1483 } 1508 }
1484 } 1509 }
1485 return i; 1510 return i;
1486 } 1511 }
1487 } 1512 }
1488 UNREACHABLE(); 1513 UNREACHABLE();
1489 return 0; 1514 return 0;
1490 } 1515 }
1491 1516
1492 1517
1493 int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) { 1518 int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) {
1494 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { 1519 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
1495 AllocationSpace s = static_cast<AllocationSpace>(i); 1520 AllocationSpace s = static_cast<AllocationSpace>(i);
1496 if (Heap::InSpace(object, s)) { 1521 if (HEAP->InSpace(object, s)) {
1497 return i; 1522 return i;
1498 } 1523 }
1499 } 1524 }
1500 UNREACHABLE(); 1525 UNREACHABLE();
1501 return 0; 1526 return 0;
1502 } 1527 }
1503 1528
1504 1529
1505 int Serializer::Allocate(int space, int size, bool* new_page) { 1530 int Serializer::Allocate(int space, int size, bool* new_page) {
1506 CHECK(space >= 0 && space < kNumberOfSpaces); 1531 CHECK(space >= 0 && space < kNumberOfSpaces);
(...skipping 23 matching lines...)
1530 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); 1555 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize);
1531 } 1556 }
1532 } 1557 }
1533 int allocation_address = fullness_[space]; 1558 int allocation_address = fullness_[space];
1534 fullness_[space] = allocation_address + size; 1559 fullness_[space] = allocation_address + size;
1535 return allocation_address; 1560 return allocation_address;
1536 } 1561 }
1537 1562
1538 1563
1539 } } // namespace v8::internal 1564 } } // namespace v8::internal
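Taken together, the recurring change in this file is one migration: state that used to live in file-level statics (ExternalReferenceTable::instance_, Deserializer::external_reference_decoder_, the partial_snapshot_cache_ array, the Top/Heap statics) now hangs off the Isolate and is reached through accessors, usually created lazily on first use. A minimal sketch of that lazy per-isolate accessor pattern, with illustrative names (Isolate, Table, table_, set_table here are placeholders, not the real declarations):

#include <cstddef>

class Table;

class Isolate {
 public:
  Isolate() : table_(NULL) {}
  Table* table() { return table_; }
  void set_table(Table* table) { table_ = table; }
 private:
  Table* table_;
};

class Table {
 public:
  // Before: a process-wide static instance_. After: one instance per isolate,
  // created on first use and cached on the Isolate, as instance(isolate) does above.
  static Table* instance(Isolate* isolate) {
    Table* table = isolate->table();
    if (table == NULL) {
      table = new Table(/* isolate-specific setup would go here */);
      isolate->set_table(table);
    }
    return table;
  }
};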