Chromium Code Reviews
Unified Diff: src/serialize.cc

Issue 3970002: [Isolates] Minimize TLS accesses during startup deserialization. (Closed)
Patch Set: Created 10 years, 2 months ago
 // Copyright 2006-2008 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 228 matching lines...)
         Debug::k_debug_break_return_address << kDebugIdShift,
         "Debug::debug_break_return_address()");
     Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate),
         DEBUG_ADDRESS,
         Debug::k_restarter_frame_function_pointer << kDebugIdShift,
         "Debug::restarter_frame_function_pointer_address()");
 #endif

   // Stat counters
   struct StatsRefTableEntry {
-    StatsCounter* counter;
+    StatsCounter* (Counters::*counter)();
     uint16_t id;
     const char* name;
   };

   const StatsRefTableEntry stats_ref_table[] = {
 #define COUNTER_ENTRY(name, caption) \
-  { COUNTERS->name(), \
+  { &Counters::name, \
     Counters::k_##name, \
     "Counters::" #name },

   STATS_COUNTER_LIST_1(COUNTER_ENTRY)
   STATS_COUNTER_LIST_2(COUNTER_ENTRY)
 #undef COUNTER_ENTRY
   };  // end of stats_ref_table[].

+  Counters* counters = isolate->counters();
   for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
-    Add(reinterpret_cast<Address>(
-        GetInternalPointer(stats_ref_table[i].counter)),
+    Add(reinterpret_cast<Address>(GetInternalPointer(
+        (counters->*(stats_ref_table[i].counter))())),
         STATS_COUNTER,
         stats_ref_table[i].id,
         stats_ref_table[i].name);
   }
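The table entries now store a pointer to the Counters member function instead of the value of COUNTERS->name(), so building stats_ref_table no longer goes through the TLS-backed COUNTERS accessor; each counter address is resolved later through the isolate's Counters instance. Below is a minimal, self-contained sketch of this pointer-to-member-function table idiom; the Counters class and counter names are hypothetical stand-ins, not the real V8 declarations.

// Sketch only: hypothetical Counters class, not the V8 one.
#include <cstdio>

class Counters {
 public:
  int* gc_count() { return &gc_count_; }
  int* compile_count() { return &compile_count_; }
 private:
  int gc_count_ = 0;
  int compile_count_ = 0;
};

struct StatsRefTableEntry {
  int* (Counters::*counter)();  // pointer to member function; no object bound yet
  const char* name;
};

// Taking &Counters::gc_count needs no Counters instance, so the table can be
// built (even statically) without touching thread-local state.
static const StatsRefTableEntry stats_ref_table[] = {
  { &Counters::gc_count, "Counters::gc_count" },
  { &Counters::compile_count, "Counters::compile_count" },
};

int main() {
  Counters counters;  // supplied later, e.g. fetched once from the isolate
  for (const StatsRefTableEntry& entry : stats_ref_table) {
    int* address = (counters.*entry.counter)();  // bind the object and call
    std::printf("%s -> %p\n", entry.name, static_cast<void*>(address));
  }
  return 0;
}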

   // Top addresses
-  const char* top_address_format = "Isolate::%s";

   const char* AddressNames[] = {
-#define C(name) #name,
+#define C(name) "Isolate::" #name,
   ISOLATE_ADDRESS_LIST(C)
   ISOLATE_ADDRESS_LIST_PROF(C)
   NULL
 #undef C
   };

-  int top_format_length = StrLength(top_address_format) - 2;
   for (uint16_t i = 0; i < Isolate::k_isolate_address_count; ++i) {
-    const char* address_name = AddressNames[i];
-    Vector<char> name =
-        Vector<char>::New(top_format_length + StrLength(address_name) + 1);
-    const char* chars = name.start();
-    OS::SNPrintF(name, top_address_format, address_name);
     Add(isolate->get_address_from_id((Isolate::AddressId)i),
-        TOP_ADDRESS, i, chars);
+        TOP_ADDRESS, i, AddressNames[i]);
   }
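With the list macro emitting "Isolate::" #name, the "Isolate::..." strings are assembled by the preprocessor (stringization) and the compiler (adjacent string-literal concatenation), so the per-entry StrLength/Vector/OS::SNPrintF work in the old loop disappears. A small sketch of that mechanism, using a made-up address list:

#include <cstdio>

// Hypothetical list macro standing in for ISOLATE_ADDRESS_LIST.
#define ADDRESS_LIST(V) \
  V(handler_address)    \
  V(c_entry_fp_address)

// "Isolate::" #name expands to two adjacent string literals, which the
// compiler merges into one at compile time, so no runtime formatting is needed.
#define C(name) "Isolate::" #name,
static const char* const kAddressNames[] = {
  ADDRESS_LIST(C)
  nullptr
};
#undef C

int main() {
  for (const char* const* p = kAddressNames; *p != nullptr; ++p) {
    std::puts(*p);  // prints Isolate::handler_address, Isolate::c_entry_fp_address
  }
  return 0;
}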

   // Extensions
   Add(FUNCTION_ADDR(GCExtension::GC), EXTENSION, 1,
       "GCExtension::GC");

   // Accessors
 #define ACCESSOR_DESCRIPTOR_DECLARATION(name) \
   Add((Address)&Accessors::name, \
       ACCESSOR, \
(...skipping 228 matching lines...)
   }
   DeleteArray(encodings_);
 }


 bool Serializer::serialization_enabled_ = false;
 bool Serializer::too_late_to_enable_now_ = false;


 Deserializer::Deserializer(SnapshotByteSource* source)
-    : source_(source),
+    : isolate_(NULL),
+      source_(source),
       external_reference_decoder_(NULL) {
 }


 // This routine both allocates a new object, and also keeps
 // track of where objects have been allocated so that we can
 // fix back references when deserializing.
 Address Deserializer::Allocate(int space_index, Space* space, int size) {
   Address address;
   if (!SpaceIsLarge(space_index)) {
(...skipping 58 matching lines...)
   }
   ASSERT(SpaceIsPaged(space));
   int page_of_pointee = offset >> kPageSizeBits;
   Address object_address = pages_[space][page_of_pointee] +
                            (offset & Page::kPageAlignmentMask);
   return HeapObject::FromAddress(object_address);
 }


 void Deserializer::Deserialize() {
+  isolate_ = Isolate::Current();
   // Don't GC while deserializing - just expand the heap.
   AlwaysAllocateScope always_allocate;
   // Don't use the free lists while deserializing.
   LinearAllocationScope allocate_linearly;
   // No active threads.
-  ASSERT_EQ(NULL,
-            Isolate::Current()->thread_manager()->FirstThreadStateInUse());
+  ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
   // No active handles.
-  ASSERT(Isolate::Current()->handle_scope_implementer()->blocks()->is_empty());
+  ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
   // Make sure the entire partial snapshot cache is traversed, filling it with
   // valid object pointers.
-  Isolate::Current()->set_serialize_partial_snapshot_cache_length(
+  isolate_->set_serialize_partial_snapshot_cache_length(
       Isolate::kPartialSnapshotCacheCapacity);
   ASSERT_EQ(NULL, external_reference_decoder_);
   external_reference_decoder_ = new ExternalReferenceDecoder();
-  HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
-  HEAP->IterateWeakRoots(this, VISIT_ALL);
+  isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
+  isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
 }
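This is the core of the change: Deserialize() reads Isolate::Current() exactly once, caches it in isolate_, and every later heap, thread-manager, and handle-scope access goes through the cached pointer instead of repeating the thread-local lookup (both HEAP and Isolate::Current() hit TLS). A simplified sketch of the pattern, with stand-in Isolate and Heap types:

// Sketch only: simplified stand-ins for Isolate and Heap.
struct Heap {
  void IterateStrongRoots() {}
  void IterateWeakRoots() {}
};

struct Isolate {
  static Isolate* Current();   // TLS lookup; comparatively expensive
  Heap* heap() { return &heap_; }
  Heap heap_;
};

static thread_local Isolate current_isolate;   // trivial TLS backing store
Isolate* Isolate::Current() { return &current_isolate; }

class Deserializer {
 public:
  Deserializer() : isolate_(nullptr) {}

  void Deserialize() {
    isolate_ = Isolate::Current();           // one TLS access at the start...
    isolate_->heap()->IterateStrongRoots();  // ...then ordinary field loads
    isolate_->heap()->IterateWeakRoots();
  }

 private:
  Isolate* isolate_;  // cached for the duration of the deserialization
};

int main() {
  Deserializer deserializer;
  deserializer.Deserialize();
  return 0;
}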


 void Deserializer::DeserializePartial(Object** root) {
+  isolate_ = Isolate::Current();
   // Don't GC while deserializing - just expand the heap.
   AlwaysAllocateScope always_allocate;
   // Don't use the free lists while deserializing.
   LinearAllocationScope allocate_linearly;
   if (external_reference_decoder_ == NULL) {
     external_reference_decoder_ = new ExternalReferenceDecoder();
   }
   VisitPointer(root);
 }

(...skipping 28 matching lines...)
   int size = source_->GetInt() << kObjectAlignmentBits;
   Address address = Allocate(space_number, space, size);
   *write_back = HeapObject::FromAddress(address);
   Object** current = reinterpret_cast<Object**>(address);
   Object** limit = current + (size >> kPointerSizeLog2);
   if (FLAG_log_snapshot_positions) {
     LOG(SnapshotPositionEvent(address, source_->position()));
   }
   ReadChunk(current, limit, space_number, address);

-  if (space == space->heap()->map_space()) {
+  if (space_number == MAP_SPACE) {
     ASSERT(size == Map::kSize);
     HeapObject* obj = HeapObject::FromAddress(address);
     Map* map = reinterpret_cast<Map*>(obj);
-    map->set_heap(space->heap());
+    map->set_heap(isolate_->heap());
   }
 }


 // This macro is always used with a constant argument so it should all fold
 // away to almost nothing in the generated code. It might be nicer to do this
 // with the ternary operator but there are type issues with that.
 #define ASSIGN_DEST_SPACE(space_number) \
   Space* dest_space; \
   if (space_number == NEW_SPACE) { \
-    dest_space = HEAP->new_space(); \
+    dest_space = isolate->heap()->new_space(); \
   } else if (space_number == OLD_POINTER_SPACE) { \
-    dest_space = HEAP->old_pointer_space(); \
+    dest_space = isolate->heap()->old_pointer_space(); \
   } else if (space_number == OLD_DATA_SPACE) { \
-    dest_space = HEAP->old_data_space(); \
+    dest_space = isolate->heap()->old_data_space(); \
   } else if (space_number == CODE_SPACE) { \
-    dest_space = HEAP->code_space(); \
+    dest_space = isolate->heap()->code_space(); \
   } else if (space_number == MAP_SPACE) { \
-    dest_space = HEAP->map_space(); \
+    dest_space = isolate->heap()->map_space(); \
   } else if (space_number == CELL_SPACE) { \
-    dest_space = HEAP->cell_space(); \
+    dest_space = isolate->heap()->cell_space(); \
   } else { \
     ASSERT(space_number >= LO_SPACE); \
-    dest_space = HEAP->lo_space(); \
+    dest_space = isolate->heap()->lo_space(); \
   }
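ASSIGN_DEST_SPACE now routes through isolate->heap() as well; as the comment above notes, the macro is only instantiated with constant space numbers, so the expanded if/else chain is dead-branch-eliminated down to a single assignment. A hedged illustration of that folding, with made-up spaces and storage:

// Sketch only: the enum, Space struct and globals below are illustrative.
enum SpaceNumber { NEW_SPACE = 0, OLD_DATA_SPACE = 1, CODE_SPACE = 2 };

struct Space { const char* name; };
static Space new_space_storage = { "new" };
static Space old_data_space_storage = { "old-data" };
static Space code_space_storage = { "code" };

#define ASSIGN_DEST_SPACE(space_number) \
  Space* dest_space; \
  if (space_number == NEW_SPACE) { \
    dest_space = &new_space_storage; \
  } else if (space_number == OLD_DATA_SPACE) { \
    dest_space = &old_data_space_storage; \
  } else { \
    dest_space = &code_space_storage; \
  }

const char* DestinationForCode() {
  // The argument is a compile-time constant, so the optimizer reduces the
  // expanded chain to "dest_space = &code_space_storage;".
  ASSIGN_DEST_SPACE(CODE_SPACE)
  return dest_space->name;
}

int main() { return DestinationForCode()[0] == 'c' ? 0 : 1; }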


 static const int kUnknownOffsetFromStart = -1;


 void Deserializer::ReadChunk(Object** current,
                              Object** limit,
                              int source_space,
                              Address address) {
-  Isolate* isolate = Isolate::Current();
+  Isolate* const isolate = isolate_;
   while (current < limit) {
     int data = source_->Get();
     switch (data) {
 #define CASE_STATEMENT(where, how, within, space_number) \
   case where + how + within + space_number: \
     ASSERT((where & ~kPointedToMask) == 0); \
     ASSERT((how & ~kHowToCodeMask) == 0); \
     ASSERT((within & ~kWhereToPointMask) == 0); \
     ASSERT((space_number & ~kSpaceMask) == 0);

 #define CASE_BODY(where, how, within, space_number_if_any, offset_from_start) \
   { \
     bool emit_write_barrier = false; \
     bool current_was_incremented = false; \
     int space_number = space_number_if_any == kAnyOldSpace ? \
                        (data & kSpaceMask) : space_number_if_any; \
     if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
       ASSIGN_DEST_SPACE(space_number) \
       ReadObject(space_number, dest_space, current); \
       emit_write_barrier = \
           (space_number == NEW_SPACE && source_space != NEW_SPACE); \
     } else { \
       Object* new_object = NULL; /* May not be a real Object pointer. */ \
       if (where == kNewObject) { \
         ASSIGN_DEST_SPACE(space_number) \
         ReadObject(space_number, dest_space, &new_object); \
       } else if (where == kRootArray) { \
         int root_id = source_->GetInt(); \
-        new_object = HEAP->roots_address()[root_id]; \
+        new_object = isolate->heap()->roots_address()[root_id]; \
       } else if (where == kPartialSnapshotCache) { \
         int cache_index = source_->GetInt(); \
         new_object = isolate->serialize_partial_snapshot_cache() \
             [cache_index]; \
       } else if (where == kExternalReference) { \
         int reference_id = source_->GetInt(); \
         Address address = external_reference_decoder_-> \
             Decode(reference_id); \
         new_object = reinterpret_cast<Object*>(address); \
       } else if (where == kBackref) { \
(...skipping 25 matching lines...)
         if (within == kFirstInstruction) { \
           location_of_branch_data += Assembler::kCallTargetSize; \
           current = reinterpret_cast<Object**>(location_of_branch_data); \
           current_was_incremented = true; \
         } \
       } else { \
         *current = new_object; \
       } \
     } \
     if (emit_write_barrier) { \
-      HEAP->RecordWrite(address, static_cast<int>( \
+      isolate->heap()->RecordWrite(address, static_cast<int>( \
           reinterpret_cast<Address>(current) - address)); \
     } \
     if (!current_was_incremented) { \
       current++; /* Increment current if it wasn't done above. */ \
     } \
     break; \
   } \

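For context on what the ASSERTs in CASE_STATEMENT guard: the byte read from the snapshot packs the where/how/within/space fields into disjoint bit ranges, so adding them yields a unique case label. The sketch below uses an invented bit layout (the mask values are assumptions, not the serializer's real encoding) just to show why the masks catch a field that strays out of its range:

#include <cassert>

// Invented layout: each field gets its own bits, so '+' behaves like '|'.
static const int kSpaceMask        = 0x07;  // bits 0..2
static const int kWhereToPointMask = 0x08;  // bit 3
static const int kHowToCodeMask    = 0x10;  // bit 4
static const int kPointedToMask    = 0xe0;  // bits 5..7

static int EncodeCase(int where, int how, int within, int space_number) {
  assert((where & ~kPointedToMask) == 0);
  assert((how & ~kHowToCodeMask) == 0);
  assert((within & ~kWhereToPointMask) == 0);
  assert((space_number & ~kSpaceMask) == 0);
  return where + how + within + space_number;  // unique per combination
}

int main() {
  // Distinct field combinations can never collide in the switch.
  assert(EncodeCase(0x20, 0x10, 0x08, 2) != EncodeCase(0x20, 0x00, 0x08, 2));
  return 0;
}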
 // This generates a case and a body for each space. The large object spaces are
 // very rare in snapshots so they are grouped in one body.
(...skipping 144 matching lines...)
           CPU::FlushICache(last_object_address_, Page::kPageSize);
         }
         break;
       }

       case kNativesStringResource: {
         int index = source_->Get();
         Vector<const char> source_vector = Natives::GetScriptSource(index);
         NativesExternalStringResource* resource =
             new NativesExternalStringResource(
-                Isolate::Current()->bootstrapper(), source_vector.start());
+                isolate->bootstrapper(), source_vector.start());
         *current++ = reinterpret_cast<Object*>(resource);
         break;
       }

       case kSynchronize: {
         // If we get here then that indicates that you have a mismatch between
         // the number of GC roots when serializing and deserializing.
         UNREACHABLE();
       }

(...skipping 523 matching lines...)
       fullness_[space] = RoundUp(fullness_[space], Page::kPageSize);
     }
   }
   int allocation_address = fullness_[space];
   fullness_[space] = allocation_address + size;
   return allocation_address;
 }


 } }  // namespace v8::internal