Chromium Code Reviews
Unified Diff: runtime/vm/snapshot.cc

Issue 1250463004: Migrate NoSafepointScope; add constrained concurrent allocation to unit test (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Ready for review. Created 5 years, 5 months ago
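
The CL's mechanical change, visible throughout the diff below, is that the no-safepoint depth is now read and updated on the current Thread instead of on the Isolate. The following is a minimal sketch of that pattern with simplified stand-in types, not the real VM classes (the member names mirror the ones in this diff; everything else is assumed for illustration):

// Sketch only: simplified stand-ins, not the real Dart VM classes.
#include <cassert>

class Thread {
 public:
  static Thread* Current() {
    static thread_local Thread current;  // one logical "current thread"
    return &current;
  }
  int no_safepoint_scope_depth() const { return no_safepoint_scope_depth_; }
  void IncrementNoSafepointScopeDepth() { ++no_safepoint_scope_depth_; }
  void DecrementNoSafepointScopeDepth() {
    assert(no_safepoint_scope_depth_ > 0);
    --no_safepoint_scope_depth_;
  }

 private:
  int no_safepoint_scope_depth_ = 0;  // was tracked per-Isolate before this CL
};

// RAII guard: while it is alive, the current thread must not reach a
// safepoint, so GC cannot move or reclaim raw pointers held by the caller.
class NoSafepointScope {
 public:
  NoSafepointScope() : thread_(Thread::Current()) {
    thread_->IncrementNoSafepointScopeDepth();
  }
  ~NoSafepointScope() { thread_->DecrementNoSafepointScopeDepth(); }

 private:
  Thread* const thread_;
};

// The allocation helpers in the diff assert that a scope is open on *this*
// thread before touching uninitialized heap memory:
void AllocateUninitializedSketch() {
  assert(Thread::Current()->no_safepoint_scope_depth() != 0);
  // ... allocate without triggering GC ...
}

int main() {
  NoSafepointScope no_safepoint;  // as in ForwardList / SnapshotWriter
  AllocateUninitializedSketch();
  return 0;
}

Making the counter per-thread means two threads in the same isolate can hold independent scopes, which is presumably what the "constrained concurrent allocation" part of the unit-test change relies on.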
Unified diff ('-' marks the old line, '+' the new line; other lines are unchanged context, shown once with their line number):
   1  // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
   2  // for details. All rights reserved. Use of this source code is governed by a
   3  // BSD-style license that can be found in the LICENSE file.
   4
   5  #include "vm/snapshot.h"
   6
   7  #include "platform/assert.h"
   8  #include "vm/bootstrap.h"
   9  #include "vm/class_finalizer.h"
  10  #include "vm/dart.h"
(...skipping 591 matching lines...)
 602      const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
 603      return ApiError::New(msg, Heap::kOld);
 604    }
 605    Advance(version_len);
 606    return ApiError::null();
 607  }
 608
 609
 610  #define ALLOC_NEW_OBJECT_WITH_LEN(type, length) \
 611    ASSERT(kind_ == Snapshot::kFull); \
-612    ASSERT(isolate()->no_safepoint_scope_depth() != 0); \
+612    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0); \
siva 2015/07/22 17:52:49 Ditto comment about Thread::Current() in this file
koda 2015/07/22 19:55:47 Done.
 613    Raw##type* obj = reinterpret_cast<Raw##type*>( \
 614        AllocateUninitialized(k##type##Cid, type::InstanceSize(length))); \
 615    obj->StoreSmi(&(obj->ptr()->length_), Smi::New(length)); \
 616    return obj; \
 617
 618
 619  RawArray* SnapshotReader::NewArray(intptr_t len) {
 620    ALLOC_NEW_OBJECT_WITH_LEN(Array, len);
 621  }
 622
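
The ALLOC_NEW_OBJECT_WITH_LEN helper above (and ALLOC_NEW_OBJECT further down) are token-pasting macros that expand into the entire function body, including the final return; ALLOC_NEW_OBJECT_WITH_LEN(Array, len), for example, pastes together RawArray, kArrayCid and Array::InstanceSize(len). A tiny stand-alone demo of the same ## pattern, with dummy types that are not VM code:

#include <cstdio>

// Dummy stand-ins so the demo compiles on its own.
struct RawArray { int length_; };
enum { kArrayCid = 4 };

// Same shape as ALLOC_NEW_OBJECT_WITH_LEN: the macro is the whole body.
#define ALLOC_DEMO_WITH_LEN(type, length)                          \
  Raw##type obj;                   /* Raw##type    -> RawArray  */ \
  obj.length_ = (length);                                          \
  std::printf("cid=%d len=%d\n", k##type##Cid, obj.length_);       \
  return obj;                      /* k##type##Cid -> kArrayCid */

RawArray NewArrayDemo(int len) {
  ALLOC_DEMO_WITH_LEN(Array, len);
}

int main() {
  NewArrayDemo(3);  // prints "cid=4 len=3"
  return 0;
}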
(...skipping 13 matching lines...)
 636  }
 637
 638
 639  RawTypeArguments* SnapshotReader::NewTypeArguments(intptr_t len) {
 640    ALLOC_NEW_OBJECT_WITH_LEN(TypeArguments, len);
 641  }
 642
 643
 644  RawTokenStream* SnapshotReader::NewTokenStream(intptr_t len) {
 645    ASSERT(kind_ == Snapshot::kFull);
-646    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+646    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 647    stream_ = reinterpret_cast<RawTokenStream*>(
 648        AllocateUninitialized(kTokenStreamCid, TokenStream::InstanceSize()));
 649    uint8_t* array = const_cast<uint8_t*>(CurrentBufferAddress());
 650    ASSERT(array != NULL);
 651    Advance(len);
 652    data_ = reinterpret_cast<RawExternalTypedData*>(
 653        AllocateUninitialized(kExternalTypedDataUint8ArrayCid,
 654                              ExternalTypedData::InstanceSize()));
 655    data_.SetData(array);
 656    data_.SetLength(len);
 657    stream_.SetStream(data_);
 658    return stream_.raw();
 659  }
 660
 661
 662  RawContext* SnapshotReader::NewContext(intptr_t num_variables) {
 663    ASSERT(kind_ == Snapshot::kFull);
-664    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+664    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 665    RawContext* obj = reinterpret_cast<RawContext*>(
 666        AllocateUninitialized(kContextCid, Context::InstanceSize(num_variables)));
 667    obj->ptr()->num_variables_ = num_variables;
 668    return obj;
 669  }
 670
 671
 672  RawClass* SnapshotReader::NewClass(intptr_t class_id) {
 673    ASSERT(kind_ == Snapshot::kFull);
-674    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+674    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 675    if (class_id < kNumPredefinedCids) {
 676      ASSERT((class_id >= kInstanceCid) &&
 677             (class_id <= kNullCid));
 678      return isolate()->class_table()->At(class_id);
 679    }
 680    RawClass* obj = reinterpret_cast<RawClass*>(
 681        AllocateUninitialized(kClassCid, Class::InstanceSize()));
 682    Instance fake;
 683    obj->ptr()->handle_vtable_ = fake.vtable();
 684    cls_ = obj;
 685    cls_.set_id(class_id);
 686    isolate()->RegisterClassAt(class_id, cls_);
 687    return cls_.raw();
 688  }
 689
 690
 691  RawInstance* SnapshotReader::NewInstance() {
 692    ASSERT(kind_ == Snapshot::kFull);
-693    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+693    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 694    RawInstance* obj = reinterpret_cast<RawInstance*>(
 695        AllocateUninitialized(kObjectCid, Instance::InstanceSize()));
 696    return obj;
 697  }
 698
 699
 700  RawMint* SnapshotReader::NewMint(int64_t value) {
 701    ASSERT(kind_ == Snapshot::kFull);
-702    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+702    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 703    RawMint* obj = reinterpret_cast<RawMint*>(
 704        AllocateUninitialized(kMintCid, Mint::InstanceSize()));
 705    obj->ptr()->value_ = value;
 706    return obj;
 707  }
 708
 709
 710  RawDouble* SnapshotReader::NewDouble(double value) {
 711    ASSERT(kind_ == Snapshot::kFull);
-712    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+712    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 713    RawDouble* obj = reinterpret_cast<RawDouble*>(
 714        AllocateUninitialized(kDoubleCid, Double::InstanceSize()));
 715    obj->ptr()->value_ = value;
 716    return obj;
 717  }
 718
 719
 720  RawTypedData* SnapshotReader::NewTypedData(intptr_t class_id, intptr_t len) {
 721    ASSERT(kind_ == Snapshot::kFull);
-722    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+722    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 723    const intptr_t lengthInBytes = len * TypedData::ElementSizeInBytes(class_id);
 724    RawTypedData* obj = reinterpret_cast<RawTypedData*>(
 725        AllocateUninitialized(class_id, TypedData::InstanceSize(lengthInBytes)));
 726    obj->StoreSmi(&(obj->ptr()->length_), Smi::New(len));
 727    return obj;
 728  }
 729
 730
 731  #define ALLOC_NEW_OBJECT(type) \
 732    ASSERT(kind_ == Snapshot::kFull); \
-733    ASSERT(isolate()->no_safepoint_scope_depth() != 0); \
+733    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0); \
 734    return reinterpret_cast<Raw##type*>( \
 735        AllocateUninitialized(k##type##Cid, type::InstanceSize())); \
 736
 737
 738  RawBigint* SnapshotReader::NewBigint() {
 739    ALLOC_NEW_OBJECT(Bigint);
 740  }
 741
 742
 743  RawUnresolvedClass* SnapshotReader::NewUnresolvedClass() {
(...skipping 77 matching lines...)
 821
 822
 823  RawGrowableObjectArray* SnapshotReader::NewGrowableObjectArray() {
 824    ALLOC_NEW_OBJECT(GrowableObjectArray);
 825  }
 826
 827
 828  RawFloat32x4* SnapshotReader::NewFloat32x4(float v0, float v1, float v2,
 829                                             float v3) {
 830    ASSERT(kind_ == Snapshot::kFull);
-831    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+831    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 832    RawFloat32x4* obj = reinterpret_cast<RawFloat32x4*>(
 833        AllocateUninitialized(kFloat32x4Cid, Float32x4::InstanceSize()));
 834    obj->ptr()->value_[0] = v0;
 835    obj->ptr()->value_[1] = v1;
 836    obj->ptr()->value_[2] = v2;
 837    obj->ptr()->value_[3] = v3;
 838    return obj;
 839  }
 840
 841
 842  RawInt32x4* SnapshotReader::NewInt32x4(uint32_t v0, uint32_t v1, uint32_t v2,
 843                                         uint32_t v3) {
 844    ASSERT(kind_ == Snapshot::kFull);
-845    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+845    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 846    RawInt32x4* obj = reinterpret_cast<RawInt32x4*>(
 847        AllocateUninitialized(kInt32x4Cid, Int32x4::InstanceSize()));
 848    obj->ptr()->value_[0] = v0;
 849    obj->ptr()->value_[1] = v1;
 850    obj->ptr()->value_[2] = v2;
 851    obj->ptr()->value_[3] = v3;
 852    return obj;
 853  }
 854
 855
 856  RawFloat64x2* SnapshotReader::NewFloat64x2(double v0, double v1) {
 857    ASSERT(kind_ == Snapshot::kFull);
-858    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+858    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 859    RawFloat64x2* obj = reinterpret_cast<RawFloat64x2*>(
 860        AllocateUninitialized(kFloat64x2Cid, Float64x2::InstanceSize()));
 861    obj->ptr()->value_[0] = v0;
 862    obj->ptr()->value_[1] = v1;
 863    return obj;
 864  }
 865
 866
 867  RawApiError* SnapshotReader::NewApiError() {
 868    ALLOC_NEW_OBJECT(ApiError);
(...skipping 38 matching lines...)
 907    }
 908    ASSERT(SerializedHeaderTag::decode(class_header) == kObjectId);
 909    intptr_t class_id = SerializedHeaderData::decode(class_header);
 910    ASSERT(IsObjectStoreClassId(class_id));
 911    return class_id;
 912  }
 913
 914
 915  RawObject* SnapshotReader::AllocateUninitialized(intptr_t class_id,
 916                                                   intptr_t size) {
-917    ASSERT(isolate()->no_safepoint_scope_depth() != 0);
+917    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
 918    ASSERT(Utils::IsAligned(size, kObjectAlignment));
 919
 920    // Allocate memory where all words look like smis. This is currently
 921    // only needed for DEBUG-mode validation in StorePointer/StoreSmi, but will
 922    // be essential with the upcoming deletion barrier.
 923    uword address =
 924        old_space()->TryAllocateSmiInitializedLocked(size,
 925                                                     PageSpace::kForceGrowth);
 926    if (address == 0) {
 927      // Use the preallocated out of memory exception to avoid calling
(...skipping 768 matching lines...)
1696    }
1697  }
1698
1699
1700  ForwardList::ForwardList(intptr_t first_object_id)
1701      : first_object_id_(first_object_id),
1702        nodes_(),
1703        first_unprocessed_object_id_(first_object_id) {
1704    // The ForwardList encodes information in the header tag word. There cannot
1705    // be any concurrent GC tasks while it is in use.
-1706    Isolate* isolate = Isolate::Current();
+1706    Thread* thread = Thread::Current();
+1707    Isolate* isolate = thread->isolate();
1708    PageSpace* page_space = isolate->heap()->old_space();
1709    MonitorLocker ml(page_space->tasks_lock());
1710    while (page_space->tasks() > 0) {
1711      ml.Wait();
1712    }
1713    // Ensure that no GC happens while we are writing out the full snapshot.
-1713    isolate->IncrementNoSafepointScopeDepth();
+1714    thread->IncrementNoSafepointScopeDepth();
1715  }
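
Per the comment at lines 1704-1705, the constructor first drains the page-space task count under its monitor and only then raises the no-safepoint depth, so no concurrent GC task can be touching header tag words while ForwardList repurposes them. A generic stand-in for that wait-under-a-monitor step, using std::mutex and std::condition_variable in place of the VM's MonitorLocker (assumed equivalents, not the real classes):

#include <condition_variable>
#include <mutex>

// Stand-in for PageSpace's task bookkeeping: tasks_ counts concurrent GC
// helpers (e.g. sweepers) working on the old space.
class TaskCounter {
 public:
  void WaitUntilIdle() {
    std::unique_lock<std::mutex> lock(mutex_);
    // Equivalent of: while (page_space->tasks() > 0) ml.Wait();
    cv_.wait(lock, [this] { return tasks_ == 0; });
  }
  void TaskStarted() {
    std::lock_guard<std::mutex> lock(mutex_);
    ++tasks_;
  }
  void TaskFinished() {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      --tasks_;
    }
    cv_.notify_all();
  }

 private:
  std::mutex mutex_;
  std::condition_variable cv_;
  int tasks_ = 0;
};

int main() {
  TaskCounter tasks;
  tasks.WaitUntilIdle();  // no tasks registered, so this returns immediately
  return 0;
}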
1716
1717
1718  ForwardList::~ForwardList() {
1719  }
1720
1721
1722  intptr_t ForwardList::MarkAndAddObject(RawObject* raw, SerializeState state) {
1723    NoSafepointScope no_safepoint;
1724    intptr_t object_id = next_object_id();
(...skipping 38 matching lines...)
1763
1764
1765  void ForwardList::UnmarkAll() const {
1766    for (intptr_t id = first_object_id(); id < next_object_id(); ++id) {
1767      const Node* node = NodeForObjectId(id);
1768      RawObject* raw = node->raw();
1769      if (SerializedHeaderTag::decode(raw->ptr()->tags_) == kObjectId) {
1770        raw->ptr()->tags_ = node->tags();  // Restore original tags.
1771      }
1772    }
-1772    Isolate::Current()->DecrementNoSafepointScopeDepth();
+1773    Thread::Current()->DecrementNoSafepointScopeDepth();
1774  }
1775
1776
1777  bool SnapshotWriter::CheckAndWritePredefinedObject(RawObject* rawobj) {
1778    // Check if object can be written in one of the following ways:
1779    // - Smi: the Smi value is written as is (last bit is not tagged).
1780    // - VM internal class (from VM isolate): (index of class in vm isolate | 0x3)
1781    // - Object that has already been written: (negative id in stream | 0x3)
1782
1783    NoSafepointScope no_safepoint;
(...skipping 559 matching lines...)
2343      NoSafepointScope no_safepoint;
2344      WriteObject(obj.raw());
2345      UnmarkAll();
2346    } else {
2347      ThrowException(exception_type(), exception_msg());
2348    }
2349  }
2350
2351
2352  }  // namespace dart