Chromium Code Reviews

Side by Side Diff: src/spaces.cc

Issue 7324051: Remove heap protection support. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 5 months ago
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 850 matching lines...)
}


void PagedSpace::TearDown() {
  Isolate::Current()->memory_allocator()->FreeAllPages(this);
  first_page_ = NULL;
  accounting_stats_.Clear();
}


-#ifdef ENABLE_HEAP_PROTECTION
-
-void PagedSpace::Protect() {
-  Page* page = first_page_;
-  while (page->is_valid()) {
-    Isolate::Current()->memory_allocator()->ProtectChunkFromPage(page);
-    page = Isolate::Current()->memory_allocator()->
-        FindLastPageInSameChunk(page)->next_page();
-  }
-}
-
-
-void PagedSpace::Unprotect() {
-  Page* page = first_page_;
-  while (page->is_valid()) {
-    Isolate::Current()->memory_allocator()->UnprotectChunkFromPage(page);
-    page = Isolate::Current()->memory_allocator()->
-        FindLastPageInSameChunk(page)->next_page();
-  }
-}
-
-#endif
-
-
void PagedSpace::MarkAllPagesClean() {
  PageIterator it(this, PageIterator::ALL_PAGES);
  while (it.has_next()) {
    it.next()->SetRegionMarks(Page::kAllRegionsCleanMarks);
  }
}


MaybeObject* PagedSpace::FindObject(Address addr) {
  // Note: this function can only be called before or after mark-compact GC
(...skipping 346 matching lines...)
  allocation_info_.top = NULL;
  allocation_info_.limit = NULL;
  mc_forwarding_info_.top = NULL;
  mc_forwarding_info_.limit = NULL;

  to_space_.TearDown();
  from_space_.TearDown();
}


-#ifdef ENABLE_HEAP_PROTECTION
-
-void NewSpace::Protect() {
-  heap()->isolate()->memory_allocator()->Protect(ToSpaceLow(), Capacity());
-  heap()->isolate()->memory_allocator()->Protect(FromSpaceLow(), Capacity());
-}
-
-
-void NewSpace::Unprotect() {
-  heap()->isolate()->memory_allocator()->Unprotect(ToSpaceLow(), Capacity(),
-                                                   to_space_.executable());
-  heap()->isolate()->memory_allocator()->Unprotect(FromSpaceLow(), Capacity(),
-                                                   from_space_.executable());
-}
-
-#endif
-
-
void NewSpace::Flip() {
  SemiSpace tmp = from_space_;
  from_space_ = to_space_;
  to_space_ = tmp;
}


void NewSpace::Grow() {
  ASSERT(Capacity() < MaximumCapacity());
  if (to_space_.Grow()) {
(...skipping 1513 matching lines...)
    heap()->isolate()->memory_allocator()->PerformAllocationCallback(
        space, kAllocationActionFree, size);
  }

  size_ = 0;
  page_count_ = 0;
  objects_size_ = 0;
}


-#ifdef ENABLE_HEAP_PROTECTION
-
-void LargeObjectSpace::Protect() {
-  LargeObjectChunk* chunk = first_chunk_;
-  while (chunk != NULL) {
-    heap()->isolate()->memory_allocator()->Protect(chunk->address(),
-                                                   chunk->size());
-    chunk = chunk->next();
-  }
-}
-
-
-void LargeObjectSpace::Unprotect() {
-  LargeObjectChunk* chunk = first_chunk_;
-  while (chunk != NULL) {
-    bool is_code = chunk->GetObject()->IsCode();
-    heap()->isolate()->memory_allocator()->Unprotect(chunk->address(),
-        chunk->size(), is_code ? EXECUTABLE : NOT_EXECUTABLE);
-    chunk = chunk->next();
-  }
-}
-
-#endif
-
-
MaybeObject* LargeObjectSpace::AllocateRawInternal(int requested_size,
                                                   int object_size,
                                                   Executability executable) {
  ASSERT(0 < object_size && object_size <= requested_size);

  // Check if we want to force a GC before growing the old space further.
  // If so, fail the allocation.
  if (!heap()->always_allocate() &&
      heap()->OldGenerationAllocationLimitReached()) {
    return Failure::RetryAfterGC(identity());
(...skipping 289 matching lines...)
  for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
    if (obj->IsCode()) {
      Code* code = Code::cast(obj);
      isolate->code_kind_statistics()[code->kind()] += code->Size();
    }
  }
}
#endif  // DEBUG

} }  // namespace v8::internal
