Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 20867003: More cleanup regarding the maximum non-large object allocation size. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 4 months ago
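Summary of the change: the SizeRestriction template parameter (SMALL vs. UNKNOWN_SIZE) is dropped from EvacuateObject. The scavenger no longer has a fallback branch that allocates oversized objects in the large-object space; instead it asserts that every evacuated object fits within Page::kMaxNonCodeHeapObjectSize and only chooses between the old data and old pointer spaces. The program below is a minimal, self-contained sketch of that pattern, not V8 code; the constant value and the space names are stand-ins.

// sketch.cc -- illustrative only; the constant and the returned space names
// are made-up stand-ins for Page::kMaxNonCodeHeapObjectSize and the old spaces.
#include <cassert>
#include <cstdio>

const int kMaxNonCodeHeapObjectSize = 16 * 1024;  // assumed value, not V8's

enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

// After the cleanup: the size bound is an assertion rather than a template
// parameter, so the allocator only picks between the two old spaces.
template<ObjectContents object_contents>
const char* ChooseSpace(int allocation_size) {
  assert(allocation_size <= kMaxNonCodeHeapObjectSize);
  return object_contents == DATA_OBJECT ? "old_data_space"
                                        : "old_pointer_space";
}

int main() {
  std::printf("%s\n", ChooseSpace<DATA_OBJECT>(128));     // old_data_space
  std::printf("%s\n", ChooseSpace<POINTER_OBJECT>(256));  // old_pointer_space
  return 0;
}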
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1990 matching lines...)
                                     kVisitStruct,
                                     kVisitStructGeneric>();
   }
 
   static VisitorDispatchTable<ScavengingCallback>* GetTable() {
     return &table_;
   }
 
  private:
   enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
-  enum SizeRestriction { SMALL, UNKNOWN_SIZE };
 
   static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
     bool should_record = false;
 #ifdef DEBUG
     should_record = FLAG_heap_stats;
 #endif
     should_record = should_record || FLAG_log_gc;
     if (should_record) {
       if (heap->new_space()->Contains(obj)) {
         heap->new_space()->RecordAllocation(obj);
(...skipping 31 matching lines...)
     }
 
     if (marks_handling == TRANSFER_MARKS) {
       if (Marking::TransferColor(source, target)) {
         MemoryChunk::IncrementLiveBytesFromGC(target->address(), size);
       }
     }
   }
 
 
-  template<ObjectContents object_contents,
-           SizeRestriction size_restriction,
-           int alignment>
+  template<ObjectContents object_contents, int alignment>
   static inline void EvacuateObject(Map* map,
                                     HeapObject** slot,
                                     HeapObject* object,
                                     int object_size) {
-    SLOW_ASSERT((size_restriction != SMALL) ||
-                (object_size <= Page::kMaxNonCodeHeapObjectSize));
+    SLOW_ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
     SLOW_ASSERT(object->Size() == object_size);
 
     int allocation_size = object_size;
     if (alignment != kObjectAlignment) {
       ASSERT(alignment == kDoubleAlignment);
       allocation_size += kPointerSize;
     }
 
     Heap* heap = map->GetHeap();
     if (heap->ShouldBePromoted(object->address(), object_size)) {
       MaybeObject* maybe_result;
 
-      if ((size_restriction != SMALL) &&
-          (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
-        maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
-                                                     NOT_EXECUTABLE);
-      } else {
-        if (object_contents == DATA_OBJECT) {
-          maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
-        } else {
-          maybe_result =
-              heap->old_pointer_space()->AllocateRaw(allocation_size);
-        }
-      }
+      if (object_contents == DATA_OBJECT) {
+        maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
+      } else {
+        maybe_result =
+            heap->old_pointer_space()->AllocateRaw(allocation_size);
+      }
 
       Object* result = NULL;  // Initialization to please compiler.
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);
 
         if (alignment != kObjectAlignment) {
           target = EnsureDoubleAligned(heap, target, allocation_size);
         }
 
(...skipping 53 matching lines...)
       map->GetHeap()->mark_compact_collector()->
           RecordCodeEntrySlot(code_entry_slot, code);
     }
   }
 
 
   static inline void EvacuateFixedArray(Map* map,
                                         HeapObject** slot,
                                         HeapObject* object) {
     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
-    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
-                                                                   slot,
-                                                                   object,
-                                                                   object_size);
+    EvacuateObject<POINTER_OBJECT, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
 
   static inline void EvacuateFixedDoubleArray(Map* map,
                                               HeapObject** slot,
                                               HeapObject* object) {
     int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
     int object_size = FixedDoubleArray::SizeFor(length);
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
-        map,
-        slot,
-        object,
-        object_size);
+    EvacuateObject<DATA_OBJECT, kDoubleAlignment>(
+        map, slot, object, object_size);
   }
 
 
   static inline void EvacuateByteArray(Map* map,
                                        HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+    EvacuateObject<DATA_OBJECT, kObjectAlignment>(
         map, slot, object, object_size);
   }
 
 
   static inline void EvacuateSeqOneByteString(Map* map,
                                               HeapObject** slot,
                                               HeapObject* object) {
     int object_size = SeqOneByteString::cast(object)->
         SeqOneByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+    EvacuateObject<DATA_OBJECT, kObjectAlignment>(
         map, slot, object, object_size);
   }
 
 
   static inline void EvacuateSeqTwoByteString(Map* map,
                                               HeapObject** slot,
                                               HeapObject* object) {
     int object_size = SeqTwoByteString::cast(object)->
         SeqTwoByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+    EvacuateObject<DATA_OBJECT, kObjectAlignment>(
         map, slot, object, object_size);
   }
 
 
   static inline bool IsShortcutCandidate(int type) {
     return ((type & kShortcutTypeMask) == kShortcutTypeTag);
   }
 
   static inline void EvacuateShortcutCandidate(Map* map,
                                                HeapObject** slot,
(...skipping 23 matching lines...)
         object->set_map_word(MapWord::FromForwardingAddress(target));
         return;
       }
 
       heap->DoScavengeObject(first->map(), slot, first);
       object->set_map_word(MapWord::FromForwardingAddress(*slot));
       return;
     }
 
     int object_size = ConsString::kSize;
-    EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
+    EvacuateObject<POINTER_OBJECT, kObjectAlignment>(
         map, slot, object, object_size);
   }
 
   template<ObjectContents object_contents>
   class ObjectEvacuationStrategy {
    public:
     template<int object_size>
     static inline void VisitSpecialized(Map* map,
                                         HeapObject** slot,
                                         HeapObject* object) {
-      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+      EvacuateObject<object_contents, kObjectAlignment>(
           map, slot, object, object_size);
     }
 
     static inline void Visit(Map* map,
                              HeapObject** slot,
                              HeapObject* object) {
       int object_size = map->instance_size();
-      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+      EvacuateObject<object_contents, kObjectAlignment>(
           map, slot, object, object_size);
     }
   };
 
   static VisitorDispatchTable<ScavengingCallback> table_;
 };
 
 
 template<MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
(...skipping 5757 matching lines...)
   if (FLAG_parallel_recompilation) {
     heap_->relocation_mutex_->Lock();
 #ifdef DEBUG
     heap_->relocation_mutex_locked_by_optimizer_thread_ =
         heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
 #endif  // DEBUG
   }
 }
 
 } }  // namespace v8::internal
