Chromium Code Reviews

Side by Side Diff: src/objects-visiting.h

Issue 3066044: Generalize virtually dispatched scavenger to virtually dispatched specialized visitors. (Closed)
Patch Set: cleanup (created 10 years, 4 months ago)
1 // Copyright 2006-2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #ifndef V8_OBJECTS_ITERATION_H_
29 #define V8_OBJECTS_ITERATION_H_
30
31 // This file provides base classes and auxiliary methods for defining
32 // static object visitors used during GC.
33 // Visiting a HeapObject body with a normal ObjectVisitor requires performing
34 // two switches on the object's instance type to determine object size and
35 // layout, plus one or more virtual method calls on the visitor itself.
36 // A static visitor is different: it provides a dispatch table containing
37 // pointers to specialized visit functions. Each map has a visitor_id
38 // field which holds the index of the specialized visitor to use.
39
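// A minimal sketch of the dispatch this enables (reviewer illustration only;
// the concrete table and visitor classes are defined below):
//
//   // One indexed load through the map's visitor_id replaces the
//   // instance-type switches and the virtual call of an ObjectVisitor:
//   //   Callback visit = table_.GetVisitor(object->map());
//   //   int size = visit(object->map(), object);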
40 namespace v8 {
41 namespace internal {
42
43
44 // Base class for all static visitors.
45 class StaticVisitorBase : public AllStatic {
46 public:
47 enum VisitorId {
48 kVisitSeqAsciiString = 0,
49 kVisitSeqTwoByteString,
50 kVisitShortcutCandidate,
51 kVisitByteArray,
52 kVisitFixedArray,
53
54     // For data objects, JS objects and structs, along with the generic
55     // visitor, we provide visitors specialized by object size in words.
56     // The method GetVisitorIdForSize should be used to calculate the
57     // visitor id for a given instance size.
58
59 kVisitDataObject,
60 kVisitDataObject2 = kVisitDataObject,
61 kVisitDataObject3,
62 kVisitDataObject4,
63 kVisitDataObject5,
64 kVisitDataObject6,
65 kVisitDataObject7,
66 kVisitDataObject8,
67 kVisitDataObject9,
68 kVisitDataObjectGeneric,
69
70 kVisitJSObject,
71 kVisitJSObject2 = kVisitJSObject,
72 kVisitJSObject3,
73 kVisitJSObject4,
74 kVisitJSObject5,
75 kVisitJSObject6,
76 kVisitJSObject7,
77 kVisitJSObject8,
78 kVisitJSObject9,
79 kVisitJSObjectGeneric,
80
81 kVisitStruct,
82 kVisitStruct2 = kVisitStruct,
83 kVisitStruct3,
84 kVisitStruct4,
85 kVisitStruct5,
86 kVisitStruct6,
87 kVisitStruct7,
88 kVisitStruct8,
89 kVisitStruct9,
90 kVisitStructGeneric,
91
92 kVisitConsString,
93 kVisitOddball,
94 kVisitCode,
95 kVisitMap,
96 kVisitPropertyCell,
97 kVisitSharedFunctionInfo,
98
99 kVisitorIdCount,
100 kMinObjectSizeInWords = 2
101 };
102
103 // Determine which specialized visitor should be used for given instance type
104 // and instance size.
105 static VisitorId GetVisitorId(int instance_type, int instance_size);
106
107 static VisitorId GetVisitorId(Map* map) {
108 return GetVisitorId(map->instance_type(), map->instance_size());
109 }
110
111 // For visitors that allow specialization by size, calculate the VisitorId
112 // based on the object size, the base visitor id and the generic visitor id.
113 static VisitorId GetVisitorIdForSize(VisitorId base,
114 VisitorId generic,
115 int object_size) {
116 ASSERT((base == kVisitDataObject) ||
117 (base == kVisitStruct) ||
118 (base == kVisitJSObject));
119 ASSERT(IsAligned(object_size, kPointerSize));
120 ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
121 ASSERT(object_size < Page::kMaxHeapObjectSize);
122
123 const VisitorId specialization = static_cast<VisitorId>(
124 base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
125
126 return Min(specialization, generic);
127 }
128 };
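// Worked example (reviewer note, illustrative only): on a 32-bit build
// (kPointerSize == 4, kMinObjectSizeInWords == 2) a 12-byte struct yields
//
//   kVisitStruct + (12 >> kPointerSizeLog2) - 2 == kVisitStruct + 1
//                                               == kVisitStruct3,
//
// while any object of 10 or more words is clamped to kVisitStructGeneric by
// the final Min(specialization, generic).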
129
130
131 template<typename Callback>
132 class VisitorDispatchTable {
133 public:
134 inline Callback GetVisitor(Map* map) {
135 return callbacks_[map->visitor_id()];
136 }
137
138 void Register(StaticVisitorBase::VisitorId id, Callback callback) {
139 ASSERT((0 <= id) && (id < StaticVisitorBase::kVisitorIdCount));
140 callbacks_[id] = callback;
141 }
142
143 template<typename Visitor,
144 StaticVisitorBase::VisitorId base,
145 StaticVisitorBase::VisitorId generic,
146 int object_size_in_words>
147 void RegisterSpecialization() {
148 static const int size = object_size_in_words * kPointerSize;
149 Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
150 &Visitor::template VisitSpecialized<size>);
151 }
152
153
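// Registers Visitor::VisitSpecialized<size> for every specialized slot in
// [base, generic) (object sizes of 2..9 words) and Visitor::Visit for the
// generic slot; the STATIC_ASSERT checks that the id range really spans
// exactly eight size specializations plus the generic entry.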
154 template<typename Visitor,
155 StaticVisitorBase::VisitorId base,
156 StaticVisitorBase::VisitorId generic>
157 void RegisterSpecializations() {
158 STATIC_ASSERT(
159 (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
160 RegisterSpecialization<Visitor, base, generic, 2>();
161 RegisterSpecialization<Visitor, base, generic, 3>();
162 RegisterSpecialization<Visitor, base, generic, 4>();
163 RegisterSpecialization<Visitor, base, generic, 5>();
164 RegisterSpecialization<Visitor, base, generic, 6>();
165 RegisterSpecialization<Visitor, base, generic, 7>();
166 RegisterSpecialization<Visitor, base, generic, 8>();
167 RegisterSpecialization<Visitor, base, generic, 9>();
168 Register(generic, &Visitor::Visit);
169 }
170
171 private:
172 Callback callbacks_[StaticVisitorBase::kVisitorIdCount];
173 };
174
175
176 template<typename StaticVisitor>
177 class BodyVisitorBase : public AllStatic {
178 public:
179 static inline void IteratePointers(HeapObject* object,
180 int start_offset,
181 int end_offset) {
182 Object** start_slot = reinterpret_cast<Object**>(object->address() +
183 start_offset);
184 Object** end_slot = reinterpret_cast<Object**>(object->address() +
185 end_offset);
186 StaticVisitor::VisitPointers(start_slot, end_slot);
187 }
188 };
189
190
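// Body visitor for objects whose size is determined at run time through
// BodyDescriptor::SizeOf (e.g. FixedArray); VisitSpecialized provides the
// compile-time-sized entry points used by the size-specialized table slots.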
191 template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
192 class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
193 public:
194 static inline ReturnType Visit(Map* map, HeapObject* object) {
195 int object_size = BodyDescriptor::SizeOf(map, object);
196 BodyVisitorBase<StaticVisitor>::IteratePointers(object, BodyDescriptor::kStartOffset, object_size);
197 return static_cast<ReturnType>(object_size);
198 }
199
200 template<int object_size>
201 static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
202 BodyVisitorBase<StaticVisitor>::IteratePointers(object, BodyDescriptor::kStartOffset, object_size);
203 return static_cast<ReturnType>(object_size);
204 }
205 };
206
207
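// Body visitor for objects with a fixed layout described entirely by the
// compile-time constants BodyDescriptor::kStartOffset, kEndOffset and kSize.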
208 template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
209 class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
210 public:
211 static inline ReturnType Visit(Map* map, HeapObject* object) {
212 BodyVisitorBase<StaticVisitor>::IteratePointers(object,
213 BodyDescriptor::kStartOffset,
214 BodyDescriptor::kEndOffset);
215 return static_cast<ReturnType>(BodyDescriptor::kSize);
216 }
217 };
218
219
220 // Base class for visitors used for a linear new space iteration.
221 // IterateBody returns the size of the visited object.
222 // Certain types of objects (e.g. Code objects) are not handled
223 // by the dispatch table of this visitor because they cannot appear
224 // in the new space.
225 //
226 // This class is intended to be used in the following way:
227 //
228 // class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
229 // ...
230 // }
231 //
232 // This is an example of the Curiously Recurring Template Pattern
233 // (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
234 // We use CRTP to guarantee aggressive compile-time optimizations (i.e.
235 // inlining and specialization of StaticVisitor::VisitPointers methods).
236 template<typename StaticVisitor>
237 class StaticNewSpaceVisitor : public StaticVisitorBase {
238 public:
239 static void Initialize() {
240 table_.Register(kVisitShortcutCandidate,
241 &FixedBodyVisitor<StaticVisitor,
242 ConsString::BodyDescriptor,
243 int>::Visit);
244
245 table_.Register(kVisitConsString,
246 &FixedBodyVisitor<StaticVisitor,
247 ConsString::BodyDescriptor,
248 int>::Visit);
249
250 table_.Register(kVisitFixedArray,
251 &FlexibleBodyVisitor<StaticVisitor,
252 FixedArray::BodyDescriptor,
253 int>::Visit);
254
255 table_.Register(kVisitByteArray, &VisitByteArray);
256
257 table_.Register(kVisitSharedFunctionInfo,
258 &FixedBodyVisitor<StaticVisitor,
259 SharedFunctionInfo::BodyDescriptor,
260 int>::Visit);
261
262 table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
263
264 table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
265
266 table_.RegisterSpecializations<DataObjectVisitor,
267 kVisitDataObject,
268 kVisitDataObjectGeneric>();
269 table_.RegisterSpecializations<JSObjectVisitor,
270 kVisitJSObject,
271 kVisitJSObjectGeneric>();
272 table_.RegisterSpecializations<StructVisitor,
273 kVisitStruct,
274 kVisitStructGeneric>();
275 }
276
277 static inline int IterateBody(Map* map, HeapObject* obj) {
278 return table_.GetVisitor(map)(map, obj);
279 }
280
281 static inline void VisitPointers(Object** start, Object** end) {
282 for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(p);
283 }
284
285 private:
286 static inline int VisitByteArray(Map* map, HeapObject* object) {
287 return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
288 }
289
290 static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
291 return SeqAsciiString::cast(object)->
292 SeqAsciiStringSize(map->instance_type());
293 }
294
295 static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
296 return SeqTwoByteString::cast(object)->
297 SeqTwoByteStringSize(map->instance_type());
298 }
299
300 class DataObjectVisitor {
301 public:
302 template<int object_size>
303 static inline int VisitSpecialized(Map* map, HeapObject* object) {
304 return object_size;
305 }
306
307 static inline int Visit(Map* map, HeapObject* object) {
308 return map->instance_size();
309 }
310 };
311
312 typedef FlexibleBodyVisitor<StaticVisitor,
313 StructBodyDescriptor,
314 int> StructVisitor;
315
316 typedef FlexibleBodyVisitor<StaticVisitor,
317 JSObject::BodyDescriptor,
318 int> JSObjectVisitor;
319
320 typedef int (*Callback)(Map* map, HeapObject* object);
321
322 static VisitorDispatchTable<Callback> table_;
323 };
324
325
326 template<typename StaticVisitor>
327 VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
328 StaticNewSpaceVisitor<StaticVisitor>::table_;
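// Usage sketch (reviewer illustration only; ExampleVisitor and the calls
// below are hypothetical, not part of the patch). A concrete visitor
// supplies VisitPointer, initializes the dispatch table once, and then
// drives iteration through IterateBody:
//
//   class ExampleVisitor : public StaticNewSpaceVisitor<ExampleVisitor> {
//    public:
//     static inline void VisitPointer(Object** p) {
//       // Inspect or update the slot *p here.
//     }
//   };
//
//   // Once at startup:
//   ExampleVisitor::Initialize();
//
//   // For each object during a linear new space walk:
//   int size = ExampleVisitor::IterateBody(object->map(), object);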
329
330
331 void Code::CodeIterateBody(ObjectVisitor* v) {
332 int mode_mask = RelocInfo::kCodeTargetMask |
333 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
334 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
335 RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
336 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
337 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
338
339 // Use the relocation info pointer before it is visited by
340 // the heap compaction in the next statement.
341 RelocIterator it(this, mode_mask);
342
343 IteratePointers(v,
344 kRelocationInfoOffset,
345 kRelocationInfoOffset + kPointerSize);
346
347 for (; !it.done(); it.next()) {
348 it.rinfo()->Visit(v);
349 }
350 }
351
352
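// Static (templated) counterpart of CodeIterateBody(ObjectVisitor*) above:
// it visits the same relocation modes, but dispatches through StaticVisitor
// instead of a virtual ObjectVisitor.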
353 template<typename StaticVisitor>
354 void Code::CodeIterateBody() {
355 int mode_mask = RelocInfo::kCodeTargetMask |
356 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
357 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
358 RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
359 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
360 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
361
362 // Use the relocation info pointer before it is visited by
363 // the heap compaction in the next statement.
364 RelocIterator it(this, mode_mask);
365
366 StaticVisitor::VisitPointer(
367 reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
368
369 for (; !it.done(); it.next()) {
370 it.rinfo()->Visit<StaticVisitor>();
371 }
372 }
373
374
375 } } // namespace v8::internal
376
377 #endif // V8_OBJECTS_ITERATION_H_