// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/ic-inl.h"
#include "src/objects-visiting.h"

namespace v8 {
namespace internal {


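// Maps an object's instance type (and, for variably-sized objects, its
// instance size) to the VisitorId naming the specialized body visitor for
// that layout.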
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type,
    int instance_size) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject,
                                   kVisitDataObjectGeneric,
                                   instance_size);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case CONSTANT_POOL_ARRAY_TYPE:
      return kVisitConstantPoolArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSSet::kSize);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSMap::kSize);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSProxy::kSize);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSFunctionProxy::kSize);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject,
                                 kVisitDataObjectGeneric,
                                 Foreign::kSize);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
      return GetVisitorIdForSize(kVisitJSObject,
                                 kVisitJSObjectGeneric,
                                 instance_size);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:

      TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
      return GetVisitorIdForSize(kVisitDataObject,
                                 kVisitDataObjectGeneric,
                                 instance_size);
#undef EXTERNAL_ARRAY_CASE

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) \
        case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 instance_size);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


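// Traits for the weak lists threaded through the heap: how to read and
// write the "next" link of an element of type T, where that link lives,
// and what extra work a live or dead element needs. Specialized below.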
template <class T>
struct WeakListVisitor;


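// Walks the weak list starting at |list|, unlinking elements the retainer
// no longer retains and rewiring the survivors into a shorter list.
// Returns the new head; records updated slots when the compactor needs it.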
template <class T>
Object* VisitWeakList(Heap* heap,
                      Object* list,
                      WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);
  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);
    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // Retained object is the new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there are one or more elements.
  if (tail != NULL) {
    WeakListVisitor<T>::SetWeakNext(tail, undefined);
  }
  return head;
}


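// Resets the weak "next" link of every element in the list, so that no
// stale links survive once the owner of the list head is gone.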
template <class T>
static void ClearWeakList(Heap* heap,
                          Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


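// Weak list of JSFunctions, chained through the next-function-link field.
// Live and dead elements need no type-specific processing.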
template<>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() {
    return JSFunction::kNextFunctionLinkOffset;
  }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


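// Weak list of Code objects, chained through the next-code-link field.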
template<>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next);
  }

  static Object* WeakNext(Code* code) {
    return code->next_code_link();
  }

  static int WeakNextOffset() {
    return Code::kNextCodeLinkOffset;
  }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


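// Weak list of native contexts. A live context owns three weak lists of
// its own (optimized functions, optimized and deoptimized code), which are
// pruned here; a dead context's lists are cleared instead.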
template<>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK,
                 next,
                 UPDATE_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap,
                              Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
  }

  template<class T>
  static void DoWeakList(Heap* heap,
                         Context* context,
                         WeakObjectRetainer* retainer,
                         int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot = HeapObject::RawField(
          context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(
          head_slot, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


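// Weak list of array buffer views (typed arrays and data views), chained
// through the view's weak_next field.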
template<>
struct WeakListVisitor<JSArrayBufferView> {
  static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBufferView* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return JSArrayBufferView::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
};


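// Weak list of array buffers. For a live buffer the list of its views is
// pruned in turn; for a dead buffer the external backing store is freed.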
template<>
struct WeakListVisitor<JSArrayBuffer> {
  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBuffer* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return JSArrayBuffer::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap* heap,
                              JSArrayBuffer* array_buffer,
                              WeakObjectRetainer* retainer) {
    Object* typed_array_obj =
        VisitWeakList<JSArrayBufferView>(
            heap,
            array_buffer->weak_first_view(),
            retainer);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
      Object** slot = HeapObject::RawField(
          array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
    }
  }

  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
  }
};


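// Weak list of allocation sites, chained through the weak_next field.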
template<>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(AllocationSite* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return AllocationSite::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


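// Explicit instantiations for every weak list element type, so that
// VisitWeakList's definition can stay local to this translation unit.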
template Object* VisitWeakList<Code>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSFunction>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<Context>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSArrayBuffer>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<AllocationSite>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);

} }  // namespace v8::internal