OLD | NEW |
---|---|
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef VM_HANDLES_IMPL_H_ | 5 #ifndef VM_HANDLES_IMPL_H_ |
6 #define VM_HANDLES_IMPL_H_ | 6 #define VM_HANDLES_IMPL_H_ |
7 | 7 |
8 #include "vm/heap.h" | |
9 #include "vm/heap_trace.h" | |
8 #include "vm/visitor.h" | 10 #include "vm/visitor.h" |
9 | 11 |
10 namespace dart { | 12 namespace dart { |
11 | 13 |
12 DECLARE_DEBUG_FLAG(bool, trace_handles_count); | 14 DECLARE_DEBUG_FLAG(bool, trace_handles_count); |
13 | 15 |
14 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 16 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
15 void Handles<kHandleSizeInWords, | 17 void Handles<kHandleSizeInWords, |
16 kHandlesPerChunk, | 18 kHandlesPerChunk, |
17 kOffsetOfRawPtr>::VisitObjectPointers( | 19 kOffsetOfRawPtr>::VisitObjectPointers( |
(...skipping 10 matching lines...) | |
28 do { | 30 do { |
29 block->VisitObjectPointers(visitor); | 31 block->VisitObjectPointers(visitor); |
30 block = block->next_block(); | 32 block = block->next_block(); |
31 } while (block != NULL); | 33 } while (block != NULL); |
32 } | 34 } |
33 | 35 |
34 | 36 |
35 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 37 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
36 void Handles<kHandleSizeInWords, | 38 void Handles<kHandleSizeInWords, |
37 kHandlesPerChunk, | 39 kHandlesPerChunk, |
40 kOffsetOfRawPtr>::VisitScopedHandles( | |
41 ObjectPointerVisitor* visitor) { | |
42 HandlesBlock* block; | |
43 // Visit all scoped handles. | |
44 block = &first_scoped_block_; | |
45 do { | |
46 block->VisitObjectPointers(visitor); | |
47 block = block->next_block(); | |
48 } while (block != NULL); | |
49 } | |
50 | |
51 | |
52 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | |
53 void Handles<kHandleSizeInWords, | |
54 kHandlesPerChunk, | |
55 kOffsetOfRawPtr>::VisitUnvisitedScopedHandles( | |
56 ObjectPointerVisitor* visitor) { | |
57 HandlesBlock* block = &first_scoped_block_; | |
58 while (block != NULL && block != last_visited_block_) { | |
59 block->VisitUnvisitedObjectPointers(visitor); | |
60 block = block->next_block(); | |
61 } | |
62 // We want this to point to first_scoped_block_.next_block(), because |
63 // pointers are still being added to first_scoped_block_, so it may be |
64 // "partially new" and require a partial scan. |
65 last_visited_block_ = first_scoped_block_.next_block(); | |
66 } | |
67 | |
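
The partial-scan bookkeeping above is easier to see in isolation. Below is a simplified, standalone model (not VM code; all names are illustrative) of the same idea: blocks that were already fully visited are skipped, and the still-growing head block is re-scanned only from the slot where the previous pass stopped.

    #include <cstdio>
    #include <vector>

    // Illustrative model only: a chain of blocks where new slots are appended
    // to the head block, mirroring first_scoped_block_.
    struct Block {
      std::vector<int> slots;          // stand-in for handle slots
      size_t last_visited = 0;         // mirrors last_visited_handle_
      Block* next = nullptr;

      void VisitUnvisited() {
        // Only the slots appended since the previous pass are touched.
        for (; last_visited < slots.size(); ++last_visited) {
          std::printf("visit %d\n", slots[last_visited]);
        }
      }
    };

    struct Handles {
      Block head;                            // mirrors first_scoped_block_
      Block* last_visited_block = nullptr;   // mirrors last_visited_block_

      void VisitUnvisitedScopedHandles() {
        for (Block* b = &head; b != nullptr && b != last_visited_block;
             b = b->next) {
          b->VisitUnvisited();
        }
        // The head block may still grow, so remember its successor; the head
        // itself stays eligible for a partial re-scan next time.
        last_visited_block = head.next;
      }
    };

    int main() {
      Handles h;
      h.head.slots = {1, 2};
      h.VisitUnvisitedScopedHandles();  // visits 1, 2
      h.head.slots.push_back(3);        // head block is now "partially new"
      h.VisitUnvisitedScopedHandles();  // visits only 3
    }
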
68 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | |
69 void Handles<kHandleSizeInWords, | |
70 kHandlesPerChunk, | |
38 kOffsetOfRawPtr>::Visit(HandleVisitor* visitor) { | 71 kOffsetOfRawPtr>::Visit(HandleVisitor* visitor) { |
39 // Visit all zone handles. | 72 // Visit all zone handles. |
40 HandlesBlock* block = zone_blocks_; | 73 HandlesBlock* block = zone_blocks_; |
41 while (block != NULL) { | 74 while (block != NULL) { |
42 block->Visit(visitor); | 75 block->Visit(visitor); |
43 block = block->next_block(); | 76 block = block->next_block(); |
44 } | 77 } |
45 | 78 |
46 // Visit all scoped handles. | 79 // Visit all scoped handles. |
47 block = &first_scoped_block_; | 80 block = &first_scoped_block_; |
48 do { | 81 do { |
49 block->Visit(visitor); | 82 block->Visit(visitor); |
50 block = block->next_block(); | 83 block = block->next_block(); |
51 } while (block != NULL); | 84 } while (block != NULL); |
siva 2012/12/14 02:11:54
Can we use the new VisitScopedHandles(visitor) here?
cshapiro 2012/12/15 19:56:49
Sure can. Done.
52 } | 85 } |
53 | 86 |
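
For reference, a sketch of what the reviewer's "Done" above might amount to, assuming the suggestion targets a scoped-handle loop in a function taking an ObjectPointerVisitor (the parameter type VisitScopedHandles expects). The zone-handle loop is reconstructed by analogy with Visit, since the corresponding lines of VisitObjectPointers are collapsed in this diff:

    // Sketch only, not part of this patch: delegate the scoped-handle
    // traversal to the new VisitScopedHandles helper instead of repeating
    // the do/while loop inline.
    template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
    void Handles<kHandleSizeInWords,
                 kHandlesPerChunk,
                 kOffsetOfRawPtr>::VisitObjectPointers(
        ObjectPointerVisitor* visitor) {
      // Visit all zone handles (reconstructed by analogy; the original lines
      // are collapsed above).
      HandlesBlock* block = zone_blocks_;
      while (block != NULL) {
        block->VisitObjectPointers(visitor);
        block = block->next_block();
      }
      // Visit all scoped handles.
      VisitScopedHandles(visitor);
    }
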
54 | 87 |
55 // Figure out the current handle scope using the current Isolate and | 88 // Figure out the current handle scope using the current Isolate and |
56 // allocate a handle in that scope. The function assumes that a | 89 // allocate a handle in that scope. The function assumes that a |
57 // current Isolate, current zone and current handle scope exist. It | 90 // current Isolate, current zone and current handle scope exist. It |
58 // asserts for this appropriately. | 91 // asserts for this appropriately. |
59 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 92 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
60 uword Handles<kHandleSizeInWords, | 93 uword Handles<kHandleSizeInWords, |
61 kHandlesPerChunk, | 94 kHandlesPerChunk, |
(...skipping 14 matching lines...) | |
76 // this appropriately. | 109 // this appropriately. |
77 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 110 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
78 uword Handles<kHandleSizeInWords, | 111 uword Handles<kHandleSizeInWords, |
79 kHandlesPerChunk, | 112 kHandlesPerChunk, |
80 kOffsetOfRawPtr>::AllocateZoneHandle(Isolate* isolate) { | 113 kOffsetOfRawPtr>::AllocateZoneHandle(Isolate* isolate) { |
81 ASSERT(isolate != NULL); | 114 ASSERT(isolate != NULL); |
82 ASSERT(isolate->current_zone() != NULL); | 115 ASSERT(isolate->current_zone() != NULL); |
83 ASSERT(isolate->no_handle_scope_depth() == 0); | 116 ASSERT(isolate->no_handle_scope_depth() == 0); |
84 Handles* handles = isolate->current_zone()->handles(); | 117 Handles* handles = isolate->current_zone()->handles(); |
85 ASSERT(handles != NULL); | 118 ASSERT(handles != NULL); |
86 return handles->AllocateHandleInZone(); | 119 uword address = handles->AllocateHandleInZone(); |
120 if (HeapTrace::is_enabled()) { | |
121 uword zone_addr = reinterpret_cast<uword>(isolate->current_zone()); | |
122 isolate->heap()->trace()->TraceAllocateZoneHandle(address, zone_addr); | |
123 } | |
124 return address; | |
87 } | 125 } |
88 | 126 |
89 | 127 |
90 // Figure out the current zone using the current Isolate and | 128 // Figure out the current zone using the current Isolate and |
91 // check if the specified handle has been allocated in this zone. | 129 // check if the specified handle has been allocated in this zone. |
92 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 130 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
93 bool Handles<kHandleSizeInWords, | 131 bool Handles<kHandleSizeInWords, |
94 kHandlesPerChunk, | 132 kHandlesPerChunk, |
95 kOffsetOfRawPtr>::IsZoneHandle(uword handle) { | 133 kOffsetOfRawPtr>::IsZoneHandle(uword handle) { |
96 // TODO(5411412): Accessing the current isolate is a performance problem, | 134 // TODO(5411412): Accessing the current isolate is a performance problem, |
97 // consider passing it down as a parameter. | 135 // consider passing it down as a parameter. |
98 Isolate* isolate = Isolate::Current(); | 136 Isolate* isolate = Isolate::Current(); |
99 ASSERT(isolate != NULL); | 137 ASSERT(isolate != NULL); |
100 ASSERT(isolate->current_zone() != NULL); | 138 ASSERT(isolate->current_zone() != NULL); |
101 Handles* handles = isolate->current_zone()->handles(); | 139 Handles* handles = isolate->current_zone()->handles(); |
102 ASSERT(handles != NULL); | 140 ASSERT(handles != NULL); |
103 return handles->IsValidZoneHandle(handle); | 141 return handles->IsValidZoneHandle(handle); |
104 } | 142 } |
105 | 143 |
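
As an aside, the TODO(5411412) above suggests passing the isolate down rather than looking it up. A minimal sketch of that variant, using only accessors already shown in this file (the two-argument signature is an assumption, not part of this change):

    // Hypothetical variant sketched from the TODO: the caller supplies the
    // isolate, avoiding the Isolate::Current() lookup on a hot path.
    template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
    bool Handles<kHandleSizeInWords,
                 kHandlesPerChunk,
                 kOffsetOfRawPtr>::IsZoneHandle(Isolate* isolate, uword handle) {
      ASSERT(isolate != NULL);
      ASSERT(isolate->current_zone() != NULL);
      Handles* handles = isolate->current_zone()->handles();
      ASSERT(handles != NULL);
      return handles->IsValidZoneHandle(handle);
    }
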
106 | 144 |
107 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 145 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
108 void Handles<kHandleSizeInWords, | 146 void Handles<kHandleSizeInWords, |
109 kHandlesPerChunk, | 147 kHandlesPerChunk, |
110 kOffsetOfRawPtr>::DeleteAll() { | 148 kOffsetOfRawPtr>::DeleteAll() { |
111 // Delete all the zone allocated handle blocks. | 149 // Delete all the zone allocated handle blocks. |
150 // HeapTrace does not need to trace this call to DeleteHandleBlocks, |
151 // since the individual zone deletions will be caught | |
152 // by instrumentation in the BaseZone destructor. | |
112 DeleteHandleBlocks(zone_blocks_); | 153 DeleteHandleBlocks(zone_blocks_); |
113 zone_blocks_ = NULL; | 154 zone_blocks_ = NULL; |
114 | 155 |
115 // Delete all the scoped handle blocks. | 156 // Delete all the scoped handle blocks. |
157 // Do not trace if there is no current isolate. This can happen during | |
158 // isolate shutdown. | |
159 if (HeapTrace::is_enabled() && Isolate::Current() != NULL) { | |
160 Isolate::Current()->heap()->trace()->TraceDeleteScopedHandles(); | |
161 } | |
162 | |
163 | |
116 scoped_blocks_ = first_scoped_block_.next_block(); | 164 scoped_blocks_ = first_scoped_block_.next_block(); |
117 DeleteHandleBlocks(scoped_blocks_); | 165 DeleteHandleBlocks(scoped_blocks_); |
118 first_scoped_block_.ReInit(); | 166 first_scoped_block_.ReInit(); |
119 scoped_blocks_ = &first_scoped_block_; | 167 scoped_blocks_ = &first_scoped_block_; |
120 } | 168 } |
121 | 169 |
122 | 170 |
123 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 171 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
124 void Handles<kHandleSizeInWords, | 172 void Handles<kHandleSizeInWords, |
125 kHandlesPerChunk, | 173 kHandlesPerChunk, |
(...skipping 169 matching lines...) | |
295 for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { | 343 for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { |
296 visitor->VisitPointer( | 344 visitor->VisitPointer( |
297 reinterpret_cast<RawObject**>(&data_[i + kOffsetOfRawPtr/kWordSize])); | 345 reinterpret_cast<RawObject**>(&data_[i + kOffsetOfRawPtr/kWordSize])); |
298 } | 346 } |
299 } | 347 } |
300 | 348 |
301 | 349 |
302 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 350 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
303 void Handles<kHandleSizeInWords, | 351 void Handles<kHandleSizeInWords, |
304 kHandlesPerChunk, | 352 kHandlesPerChunk, |
353 kOffsetOfRawPtr>::HandlesBlock::VisitUnvisitedObjectPointers( | |
354 ObjectPointerVisitor* visitor) { | |
355 ASSERT(visitor != NULL); | |
356 | |
357 // last_visited_handle_ picks up where we were last time, so there is |
358 // no separate initialization step for this loop. |
359 | |
360 while (last_visited_handle_ < next_handle_slot_) { | |
361 last_visited_handle_ += kHandleSizeInWords; | |
362 uword* addr = &data_[last_visited_handle_ + kOffsetOfRawPtr / kWordSize]; | |
363 visitor->VisitPointer(reinterpret_cast<RawObject**>(addr)); | |
364 } | |
365 } | |
366 | |
367 | |
368 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | |
369 void Handles<kHandleSizeInWords, | |
370 kHandlesPerChunk, | |
305 kOffsetOfRawPtr>::HandlesBlock::Visit(HandleVisitor* visitor) { | 371 kOffsetOfRawPtr>::HandlesBlock::Visit(HandleVisitor* visitor) { |
306 ASSERT(visitor != NULL); | 372 ASSERT(visitor != NULL); |
307 for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { | 373 for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { |
308 visitor->VisitHandle(reinterpret_cast<uword>(&data_[i])); | 374 visitor->VisitHandle(reinterpret_cast<uword>(&data_[i])); |
309 } | 375 } |
310 } | 376 } |
311 | 377 |
312 | 378 |
313 #if defined(DEBUG) | 379 #if defined(DEBUG) |
314 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 380 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
(...skipping 13 matching lines...) | |
328 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 394 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
329 int Handles<kHandleSizeInWords, | 395 int Handles<kHandleSizeInWords, |
330 kHandlesPerChunk, | 396 kHandlesPerChunk, |
331 kOffsetOfRawPtr>::HandlesBlock::HandleCount() const { | 397 kOffsetOfRawPtr>::HandlesBlock::HandleCount() const { |
332 return (next_handle_slot_ / kHandleSizeInWords); | 398 return (next_handle_slot_ / kHandleSizeInWords); |
333 } | 399 } |
334 | 400 |
335 } // namespace dart | 401 } // namespace dart |
336 | 402 |
337 #endif // VM_HANDLES_IMPL_H_ | 403 #endif // VM_HANDLES_IMPL_H_ |