OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/assembler.h" | 5 #include "vm/assembler.h" |
6 | 6 |
7 #include "platform/utils.h" | 7 #include "platform/utils.h" |
8 #include "vm/cpu.h" | 8 #include "vm/cpu.h" |
9 #include "vm/heap.h" | 9 #include "vm/heap.h" |
10 #include "vm/memory_region.h" | 10 #include "vm/memory_region.h" |
(...skipping 19 matching lines...) Expand all Loading... |
30 Zone* zone = Thread::Current()->zone(); | 30 Zone* zone = Thread::Current()->zone(); |
31 uword result = zone->AllocUnsafe(capacity); | 31 uword result = zone->AllocUnsafe(capacity); |
32 #if defined(DEBUG) | 32 #if defined(DEBUG) |
33 // Initialize the buffer with kBreakPointInstruction to force a break | 33 // Initialize the buffer with kBreakPointInstruction to force a break |
34 // point if we ever execute an uninitialized part of the code buffer. | 34 // point if we ever execute an uninitialized part of the code buffer. |
35 Assembler::InitializeMemoryWithBreakpoints(result, capacity); | 35 Assembler::InitializeMemoryWithBreakpoints(result, capacity); |
36 #endif | 36 #endif |
37 return result; | 37 return result; |
38 } | 38 } |
39 | 39 |
40 | |
41 #if defined(DEBUG) | 40 #if defined(DEBUG) |
// Scoped DEBUG-only guard: on construction, guarantees the buffer has room
// for at least one maximally-sized instruction (kMinimumGap bytes) and marks
// the buffer so nested EnsureCapacity scopes are caught by assertion.
AssemblerBuffer::EnsureCapacity::EnsureCapacity(AssemblerBuffer* buffer) {
  // Grow the buffer first if the cursor has reached the soft limit.
  if (buffer->cursor() >= buffer->limit()) buffer->ExtendCapacity();
  // In debug mode, we save the assembler buffer along with the gap
  // size before we start emitting to the buffer. This allows us to
  // check that any single generated instruction doesn't overflow the
  // limit implied by the minimum gap size.
  buffer_ = buffer;
  gap_ = ComputeGap();
  // Make sure that extending the capacity leaves a big enough gap
  // for any kind of instruction.
  ASSERT(gap_ >= kMinimumGap);
  // Mark the buffer as having ensured the capacity.
  ASSERT(!buffer->HasEnsuredCapacity());  // Cannot nest.
  buffer->has_ensured_capacity_ = true;
}
57 | 56 |
58 | |
// On scope exit, clears the ensured-capacity flag and verifies that the
// instruction emitted inside the scope fit within the minimum gap.
AssemblerBuffer::EnsureCapacity::~EnsureCapacity() {
  // Unmark the buffer, so we cannot emit after this.
  buffer_->has_ensured_capacity_ = false;
  // Make sure the generated instruction doesn't take up more
  // space than the minimum gap.
  intptr_t delta = gap_ - ComputeGap();
  ASSERT(delta <= kMinimumGap);
}
67 #endif | 65 #endif |
68 | 66 |
69 | |
// Constructs an empty buffer with a 4 KB initial code area (zone-allocated
// via NewContents) and an empty fixup chain.
AssemblerBuffer::AssemblerBuffer()
    : pointer_offsets_(new ZoneGrowableArray<intptr_t>(16)) {
  static const intptr_t kInitialBufferCapacity = 4 * KB;
  contents_ = NewContents(kInitialBufferCapacity);
  cursor_ = contents_;  // Emission starts at the beginning of the area.
  limit_ = ComputeLimit(contents_, kInitialBufferCapacity);
  fixup_ = NULL;  // No fixups recorded yet.
#if defined(DEBUG)
  has_ensured_capacity_ = false;
  fixups_processed_ = false;
#endif

  // Verify internal state.
  ASSERT(Capacity() == kInitialBufferCapacity);
  ASSERT(Size() == 0);
}
86 | 83 |
87 | |
88 AssemblerBuffer::~AssemblerBuffer() {} | 84 AssemblerBuffer::~AssemblerBuffer() {} |
89 | 85 |
90 | |
91 void AssemblerBuffer::ProcessFixups(const MemoryRegion& region) { | 86 void AssemblerBuffer::ProcessFixups(const MemoryRegion& region) { |
92 AssemblerFixup* fixup = fixup_; | 87 AssemblerFixup* fixup = fixup_; |
93 while (fixup != NULL) { | 88 while (fixup != NULL) { |
94 fixup->Process(region, fixup->position()); | 89 fixup->Process(region, fixup->position()); |
95 fixup = fixup->previous(); | 90 fixup = fixup->previous(); |
96 } | 91 } |
97 } | 92 } |
98 | 93 |
99 | |
// Copies the emitted instructions into their final memory region and then
// patches that region via the fixup chain. Fixups must run after the copy
// so they patch the installed bytes, not the scratch buffer.
void AssemblerBuffer::FinalizeInstructions(const MemoryRegion& instructions) {
  // Copy the instructions from the buffer.
  MemoryRegion from(reinterpret_cast<void*>(contents()), Size());
  instructions.CopyFrom(0, from);

  // Process fixups in the instructions.
  ProcessFixups(instructions);
#if defined(DEBUG)
  fixups_processed_ = true;
#endif
}
111 | 105 |
112 | |
// Grows the code area: doubles capacity, but by at most 1 MB per step.
// The contents are copied to the new area and cursor/limit are relocated
// by the delta between the old and new base addresses.
void AssemblerBuffer::ExtendCapacity() {
  intptr_t old_size = Size();
  intptr_t old_capacity = Capacity();
  intptr_t new_capacity =
      Utils::Minimum(old_capacity * 2, old_capacity + 1 * MB);
  // Guard against arithmetic overflow of the capacity computation.
  if (new_capacity < old_capacity) {
    FATAL("Unexpected overflow in AssemblerBuffer::ExtendCapacity");
  }

  // Allocate the new data area and copy contents of the old one to it.
  uword new_contents = NewContents(new_capacity);
  memmove(reinterpret_cast<void*>(new_contents),
          reinterpret_cast<void*>(contents_), old_size);

  // Compute the relocation delta and switch to the new contents area.
  intptr_t delta = new_contents - contents_;
  contents_ = new_contents;

  // Update the cursor and recompute the limit.
  cursor_ += delta;
  limit_ = ComputeLimit(new_contents, new_capacity);

  // Verify internal state.
  ASSERT(Capacity() == new_capacity);
  ASSERT(Size() == old_size);
}
139 | 132 |
140 | |
141 class PatchCodeWithHandle : public AssemblerFixup { | 133 class PatchCodeWithHandle : public AssemblerFixup { |
142 public: | 134 public: |
143 PatchCodeWithHandle(ZoneGrowableArray<intptr_t>* pointer_offsets, | 135 PatchCodeWithHandle(ZoneGrowableArray<intptr_t>* pointer_offsets, |
144 const Object& object) | 136 const Object& object) |
145 : pointer_offsets_(pointer_offsets), object_(object) {} | 137 : pointer_offsets_(pointer_offsets), object_(object) {} |
146 | 138 |
147 void Process(const MemoryRegion& region, intptr_t position) { | 139 void Process(const MemoryRegion& region, intptr_t position) { |
148 // Patch the handle into the code. Once the instructions are installed into | 140 // Patch the handle into the code. Once the instructions are installed into |
149 // a raw code object and the pointer offsets are setup, the handle is | 141 // a raw code object and the pointer offsets are setup, the handle is |
150 // resolved. | 142 // resolved. |
151 region.Store<const Object*>(position, &object_); | 143 region.Store<const Object*>(position, &object_); |
152 pointer_offsets_->Add(position); | 144 pointer_offsets_->Add(position); |
153 } | 145 } |
154 | 146 |
155 virtual bool IsPointerOffset() const { return true; } | 147 virtual bool IsPointerOffset() const { return true; } |
156 | 148 |
157 private: | 149 private: |
158 ZoneGrowableArray<intptr_t>* pointer_offsets_; | 150 ZoneGrowableArray<intptr_t>* pointer_offsets_; |
159 const Object& object_; | 151 const Object& object_; |
160 }; | 152 }; |
161 | 153 |
162 | |
163 intptr_t AssemblerBuffer::CountPointerOffsets() const { | 154 intptr_t AssemblerBuffer::CountPointerOffsets() const { |
164 intptr_t count = 0; | 155 intptr_t count = 0; |
165 AssemblerFixup* current = fixup_; | 156 AssemblerFixup* current = fixup_; |
166 while (current != NULL) { | 157 while (current != NULL) { |
167 if (current->IsPointerOffset()) ++count; | 158 if (current->IsPointerOffset()) ++count; |
168 current = current->previous_; | 159 current = current->previous_; |
169 } | 160 } |
170 return count; | 161 return count; |
171 } | 162 } |
172 | 163 |
173 | |
// Emits a placeholder word for an object pointer and records a fixup that
// patches in the handle when the code is finalized.
void AssemblerBuffer::EmitObject(const Object& object) {
  // Since we are going to store the handle as part of the fixup information
  // the handle needs to be a zone handle.
  ASSERT(object.IsNotTemporaryScopedHandle());
  ASSERT(object.IsOld());
  EmitFixup(new PatchCodeWithHandle(pointer_offsets_, object));
  cursor_ += kWordSize;  // Reserve space for pointer.
}
182 | 172 |
183 | |
184 // Shared macros are implemented here. | 173 // Shared macros are implemented here. |
185 void Assembler::Unimplemented(const char* message) { | 174 void Assembler::Unimplemented(const char* message) { |
186 const char* format = "Unimplemented: %s"; | 175 const char* format = "Unimplemented: %s"; |
187 const intptr_t len = OS::SNPrint(NULL, 0, format, message); | 176 const intptr_t len = OS::SNPrint(NULL, 0, format, message); |
188 char* buffer = reinterpret_cast<char*>(malloc(len + 1)); | 177 char* buffer = reinterpret_cast<char*>(malloc(len + 1)); |
189 OS::SNPrint(buffer, len + 1, format, message); | 178 OS::SNPrint(buffer, len + 1, format, message); |
190 Stop(buffer); | 179 Stop(buffer); |
191 } | 180 } |
192 | 181 |
193 | |
194 void Assembler::Untested(const char* message) { | 182 void Assembler::Untested(const char* message) { |
195 const char* format = "Untested: %s"; | 183 const char* format = "Untested: %s"; |
196 const intptr_t len = OS::SNPrint(NULL, 0, format, message); | 184 const intptr_t len = OS::SNPrint(NULL, 0, format, message); |
197 char* buffer = reinterpret_cast<char*>(malloc(len + 1)); | 185 char* buffer = reinterpret_cast<char*>(malloc(len + 1)); |
198 OS::SNPrint(buffer, len + 1, format, message); | 186 OS::SNPrint(buffer, len + 1, format, message); |
199 Stop(buffer); | 187 Stop(buffer); |
200 } | 188 } |
201 | 189 |
202 | |
203 void Assembler::Unreachable(const char* message) { | 190 void Assembler::Unreachable(const char* message) { |
204 const char* format = "Unreachable: %s"; | 191 const char* format = "Unreachable: %s"; |
205 const intptr_t len = OS::SNPrint(NULL, 0, format, message); | 192 const intptr_t len = OS::SNPrint(NULL, 0, format, message); |
206 char* buffer = reinterpret_cast<char*>(malloc(len + 1)); | 193 char* buffer = reinterpret_cast<char*>(malloc(len + 1)); |
207 OS::SNPrint(buffer, len + 1, format, message); | 194 OS::SNPrint(buffer, len + 1, format, message); |
208 Stop(buffer); | 195 Stop(buffer); |
209 } | 196 } |
210 | 197 |
211 | |
212 void Assembler::Comment(const char* format, ...) { | 198 void Assembler::Comment(const char* format, ...) { |
213 if (EmittingComments()) { | 199 if (EmittingComments()) { |
214 char buffer[1024]; | 200 char buffer[1024]; |
215 | 201 |
216 va_list args; | 202 va_list args; |
217 va_start(args, format); | 203 va_start(args, format); |
218 OS::VSNPrint(buffer, sizeof(buffer), format, args); | 204 OS::VSNPrint(buffer, sizeof(buffer), format, args); |
219 va_end(args); | 205 va_end(args); |
220 | 206 |
221 comments_.Add( | 207 comments_.Add( |
222 new CodeComment(buffer_.GetPosition(), | 208 new CodeComment(buffer_.GetPosition(), |
223 String::ZoneHandle(String::New(buffer, Heap::kOld)))); | 209 String::ZoneHandle(String::New(buffer, Heap::kOld)))); |
224 } | 210 } |
225 } | 211 } |
226 | 212 |
227 | |
228 bool Assembler::EmittingComments() { | 213 bool Assembler::EmittingComments() { |
229 return FLAG_code_comments || FLAG_disassemble || FLAG_disassemble_optimized; | 214 return FLAG_code_comments || FLAG_disassemble || FLAG_disassemble_optimized; |
230 } | 215 } |
231 | 216 |
232 | |
// Materializes the accumulated comment list into a Code::Comments object
// (pc-offset/text pairs), suitable for attaching to a Code object.
const Code::Comments& Assembler::GetCodeComments() const {
  Code::Comments& comments = Code::Comments::New(comments_.length());

  for (intptr_t i = 0; i < comments_.length(); i++) {
    comments.SetPCOffsetAt(i, comments_[i]->pc_offset());
    comments.SetCommentAt(i, comments_[i]->comment());
  }

  return comments;
}
243 | 227 |
244 | |
// Appends |obj| to the pool (always adds; see FindObject for deduplicating
// lookup). Returns the new entry's pool index.
intptr_t ObjectPoolWrapper::AddObject(const Object& obj,
                                      Patchability patchable) {
  ASSERT(obj.IsNotTemporaryScopedHandle());
  return AddObject(ObjectPoolWrapperEntry(&obj), patchable);
}
250 | 233 |
251 | |
// Appends a raw immediate value to the pool and returns its index.
// Immediates are never patched, so the entry is always kNotPatchable.
intptr_t ObjectPoolWrapper::AddImmediate(uword imm) {
  return AddObject(ObjectPoolWrapperEntry(imm, ObjectPool::kImmediate),
                   kNotPatchable);
}
256 | 238 |
// Core append: adds |entry| to the pool and, for non-patchable entries,
// indexes it in the lookup table so FindObject can deduplicate later.
// Patchable entries are deliberately not indexed (they must stay unique
// so each call site can be patched independently). Returns the index.
intptr_t ObjectPoolWrapper::AddObject(ObjectPoolWrapperEntry entry,
                                      Patchability patchable) {
  // Tagged-object entries must use zone handles since the wrapper keeps
  // the pointers until MakeObjectPool runs.
  ASSERT((entry.type_ != ObjectPool::kTaggedObject) ||
         (entry.obj_->IsNotTemporaryScopedHandle() &&
          (entry.equivalence_ == NULL ||
           entry.equivalence_->IsNotTemporaryScopedHandle())));
  object_pool_.Add(entry);
  if (patchable == kNotPatchable) {
    // The object isn't patchable. Record the index for fast lookup.
    object_pool_index_table_.Insert(
        ObjIndexPair(entry, object_pool_.length() - 1));
  }
  return object_pool_.length() - 1;
}
271 | 253 |
272 | |
// Returns the pool index of |entry|, reusing an existing non-patchable
// entry when possible; otherwise appends a new one.
intptr_t ObjectPoolWrapper::FindObject(ObjectPoolWrapperEntry entry,
                                       Patchability patchable) {
  // If the object is not patchable, check if we've already got it in the
  // object pool.
  if (patchable == kNotPatchable) {
    intptr_t idx = object_pool_index_table_.LookupValue(entry);
    if (idx != ObjIndexPair::kNoIndex) {
      return idx;
    }
  }
  return AddObject(entry, patchable);
}
285 | 266 |
286 | |
// Convenience overload: find-or-add |obj| with the given patchability.
intptr_t ObjectPoolWrapper::FindObject(const Object& obj,
                                       Patchability patchable) {
  return FindObject(ObjectPoolWrapperEntry(&obj), patchable);
}
291 | 271 |
292 | |
// Convenience overload: find-or-add |obj| where |equivalence| participates
// in entry equality (allows distinct handles to share a pool slot).
intptr_t ObjectPoolWrapper::FindObject(const Object& obj,
                                       const Object& equivalence) {
  return FindObject(ObjectPoolWrapperEntry(&obj, &equivalence), kNotPatchable);
}
297 | 276 |
298 | |
// Find-or-add a raw immediate value; immediates are always non-patchable.
intptr_t ObjectPoolWrapper::FindImmediate(uword imm) {
  return FindObject(ObjectPoolWrapperEntry(imm, ObjectPool::kImmediate),
                   kNotPatchable);
}
303 | 281 |
304 | |
// Find-or-add a native entry point (the label's raw address) in the pool.
intptr_t ObjectPoolWrapper::FindNativeEntry(const ExternalLabel* label,
                                            Patchability patchable) {
  return FindObject(
      ObjectPoolWrapperEntry(label->address(), ObjectPool::kNativeEntry),
      patchable);
}
311 | 288 |
312 | |
// Materializes the accumulated entries into a heap-allocated ObjectPool.
// Returns the canonical empty pool when nothing was added. Tagged-object
// entries store the resolved object; all other entry kinds store the raw
// value bits.
RawObjectPool* ObjectPoolWrapper::MakeObjectPool() {
  intptr_t len = object_pool_.length();
  if (len == 0) {
    return Object::empty_object_pool().raw();
  }
  const ObjectPool& result = ObjectPool::Handle(ObjectPool::New(len));
  ObjectPoolInfo pool_info(result);
  for (intptr_t i = 0; i < len; ++i) {
    // Record the entry kind so the GC/runtime knows whether the slot holds
    // a tagged pointer or raw bits.
    ObjectPool::EntryType info = object_pool_[i].type_;
    pool_info.SetInfoAt(i, info);
    if (info == ObjectPool::kTaggedObject) {
      result.SetObjectAt(i, *object_pool_[i].obj_);
    } else {
      result.SetRawValueAt(i, object_pool_[i].raw_value_);
    }
  }
  return result.raw();
}
331 | 307 |
332 | |
333 } // namespace dart | 308 } // namespace dart |
OLD | NEW |