Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_mips.cc

Issue 13228002: First two codegen tests passing on SIMMIPS (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 8 months ago
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_MIPS.
 #if defined(TARGET_ARCH_MIPS)

 #include "vm/flow_graph_compiler.h"

-#include "vm/longjump.h"
+#include "lib/error.h"
+#include "vm/ast_printer.h"
+#include "vm/dart_entry.h"
+#include "vm/il_printer.h"
+#include "vm/locations.h"
+#include "vm/object_store.h"
+#include "vm/parser.h"
+#include "vm/stub_code.h"
+#include "vm/symbols.h"

 namespace dart {

+DECLARE_FLAG(int, optimization_counter_threshold);
+DECLARE_FLAG(bool, print_ast);
+DECLARE_FLAG(bool, print_scopes);
+DECLARE_FLAG(bool, enable_type_checks);
+
+
 FlowGraphCompiler::~FlowGraphCompiler() {
   // BlockInfos are zone-allocated, so their destructors are not called.
   // Verify the labels explicitly here.
   for (int i = 0; i < block_info_.length(); ++i) {
     ASSERT(!block_info_[i]->jump_label()->IsLinked());
   }
 }


 bool FlowGraphCompiler::SupportsUnboxedMints() {
   return false;
 }


 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
                                              intptr_t stub_ix) {
   UNIMPLEMENTED();
 }


+#define __ assembler()->
+
+
 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
                                            Label* is_true,
                                            Label* is_false) {
   UNIMPLEMENTED();
 }


 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
     TypeTestStubKind test_kind,
     Register instance_reg,
(...skipping 77 matching lines...)
 void FlowGraphCompiler::GenerateAssertAssignable(intptr_t token_pos,
                                                  intptr_t deopt_id,
                                                  const AbstractType& dst_type,
                                                  const String& dst_name,
                                                  LocationSummary* locs) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
-  UNIMPLEMENTED();
+  if (!is_optimizing()) {
+    if (FLAG_enable_type_checks && instr->IsAssertAssignable()) {
+      AssertAssignableInstr* assert = instr->AsAssertAssignable();
+      AddCurrentDescriptor(PcDescriptors::kDeoptBefore,
+                           assert->deopt_id(),
+                           assert->token_pos());
+    }
+    AllocateRegistersLocally(instr);
+  }
 }


 void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) {
-  UNIMPLEMENTED();
+  if (is_optimizing()) return;
+  Definition* defn = instr->AsDefinition();
+  if ((defn != NULL) && defn->is_used()) {
+    __ Push(defn->locs()->out().reg());
+  }
 }


 void FlowGraphCompiler::CopyParameters() {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::EmitFrameEntry() {
-  UNIMPLEMENTED();
+  const Function& function = parsed_function().function();
+  if (CanOptimizeFunction() && function.is_optimizable()) {
+    const bool can_optimize = !is_optimizing() || may_reoptimize();
+    const Register function_reg = T0;
+    if (can_optimize) {
+      Label next;
+      // The pool pointer is not set up before entering the Dart frame.
+
+      // We need to know the PC to find the pool pointer.
+      __ GetPC(T2);
+
+      // Preserve PP of the caller.
+      __ mov(T1, PP);
+
+      // Temporarily set up the pool pointer for this Dart function.
+      const intptr_t object_pool_pc_dist =
+          Instructions::HeaderSize() - Instructions::object_pool_offset() +
+          assembler()->CodeSize() - (3 * Instr::kInstrSize);
+      __ lw(PP, Address(T2 /* PC - 12 */, -object_pool_pc_dist));
+
+      // Load the function object from the object pool.
+      __ LoadObject(function_reg, function);  // Uses PP.
+
+      // Restore PP of the caller.
+      __ mov(PP, T1);
+    }
regis 2013/03/29 00:23:32 The sequence above is rather expensive. I am not h
zra 2013/03/29 17:10:44 I've inlined GetPC here, too. Yah, I guess if we d
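For readers following this exchange: the "expensive sequence" is the PC-relative reload of the pool pointer above. A minimal sketch of why the arithmetic lands on the object pool, assuming (per the patch's /* PC - 12 */ comment) that T2 holds an address three instructions before the lw; the derivation uses only names from the patch:

    // Positions, measured from the start of the instructions object:
    //   entry point       = instructions_start + Instructions::HeaderSize()
    //   address of the lw = entry point + CodeSize() at emission time
    //   T2 (from GetPC)   = address of the lw - 3 * Instr::kInstrSize
    // With
    //   object_pool_pc_dist = Instructions::HeaderSize()
    //                         - Instructions::object_pool_offset()
    //                         + CodeSize() - 3 * Instr::kInstrSize,
    // the effective load address is
    //   T2 - object_pool_pc_dist
    //     = instructions_start + Instructions::object_pool_offset(),
    // i.e. exactly the object_pool field, independent of where the code
    // was loaded in memory.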
+    // The patch point is after the eventually inlined function object.
+    AddCurrentDescriptor(PcDescriptors::kEntryPatch,
+                         Isolate::kNoDeoptId,
+                         0);  // No token position.
+    if (can_optimize) {
+      // Reoptimization of an optimized function is triggered by counting in
+      // IC stubs, but not at the entry of the function.
+      if (!is_optimizing()) {
+        __ lw(T1, FieldAddress(function_reg,
+                               Function::usage_counter_offset()));
+        __ addiu(T1, T1, Immediate(1));
+        __ sw(T1, FieldAddress(function_reg,
+                               Function::usage_counter_offset()));
+      } else {
+        __ lw(T1, FieldAddress(function_reg,
+                               Function::usage_counter_offset()));
+      }
+
+      // Skip the branch if T1 is less than the threshold.
+      Label dont_branch;
+      __ LoadImmediate(T2, FLAG_optimization_counter_threshold);
+      __ sltu(T2, T1, T2);
+      __ bgtz(T2, &dont_branch);
+
+      ASSERT(function_reg == T0);
+      __ Branch(&StubCode::OptimizeFunctionLabel());
+
+      __ Bind(&dont_branch);
+    }
+  } else {
+    AddCurrentDescriptor(PcDescriptors::kEntryPatch,
+                         Isolate::kNoDeoptId,
+                         0);  // No token position.
+  }
+  __ Comment("Enter frame");
+  __ EnterDartFrame((StackSize() * kWordSize));
 }
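For clarity, the counter logic emitted above behaves like the following C++ sketch. This is a hypothetical scalar rendering of the MIPS assembly, not code from the patch; the accessor names are assumptions for illustration:

    int32_t counter = function.usage_counter();   // lw T1, usage_counter
    if (!is_optimizing()) {
      function.set_usage_counter(++counter);      // addiu/sw: count this entry
    }
    if (counter >= FLAG_optimization_counter_threshold) {
      // sltu/bgtz fell through: tail-call the OptimizeFunction stub,
      // with the function object already in T0.
    }
    // Otherwise execution continues at dont_branch and enters the frame.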


+// Input parameters:
+//   RA: return address.
+//   SP: address of last argument.
+//   FP: caller's frame pointer.
+//   PP: caller's pool pointer.
+//   S5: ic-data.
+//   S4: arguments descriptor array.
 void FlowGraphCompiler::CompileGraph() {
-  UNIMPLEMENTED();
+  InitCompiler();
+  if (TryIntrinsify()) {
+    // Although this intrinsified code will never be patched, it must satisfy
+    // CodePatcher::CodeIsPatchable, which verifies that this code has a
+    // minimum code size.
+    __ break_(0);
+    __ Branch(&StubCode::FixCallersTargetLabel());
+    return;
+  }
+
+  EmitFrameEntry();
+
+  const Function& function = parsed_function().function();
+
+  const int num_fixed_params = function.num_fixed_parameters();
+  const int num_copied_params = parsed_function().num_copied_params();
+  const int num_locals = parsed_function().num_stack_locals();
+
+  // We check the number of passed arguments when we have to copy them due to
+  // the presence of optional parameters.
+  // No such checking code is generated if only fixed parameters are declared,
+  // unless we are in debug mode or unless we are compiling a closure.
+  LocalVariable* saved_args_desc_var =
+      parsed_function().GetSavedArgumentsDescriptorVar();
+  if (num_copied_params == 0) {
+#ifdef DEBUG
+    ASSERT(!parsed_function().function().HasOptionalParameters());
+    const bool check_arguments = true;
+#else
+    const bool check_arguments = function.IsClosureFunction();
+#endif
+    if (check_arguments) {
+      __ Comment("Check argument count");
+      // Check that exactly num_fixed arguments are passed in.
+      Label correct_num_arguments, wrong_num_arguments;
+      __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset()));
+      __ LoadImmediate(T1, Smi::RawValue(num_fixed_params));
+      __ bne(T0, T1, &wrong_num_arguments);
+
+      __ lw(T1, FieldAddress(S4,
+                             ArgumentsDescriptor::positional_count_offset()));
+      __ beq(T0, T1, &correct_num_arguments);
+      __ Bind(&wrong_num_arguments);
+      if (function.IsClosureFunction()) {
+        if (StackSize() != 0) {
+          // We need to unwind the space we reserved for locals and copied
+          // parameters. The NoSuchMethodFunction stub does not expect to see
+          // that area on the stack.
+          __ addiu(SP, SP, Immediate(StackSize() * kWordSize));
+        }
+        // The call below has an empty stackmap because we have just
+        // dropped the spill slots.
+        BitmapBuilder* empty_stack_bitmap = new BitmapBuilder();
+
+        // Invoke noSuchMethod function passing "call" as the function name.
+        const int kNumArgsChecked = 1;
+        const ICData& ic_data = ICData::ZoneHandle(
+            ICData::New(function, Symbols::Call(),
+                        Isolate::kNoDeoptId, kNumArgsChecked));
+        __ LoadObject(S5, ic_data);
+        // FP - 4 : saved PP, object pool pointer of caller.
+        // FP + 0 : previous frame pointer.
+        // FP + 4 : return address.
+        // FP + 8 : PC marker, for easy identification of RawInstruction obj.
+        // FP + 12: last argument (arg n-1).
+        // SP + 0 : saved PP.
+        // SP + 16 + 4*(n-1) : first argument (arg 0).
+        // S5 : ic-data.
+        // S4 : arguments descriptor array.
+        __ BranchLink(&StubCode::CallNoSuchMethodFunctionLabel());
+        if (is_optimizing()) {
+          stackmap_table_builder_->AddEntry(assembler()->CodeSize(),
+                                            empty_stack_bitmap,
+                                            0);  // No registers.
+        }
+        // The noSuchMethod call may return.
+        __ LeaveDartFrame();
+        __ Ret();
+      } else {
+        __ Stop("Wrong number of arguments");
+      }
+      __ Bind(&correct_num_arguments);
+    }
+    // The arguments descriptor is never saved in the absence of optional
+    // parameters, since any argument definition test would always yield true.
+    ASSERT(saved_args_desc_var == NULL);
+  } else {
+    if (saved_args_desc_var != NULL) {
+      __ Comment("Save arguments descriptor");
+      const Register kArgumentsDescriptorReg = S4;
+      // The saved_args_desc_var is allocated one slot before the first local.
+      const intptr_t slot = parsed_function().first_stack_local_index() + 1;
+      // If the saved_args_desc_var is captured, it is first moved to the
+      // stack and later to the context, once the context is allocated.
+      ASSERT(saved_args_desc_var->is_captured() ||
+             (saved_args_desc_var->index() == slot));
+      __ sw(kArgumentsDescriptorReg, Address(FP, slot * kWordSize));
+    }
+    CopyParameters();
+  }
+
+  // In unoptimized code, initialize (non-argument) stack-allocated slots to
+  // null. This does not cover the saved_args_desc_var slot.
+  if (!is_optimizing() && (num_locals > 0)) {
+    __ Comment("Initialize spill slots");
+    const intptr_t slot_base = parsed_function().first_stack_local_index();
+    __ LoadImmediate(T0, reinterpret_cast<intptr_t>(Object::null()));
+    for (intptr_t i = 0; i < num_locals; ++i) {
+      // Subtract index i (locals lie at lower addresses than FP).
+      __ sw(T0, Address(FP, (slot_base - i) * kWordSize));
+    }
+  }
+
+  if (FLAG_print_scopes) {
+    // Print the function scope (again) after generating the prologue in
+    // order to see annotations such as allocation indices of locals.
+    if (FLAG_print_ast) {
+      // Second printing.
+      OS::Print("Annotated ");
+    }
+    AstPrinter::PrintFunctionScope(parsed_function());
+  }
+
+  VisitBlocks();
+
+  __ break_(0);
+  GenerateDeferredCode();
+  // Emit function patching code. This will be swapped with the first 5 bytes
+  // at the entry point.
+  AddCurrentDescriptor(PcDescriptors::kPatchCode,
+                       Isolate::kNoDeoptId,
+                       0);  // No token position.
+  __ Branch(&StubCode::FixCallersTargetLabel());
+  AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump,
+                       Isolate::kNoDeoptId,
+                       0);  // No token position.
+  __ Branch(&StubCode::DeoptimizeLazyLabel());
 }
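To make the frame-layout comment in the noSuchMethod path above concrete, here is a worked example for a call with n = 2 arguments, assuming kWordSize = 4 and SP == FP - 4 once the spill area has been dropped (which is what the "FP - 4 : saved PP" and "SP + 0 : saved PP" lines together imply):

    // Stack picture for n = 2, from low to high addresses:
    //   SP + 0  (== FP - 4) : saved PP of the caller
    //   FP + 0              : caller's frame pointer
    //   FP + 4              : return address
    //   FP + 8              : PC marker (identifies the code object)
    //   FP + 12             : arg 1, the last argument (arg n-1)
    //   FP + 16             : arg 0, the first argument
    //                         == SP + 16 + 4*(n-1) with n = 2, matching
    //                         the formula in the comment.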


 void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
                                      const ExternalLabel* label,
                                      PcDescriptors::Kind kind,
                                      LocationSummary* locs) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
                                          intptr_t token_pos,
                                          const ExternalLabel* label,
                                          PcDescriptors::Kind kind,
                                          LocationSummary* locs) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::GenerateCallRuntime(intptr_t token_pos,
                                             intptr_t deopt_id,
                                             const RuntimeEntry& entry,
                                             LocationSummary* locs) {
-  UNIMPLEMENTED();
+  __ Unimplemented("call runtime");
 }


 void FlowGraphCompiler::EmitOptimizedInstanceCall(
     ExternalLabel* target_label,
     const ICData& ic_data,
     const Array& arguments_descriptor,
     intptr_t argument_count,
     intptr_t deopt_id,
     intptr_t token_pos,
(...skipping 48 matching lines...)
 }


 void FlowGraphCompiler::EmitSuperEqualityCallPrologue(Register result,
                                                       Label* skip_call) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) {
-  UNIMPLEMENTED();
+  // TODO(vegorov): consider saving only caller save (volatile) registers.
+  const intptr_t fpu_registers = locs->live_registers()->fpu_registers();
+  if (fpu_registers > 0) {
+    UNIMPLEMENTED();
+  }
+
+  // Store general purpose registers with the lowest register number at the
+  // lowest address.
+  const intptr_t cpu_registers = locs->live_registers()->cpu_registers();
+  ASSERT((cpu_registers & ~kAllCpuRegistersList) == 0);
+  const int register_count = Utils::CountOneBits(cpu_registers);
+  int registers_pushed = 0;
+
+  __ addiu(SP, SP, Immediate(-register_count * kWordSize));
+  for (int i = 0; i < kNumberOfCpuRegisters; i++) {
+    Register r = static_cast<Register>(i);
+    if (locs->live_registers()->ContainsRegister(r)) {
+      __ sw(r, Address(SP, registers_pushed * kWordSize));
+      registers_pushed++;
+    }
+  }
 }


 void FlowGraphCompiler::RestoreLiveRegisters(LocationSummary* locs) {
-  UNIMPLEMENTED();
+  // General purpose registers have the lowest register number at the
+  // lowest address.
+  const intptr_t cpu_registers = locs->live_registers()->cpu_registers();
+  ASSERT((cpu_registers & ~kAllCpuRegistersList) == 0);
+  const int register_count = Utils::CountOneBits(cpu_registers);
+  int registers_popped = 0;
+
+  for (int i = 0; i < kNumberOfCpuRegisters; i++) {
+    Register r = static_cast<Register>(i);
+    if (locs->live_registers()->ContainsRegister(r)) {
+      __ lw(r, Address(SP, registers_popped * kWordSize));
+      registers_popped++;
+    }
+  }
+  __ addiu(SP, SP, Immediate(register_count * kWordSize));
+
+  const intptr_t fpu_registers = locs->live_registers()->fpu_registers();
+  if (fpu_registers > 0) {
+    UNIMPLEMENTED();
+  }
 }
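A worked example of the layout contract shared by SaveLiveRegisters and RestoreLiveRegisters above; the choice of registers is illustrative:

    // Suppose the live set is {A1, T3}. SaveLiveRegisters emits, in effect:
    //   addiu SP, SP, -8     // reserve two words
    //   sw    A1, 0(SP)      // lowest register number at the lowest address
    //   sw    T3, 4(SP)
    // RestoreLiveRegisters scans registers in the same ascending order, so
    // it reloads A1 from 0(SP) and T3 from 4(SP), then releases the 8 bytes.
    // Using one fixed iteration order in both functions keeps the offsets in
    // agreement without recording an explicit layout anywhere.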


 void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
                                         Register class_id_reg,
                                         intptr_t arg_count,
                                         const Array& arg_names,
                                         Label* deopt,
                                         intptr_t deopt_id,
                                         intptr_t token_index,
(...skipping 95 matching lines...)


 void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) {
   UNIMPLEMENTED();
 }


 }  // namespace dart

 #endif  // defined TARGET_ARCH_MIPS
