Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_mips.cc

Issue 13228002: First two codegen tests passing on SIMMIPS (Closed)
Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 8 months ago
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_MIPS.
 #if defined(TARGET_ARCH_MIPS)

 #include "vm/flow_graph_compiler.h"

-#include "vm/longjump.h"
+#include "lib/error.h"
+#include "vm/ast_printer.h"
+#include "vm/dart_entry.h"
+#include "vm/il_printer.h"
+#include "vm/locations.h"
+#include "vm/object_store.h"
+#include "vm/parser.h"
+#include "vm/stub_code.h"
+#include "vm/symbols.h"

 namespace dart {

+DECLARE_FLAG(int, optimization_counter_threshold);
+DECLARE_FLAG(bool, print_ast);
+DECLARE_FLAG(bool, print_scopes);
+DECLARE_FLAG(bool, enable_type_checks);
+
+
 FlowGraphCompiler::~FlowGraphCompiler() {
   // BlockInfos are zone-allocated, so their destructors are not called.
   // Verify the labels explicitly here.
   for (int i = 0; i < block_info_.length(); ++i) {
     ASSERT(!block_info_[i]->jump_label()->IsLinked());
   }
 }


 bool FlowGraphCompiler::SupportsUnboxedMints() {
   return false;
 }


 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
                                              intptr_t stub_ix) {
   UNIMPLEMENTED();
 }


+#define __ assembler()->
+
+
 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
                                            Label* is_true,
                                            Label* is_false) {
   UNIMPLEMENTED();
 }


 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
     TypeTestStubKind test_kind,
     Register instance_reg,
(...skipping 77 matching lines...)
 void FlowGraphCompiler::GenerateAssertAssignable(intptr_t token_pos,
                                                  intptr_t deopt_id,
                                                  const AbstractType& dst_type,
                                                  const String& dst_name,
                                                  LocationSummary* locs) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
-  UNIMPLEMENTED();
+  if (!is_optimizing()) {
+    if (FLAG_enable_type_checks && instr->IsAssertAssignable()) {
+      AssertAssignableInstr* assert = instr->AsAssertAssignable();
+      AddCurrentDescriptor(PcDescriptors::kDeoptBefore,
+                           assert->deopt_id(),
+                           assert->token_pos());
+    }
+    AllocateRegistersLocally(instr);
+  }
 }


 void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) {
-  UNIMPLEMENTED();
+  if (is_optimizing()) return;
+  Definition* defn = instr->AsDefinition();
+  if ((defn != NULL) && defn->is_used()) {
+    __ Push(defn->locs()->out().reg());
+  }
 }


 void FlowGraphCompiler::CopyParameters() {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::EmitFrameEntry() {
-  UNIMPLEMENTED();
+  const Function& function = parsed_function().function();
+  if (CanOptimizeFunction() && function.is_optimizable()) {
+    const bool can_optimize = !is_optimizing() || may_reoptimize();
+    const Register function_reg = T0;
+    if (can_optimize) {
+      Label next;
+      // The pool pointer is not set up before entering the Dart frame.
+
+      __ mov(TMP, RA);  // Save RA.
+      __ bal(&next);  // Branch and link to next instruction to get PC in RA.
+      __ delay_slot()->mov(T2, RA);  // Save PC of the following mov.
+
+      // Calculate the offset of the pool pointer from the PC.
+      const intptr_t object_pool_pc_dist =
+          Instructions::HeaderSize() - Instructions::object_pool_offset() +
+          assembler()->CodeSize();
+
+      __ Bind(&next);
+      __ mov(RA, TMP);  // Restore RA.
+
+      // Preserve PP of caller.
+      __ mov(T1, PP);
+
+      // Temporarily set up the pool pointer for this Dart function.
+      __ lw(PP, Address(T2, -object_pool_pc_dist));
+
+      // Load the function object from the object pool.
+      __ LoadObject(function_reg, function);  // Uses PP.
+
+      // Restore PP of caller.
+      __ mov(PP, T1);
+    }
+    // Patch point is after the eventually inlined function object.
+    AddCurrentDescriptor(PcDescriptors::kEntryPatch,
+                         Isolate::kNoDeoptId,
+                         0);  // No token position.
+    if (can_optimize) {
+      // Reoptimization of an optimized function is triggered by counting in
+      // IC stubs, but not at the entry of the function.
+      if (!is_optimizing()) {
+        __ lw(T1, FieldAddress(function_reg,
+                               Function::usage_counter_offset()));
+        __ addiu(T1, T1, Immediate(1));
+        __ sw(T1, FieldAddress(function_reg,
+                               Function::usage_counter_offset()));
+      } else {
+        __ lw(T1, FieldAddress(function_reg,
+                               Function::usage_counter_offset()));
+      }
+
+      // Skip the branch to the stub if T1 is less than the threshold.
+      Label dont_branch;
+      __ LoadImmediate(T2, FLAG_optimization_counter_threshold);
+      __ sltu(T2, T1, T2);
+      __ bgtz(T2, &dont_branch);
+
+      ASSERT(function_reg == T0);
+      __ Branch(&StubCode::OptimizeFunctionLabel());
+
+      __ Bind(&dont_branch);
+    }
+  } else {
+    AddCurrentDescriptor(PcDescriptors::kEntryPatch,
+                         Isolate::kNoDeoptId,
+                         0);  // No token position.
+  }
+  __ Comment("Enter frame");
+  __ EnterDartFrame((StackSize() * kWordSize));
 }
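
Note: the object_pool_pc_dist arithmetic in EmitFrameEntry is easy to check in isolation. The self-contained C++ sketch below redoes the computation with made-up layout constants standing in for Instructions::HeaderSize(), Instructions::object_pool_offset(), and assembler()->CodeSize() (every kFoo name here is illustrative, not a VM value); it shows why a load from (PC - object_pool_pc_dist) lands exactly on the pool-pointer field of the enclosing Instructions object.

#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  // Hypothetical layout constants; the real values come from the Dart VM.
  const intptr_t kHeaderSize = 32;             // Instructions::HeaderSize()
  const intptr_t kObjectPoolOffset = 8;        // Instructions::object_pool_offset()
  const intptr_t kInstructionsStart = 0x1000;  // Address of the Instructions object.

  // The code payload begins right after the header. Suppose the bal/delay-slot
  // pair has captured the PC after code_size bytes of emitted code.
  const intptr_t code_size = 24;               // assembler()->CodeSize()
  const intptr_t pc = kInstructionsStart + kHeaderSize + code_size;

  // Offset of the pool-pointer field back from the captured PC, computed the
  // same way as in EmitFrameEntry.
  const intptr_t object_pool_pc_dist =
      kHeaderSize - kObjectPoolOffset + code_size;

  // Loading from (pc - object_pool_pc_dist) lands on the pool-pointer field.
  assert(pc - object_pool_pc_dist == kInstructionsStart + kObjectPoolOffset);
  printf("pool-pointer field at %#lx\n",
         static_cast<unsigned long>(pc - object_pool_pc_dist));
  return 0;
}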

-
+// Input parameters:
+//   RA: return address.
+//   SP: address of last argument.
+//   FP: caller's frame pointer.
+//   PP: caller's pool pointer.
+//   S5: ic-data.
+//   S4: arguments descriptor array.
 void FlowGraphCompiler::CompileGraph() {
-  UNIMPLEMENTED();
+  InitCompiler();
+  if (TryIntrinsify()) {
+    // Although this intrinsified code will never be patched, it must satisfy
+    // CodePatcher::CodeIsPatchable, which verifies that this code has a
+    // minimum code size.
+    __ break_(0);
+    __ Branch(&StubCode::FixCallersTargetLabel());
+    return;
+  }
+
+  EmitFrameEntry();
+
+  const Function& function = parsed_function().function();
+
+  const int num_fixed_params = function.num_fixed_parameters();
+  const int num_copied_params = parsed_function().num_copied_params();
+  const int num_locals = parsed_function().num_stack_locals();
+
+  // We check the number of passed arguments when we have to copy them due to
+  // the presence of optional parameters.
+  // No such checking code is generated if only fixed parameters are declared,
+  // unless we are in debug mode or unless we are compiling a closure.
+  LocalVariable* saved_args_desc_var =
+      parsed_function().GetSavedArgumentsDescriptorVar();
+  if (num_copied_params == 0) {
+#ifdef DEBUG
+    ASSERT(!parsed_function().function().HasOptionalParameters());
+    const bool check_arguments = true;
+#else
+    const bool check_arguments = function.IsClosureFunction();
+#endif
+    if (check_arguments) {
+      __ Comment("Check argument count");
+      // Check that exactly num_fixed arguments are passed in.
+      Label correct_num_arguments, wrong_num_arguments;
+      __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset()));
+      __ LoadImmediate(T1, Smi::RawValue(num_fixed_params));
+      __ bne(T0, T1, &wrong_num_arguments);
+
+      __ lw(T1, FieldAddress(S4,
+                             ArgumentsDescriptor::positional_count_offset()));
+      __ beq(T0, T1, &correct_num_arguments);
+      __ Bind(&wrong_num_arguments);
+      if (function.IsClosureFunction()) {
+        if (StackSize() != 0) {
+          // We need to unwind the space we reserved for locals and copied
+          // parameters. The NoSuchMethodFunction stub does not expect to see
+          // that area on the stack.
+          __ addiu(SP, SP, Immediate(StackSize() * kWordSize));
+        }
+        // The call below has an empty stackmap because we have just
+        // dropped the spill slots.
+        BitmapBuilder* empty_stack_bitmap = new BitmapBuilder();
+
+        // Invoke noSuchMethod function passing "call" as the function name.
+        const int kNumArgsChecked = 1;
+        const ICData& ic_data = ICData::ZoneHandle(
+            ICData::New(function, Symbols::Call(),
+                        Isolate::kNoDeoptId, kNumArgsChecked));
+        __ LoadObject(S5, ic_data);
+        // FP - 4 : saved PP, object pool pointer of caller.
+        // FP + 0 : previous frame pointer.
+        // FP + 4 : return address.
+        // FP + 8 : PC marker, for easy identification of RawInstruction obj.
+        // FP + 12: last argument (arg n-1).
+        // SP + 0 : saved PP.
+        // SP + 16 + 4*(n-1) : first argument (arg 0).
+        // S5 : ic-data.
+        // S4 : arguments descriptor array.
+        __ BranchLink(&StubCode::CallNoSuchMethodFunctionLabel());
+        if (is_optimizing()) {
+          stackmap_table_builder_->AddEntry(assembler()->CodeSize(),
+                                            empty_stack_bitmap,
+                                            0);  // No registers.
+        }
+        // The noSuchMethod call may return.
+        __ LeaveDartFrame();
+        __ Ret();
+      } else {
+        __ Stop("Wrong number of arguments");
+      }
+      __ Bind(&correct_num_arguments);
+    }
+    // The arguments descriptor is never saved in the absence of optional
+    // parameters, since any argument definition test would always yield true.
+    ASSERT(saved_args_desc_var == NULL);
+  } else {
+    if (saved_args_desc_var != NULL) {
+      __ Comment("Save arguments descriptor");
+      const Register kArgumentsDescriptorReg = S4;
+      // The saved_args_desc_var is allocated one slot before the first local.
+      const intptr_t slot = parsed_function().first_stack_local_index() + 1;
+      // If the saved_args_desc_var is captured, it is first moved to the
+      // stack and later to the context, once the context is allocated.
+      ASSERT(saved_args_desc_var->is_captured() ||
+             (saved_args_desc_var->index() == slot));
+      __ sw(kArgumentsDescriptorReg, Address(FP, slot * kWordSize));
+    }
+    CopyParameters();
+  }
+
+  // In unoptimized code, initialize (non-argument) stack-allocated slots to
+  // null. This does not cover the saved_args_desc_var slot.
+  if (!is_optimizing() && (num_locals > 0)) {
+    __ Comment("Initialize spill slots");
+    const intptr_t slot_base = parsed_function().first_stack_local_index();
+    __ LoadImmediate(T0, reinterpret_cast<intptr_t>(Object::null()));
+    for (intptr_t i = 0; i < num_locals; ++i) {
+      // Subtract index i (locals lie at lower addresses than FP).
+      __ sw(T0, Address(FP, (slot_base - i) * kWordSize));
+    }
+  }
+
+  if (FLAG_print_scopes) {
+    // Print the function scope (again) after generating the prologue in
+    // order to see annotations such as allocation indices of locals.
+    if (FLAG_print_ast) {
+      // Second printing.
+      OS::Print("Annotated ");
+    }
+    AstPrinter::PrintFunctionScope(parsed_function());
+  }
+
+  VisitBlocks();
+
+  __ break_(0);
+  GenerateDeferredCode();
+  // Emit function patching code. This will be swapped with the first 5 bytes
+  // at entry point.
+  AddCurrentDescriptor(PcDescriptors::kPatchCode,
+                       Isolate::kNoDeoptId,
+                       0);  // No token position.
+  __ Branch(&StubCode::FixCallersTargetLabel());
+  AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump,
+                       Isolate::kNoDeoptId,
+                       0);  // No token position.
+  __ Branch(&StubCode::DeoptimizeLazyLabel());
 }


 void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
                                      const ExternalLabel* label,
                                      PcDescriptors::Kind kind,
                                      LocationSummary* locs) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
                                          intptr_t token_pos,
                                          const ExternalLabel* label,
                                          PcDescriptors::Kind kind,
                                          LocationSummary* locs) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::GenerateCallRuntime(intptr_t token_pos,
                                             intptr_t deopt_id,
                                             const RuntimeEntry& entry,
                                             LocationSummary* locs) {
-  UNIMPLEMENTED();
+  __ Unimplemented("call runtime");
 }


 void FlowGraphCompiler::EmitOptimizedInstanceCall(
     ExternalLabel* target_label,
     const ICData& ic_data,
     const Array& arguments_descriptor,
     intptr_t argument_count,
     intptr_t deopt_id,
     intptr_t token_pos,
(...skipping 48 matching lines...)
 }


 void FlowGraphCompiler::EmitSuperEqualityCallPrologue(Register result,
                                                       Label* skip_call) {
   UNIMPLEMENTED();
 }


 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) {
-  UNIMPLEMENTED();
+  // TODO(vegorov): consider saving only caller-save (volatile) registers.
+  const intptr_t fpu_registers = locs->live_registers()->fpu_registers();
+  if (fpu_registers > 0) {
+    UNIMPLEMENTED();
+  }
+
+  // Store general-purpose registers with the lowest register number at the
+  // lowest address.
+  const intptr_t cpu_registers = locs->live_registers()->cpu_registers();
+  ASSERT((cpu_registers & ~kAllCpuRegistersList) == 0);
+  const int register_count = Utils::CountOneBits(cpu_registers);
+  int registers_pushed = 0;
+
+  __ addiu(SP, SP, Immediate(-register_count * kWordSize));
+  for (int i = 0; i < kNumberOfCpuRegisters; i++) {
+    Register r = static_cast<Register>(i);
+    if (locs->live_registers()->ContainsRegister(r)) {
+      __ sw(r, Address(SP, registers_pushed * kWordSize));
+      registers_pushed++;
+    }
+  }
 }


 void FlowGraphCompiler::RestoreLiveRegisters(LocationSummary* locs) {
-  UNIMPLEMENTED();
+  // General-purpose registers have the lowest register number at the
+  // lowest address.
+  const intptr_t cpu_registers = locs->live_registers()->cpu_registers();
+  ASSERT((cpu_registers & ~kAllCpuRegistersList) == 0);
+  const int register_count = Utils::CountOneBits(cpu_registers);
+  int registers_popped = 0;
+
+  for (int i = 0; i < kNumberOfCpuRegisters; i++) {
+    Register r = static_cast<Register>(i);
+    if (locs->live_registers()->ContainsRegister(r)) {
+      __ lw(r, Address(SP, registers_popped * kWordSize));
+      registers_popped++;
+    }
+  }
+  __ addiu(SP, SP, Immediate(register_count * kWordSize));
+
+  const intptr_t fpu_registers = locs->live_registers()->fpu_registers();
+  if (fpu_registers > 0) {
+    UNIMPLEMENTED();
+  }
 }
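
Note: SaveLiveRegisters and RestoreLiveRegisters depend on both paths walking the CPU-register bitmask in the same ascending order, so each register is reloaded from the slot it was stored to, with the lowest register number at the lowest address. Below is a self-contained C++ sketch of that scheme, with plain arrays standing in for the MIPS register file and stack (all names in it are illustrative, not VM APIs).

#include <cassert>
#include <cstdint>

enum { kNumberOfCpuRegisters = 32 };

// Count the set bits in a mask (the role Utils::CountOneBits plays in the VM).
static int CountOneBits(uint32_t mask) {
  int n = 0;
  for (; mask != 0; mask &= mask - 1) n++;
  return n;
}

// Store each live register at ascending addresses, lowest register first.
static void Save(const uint32_t regs[], uint32_t mask, uint32_t stack[]) {
  int pushed = 0;
  for (int i = 0; i < kNumberOfCpuRegisters; i++) {
    if (mask & (1u << i)) stack[pushed++] = regs[i];
  }
}

// Walk the mask in the same order, so slot k always matches the k-th save.
static void Restore(uint32_t regs[], uint32_t mask, const uint32_t stack[]) {
  int popped = 0;
  for (int i = 0; i < kNumberOfCpuRegisters; i++) {
    if (mask & (1u << i)) regs[i] = stack[popped++];
  }
}

int main() {
  uint32_t regs[kNumberOfCpuRegisters] = {0};
  regs[8] = 0xAAAA;   // Pretend two registers are live across a call.
  regs[12] = 0xBBBB;
  const uint32_t mask = (1u << 8) | (1u << 12);

  uint32_t stack[kNumberOfCpuRegisters] = {0};
  Save(regs, mask, stack);
  regs[8] = regs[12] = 0;  // Clobber them, as a call might.
  Restore(regs, mask, stack);

  assert(CountOneBits(mask) == 2);
  assert(regs[8] == 0xAAAA && regs[12] == 0xBBBB);
  return 0;
}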


 void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
                                         Register class_id_reg,
                                         intptr_t arg_count,
                                         const Array& arg_names,
                                         Label* deopt,
                                         intptr_t deopt_id,
                                         intptr_t token_index,
(...skipping 95 matching lines...)


 void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) {
   UNIMPLEMENTED();
 }


 }  // namespace dart

 #endif  // defined TARGET_ARCH_MIPS