OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
9 | 9 |
10 #include "lib/error.h" | 10 #include "lib/error.h" |
(...skipping 66 matching lines...)
77 UNIMPLEMENTED(); | 77 UNIMPLEMENTED(); |
78 return NULL; | 78 return NULL; |
79 } | 79 } |
80 | 80 |
81 | 81 |
82 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, | 82 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, |
83 const GrowableArray<intptr_t>& class_ids, | 83 const GrowableArray<intptr_t>& class_ids, |
84 Label* is_equal_lbl, | 84 Label* is_equal_lbl, |
85 Label* is_not_equal_lbl) { | 85 Label* is_not_equal_lbl) { |
86 for (intptr_t i = 0; i < class_ids.length(); i++) { | 86 for (intptr_t i = 0; i < class_ids.length(); i++) { |
87 __ LoadImmediate(TMP, class_ids[i]); | 87 __ BranchEq(class_id_reg, class_ids[i], is_equal_lbl); |
88 __ beq(class_id_reg, TMP, is_equal_lbl); | |
89 } | 88 } |
90 __ b(is_not_equal_lbl); | 89 __ b(is_not_equal_lbl); |
91 } | 90 } |
92 | 91 |
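Note on the recurring change in this file: each two-instruction LoadImmediate-into-scratch/beq (or bne) pair on the left is folded into a single BranchEq/BranchNe macro-instruction on the right. A minimal sketch of the assumed expansion, written against names already used in this file (the TMP1 scratch register, the ZR zero register); the real assembler may special-case small immediates differently:

  // Sketch only -- assumed expansion of the BranchEq macro-instruction.
  void Assembler::BranchEq(Register rd, int32_t imm, Label* l) {
    if (imm == 0) {
      beq(rd, ZR, l);            // ZR is hardwired to zero; no load needed.
    } else {
      LoadImmediate(TMP1, imm);  // Typically a lui/ori pair for 32-bit values.
      beq(rd, TMP1, l);
    }
  }

The overloads taking an Object& (e.g. BranchEq(A0, Object::transition_sentinel(), ...)) would presumably go through LoadObject rather than LoadImmediate, since object pointers are materialized via the object pool.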
93 | 92 |
94 // Testing against an instantiated type with no arguments, without | 93 // Testing against an instantiated type with no arguments, without |
95 // SubtypeTestCache. | 94 // SubtypeTestCache. |
96 // A0: instance being type checked (preserved). | 95 // A0: instance being type checked (preserved). |
97 // Clobbers: T0, T1, T2 | 96 // Clobbers: T0, T1, T2 |
98 // Returns true if there is a fallthrough. | 97 // Returns true if there is a fallthrough. |
(...skipping 15 matching lines...)
114 type_class, | 113 type_class, |
115 TypeArguments::Handle(), | 114 TypeArguments::Handle(), |
116 NULL)) { | 115 NULL)) { |
117 __ beq(T0, ZR, is_instance_lbl); | 116 __ beq(T0, ZR, is_instance_lbl); |
118 } else { | 117 } else { |
119 __ beq(T0, ZR, is_not_instance_lbl); | 118 __ beq(T0, ZR, is_not_instance_lbl); |
120 } | 119 } |
121 // Compare if the classes are equal. | 120 // Compare if the classes are equal. |
122 const Register kClassIdReg = T0; | 121 const Register kClassIdReg = T0; |
123 __ LoadClassId(kClassIdReg, kInstanceReg); | 122 __ LoadClassId(kClassIdReg, kInstanceReg); |
124 __ LoadImmediate(T1, type_class.id()); | 123 __ BranchEq(kClassIdReg, type_class.id(), is_instance_lbl); |
125 __ beq(kClassIdReg, T1, is_instance_lbl); | 124 |
126 // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted | 125 // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted |
127 // interfaces. | 126 // interfaces. |
128 // Bool interface can be implemented only by core class Bool. | 127 // Bool interface can be implemented only by core class Bool. |
129 if (type.IsBoolType()) { | 128 if (type.IsBoolType()) { |
130 __ LoadImmediate(T1, kBoolCid); | 129 __ BranchEq(kClassIdReg, kBoolCid, is_instance_lbl); |
131 __ beq(kClassIdReg, T1, is_instance_lbl); | |
132 __ b(is_not_instance_lbl); | 130 __ b(is_not_instance_lbl); |
133 return false; | 131 return false; |
134 } | 132 } |
135 if (type.IsFunctionType()) { | 133 if (type.IsFunctionType()) { |
136 // Check if instance is a closure. | 134 // Check if instance is a closure. |
137 __ LoadClassById(T1, kClassIdReg); | 135 __ LoadClassById(T1, kClassIdReg); |
138 __ lw(T1, FieldAddress(T1, Class::signature_function_offset())); | 136 __ lw(T1, FieldAddress(T1, Class::signature_function_offset())); |
139 __ LoadImmediate(T2, reinterpret_cast<int32_t>(Object::null())); | 137 __ BranchNe(T1, reinterpret_cast<int32_t>(Object::null()), is_instance_lbl); |
140 __ bne(T1, T2, is_instance_lbl); | |
141 } | 138 } |
142 // Custom checking for numbers (Smi, Mint, Bigint and Double). | 139 // Custom checking for numbers (Smi, Mint, Bigint and Double). |
143 // Note that instance is not Smi (checked above). | 140 // Note that instance is not Smi (checked above). |
144 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { | 141 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { |
145 GenerateNumberTypeCheck( | 142 GenerateNumberTypeCheck( |
146 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); | 143 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); |
147 return false; | 144 return false; |
148 } | 145 } |
149 if (type.IsStringType()) { | 146 if (type.IsStringType()) { |
150 GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl); | 147 GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl); |
(...skipping 46 matching lines...)
197 } | 194 } |
198 if (TypeCheckAsClassEquality(type)) { | 195 if (TypeCheckAsClassEquality(type)) { |
199 const intptr_t type_cid = Class::Handle(type.type_class()).id(); | 196 const intptr_t type_cid = Class::Handle(type.type_class()).id(); |
200 const Register kInstanceReg = A0; | 197 const Register kInstanceReg = A0; |
201 __ andi(T0, kInstanceReg, Immediate(kSmiTagMask)); | 198 __ andi(T0, kInstanceReg, Immediate(kSmiTagMask)); |
202 if (type_cid == kSmiCid) { | 199 if (type_cid == kSmiCid) { |
203 __ beq(T0, ZR, is_instance_lbl); | 200 __ beq(T0, ZR, is_instance_lbl); |
204 } else { | 201 } else { |
205 __ beq(T0, ZR, is_not_instance_lbl); | 202 __ beq(T0, ZR, is_not_instance_lbl); |
206 __ LoadClassId(T0, kInstanceReg); | 203 __ LoadClassId(T0, kInstanceReg); |
207 __ LoadImmediate(T1, type_cid); | 204 __ BranchEq(T0, type_cid, is_instance_lbl); |
208 __ beq(T0, T1, is_instance_lbl); | |
209 } | 205 } |
210 __ b(is_not_instance_lbl); | 206 __ b(is_not_instance_lbl); |
211 return SubtypeTestCache::null(); | 207 return SubtypeTestCache::null(); |
212 } | 208 } |
213 if (type.IsInstantiated()) { | 209 if (type.IsInstantiated()) { |
214 const Class& type_class = Class::ZoneHandle(type.type_class()); | 210 const Class& type_class = Class::ZoneHandle(type.type_class()); |
215 // A Smi object cannot be the instance of a parameterized class. | 211 // A Smi object cannot be the instance of a parameterized class. |
216 // A class equality check is only applicable with a dst type of a | 212 // A class equality check is only applicable with a dst type of a |
217 // non-parameterized class or with a raw dst type of a parameterized class. | 213 // non-parameterized class or with a raw dst type of a parameterized class. |
218 if (type_class.HasTypeArguments()) { | 214 if (type_class.HasTypeArguments()) { |
(...skipping 56 matching lines...)
275 ASSERT(dst_type.IsFinalized()); | 271 ASSERT(dst_type.IsFinalized()); |
276 // Assignable check is skipped in FlowGraphBuilder, not here. | 272 // Assignable check is skipped in FlowGraphBuilder, not here. |
277 ASSERT(dst_type.IsMalformed() || | 273 ASSERT(dst_type.IsMalformed() || |
278 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); | 274 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); |
279 // Preserve instantiator and its type arguments. | 275 // Preserve instantiator and its type arguments. |
280 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 276 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
281 __ sw(A2, Address(SP, 1 * kWordSize)); | 277 __ sw(A2, Address(SP, 1 * kWordSize)); |
282 __ sw(A1, Address(SP, 0 * kWordSize)); | 278 __ sw(A1, Address(SP, 0 * kWordSize)); |
283 // A null object is always assignable and is returned as result. | 279 // A null object is always assignable and is returned as result. |
284 Label is_assignable, runtime_call; | 280 Label is_assignable, runtime_call; |
285 __ LoadImmediate(T0, reinterpret_cast<int32_t>(Object::null())); | 281 __ BranchEq(A0, reinterpret_cast<int32_t>(Object::null()), &is_assignable); |
286 __ beq(A0, T0, &is_assignable); | |
287 | 282 |
288 if (!FLAG_eliminate_type_checks) { | 283 if (!FLAG_eliminate_type_checks) { |
289 // If type checks are not eliminated during the graph building then | 284 // If type checks are not eliminated during the graph building then |
290 // a transition sentinel can be seen here. | 285 // a transition sentinel can be seen here. |
291 __ LoadObject(T0, Object::transition_sentinel()); | 286 __ BranchEq(A0, Object::transition_sentinel(), &is_assignable); |
292 __ beq(A0, T0, &is_assignable); | |
293 } | 287 } |
294 | 288 |
295 // Generate throw new TypeError() if the type is malformed. | 289 // Generate throw new TypeError() if the type is malformed. |
296 if (dst_type.IsMalformed()) { | 290 if (dst_type.IsMalformed()) { |
297 const Error& error = Error::Handle(dst_type.malformed_error()); | 291 const Error& error = Error::Handle(dst_type.malformed_error()); |
298 const String& error_message = String::ZoneHandle( | 292 const String& error_message = String::ZoneHandle( |
299 Symbols::New(error.ToErrorCString())); | 293 Symbols::New(error.ToErrorCString())); |
300 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 294 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
301 __ Push(A0); // Push the source object. | 295 __ Push(A0); // Push the source object. |
302 __ PushObject(dst_name); // Push the name of the destination. | 296 __ PushObject(dst_name); // Push the name of the destination. |
(...skipping 34 matching lines...)
337 __ LoadObject(A0, test_cache); | 331 __ LoadObject(A0, test_cache); |
338 __ Push(A0); | 332 __ Push(A0); |
339 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, locs); | 333 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, locs); |
340 // Pop the parameters supplied to the runtime entry. The result of the | 334 // Pop the parameters supplied to the runtime entry. The result of the |
341 // type check runtime call is the checked value. | 335 // type check runtime call is the checked value. |
342 __ Drop(6); | 336 __ Drop(6); |
343 __ Pop(A0); | 337 __ Pop(A0); |
344 | 338 |
345 __ Bind(&is_assignable); | 339 __ Bind(&is_assignable); |
346 // Restore instantiator and its type arguments. | 340 // Restore instantiator and its type arguments. |
347 __ lw(A1, Address(SP, 0 * kWordSize)); | 341 __ lw(A1, Address(SP, 0 * kWordSize)); |
348 __ lw(A2, Address(SP, 1 * kWordSize)); | 342 __ lw(A2, Address(SP, 1 * kWordSize)); |
349 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 343 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
350 } | 344 } |
351 | 345 |
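A note on the Drop(6)/Pop(A0) pairing above: the convention (also visible in the malformed-type path, which pushes room for a result before its arguments) is that a placeholder slot for the result is pushed first, then the arguments; after the runtime call the arguments are dropped and the checked value is popped from the placeholder. Most of the pushes for this call sit in the elided lines, so the middle slot contents below are an assumption:

  // Assumed stack shape at the kTypeCheckRuntimeEntry call:
  //
  //   [ placeholder for result ]  <- pushed first; receives the checked value
  //   [ five more arguments    ]  <- instance, type, instantiator state, ...
  //   [ SubtypeTestCache       ]  <- pushed last from A0 (visible above); at SP
  //
  // Drop(6) pops the six argument slots; Pop(A0) then pops the result.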
352 | 346 |
353 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) { | 347 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) { |
354 if (!is_optimizing()) { | 348 if (!is_optimizing()) { |
355 if (FLAG_enable_type_checks && instr->IsAssertAssignable()) { | 349 if (FLAG_enable_type_checks && instr->IsAssertAssignable()) { |
356 AssertAssignableInstr* assert = instr->AsAssertAssignable(); | 350 AssertAssignableInstr* assert = instr->AsAssertAssignable(); |
357 AddCurrentDescriptor(PcDescriptors::kDeoptBefore, | 351 AddCurrentDescriptor(PcDescriptors::kDeoptBefore, |
358 assert->deopt_id(), | 352 assert->deopt_id(), |
359 assert->token_pos()); | 353 assert->token_pos()); |
(...skipping 27 matching lines...)
387 ASSERT(parsed_function().first_parameter_index() == kFirstLocalSlotIndex); | 381 ASSERT(parsed_function().first_parameter_index() == kFirstLocalSlotIndex); |
388 | 382 |
389 // Check that min_num_pos_args <= num_pos_args <= max_num_pos_args, | 383 // Check that min_num_pos_args <= num_pos_args <= max_num_pos_args, |
390 // where num_pos_args is the number of positional arguments passed in. | 384 // where num_pos_args is the number of positional arguments passed in. |
391 const int min_num_pos_args = num_fixed_params; | 385 const int min_num_pos_args = num_fixed_params; |
392 const int max_num_pos_args = num_fixed_params + num_opt_pos_params; | 386 const int max_num_pos_args = num_fixed_params + num_opt_pos_params; |
393 | 387 |
394 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::positional_count_offset())); | 388 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::positional_count_offset())); |
395 // Check that min_num_pos_args <= num_pos_args. | 389 // Check that min_num_pos_args <= num_pos_args. |
396 Label wrong_num_arguments; | 390 Label wrong_num_arguments; |
397 __ addiu(T3, T2, Immediate(-Smi::RawValue(min_num_pos_args))); | 391 __ BranchLt(T2, Smi::RawValue(min_num_pos_args), &wrong_num_arguments); |
398 __ bltz(T3, &wrong_num_arguments); | |
399 | 392 |
400 // Check that num_pos_args <= max_num_pos_args. | 393 // Check that num_pos_args <= max_num_pos_args. |
401 __ addiu(T3, T2, Immediate(-Smi::RawValue(max_num_pos_args))); | 394 __ BranchGt(T2, Smi::RawValue(max_num_pos_args), &wrong_num_arguments); |
402 __ bgtz(T3, &wrong_num_arguments); | |
403 | 395 |
404 // Copy positional arguments. | 396 // Copy positional arguments. |
405 // Argument i passed at fp[kLastParamSlotIndex + num_args - 1 - i] is copied | 397 // Argument i passed at fp[kLastParamSlotIndex + num_args - 1 - i] is copied |
406 // to fp[kFirstLocalSlotIndex - i]. | 398 // to fp[kFirstLocalSlotIndex - i]. |
407 | 399 |
408 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 400 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
409 // Since T1 and T2 are Smi, use LSL 1 instead of LSL 2. | 401 // Since T1 and T2 are Smi, use sll 1 instead of sll 2. |
410 // Let T1 point to the last passed positional argument, i.e. to | 402 // Let T1 point to the last passed positional argument, i.e. to |
411 // fp[kLastParamSlotIndex + num_args - 1 - (num_pos_args - 1)]. | 403 // fp[kLastParamSlotIndex + num_args - 1 - (num_pos_args - 1)]. |
412 __ subu(T1, T1, T2); | 404 __ subu(T1, T1, T2); |
413 __ sll(T1, T1, 1); | 405 __ sll(T1, T1, 1); |
414 __ addu(T1, FP, T1); | 406 __ addu(T1, FP, T1); |
415 __ addiu(T1, T1, Immediate(kLastParamSlotIndex * kWordSize)); | 407 __ addiu(T1, T1, Immediate(kLastParamSlotIndex * kWordSize)); |
416 | 408 |
417 // Let T0 point to the last copied positional argument, i.e. to | 409 // Let T0 point to the last copied positional argument, i.e. to |
418 // fp[kFirstLocalSlotIndex - (num_pos_args - 1)]. | 410 // fp[kFirstLocalSlotIndex - (num_pos_args - 1)]. |
419 __ addiu(T0, FP, Immediate((kFirstLocalSlotIndex + 1) * kWordSize)); | 411 __ addiu(T0, FP, Immediate((kFirstLocalSlotIndex + 1) * kWordSize)); |
420 __ sll(T3, T2, 1); // T2 is a Smi. | 412 __ sll(T3, T2, 1); // T2 is a Smi. |
421 __ subu(T0, T0, T3); | 413 __ subu(T0, T0, T3); |
422 | 414 |
423 Label loop, loop_condition; | 415 Label loop, loop_condition; |
424 __ b(&loop_condition); | 416 __ b(&loop_condition); |
425 __ delay_slot()->SmiUntag(T2); | 417 __ delay_slot()->SmiUntag(T2); |
426 // We do not use the final allocation index of the variable here, i.e. | 418 // We do not use the final allocation index of the variable here, i.e. |
427 // scope->VariableAt(i)->index(), because captured variables still need | 419 // scope->VariableAt(i)->index(), because captured variables still need |
428 // to be copied to the context that is not yet allocated. | 420 // to be copied to the context that is not yet allocated. |
429 __ Bind(&loop); | 421 __ Bind(&loop); |
430 __ addu(T4, T1, T2); | 422 __ addu(T4, T1, T2); |
431 __ addu(T5, T0, T2); | 423 __ addu(T5, T0, T2); |
432 __ lw(TMP, Address(T4)); | 424 __ lw(T3, Address(T4)); |
433 __ sw(TMP, Address(T5)); | 425 __ sw(T3, Address(T5)); |
434 __ Bind(&loop_condition); | 426 __ Bind(&loop_condition); |
435 __ addiu(T2, T2, Immediate(-kWordSize)); | 427 __ addiu(T2, T2, Immediate(-kWordSize)); |
436 __ bgez(T2, &loop); | 428 __ bgez(T2, &loop); |
437 | 429 |
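The "use sll 1 instead of sll 2" comment above relies on the standard one-bit Smi tag: a Smi carries its integer value shifted left one bit, so scaling a Smi count to a byte offset on 32-bit MIPS (kWordSize == 4) needs only one further shift. Worked through for a count of 3:

  // value            = 3
  // Smi raw value    = 3 << 1 = 6           (low tag bit is 0)
  // after sll by 1   : 6 << 1 = 12 = 3 * kWordSize   (the wanted byte offset)
  // an untagged 3 would instead need sll by 2: 3 << 2 = 12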
438 // Copy or initialize optional named arguments. | 430 // Copy or initialize optional named arguments. |
439 Label all_arguments_processed; | 431 Label all_arguments_processed; |
440 if (num_opt_named_params > 0) { | 432 if (num_opt_named_params > 0) { |
441 // Start by alphabetically sorting the names of the optional parameters. | 433 // Start by alphabetically sorting the names of the optional parameters. |
442 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; | 434 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; |
443 int* opt_param_position = new int[num_opt_named_params]; | 435 int* opt_param_position = new int[num_opt_named_params]; |
(...skipping 24 matching lines...)
468 // Let T0 point to the entry of the first named argument. | 460 // Let T0 point to the entry of the first named argument. |
469 __ addiu(T0, S4, Immediate( | 461 __ addiu(T0, S4, Immediate( |
470 ArgumentsDescriptor::first_named_entry_offset() - kHeapObjectTag)); | 462 ArgumentsDescriptor::first_named_entry_offset() - kHeapObjectTag)); |
471 for (int i = 0; i < num_opt_named_params; i++) { | 463 for (int i = 0; i < num_opt_named_params; i++) { |
472 Label load_default_value, assign_optional_parameter; | 464 Label load_default_value, assign_optional_parameter; |
473 const int param_pos = opt_param_position[i]; | 465 const int param_pos = opt_param_position[i]; |
474 // Check if this named parameter was passed in. | 466 // Check if this named parameter was passed in. |
475 // Load T3 with the name of the argument. | 467 // Load T3 with the name of the argument. |
476 __ lw(T3, Address(T0, ArgumentsDescriptor::name_offset())); | 468 __ lw(T3, Address(T0, ArgumentsDescriptor::name_offset())); |
477 ASSERT(opt_param[i]->name().IsSymbol()); | 469 ASSERT(opt_param[i]->name().IsSymbol()); |
478 __ LoadObject(T4, opt_param[i]->name()); | 470 __ BranchNe(T3, opt_param[i]->name(), &load_default_value); |
479 __ bne(T3, T4, &load_default_value); | |
480 | 471 |
481 // Load T3 with passed-in argument at provided arg_pos, i.e. at | 472 // Load T3 with passed-in argument at provided arg_pos, i.e. at |
482 // fp[kLastParamSlotIndex + num_args - 1 - arg_pos]. | 473 // fp[kLastParamSlotIndex + num_args - 1 - arg_pos]. |
483 __ lw(T3, Address(T0, ArgumentsDescriptor::position_offset())); | 474 __ lw(T3, Address(T0, ArgumentsDescriptor::position_offset())); |
484 // T3 is arg_pos as Smi. | 475 // T3 is arg_pos as Smi. |
485 // Point to next named entry. | 476 // Point to next named entry. |
486 __ addiu(T0, T0, Immediate(ArgumentsDescriptor::named_entry_size())); | 477 __ addiu(T0, T0, Immediate(ArgumentsDescriptor::named_entry_size())); |
487 __ subu(T3, ZR, T3); | 478 __ subu(T3, ZR, T3); |
488 __ sll(T3, T3, 1); | 479 __ sll(T3, T3, 1); |
489 __ addu(T3, T1, T3); | 480 __ addu(T3, T1, T3); |
(...skipping 11 matching lines...)
501 // We do not use the final allocation index of the variable here, i.e. | 492 // We do not use the final allocation index of the variable here, i.e. |
502 // scope->VariableAt(i)->index(), because captured variables still need | 493 // scope->VariableAt(i)->index(), because captured variables still need |
503 // to be copied to the context that is not yet allocated. | 494 // to be copied to the context that is not yet allocated. |
504 const intptr_t computed_param_pos = kFirstLocalSlotIndex - param_pos; | 495 const intptr_t computed_param_pos = kFirstLocalSlotIndex - param_pos; |
505 __ sw(T3, Address(FP, computed_param_pos * kWordSize)); | 496 __ sw(T3, Address(FP, computed_param_pos * kWordSize)); |
506 } | 497 } |
507 delete[] opt_param; | 498 delete[] opt_param; |
508 delete[] opt_param_position; | 499 delete[] opt_param_position; |
509 // Check that T0 now points to the null terminator in the array descriptor. | 500 // Check that T0 now points to the null terminator in the array descriptor. |
510 __ lw(T3, Address(T0)); | 501 __ lw(T3, Address(T0)); |
511 __ LoadImmediate(T4, reinterpret_cast<int32_t>(Object::null())); | 502 __ BranchEq(T3, reinterpret_cast<int32_t>(Object::null()), |
512 __ beq(T3, T4, &all_arguments_processed); | 503 &all_arguments_processed); |
513 } else { | 504 } else { |
514 ASSERT(num_opt_pos_params > 0); | 505 ASSERT(num_opt_pos_params > 0); |
515 __ lw(T2, | 506 __ lw(T2, |
516 FieldAddress(S4, ArgumentsDescriptor::positional_count_offset())); | 507 FieldAddress(S4, ArgumentsDescriptor::positional_count_offset())); |
517 __ SmiUntag(T2); | 508 __ SmiUntag(T2); |
518 for (int i = 0; i < num_opt_pos_params; i++) { | 509 for (int i = 0; i < num_opt_pos_params; i++) { |
519 Label next_parameter; | 510 Label next_parameter; |
520 // Handle this optional positional parameter only if k or fewer positional | 511 // Handle this optional positional parameter only if k or fewer positional |
521 // arguments have been passed, where k is param_pos, the position of this | 512 // arguments have been passed, where k is param_pos, the position of this |
522 // optional parameter in the formal parameter list. | 513 // optional parameter in the formal parameter list. |
523 const int param_pos = num_fixed_params + i; | 514 const int param_pos = num_fixed_params + i; |
524 __ addiu(T3, T2, Immediate(-param_pos)); | 515 __ BranchGt(T2, param_pos, &next_parameter); |
525 __ bgtz(T3, &next_parameter); | |
526 // Load T3 with default argument. | 516 // Load T3 with default argument. |
527 const Object& value = Object::ZoneHandle( | 517 const Object& value = Object::ZoneHandle( |
528 parsed_function().default_parameter_values().At(i)); | 518 parsed_function().default_parameter_values().At(i)); |
529 __ LoadObject(T3, value); | 519 __ LoadObject(T3, value); |
530 // Assign T3 to fp[kFirstLocalSlotIndex - param_pos]. | 520 // Assign T3 to fp[kFirstLocalSlotIndex - param_pos]. |
531 // We do not use the final allocation index of the variable here, i.e. | 521 // We do not use the final allocation index of the variable here, i.e. |
532 // scope->VariableAt(i)->index(), because captured variables still need | 522 // scope->VariableAt(i)->index(), because captured variables still need |
533 // to be copied to the context that is not yet allocated. | 523 // to be copied to the context that is not yet allocated. |
534 const intptr_t computed_param_pos = kFirstLocalSlotIndex - param_pos; | 524 const intptr_t computed_param_pos = kFirstLocalSlotIndex - param_pos; |
535 __ sw(T3, Address(FP, computed_param_pos * kWordSize)); | 525 __ sw(T3, Address(FP, computed_param_pos * kWordSize)); |
536 __ Bind(&next_parameter); | 526 __ Bind(&next_parameter); |
537 } | 527 } |
538 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 528 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
539 __ SmiUntag(T1); | 529 __ SmiUntag(T1); |
540 // Check that T2 equals T1, i.e. no named arguments passed. | 530 // Check that T2 equals T1, i.e. no named arguments passed. |
541 __ beq(T2, T2, &all_arguments_processed); | 531 __ beq(T2, T1, &all_arguments_processed); |
542 } | 532 } |
543 | 533 |
544 __ Bind(&wrong_num_arguments); | 534 __ Bind(&wrong_num_arguments); |
545 if (StackSize() != 0) { | 535 if (StackSize() != 0) { |
546 // We need to unwind the space we reserved for locals and copied parameters. | 536 // We need to unwind the space we reserved for locals and copied parameters. |
547 // The NoSuchMethodFunction stub does not expect to see that area on the | 537 // The NoSuchMethodFunction stub does not expect to see that area on the |
548 // stack. | 538 // stack. |
549 __ addiu(SP, SP, Immediate(StackSize() * kWordSize)); | 539 __ addiu(SP, SP, Immediate(StackSize() * kWordSize)); |
550 } | 540 } |
551 // The call below has an empty stackmap because we have just | 541 // The call below has an empty stackmap because we have just |
(...skipping 31 matching lines...)
583 // Nullify originally passed arguments only after they have been copied and | 573 // Nullify originally passed arguments only after they have been copied and |
584 // checked, otherwise noSuchMethod would not see their original values. | 574 // checked, otherwise noSuchMethod would not see their original values. |
585 // This step can be skipped in case we decide that formal parameters are | 575 // This step can be skipped in case we decide that formal parameters are |
586 // implicitly final, since garbage collecting the unmodified value is not | 576 // implicitly final, since garbage collecting the unmodified value is not |
587 // an issue anymore. | 577 // an issue anymore. |
588 | 578 |
589 // S4 : arguments descriptor array. | 579 // S4 : arguments descriptor array. |
590 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 580 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
591 __ SmiUntag(T2); | 581 __ SmiUntag(T2); |
592 | 582 |
593 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 583 __ LoadImmediate(T0, reinterpret_cast<intptr_t>(Object::null())); |
594 Label null_args_loop, null_args_loop_condition; | 584 Label null_args_loop, null_args_loop_condition; |
595 __ b(&null_args_loop_condition); | 585 __ b(&null_args_loop_condition); |
596 __ delay_slot()->addiu(T1, FP, Immediate(kLastParamSlotIndex * kWordSize)); | 586 __ delay_slot()->addiu(T1, FP, Immediate(kLastParamSlotIndex * kWordSize)); |
597 __ Bind(&null_args_loop); | 587 __ Bind(&null_args_loop); |
598 __ addu(T3, T1, T2); | 588 __ addu(T3, T1, T2); |
599 __ sw(TMP, Address(T3)); | 589 __ sw(T0, Address(T3)); |
600 __ Bind(&null_args_loop_condition); | 590 __ Bind(&null_args_loop_condition); |
601 __ addiu(T2, T2, Immediate(-kWordSize)); | 591 __ addiu(T2, T2, Immediate(-kWordSize)); |
602 __ bgez(T2, &null_args_loop); | 592 __ bgez(T2, &null_args_loop); |
603 } | 593 } |
604 | 594 |
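A note for readers new to MIPS on the delay_slot() calls in this function: the instruction immediately following a branch occupies the branch delay slot and executes regardless of whether the branch is taken, so the assembler exposes the slot explicitly instead of padding it with a nop. The loops above exploit this to get one setup instruction for free:

  // __ b(&null_args_loop_condition);  // Jump to the loop test...
  // __ delay_slot()->addiu(T1, FP, Immediate(kLastParamSlotIndex * kWordSize));
  //                                   // ...but this executes before the jump
  //                                   // lands, initializing T1 on the way in.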
605 | 595 |
606 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { | 596 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { |
607 UNIMPLEMENTED(); | 597 UNIMPLEMENTED(); |
608 } | 598 } |
609 | 599 |
610 | 600 |
611 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { | 601 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { |
612 UNIMPLEMENTED(); | 602 UNIMPLEMENTED(); |
613 } | 603 } |
614 | 604 |
615 | 605 |
616 void FlowGraphCompiler::EmitFrameEntry() { | 606 void FlowGraphCompiler::EmitFrameEntry() { |
617 const Function& function = parsed_function().function(); | 607 const Function& function = parsed_function().function(); |
618 if (CanOptimizeFunction() && function.is_optimizable()) { | 608 if (CanOptimizeFunction() && function.is_optimizable()) { |
619 const bool can_optimize = !is_optimizing() || may_reoptimize(); | 609 const bool can_optimize = !is_optimizing() || may_reoptimize(); |
620 const Register function_reg = T0; | 610 const Register function_reg = T0; |
621 if (can_optimize) { | 611 if (can_optimize) { |
622 Label next; | 612 Label next; |
623 // The pool pointer is not set up before entering the Dart frame. | 613 // The pool pointer is not set up before entering the Dart frame. |
624 | 614 |
625 __ mov(TMP, RA); // Save RA. | 615 __ mov(TMP1, RA); // Save RA. |
626 __ bal(&next); // Branch and link to next instruction to get PC in RA. | 616 __ bal(&next); // Branch and link to next instruction to get PC in RA. |
627 __ delay_slot()->mov(T2, RA); // Save PC of the following mov. | 617 __ delay_slot()->mov(T2, RA); // Save PC of the following mov. |
628 | 618 |
629 // Calculate offset of pool pointer from the PC. | 619 // Calculate offset of pool pointer from the PC. |
630 const intptr_t object_pool_pc_dist = | 620 const intptr_t object_pool_pc_dist = |
631 Instructions::HeaderSize() - Instructions::object_pool_offset() + | 621 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
632 assembler()->CodeSize(); | 622 assembler()->CodeSize(); |
633 | 623 |
634 __ Bind(&next); | 624 __ Bind(&next); |
635 __ mov(RA, TMP); // Restore RA. | 625 __ mov(RA, TMP1); // Restore RA. |
636 | 626 |
637 // Preserve PP of caller. | 627 // Preserve PP of caller. |
638 __ mov(T1, PP); | 628 __ mov(T1, PP); |
639 | 629 |
640 // Temporarily set up the pool pointer for this Dart function. | 630 // Temporarily set up the pool pointer for this Dart function. |
641 __ lw(PP, Address(T2, -object_pool_pc_dist)); | 631 __ lw(PP, Address(T2, -object_pool_pc_dist)); |
642 | 632 |
643 // Load function object from object pool. | 633 // Load function object from object pool. |
644 __ LoadObject(function_reg, function); // Uses PP. | 634 __ LoadObject(function_reg, function); // Uses PP. |
645 | 635 |
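The object_pool_pc_dist arithmetic above reads as follows, assuming CodeSize() at that point is the byte offset of the bal's saved PC within the generated instructions:

  // T2 - CodeSize()               ~ start of this function's instruction payload
  // payload start - HeaderSize()  = start of the enclosing Instructions object
  // object start + object_pool_offset() = address of the embedded pool field
  //
  // Folding these together:
  //   pool field = T2 - (HeaderSize() - object_pool_offset() + CodeSize())
  //              = T2 - object_pool_pc_dist
  // which is exactly what lw(PP, Address(T2, -object_pool_pc_dist)) loads.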
(...skipping 13 matching lines...)
659 __ addiu(T1, T1, Immediate(1)); | 649 __ addiu(T1, T1, Immediate(1)); |
660 __ sw(T1, FieldAddress(function_reg, | 650 __ sw(T1, FieldAddress(function_reg, |
661 Function::usage_counter_offset())); | 651 Function::usage_counter_offset())); |
662 } else { | 652 } else { |
663 __ lw(T1, FieldAddress(function_reg, | 653 __ lw(T1, FieldAddress(function_reg, |
664 Function::usage_counter_offset())); | 654 Function::usage_counter_offset())); |
665 } | 655 } |
666 | 656 |
667 // Skip the Branch below if T1 is less than the threshold. | 657 // Skip the Branch below if T1 is less than the threshold. |
668 Label dont_branch; | 658 Label dont_branch; |
669 __ LoadImmediate(T2, FLAG_optimization_counter_threshold); | 659 __ BranchLt(T1, FLAG_optimization_counter_threshold, &dont_branch); |
670 __ sltu(T2, T1, T2); | |
671 __ bgtz(T2, &dont_branch); | |
672 | 660 |
673 ASSERT(function_reg == T0); | 661 ASSERT(function_reg == T0); |
674 __ Branch(&StubCode::OptimizeFunctionLabel()); | 662 __ Branch(&StubCode::OptimizeFunctionLabel()); |
675 | 663 |
676 __ Bind(&dont_branch); | 664 __ Bind(&dont_branch); |
677 } | 665 } |
678 } else { | 666 } else { |
679 AddCurrentDescriptor(PcDescriptors::kEntryPatch, | 667 AddCurrentDescriptor(PcDescriptors::kEntryPatch, |
680 Isolate::kNoDeoptId, | 668 Isolate::kNoDeoptId, |
681 0); // No token position. | 669 0); // No token position. |
(...skipping 40 matching lines...)
722 ASSERT(!parsed_function().function().HasOptionalParameters()); | 710 ASSERT(!parsed_function().function().HasOptionalParameters()); |
723 const bool check_arguments = true; | 711 const bool check_arguments = true; |
724 #else | 712 #else |
725 const bool check_arguments = function.IsClosureFunction(); | 713 const bool check_arguments = function.IsClosureFunction(); |
726 #endif | 714 #endif |
727 if (check_arguments) { | 715 if (check_arguments) { |
728 __ Comment("Check argument count"); | 716 __ Comment("Check argument count"); |
729 // Check that exactly num_fixed arguments are passed in. | 717 // Check that exactly num_fixed arguments are passed in. |
730 Label correct_num_arguments, wrong_num_arguments; | 718 Label correct_num_arguments, wrong_num_arguments; |
731 __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 719 __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
732 __ LoadImmediate(T1, Smi::RawValue(num_fixed_params)); | 720 __ BranchNe(T0, Smi::RawValue(num_fixed_params), &wrong_num_arguments); |
733 __ bne(T0, T1, &wrong_num_arguments); | |
734 | 721 |
735 __ lw(T1, FieldAddress(S4, | 722 __ lw(T1, FieldAddress(S4, |
736 ArgumentsDescriptor::positional_count_offset())); | 723 ArgumentsDescriptor::positional_count_offset())); |
737 __ beq(T0, T1, &correct_num_arguments); | 724 __ beq(T0, T1, &correct_num_arguments); |
738 __ Bind(&wrong_num_arguments); | 725 __ Bind(&wrong_num_arguments); |
739 if (function.IsClosureFunction()) { | 726 if (function.IsClosureFunction()) { |
740 if (StackSize() != 0) { | 727 if (StackSize() != 0) { |
741 // We need to unwind the space we reserved for locals and copied | 728 // We need to unwind the space we reserved for locals and copied |
742 // parameters. The NoSuchMethodFunction stub does not expect to see | 729 // parameters. The NoSuchMethodFunction stub does not expect to see |
743 // that area on the stack. | 730 // that area on the stack. |
(...skipping 363 matching lines...)
1107 | 1094 |
1108 | 1095 |
1109 void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) { | 1096 void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) { |
1110 UNIMPLEMENTED(); | 1097 UNIMPLEMENTED(); |
1111 } | 1098 } |
1112 | 1099 |
1113 | 1100 |
1114 } // namespace dart | 1101 } // namespace dart |
1115 | 1102 |
1116 #endif // defined TARGET_ARCH_MIPS | 1103 #endif // defined TARGET_ARCH_MIPS |