| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
| 6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
| 7 | 7 |
| 8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
| 9 | 9 |
| 10 #include "lib/error.h" | 10 #include "lib/error.h" |
| (...skipping 66 matching lines...) |
| 77 UNIMPLEMENTED(); | 77 UNIMPLEMENTED(); |
| 78 return NULL; | 78 return NULL; |
| 79 } | 79 } |
| 80 | 80 |
| 81 | 81 |
| 82 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, | 82 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, |
| 83 const GrowableArray<intptr_t>& class_ids, | 83 const GrowableArray<intptr_t>& class_ids, |
| 84 Label* is_equal_lbl, | 84 Label* is_equal_lbl, |
| 85 Label* is_not_equal_lbl) { | 85 Label* is_not_equal_lbl) { |
| 86 for (intptr_t i = 0; i < class_ids.length(); i++) { | 86 for (intptr_t i = 0; i < class_ids.length(); i++) { |
| 87 __ LoadImmediate(TMP, class_ids[i]); | 87 __ BranchEqual(class_id_reg, class_ids[i], is_equal_lbl); |
| 88 __ beq(class_id_reg, TMP, is_equal_lbl); | |
| 89 } | 88 } |
| 90 __ b(is_not_equal_lbl); | 89 __ b(is_not_equal_lbl); |
| 91 } | 90 } |
| 92 | 91 |
| 93 | 92 |
| 94 // Testing against an instantiated type with no arguments, without | 93 // Testing against an instantiated type with no arguments, without |
| 95 // SubtypeTestCache. | 94 // SubtypeTestCache. |
| 96 // A0: instance being type checked (preserved). | 95 // A0: instance being type checked (preserved). |
| 97 // Clobbers: T0, T1, T2 | 96 // Clobbers: T0, T1, T2 |
| 98 // Returns true if there is a fallthrough. | 97 // Returns true if there is a fallthrough. |
| (...skipping 15 matching lines...) |
| 114 type_class, | 113 type_class, |
| 115 TypeArguments::Handle(), | 114 TypeArguments::Handle(), |
| 116 NULL)) { | 115 NULL)) { |
| 117 __ beq(T0, ZR, is_instance_lbl); | 116 __ beq(T0, ZR, is_instance_lbl); |
| 118 } else { | 117 } else { |
| 119 __ beq(T0, ZR, is_not_instance_lbl); | 118 __ beq(T0, ZR, is_not_instance_lbl); |
| 120 } | 119 } |
| 121 // Check if the classes are equal. | 120 // Check if the classes are equal. |
| 122 const Register kClassIdReg = T0; | 121 const Register kClassIdReg = T0; |
| 123 __ LoadClassId(kClassIdReg, kInstanceReg); | 122 __ LoadClassId(kClassIdReg, kInstanceReg); |
| 124 __ LoadImmediate(T1, type_class.id()); | 123 __ BranchEqual(kClassIdReg, type_class.id(), is_instance_lbl); |
| 125 __ beq(kClassIdReg, T1, is_instance_lbl); | 124 |
| 126 // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted | 125 // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted |
| 127 // interfaces. | 126 // interfaces. |
| 128 // Bool interface can be implemented only by core class Bool. | 127 // Bool interface can be implemented only by core class Bool. |
| 129 if (type.IsBoolType()) { | 128 if (type.IsBoolType()) { |
| 130 __ LoadImmediate(T1, kBoolCid); | 129 __ BranchEqual(kClassIdReg, kBoolCid, is_instance_lbl); |
| 131 __ beq(kClassIdReg, T1, is_instance_lbl); | |
| 132 __ b(is_not_instance_lbl); | 130 __ b(is_not_instance_lbl); |
| 133 return false; | 131 return false; |
| 134 } | 132 } |
| 135 if (type.IsFunctionType()) { | 133 if (type.IsFunctionType()) { |
| 136 // Check if instance is a closure. | 134 // Check if instance is a closure. |
| 137 __ LoadClassById(T1, kClassIdReg); | 135 __ LoadClassById(T1, kClassIdReg); |
| 138 __ lw(T1, FieldAddress(T1, Class::signature_function_offset())); | 136 __ lw(T1, FieldAddress(T1, Class::signature_function_offset())); |
| 139 __ LoadImmediate(T2, reinterpret_cast<int32_t>(Object::null())); | 137 __ BranchNotEqual(T1, reinterpret_cast<int32_t>(Object::null()), |
| 140 __ bne(T1, T2, is_instance_lbl); | 138 is_instance_lbl); |
| 141 } | 139 } |
| 142 // Custom checking for numbers (Smi, Mint, Bigint and Double). | 140 // Custom checking for numbers (Smi, Mint, Bigint and Double). |
| 143 // Note that instance is not Smi (checked above). | 141 // Note that instance is not Smi (checked above). |
| 144 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { | 142 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { |
| 145 GenerateNumberTypeCheck( | 143 GenerateNumberTypeCheck( |
| 146 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); | 144 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); |
| 147 return false; | 145 return false; |
| 148 } | 146 } |
| 149 if (type.IsStringType()) { | 147 if (type.IsStringType()) { |
| 150 GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl); | 148 GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl); |
| (...skipping 46 matching lines...) |
| 197 } | 195 } |
| 198 if (TypeCheckAsClassEquality(type)) { | 196 if (TypeCheckAsClassEquality(type)) { |
| 199 const intptr_t type_cid = Class::Handle(type.type_class()).id(); | 197 const intptr_t type_cid = Class::Handle(type.type_class()).id(); |
| 200 const Register kInstanceReg = A0; | 198 const Register kInstanceReg = A0; |
| 201 __ andi(T0, kInstanceReg, Immediate(kSmiTagMask)); | 199 __ andi(T0, kInstanceReg, Immediate(kSmiTagMask)); |
| 202 if (type_cid == kSmiCid) { | 200 if (type_cid == kSmiCid) { |
| 203 __ beq(T0, ZR, is_instance_lbl); | 201 __ beq(T0, ZR, is_instance_lbl); |
| 204 } else { | 202 } else { |
| 205 __ beq(T0, ZR, is_not_instance_lbl); | 203 __ beq(T0, ZR, is_not_instance_lbl); |
| 206 __ LoadClassId(T0, kInstanceReg); | 204 __ LoadClassId(T0, kInstanceReg); |
| 207 __ LoadImmediate(T1, type_cid); | 205 __ BranchEqual(T0, type_cid, is_instance_lbl); |
| 208 __ beq(T0, T1, is_instance_lbl); | |
| 209 } | 206 } |
| 210 __ b(is_not_instance_lbl); | 207 __ b(is_not_instance_lbl); |
| 211 return SubtypeTestCache::null(); | 208 return SubtypeTestCache::null(); |
| 212 } | 209 } |
| 213 if (type.IsInstantiated()) { | 210 if (type.IsInstantiated()) { |
| 214 const Class& type_class = Class::ZoneHandle(type.type_class()); | 211 const Class& type_class = Class::ZoneHandle(type.type_class()); |
| 215 // A Smi object cannot be an instance of a parameterized class. | 212 // A Smi object cannot be an instance of a parameterized class. |
| 216 // A class equality check is only applicable with a dst type of a | 213 // A class equality check is only applicable with a dst type of a |
| 217 // non-parameterized class or with a raw dst type of a parameterized class. | 214 // non-parameterized class or with a raw dst type of a parameterized class. |
| 218 if (type_class.HasTypeArguments()) { | 215 if (type_class.HasTypeArguments()) { |
| (...skipping 56 matching lines...) |
| 275 ASSERT(dst_type.IsFinalized()); | 272 ASSERT(dst_type.IsFinalized()); |
| 276 // Assignable check is skipped in FlowGraphBuilder, not here. | 273 // Assignable check is skipped in FlowGraphBuilder, not here. |
| 277 ASSERT(dst_type.IsMalformed() || | 274 ASSERT(dst_type.IsMalformed() || |
| 278 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); | 275 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); |
| 279 // Preserve instantiator and its type arguments. | 276 // Preserve instantiator and its type arguments. |
| 280 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 277 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
| 281 __ sw(A2, Address(SP, 1 * kWordSize)); | 278 __ sw(A2, Address(SP, 1 * kWordSize)); |
| 282 __ sw(A1, Address(SP, 0 * kWordSize)); | 279 __ sw(A1, Address(SP, 0 * kWordSize)); |
| 283 // A null object is always assignable and is returned as the result. | 280 // A null object is always assignable and is returned as the result. |
| 284 Label is_assignable, runtime_call; | 281 Label is_assignable, runtime_call; |
| 285 __ LoadImmediate(T0, reinterpret_cast<int32_t>(Object::null())); | 282 __ BranchEqual(A0, reinterpret_cast<int32_t>(Object::null()), &is_assignable); |
| 286 __ beq(A0, T0, &is_assignable); | |
| 287 | 283 |
| 288 if (!FLAG_eliminate_type_checks) { | 284 if (!FLAG_eliminate_type_checks) { |
| 289 // If type checks are not eliminated during graph building, then | 285 // If type checks are not eliminated during graph building, then |
| 290 // a transition sentinel can be seen here. | 286 // a transition sentinel can be seen here. |
| 291 __ LoadObject(T0, Object::transition_sentinel()); | 287 __ BranchEqual(A0, Object::transition_sentinel(), &is_assignable); |
| 292 __ beq(A0, T0, &is_assignable); | |
| 293 } | 288 } |
| 294 | 289 |
| 295 // Generate throw new TypeError() if the type is malformed. | 290 // Generate throw new TypeError() if the type is malformed. |
| 296 if (dst_type.IsMalformed()) { | 291 if (dst_type.IsMalformed()) { |
| 297 const Error& error = Error::Handle(dst_type.malformed_error()); | 292 const Error& error = Error::Handle(dst_type.malformed_error()); |
| 298 const String& error_message = String::ZoneHandle( | 293 const String& error_message = String::ZoneHandle( |
| 299 Symbols::New(error.ToErrorCString())); | 294 Symbols::New(error.ToErrorCString())); |
| 300 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 295 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
| 301 __ Push(A0); // Push the source object. | 296 __ Push(A0); // Push the source object. |
| 302 __ PushObject(dst_name); // Push the name of the destination. | 297 __ PushObject(dst_name); // Push the name of the destination. |
| (...skipping 34 matching lines...) |
| 337 __ LoadObject(A0, test_cache); | 332 __ LoadObject(A0, test_cache); |
| 338 __ Push(A0); | 333 __ Push(A0); |
| 339 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, locs); | 334 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, locs); |
| 340 // Pop the parameters supplied to the runtime entry. The result of the | 335 // Pop the parameters supplied to the runtime entry. The result of the |
| 341 // type check runtime call is the checked value. | 336 // type check runtime call is the checked value. |
| 342 __ Drop(6); | 337 __ Drop(6); |
| 343 __ Pop(A0); | 338 __ Pop(A0); |
| 344 | 339 |
| 345 __ Bind(&is_assignable); | 340 __ Bind(&is_assignable); |
| 346 // Restore instantiator and its type arguments. | 341 // Restore instantiator and its type arguments. |
| 347 __ lw(A1, Address(SP, 0 * kWordSize)); | 342 __ lw(A1, Address(SP, 0 * kWordSize)); |
| 348 __ lw(A2, Address(SP, 1 * kWordSize)); | 343 __ lw(A2, Address(SP, 1 * kWordSize)); |
| 349 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 344 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
| 350 } | 345 } |
| 351 | 346 |
| 352 | 347 |
| 353 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) { | 348 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) { |
| 354 if (!is_optimizing()) { | 349 if (!is_optimizing()) { |
| 355 if (FLAG_enable_type_checks && instr->IsAssertAssignable()) { | 350 if (FLAG_enable_type_checks && instr->IsAssertAssignable()) { |
| 356 AssertAssignableInstr* assert = instr->AsAssertAssignable(); | 351 AssertAssignableInstr* assert = instr->AsAssertAssignable(); |
| 357 AddCurrentDescriptor(PcDescriptors::kDeoptBefore, | 352 AddCurrentDescriptor(PcDescriptors::kDeoptBefore, |
| 358 assert->deopt_id(), | 353 assert->deopt_id(), |
| 359 assert->token_pos()); | 354 assert->token_pos()); |
| (...skipping 27 matching lines...) |
| 387 ASSERT(parsed_function().first_parameter_index() == kFirstLocalSlotIndex); | 382 ASSERT(parsed_function().first_parameter_index() == kFirstLocalSlotIndex); |
| 388 | 383 |
| 389 // Check that min_num_pos_args <= num_pos_args <= max_num_pos_args, | 384 // Check that min_num_pos_args <= num_pos_args <= max_num_pos_args, |
| 390 // where num_pos_args is the number of positional arguments passed in. | 385 // where num_pos_args is the number of positional arguments passed in. |
| 391 const int min_num_pos_args = num_fixed_params; | 386 const int min_num_pos_args = num_fixed_params; |
| 392 const int max_num_pos_args = num_fixed_params + num_opt_pos_params; | 387 const int max_num_pos_args = num_fixed_params + num_opt_pos_params; |
| 393 | 388 |
| 394 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::positional_count_offset())); | 389 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::positional_count_offset())); |
| 395 // Check that min_num_pos_args <= num_pos_args. | 390 // Check that min_num_pos_args <= num_pos_args. |
| 396 Label wrong_num_arguments; | 391 Label wrong_num_arguments; |
| 397 __ addiu(T3, T2, Immediate(-Smi::RawValue(min_num_pos_args))); | 392 __ BranchLess(T2, Smi::RawValue(min_num_pos_args), &wrong_num_arguments); |
| 398 __ bltz(T3, &wrong_num_arguments); | |
| 399 | 393 |
| 400 // Check that num_pos_args <= max_num_pos_args. | 394 // Check that num_pos_args <= max_num_pos_args. |
| 401 __ addiu(T3, T2, Immediate(-Smi::RawValue(max_num_pos_args))); | 395 __ BranchGreater(T2, Smi::RawValue(max_num_pos_args), &wrong_num_arguments); |
| 402 __ bgtz(T3, &wrong_num_arguments); | |
| 403 | 396 |
| 404 // Copy positional arguments. | 397 // Copy positional arguments. |
| 405 // Argument i passed at fp[kLastParamSlotIndex + num_args - 1 - i] is copied | 398 // Argument i passed at fp[kLastParamSlotIndex + num_args - 1 - i] is copied |
| 406 // to fp[kFirstLocalSlotIndex - i]. | 399 // to fp[kFirstLocalSlotIndex - i]. |
| 407 | 400 |
| 408 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 401 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
| 409 // Since T1 and T2 are Smi, use LSL 1 instead of LSL 2. | 402 // Since T1 and T2 are Smi, use sll 1 instead of sll 2. |
| 410 // Let T1 point to the last passed positional argument, i.e. to | 403 // Let T1 point to the last passed positional argument, i.e. to |
| 411 // fp[kLastParamSlotIndex + num_args - 1 - (num_pos_args - 1)]. | 404 // fp[kLastParamSlotIndex + num_args - 1 - (num_pos_args - 1)]. |
| 412 __ subu(T1, T1, T2); | 405 __ subu(T1, T1, T2); |
| 413 __ sll(T1, T1, 1); | 406 __ sll(T1, T1, 1); |
| 414 __ addu(T1, FP, T1); | 407 __ addu(T1, FP, T1); |
| 415 __ addiu(T1, T1, Immediate(kLastParamSlotIndex * kWordSize)); | 408 __ addiu(T1, T1, Immediate(kLastParamSlotIndex * kWordSize)); |
| 416 | 409 |
| 417 // Let T0 point to the last copied positional argument, i.e. to | 410 // Let T0 point to the last copied positional argument, i.e. to |
| 418 // fp[kFirstLocalSlotIndex - (num_pos_args - 1)]. | 411 // fp[kFirstLocalSlotIndex - (num_pos_args - 1)]. |
| 419 __ addiu(T0, FP, Immediate((kFirstLocalSlotIndex + 1) * kWordSize)); | 412 __ addiu(T0, FP, Immediate((kFirstLocalSlotIndex + 1) * kWordSize)); |
| 420 __ sll(T3, T2, 1); // T2 is a Smi. | 413 __ sll(T3, T2, 1); // T2 is a Smi. |
| 421 __ subu(T0, T0, T3); | 414 __ subu(T0, T0, T3); |
| 422 | 415 |
| 423 Label loop, loop_condition; | 416 Label loop, loop_condition; |
| 424 __ b(&loop_condition); | 417 __ b(&loop_condition); |
| 425 __ delay_slot()->SmiUntag(T2); | 418 __ delay_slot()->SmiUntag(T2); |
| 426 // We do not use the final allocation index of the variable here, i.e. | 419 // We do not use the final allocation index of the variable here, i.e. |
| 427 // scope->VariableAt(i)->index(), because captured variables still need | 420 // scope->VariableAt(i)->index(), because captured variables still need |
| 428 // to be copied to the context that is not yet allocated. | 421 // to be copied to the context that is not yet allocated. |
| 429 __ Bind(&loop); | 422 __ Bind(&loop); |
| 430 __ addu(T4, T1, T2); | 423 __ addu(T4, T1, T2); |
| 431 __ addu(T5, T0, T2); | 424 __ addu(T5, T0, T2); |
| 432 __ lw(TMP, Address(T4)); | 425 __ lw(T3, Address(T4)); |
| 433 __ sw(TMP, Address(T5)); | 426 __ sw(T3, Address(T5)); |
| 434 __ Bind(&loop_condition); | 427 __ Bind(&loop_condition); |
| 435 __ addiu(T2, T2, Immediate(-kWordSize)); | 428 __ addiu(T2, T2, Immediate(-kWordSize)); |
| 436 __ bgez(T2, &loop); | 429 __ bgez(T2, &loop); |
| 437 | 430 |
| 438 // Copy or initialize optional named arguments. | 431 // Copy or initialize optional named arguments. |
| 439 Label all_arguments_processed; | 432 Label all_arguments_processed; |
| 440 if (num_opt_named_params > 0) { | 433 if (num_opt_named_params > 0) { |
| 441 // Start by alphabetically sorting the names of the optional parameters. | 434 // Start by alphabetically sorting the names of the optional parameters. |
| 442 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; | 435 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; |
| 443 int* opt_param_position = new int[num_opt_named_params]; | 436 int* opt_param_position = new int[num_opt_named_params]; |
| (...skipping 24 matching lines...) |
| 468 // Let T0 point to the entry of the first named argument. | 461 // Let T0 point to the entry of the first named argument. |
| 469 __ addiu(T0, S4, Immediate( | 462 __ addiu(T0, S4, Immediate( |
| 470 ArgumentsDescriptor::first_named_entry_offset() - kHeapObjectTag)); | 463 ArgumentsDescriptor::first_named_entry_offset() - kHeapObjectTag)); |
| 471 for (int i = 0; i < num_opt_named_params; i++) { | 464 for (int i = 0; i < num_opt_named_params; i++) { |
| 472 Label load_default_value, assign_optional_parameter; | 465 Label load_default_value, assign_optional_parameter; |
| 473 const int param_pos = opt_param_position[i]; | 466 const int param_pos = opt_param_position[i]; |
| 474 // Check if this named parameter was passed in. | 467 // Check if this named parameter was passed in. |
| 475 // Load T3 with the name of the argument. | 468 // Load T3 with the name of the argument. |
| 476 __ lw(T3, Address(T0, ArgumentsDescriptor::name_offset())); | 469 __ lw(T3, Address(T0, ArgumentsDescriptor::name_offset())); |
| 477 ASSERT(opt_param[i]->name().IsSymbol()); | 470 ASSERT(opt_param[i]->name().IsSymbol()); |
| 478 __ LoadObject(T4, opt_param[i]->name()); | 471 __ BranchNotEqual(T3, opt_param[i]->name(), &load_default_value); |
| 479 __ bne(T3, T4, &load_default_value); | |
| 480 | 472 |
| 481 // Load T3 with passed-in argument at provided arg_pos, i.e. at | 473 // Load T3 with passed-in argument at provided arg_pos, i.e. at |
| 482 // fp[kLastParamSlotIndex + num_args - 1 - arg_pos]. | 474 // fp[kLastParamSlotIndex + num_args - 1 - arg_pos]. |
| 483 __ lw(T3, Address(T0, ArgumentsDescriptor::position_offset())); | 475 __ lw(T3, Address(T0, ArgumentsDescriptor::position_offset())); |
| 484 // T3 is arg_pos as Smi. | 476 // T3 is arg_pos as Smi. |
| 485 // Point to next named entry. | 477 // Point to next named entry. |
| 486 __ addiu(T0, T0, Immediate(ArgumentsDescriptor::named_entry_size())); | 478 __ addiu(T0, T0, Immediate(ArgumentsDescriptor::named_entry_size())); |
| 487 __ subu(T3, ZR, T3); | 479 __ subu(T3, ZR, T3); |
| 488 __ sll(T3, T3, 1); | 480 __ sll(T3, T3, 1); |
| 489 __ addu(T3, T1, T3); | 481 __ addu(T3, T1, T3); |
| (...skipping 11 matching lines...) |
| 501 // We do not use the final allocation index of the variable here, i.e. | 493 // We do not use the final allocation index of the variable here, i.e. |
| 502 // scope->VariableAt(i)->index(), because captured variables still need | 494 // scope->VariableAt(i)->index(), because captured variables still need |
| 503 // to be copied to the context that is not yet allocated. | 495 // to be copied to the context that is not yet allocated. |
| 504 const intptr_t computed_param_pos = kFirstLocalSlotIndex - param_pos; | 496 const intptr_t computed_param_pos = kFirstLocalSlotIndex - param_pos; |
| 505 __ sw(T3, Address(FP, computed_param_pos * kWordSize)); | 497 __ sw(T3, Address(FP, computed_param_pos * kWordSize)); |
| 506 } | 498 } |
| 507 delete[] opt_param; | 499 delete[] opt_param; |
| 508 delete[] opt_param_position; | 500 delete[] opt_param_position; |
| 509 // Check that T0 now points to the null terminator in the array descriptor. | 501 // Check that T0 now points to the null terminator in the array descriptor. |
| 510 __ lw(T3, Address(T0)); | 502 __ lw(T3, Address(T0)); |
| 511 __ LoadImmediate(T4, reinterpret_cast<int32_t>(Object::null())); | 503 __ BranchEqual(T3, reinterpret_cast<int32_t>(Object::null()), |
| 512 __ beq(T3, T4, &all_arguments_processed); | 504 &all_arguments_processed); |
| 513 } else { | 505 } else { |
| 514 ASSERT(num_opt_pos_params > 0); | 506 ASSERT(num_opt_pos_params > 0); |
| 515 __ lw(T2, | 507 __ lw(T2, |
| 516 FieldAddress(S4, ArgumentsDescriptor::positional_count_offset())); | 508 FieldAddress(S4, ArgumentsDescriptor::positional_count_offset())); |
| 517 __ SmiUntag(T2); | 509 __ SmiUntag(T2); |
| 518 for (int i = 0; i < num_opt_pos_params; i++) { | 510 for (int i = 0; i < num_opt_pos_params; i++) { |
| 519 Label next_parameter; | 511 Label next_parameter; |
| 520 // Handle this optional positional parameter only if k or fewer positional | 512 // Handle this optional positional parameter only if k or fewer positional |
| 521 // arguments have been passed, where k is param_pos, the position of this | 513 // arguments have been passed, where k is param_pos, the position of this |
| 522 // optional parameter in the formal parameter list. | 514 // optional parameter in the formal parameter list. |
| 523 const int param_pos = num_fixed_params + i; | 515 const int param_pos = num_fixed_params + i; |
| 524 __ addiu(T3, T2, Immediate(-param_pos)); | 516 __ BranchGreater(T2, param_pos, &next_parameter); |
| 525 __ bgtz(T3, &next_parameter); | |
| 526 // Load T3 with default argument. | 517 // Load T3 with default argument. |
| 527 const Object& value = Object::ZoneHandle( | 518 const Object& value = Object::ZoneHandle( |
| 528 parsed_function().default_parameter_values().At(i)); | 519 parsed_function().default_parameter_values().At(i)); |
| 529 __ LoadObject(T3, value); | 520 __ LoadObject(T3, value); |
| 530 // Assign T3 to fp[kFirstLocalSlotIndex - param_pos]. | 521 // Assign T3 to fp[kFirstLocalSlotIndex - param_pos]. |
| 531 // We do not use the final allocation index of the variable here, i.e. | 522 // We do not use the final allocation index of the variable here, i.e. |
| 532 // scope->VariableAt(i)->index(), because captured variables still need | 523 // scope->VariableAt(i)->index(), because captured variables still need |
| 533 // to be copied to the context that is not yet allocated. | 524 // to be copied to the context that is not yet allocated. |
| 534 const intptr_t computed_param_pos = kFirstLocalSlotIndex - param_pos; | 525 const intptr_t computed_param_pos = kFirstLocalSlotIndex - param_pos; |
| 535 __ sw(T3, Address(FP, computed_param_pos * kWordSize)); | 526 __ sw(T3, Address(FP, computed_param_pos * kWordSize)); |
| 536 __ Bind(&next_parameter); | 527 __ Bind(&next_parameter); |
| 537 } | 528 } |
| 538 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 529 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
| 539 __ SmiUntag(T1); | 530 __ SmiUntag(T1); |
| 540 // Check that T2 equals T1, i.e. no named arguments passed. | 531 // Check that T2 equals T1, i.e. no named arguments passed. |
| 541 __ beq(T2, T2, &all_arguments_processed); | 532 __ beq(T2, T1, &all_arguments_processed); |
| 542 } | 533 } |
| 543 | 534 |
| 544 __ Bind(&wrong_num_arguments); | 535 __ Bind(&wrong_num_arguments); |
| 545 if (StackSize() != 0) { | 536 if (StackSize() != 0) { |
| 546 // We need to unwind the space we reserved for locals and copied parameters. | 537 // We need to unwind the space we reserved for locals and copied parameters. |
| 547 // The NoSuchMethodFunction stub does not expect to see that area on the | 538 // The NoSuchMethodFunction stub does not expect to see that area on the |
| 548 // stack. | 539 // stack. |
| 549 __ addiu(SP, SP, Immediate(StackSize() * kWordSize)); | 540 __ addiu(SP, SP, Immediate(StackSize() * kWordSize)); |
| 550 } | 541 } |
| 551 // The call below has an empty stackmap because we have just | 542 // The call below has an empty stackmap because we have just |
| (...skipping 31 matching lines...) |
| 583 // Nullify originally passed arguments only after they have been copied and | 574 // Nullify originally passed arguments only after they have been copied and |
| 584 // checked; otherwise noSuchMethod would not see their original values. | 575 // checked; otherwise noSuchMethod would not see their original values. |
| 585 // This step can be skipped in case we decide that formal parameters are | 576 // This step can be skipped in case we decide that formal parameters are |
| 586 // implicitly final, since garbage collecting the unmodified value is not | 577 // implicitly final, since garbage collecting the unmodified value is not |
| 587 // an issue anymore. | 578 // an issue anymore. |
| 588 | 579 |
| 589 // S4 : arguments descriptor array. | 580 // S4 : arguments descriptor array. |
| 590 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 581 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
| 591 __ SmiUntag(T2); | 582 __ SmiUntag(T2); |
| 592 | 583 |
| 593 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 584 __ LoadImmediate(T0, reinterpret_cast<intptr_t>(Object::null())); |
| 594 Label null_args_loop, null_args_loop_condition; | 585 Label null_args_loop, null_args_loop_condition; |
| 595 __ b(&null_args_loop_condition); | 586 __ b(&null_args_loop_condition); |
| 596 __ delay_slot()->addiu(T1, FP, Immediate(kLastParamSlotIndex * kWordSize)); | 587 __ delay_slot()->addiu(T1, FP, Immediate(kLastParamSlotIndex * kWordSize)); |
| 597 __ Bind(&null_args_loop); | 588 __ Bind(&null_args_loop); |
| 598 __ addu(T3, T1, T2); | 589 __ addu(T3, T1, T2); |
| 599 __ sw(TMP, Address(T3)); | 590 __ sw(T0, Address(T3)); |
| 600 __ Bind(&null_args_loop_condition); | 591 __ Bind(&null_args_loop_condition); |
| 601 __ addiu(T2, T2, Immediate(-kWordSize)); | 592 __ addiu(T2, T2, Immediate(-kWordSize)); |
| 602 __ bgez(T2, &null_args_loop); | 593 __ bgez(T2, &null_args_loop); |
| 603 } | 594 } |
| 604 | 595 |
| 605 | 596 |
| 606 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { | 597 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { |
| 607 UNIMPLEMENTED(); | 598 UNIMPLEMENTED(); |
| 608 } | 599 } |
| 609 | 600 |
| 610 | 601 |
| 611 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { | 602 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { |
| 612 UNIMPLEMENTED(); | 603 UNIMPLEMENTED(); |
| 613 } | 604 } |
| 614 | 605 |
| 615 | 606 |
| 616 void FlowGraphCompiler::EmitFrameEntry() { | 607 void FlowGraphCompiler::EmitFrameEntry() { |
| 617 const Function& function = parsed_function().function(); | 608 const Function& function = parsed_function().function(); |
| 618 if (CanOptimizeFunction() && function.is_optimizable()) { | 609 if (CanOptimizeFunction() && function.is_optimizable()) { |
| 619 const bool can_optimize = !is_optimizing() || may_reoptimize(); | 610 const bool can_optimize = !is_optimizing() || may_reoptimize(); |
| 620 const Register function_reg = T0; | 611 const Register function_reg = T0; |
| 621 if (can_optimize) { | 612 if (can_optimize) { |
| 622 Label next; | 613 Label next; |
| 623 // The pool pointer is not set up before entering the Dart frame. | 614 // The pool pointer is not set up before entering the Dart frame. |
| 624 | 615 |
| 625 __ mov(TMP, RA); // Save RA. | 616 __ mov(TMP1, RA); // Save RA. |
| 626 __ bal(&next); // Branch and link to next instruction to get PC in RA. | 617 __ bal(&next); // Branch and link to next instruction to get PC in RA. |
| 627 __ delay_slot()->mov(T2, RA); // Save PC of the following mov. | 618 __ delay_slot()->mov(T2, RA); // Save PC of the following mov. |
| 628 | 619 |
| 629 // Calculate offset of pool pointer from the PC. | 620 // Calculate offset of pool pointer from the PC. |
| 630 const intptr_t object_pool_pc_dist = | 621 const intptr_t object_pool_pc_dist = |
| 631 Instructions::HeaderSize() - Instructions::object_pool_offset() + | 622 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
| 632 assembler()->CodeSize(); | 623 assembler()->CodeSize(); |
| 633 | 624 |
| 634 __ Bind(&next); | 625 __ Bind(&next); |
| 635 __ mov(RA, TMP); // Restore RA. | 626 __ mov(RA, TMP1); // Restore RA. |
| 636 | 627 |
| 637 // Preserve PP of caller. | 628 // Preserve PP of caller. |
| 638 __ mov(T1, PP); | 629 __ mov(T1, PP); |
| 639 | 630 |
| 640 // Temporarily set up the pool pointer for this Dart function. | 631 // Temporarily set up the pool pointer for this Dart function. |
| 641 __ lw(PP, Address(T2, -object_pool_pc_dist)); | 632 __ lw(PP, Address(T2, -object_pool_pc_dist)); |
| 642 | 633 |
| 643 // Load function object from object pool. | 634 // Load function object from object pool. |
| 644 __ LoadObject(function_reg, function); // Uses PP. | 635 __ LoadObject(function_reg, function); // Uses PP. |
| 645 | 636 |
| (...skipping 13 matching lines...) |
| 659 __ addiu(T1, T1, Immediate(1)); | 650 __ addiu(T1, T1, Immediate(1)); |
| 660 __ sw(T1, FieldAddress(function_reg, | 651 __ sw(T1, FieldAddress(function_reg, |
| 661 Function::usage_counter_offset())); | 652 Function::usage_counter_offset())); |
| 662 } else { | 653 } else { |
| 663 __ lw(T1, FieldAddress(function_reg, | 654 __ lw(T1, FieldAddress(function_reg, |
| 664 Function::usage_counter_offset())); | 655 Function::usage_counter_offset())); |
| 665 } | 656 } |
| 666 | 657 |
| 667 // Skip the branch if T1 is less than the threshold. | 658 // Skip the branch if T1 is less than the threshold. |
| 668 Label dont_branch; | 659 Label dont_branch; |
| 669 __ LoadImmediate(T2, FLAG_optimization_counter_threshold); | 660 __ BranchLess(T1, FLAG_optimization_counter_threshold, &dont_branch); |
| 670 __ sltu(T2, T1, T2); | |
| 671 __ bgtz(T2, &dont_branch); | |
| 672 | 661 |
| 673 ASSERT(function_reg == T0); | 662 ASSERT(function_reg == T0); |
| 674 __ Branch(&StubCode::OptimizeFunctionLabel()); | 663 __ Branch(&StubCode::OptimizeFunctionLabel()); |
| 675 | 664 |
| 676 __ Bind(&dont_branch); | 665 __ Bind(&dont_branch); |
| 677 } | 666 } |
| 678 } else { | 667 } else { |
| 679 AddCurrentDescriptor(PcDescriptors::kEntryPatch, | 668 AddCurrentDescriptor(PcDescriptors::kEntryPatch, |
| 680 Isolate::kNoDeoptId, | 669 Isolate::kNoDeoptId, |
| 681 0); // No token position. | 670 0); // No token position. |
| (...skipping 40 matching lines...) |
| 722 ASSERT(!parsed_function().function().HasOptionalParameters()); | 711 ASSERT(!parsed_function().function().HasOptionalParameters()); |
| 723 const bool check_arguments = true; | 712 const bool check_arguments = true; |
| 724 #else | 713 #else |
| 725 const bool check_arguments = function.IsClosureFunction(); | 714 const bool check_arguments = function.IsClosureFunction(); |
| 726 #endif | 715 #endif |
| 727 if (check_arguments) { | 716 if (check_arguments) { |
| 728 __ Comment("Check argument count"); | 717 __ Comment("Check argument count"); |
| 729 // Check that exactly num_fixed arguments are passed in. | 718 // Check that exactly num_fixed arguments are passed in. |
| 730 Label correct_num_arguments, wrong_num_arguments; | 719 Label correct_num_arguments, wrong_num_arguments; |
| 731 __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 720 __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
| 732 __ LoadImmediate(T1, Smi::RawValue(num_fixed_params)); | 721 __ BranchNotEqual(T0, Smi::RawValue(num_fixed_params), |
| 733 __ bne(T0, T1, &wrong_num_arguments); | 722 &wrong_num_arguments); |
| 734 | 723 |
| 735 __ lw(T1, FieldAddress(S4, | 724 __ lw(T1, FieldAddress(S4, |
| 736 ArgumentsDescriptor::positional_count_offset())); | 725 ArgumentsDescriptor::positional_count_offset())); |
| 737 __ beq(T0, T1, &correct_num_arguments); | 726 __ beq(T0, T1, &correct_num_arguments); |
| 738 __ Bind(&wrong_num_arguments); | 727 __ Bind(&wrong_num_arguments); |
| 739 if (function.IsClosureFunction()) { | 728 if (function.IsClosureFunction()) { |
| 740 if (StackSize() != 0) { | 729 if (StackSize() != 0) { |
| 741 // We need to unwind the space we reserved for locals and copied | 730 // We need to unwind the space we reserved for locals and copied |
| 742 // parameters. The NoSuchMethodFunction stub does not expect to see | 731 // parameters. The NoSuchMethodFunction stub does not expect to see |
| 743 // that area on the stack. | 732 // that area on the stack. |
| (...skipping 363 matching lines...) |
| 1107 | 1096 |
| 1108 | 1097 |
| 1109 void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) { | 1098 void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) { |
| 1110 UNIMPLEMENTED(); | 1099 UNIMPLEMENTED(); |
| 1111 } | 1100 } |
| 1112 | 1101 |
| 1113 | 1102 |
| 1114 } // namespace dart | 1103 } // namespace dart |
| 1115 | 1104 |
| 1116 #endif // defined TARGET_ARCH_MIPS | 1105 #endif // defined TARGET_ARCH_MIPS |
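
Note on the pattern in this CL: two-instruction sequences of the form LoadImmediate(TMP, imm) followed by beq/bne(reg, TMP, label) (and the slt-based less/greater variants) are folded into single BranchEqual / BranchNotEqual / BranchLess / BranchGreater macro calls. Below is a minimal sketch of what these assembler helpers are assumed to look like, inferred from the call sites in the diff rather than taken from the actual macro-assembler source; delay slots are assumed to be handled by the underlying branch emitters.

```cpp
// Sketch only: assumed MIPS macro-assembler helpers matching the call sites
// in this CL. Each materializes the constant in TMP and branches on the
// comparison, so callers must treat TMP as clobbered.
void Assembler::BranchEqual(Register rd, int32_t imm, Label* l) {
  LoadImmediate(TMP, imm);  // TMP := imm (lui/ori pair when needed).
  beq(rd, TMP, l);          // Taken when rd == imm.
}

void Assembler::BranchNotEqual(Register rd, int32_t imm, Label* l) {
  LoadImmediate(TMP, imm);
  bne(rd, TMP, l);          // Taken when rd != imm.
}

void Assembler::BranchLess(Register rd, int32_t imm, Label* l) {
  LoadImmediate(TMP, imm);
  slt(TMP, rd, TMP);        // TMP := (rd < imm) ? 1 : 0, signed compare.
  bne(TMP, ZR, l);          // Taken when rd < imm.
}

void Assembler::BranchGreater(Register rd, int32_t imm, Label* l) {
  LoadImmediate(TMP, imm);
  slt(TMP, TMP, rd);        // TMP := (imm < rd) ? 1 : 0.
  bne(TMP, ZR, l);          // Taken when rd > imm.
}

// Overload assumed for the call sites that pass an Object handle, e.g.
// Object::transition_sentinel() or a parameter-name Symbol: the constant
// is loaded from the object pool instead of being encoded as an immediate.
void Assembler::BranchEqual(Register rd, const Object& obj, Label* l) {
  LoadObject(TMP, obj);     // Clobbers TMP; requires PP to be set up.
  beq(rd, TMP, l);
}
```

Besides shrinking the call sites, this keeps the TMP clobber in one place, which is presumably also why EmitFrameEntry now parks RA in TMP1 rather than TMP, and why the argument-copy and null_args loops switch from TMP to T3/T0 as scratch registers: once the helpers are in play, TMP can no longer be assumed to survive across a comparison.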