| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 43 Isolate* isolate, | 43 Isolate* isolate, |
| 44 CodeStubInterfaceDescriptor* descriptor) { | 44 CodeStubInterfaceDescriptor* descriptor) { |
| 45 static Register registers[] = { a2 }; | 45 static Register registers[] = { a2 }; |
| 46 descriptor->register_param_count_ = 1; | 46 descriptor->register_param_count_ = 1; |
| 47 descriptor->register_params_ = registers; | 47 descriptor->register_params_ = registers; |
| 48 descriptor->deoptimization_handler_ = | 48 descriptor->deoptimization_handler_ = |
| 49 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry; | 49 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry; |
| 50 } | 50 } |
| 51 | 51 |
| 52 | 52 |
| 53 void FastNewContextStub::InitializeInterfaceDescriptor( |
| 54 Isolate* isolate, |
| 55 CodeStubInterfaceDescriptor* descriptor) { |
| 56 static Register registers[] = { a1 }; |
| 57 descriptor->register_param_count_ = 1; |
| 58 descriptor->register_params_ = registers; |
| 59 descriptor->deoptimization_handler_ = NULL; |
| 60 } |
| 61 |
| 62 |
| 53 void ToNumberStub::InitializeInterfaceDescriptor( | 63 void ToNumberStub::InitializeInterfaceDescriptor( |
| 54 Isolate* isolate, | 64 Isolate* isolate, |
| 55 CodeStubInterfaceDescriptor* descriptor) { | 65 CodeStubInterfaceDescriptor* descriptor) { |
| 56 static Register registers[] = { a0 }; | 66 static Register registers[] = { a0 }; |
| 57 descriptor->register_param_count_ = 1; | 67 descriptor->register_param_count_ = 1; |
| 58 descriptor->register_params_ = registers; | 68 descriptor->register_params_ = registers; |
| 59 descriptor->deoptimization_handler_ = NULL; | 69 descriptor->deoptimization_handler_ = NULL; |
| 60 } | 70 } |
| 61 | 71 |
| 62 | 72 |
| (...skipping 26 matching lines...) Expand all Loading... |
| 89 descriptor->register_param_count_ = 4; | 99 descriptor->register_param_count_ = 4; |
| 90 descriptor->register_params_ = registers; | 100 descriptor->register_params_ = registers; |
| 91 descriptor->deoptimization_handler_ = | 101 descriptor->deoptimization_handler_ = |
| 92 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; | 102 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; |
| 93 } | 103 } |
| 94 | 104 |
| 95 | 105 |
| 96 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( | 106 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( |
| 97 Isolate* isolate, | 107 Isolate* isolate, |
| 98 CodeStubInterfaceDescriptor* descriptor) { | 108 CodeStubInterfaceDescriptor* descriptor) { |
| 99 static Register registers[] = { a2 }; | 109 static Register registers[] = { a2, a3 }; |
| 100 descriptor->register_param_count_ = 1; | 110 descriptor->register_param_count_ = 2; |
| 101 descriptor->register_params_ = registers; | 111 descriptor->register_params_ = registers; |
| 102 descriptor->deoptimization_handler_ = NULL; | 112 descriptor->deoptimization_handler_ = NULL; |
| 103 } | 113 } |
| 104 | 114 |
| 105 | 115 |
| 106 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( | 116 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( |
| 107 Isolate* isolate, | 117 Isolate* isolate, |
| 108 CodeStubInterfaceDescriptor* descriptor) { | 118 CodeStubInterfaceDescriptor* descriptor) { |
| 109 static Register registers[] = { a1, a0 }; | 119 static Register registers[] = { a1, a0 }; |
| 110 descriptor->register_param_count_ = 2; | 120 descriptor->register_param_count_ = 2; |
| 111 descriptor->register_params_ = registers; | 121 descriptor->register_params_ = registers; |
| 112 descriptor->deoptimization_handler_ = | 122 descriptor->deoptimization_handler_ = |
| 113 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); | 123 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); |
| 114 } | 124 } |
| 115 | 125 |
| 116 | 126 |
| 117 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( | 127 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( |
| 118 Isolate* isolate, | 128 Isolate* isolate, |
| 119 CodeStubInterfaceDescriptor* descriptor) { | 129 CodeStubInterfaceDescriptor* descriptor) { |
| 120 static Register registers[] = {a1, a0 }; | 130 static Register registers[] = {a1, a0 }; |
| 121 descriptor->register_param_count_ = 2; | 131 descriptor->register_param_count_ = 2; |
| 122 descriptor->register_params_ = registers; | 132 descriptor->register_params_ = registers; |
| 123 descriptor->deoptimization_handler_ = | 133 descriptor->deoptimization_handler_ = |
| 124 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); | 134 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); |
| 125 } | 135 } |
| 126 | 136 |
| 127 | 137 |
| 138 void RegExpConstructResultStub::InitializeInterfaceDescriptor( |
| 139 Isolate* isolate, |
| 140 CodeStubInterfaceDescriptor* descriptor) { |
| 141 static Register registers[] = { a2, a1, a0 }; |
| 142 descriptor->register_param_count_ = 3; |
| 143 descriptor->register_params_ = registers; |
| 144 descriptor->deoptimization_handler_ = |
| 145 Runtime::FunctionForId(Runtime::kRegExpConstructResult)->entry; |
| 146 } |
| 147 |
| 148 |
| 128 void LoadFieldStub::InitializeInterfaceDescriptor( | 149 void LoadFieldStub::InitializeInterfaceDescriptor( |
| 129 Isolate* isolate, | 150 Isolate* isolate, |
| 130 CodeStubInterfaceDescriptor* descriptor) { | 151 CodeStubInterfaceDescriptor* descriptor) { |
| 131 static Register registers[] = { a0 }; | 152 static Register registers[] = { a0 }; |
| 132 descriptor->register_param_count_ = 1; | 153 descriptor->register_param_count_ = 1; |
| 133 descriptor->register_params_ = registers; | 154 descriptor->register_params_ = registers; |
| 134 descriptor->deoptimization_handler_ = NULL; | 155 descriptor->deoptimization_handler_ = NULL; |
| 135 } | 156 } |
| 136 | 157 |
| 137 | 158 |
| 138 void KeyedLoadFieldStub::InitializeInterfaceDescriptor( | 159 void KeyedLoadFieldStub::InitializeInterfaceDescriptor( |
| 139 Isolate* isolate, | 160 Isolate* isolate, |
| 140 CodeStubInterfaceDescriptor* descriptor) { | 161 CodeStubInterfaceDescriptor* descriptor) { |
| 141 static Register registers[] = { a1 }; | 162 static Register registers[] = { a1 }; |
| 142 descriptor->register_param_count_ = 1; | 163 descriptor->register_param_count_ = 1; |
| 143 descriptor->register_params_ = registers; | 164 descriptor->register_params_ = registers; |
| 144 descriptor->deoptimization_handler_ = NULL; | 165 descriptor->deoptimization_handler_ = NULL; |
| 145 } | 166 } |
| 146 | 167 |
| 147 | 168 |
| 148 void KeyedArrayCallStub::InitializeInterfaceDescriptor( | |
| 149 Isolate* isolate, | |
| 150 CodeStubInterfaceDescriptor* descriptor) { | |
| 151 static Register registers[] = { a2 }; | |
| 152 descriptor->register_param_count_ = 1; | |
| 153 descriptor->register_params_ = registers; | |
| 154 descriptor->continuation_type_ = TAIL_CALL_CONTINUATION; | |
| 155 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS; | |
| 156 descriptor->deoptimization_handler_ = | |
| 157 FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure); | |
| 158 } | |
| 159 | |
| 160 | |
| 161 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( | 169 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( |
| 162 Isolate* isolate, | 170 Isolate* isolate, |
| 163 CodeStubInterfaceDescriptor* descriptor) { | 171 CodeStubInterfaceDescriptor* descriptor) { |
| 164 static Register registers[] = { a2, a1, a0 }; | 172 static Register registers[] = { a2, a1, a0 }; |
| 165 descriptor->register_param_count_ = 3; | 173 descriptor->register_param_count_ = 3; |
| 166 descriptor->register_params_ = registers; | 174 descriptor->register_params_ = registers; |
| 167 descriptor->deoptimization_handler_ = | 175 descriptor->deoptimization_handler_ = |
| 168 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure); | 176 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure); |
| 169 } | 177 } |
| 170 | 178 |
| (...skipping 23 matching lines...) Expand all Loading... |
| 194 } | 202 } |
| 195 | 203 |
| 196 | 204 |
| 197 static void InitializeArrayConstructorDescriptor( | 205 static void InitializeArrayConstructorDescriptor( |
| 198 Isolate* isolate, | 206 Isolate* isolate, |
| 199 CodeStubInterfaceDescriptor* descriptor, | 207 CodeStubInterfaceDescriptor* descriptor, |
| 200 int constant_stack_parameter_count) { | 208 int constant_stack_parameter_count) { |
| 201 // register state | 209 // register state |
| 202 // a0 -- number of arguments | 210 // a0 -- number of arguments |
| 203 // a1 -- function | 211 // a1 -- function |
| 204 // a2 -- type info cell with elements kind | 212 // a2 -- allocation site with elements kind |
| 205 static Register registers_variable_args[] = { a1, a2, a0 }; | 213 static Register registers_variable_args[] = { a1, a2, a0 }; |
| 206 static Register registers_no_args[] = { a1, a2 }; | 214 static Register registers_no_args[] = { a1, a2 }; |
| 207 | 215 |
| 208 if (constant_stack_parameter_count == 0) { | 216 if (constant_stack_parameter_count == 0) { |
| 209 descriptor->register_param_count_ = 2; | 217 descriptor->register_param_count_ = 2; |
| 210 descriptor->register_params_ = registers_no_args; | 218 descriptor->register_params_ = registers_no_args; |
| 211 } else { | 219 } else { |
| 212 // stack param count needs (constructor pointer, and single argument) | 220 // stack param count needs (constructor pointer, and single argument) |
| 213 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS; | 221 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS; |
| 214 descriptor->stack_parameter_count_ = a0; | 222 descriptor->stack_parameter_count_ = a0; |
| (...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 344 Isolate* isolate, | 352 Isolate* isolate, |
| 345 CodeStubInterfaceDescriptor* descriptor) { | 353 CodeStubInterfaceDescriptor* descriptor) { |
| 346 static Register registers[] = { a2, a1, a0 }; | 354 static Register registers[] = { a2, a1, a0 }; |
| 347 descriptor->register_param_count_ = 3; | 355 descriptor->register_param_count_ = 3; |
| 348 descriptor->register_params_ = registers; | 356 descriptor->register_params_ = registers; |
| 349 descriptor->deoptimization_handler_ = | 357 descriptor->deoptimization_handler_ = |
| 350 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite); | 358 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite); |
| 351 } | 359 } |
| 352 | 360 |
| 353 | 361 |
| 354 void NewStringAddStub::InitializeInterfaceDescriptor( | 362 void StringAddStub::InitializeInterfaceDescriptor( |
| 355 Isolate* isolate, | 363 Isolate* isolate, |
| 356 CodeStubInterfaceDescriptor* descriptor) { | 364 CodeStubInterfaceDescriptor* descriptor) { |
| 357 static Register registers[] = { a1, a0 }; | 365 static Register registers[] = { a1, a0 }; |
| 358 descriptor->register_param_count_ = 2; | 366 descriptor->register_param_count_ = 2; |
| 359 descriptor->register_params_ = registers; | 367 descriptor->register_params_ = registers; |
| 360 descriptor->deoptimization_handler_ = | 368 descriptor->deoptimization_handler_ = |
| 361 Runtime::FunctionForId(Runtime::kStringAdd)->entry; | 369 Runtime::FunctionForId(Runtime::kStringAdd)->entry; |
| 362 } | 370 } |
| 363 | 371 |
| 364 | 372 |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 402 a2, // name | 410 a2, // name |
| 403 }; | 411 }; |
| 404 static Representation representations[] = { | 412 static Representation representations[] = { |
| 405 Representation::Tagged(), // context | 413 Representation::Tagged(), // context |
| 406 Representation::Tagged(), // name | 414 Representation::Tagged(), // name |
| 407 }; | 415 }; |
| 408 descriptor->register_param_count_ = 2; | 416 descriptor->register_param_count_ = 2; |
| 409 descriptor->register_params_ = registers; | 417 descriptor->register_params_ = registers; |
| 410 descriptor->param_representations_ = representations; | 418 descriptor->param_representations_ = representations; |
| 411 } | 419 } |
| 420 { |
| 421 CallInterfaceDescriptor* descriptor = |
| 422 isolate->call_descriptor(Isolate::CallHandler); |
| 423 static Register registers[] = { cp, // context |
| 424 a0, // receiver |
| 425 }; |
| 426 static Representation representations[] = { |
| 427 Representation::Tagged(), // context |
| 428 Representation::Tagged(), // receiver |
| 429 }; |
| 430 descriptor->register_param_count_ = 2; |
| 431 descriptor->register_params_ = registers; |
| 432 descriptor->param_representations_ = representations; |
| 433 } |
| 434 { |
| 435 CallInterfaceDescriptor* descriptor = |
| 436 isolate->call_descriptor(Isolate::ApiFunctionCall); |
| 437 static Register registers[] = { a0, // callee |
| 438 t0, // call_data |
| 439 a2, // holder |
| 440 a1, // api_function_address |
| 441 cp, // context |
| 442 }; |
| 443 static Representation representations[] = { |
| 444 Representation::Tagged(), // callee |
| 445 Representation::Tagged(), // call_data |
| 446 Representation::Tagged(), // holder |
| 447 Representation::External(), // api_function_address |
| 448 Representation::Tagged(), // context |
| 449 }; |
| 450 descriptor->register_param_count_ = 5; |
| 451 descriptor->register_params_ = registers; |
| 452 descriptor->param_representations_ = representations; |
| 453 } |
| 412 } | 454 } |
| 413 | 455 |
| 414 | 456 |
| 415 #define __ ACCESS_MASM(masm) | 457 #define __ ACCESS_MASM(masm) |
| 416 | 458 |
| 417 | 459 |
| 418 static void EmitIdenticalObjectComparison(MacroAssembler* masm, | 460 static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
| 419 Label* slow, | 461 Label* slow, |
| 420 Condition cc); | 462 Condition cc); |
| 421 static void EmitSmiNonsmiComparison(MacroAssembler* masm, | 463 static void EmitSmiNonsmiComparison(MacroAssembler* masm, |
| (...skipping 12 matching lines...) Expand all Loading... |
| 434 Isolate* isolate = masm->isolate(); | 476 Isolate* isolate = masm->isolate(); |
| 435 isolate->counters()->code_stubs()->Increment(); | 477 isolate->counters()->code_stubs()->Increment(); |
| 436 | 478 |
| 437 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); | 479 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); |
| 438 int param_count = descriptor->register_param_count_; | 480 int param_count = descriptor->register_param_count_; |
| 439 { | 481 { |
| 440 // Call the runtime system in a fresh internal frame. | 482 // Call the runtime system in a fresh internal frame. |
| 441 FrameScope scope(masm, StackFrame::INTERNAL); | 483 FrameScope scope(masm, StackFrame::INTERNAL); |
| 442 ASSERT(descriptor->register_param_count_ == 0 || | 484 ASSERT(descriptor->register_param_count_ == 0 || |
| 443 a0.is(descriptor->register_params_[param_count - 1])); | 485 a0.is(descriptor->register_params_[param_count - 1])); |
| 444 // Push arguments | 486 // Push arguments, adjust sp. |
| 487 __ Subu(sp, sp, Operand(param_count * kPointerSize)); |
| 445 for (int i = 0; i < param_count; ++i) { | 488 for (int i = 0; i < param_count; ++i) { |
| 446 __ push(descriptor->register_params_[i]); | 489 // Store argument to stack. |
| 490 __ sw(descriptor->register_params_[i], |
| 491 MemOperand(sp, (param_count-1-i) * kPointerSize)); |
| 447 } | 492 } |
| 448 ExternalReference miss = descriptor->miss_handler(); | 493 ExternalReference miss = descriptor->miss_handler(); |
| 449 __ CallExternalReference(miss, descriptor->register_param_count_); | 494 __ CallExternalReference(miss, descriptor->register_param_count_); |
| 450 } | 495 } |
| 451 | 496 |
| 452 __ Ret(); | 497 __ Ret(); |
| 453 } | 498 } |
| 454 | 499 |
| 455 | 500 |
| 456 void FastNewContextStub::Generate(MacroAssembler* masm) { | |
| 457 // Try to allocate the context in new space. | |
| 458 Label gc; | |
| 459 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | |
| 460 | |
| 461 // Attempt to allocate the context in new space. | |
| 462 __ Allocate(FixedArray::SizeFor(length), v0, a1, a2, &gc, TAG_OBJECT); | |
| 463 | |
| 464 // Load the function from the stack. | |
| 465 __ lw(a3, MemOperand(sp, 0)); | |
| 466 | |
| 467 // Set up the object header. | |
| 468 __ LoadRoot(a1, Heap::kFunctionContextMapRootIndex); | |
| 469 __ li(a2, Operand(Smi::FromInt(length))); | |
| 470 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | |
| 471 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | |
| 472 | |
| 473 // Set up the fixed slots, copy the global object from the previous context. | |
| 474 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | |
| 475 __ li(a1, Operand(Smi::FromInt(0))); | |
| 476 __ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX))); | |
| 477 __ sw(cp, MemOperand(v0, Context::SlotOffset(Context::PREVIOUS_INDEX))); | |
| 478 __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::EXTENSION_INDEX))); | |
| 479 __ sw(a2, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | |
| 480 | |
| 481 // Initialize the rest of the slots to undefined. | |
| 482 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | |
| 483 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | |
| 484 __ sw(a1, MemOperand(v0, Context::SlotOffset(i))); | |
| 485 } | |
| 486 | |
| 487 // Remove the on-stack argument and return. | |
| 488 __ mov(cp, v0); | |
| 489 __ DropAndRet(1); | |
| 490 | |
| 491 // Need to collect. Call into runtime system. | |
| 492 __ bind(&gc); | |
| 493 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); | |
| 494 } | |
| 495 | |
| 496 | |
| 497 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | |
| 498 // Stack layout on entry: | |
| 499 // | |
| 500 // [sp]: function. | |
| 501 // [sp + kPointerSize]: serialized scope info | |
| 502 | |
| 503 // Try to allocate the context in new space. | |
| 504 Label gc; | |
| 505 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | |
| 506 __ Allocate(FixedArray::SizeFor(length), v0, a1, a2, &gc, TAG_OBJECT); | |
| 507 | |
| 508 // Load the function from the stack. | |
| 509 __ lw(a3, MemOperand(sp, 0)); | |
| 510 | |
| 511 // Load the serialized scope info from the stack. | |
| 512 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); | |
| 513 | |
| 514 // Set up the object header. | |
| 515 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex); | |
| 516 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); | |
| 517 __ li(a2, Operand(Smi::FromInt(length))); | |
| 518 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | |
| 519 | |
| 520 // If this block context is nested in the native context we get a smi | |
| 521 // sentinel instead of a function. The block context should get the | |
| 522 // canonical empty function of the native context as its closure which | |
| 523 // we still have to look up. | |
| 524 Label after_sentinel; | |
| 525 __ JumpIfNotSmi(a3, &after_sentinel); | |
| 526 if (FLAG_debug_code) { | |
| 527 __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg)); | |
| 528 } | |
| 529 __ lw(a3, GlobalObjectOperand()); | |
| 530 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); | |
| 531 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX)); | |
| 532 __ bind(&after_sentinel); | |
| 533 | |
| 534 // Set up the fixed slots, copy the global object from the previous context. | |
| 535 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
| 536 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX)); | |
| 537 __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX)); | |
| 538 __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX)); | |
| 539 __ sw(a2, ContextOperand(v0, Context::GLOBAL_OBJECT_INDEX)); | |
| 540 | |
| 541 // Initialize the rest of the slots to the hole value. | |
| 542 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); | |
| 543 for (int i = 0; i < slots_; i++) { | |
| 544 __ sw(a1, ContextOperand(v0, i + Context::MIN_CONTEXT_SLOTS)); | |
| 545 } | |
| 546 | |
| 547 // Remove the on-stack argument and return. | |
| 548 __ mov(cp, v0); | |
| 549 __ DropAndRet(2); | |
| 550 | |
| 551 // Need to collect. Call into runtime system. | |
| 552 __ bind(&gc); | |
| 553 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | |
| 554 } | |
| 555 | |
| 556 | |
| 557 // Takes a Smi and converts to an IEEE 64 bit floating point value in two | 501 // Takes a Smi and converts to an IEEE 64 bit floating point value in two |
| 558 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and | 502 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and |
| 559 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a | 503 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a |
| 560 // scratch register. Destroys the source register. No GC occurs during this | 504 // scratch register. Destroys the source register. No GC occurs during this |
| 561 // stub so you don't have to set up the frame. | 505 // stub so you don't have to set up the frame. |
| 562 class ConvertToDoubleStub : public PlatformCodeStub { | 506 class ConvertToDoubleStub : public PlatformCodeStub { |
| 563 public: | 507 public: |
| 564 ConvertToDoubleStub(Register result_reg_1, | 508 ConvertToDoubleStub(Register result_reg_1, |
| 565 Register result_reg_2, | 509 Register result_reg_2, |
| 566 Register source_reg, | 510 Register source_reg, |
| (...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 676 // Load double input. | 620 // Load double input. |
| 677 __ ldc1(double_scratch, MemOperand(input_reg, double_offset)); | 621 __ ldc1(double_scratch, MemOperand(input_reg, double_offset)); |
| 678 | 622 |
| 679 // Clear cumulative exception flags and save the FCSR. | 623 // Clear cumulative exception flags and save the FCSR. |
| 680 __ cfc1(scratch2, FCSR); | 624 __ cfc1(scratch2, FCSR); |
| 681 __ ctc1(zero_reg, FCSR); | 625 __ ctc1(zero_reg, FCSR); |
| 682 | 626 |
| 683 // Try a conversion to a signed integer. | 627 // Try a conversion to a signed integer. |
| 684 __ Trunc_w_d(double_scratch, double_scratch); | 628 __ Trunc_w_d(double_scratch, double_scratch); |
| 685 // Move the converted value into the result register. | 629 // Move the converted value into the result register. |
| 686 __ mfc1(result_reg, double_scratch); | 630 __ mfc1(scratch3, double_scratch); |
| 687 | 631 |
| 688 // Retrieve and restore the FCSR. | 632 // Retrieve and restore the FCSR. |
| 689 __ cfc1(scratch, FCSR); | 633 __ cfc1(scratch, FCSR); |
| 690 __ ctc1(scratch2, FCSR); | 634 __ ctc1(scratch2, FCSR); |
| 691 | 635 |
| 692 // Check for overflow and NaNs. | 636 // Check for overflow and NaNs. |
| 693 __ And( | 637 __ And( |
| 694 scratch, scratch, | 638 scratch, scratch, |
| 695 kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | 639 kFCSROverflowFlagMask | kFCSRUnderflowFlagMask |
| 696 | kFCSRInvalidOpFlagMask); | 640 | kFCSRInvalidOpFlagMask); |
| 697 // If we had no exceptions we are done. | 641 // If we had no exceptions then set result_reg and we are done. |
| 698 __ Branch(&done, eq, scratch, Operand(zero_reg)); | 642 Label error; |
| 643 __ Branch(&error, ne, scratch, Operand(zero_reg)); |
| 644 __ Move(result_reg, scratch3); |
| 645 __ Branch(&done); |
| 646 __ bind(&error); |
| 699 } | 647 } |
| 700 | 648 |
| 701 // Load the double value and perform a manual truncation. | 649 // Load the double value and perform a manual truncation. |
| 702 Register input_high = scratch2; | 650 Register input_high = scratch2; |
| 703 Register input_low = scratch3; | 651 Register input_low = scratch3; |
| 704 | 652 |
| 705 __ lw(input_low, MemOperand(input_reg, double_offset)); | 653 __ lw(input_low, MemOperand(input_reg, double_offset)); |
| 706 __ lw(input_high, MemOperand(input_reg, double_offset + kIntSize)); | 654 __ lw(input_high, MemOperand(input_reg, double_offset + kIntSize)); |
| 707 | 655 |
| 708 Label normal_exponent, restore_sign; | 656 Label normal_exponent, restore_sign; |
| (...skipping 2487 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3196 // (9) Sliced string. Replace subject with parent. Go to (4). | 3144 // (9) Sliced string. Replace subject with parent. Go to (4). |
| 3197 // Load offset into t0 and replace subject string with parent. | 3145 // Load offset into t0 and replace subject string with parent. |
| 3198 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); | 3146 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); |
| 3199 __ sra(t0, t0, kSmiTagSize); | 3147 __ sra(t0, t0, kSmiTagSize); |
| 3200 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); | 3148 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); |
| 3201 __ jmp(&check_underlying); // Go to (4). | 3149 __ jmp(&check_underlying); // Go to (4). |
| 3202 #endif // V8_INTERPRETED_REGEXP | 3150 #endif // V8_INTERPRETED_REGEXP |
| 3203 } | 3151 } |
| 3204 | 3152 |
| 3205 | 3153 |
| 3206 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { | |
| 3207 const int kMaxInlineLength = 100; | |
| 3208 Label slowcase; | |
| 3209 Label done; | |
| 3210 __ lw(a1, MemOperand(sp, kPointerSize * 2)); | |
| 3211 STATIC_ASSERT(kSmiTag == 0); | |
| 3212 STATIC_ASSERT(kSmiTagSize == 1); | |
| 3213 __ JumpIfNotSmi(a1, &slowcase); | |
| 3214 __ Branch(&slowcase, hi, a1, Operand(Smi::FromInt(kMaxInlineLength))); | |
| 3215 // Smi-tagging is equivalent to multiplying by 2. | |
| 3216 // Allocate RegExpResult followed by FixedArray with size in ebx. | |
| 3217 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] | |
| 3218 // Elements: [Map][Length][..elements..] | |
| 3219 // Size of JSArray with two in-object properties and the header of a | |
| 3220 // FixedArray. | |
| 3221 int objects_size = | |
| 3222 (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize; | |
| 3223 __ srl(t1, a1, kSmiTagSize + kSmiShiftSize); | |
| 3224 __ Addu(a2, t1, Operand(objects_size)); | |
| 3225 __ Allocate( | |
| 3226 a2, // In: Size, in words. | |
| 3227 v0, // Out: Start of allocation (tagged). | |
| 3228 a3, // Scratch register. | |
| 3229 t0, // Scratch register. | |
| 3230 &slowcase, | |
| 3231 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); | |
| 3232 // v0: Start of allocated area, object-tagged. | |
| 3233 // a1: Number of elements in array, as smi. | |
| 3234 // t1: Number of elements, untagged. | |
| 3235 | |
| 3236 // Set JSArray map to global.regexp_result_map(). | |
| 3237 // Set empty properties FixedArray. | |
| 3238 // Set elements to point to FixedArray allocated right after the JSArray. | |
| 3239 // Interleave operations for better latency. | |
| 3240 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
| 3241 __ Addu(a3, v0, Operand(JSRegExpResult::kSize)); | |
| 3242 __ li(t0, Operand(masm->isolate()->factory()->empty_fixed_array())); | |
| 3243 __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset)); | |
| 3244 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
| 3245 __ lw(a2, ContextOperand(a2, Context::REGEXP_RESULT_MAP_INDEX)); | |
| 3246 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | |
| 3247 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); | |
| 3248 | |
| 3249 // Set input, index and length fields from arguments. | |
| 3250 __ lw(a1, MemOperand(sp, kPointerSize * 0)); | |
| 3251 __ lw(a2, MemOperand(sp, kPointerSize * 1)); | |
| 3252 __ lw(t2, MemOperand(sp, kPointerSize * 2)); | |
| 3253 __ sw(a1, FieldMemOperand(v0, JSRegExpResult::kInputOffset)); | |
| 3254 __ sw(a2, FieldMemOperand(v0, JSRegExpResult::kIndexOffset)); | |
| 3255 __ sw(t2, FieldMemOperand(v0, JSArray::kLengthOffset)); | |
| 3256 | |
| 3257 // Fill out the elements FixedArray. | |
| 3258 // v0: JSArray, tagged. | |
| 3259 // a3: FixedArray, tagged. | |
| 3260 // t1: Number of elements in array, untagged. | |
| 3261 | |
| 3262 // Set map. | |
| 3263 __ li(a2, Operand(masm->isolate()->factory()->fixed_array_map())); | |
| 3264 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset)); | |
| 3265 // Set FixedArray length. | |
| 3266 __ sll(t2, t1, kSmiTagSize); | |
| 3267 __ sw(t2, FieldMemOperand(a3, FixedArray::kLengthOffset)); | |
| 3268 // Fill contents of fixed-array with undefined. | |
| 3269 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
| 3270 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 3271 // Fill fixed array elements with undefined. | |
| 3272 // v0: JSArray, tagged. | |
| 3273 // a2: undefined. | |
| 3274 // a3: Start of elements in FixedArray. | |
| 3275 // t1: Number of elements to fill. | |
| 3276 Label loop; | |
| 3277 __ sll(t1, t1, kPointerSizeLog2); // Convert num elements to num bytes. | |
| 3278 __ addu(t1, t1, a3); // Point past last element to store. | |
| 3279 __ bind(&loop); | |
| 3280 __ Branch(&done, ge, a3, Operand(t1)); // Break when a3 past end of elem. | |
| 3281 __ sw(a2, MemOperand(a3)); | |
| 3282 __ Branch(&loop, USE_DELAY_SLOT); | |
| 3283 __ addiu(a3, a3, kPointerSize); // In branch delay slot. | |
| 3284 | |
| 3285 __ bind(&done); | |
| 3286 __ DropAndRet(3); | |
| 3287 | |
| 3288 __ bind(&slowcase); | |
| 3289 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); | |
| 3290 } | |
| 3291 | |
| 3292 | |
| 3293 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 3154 static void GenerateRecordCallTarget(MacroAssembler* masm) { |
| 3294 // Cache the called function in a global property cell. Cache states | 3155 // Cache the called function in a feedback vector slot. Cache states |
| 3295 // are uninitialized, monomorphic (indicated by a JSFunction), and | 3156 // are uninitialized, monomorphic (indicated by a JSFunction), and |
| 3296 // megamorphic. | 3157 // megamorphic. |
| 3297 // a0 : number of arguments to the construct function | 3158 // a0 : number of arguments to the construct function |
| 3298 // a1 : the function to call | 3159 // a1 : the function to call |
| 3299 // a2 : cache cell for call target | 3160 // a2 : Feedback vector |
| 3161 // a3 : slot in feedback vector (Smi) |
| 3300 Label initialize, done, miss, megamorphic, not_array_function; | 3162 Label initialize, done, miss, megamorphic, not_array_function; |
| 3301 | 3163 |
| 3302 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | 3164 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3303 masm->isolate()->heap()->undefined_value()); | 3165 masm->isolate()->heap()->undefined_value()); |
| 3304 ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()), | 3166 ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()), |
| 3305 masm->isolate()->heap()->the_hole_value()); | 3167 masm->isolate()->heap()->the_hole_value()); |
| 3306 | 3168 |
| 3307 // Load the cache state into a3. | 3169 // Load the cache state into t0. |
| 3308 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 3170 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3171 __ Addu(t0, a2, Operand(t0)); |
| 3172 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 3309 | 3173 |
| 3310 // A monomorphic cache hit or an already megamorphic state: invoke the | 3174 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 3311 // function without changing the state. | 3175 // function without changing the state. |
| 3312 __ Branch(&done, eq, a3, Operand(a1)); | 3176 __ Branch(&done, eq, t0, Operand(a1)); |
| 3313 | 3177 |
| 3314 // If we came here, we need to see if we are the array function. | 3178 // If we came here, we need to see if we are the array function. |
| 3315 // If we didn't have a matching function, and we didn't find the megamorph | 3179 // If we didn't have a matching function, and we didn't find the megamorph |
| 3316 // sentinel, then we have in the cell either some other function or an | 3180 // sentinel, then we have in the slot either some other function or an |
| 3317 // AllocationSite. Do a map check on the object in a3. | 3181 // AllocationSite. Do a map check on the object in a3. |
| 3318 __ lw(t1, FieldMemOperand(a3, 0)); | 3182 __ lw(t1, FieldMemOperand(t0, 0)); |
| 3319 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 3183 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 3320 __ Branch(&miss, ne, t1, Operand(at)); | 3184 __ Branch(&miss, ne, t1, Operand(at)); |
| 3321 | 3185 |
| 3322 // Make sure the function is the Array() function | 3186 // Make sure the function is the Array() function |
| 3323 __ LoadArrayFunction(a3); | 3187 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
| 3324 __ Branch(&megamorphic, ne, a1, Operand(a3)); | 3188 __ Branch(&megamorphic, ne, a1, Operand(t0)); |
| 3325 __ jmp(&done); | 3189 __ jmp(&done); |
| 3326 | 3190 |
| 3327 __ bind(&miss); | 3191 __ bind(&miss); |
| 3328 | 3192 |
| 3329 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 3193 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
| 3330 // megamorphic. | 3194 // megamorphic. |
| 3331 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 3195 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 3332 __ Branch(&initialize, eq, a3, Operand(at)); | 3196 __ Branch(&initialize, eq, t0, Operand(at)); |
| 3333 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 3197 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
| 3334 // write-barrier is needed. | 3198 // write-barrier is needed. |
| 3335 __ bind(&megamorphic); | 3199 __ bind(&megamorphic); |
| 3200 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3201 __ Addu(t0, a2, Operand(t0)); |
| 3336 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 3202 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 3337 __ sw(at, FieldMemOperand(a2, Cell::kValueOffset)); | 3203 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 3338 __ jmp(&done); | 3204 __ jmp(&done); |
| 3339 | 3205 |
| 3340 // An uninitialized cache is patched with the function or sentinel to | 3206 // An uninitialized cache is patched with the function or sentinel to |
| 3341 // indicate the ElementsKind if function is the Array constructor. | 3207 // indicate the ElementsKind if function is the Array constructor. |
| 3342 __ bind(&initialize); | 3208 __ bind(&initialize); |
| 3343 // Make sure the function is the Array() function | 3209 // Make sure the function is the Array() function |
| 3344 __ LoadArrayFunction(a3); | 3210 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
| 3345 __ Branch(¬_array_function, ne, a1, Operand(a3)); | 3211 __ Branch(¬_array_function, ne, a1, Operand(t0)); |
| 3346 | 3212 |
| 3347 // The target function is the Array constructor. | 3213 // The target function is the Array constructor. |
| 3348 // Create an AllocationSite if we don't already have it, store it in the cell. | 3214 // Create an AllocationSite if we don't already have it, store it in the slot. |
| 3349 { | 3215 { |
| 3350 FrameScope scope(masm, StackFrame::INTERNAL); | 3216 FrameScope scope(masm, StackFrame::INTERNAL); |
| 3351 const RegList kSavedRegs = | 3217 const RegList kSavedRegs = |
| 3352 1 << 4 | // a0 | 3218 1 << 4 | // a0 |
| 3353 1 << 5 | // a1 | 3219 1 << 5 | // a1 |
| 3354 1 << 6; // a2 | 3220 1 << 6 | // a2 |
| 3221 1 << 7; // a3 |
| 3355 | 3222 |
| 3356 // Arguments register must be smi-tagged to call out. | 3223 // Arguments register must be smi-tagged to call out. |
| 3357 __ SmiTag(a0); | 3224 __ SmiTag(a0); |
| 3358 __ MultiPush(kSavedRegs); | 3225 __ MultiPush(kSavedRegs); |
| 3359 | 3226 |
| 3360 CreateAllocationSiteStub create_stub; | 3227 CreateAllocationSiteStub create_stub; |
| 3361 __ CallStub(&create_stub); | 3228 __ CallStub(&create_stub); |
| 3362 | 3229 |
| 3363 __ MultiPop(kSavedRegs); | 3230 __ MultiPop(kSavedRegs); |
| 3364 __ SmiUntag(a0); | 3231 __ SmiUntag(a0); |
| 3365 } | 3232 } |
| 3366 __ Branch(&done); | 3233 __ Branch(&done); |
| 3367 | 3234 |
| 3368 __ bind(¬_array_function); | 3235 __ bind(¬_array_function); |
| 3369 __ sw(a1, FieldMemOperand(a2, Cell::kValueOffset)); | 3236 |
| 3370 // No need for a write barrier here - cells are rescanned. | 3237 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3238 __ Addu(t0, a2, Operand(t0)); |
| 3239 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3240 __ sw(a1, MemOperand(t0, 0)); |
| 3241 |
| 3242 __ Push(t0, a2, a1); |
| 3243 __ RecordWrite(a2, t0, a1, kRAHasNotBeenSaved, kDontSaveFPRegs, |
| 3244 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 3245 __ Pop(t0, a2, a1); |
| 3371 | 3246 |
| 3372 __ bind(&done); | 3247 __ bind(&done); |
| 3373 } | 3248 } |
| 3374 | 3249 |
| 3375 | 3250 |
| 3376 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3251 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3377 // a1 : the function to call | 3252 // a1 : the function to call |
| 3378 // a2 : cache cell for call target | 3253 // a2 : feedback vector |
| 3379 Label slow, non_function; | 3254 // a3 : (only if a2 is not undefined) slot in feedback vector (Smi) |
| 3255 Label slow, non_function, wrap, cont; |
| 3380 | 3256 |
| 3381 // Check that the function is really a JavaScript function. | 3257 if (NeedsChecks()) { |
| 3382 // a1: pushed function (to be verified) | 3258 // Check that the function is really a JavaScript function. |
| 3383 __ JumpIfSmi(a1, &non_function); | 3259 // a1: pushed function (to be verified) |
| 3260 __ JumpIfSmi(a1, &non_function); |
| 3384 | 3261 |
| 3385 // Goto slow case if we do not have a function. | 3262 // Goto slow case if we do not have a function. |
| 3386 __ GetObjectType(a1, a3, a3); | 3263 __ GetObjectType(a1, t0, t0); |
| 3387 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); | 3264 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
| 3388 | 3265 |
| 3389 if (RecordCallTarget()) { | 3266 if (RecordCallTarget()) { |
| 3390 GenerateRecordCallTarget(masm); | 3267 GenerateRecordCallTarget(masm); |
| 3268 } |
| 3391 } | 3269 } |
| 3392 | 3270 |
| 3393 // Fast-case: Invoke the function now. | 3271 // Fast-case: Invoke the function now. |
| 3394 // a1: pushed function | 3272 // a1: pushed function |
| 3395 ParameterCount actual(argc_); | 3273 ParameterCount actual(argc_); |
| 3396 | 3274 |
| 3275 if (CallAsMethod()) { |
| 3276 if (NeedsChecks()) { |
| 3277 // Do not transform the receiver for strict mode functions and natives. |
| 3278 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 3279 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); |
| 3280 int32_t strict_mode_function_mask = |
| 3281 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); |
| 3282 int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); |
| 3283 __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); |
| 3284 __ Branch(&cont, ne, at, Operand(zero_reg)); |
| 3285 } |
| 3286 |
| 3287 // Compute the receiver in non-strict mode. |
| 3288 __ lw(a3, MemOperand(sp, argc_ * kPointerSize)); |
| 3289 |
| 3290 if (NeedsChecks()) { |
| 3291 __ JumpIfSmi(a3, &wrap); |
| 3292 __ GetObjectType(a3, t0, t0); |
| 3293 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); |
| 3294 } else { |
| 3295 __ jmp(&wrap); |
| 3296 } |
| 3297 |
| 3298 __ bind(&cont); |
| 3299 } |
| 3397 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); | 3300 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 3398 | 3301 |
| 3399 // Slow-case: Non-function called. | 3302 if (NeedsChecks()) { |
| 3400 __ bind(&slow); | 3303 // Slow-case: Non-function called. |
| 3401 if (RecordCallTarget()) { | 3304 __ bind(&slow); |
| 3402 // If there is a call target cache, mark it megamorphic in the | 3305 if (RecordCallTarget()) { |
| 3403 // non-function case. MegamorphicSentinel is an immortal immovable | 3306 // If there is a call target cache, mark it megamorphic in the |
| 3404 // object (undefined) so no write barrier is needed. | 3307 // non-function case. MegamorphicSentinel is an immortal immovable |
| 3405 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | 3308 // object (undefined) so no write barrier is needed. |
| 3406 masm->isolate()->heap()->undefined_value()); | 3309 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3407 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 3310 masm->isolate()->heap()->undefined_value()); |
| 3408 __ sw(at, FieldMemOperand(a2, Cell::kValueOffset)); | 3311 __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3409 } | 3312 __ Addu(t1, a2, Operand(t1)); |
| 3410 // Check for function proxy. | 3313 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 3411 __ Branch(&non_function, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE)); | 3314 __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize)); |
| 3412 __ push(a1); // Put proxy as additional argument. | 3315 } |
| 3413 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32)); | 3316 // Check for function proxy. |
| 3414 __ li(a2, Operand(0, RelocInfo::NONE32)); | 3317 __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 3415 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); | 3318 __ push(a1); // Put proxy as additional argument. |
| 3416 { | 3319 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32)); |
| 3417 Handle<Code> adaptor = | 3320 __ li(a2, Operand(0, RelocInfo::NONE32)); |
| 3418 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 3321 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); |
| 3419 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 3322 { |
| 3323 Handle<Code> adaptor = |
| 3324 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 3325 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
| 3326 } |
| 3327 |
| 3328 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
| 3329 // of the original receiver from the call site). |
| 3330 __ bind(&non_function); |
| 3331 __ sw(a1, MemOperand(sp, argc_ * kPointerSize)); |
| 3332 __ li(a0, Operand(argc_)); // Set up the number of arguments. |
| 3333 __ li(a2, Operand(0, RelocInfo::NONE32)); |
| 3334 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION); |
| 3335 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 3336 RelocInfo::CODE_TARGET); |
| 3420 } | 3337 } |
| 3421 | 3338 |
| 3422 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 3339 if (CallAsMethod()) { |
| 3423 // of the original receiver from the call site). | 3340 __ bind(&wrap); |
| 3424 __ bind(&non_function); | 3341 // Wrap the receiver and patch it back onto the stack. |
| 3425 __ sw(a1, MemOperand(sp, argc_ * kPointerSize)); | 3342 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
| 3426 __ li(a0, Operand(argc_)); // Set up the number of arguments. | 3343 __ Push(a1, a3); |
| 3427 __ mov(a2, zero_reg); | 3344 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 3428 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION); | 3345 __ pop(a1); |
| 3429 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3346 } |
| 3430 RelocInfo::CODE_TARGET); | 3347 __ mov(a0, v0); |
| 3348 __ sw(a0, MemOperand(sp, argc_ * kPointerSize)); |
| 3349 __ jmp(&cont); |
| 3350 } |
| 3431 } | 3351 } |
| 3432 | 3352 |
| 3433 | 3353 |
| 3434 void CallConstructStub::Generate(MacroAssembler* masm) { | 3354 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 3435 // a0 : number of arguments | 3355 // a0 : number of arguments |
| 3436 // a1 : the function to call | 3356 // a1 : the function to call |
| 3437 // a2 : cache cell for call target | 3357 // a2 : feedback vector |
| 3358 // a3 : (only if a2 is not undefined) slot in feedback vector (Smi) |
| 3438 Label slow, non_function_call; | 3359 Label slow, non_function_call; |
| 3439 | 3360 |
| 3440 // Check that the function is not a smi. | 3361 // Check that the function is not a smi. |
| 3441 __ JumpIfSmi(a1, &non_function_call); | 3362 __ JumpIfSmi(a1, &non_function_call); |
| 3442 // Check that the function is a JSFunction. | 3363 // Check that the function is a JSFunction. |
| 3443 __ GetObjectType(a1, a3, a3); | 3364 __ GetObjectType(a1, t0, t0); |
| 3444 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); | 3365 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
| 3445 | 3366 |
| 3446 if (RecordCallTarget()) { | 3367 if (RecordCallTarget()) { |
| 3447 GenerateRecordCallTarget(masm); | 3368 GenerateRecordCallTarget(masm); |
| 3448 } | 3369 } |
| 3449 | 3370 |
| 3450 // Jump to the function-specific construct stub. | 3371 // Jump to the function-specific construct stub. |
| 3451 Register jmp_reg = a3; | 3372 Register jmp_reg = t0; |
| 3452 __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 3373 __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 3453 __ lw(jmp_reg, FieldMemOperand(jmp_reg, | 3374 __ lw(jmp_reg, FieldMemOperand(jmp_reg, |
| 3454 SharedFunctionInfo::kConstructStubOffset)); | 3375 SharedFunctionInfo::kConstructStubOffset)); |
| 3455 __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3376 __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 3456 __ Jump(at); | 3377 __ Jump(at); |
| 3457 | 3378 |
| 3458 // a0: number of arguments | 3379 // a0: number of arguments |
| 3459 // a1: called object | 3380 // a1: called object |
| 3460 // a3: object type | 3381 // t0: object type |
| 3461 Label do_call; | 3382 Label do_call; |
| 3462 __ bind(&slow); | 3383 __ bind(&slow); |
| 3463 __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE)); | 3384 __ Branch(&non_function_call, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 3464 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); | 3385 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); |
| 3465 __ jmp(&do_call); | 3386 __ jmp(&do_call); |
| 3466 | 3387 |
| 3467 __ bind(&non_function_call); | 3388 __ bind(&non_function_call); |
| 3468 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 3389 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
| 3469 __ bind(&do_call); | 3390 __ bind(&do_call); |
| 3470 // Set expected number of arguments to zero (not changing r0). | 3391 // Set expected number of arguments to zero (not changing r0). |
| 3471 __ li(a2, Operand(0, RelocInfo::NONE32)); | 3392 __ li(a2, Operand(0, RelocInfo::NONE32)); |
| 3472 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3393 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 3473 RelocInfo::CODE_TARGET); | 3394 RelocInfo::CODE_TARGET); |
| (...skipping 140 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3614 __ CallRuntime(Runtime::kCharFromCode, 1); | 3535 __ CallRuntime(Runtime::kCharFromCode, 1); |
| 3615 __ Move(result_, v0); | 3536 __ Move(result_, v0); |
| 3616 | 3537 |
| 3617 call_helper.AfterCall(masm); | 3538 call_helper.AfterCall(masm); |
| 3618 __ Branch(&exit_); | 3539 __ Branch(&exit_); |
| 3619 | 3540 |
| 3620 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 3541 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); |
| 3621 } | 3542 } |
| 3622 | 3543 |
| 3623 | 3544 |
| 3624 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, | |
| 3625 Register dest, | |
| 3626 Register src, | |
| 3627 Register count, | |
| 3628 Register scratch, | |
| 3629 bool ascii) { | |
| 3630 Label loop; | |
| 3631 Label done; | |
| 3632 // This loop just copies one character at a time, as it is only used for | |
| 3633 // very short strings. | |
| 3634 if (!ascii) { | |
| 3635 __ addu(count, count, count); | |
| 3636 } | |
| 3637 __ Branch(&done, eq, count, Operand(zero_reg)); | |
| 3638 __ addu(count, dest, count); // Count now points to the last dest byte. | |
| 3639 | |
| 3640 __ bind(&loop); | |
| 3641 __ lbu(scratch, MemOperand(src)); | |
| 3642 __ addiu(src, src, 1); | |
| 3643 __ sb(scratch, MemOperand(dest)); | |
| 3644 __ addiu(dest, dest, 1); | |
| 3645 __ Branch(&loop, lt, dest, Operand(count)); | |
| 3646 | |
| 3647 __ bind(&done); | |
| 3648 } | |
| 3649 | |
| 3650 | |
| 3651 enum CopyCharactersFlags { | 3545 enum CopyCharactersFlags { |
| 3652 COPY_ASCII = 1, | 3546 COPY_ASCII = 1, |
| 3653 DEST_ALWAYS_ALIGNED = 2 | 3547 DEST_ALWAYS_ALIGNED = 2 |
| 3654 }; | 3548 }; |
| 3655 | 3549 |
| 3656 | 3550 |
| 3657 void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm, | 3551 void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm, |
| 3658 Register dest, | 3552 Register dest, |
| 3659 Register src, | 3553 Register src, |
| 3660 Register count, | 3554 Register count, |
| (...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3759 __ lbu(scratch1, MemOperand(src)); | 3653 __ lbu(scratch1, MemOperand(src)); |
| 3760 __ addiu(src, src, 1); | 3654 __ addiu(src, src, 1); |
| 3761 __ sb(scratch1, MemOperand(dest)); | 3655 __ sb(scratch1, MemOperand(dest)); |
| 3762 __ addiu(dest, dest, 1); | 3656 __ addiu(dest, dest, 1); |
| 3763 __ Branch(&byte_loop); | 3657 __ Branch(&byte_loop); |
| 3764 | 3658 |
| 3765 __ bind(&done); | 3659 __ bind(&done); |
| 3766 } | 3660 } |
| 3767 | 3661 |
| 3768 | 3662 |
| 3769 void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm, | |
| 3770 Register c1, | |
| 3771 Register c2, | |
| 3772 Register scratch1, | |
| 3773 Register scratch2, | |
| 3774 Register scratch3, | |
| 3775 Register scratch4, | |
| 3776 Register scratch5, | |
| 3777 Label* not_found) { | |
| 3778 // Register scratch3 is the general scratch register in this function. | |
| 3779 Register scratch = scratch3; | |
| 3780 | |
| 3781 // Make sure that both characters are not digits as such strings has a | |
| 3782 // different hash algorithm. Don't try to look for these in the string table. | |
| 3783 Label not_array_index; | |
| 3784 __ Subu(scratch, c1, Operand(static_cast<int>('0'))); | |
| 3785 __ Branch(¬_array_index, | |
| 3786 Ugreater, | |
| 3787 scratch, | |
| 3788 Operand(static_cast<int>('9' - '0'))); | |
| 3789 __ Subu(scratch, c2, Operand(static_cast<int>('0'))); | |
| 3790 | |
| 3791 // If check failed combine both characters into single halfword. | |
| 3792 // This is required by the contract of the method: code at the | |
| 3793 // not_found branch expects this combination in c1 register. | |
| 3794 Label tmp; | |
| 3795 __ sll(scratch1, c2, kBitsPerByte); | |
| 3796 __ Branch(&tmp, Ugreater, scratch, Operand(static_cast<int>('9' - '0'))); | |
| 3797 __ Or(c1, c1, scratch1); | |
| 3798 __ bind(&tmp); | |
| 3799 __ Branch( | |
| 3800 not_found, Uless_equal, scratch, Operand(static_cast<int>('9' - '0'))); | |
| 3801 | |
| 3802 __ bind(¬_array_index); | |
| 3803 // Calculate the two character string hash. | |
| 3804 Register hash = scratch1; | |
| 3805 StringHelper::GenerateHashInit(masm, hash, c1); | |
| 3806 StringHelper::GenerateHashAddCharacter(masm, hash, c2); | |
| 3807 StringHelper::GenerateHashGetHash(masm, hash); | |
| 3808 | |
| 3809 // Collect the two characters in a register. | |
| 3810 Register chars = c1; | |
| 3811 __ sll(scratch, c2, kBitsPerByte); | |
| 3812 __ Or(chars, chars, scratch); | |
| 3813 | |
| 3814 // chars: two character string, char 1 in byte 0 and char 2 in byte 1. | |
| 3815 // hash: hash of two character string. | |
| 3816 | |
| 3817 // Load string table. | |
| 3818 // Load address of first element of the string table. | |
| 3819 Register string_table = c2; | |
| 3820 __ LoadRoot(string_table, Heap::kStringTableRootIndex); | |
| 3821 | |
| 3822 Register undefined = scratch4; | |
| 3823 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); | |
| 3824 | |
| 3825 // Calculate capacity mask from the string table capacity. | |
| 3826 Register mask = scratch2; | |
| 3827 __ lw(mask, FieldMemOperand(string_table, StringTable::kCapacityOffset)); | |
| 3828 __ sra(mask, mask, 1); | |
| 3829 __ Addu(mask, mask, -1); | |
| 3830 | |
| 3831 // Calculate untagged address of the first element of the string table. | |
| 3832 Register first_string_table_element = string_table; | |
| 3833 __ Addu(first_string_table_element, string_table, | |
| 3834 Operand(StringTable::kElementsStartOffset - kHeapObjectTag)); | |
| 3835 | |
| 3836 // Registers. | |
| 3837 // chars: two character string, char 1 in byte 0 and char 2 in byte 1. | |
| 3838 // hash: hash of two character string | |
| 3839 // mask: capacity mask | |
| 3840 // first_string_table_element: address of the first element of | |
| 3841 // the string table | |
| 3842 // undefined: the undefined object | |
| 3843 // scratch: - | |
| 3844 | |
| 3845 // Perform a number of probes in the string table. | |
| 3846 const int kProbes = 4; | |
| 3847 Label found_in_string_table; | |
| 3848 Label next_probe[kProbes]; | |
| 3849 Register candidate = scratch5; // Scratch register contains candidate. | |
| 3850 for (int i = 0; i < kProbes; i++) { | |
| 3851 // Calculate entry in string table. | |
| 3852 if (i > 0) { | |
| 3853 __ Addu(candidate, hash, Operand(StringTable::GetProbeOffset(i))); | |
| 3854 } else { | |
| 3855 __ mov(candidate, hash); | |
| 3856 } | |
| 3857 | |
| 3858 __ And(candidate, candidate, Operand(mask)); | |
| 3859 | |
| 3860 // Load the entry from the symble table. | |
| 3861 STATIC_ASSERT(StringTable::kEntrySize == 1); | |
| 3862 __ sll(scratch, candidate, kPointerSizeLog2); | |
| 3863 __ Addu(scratch, scratch, first_string_table_element); | |
| 3864 __ lw(candidate, MemOperand(scratch)); | |
| 3865 | |
| 3866 // If entry is undefined no string with this hash can be found. | |
| 3867 Label is_string; | |
| 3868 __ GetObjectType(candidate, scratch, scratch); | |
| 3869 __ Branch(&is_string, ne, scratch, Operand(ODDBALL_TYPE)); | |
| 3870 | |
| 3871 __ Branch(not_found, eq, undefined, Operand(candidate)); | |
| 3872 // Must be the hole (deleted entry). | |
| 3873 if (FLAG_debug_code) { | |
| 3874 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); | |
| 3875 __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole, | |
| 3876 scratch, Operand(candidate)); | |
| 3877 } | |
| 3878 __ jmp(&next_probe[i]); | |
| 3879 | |
| 3880 __ bind(&is_string); | |
| 3881 | |
| 3882 // Check that the candidate is a non-external ASCII string. The instance | |
| 3883 // type is still in the scratch register from the CompareObjectType | |
| 3884 // operation. | |
| 3885 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]); | |
| 3886 | |
| 3887 // If length is not 2 the string is not a candidate. | |
| 3888 __ lw(scratch, FieldMemOperand(candidate, String::kLengthOffset)); | |
| 3889 __ Branch(&next_probe[i], ne, scratch, Operand(Smi::FromInt(2))); | |
| 3890 | |
| 3891 // Check if the two characters match. | |
| 3892 // Assumes that word load is little endian. | |
| 3893 __ lhu(scratch, FieldMemOperand(candidate, SeqOneByteString::kHeaderSize)); | |
| 3894 __ Branch(&found_in_string_table, eq, chars, Operand(scratch)); | |
| 3895 __ bind(&next_probe[i]); | |
| 3896 } | |
| 3897 | |
| 3898 // No matching 2 character string found by probing. | |
| 3899 __ jmp(not_found); | |
| 3900 | |
| 3901 // Scratch register contains result when we fall through to here. | |
| 3902 Register result = candidate; | |
| 3903 __ bind(&found_in_string_table); | |
| 3904 __ mov(v0, result); | |
| 3905 } | |
| 3906 | |
| 3907 | |
| 3908 void StringHelper::GenerateHashInit(MacroAssembler* masm, | 3663 void StringHelper::GenerateHashInit(MacroAssembler* masm, |
| 3909 Register hash, | 3664 Register hash, |
| 3910 Register character) { | 3665 Register character) { |
| 3911 // hash = seed + character + ((seed + character) << 10); | 3666 // hash = seed + character + ((seed + character) << 10); |
| 3912 __ LoadRoot(hash, Heap::kHashSeedRootIndex); | 3667 __ LoadRoot(hash, Heap::kHashSeedRootIndex); |
| 3913 // Untag smi seed and add the character. | 3668 // Untag smi seed and add the character. |
| 3914 __ SmiUntag(hash); | 3669 __ SmiUntag(hash); |
| 3915 __ addu(hash, hash, character); | 3670 __ addu(hash, hash, character); |
| 3916 __ sll(at, hash, 10); | 3671 __ sll(at, hash, 10); |
| 3917 __ addu(hash, hash, at); | 3672 __ addu(hash, hash, at); |
| (...skipping 407 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4325 // Compare flat ASCII strings natively. Remove arguments from stack first. | 4080 // Compare flat ASCII strings natively. Remove arguments from stack first. |
| 4326 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3); | 4081 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3); |
| 4327 __ Addu(sp, sp, Operand(2 * kPointerSize)); | 4082 __ Addu(sp, sp, Operand(2 * kPointerSize)); |
| 4328 GenerateCompareFlatAsciiStrings(masm, a1, a0, a2, a3, t0, t1); | 4083 GenerateCompareFlatAsciiStrings(masm, a1, a0, a2, a3, t0, t1); |
| 4329 | 4084 |
| 4330 __ bind(&runtime); | 4085 __ bind(&runtime); |
| 4331 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 4086 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 4332 } | 4087 } |
| 4333 | 4088 |
| 4334 | 4089 |
| 4090 void ArrayPushStub::Generate(MacroAssembler* masm) { |
| 4091 Register receiver = a0; |
| 4092 Register scratch = a1; |
| 4093 |
| 4094 int argc = arguments_count(); |
| 4095 |
| 4096 if (argc == 0) { |
| 4097 // Nothing to do, just return the length. |
| 4098 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 4099 __ DropAndRet(argc + 1); |
| 4100 return; |
| 4101 } |
| 4102 |
| 4103 Isolate* isolate = masm->isolate(); |
| 4104 |
| 4105 if (argc != 1) { |
| 4106 __ TailCallExternalReference( |
| 4107 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); |
| 4108 return; |
| 4109 } |
| 4110 |
| 4111 Label call_builtin, attempt_to_grow_elements, with_write_barrier; |
| 4112 |
| 4113 Register elements = t2; |
| 4114 Register end_elements = t1; |
| 4115 // Get the elements array of the object. |
| 4116 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); |
| 4117 |
| 4118 if (IsFastSmiOrObjectElementsKind(elements_kind())) { |
| 4119 // Check that the elements are in fast mode and writable. |
| 4120 __ CheckMap(elements, |
| 4121 scratch, |
| 4122 Heap::kFixedArrayMapRootIndex, |
| 4123 &call_builtin, |
| 4124 DONT_DO_SMI_CHECK); |
| 4125 } |
| 4126 |
| 4127 // Get the array's length into scratch and calculate new length. |
| 4128 __ lw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 4129 __ Addu(scratch, scratch, Operand(Smi::FromInt(argc))); |
| 4130 |
| 4131 // Get the elements' length. |
| 4132 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
| 4133 |
| 4134 const int kEndElementsOffset = |
| 4135 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; |
| 4136 |
| 4137 if (IsFastSmiOrObjectElementsKind(elements_kind())) { |
| 4138 // Check if we could survive without allocation. |
| 4139 __ Branch(&attempt_to_grow_elements, gt, scratch, Operand(t0)); |
| 4140 |
| 4141 // Check if value is a smi. |
| 4142 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize)); |
| 4143 __ JumpIfNotSmi(t0, &with_write_barrier); |
| 4144 |
| 4145 // Store the value. |
| 4146 // We may need a register containing the address end_elements below, |
| 4147 // so write back the value in end_elements. |
| 4148 __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize); |
| 4149 __ Addu(end_elements, elements, end_elements); |
| 4150 __ Addu(end_elements, end_elements, kEndElementsOffset); |
| 4151 __ sw(t0, MemOperand(end_elements)); |
| 4152 } else { |
| 4153 // Check if we could survive without allocation. |
| 4154 __ Branch(&call_builtin, gt, scratch, Operand(t0)); |
| 4155 |
| 4156 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize)); |
| 4157 __ StoreNumberToDoubleElements(t0, scratch, elements, a3, t1, a2, |
| 4158 &call_builtin, argc * kDoubleSize); |
| 4159 } |
| 4160 |
| 4161 // Save new length. |
| 4162 __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 4163 __ mov(v0, scratch); |
| 4164 __ DropAndRet(argc + 1); |
| 4165 |
| 4166 if (IsFastDoubleElementsKind(elements_kind())) { |
| 4167 __ bind(&call_builtin); |
| 4168 __ TailCallExternalReference( |
| 4169 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); |
| 4170 return; |
| 4171 } |
| 4172 |
| 4173 __ bind(&with_write_barrier); |
| 4174 |
| 4175 if (IsFastSmiElementsKind(elements_kind())) { |
| 4176 if (FLAG_trace_elements_transitions) __ jmp(&call_builtin); |
| 4177 |
| 4178 __ lw(t3, FieldMemOperand(t0, HeapObject::kMapOffset)); |
| 4179 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 4180 __ Branch(&call_builtin, eq, t3, Operand(at)); |
| 4181 |
| 4182 ElementsKind target_kind = IsHoleyElementsKind(elements_kind()) |
| 4183 ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; |
| 4184 __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| 4185 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); |
| 4186 __ lw(a3, ContextOperand(a3, Context::JS_ARRAY_MAPS_INDEX)); |
| 4187 const int header_size = FixedArrayBase::kHeaderSize; |
| 4188 // Verify that the object can be transitioned in place. |
| 4189 const int origin_offset = header_size + elements_kind() * kPointerSize; |
| 4190 __ lw(a2, FieldMemOperand(receiver, origin_offset)); |
| 4191 __ lw(at, FieldMemOperand(a3, HeapObject::kMapOffset)); |
| 4192 __ Branch(&call_builtin, ne, a2, Operand(at)); |
| 4193 |
| 4194 |
| 4195 const int target_offset = header_size + target_kind * kPointerSize; |
| 4196 __ lw(a3, FieldMemOperand(a3, target_offset)); |
| 4197 __ mov(a2, receiver); |
| 4198 ElementsTransitionGenerator::GenerateMapChangeElementsTransition( |
| 4199 masm, DONT_TRACK_ALLOCATION_SITE, NULL); |
| 4200 } |
| 4201 |
| 4202 // Save new length. |
| 4203 __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 4204 |
| 4205 // Store the value. |
| 4206 // We may need a register containing the address end_elements below, so write |
| 4207 // back the value in end_elements. |
| 4208 __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize); |
| 4209 __ Addu(end_elements, elements, end_elements); |
| 4210 __ Addu(end_elements, end_elements, kEndElementsOffset); |
| 4211 __ sw(t0, MemOperand(end_elements)); |
| 4212 |
| 4213 __ RecordWrite(elements, |
| 4214 end_elements, |
| 4215 t0, |
| 4216 kRAHasNotBeenSaved, |
| 4217 kDontSaveFPRegs, |
| 4218 EMIT_REMEMBERED_SET, |
| 4219 OMIT_SMI_CHECK); |
| 4220 __ mov(v0, scratch); |
| 4221 __ DropAndRet(argc + 1); |
| 4222 |
| 4223 __ bind(&attempt_to_grow_elements); |
| 4224 // scratch: array's length + 1. |
| 4225 |
| 4226 if (!FLAG_inline_new) { |
| 4227 __ bind(&call_builtin); |
| 4228 __ TailCallExternalReference( |
| 4229 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); |
| 4230 return; |
| 4231 } |
| 4232 |
| 4233 __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize)); |
| 4234 // Growing elements that are SMI-only requires special handling in case the |
| 4235 // new element is non-Smi. For now, delegate to the builtin. |
| 4236 if (IsFastSmiElementsKind(elements_kind())) { |
| 4237 __ JumpIfNotSmi(a2, &call_builtin); |
| 4238 } |
| 4239 |
| 4240 // We could be lucky and the elements array could be at the top of new-space. |
| 4241 // In this case we can just grow it in place by moving the allocation pointer |
| 4242 // up. |
| 4243 ExternalReference new_space_allocation_top = |
| 4244 ExternalReference::new_space_allocation_top_address(isolate); |
| 4245 ExternalReference new_space_allocation_limit = |
| 4246 ExternalReference::new_space_allocation_limit_address(isolate); |
| 4247 |
| 4248 const int kAllocationDelta = 4; |
| 4249 ASSERT(kAllocationDelta >= argc); |
| 4250 // Load top and check if it is the end of elements. |
| 4251 __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize); |
| 4252 __ Addu(end_elements, elements, end_elements); |
| 4253 __ Addu(end_elements, end_elements, Operand(kEndElementsOffset)); |
| 4254 __ li(t0, Operand(new_space_allocation_top)); |
| 4255 __ lw(a3, MemOperand(t0)); |
| 4256 __ Branch(&call_builtin, ne, a3, Operand(end_elements)); |
| 4257 |
| 4258 __ li(t3, Operand(new_space_allocation_limit)); |
| 4259 __ lw(t3, MemOperand(t3)); |
| 4260 __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize)); |
| 4261 __ Branch(&call_builtin, hi, a3, Operand(t3)); |
| 4262 |
| 4263 // We fit and could grow elements. |
| 4264 // Update new_space_allocation_top. |
| 4265 __ sw(a3, MemOperand(t0)); |
| 4266 // Push the argument. |
| 4267 __ sw(a2, MemOperand(end_elements)); |
| 4268 // Fill the rest with holes. |
| 4269 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); |
| 4270 for (int i = 1; i < kAllocationDelta; i++) { |
| 4271 __ sw(a3, MemOperand(end_elements, i * kPointerSize)); |
| 4272 } |
| 4273 |
| 4274 // Update elements' and array's sizes. |
| 4275 __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 4276 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
| 4277 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta))); |
| 4278 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
| 4279 |
| 4280 // Elements are in new space, so write barrier is not required. |
| 4281 __ mov(v0, scratch); |
| 4282 __ DropAndRet(argc + 1); |
| 4283 |
| 4284 __ bind(&call_builtin); |
| 4285 __ TailCallExternalReference( |
| 4286 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); |
| 4287 } |
| 4288 |
| 4289 |
// Emits the trampoline that tail-calls the binary-op-with-allocation-site
// stub after materializing the allocation site in a2. The a2 value emitted
// here is a placeholder; it is patched with the real allocation site when
// the stub is copied from its template.
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a1 : left
  //  -- a0 : right
  //  -- ra : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  // Load a2 with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate
  // this stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ li(a2, handle(isolate->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    // First check: not a Smi (a raw undefined/patched pointer is tagged).
    __ And(at, a2, Operand(kSmiTagMask));
    __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg));
    // Second check: its map is the AllocationSite map.
    __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
    __ Assert(eq, kExpectedAllocationSite, t0, Operand(at));
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(state_);
  __ TailCallStub(&stub);
}
| 4362 | 4317 |
| 4363 | 4318 |
// Adds the two string arguments on the stack (sp[4] = left, sp[0] = right)
// and returns the result in v0. Fast paths handled inline: empty operand,
// two-character string-table lookup, cons-string allocation, and flat
// (sequential/external) copies. Falls back to Runtime::kStringAdd, or to a
// builtin when an operand had to be converted to a string first.
void StringAddStub::Generate(MacroAssembler* masm) {
  Label call_runtime, call_builtin;
  Builtins::JavaScript builtin_id = Builtins::ADD;

  Counters* counters = masm->isolate()->counters();

  // Stack on entry:
  // sp[0]: second argument (right).
  // sp[4]: first argument (left).

  // Load the two arguments.
  __ lw(a0, MemOperand(sp, 1 * kPointerSize));  // First argument.
  __ lw(a1, MemOperand(sp, 0 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  // Otherwise, at least one of the arguments is definitely a string,
  // and we convert the one that is not known to be a string.
  if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
    ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
    ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
    __ JumpIfEitherSmi(a0, a1, &call_runtime);
    // Load instance types.
    __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
    __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
    __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
    __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
    STATIC_ASSERT(kStringTag == 0);
    // If either is not a string, go to runtime.
    __ Or(t4, t0, Operand(t1));
    __ And(t4, t4, Operand(kIsNotStringMask));
    __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
  } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
    GenerateConvertArgument(
        masm, 1 * kPointerSize, a0, a2, a3, t0, t1, &call_builtin);
    builtin_id = Builtins::STRING_ADD_RIGHT;
  } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
    GenerateConvertArgument(
        masm, 0 * kPointerSize, a1, a2, a3, t0, t1, &call_builtin);
    builtin_id = Builtins::STRING_ADD_LEFT;
  }

  // Both arguments are strings.
  // a0: first string
  // a1: second string
  // t0: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // t1: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  {
    Label strings_not_empty;
    // Check if either of the strings are empty. In that case return the
    // other. These tests use zero-length check on string-length which is a
    // Smi. Assert that Smi::FromInt(0) is really 0.
    STATIC_ASSERT(kSmiTag == 0);
    ASSERT(Smi::FromInt(0) == 0);
    __ lw(a2, FieldMemOperand(a0, String::kLengthOffset));
    __ lw(a3, FieldMemOperand(a1, String::kLengthOffset));
    __ mov(v0, a0);       // Assume we'll return first string (from a0).
    __ Movz(v0, a1, a2);  // If first is empty, return second (from a1).
    __ slt(t4, zero_reg, a2);  // if (a2 > 0) t4 = 1.
    __ slt(t5, zero_reg, a3);  // if (a3 > 0) t5 = 1.
    __ and_(t4, t4, t5);       // Branch if both strings were non-empty.
    __ Branch(&strings_not_empty, ne, t4, Operand(zero_reg));

    // One string was empty; v0 already holds the other one.
    __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
    __ DropAndRet(2);

    __ bind(&strings_not_empty);
  }

  // Untag both string-lengths.
  __ sra(a2, a2, kSmiTagSize);
  __ sra(a3, a3, kSmiTagSize);

  // Both strings are non-empty.
  // a0: first string
  // a1: second string
  // a2: length of first string
  // a3: length of second string
  // t0: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // t1: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  // Adding two lengths can't overflow.
  STATIC_ASSERT(String::kMaxLength < String::kMaxLength * 2);
  __ Addu(t2, a2, Operand(a3));
  // Use the string table when adding two one character strings, as it
  // helps later optimizations to return a string here.
  __ Branch(&longer_than_two, ne, t2, Operand(2));

  // Check that both strings are non-external ASCII strings.
  if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
    // Instance types were not loaded above; load them now.
    __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
    __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
    __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
    __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  }
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(t0, t1, t2, t3,
                                                  &call_runtime);

  // Get the two characters forming the sub string.
  __ lbu(a2, FieldMemOperand(a0, SeqOneByteString::kHeaderSize));
  __ lbu(a3, FieldMemOperand(a1, SeqOneByteString::kHeaderSize));

  // Try to lookup two character string in string table. If it is not found
  // just allocate a new one.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterStringTableProbe(
      masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
  __ DropAndRet(2);

  __ bind(&make_two_character_string);
  // Resulting string has length 2 and first chars of two strings
  // are combined into single halfword in a2 register.
  // So we can fill resulting string without two loops by a single
  // halfword store instruction (which assumes that processor is
  // in a little endian mode).
  __ li(t2, Operand(2));
  __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime);
  __ sh(a2, FieldMemOperand(v0, SeqOneByteString::kHeaderSize));
  __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
  __ DropAndRet(2);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ Branch(&string_add_flat_result, lt, t2, Operand(ConsString::kMinLength));
  // Handle exceptionally long strings in the runtime system.
  STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
  ASSERT(IsPowerOf2(String::kMaxLength + 1));
  // kMaxLength + 1 is representable as shifted literal, kMaxLength is not.
  __ Branch(&call_runtime, hs, t2, Operand(String::kMaxLength + 1));

  // If result is not supposed to be flat, allocate a cons string object.
  // If both strings are ASCII the result is an ASCII cons string.
  if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
    __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
    __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
    __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
    __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  }
  Label non_ascii, allocated, ascii_data;
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // Branch to non_ascii if either string-encoding field is zero (non-ASCII).
  __ And(t4, t0, Operand(t1));
  __ And(t4, t4, Operand(kStringEncodingMask));
  __ Branch(&non_ascii, eq, t4, Operand(zero_reg));

  // Allocate an ASCII cons string.
  __ bind(&ascii_data);
  __ AllocateAsciiConsString(v0, t2, t0, t1, &call_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string. When new-space high-promotion mode
  // is active the cons string may be promoted soon, so use a write barrier;
  // otherwise the stores can skip it (everything is in new space).
  Label skip_write_barrier, after_writing;
  ExternalReference high_promotion_mode = ExternalReference::
      new_space_high_promotion_mode_active_address(masm->isolate());
  __ li(t0, Operand(high_promotion_mode));
  __ lw(t0, MemOperand(t0, 0));
  __ Branch(&skip_write_barrier, eq, t0, Operand(zero_reg));

  __ mov(t3, v0);
  __ sw(a0, FieldMemOperand(t3, ConsString::kFirstOffset));
  __ RecordWriteField(t3,
                      ConsString::kFirstOffset,
                      a0,
                      t0,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs);
  __ sw(a1, FieldMemOperand(t3, ConsString::kSecondOffset));
  __ RecordWriteField(t3,
                      ConsString::kSecondOffset,
                      a1,
                      t0,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs);
  __ jmp(&after_writing);

  __ bind(&skip_write_barrier);
  __ sw(a0, FieldMemOperand(v0, ConsString::kFirstOffset));
  __ sw(a1, FieldMemOperand(v0, ConsString::kSecondOffset));

  __ bind(&after_writing);

  __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
  __ DropAndRet(2);

  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only one byte characters.
  // t0: first instance type.
  // t1: second instance type.
  // Branch to ascii_data if _both_ instances have kOneByteDataHintMask set.
  __ And(at, t0, Operand(kOneByteDataHintMask));
  __ and_(at, at, t1);
  __ Branch(&ascii_data, ne, at, Operand(zero_reg));
  __ Xor(t0, t0, Operand(t1));
  STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
  __ And(t0, t0, Operand(kOneByteStringTag | kOneByteDataHintTag));
  __ Branch(&ascii_data, eq, t0,
            Operand(kOneByteStringTag | kOneByteDataHintTag));

  // Allocate a two byte cons string.
  __ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime);
  __ Branch(&allocated);

  // We cannot encounter sliced strings or cons strings here since:
  STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
  // Handle creating a flat result from either external or sequential strings.
  // Locate the first characters' locations.
  // a0: first string
  // a1: second string
  // a2: length of first string
  // a3: length of second string
  // t0: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // t1: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // t2: sum of lengths.
  Label first_prepared, second_prepared;
  __ bind(&string_add_flat_result);
  if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
    __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
    __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
    __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
    __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  }
  // Check whether both strings have same encoding.
  __ Xor(t3, t0, Operand(t1));
  __ And(t3, t3, Operand(kStringEncodingMask));
  __ Branch(&call_runtime, ne, t3, Operand(zero_reg));

  STATIC_ASSERT(kSeqStringTag == 0);
  __ And(t4, t0, Operand(kStringRepresentationMask));

  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  Label skip_first_add;
  __ Branch(&skip_first_add, ne, t4, Operand(zero_reg));
  // Sequential string: compute the first-character address in the delay slot.
  __ Branch(USE_DELAY_SLOT, &first_prepared);
  __ addiu(t3, a0, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ bind(&skip_first_add);
  // External string: rule out short external string and load string resource.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ And(t4, t0, Operand(kShortExternalStringMask));
  __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
  __ lw(t3, FieldMemOperand(a0, ExternalString::kResourceDataOffset));
  __ bind(&first_prepared);

  STATIC_ASSERT(kSeqStringTag == 0);
  __ And(t4, t1, Operand(kStringRepresentationMask));
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  Label skip_second_add;
  __ Branch(&skip_second_add, ne, t4, Operand(zero_reg));
  // Sequential string: compute the first-character address in the delay slot.
  __ Branch(USE_DELAY_SLOT, &second_prepared);
  __ addiu(a1, a1, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ bind(&skip_second_add);
  // External string: rule out short external string and load string resource.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ And(t4, t1, Operand(kShortExternalStringMask));
  __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
  __ lw(a1, FieldMemOperand(a1, ExternalString::kResourceDataOffset));
  __ bind(&second_prepared);

  Label non_ascii_string_add_flat_result;
  // t3: first character of first string
  // a1: first character of second string
  // a2: length of first string
  // a3: length of second string
  // t2: sum of lengths.
  // Both strings have the same encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ And(t4, t1, Operand(kStringEncodingMask));
  __ Branch(&non_ascii_string_add_flat_result, eq, t4, Operand(zero_reg));

  __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime);
  __ Addu(t2, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  // v0: result string.
  // t3: first character of first string.
  // a1: first character of second string
  // a2: length of first string.
  // a3: length of second string.
  // t2: first character of result.

  StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, true);
  // t2: next character of result.
  StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, true);
  __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
  __ DropAndRet(2);

  __ bind(&non_ascii_string_add_flat_result);
  __ AllocateTwoByteString(v0, t2, t0, t1, t5, &call_runtime);
  __ Addu(t2, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // v0: result string.
  // t3: first character of first string.
  // a1: first character of second string.
  // a2: length of first string.
  // a3: length of second string.
  // t2: first character of result.
  StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, false);
  // t2: next character of result.
  StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, false);

  __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
  __ DropAndRet(2);

  // Just jump to runtime to add the two strings.
  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}
| 4675 | |
| 4676 | |
| 4677 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | |
| 4678 __ push(a0); | |
| 4679 __ push(a1); | |
| 4680 } | |
| 4681 | |
| 4682 | |
| 4683 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm) { | |
| 4684 __ pop(a1); | |
| 4685 __ pop(a0); | |
| 4686 } | |
| 4687 | |
| 4688 | |
// Ensures the argument held in |arg| (shadowed by the stack slot at
// |stack_offset|) is a string. Strings pass through untouched; numbers are
// converted via the number-to-string cache, and the converted value is
// written back to |arg| and the stack slot. Jumps to |slow| when the value
// is neither a string nor a number found in the cache. scratch1..scratch4
// are clobbered.
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Register scratch4,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ GetObjectType(arg, scratch1, scratch1);
  __ Branch(&done, lt, scratch1, Operand(FIRST_NONSTRING_TYPE));

  // Check the number to string cache.
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, scratch4, slow);
  __ mov(arg, scratch1);
  __ sw(arg, MemOperand(sp, stack_offset));
  __ bind(&done);
}
| 4711 | |
| 4712 | |
| 4713 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 4319 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| 4714 ASSERT(state_ == CompareIC::SMI); | 4320 ASSERT(state_ == CompareIC::SMI); |
| 4715 Label miss; | 4321 Label miss; |
| 4716 __ Or(a2, a1, a0); | 4322 __ Or(a2, a1, a0); |
| 4717 __ JumpIfNotSmi(a2, &miss); | 4323 __ JumpIfNotSmi(a2, &miss); |
| 4718 | 4324 |
| 4719 if (GetCondition() == eq) { | 4325 if (GetCondition() == eq) { |
| 4720 // For equality we do not care about the sign of the result. | 4326 // For equality we do not care about the sign of the result. |
| 4721 __ Ret(USE_DELAY_SLOT); | 4327 __ Ret(USE_DELAY_SLOT); |
| 4722 __ Subu(v0, a0, a1); | 4328 __ Subu(v0, a0, a1); |
| (...skipping 908 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5631 if (function_mode_ == JS_FUNCTION_STUB_MODE) { | 5237 if (function_mode_ == JS_FUNCTION_STUB_MODE) { |
| 5632 __ Addu(a1, a1, Operand(1)); | 5238 __ Addu(a1, a1, Operand(1)); |
| 5633 } | 5239 } |
| 5634 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 5240 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 5635 __ sll(a1, a1, kPointerSizeLog2); | 5241 __ sll(a1, a1, kPointerSizeLog2); |
| 5636 __ Ret(USE_DELAY_SLOT); | 5242 __ Ret(USE_DELAY_SLOT); |
| 5637 __ Addu(sp, sp, a1); | 5243 __ Addu(sp, sp, a1); |
| 5638 } | 5244 } |
| 5639 | 5245 |
| 5640 | 5246 |
// Trampoline taken when a stub fails and must be replaced by a tail call:
// invokes the C entry stub (which returns the target function in v0), then
// tail-calls that function with the failed stub's stack arguments.
void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  // v0 holds the function to invoke; move it to a1 for InvokeFunction.
  __ mov(a1, v0);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ lw(a0, MemOperand(fp, parameter_count_offset));
  // The parameter count above includes the receiver for the arguments passed
  // to the deoptimization handler. Subtract the receiver for the parameter
  // count for the call.
  __ Subu(a0, a0, 1);
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  ParameterCount argument_count(a0);
  __ InvokeFunction(a1, argument_count, JUMP_FUNCTION, NullCallWrapper());
}
| 5656 | |
| 5657 | |
| 5658 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 5247 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
| 5659 if (masm->isolate()->function_entry_hook() != NULL) { | 5248 if (masm->isolate()->function_entry_hook() != NULL) { |
| 5660 ProfileEntryHookStub stub; | 5249 ProfileEntryHookStub stub; |
| 5661 __ push(ra); | 5250 __ push(ra); |
| 5662 __ CallStub(&stub); | 5251 __ CallStub(&stub); |
| 5663 __ pop(ra); | 5252 __ pop(ra); |
| 5664 } | 5253 } |
| 5665 } | 5254 } |
| 5666 | 5255 |
| 5667 | 5256 |
| (...skipping 21 matching lines...) Expand all Loading... |
| 5689 // Grab that for the second argument to the hook. | 5278 // Grab that for the second argument to the hook. |
| 5690 __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize)); | 5279 __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize)); |
| 5691 | 5280 |
| 5692 // Align the stack if necessary. | 5281 // Align the stack if necessary. |
| 5693 int frame_alignment = masm->ActivationFrameAlignment(); | 5282 int frame_alignment = masm->ActivationFrameAlignment(); |
| 5694 if (frame_alignment > kPointerSize) { | 5283 if (frame_alignment > kPointerSize) { |
| 5695 __ mov(s5, sp); | 5284 __ mov(s5, sp); |
| 5696 ASSERT(IsPowerOf2(frame_alignment)); | 5285 ASSERT(IsPowerOf2(frame_alignment)); |
| 5697 __ And(sp, sp, Operand(-frame_alignment)); | 5286 __ And(sp, sp, Operand(-frame_alignment)); |
| 5698 } | 5287 } |
| 5699 | 5288 __ Subu(sp, sp, kCArgsSlotsSize); |
| 5700 #if defined(V8_HOST_ARCH_MIPS) | 5289 #if defined(V8_HOST_ARCH_MIPS) |
| 5701 int32_t entry_hook = | 5290 int32_t entry_hook = |
| 5702 reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook()); | 5291 reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook()); |
| 5703 __ li(at, Operand(entry_hook)); | 5292 __ li(t9, Operand(entry_hook)); |
| 5704 #else | 5293 #else |
| 5705 // Under the simulator we need to indirect the entry hook through a | 5294 // Under the simulator we need to indirect the entry hook through a |
| 5706 // trampoline function at a known address. | 5295 // trampoline function at a known address. |
| 5707 // It additionally takes an isolate as a third parameter. | 5296 // It additionally takes an isolate as a third parameter. |
| 5708 __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate()))); | 5297 __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate()))); |
| 5709 | 5298 |
| 5710 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline)); | 5299 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline)); |
| 5711 __ li(at, Operand(ExternalReference(&dispatcher, | 5300 __ li(t9, Operand(ExternalReference(&dispatcher, |
| 5712 ExternalReference::BUILTIN_CALL, | 5301 ExternalReference::BUILTIN_CALL, |
| 5713 masm->isolate()))); | 5302 masm->isolate()))); |
| 5714 #endif | 5303 #endif |
| 5715 __ Call(at); | 5304 // Call C function through t9 to conform ABI for PIC. |
| 5305 __ Call(t9); |
| 5716 | 5306 |
| 5717 // Restore the stack pointer if needed. | 5307 // Restore the stack pointer if needed. |
| 5718 if (frame_alignment > kPointerSize) { | 5308 if (frame_alignment > kPointerSize) { |
| 5719 __ mov(sp, s5); | 5309 __ mov(sp, s5); |
| 5310 } else { |
| 5311 __ Addu(sp, sp, kCArgsSlotsSize); |
| 5720 } | 5312 } |
| 5721 | 5313 |
| 5722 // Also pop ra to get Ret(0). | 5314 // Also pop ra to get Ret(0). |
| 5723 __ MultiPop(kSavedRegs | ra.bit()); | 5315 __ MultiPop(kSavedRegs | ra.bit()); |
| 5724 __ Ret(); | 5316 __ Ret(); |
| 5725 } | 5317 } |
| 5726 | 5318 |
| 5727 | 5319 |
| 5728 template<class T> | 5320 template<class T> |
| 5729 static void CreateArrayDispatch(MacroAssembler* masm, | 5321 static void CreateArrayDispatch(MacroAssembler* masm, |
| (...skipping 13 matching lines...) Expand all Loading... |
| 5743 // If we reached this point there is a problem. | 5335 // If we reached this point there is a problem. |
| 5744 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 5336 __ Abort(kUnexpectedElementsKindInArrayConstructor); |
| 5745 } else { | 5337 } else { |
| 5746 UNREACHABLE(); | 5338 UNREACHABLE(); |
| 5747 } | 5339 } |
| 5748 } | 5340 } |
| 5749 | 5341 |
| 5750 | 5342 |
| 5751 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, | 5343 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, |
| 5752 AllocationSiteOverrideMode mode) { | 5344 AllocationSiteOverrideMode mode) { |
| 5753 // a2 - type info cell (if mode != DISABLE_ALLOCATION_SITES) | 5345 // a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES) |
| 5754 // a3 - kind (if mode != DISABLE_ALLOCATION_SITES) | 5346 // a3 - kind (if mode != DISABLE_ALLOCATION_SITES) |
| 5755 // a0 - number of arguments | 5347 // a0 - number of arguments |
| 5756 // a1 - constructor? | 5348 // a1 - constructor? |
| 5757 // sp[0] - last argument | 5349 // sp[0] - last argument |
| 5758 Label normal_sequence; | 5350 Label normal_sequence; |
| 5759 if (mode == DONT_OVERRIDE) { | 5351 if (mode == DONT_OVERRIDE) { |
| 5760 ASSERT(FAST_SMI_ELEMENTS == 0); | 5352 ASSERT(FAST_SMI_ELEMENTS == 0); |
| 5761 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 5353 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); |
| 5762 ASSERT(FAST_ELEMENTS == 2); | 5354 ASSERT(FAST_ELEMENTS == 2); |
| 5763 ASSERT(FAST_HOLEY_ELEMENTS == 3); | 5355 ASSERT(FAST_HOLEY_ELEMENTS == 3); |
| (...skipping 16 matching lines...) Expand all Loading... |
| 5780 ArraySingleArgumentConstructorStub stub_holey(holey_initial, | 5372 ArraySingleArgumentConstructorStub stub_holey(holey_initial, |
| 5781 DISABLE_ALLOCATION_SITES); | 5373 DISABLE_ALLOCATION_SITES); |
| 5782 __ TailCallStub(&stub_holey); | 5374 __ TailCallStub(&stub_holey); |
| 5783 | 5375 |
| 5784 __ bind(&normal_sequence); | 5376 __ bind(&normal_sequence); |
| 5785 ArraySingleArgumentConstructorStub stub(initial, | 5377 ArraySingleArgumentConstructorStub stub(initial, |
| 5786 DISABLE_ALLOCATION_SITES); | 5378 DISABLE_ALLOCATION_SITES); |
| 5787 __ TailCallStub(&stub); | 5379 __ TailCallStub(&stub); |
| 5788 } else if (mode == DONT_OVERRIDE) { | 5380 } else if (mode == DONT_OVERRIDE) { |
| 5789 // We are going to create a holey array, but our kind is non-holey. | 5381 // We are going to create a holey array, but our kind is non-holey. |
| 5790 // Fix kind and retry (only if we have an allocation site in the cell). | 5382 // Fix kind and retry (only if we have an allocation site in the slot). |
| 5791 __ Addu(a3, a3, Operand(1)); | 5383 __ Addu(a3, a3, Operand(1)); |
| 5792 __ lw(t1, FieldMemOperand(a2, Cell::kValueOffset)); | |
| 5793 | 5384 |
| 5794 if (FLAG_debug_code) { | 5385 if (FLAG_debug_code) { |
| 5795 __ lw(t1, FieldMemOperand(t1, 0)); | 5386 __ lw(t1, FieldMemOperand(a2, 0)); |
| 5796 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 5387 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 5797 __ Assert(eq, kExpectedAllocationSiteInCell, t1, Operand(at)); | 5388 __ Assert(eq, kExpectedAllocationSite, t1, Operand(at)); |
| 5798 __ lw(t1, FieldMemOperand(a2, Cell::kValueOffset)); | |
| 5799 } | 5389 } |
| 5800 | 5390 |
| 5801 // Save the resulting elements kind in type info. We can't just store a3 | 5391 // Save the resulting elements kind in type info. We can't just store a3 |
| 5802 // in the AllocationSite::transition_info field because elements kind is | 5392 // in the AllocationSite::transition_info field because elements kind is |
| 5803 // restricted to a portion of the field...upper bits need to be left alone. | 5393 // restricted to a portion of the field...upper bits need to be left alone. |
| 5804 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 5394 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
| 5805 __ lw(t0, FieldMemOperand(t1, AllocationSite::kTransitionInfoOffset)); | 5395 __ lw(t0, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); |
| 5806 __ Addu(t0, t0, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); | 5396 __ Addu(t0, t0, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); |
| 5807 __ sw(t0, FieldMemOperand(t1, AllocationSite::kTransitionInfoOffset)); | 5397 __ sw(t0, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); |
| 5808 | 5398 |
| 5809 | 5399 |
| 5810 __ bind(&normal_sequence); | 5400 __ bind(&normal_sequence); |
| 5811 int last_index = GetSequenceIndexFromFastElementsKind( | 5401 int last_index = GetSequenceIndexFromFastElementsKind( |
| 5812 TERMINAL_FAST_ELEMENTS_KIND); | 5402 TERMINAL_FAST_ELEMENTS_KIND); |
| 5813 for (int i = 0; i <= last_index; ++i) { | 5403 for (int i = 0; i <= last_index; ++i) { |
| 5814 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 5404 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
| 5815 ArraySingleArgumentConstructorStub stub(kind); | 5405 ArraySingleArgumentConstructorStub stub(kind); |
| 5816 __ TailCallStub(&stub, eq, a3, Operand(kind)); | 5406 __ TailCallStub(&stub, eq, a3, Operand(kind)); |
| 5817 } | 5407 } |
| (...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5889 } else { | 5479 } else { |
| 5890 UNREACHABLE(); | 5480 UNREACHABLE(); |
| 5891 } | 5481 } |
| 5892 } | 5482 } |
| 5893 | 5483 |
| 5894 | 5484 |
| 5895 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 5485 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
| 5896 // ----------- S t a t e ------------- | 5486 // ----------- S t a t e ------------- |
| 5897 // -- a0 : argc (only if argument_count_ == ANY) | 5487 // -- a0 : argc (only if argument_count_ == ANY) |
| 5898 // -- a1 : constructor | 5488 // -- a1 : constructor |
| 5899 // -- a2 : type info cell | 5489 // -- a2 : feedback vector (fixed array or undefined) |
| 5490 // -- a3 : slot index (if a2 is fixed array) |
| 5900 // -- sp[0] : return address | 5491 // -- sp[0] : return address |
| 5901 // -- sp[4] : last argument | 5492 // -- sp[4] : last argument |
| 5902 // ----------------------------------- | 5493 // ----------------------------------- |
| 5903 if (FLAG_debug_code) { | 5494 if (FLAG_debug_code) { |
| 5904 // The array construct code is only set for the global and natives | 5495 // The array construct code is only set for the global and natives |
| 5905 // builtin Array functions which always have maps. | 5496 // builtin Array functions which always have maps. |
| 5906 | 5497 |
| 5907 // Initial map for the builtin Array function should be a map. | 5498 // Initial map for the builtin Array function should be a map. |
| 5908 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 5499 __ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 5909 // Will both indicate a NULL and a Smi. | 5500 // Will both indicate a NULL and a Smi. |
| 5910 __ SmiTst(a3, at); | 5501 __ SmiTst(t0, at); |
| 5911 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, | 5502 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, |
| 5912 at, Operand(zero_reg)); | 5503 at, Operand(zero_reg)); |
| 5913 __ GetObjectType(a3, a3, t0); | 5504 __ GetObjectType(t0, t0, t1); |
| 5914 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, | 5505 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, |
| 5915 t0, Operand(MAP_TYPE)); | 5506 t1, Operand(MAP_TYPE)); |
| 5916 | 5507 |
| 5917 // We should either have undefined in a2 or a valid cell. | 5508 // We should either have undefined in a2 or a valid fixed array. |
| 5918 Label okay_here; | 5509 Label okay_here; |
| 5919 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 5510 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map(); |
| 5920 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5511 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 5921 __ Branch(&okay_here, eq, a2, Operand(at)); | 5512 __ Branch(&okay_here, eq, a2, Operand(at)); |
| 5922 __ lw(a3, FieldMemOperand(a2, 0)); | 5513 __ lw(t0, FieldMemOperand(a2, 0)); |
| 5923 __ Assert(eq, kExpectedPropertyCellInRegisterA2, | 5514 __ Assert(eq, kExpectedFixedArrayInRegisterA2, |
| 5924 a3, Operand(cell_map)); | 5515 t0, Operand(fixed_array_map)); |
| 5516 |
| 5517 // a3 should be a smi if we don't have undefined in a2 |
| 5518 __ AssertSmi(a3); |
| 5519 |
| 5925 __ bind(&okay_here); | 5520 __ bind(&okay_here); |
| 5926 } | 5521 } |
| 5927 | 5522 |
| 5928 Label no_info; | 5523 Label no_info; |
| 5929 // Get the elements kind and case on that. | 5524 // Get the elements kind and case on that. |
| 5930 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5525 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 5931 __ Branch(&no_info, eq, a2, Operand(at)); | 5526 __ Branch(&no_info, eq, a2, Operand(at)); |
| 5932 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 5527 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 5528 __ Addu(a2, a2, Operand(t0)); |
| 5529 __ lw(a2, FieldMemOperand(a2, FixedArray::kHeaderSize)); |
| 5933 | 5530 |
| 5934 // If the type cell is undefined, or contains anything other than an | 5531 // If the feedback vector is undefined, or contains anything other than an |
| 5935 // AllocationSite, call an array constructor that doesn't use AllocationSites. | 5532 // AllocationSite, call an array constructor that doesn't use AllocationSites. |
| 5936 __ lw(t0, FieldMemOperand(a3, 0)); | 5533 __ lw(t0, FieldMemOperand(a2, 0)); |
| 5937 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 5534 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 5938 __ Branch(&no_info, ne, t0, Operand(at)); | 5535 __ Branch(&no_info, ne, t0, Operand(at)); |
| 5939 | 5536 |
| 5940 __ lw(a3, FieldMemOperand(a3, AllocationSite::kTransitionInfoOffset)); | 5537 __ lw(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); |
| 5941 __ SmiUntag(a3); | 5538 __ SmiUntag(a3); |
| 5942 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 5539 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
| 5943 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); | 5540 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); |
| 5944 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 5541 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
| 5945 | 5542 |
| 5946 __ bind(&no_info); | 5543 __ bind(&no_info); |
| 5947 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); | 5544 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); |
| 5948 } | 5545 } |
| 5949 | 5546 |
| 5950 | 5547 |
| (...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6015 | 5612 |
| 6016 Label fast_elements_case; | 5613 Label fast_elements_case; |
| 6017 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); | 5614 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); |
| 6018 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 5615 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
| 6019 | 5616 |
| 6020 __ bind(&fast_elements_case); | 5617 __ bind(&fast_elements_case); |
| 6021 GenerateCase(masm, FAST_ELEMENTS); | 5618 GenerateCase(masm, FAST_ELEMENTS); |
| 6022 } | 5619 } |
| 6023 | 5620 |
| 6024 | 5621 |
| 5622 void CallApiFunctionStub::Generate(MacroAssembler* masm) { |
| 5623 // ----------- S t a t e ------------- |
| 5624 // -- a0 : callee |
| 5625 // -- t0 : call_data |
| 5626 // -- a2 : holder |
| 5627 // -- a1 : api_function_address |
| 5628 // -- cp : context |
| 5629 // -- |
| 5630 // -- sp[0] : last argument |
| 5631 // -- ... |
| 5632 // -- sp[(argc - 1)* 4] : first argument |
| 5633 // -- sp[argc * 4] : receiver |
| 5634 // ----------------------------------- |
| 5635 |
| 5636 Register callee = a0; |
| 5637 Register call_data = t0; |
| 5638 Register holder = a2; |
| 5639 Register api_function_address = a1; |
| 5640 Register context = cp; |
| 5641 |
| 5642 int argc = ArgumentBits::decode(bit_field_); |
| 5643 bool is_store = IsStoreBits::decode(bit_field_); |
| 5644 bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_); |
| 5645 |
| 5646 typedef FunctionCallbackArguments FCA; |
| 5647 |
| 5648 STATIC_ASSERT(FCA::kContextSaveIndex == 6); |
| 5649 STATIC_ASSERT(FCA::kCalleeIndex == 5); |
| 5650 STATIC_ASSERT(FCA::kDataIndex == 4); |
| 5651 STATIC_ASSERT(FCA::kReturnValueOffset == 3); |
| 5652 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2); |
| 5653 STATIC_ASSERT(FCA::kIsolateIndex == 1); |
| 5654 STATIC_ASSERT(FCA::kHolderIndex == 0); |
| 5655 STATIC_ASSERT(FCA::kArgsLength == 7); |
| 5656 |
| 5657 Isolate* isolate = masm->isolate(); |
| 5658 |
| 5659 // Save context, callee and call data. |
| 5660 __ Push(context, callee, call_data); |
| 5661 // Load context from callee. |
| 5662 __ lw(context, FieldMemOperand(callee, JSFunction::kContextOffset)); |
| 5663 |
| 5664 Register scratch = call_data; |
| 5665 if (!call_data_undefined) { |
| 5666 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); |
| 5667 } |
| 5668 // Push return value and default return value. |
| 5669 __ Push(scratch, scratch); |
| 5670 __ li(scratch, |
| 5671 Operand(ExternalReference::isolate_address(isolate))); |
| 5672 // Push isolate and holder. |
| 5673 __ Push(scratch, holder); |
| 5674 |
| 5675 // Prepare arguments. |
| 5676 __ mov(scratch, sp); |
| 5677 |
| 5678 // Allocate the v8::Arguments structure in the arguments' space since |
| 5679 // it's not controlled by GC. |
| 5680 const int kApiStackSpace = 4; |
| 5681 |
| 5682 FrameScope frame_scope(masm, StackFrame::MANUAL); |
| 5683 __ EnterExitFrame(false, kApiStackSpace); |
| 5684 |
| 5685 ASSERT(!api_function_address.is(a0) && !scratch.is(a0)); |
| 5686 // a0 = FunctionCallbackInfo& |
| 5687 // Arguments is after the return address. |
| 5688 __ Addu(a0, sp, Operand(1 * kPointerSize)); |
| 5689 // FunctionCallbackInfo::implicit_args_ |
| 5690 __ sw(scratch, MemOperand(a0, 0 * kPointerSize)); |
| 5691 // FunctionCallbackInfo::values_ |
| 5692 __ Addu(at, scratch, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize)); |
| 5693 __ sw(at, MemOperand(a0, 1 * kPointerSize)); |
| 5694 // FunctionCallbackInfo::length_ = argc |
| 5695 __ li(at, Operand(argc)); |
| 5696 __ sw(at, MemOperand(a0, 2 * kPointerSize)); |
| 5697 // FunctionCallbackInfo::is_construct_call = 0 |
| 5698 __ sw(zero_reg, MemOperand(a0, 3 * kPointerSize)); |
| 5699 |
| 5700 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1; |
| 5701 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); |
| 5702 ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL; |
| 5703 ApiFunction thunk_fun(thunk_address); |
| 5704 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, |
| 5705 masm->isolate()); |
| 5706 |
| 5707 AllowExternalCallThatCantCauseGC scope(masm); |
| 5708 MemOperand context_restore_operand( |
| 5709 fp, (2 + FCA::kContextSaveIndex) * kPointerSize); |
| 5710 // Stores return the first js argument. |
| 5711 int return_value_offset = 0; |
| 5712 if (is_store) { |
| 5713 return_value_offset = 2 + FCA::kArgsLength; |
| 5714 } else { |
| 5715 return_value_offset = 2 + FCA::kReturnValueOffset; |
| 5716 } |
| 5717 MemOperand return_value_operand(fp, return_value_offset * kPointerSize); |
| 5718 |
| 5719 __ CallApiFunctionAndReturn(api_function_address, |
| 5720 thunk_ref, |
| 5721 kStackUnwindSpace, |
| 5722 return_value_operand, |
| 5723 &context_restore_operand); |
| 5724 } |
| 5725 |
| 5726 |
| 5727 void CallApiGetterStub::Generate(MacroAssembler* masm) { |
| 5728 // ----------- S t a t e ------------- |
| 5729 // -- sp[0] : name |
| 5730 // -- sp[4 - kArgsLength*4] : PropertyCallbackArguments object |
| 5731 // -- ... |
| 5732 // -- a2 : api_function_address |
| 5733 // ----------------------------------- |
| 5734 |
| 5735 Register api_function_address = a2; |
| 5736 |
| 5737 __ mov(a0, sp); // a0 = Handle<Name> |
| 5738 __ Addu(a1, a0, Operand(1 * kPointerSize)); // a1 = PCA |
| 5739 |
| 5740 const int kApiStackSpace = 1; |
| 5741 FrameScope frame_scope(masm, StackFrame::MANUAL); |
| 5742 __ EnterExitFrame(false, kApiStackSpace); |
| 5743 |
| 5744 // Create PropertyAccessorInfo instance on the stack above the exit frame with |
| 5745 // a1 (internal::Object** args_) as the data. |
| 5746 __ sw(a1, MemOperand(sp, 1 * kPointerSize)); |
| 5747 __ Addu(a1, sp, Operand(1 * kPointerSize)); // a1 = AccessorInfo& |
| 5748 |
| 5749 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1; |
| 5750 |
| 5751 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); |
| 5752 ExternalReference::Type thunk_type = |
| 5753 ExternalReference::PROFILING_GETTER_CALL; |
| 5754 ApiFunction thunk_fun(thunk_address); |
| 5755 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, |
| 5756 masm->isolate()); |
| 5757 __ CallApiFunctionAndReturn(api_function_address, |
| 5758 thunk_ref, |
| 5759 kStackUnwindSpace, |
| 5760 MemOperand(fp, 6 * kPointerSize), |
| 5761 NULL); |
| 5762 } |
| 5763 |
| 5764 |
| 6025 #undef __ | 5765 #undef __ |
| 6026 | 5766 |
| 6027 } } // namespace v8::internal | 5767 } } // namespace v8::internal |
| 6028 | 5768 |
| 6029 #endif // V8_TARGET_ARCH_MIPS | 5769 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |