| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 25 matching lines...) Expand all Loading... |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 | 39 |
| 40 // When invoking builtins, we need to record the safepoint in the middle of | 40 // When invoking builtins, we need to record the safepoint in the middle of |
| 41 // the invoke instruction sequence generated by the macro assembler. | 41 // the invoke instruction sequence generated by the macro assembler. |
| 42 class SafepointGenerator : public PostCallGenerator { | 42 class SafepointGenerator : public PostCallGenerator { |
| 43 public: | 43 public: |
| 44 SafepointGenerator(LCodeGen* codegen, | 44 SafepointGenerator(LCodeGen* codegen, |
| 45 LPointerMap* pointers, | 45 LPointerMap* pointers, |
| 46 int deoptimization_index) | 46 int deoptimization_index, |
| 47 bool ensure_reloc_space = false) |
| 47 : codegen_(codegen), | 48 : codegen_(codegen), |
| 48 pointers_(pointers), | 49 pointers_(pointers), |
| 49 deoptimization_index_(deoptimization_index) { } | 50 deoptimization_index_(deoptimization_index), |
| 51 ensure_reloc_space_(ensure_reloc_space) { } |
| 50 virtual ~SafepointGenerator() { } | 52 virtual ~SafepointGenerator() { } |
| 51 | 53 |
| 52 virtual void Generate() { | 54 virtual void Generate() { |
| 55 // Ensure that we have enough space in the reloc info to patch |
| 56 // this with calls when doing deoptimization. |
| 57 if (ensure_reloc_space_) { |
| 58 codegen_->EnsureRelocSpaceForDeoptimization(); |
| 59 } |
| 53 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 60 codegen_->RecordSafepoint(pointers_, deoptimization_index_); |
| 54 } | 61 } |
| 55 | 62 |
| 56 private: | 63 private: |
| 57 LCodeGen* codegen_; | 64 LCodeGen* codegen_; |
| 58 LPointerMap* pointers_; | 65 LPointerMap* pointers_; |
| 59 int deoptimization_index_; | 66 int deoptimization_index_; |
| 67 bool ensure_reloc_space_; |
| 60 }; | 68 }; |
| 61 | 69 |
| 62 | 70 |
| 63 #define __ masm()-> | 71 #define __ masm()-> |
| 64 | 72 |
| 65 bool LCodeGen::GenerateCode() { | 73 bool LCodeGen::GenerateCode() { |
| 66 HPhase phase("Code generation", chunk()); | 74 HPhase phase("Code generation", chunk()); |
| 67 ASSERT(is_unused()); | 75 ASSERT(is_unused()); |
| 68 status_ = GENERATING; | 76 status_ = GENERATING; |
| 69 CpuFeatures::Scope scope(SSE2); | 77 CpuFeatures::Scope scope(SSE2); |
| 70 return GeneratePrologue() && | 78 return GeneratePrologue() && |
| 71 GenerateBody() && | 79 GenerateBody() && |
| 72 GenerateDeferredCode() && | 80 GenerateDeferredCode() && |
| 81 GenerateRelocPadding() && |
| 73 GenerateSafepointTable(); | 82 GenerateSafepointTable(); |
| 74 } | 83 } |
| 75 | 84 |
| 76 | 85 |
| 77 void LCodeGen::FinishCode(Handle<Code> code) { | 86 void LCodeGen::FinishCode(Handle<Code> code) { |
| 78 ASSERT(is_done()); | 87 ASSERT(is_done()); |
| 79 code->set_stack_slots(StackSlotCount()); | 88 code->set_stack_slots(StackSlotCount()); |
| 80 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 89 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 81 PopulateDeoptimizationData(code); | 90 PopulateDeoptimizationData(code); |
| 82 } | 91 } |
| (...skipping 24 matching lines...) Expand all Loading... |
| 107 | 116 |
| 108 // Copy the string before recording it in the assembler to avoid | 117 // Copy the string before recording it in the assembler to avoid |
| 109 // issues when the stack allocated buffer goes out of scope. | 118 // issues when the stack allocated buffer goes out of scope. |
| 110 size_t length = builder.position(); | 119 size_t length = builder.position(); |
| 111 Vector<char> copy = Vector<char>::New(length + 1); | 120 Vector<char> copy = Vector<char>::New(length + 1); |
| 112 memcpy(copy.start(), builder.Finalize(), copy.length()); | 121 memcpy(copy.start(), builder.Finalize(), copy.length()); |
| 113 masm()->RecordComment(copy.start()); | 122 masm()->RecordComment(copy.start()); |
| 114 } | 123 } |
| 115 | 124 |
| 116 | 125 |
| 126 bool LCodeGen::GenerateRelocPadding() { |
| 127 int reloc_size = masm()->relocation_writer_size(); |
| 128 while (reloc_size < deoptimization_reloc_size.min_size) { |
| 129 __ RecordComment(RelocInfo::kFillerCommentString, true); |
| 130 reloc_size += RelocInfo::kMinRelocCommentSize; |
| 131 } |
| 132 return !is_aborted(); |
| 133 } |
| 134 |
| 135 |
| 117 bool LCodeGen::GeneratePrologue() { | 136 bool LCodeGen::GeneratePrologue() { |
| 118 ASSERT(is_generating()); | 137 ASSERT(is_generating()); |
| 119 | 138 |
| 120 #ifdef DEBUG | 139 #ifdef DEBUG |
| 121 if (strlen(FLAG_stop_at) > 0 && | 140 if (strlen(FLAG_stop_at) > 0 && |
| 122 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { | 141 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
| 123 __ int3(); | 142 __ int3(); |
| 124 } | 143 } |
| 125 #endif | 144 #endif |
| 126 | 145 |
| (...skipping 21 matching lines...) Expand all Loading... |
| 148 const int kPageSize = 4 * KB; | 167 const int kPageSize = 4 * KB; |
| 149 for (int offset = slots * kPointerSize - kPageSize; | 168 for (int offset = slots * kPointerSize - kPageSize; |
| 150 offset > 0; | 169 offset > 0; |
| 151 offset -= kPageSize) { | 170 offset -= kPageSize) { |
| 152 __ mov(Operand(esp, offset), eax); | 171 __ mov(Operand(esp, offset), eax); |
| 153 } | 172 } |
| 154 #endif | 173 #endif |
| 155 } | 174 } |
| 156 } | 175 } |
| 157 | 176 |
| 177 // Possibly allocate a local context. |
| 178 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
| 179 if (heap_slots > 0) { |
| 180 Comment(";;; Allocate local context"); |
| 181 // Argument to NewContext is the function, which is still in edi. |
| 182 __ push(edi); |
| 183 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 184 FastNewContextStub stub(heap_slots); |
| 185 __ CallStub(&stub); |
| 186 } else { |
| 187 __ CallRuntime(Runtime::kNewContext, 1); |
| 188 } |
| 189 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); |
| 190 // Context is returned in both eax and esi. It replaces the context |
| 191 // passed to us. It's saved in the stack and kept live in esi. |
| 192 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); |
| 193 |
| 194 // Copy parameters into context if necessary. |
| 195 int num_parameters = scope()->num_parameters(); |
| 196 for (int i = 0; i < num_parameters; i++) { |
| 197 Slot* slot = scope()->parameter(i)->AsSlot(); |
| 198 if (slot != NULL && slot->type() == Slot::CONTEXT) { |
| 199 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
| 200 (num_parameters - 1 - i) * kPointerSize; |
| 201 // Load parameter from stack. |
| 202 __ mov(eax, Operand(ebp, parameter_offset)); |
| 203 // Store it in the context. |
| 204 int context_offset = Context::SlotOffset(slot->index()); |
| 205 __ mov(Operand(esi, context_offset), eax); |
| 206 // Update the write barrier. This clobbers all involved |
| 207 // registers, so we have to use a third register to avoid |
| 208 // clobbering esi. |
| 209 __ mov(ecx, esi); |
| 210 __ RecordWrite(ecx, context_offset, eax, ebx); |
| 211 } |
| 212 } |
| 213 Comment(";;; End allocate local context"); |
| 214 } |
| 215 |
| 158 // Trace the call. | 216 // Trace the call. |
| 159 if (FLAG_trace) { | 217 if (FLAG_trace) { |
| 218 // We have not executed any compiled code yet, so esi still holds the |
| 219 // incoming context. |
| 160 __ CallRuntime(Runtime::kTraceEnter, 0); | 220 __ CallRuntime(Runtime::kTraceEnter, 0); |
| 161 } | 221 } |
| 162 return !is_aborted(); | 222 return !is_aborted(); |
| 163 } | 223 } |
| 164 | 224 |
| 165 | 225 |
| 166 bool LCodeGen::GenerateBody() { | 226 bool LCodeGen::GenerateBody() { |
| 167 ASSERT(is_generating()); | 227 ASSERT(is_generating()); |
| 168 bool emit_instructions = true; | 228 bool emit_instructions = true; |
| 169 for (current_instruction_ = 0; | 229 for (current_instruction_ = 0; |
| (...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 318 environment->spilled_double_registers()[value->index()], | 378 environment->spilled_double_registers()[value->index()], |
| 319 false); | 379 false); |
| 320 } | 380 } |
| 321 } | 381 } |
| 322 | 382 |
| 323 AddToTranslation(translation, value, environment->HasTaggedValueAt(i)); | 383 AddToTranslation(translation, value, environment->HasTaggedValueAt(i)); |
| 324 } | 384 } |
| 325 } | 385 } |
| 326 | 386 |
| 327 | 387 |
| 388 void LCodeGen::EnsureRelocSpaceForDeoptimization() { |
| 389 // Since we patch the reloc info with RUNTIME_ENTRY calls, every patch |
| 390 // site will take up 2 bytes + any pc-jumps. |
| 391 // We are conservative and always reserve 6 bytes in cases where a |
| 392 // simple pc-jump is not enough. |
| 393 uint32_t pc_delta = |
| 394 masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset; |
| 395 if (is_uintn(pc_delta, 6)) { |
| 396 deoptimization_reloc_size.min_size += 2; |
| 397 } else { |
| 398 deoptimization_reloc_size.min_size += 6; |
| 399 } |
| 400 deoptimization_reloc_size.last_pc_offset = masm()->pc_offset(); |
| 401 } |
| 402 |
| 403 |
| 328 void LCodeGen::AddToTranslation(Translation* translation, | 404 void LCodeGen::AddToTranslation(Translation* translation, |
| 329 LOperand* op, | 405 LOperand* op, |
| 330 bool is_tagged) { | 406 bool is_tagged) { |
| 331 if (op == NULL) { | 407 if (op == NULL) { |
| 332 // TODO(twuerthinger): Introduce marker operands to indicate that this value | 408 // TODO(twuerthinger): Introduce marker operands to indicate that this value |
| 333 // is not present and must be reconstructed from the deoptimizer. Currently | 409 // is not present and must be reconstructed from the deoptimizer. Currently |
| 334 // this is only used for the arguments object. | 410 // this is only used for the arguments object. |
| 335 translation->StoreArgumentsObject(); | 411 translation->StoreArgumentsObject(); |
| 336 } else if (op->IsStackSlot()) { | 412 } else if (op->IsStackSlot()) { |
| 337 if (is_tagged) { | 413 if (is_tagged) { |
| (...skipping 22 matching lines...) Expand all Loading... |
| 360 int src_index = DefineDeoptimizationLiteral(literal); | 436 int src_index = DefineDeoptimizationLiteral(literal); |
| 361 translation->StoreLiteral(src_index); | 437 translation->StoreLiteral(src_index); |
| 362 } else { | 438 } else { |
| 363 UNREACHABLE(); | 439 UNREACHABLE(); |
| 364 } | 440 } |
| 365 } | 441 } |
| 366 | 442 |
| 367 | 443 |
| 368 void LCodeGen::CallCode(Handle<Code> code, | 444 void LCodeGen::CallCode(Handle<Code> code, |
| 369 RelocInfo::Mode mode, | 445 RelocInfo::Mode mode, |
| 370 LInstruction* instr) { | 446 LInstruction* instr, |
| 447 bool adjusted) { |
| 371 ASSERT(instr != NULL); | 448 ASSERT(instr != NULL); |
| 372 LPointerMap* pointers = instr->pointer_map(); | 449 LPointerMap* pointers = instr->pointer_map(); |
| 373 RecordPosition(pointers->position()); | 450 RecordPosition(pointers->position()); |
| 451 |
| 452 if (!adjusted) { |
| 453 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 454 } |
| 374 __ call(code, mode); | 455 __ call(code, mode); |
| 456 |
| 457 EnsureRelocSpaceForDeoptimization(); |
| 375 RegisterLazyDeoptimization(instr); | 458 RegisterLazyDeoptimization(instr); |
| 376 | 459 |
| 377 // Signal that we don't inline smi code before these stubs in the | 460 // Signal that we don't inline smi code before these stubs in the |
| 378 // optimizing code generator. | 461 // optimizing code generator. |
| 379 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 462 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
| 380 code->kind() == Code::COMPARE_IC) { | 463 code->kind() == Code::COMPARE_IC) { |
| 381 __ nop(); | 464 __ nop(); |
| 382 } | 465 } |
| 383 } | 466 } |
| 384 | 467 |
| 385 | 468 |
| 386 void LCodeGen::CallRuntime(const Runtime::Function* function, | 469 void LCodeGen::CallRuntime(const Runtime::Function* fun, |
| 387 int num_arguments, | 470 int argc, |
| 388 LInstruction* instr) { | 471 LInstruction* instr, |
| 472 bool adjusted) { |
| 389 ASSERT(instr != NULL); | 473 ASSERT(instr != NULL); |
| 390 ASSERT(instr->HasPointerMap()); | 474 ASSERT(instr->HasPointerMap()); |
| 391 LPointerMap* pointers = instr->pointer_map(); | 475 LPointerMap* pointers = instr->pointer_map(); |
| 392 RecordPosition(pointers->position()); | 476 RecordPosition(pointers->position()); |
| 393 | 477 |
| 394 __ CallRuntime(function, num_arguments); | 478 if (!adjusted) { |
| 479 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 480 } |
| 481 __ CallRuntime(fun, argc); |
| 395 RegisterLazyDeoptimization(instr); | 482 RegisterLazyDeoptimization(instr); |
| 396 } | 483 } |
| 397 | 484 |
| 398 | 485 |
| 399 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 486 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { |
| 400 // Create the environment to bailout to. If the call has side effects | 487 // Create the environment to bailout to. If the call has side effects |
| 401 // execution has to continue after the call otherwise execution can continue | 488 // execution has to continue after the call otherwise execution can continue |
| 402 // from a previous bailout point repeating the call. | 489 // from a previous bailout point repeating the call. |
| 403 LEnvironment* deoptimization_environment; | 490 LEnvironment* deoptimization_environment; |
| 404 if (instr->HasDeoptimizationEnvironment()) { | 491 if (instr->HasDeoptimizationEnvironment()) { |
| (...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 494 } | 581 } |
| 495 | 582 |
| 496 | 583 |
| 497 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 584 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
| 498 int length = deoptimizations_.length(); | 585 int length = deoptimizations_.length(); |
| 499 if (length == 0) return; | 586 if (length == 0) return; |
| 500 ASSERT(FLAG_deopt); | 587 ASSERT(FLAG_deopt); |
| 501 Handle<DeoptimizationInputData> data = | 588 Handle<DeoptimizationInputData> data = |
| 502 FACTORY->NewDeoptimizationInputData(length, TENURED); | 589 FACTORY->NewDeoptimizationInputData(length, TENURED); |
| 503 | 590 |
| 504 data->SetTranslationByteArray(*translations_.CreateByteArray()); | 591 Handle<ByteArray> translations = translations_.CreateByteArray(); |
| 592 data->SetTranslationByteArray(*translations); |
| 505 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); | 593 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); |
| 506 | 594 |
| 507 Handle<FixedArray> literals = | 595 Handle<FixedArray> literals = |
| 508 FACTORY->NewFixedArray(deoptimization_literals_.length(), TENURED); | 596 FACTORY->NewFixedArray(deoptimization_literals_.length(), TENURED); |
| 509 for (int i = 0; i < deoptimization_literals_.length(); i++) { | 597 for (int i = 0; i < deoptimization_literals_.length(); i++) { |
| 510 literals->set(i, *deoptimization_literals_[i]); | 598 literals->set(i, *deoptimization_literals_[i]); |
| 511 } | 599 } |
| 512 data->SetLiteralArray(*literals); | 600 data->SetLiteralArray(*literals); |
| 513 | 601 |
| 514 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); | 602 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); |
| (...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 561 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 649 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 562 kind, arguments, deoptimization_index); | 650 kind, arguments, deoptimization_index); |
| 563 for (int i = 0; i < operands->length(); i++) { | 651 for (int i = 0; i < operands->length(); i++) { |
| 564 LOperand* pointer = operands->at(i); | 652 LOperand* pointer = operands->at(i); |
| 565 if (pointer->IsStackSlot()) { | 653 if (pointer->IsStackSlot()) { |
| 566 safepoint.DefinePointerSlot(pointer->index()); | 654 safepoint.DefinePointerSlot(pointer->index()); |
| 567 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 655 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 568 safepoint.DefinePointerRegister(ToRegister(pointer)); | 656 safepoint.DefinePointerRegister(ToRegister(pointer)); |
| 569 } | 657 } |
| 570 } | 658 } |
| 571 if (kind & Safepoint::kWithRegisters) { | |
| 572 // Register esi always contains a pointer to the context. | |
| 573 safepoint.DefinePointerRegister(esi); | |
| 574 } | |
| 575 } | 659 } |
| 576 | 660 |
| 577 | 661 |
| 578 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 662 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
| 579 int deoptimization_index) { | 663 int deoptimization_index) { |
| 580 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); | 664 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); |
| 581 } | 665 } |
| 582 | 666 |
| 583 | 667 |
| 668 void LCodeGen::RecordSafepoint(int deoptimization_index) { |
| 669 LPointerMap empty_pointers(RelocInfo::kNoPosition); |
| 670 RecordSafepoint(&empty_pointers, deoptimization_index); |
| 671 } |
| 672 |
| 673 |
| 584 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 674 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, |
| 585 int arguments, | 675 int arguments, |
| 586 int deoptimization_index) { | 676 int deoptimization_index) { |
| 587 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, | 677 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, |
| 588 deoptimization_index); | 678 deoptimization_index); |
| 589 } | 679 } |
| 590 | 680 |
| 591 | 681 |
| 592 void LCodeGen::RecordPosition(int position) { | 682 void LCodeGen::RecordPosition(int position) { |
| 593 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; | 683 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 628 } | 718 } |
| 629 } | 719 } |
| 630 | 720 |
| 631 | 721 |
| 632 void LCodeGen::DoParameter(LParameter* instr) { | 722 void LCodeGen::DoParameter(LParameter* instr) { |
| 633 // Nothing to do. | 723 // Nothing to do. |
| 634 } | 724 } |
| 635 | 725 |
| 636 | 726 |
| 637 void LCodeGen::DoCallStub(LCallStub* instr) { | 727 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 728 ASSERT(ToRegister(instr->context()).is(esi)); |
| 638 ASSERT(ToRegister(instr->result()).is(eax)); | 729 ASSERT(ToRegister(instr->result()).is(eax)); |
| 639 switch (instr->hydrogen()->major_key()) { | 730 switch (instr->hydrogen()->major_key()) { |
| 640 case CodeStub::RegExpConstructResult: { | 731 case CodeStub::RegExpConstructResult: { |
| 641 RegExpConstructResultStub stub; | 732 RegExpConstructResultStub stub; |
| 642 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 733 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 643 break; | 734 break; |
| 644 } | 735 } |
| 645 case CodeStub::RegExpExec: { | 736 case CodeStub::RegExpExec: { |
| 646 RegExpExecStub stub; | 737 RegExpExecStub stub; |
| 647 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 738 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| (...skipping 340 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 988 } | 1079 } |
| 989 | 1080 |
| 990 | 1081 |
| 991 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { | 1082 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { |
| 992 Register result = ToRegister(instr->result()); | 1083 Register result = ToRegister(instr->result()); |
| 993 Register array = ToRegister(instr->InputAt(0)); | 1084 Register array = ToRegister(instr->InputAt(0)); |
| 994 __ mov(result, FieldOperand(array, FixedArray::kLengthOffset)); | 1085 __ mov(result, FieldOperand(array, FixedArray::kLengthOffset)); |
| 995 } | 1086 } |
| 996 | 1087 |
| 997 | 1088 |
| 1089 void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) { |
| 1090 Register result = ToRegister(instr->result()); |
| 1091 Register array = ToRegister(instr->InputAt(0)); |
| 1092 __ mov(result, FieldOperand(array, PixelArray::kLengthOffset)); |
| 1093 } |
| 1094 |
| 1095 |
| 998 void LCodeGen::DoValueOf(LValueOf* instr) { | 1096 void LCodeGen::DoValueOf(LValueOf* instr) { |
| 999 Register input = ToRegister(instr->InputAt(0)); | 1097 Register input = ToRegister(instr->InputAt(0)); |
| 1000 Register result = ToRegister(instr->result()); | 1098 Register result = ToRegister(instr->result()); |
| 1001 Register map = ToRegister(instr->TempAt(0)); | 1099 Register map = ToRegister(instr->TempAt(0)); |
| 1002 ASSERT(input.is(result)); | 1100 ASSERT(input.is(result)); |
| 1003 NearLabel done; | 1101 NearLabel done; |
| 1004 // If the object is a smi return the object. | 1102 // If the object is a smi return the object. |
| 1005 __ test(input, Immediate(kSmiTagMask)); | 1103 __ test(input, Immediate(kSmiTagMask)); |
| 1006 __ j(zero, &done); | 1104 __ j(zero, &done); |
| 1007 | 1105 |
| 1008 // If the object is not a value type, return the object. | 1106 // If the object is not a value type, return the object. |
| 1009 __ CmpObjectType(input, JS_VALUE_TYPE, map); | 1107 __ CmpObjectType(input, JS_VALUE_TYPE, map); |
| 1010 __ j(not_equal, &done); | 1108 __ j(not_equal, &done); |
| 1011 __ mov(result, FieldOperand(input, JSValue::kValueOffset)); | 1109 __ mov(result, FieldOperand(input, JSValue::kValueOffset)); |
| 1012 | 1110 |
| 1013 __ bind(&done); | 1111 __ bind(&done); |
| 1014 } | 1112 } |
| 1015 | 1113 |
| 1016 | 1114 |
| 1017 void LCodeGen::DoBitNotI(LBitNotI* instr) { | 1115 void LCodeGen::DoBitNotI(LBitNotI* instr) { |
| 1018 LOperand* input = instr->InputAt(0); | 1116 LOperand* input = instr->InputAt(0); |
| 1019 ASSERT(input->Equals(instr->result())); | 1117 ASSERT(input->Equals(instr->result())); |
| 1020 __ not_(ToRegister(input)); | 1118 __ not_(ToRegister(input)); |
| 1021 } | 1119 } |
| 1022 | 1120 |
| 1023 | 1121 |
| 1024 void LCodeGen::DoThrow(LThrow* instr) { | 1122 void LCodeGen::DoThrow(LThrow* instr) { |
| 1025 __ push(ToOperand(instr->InputAt(0))); | 1123 __ push(ToOperand(instr->InputAt(0))); |
| 1026 CallRuntime(Runtime::kThrow, 1, instr); | 1124 CallRuntime(Runtime::kThrow, 1, instr, false); |
| 1027 | 1125 |
| 1028 if (FLAG_debug_code) { | 1126 if (FLAG_debug_code) { |
| 1029 Comment("Unreachable code."); | 1127 Comment("Unreachable code."); |
| 1030 __ int3(); | 1128 __ int3(); |
| 1031 } | 1129 } |
| 1032 } | 1130 } |
| 1033 | 1131 |
| 1034 | 1132 |
| 1035 void LCodeGen::DoAddI(LAddI* instr) { | 1133 void LCodeGen::DoAddI(LAddI* instr) { |
| 1036 LOperand* left = instr->InputAt(0); | 1134 LOperand* left = instr->InputAt(0); |
| 1037 LOperand* right = instr->InputAt(1); | 1135 LOperand* right = instr->InputAt(1); |
| 1038 ASSERT(left->Equals(instr->result())); | 1136 ASSERT(left->Equals(instr->result())); |
| 1039 | 1137 |
| 1040 if (right->IsConstantOperand()) { | 1138 if (right->IsConstantOperand()) { |
| 1041 __ add(ToOperand(left), ToImmediate(right)); | 1139 __ add(ToOperand(left), ToImmediate(right)); |
| 1042 } else { | 1140 } else { |
| 1043 __ add(ToRegister(left), ToOperand(right)); | 1141 __ add(ToRegister(left), ToOperand(right)); |
| 1044 } | 1142 } |
| 1045 | 1143 |
| 1046 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1144 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1047 DeoptimizeIf(overflow, instr->environment()); | 1145 DeoptimizeIf(overflow, instr->environment()); |
| 1048 } | 1146 } |
| 1049 } | 1147 } |
| 1050 | 1148 |
| 1051 | 1149 |
| 1052 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { | 1150 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { |
| 1053 LOperand* left = instr->InputAt(0); | 1151 XMMRegister left = ToDoubleRegister(instr->InputAt(0)); |
| 1054 LOperand* right = instr->InputAt(1); | 1152 XMMRegister right = ToDoubleRegister(instr->InputAt(1)); |
| 1153 XMMRegister result = ToDoubleRegister(instr->result()); |
| 1055 // Modulo uses a fixed result register. | 1154 // Modulo uses a fixed result register. |
| 1056 ASSERT(instr->op() == Token::MOD || left->Equals(instr->result())); | 1155 ASSERT(instr->op() == Token::MOD || left.is(result)); |
| 1057 switch (instr->op()) { | 1156 switch (instr->op()) { |
| 1058 case Token::ADD: | 1157 case Token::ADD: |
| 1059 __ addsd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1158 __ addsd(left, right); |
| 1060 break; | 1159 break; |
| 1061 case Token::SUB: | 1160 case Token::SUB: |
| 1062 __ subsd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1161 __ subsd(left, right); |
| 1063 break; | 1162 break; |
| 1064 case Token::MUL: | 1163 case Token::MUL: |
| 1065 __ mulsd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1164 __ mulsd(left, right); |
| 1066 break; | 1165 break; |
| 1067 case Token::DIV: | 1166 case Token::DIV: |
| 1068 __ divsd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1167 __ divsd(left, right); |
| 1069 break; | 1168 break; |
| 1070 case Token::MOD: { | 1169 case Token::MOD: { |
| 1071 // Pass two doubles as arguments on the stack. | 1170 // Pass two doubles as arguments on the stack. |
| 1072 __ PrepareCallCFunction(4, eax); | 1171 __ PrepareCallCFunction(4, eax); |
| 1073 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | 1172 __ movdbl(Operand(esp, 0 * kDoubleSize), left); |
| 1074 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right)); | 1173 __ movdbl(Operand(esp, 1 * kDoubleSize), right); |
| 1075 __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4); | 1174 __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4); |
| 1076 | 1175 |
| 1077 // Return value is in st(0) on ia32. | 1176 // Return value is in st(0) on ia32. |
| 1078 // Store it into the (fixed) result register. | 1177 // Store it into the (fixed) result register. |
| 1079 __ sub(Operand(esp), Immediate(kDoubleSize)); | 1178 __ sub(Operand(esp), Immediate(kDoubleSize)); |
| 1080 __ fstp_d(Operand(esp, 0)); | 1179 __ fstp_d(Operand(esp, 0)); |
| 1081 __ movdbl(ToDoubleRegister(instr->result()), Operand(esp, 0)); | 1180 __ movdbl(result, Operand(esp, 0)); |
| 1082 __ add(Operand(esp), Immediate(kDoubleSize)); | 1181 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 1083 break; | 1182 break; |
| 1084 } | 1183 } |
| 1085 default: | 1184 default: |
| 1086 UNREACHABLE(); | 1185 UNREACHABLE(); |
| 1087 break; | 1186 break; |
| 1088 } | 1187 } |
| 1089 } | 1188 } |
| 1090 | 1189 |
| 1091 | 1190 |
| 1092 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1191 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1093 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); | 1192 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); |
| 1094 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); | 1193 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); |
| 1095 ASSERT(ToRegister(instr->result()).is(eax)); | 1194 ASSERT(ToRegister(instr->result()).is(eax)); |
| 1096 | 1195 |
| 1097 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1196 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 1098 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1197 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 1099 } | 1198 } |
| 1100 | 1199 |
| 1101 | 1200 |
| 1102 int LCodeGen::GetNextEmittedBlock(int block) { | 1201 int LCodeGen::GetNextEmittedBlock(int block) { |
| 1103 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 1202 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
| 1104 LLabel* label = chunk_->GetLabel(i); | 1203 LLabel* label = chunk_->GetLabel(i); |
| 1105 if (!label->HasReplacement()) return i; | 1204 if (!label->HasReplacement()) return i; |
| 1106 } | 1205 } |
| 1107 return -1; | 1206 return -1; |
| 1108 } | 1207 } |
| (...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1201 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); | 1300 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); |
| 1202 } else { | 1301 } else { |
| 1203 __ jmp(chunk_->GetAssemblyLabel(block)); | 1302 __ jmp(chunk_->GetAssemblyLabel(block)); |
| 1204 } | 1303 } |
| 1205 } | 1304 } |
| 1206 } | 1305 } |
| 1207 | 1306 |
| 1208 | 1307 |
| 1209 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { | 1308 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { |
| 1210 __ pushad(); | 1309 __ pushad(); |
| 1310 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 1211 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 1311 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 1212 RecordSafepointWithRegisters( | 1312 RecordSafepointWithRegisters( |
| 1213 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 1313 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 1214 __ popad(); | 1314 __ popad(); |
| 1215 } | 1315 } |
| 1216 | 1316 |
| 1217 void LCodeGen::DoGoto(LGoto* instr) { | 1317 void LCodeGen::DoGoto(LGoto* instr) { |
| 1218 class DeferredStackCheck: public LDeferredCode { | 1318 class DeferredStackCheck: public LDeferredCode { |
| 1219 public: | 1319 public: |
| 1220 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) | 1320 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) |
| (...skipping 470 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1691 int true_block = instr->true_block_id(); | 1791 int true_block = instr->true_block_id(); |
| 1692 int false_block = instr->false_block_id(); | 1792 int false_block = instr->false_block_id(); |
| 1693 | 1793 |
| 1694 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 1794 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
| 1695 EmitBranch(true_block, false_block, equal); | 1795 EmitBranch(true_block, false_block, equal); |
| 1696 } | 1796 } |
| 1697 | 1797 |
| 1698 | 1798 |
| 1699 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 1799 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 1700 // Object and function are in fixed registers defined by the stub. | 1800 // Object and function are in fixed registers defined by the stub. |
| 1801 ASSERT(ToRegister(instr->context()).is(esi)); |
| 1701 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1802 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
| 1702 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1803 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1703 | 1804 |
| 1704 NearLabel true_value, done; | 1805 NearLabel true_value, done; |
| 1705 __ test(eax, Operand(eax)); | 1806 __ test(eax, Operand(eax)); |
| 1706 __ j(zero, &true_value); | 1807 __ j(zero, &true_value); |
| 1707 __ mov(ToRegister(instr->result()), FACTORY->false_value()); | 1808 __ mov(ToRegister(instr->result()), FACTORY->false_value()); |
| 1708 __ jmp(&done); | 1809 __ jmp(&done); |
| 1709 __ bind(&true_value); | 1810 __ bind(&true_value); |
| 1710 __ mov(ToRegister(instr->result()), FACTORY->true_value()); | 1811 __ mov(ToRegister(instr->result()), FACTORY->true_value()); |
| 1711 __ bind(&done); | 1812 __ bind(&done); |
| 1712 } | 1813 } |
| 1713 | 1814 |
| 1714 | 1815 |
| 1715 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { | 1816 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { |
| 1817 ASSERT(ToRegister(instr->context()).is(esi)); |
| 1716 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1818 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1717 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1819 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1718 | 1820 |
| 1719 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1821 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
| 1720 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1822 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1721 __ test(eax, Operand(eax)); | 1823 __ test(eax, Operand(eax)); |
| 1722 EmitBranch(true_block, false_block, zero); | 1824 EmitBranch(true_block, false_block, zero); |
| 1723 } | 1825 } |
| 1724 | 1826 |
| 1725 | 1827 |
| (...skipping 14 matching lines...) Expand all Loading... |
| 1740 Label map_check_; | 1842 Label map_check_; |
| 1741 }; | 1843 }; |
| 1742 | 1844 |
| 1743 DeferredInstanceOfKnownGlobal* deferred; | 1845 DeferredInstanceOfKnownGlobal* deferred; |
| 1744 deferred = new DeferredInstanceOfKnownGlobal(this, instr); | 1846 deferred = new DeferredInstanceOfKnownGlobal(this, instr); |
| 1745 | 1847 |
| 1746 Label done, false_result; | 1848 Label done, false_result; |
| 1747 Register object = ToRegister(instr->InputAt(0)); | 1849 Register object = ToRegister(instr->InputAt(0)); |
| 1748 Register temp = ToRegister(instr->TempAt(0)); | 1850 Register temp = ToRegister(instr->TempAt(0)); |
| 1749 | 1851 |
| 1750 // A Smi is not instance of anything. | 1852 // A Smi is not an instance of anything. |
| 1751 __ test(object, Immediate(kSmiTagMask)); | 1853 __ test(object, Immediate(kSmiTagMask)); |
| 1752 __ j(zero, &false_result, not_taken); | 1854 __ j(zero, &false_result, not_taken); |
| 1753 | 1855 |
| 1754 // This is the inlined call site instanceof cache. The two occourences of the | 1856 // This is the inlined call site instanceof cache. The two occurences of the |
| 1755 // hole value will be patched to the last map/result pair generated by the | 1857 // hole value will be patched to the last map/result pair generated by the |
| 1756 // instanceof stub. | 1858 // instanceof stub. |
| 1757 NearLabel cache_miss; | 1859 NearLabel cache_miss; |
| 1758 Register map = ToRegister(instr->TempAt(0)); | 1860 Register map = ToRegister(instr->TempAt(0)); |
| 1759 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); | 1861 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); |
| 1760 __ bind(deferred->map_check()); // Label for calculating code patching. | 1862 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 1761 __ cmp(map, FACTORY->the_hole_value()); // Patched to cached map. | 1863 __ cmp(map, FACTORY->the_hole_value()); // Patched to cached map. |
| 1762 __ j(not_equal, &cache_miss, not_taken); | 1864 __ j(not_equal, &cache_miss, not_taken); |
| 1763 __ mov(eax, FACTORY->the_hole_value()); // Patched to either true or false. | 1865 __ mov(eax, FACTORY->the_hole_value()); // Patched to either true or false. |
| 1764 __ jmp(&done); | 1866 __ jmp(&done); |
| 1765 | 1867 |
| 1766 // The inlined call site cache did not match. Check null and string before | 1868 // The inlined call site cache did not match. Check for null and string |
| 1767 // calling the deferred code. | 1869 // before calling the deferred code. |
| 1768 __ bind(&cache_miss); | 1870 __ bind(&cache_miss); |
| 1769 // Null is not instance of anything. | 1871 // Null is not an instance of anything. |
| 1770 __ cmp(object, FACTORY->null_value()); | 1872 __ cmp(object, FACTORY->null_value()); |
| 1771 __ j(equal, &false_result); | 1873 __ j(equal, &false_result); |
| 1772 | 1874 |
| 1773 // String values are not instances of anything. | 1875 // String values are not instances of anything. |
| 1774 Condition is_string = masm_->IsObjectStringType(object, temp, temp); | 1876 Condition is_string = masm_->IsObjectStringType(object, temp, temp); |
| 1775 __ j(is_string, &false_result); | 1877 __ j(is_string, &false_result); |
| 1776 | 1878 |
| 1777 // Go to the deferred code. | 1879 // Go to the deferred code. |
| 1778 __ jmp(deferred->entry()); | 1880 __ jmp(deferred->entry()); |
| 1779 | 1881 |
| (...skipping 19 matching lines...) Expand all Loading... |
| 1799 flags = static_cast<InstanceofStub::Flags>( | 1901 flags = static_cast<InstanceofStub::Flags>( |
| 1800 flags | InstanceofStub::kReturnTrueFalseObject); | 1902 flags | InstanceofStub::kReturnTrueFalseObject); |
| 1801 InstanceofStub stub(flags); | 1903 InstanceofStub stub(flags); |
| 1802 | 1904 |
| 1803 // Get the temp register reserved by the instruction. This needs to be edi as | 1905 // Get the temp register reserved by the instruction. This needs to be edi as |
| 1804 // its slot of the pushing of safepoint registers is used to communicate the | 1906 // its slot of the pushing of safepoint registers is used to communicate the |
| 1805 // offset to the location of the map check. | 1907 // offset to the location of the map check. |
| 1806 Register temp = ToRegister(instr->TempAt(0)); | 1908 Register temp = ToRegister(instr->TempAt(0)); |
| 1807 ASSERT(temp.is(edi)); | 1909 ASSERT(temp.is(edi)); |
| 1808 __ mov(InstanceofStub::right(), Immediate(instr->function())); | 1910 __ mov(InstanceofStub::right(), Immediate(instr->function())); |
| 1809 static const int kAdditionalDelta = 13; | 1911 static const int kAdditionalDelta = 16; |
| 1810 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 1912 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
| 1811 Label before_push_delta; | 1913 Label before_push_delta; |
| 1812 __ bind(&before_push_delta); | 1914 __ bind(&before_push_delta); |
| 1813 __ mov(temp, Immediate(delta)); | 1915 __ mov(temp, Immediate(delta)); |
| 1814 __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp); | 1916 __ StoreToSafepointRegisterSlot(temp, temp); |
| 1815 __ call(stub.GetCode(), RelocInfo::CODE_TARGET); | 1917 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 1816 ASSERT_EQ(kAdditionalDelta, | |
| 1817 masm_->SizeOfCodeGeneratedSince(&before_push_delta)); | |
| 1818 RecordSafepointWithRegisters( | |
| 1819 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | |
| 1820 // Put the result value into the eax slot and restore all registers. | 1918 // Put the result value into the eax slot and restore all registers. |
| 1821 __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax); | 1919 __ StoreToSafepointRegisterSlot(eax, eax); |
| 1822 | |
| 1823 __ PopSafepointRegisters(); | 1920 __ PopSafepointRegisters(); |
| 1824 } | 1921 } |
| 1825 | 1922 |
| 1826 | 1923 |
| 1827 static Condition ComputeCompareCondition(Token::Value op) { | 1924 static Condition ComputeCompareCondition(Token::Value op) { |
| 1828 switch (op) { | 1925 switch (op) { |
| 1829 case Token::EQ_STRICT: | 1926 case Token::EQ_STRICT: |
| 1830 case Token::EQ: | 1927 case Token::EQ: |
| 1831 return equal; | 1928 return equal; |
| 1832 case Token::LT: | 1929 case Token::LT: |
| 1833 return less; | 1930 return less; |
| 1834 case Token::GT: | 1931 case Token::GT: |
| 1835 return greater; | 1932 return greater; |
| 1836 case Token::LTE: | 1933 case Token::LTE: |
| 1837 return less_equal; | 1934 return less_equal; |
| 1838 case Token::GTE: | 1935 case Token::GTE: |
| 1839 return greater_equal; | 1936 return greater_equal; |
| 1840 default: | 1937 default: |
| 1841 UNREACHABLE(); | 1938 UNREACHABLE(); |
| 1842 return no_condition; | 1939 return no_condition; |
| 1843 } | 1940 } |
| 1844 } | 1941 } |
| 1845 | 1942 |
| 1846 | 1943 |
| 1847 void LCodeGen::DoCmpT(LCmpT* instr) { | 1944 void LCodeGen::DoCmpT(LCmpT* instr) { |
| 1848 Token::Value op = instr->op(); | 1945 Token::Value op = instr->op(); |
| 1849 | 1946 |
| 1850 Handle<Code> ic = CompareIC::GetUninitialized(op); | 1947 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 1851 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 1948 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); |
| 1852 | 1949 |
| 1853 Condition condition = ComputeCompareCondition(op); | 1950 Condition condition = ComputeCompareCondition(op); |
| 1854 if (op == Token::GT || op == Token::LTE) { | 1951 if (op == Token::GT || op == Token::LTE) { |
| 1855 condition = ReverseCondition(condition); | 1952 condition = ReverseCondition(condition); |
| 1856 } | 1953 } |
| 1857 NearLabel true_value, done; | 1954 NearLabel true_value, done; |
| 1858 __ test(eax, Operand(eax)); | 1955 __ test(eax, Operand(eax)); |
| 1859 __ j(condition, &true_value); | 1956 __ j(condition, &true_value); |
| 1860 __ mov(ToRegister(instr->result()), FACTORY->false_value()); | 1957 __ mov(ToRegister(instr->result()), FACTORY->false_value()); |
| 1861 __ jmp(&done); | 1958 __ jmp(&done); |
| 1862 __ bind(&true_value); | 1959 __ bind(&true_value); |
| 1863 __ mov(ToRegister(instr->result()), FACTORY->true_value()); | 1960 __ mov(ToRegister(instr->result()), FACTORY->true_value()); |
| 1864 __ bind(&done); | 1961 __ bind(&done); |
| 1865 } | 1962 } |
| 1866 | 1963 |
| 1867 | 1964 |
| 1868 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { | 1965 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { |
| 1869 Token::Value op = instr->op(); | 1966 Token::Value op = instr->op(); |
| 1870 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1967 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1871 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1968 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1872 | 1969 |
| 1873 Handle<Code> ic = CompareIC::GetUninitialized(op); | 1970 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 1874 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 1971 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); |
| 1875 | 1972 |
| 1876 // The compare stub expects compare condition and the input operands | 1973 // The compare stub expects compare condition and the input operands |
| 1877 // reversed for GT and LTE. | 1974 // reversed for GT and LTE. |
| 1878 Condition condition = ComputeCompareCondition(op); | 1975 Condition condition = ComputeCompareCondition(op); |
| 1879 if (op == Token::GT || op == Token::LTE) { | 1976 if (op == Token::GT || op == Token::LTE) { |
| 1880 condition = ReverseCondition(condition); | 1977 condition = ReverseCondition(condition); |
| 1881 } | 1978 } |
| 1882 __ test(eax, Operand(eax)); | 1979 __ test(eax, Operand(eax)); |
| 1883 EmitBranch(true_block, false_block, condition); | 1980 EmitBranch(true_block, false_block, condition); |
| 1884 } | 1981 } |
| 1885 | 1982 |
| 1886 | 1983 |
| 1887 void LCodeGen::DoReturn(LReturn* instr) { | 1984 void LCodeGen::DoReturn(LReturn* instr) { |
| 1888 if (FLAG_trace) { | 1985 if (FLAG_trace) { |
| 1889 // Preserve the return value on the stack and rely on the runtime | 1986 // Preserve the return value on the stack and rely on the runtime call |
| 1890 // call to return the value in the same register. | 1987 // to return the value in the same register. We're leaving the code |
| 1988 // managed by the register allocator and tearing down the frame, it's |
| 1989 // safe to write to the context register. |
| 1891 __ push(eax); | 1990 __ push(eax); |
| 1991 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 1892 __ CallRuntime(Runtime::kTraceExit, 1); | 1992 __ CallRuntime(Runtime::kTraceExit, 1); |
| 1893 } | 1993 } |
| 1894 __ mov(esp, ebp); | 1994 __ mov(esp, ebp); |
| 1895 __ pop(ebp); | 1995 __ pop(ebp); |
| 1896 __ Ret((ParameterCount() + 1) * kPointerSize, ecx); | 1996 __ Ret((ParameterCount() + 1) * kPointerSize, ecx); |
| 1897 } | 1997 } |
| 1898 | 1998 |
| 1899 | 1999 |
| 1900 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { | 2000 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { |
| 1901 Register result = ToRegister(instr->result()); | 2001 Register result = ToRegister(instr->result()); |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1950 if (instr->hydrogen()->is_in_object()) { | 2050 if (instr->hydrogen()->is_in_object()) { |
| 1951 __ mov(result, FieldOperand(object, instr->hydrogen()->offset())); | 2051 __ mov(result, FieldOperand(object, instr->hydrogen()->offset())); |
| 1952 } else { | 2052 } else { |
| 1953 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 2053 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 1954 __ mov(result, FieldOperand(result, instr->hydrogen()->offset())); | 2054 __ mov(result, FieldOperand(result, instr->hydrogen()->offset())); |
| 1955 } | 2055 } |
| 1956 } | 2056 } |
| 1957 | 2057 |
| 1958 | 2058 |
| 1959 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2059 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 2060 ASSERT(ToRegister(instr->context()).is(esi)); |
| 1960 ASSERT(ToRegister(instr->object()).is(eax)); | 2061 ASSERT(ToRegister(instr->object()).is(eax)); |
| 1961 ASSERT(ToRegister(instr->result()).is(eax)); | 2062 ASSERT(ToRegister(instr->result()).is(eax)); |
| 1962 | 2063 |
| 1963 __ mov(ecx, instr->name()); | 2064 __ mov(ecx, instr->name()); |
| 1964 Handle<Code> ic(Isolate::Current()->builtins()->builtin( | 2065 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 1965 Builtins::LoadIC_Initialize)); | 2066 Builtins::LoadIC_Initialize)); |
| 1966 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2067 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 1967 } | 2068 } |
| 1968 | 2069 |
| 1969 | 2070 |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2003 // in the function's map. | 2104 // in the function's map. |
| 2004 __ bind(&non_instance); | 2105 __ bind(&non_instance); |
| 2005 __ mov(result, FieldOperand(result, Map::kConstructorOffset)); | 2106 __ mov(result, FieldOperand(result, Map::kConstructorOffset)); |
| 2006 | 2107 |
| 2007 // All done. | 2108 // All done. |
| 2008 __ bind(&done); | 2109 __ bind(&done); |
| 2009 } | 2110 } |
| 2010 | 2111 |
| 2011 | 2112 |
| 2012 void LCodeGen::DoLoadElements(LLoadElements* instr) { | 2113 void LCodeGen::DoLoadElements(LLoadElements* instr) { |
| 2013 ASSERT(instr->result()->Equals(instr->InputAt(0))); | 2114 Register result = ToRegister(instr->result()); |
| 2014 Register reg = ToRegister(instr->InputAt(0)); | 2115 Register input = ToRegister(instr->InputAt(0)); |
| 2015 __ mov(reg, FieldOperand(reg, JSObject::kElementsOffset)); | 2116 __ mov(result, FieldOperand(input, JSObject::kElementsOffset)); |
| 2016 if (FLAG_debug_code) { | 2117 if (FLAG_debug_code) { |
| 2017 NearLabel done; | 2118 NearLabel done; |
| 2018 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 2119 __ cmp(FieldOperand(result, HeapObject::kMapOffset), |
| 2019 Immediate(FACTORY->fixed_array_map())); | 2120 Immediate(FACTORY->fixed_array_map())); |
| 2020 __ j(equal, &done); | 2121 __ j(equal, &done); |
| 2021 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 2122 __ cmp(FieldOperand(result, HeapObject::kMapOffset), |
| 2123 Immediate(FACTORY->pixel_array_map())); |
| 2124 __ j(equal, &done); |
| 2125 __ cmp(FieldOperand(result, HeapObject::kMapOffset), |
| 2022 Immediate(FACTORY->fixed_cow_array_map())); | 2126 Immediate(FACTORY->fixed_cow_array_map())); |
| 2023 __ Check(equal, "Check for fast elements failed."); | 2127 __ Check(equal, "Check for fast elements or pixel array failed."); |
| 2024 __ bind(&done); | 2128 __ bind(&done); |
| 2025 } | 2129 } |
| 2026 } | 2130 } |
| 2027 | 2131 |
| 2028 | 2132 |
| 2133 void LCodeGen::DoLoadPixelArrayExternalPointer( |
| 2134 LLoadPixelArrayExternalPointer* instr) { |
| 2135 Register result = ToRegister(instr->result()); |
| 2136 Register input = ToRegister(instr->InputAt(0)); |
| 2137 __ mov(result, FieldOperand(input, PixelArray::kExternalPointerOffset)); |
| 2138 } |
| 2139 |
| 2140 |
| 2029 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { | 2141 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { |
| 2030 Register arguments = ToRegister(instr->arguments()); | 2142 Register arguments = ToRegister(instr->arguments()); |
| 2031 Register length = ToRegister(instr->length()); | 2143 Register length = ToRegister(instr->length()); |
| 2032 Operand index = ToOperand(instr->index()); | 2144 Operand index = ToOperand(instr->index()); |
| 2033 Register result = ToRegister(instr->result()); | 2145 Register result = ToRegister(instr->result()); |
| 2034 | 2146 |
| 2035 __ sub(length, index); | 2147 __ sub(length, index); |
| 2036 DeoptimizeIf(below_equal, instr->environment()); | 2148 DeoptimizeIf(below_equal, instr->environment()); |
| 2037 | 2149 |
| 2038 // There are two words between the frame pointer and the last argument. | 2150 // There are two words between the frame pointer and the last argument. |
| (...skipping 13 matching lines...) Expand all Loading... |
| 2052 key, | 2164 key, |
| 2053 times_pointer_size, | 2165 times_pointer_size, |
| 2054 FixedArray::kHeaderSize)); | 2166 FixedArray::kHeaderSize)); |
| 2055 | 2167 |
| 2056 // Check for the hole value. | 2168 // Check for the hole value. |
| 2057 __ cmp(result, FACTORY->the_hole_value()); | 2169 __ cmp(result, FACTORY->the_hole_value()); |
| 2058 DeoptimizeIf(equal, instr->environment()); | 2170 DeoptimizeIf(equal, instr->environment()); |
| 2059 } | 2171 } |
| 2060 | 2172 |
| 2061 | 2173 |
| 2174 void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) { |
| 2175 Register external_pointer = ToRegister(instr->external_pointer()); |
| 2176 Register key = ToRegister(instr->key()); |
| 2177 Register result = ToRegister(instr->result()); |
| 2178 ASSERT(result.is(external_pointer)); |
| 2179 |
| 2180 // Load the result. |
| 2181 __ movzx_b(result, Operand(external_pointer, key, times_1, 0)); |
| 2182 } |
| 2183 |
| 2184 |
| 2062 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 2185 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 2186 ASSERT(ToRegister(instr->context()).is(esi)); |
| 2063 ASSERT(ToRegister(instr->object()).is(edx)); | 2187 ASSERT(ToRegister(instr->object()).is(edx)); |
| 2064 ASSERT(ToRegister(instr->key()).is(eax)); | 2188 ASSERT(ToRegister(instr->key()).is(eax)); |
| 2065 | 2189 |
| 2066 Handle<Code> ic(Isolate::Current()->builtins()->builtin( | 2190 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2067 Builtins::KeyedLoadIC_Initialize)); | 2191 Builtins::KeyedLoadIC_Initialize)); |
| 2068 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2192 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2069 } | 2193 } |
| 2070 | 2194 |
| 2071 | 2195 |
| 2072 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 2196 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2111 ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2235 ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2112 __ SmiUntag(result); | 2236 __ SmiUntag(result); |
| 2113 | 2237 |
| 2114 // Argument length is in result register. | 2238 // Argument length is in result register. |
| 2115 __ bind(&done); | 2239 __ bind(&done); |
| 2116 } | 2240 } |
| 2117 | 2241 |
| 2118 | 2242 |
| 2119 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 2243 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 2120 Register receiver = ToRegister(instr->receiver()); | 2244 Register receiver = ToRegister(instr->receiver()); |
| 2121 ASSERT(ToRegister(instr->function()).is(edi)); | 2245 Register function = ToRegister(instr->function()); |
| 2246 Register length = ToRegister(instr->length()); |
| 2247 Register elements = ToRegister(instr->elements()); |
| 2248 Register scratch = ToRegister(instr->TempAt(0)); |
| 2249 ASSERT(receiver.is(eax)); // Used for parameter count. |
| 2250 ASSERT(function.is(edi)); // Required by InvokeFunction. |
| 2122 ASSERT(ToRegister(instr->result()).is(eax)); | 2251 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2123 | 2252 |
| 2124 // If the receiver is null or undefined, we have to pass the | 2253 // If the receiver is null or undefined, we have to pass the global object |
| 2125 // global object as a receiver. | 2254 // as a receiver. |
| 2126 NearLabel global_receiver, receiver_ok; | 2255 NearLabel global_object, receiver_ok; |
| 2127 __ cmp(receiver, FACTORY->null_value()); | 2256 __ cmp(receiver, FACTORY->null_value()); |
| 2128 __ j(equal, &global_receiver); | 2257 __ j(equal, &global_object); |
| 2129 __ cmp(receiver, FACTORY->undefined_value()); | 2258 __ cmp(receiver, FACTORY->undefined_value()); |
| 2130 __ j(not_equal, &receiver_ok); | 2259 __ j(equal, &global_object); |
| 2131 __ bind(&global_receiver); | 2260 |
| 2132 __ mov(receiver, GlobalObjectOperand()); | 2261 // The receiver should be a JS object. |
| 2262 __ test(receiver, Immediate(kSmiTagMask)); |
| 2263 DeoptimizeIf(equal, instr->environment()); |
| 2264 __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, scratch); |
| 2265 DeoptimizeIf(below, instr->environment()); |
| 2266 __ jmp(&receiver_ok); |
| 2267 |
| 2268 __ bind(&global_object); |
| 2269 // TODO(kmillikin): We have a hydrogen value for the global object. See |
| 2270 // if it's better to use it than to explicitly fetch it from the context |
| 2271 // here. |
| 2272 __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2273 __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX)); |
| 2133 __ bind(&receiver_ok); | 2274 __ bind(&receiver_ok); |
| 2134 | 2275 |
| 2135 Register length = ToRegister(instr->length()); | |
| 2136 Register elements = ToRegister(instr->elements()); | |
| 2137 | |
| 2138 Label invoke; | |
| 2139 | |
| 2140 // Copy the arguments to this function possibly from the | 2276 // Copy the arguments to this function possibly from the |
| 2141 // adaptor frame below it. | 2277 // adaptor frame below it. |
| 2142 const uint32_t kArgumentsLimit = 1 * KB; | 2278 const uint32_t kArgumentsLimit = 1 * KB; |
| 2143 __ cmp(length, kArgumentsLimit); | 2279 __ cmp(length, kArgumentsLimit); |
| 2144 DeoptimizeIf(above, instr->environment()); | 2280 DeoptimizeIf(above, instr->environment()); |
| 2145 | 2281 |
| 2146 __ push(receiver); | 2282 __ push(receiver); |
| 2147 __ mov(receiver, length); | 2283 __ mov(receiver, length); |
| 2148 | 2284 |
| 2149 // Loop through the arguments pushing them onto the execution | 2285 // Loop through the arguments pushing them onto the execution |
| 2150 // stack. | 2286 // stack. |
| 2151 Label loop; | 2287 NearLabel invoke, loop; |
| 2152 // length is a small non-negative integer, due to the test above. | 2288 // length is a small non-negative integer, due to the test above. |
| 2153 __ test(length, Operand(length)); | 2289 __ test(length, Operand(length)); |
| 2154 __ j(zero, &invoke); | 2290 __ j(zero, &invoke); |
| 2155 __ bind(&loop); | 2291 __ bind(&loop); |
| 2156 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 2292 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); |
| 2157 __ dec(length); | 2293 __ dec(length); |
| 2158 __ j(not_zero, &loop); | 2294 __ j(not_zero, &loop); |
| 2159 | 2295 |
| 2160 // Invoke the function. | 2296 // Invoke the function. |
| 2161 __ bind(&invoke); | 2297 __ bind(&invoke); |
| 2162 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 2298 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
| 2163 LPointerMap* pointers = instr->pointer_map(); | 2299 LPointerMap* pointers = instr->pointer_map(); |
| 2164 LEnvironment* env = instr->deoptimization_environment(); | 2300 LEnvironment* env = instr->deoptimization_environment(); |
| 2165 RecordPosition(pointers->position()); | 2301 RecordPosition(pointers->position()); |
| 2166 RegisterEnvironmentForDeoptimization(env); | 2302 RegisterEnvironmentForDeoptimization(env); |
| 2167 SafepointGenerator safepoint_generator(this, | 2303 SafepointGenerator safepoint_generator(this, |
| 2168 pointers, | 2304 pointers, |
| 2169 env->deoptimization_index()); | 2305 env->deoptimization_index(), |
| 2170 ASSERT(receiver.is(eax)); | 2306 true); |
| 2171 v8::internal::ParameterCount actual(eax); | 2307 v8::internal::ParameterCount actual(eax); |
| 2172 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator); | 2308 __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator); |
| 2173 | |
| 2174 // Restore context. | |
| 2175 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 2176 } | 2309 } |
| 2177 | 2310 |
| 2178 | 2311 |
| 2179 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2312 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 2180 LOperand* argument = instr->InputAt(0); | 2313 LOperand* argument = instr->InputAt(0); |
| 2181 if (argument->IsConstantOperand()) { | 2314 if (argument->IsConstantOperand()) { |
| 2182 __ push(ToImmediate(argument)); | 2315 __ push(ToImmediate(argument)); |
| 2183 } else { | 2316 } else { |
| 2184 __ push(ToOperand(argument)); | 2317 __ push(ToOperand(argument)); |
| 2185 } | 2318 } |
| 2186 } | 2319 } |
| 2187 | 2320 |
| 2188 | 2321 |
| 2189 void LCodeGen::DoContext(LContext* instr) { | 2322 void LCodeGen::DoContext(LContext* instr) { |
| 2190 Register result = ToRegister(instr->result()); | 2323 Register result = ToRegister(instr->result()); |
| 2191 __ mov(result, esi); | 2324 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2192 } | 2325 } |
| 2193 | 2326 |
| 2194 | 2327 |
| 2195 void LCodeGen::DoOuterContext(LOuterContext* instr) { | 2328 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| 2196 Register context = ToRegister(instr->context()); | 2329 Register context = ToRegister(instr->context()); |
| 2197 Register result = ToRegister(instr->result()); | 2330 Register result = ToRegister(instr->result()); |
| 2198 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX))); | 2331 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX))); |
| 2199 __ mov(result, FieldOperand(result, JSFunction::kContextOffset)); | 2332 __ mov(result, FieldOperand(result, JSFunction::kContextOffset)); |
| 2200 } | 2333 } |
| 2201 | 2334 |
| (...skipping 15 matching lines...) Expand all Loading... |
| 2217 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 2350 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 2218 int arity, | 2351 int arity, |
| 2219 LInstruction* instr) { | 2352 LInstruction* instr) { |
| 2220 // Change context if needed. | 2353 // Change context if needed. |
| 2221 bool change_context = | 2354 bool change_context = |
| 2222 (graph()->info()->closure()->context() != function->context()) || | 2355 (graph()->info()->closure()->context() != function->context()) || |
| 2223 scope()->contains_with() || | 2356 scope()->contains_with() || |
| 2224 (scope()->num_heap_slots() > 0); | 2357 (scope()->num_heap_slots() > 0); |
| 2225 if (change_context) { | 2358 if (change_context) { |
| 2226 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | 2359 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); |
| 2360 } else { |
| 2361 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2227 } | 2362 } |
| 2228 | 2363 |
| 2229 // Set eax to arguments count if adaption is not needed. Assumes that eax | 2364 // Set eax to arguments count if adaption is not needed. Assumes that eax |
| 2230 // is available to write to at this point. | 2365 // is available to write to at this point. |
| 2231 if (!function->NeedsArgumentsAdaption()) { | 2366 if (!function->NeedsArgumentsAdaption()) { |
| 2232 __ mov(eax, arity); | 2367 __ mov(eax, arity); |
| 2233 } | 2368 } |
| 2234 | 2369 |
| 2235 LPointerMap* pointers = instr->pointer_map(); | 2370 LPointerMap* pointers = instr->pointer_map(); |
| 2236 RecordPosition(pointers->position()); | 2371 RecordPosition(pointers->position()); |
| 2237 | 2372 |
| 2238 // Invoke function. | 2373 // Invoke function. |
| 2239 if (*function == *graph()->info()->closure()) { | 2374 if (*function == *graph()->info()->closure()) { |
| 2240 __ CallSelf(); | 2375 __ CallSelf(); |
| 2241 } else { | 2376 } else { |
| 2242 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 2377 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); |
| 2378 EnsureRelocSpaceForDeoptimization(); |
| 2243 } | 2379 } |
| 2244 | 2380 |
| 2245 // Setup deoptimization. | 2381 // Setup deoptimization. |
| 2246 RegisterLazyDeoptimization(instr); | 2382 RegisterLazyDeoptimization(instr); |
| 2247 | |
| 2248 // Restore context. | |
| 2249 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 2250 } | 2383 } |
| 2251 | 2384 |
| 2252 | 2385 |
| 2253 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2386 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 2254 ASSERT(ToRegister(instr->result()).is(eax)); | 2387 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2255 __ mov(edi, instr->function()); | 2388 __ mov(edi, instr->function()); |
| 2256 CallKnownFunction(instr->function(), instr->arity(), instr); | 2389 CallKnownFunction(instr->function(), instr->arity(), instr); |
| 2257 } | 2390 } |
| 2258 | 2391 |
| 2259 | 2392 |
| (...skipping 22 matching lines...) Expand all Loading... |
| 2282 | 2415 |
| 2283 __ bind(&negative); | 2416 __ bind(&negative); |
| 2284 | 2417 |
| 2285 Label allocated, slow; | 2418 Label allocated, slow; |
| 2286 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); | 2419 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); |
| 2287 __ jmp(&allocated); | 2420 __ jmp(&allocated); |
| 2288 | 2421 |
| 2289 // Slow case: Call the runtime system to do the number allocation. | 2422 // Slow case: Call the runtime system to do the number allocation. |
| 2290 __ bind(&slow); | 2423 __ bind(&slow); |
| 2291 | 2424 |
| 2425 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2292 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 2426 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 2293 RecordSafepointWithRegisters( | 2427 RecordSafepointWithRegisters( |
| 2294 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 2428 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 2295 // Set the pointer to the new heap number in tmp. | 2429 // Set the pointer to the new heap number in tmp. |
| 2296 if (!tmp.is(eax)) __ mov(tmp, eax); | 2430 if (!tmp.is(eax)) __ mov(tmp, eax); |
| 2297 | 2431 |
| 2298 // Restore input_reg after call to runtime. | 2432 // Restore input_reg after call to runtime. |
| 2299 __ mov(input_reg, Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize)); | 2433 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); |
| 2300 | 2434 |
| 2301 __ bind(&allocated); | 2435 __ bind(&allocated); |
| 2302 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 2436 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
| 2303 __ and_(tmp2, ~HeapNumber::kSignMask); | 2437 __ and_(tmp2, ~HeapNumber::kSignMask); |
| 2304 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); | 2438 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); |
| 2305 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); | 2439 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); |
| 2306 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); | 2440 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); |
| 2307 __ mov(Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize), tmp); | 2441 __ StoreToSafepointRegisterSlot(input_reg, tmp); |
| 2308 | 2442 |
| 2309 __ bind(&done); | 2443 __ bind(&done); |
| 2310 __ PopSafepointRegisters(); | 2444 __ PopSafepointRegisters(); |
| 2311 } | 2445 } |
| 2312 | 2446 |
| 2313 | 2447 |
| 2314 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { | 2448 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { |
| 2315 Register input_reg = ToRegister(instr->InputAt(0)); | 2449 Register input_reg = ToRegister(instr->InputAt(0)); |
| 2316 __ test(input_reg, Operand(input_reg)); | 2450 __ test(input_reg, Operand(input_reg)); |
| 2317 Label is_positive; | 2451 Label is_positive; |
| (...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2422 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); | 2556 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); |
| 2423 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 2557 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
| 2424 __ sqrtsd(input_reg, input_reg); | 2558 __ sqrtsd(input_reg, input_reg); |
| 2425 } | 2559 } |
| 2426 | 2560 |
| 2427 | 2561 |
| 2428 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) { | 2562 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) { |
| 2429 XMMRegister xmm_scratch = xmm0; | 2563 XMMRegister xmm_scratch = xmm0; |
| 2430 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); | 2564 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); |
| 2431 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 2565 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
| 2432 ExternalReference negative_infinity = | |
| 2433 ExternalReference::address_of_negative_infinity(); | |
| 2434 __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity)); | |
| 2435 __ ucomisd(xmm_scratch, input_reg); | |
| 2436 DeoptimizeIf(equal, instr->environment()); | |
| 2437 __ xorpd(xmm_scratch, xmm_scratch); | 2566 __ xorpd(xmm_scratch, xmm_scratch); |
| 2438 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. | 2567 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. |
| 2439 __ sqrtsd(input_reg, input_reg); | 2568 __ sqrtsd(input_reg, input_reg); |
| 2440 } | 2569 } |
| 2441 | 2570 |
| 2442 | 2571 |
| 2443 void LCodeGen::DoPower(LPower* instr) { | 2572 void LCodeGen::DoPower(LPower* instr) { |
| 2444 LOperand* left = instr->InputAt(0); | 2573 LOperand* left = instr->InputAt(0); |
| 2445 LOperand* right = instr->InputAt(1); | 2574 LOperand* right = instr->InputAt(1); |
| 2446 DoubleRegister result_reg = ToDoubleRegister(instr->result()); | 2575 DoubleRegister result_reg = ToDoubleRegister(instr->result()); |
| (...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2493 __ fstp_d(Operand(esp, 0)); | 2622 __ fstp_d(Operand(esp, 0)); |
| 2494 __ movdbl(result_reg, Operand(esp, 0)); | 2623 __ movdbl(result_reg, Operand(esp, 0)); |
| 2495 __ add(Operand(esp), Immediate(kDoubleSize)); | 2624 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 2496 } | 2625 } |
| 2497 | 2626 |
| 2498 | 2627 |
| 2499 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { | 2628 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { |
| 2500 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2629 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 2501 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 2630 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
| 2502 TranscendentalCacheStub::UNTAGGED); | 2631 TranscendentalCacheStub::UNTAGGED); |
| 2503 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2632 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 2504 } | 2633 } |
| 2505 | 2634 |
| 2506 | 2635 |
| 2507 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { | 2636 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { |
| 2508 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2637 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 2509 TranscendentalCacheStub stub(TranscendentalCache::COS, | 2638 TranscendentalCacheStub stub(TranscendentalCache::COS, |
| 2510 TranscendentalCacheStub::UNTAGGED); | 2639 TranscendentalCacheStub::UNTAGGED); |
| 2511 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2640 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 2512 } | 2641 } |
| 2513 | 2642 |
| 2514 | 2643 |
| 2515 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { | 2644 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { |
| 2516 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2645 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 2517 TranscendentalCacheStub stub(TranscendentalCache::SIN, | 2646 TranscendentalCacheStub stub(TranscendentalCache::SIN, |
| 2518 TranscendentalCacheStub::UNTAGGED); | 2647 TranscendentalCacheStub::UNTAGGED); |
| 2519 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2648 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 2520 } | 2649 } |
| 2521 | 2650 |
| 2522 | 2651 |
| 2523 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { | 2652 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { |
| 2524 switch (instr->op()) { | 2653 switch (instr->op()) { |
| 2525 case kMathAbs: | 2654 case kMathAbs: |
| 2526 DoMathAbs(instr); | 2655 DoMathAbs(instr); |
| 2527 break; | 2656 break; |
| 2528 case kMathFloor: | 2657 case kMathFloor: |
| 2529 DoMathFloor(instr); | 2658 DoMathFloor(instr); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 2547 DoMathLog(instr); | 2676 DoMathLog(instr); |
| 2548 break; | 2677 break; |
| 2549 | 2678 |
| 2550 default: | 2679 default: |
| 2551 UNREACHABLE(); | 2680 UNREACHABLE(); |
| 2552 } | 2681 } |
| 2553 } | 2682 } |
| 2554 | 2683 |
| 2555 | 2684 |
| 2556 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 2685 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
| 2686 ASSERT(ToRegister(instr->context()).is(esi)); |
| 2687 ASSERT(ToRegister(instr->key()).is(ecx)); |
| 2557 ASSERT(ToRegister(instr->result()).is(eax)); | 2688 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2558 ASSERT(ToRegister(instr->InputAt(0)).is(ecx)); | |
| 2559 | 2689 |
| 2560 int arity = instr->arity(); | 2690 int arity = instr->arity(); |
| 2561 Handle<Code> ic = Isolate::Current()->stub_cache()-> | 2691 Handle<Code> ic = Isolate::Current()->stub_cache()-> |
| 2562 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); | 2692 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); |
| 2563 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2693 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2564 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 2565 } | 2694 } |
| 2566 | 2695 |
| 2567 | 2696 |
| 2568 void LCodeGen::DoCallNamed(LCallNamed* instr) { | 2697 void LCodeGen::DoCallNamed(LCallNamed* instr) { |
| 2698 ASSERT(ToRegister(instr->context()).is(esi)); |
| 2569 ASSERT(ToRegister(instr->result()).is(eax)); | 2699 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2570 | 2700 |
| 2571 int arity = instr->arity(); | 2701 int arity = instr->arity(); |
| 2572 Handle<Code> ic = Isolate::Current()->stub_cache()-> | 2702 Handle<Code> ic = Isolate::Current()->stub_cache()-> |
| 2573 ComputeCallInitialize(arity, NOT_IN_LOOP); | 2703 ComputeCallInitialize(arity, NOT_IN_LOOP); |
| 2574 __ mov(ecx, instr->name()); | 2704 __ mov(ecx, instr->name()); |
| 2575 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2705 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2576 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 2577 } | 2706 } |
| 2578 | 2707 |
| 2579 | 2708 |
| 2580 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 2709 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 2710 ASSERT(ToRegister(instr->context()).is(esi)); |
| 2581 ASSERT(ToRegister(instr->result()).is(eax)); | 2711 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2582 | 2712 |
| 2583 int arity = instr->arity(); | 2713 int arity = instr->arity(); |
| 2584 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); | 2714 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); |
| 2585 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2715 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 2586 __ Drop(1); | 2716 __ Drop(1); |
| 2587 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 2588 } | 2717 } |
| 2589 | 2718 |
| 2590 | 2719 |
| 2591 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 2720 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
| 2721 ASSERT(ToRegister(instr->context()).is(esi)); |
| 2592 ASSERT(ToRegister(instr->result()).is(eax)); | 2722 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2593 | 2723 |
| 2594 int arity = instr->arity(); | 2724 int arity = instr->arity(); |
| 2595 Handle<Code> ic = Isolate::Current()->stub_cache()-> | 2725 Handle<Code> ic = Isolate::Current()->stub_cache()-> |
| 2596 ComputeCallInitialize(arity, NOT_IN_LOOP); | 2726 ComputeCallInitialize(arity, NOT_IN_LOOP); |
| 2597 __ mov(ecx, instr->name()); | 2727 __ mov(ecx, instr->name()); |
| 2598 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); | 2728 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); |
| 2599 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 2600 } | 2729 } |
| 2601 | 2730 |
| 2602 | 2731 |
| 2603 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 2732 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
| 2604 ASSERT(ToRegister(instr->result()).is(eax)); | 2733 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2605 __ mov(edi, instr->target()); | 2734 __ mov(edi, instr->target()); |
| 2606 CallKnownFunction(instr->target(), instr->arity(), instr); | 2735 CallKnownFunction(instr->target(), instr->arity(), instr); |
| 2607 } | 2736 } |
| 2608 | 2737 |
| 2609 | 2738 |
| 2610 void LCodeGen::DoCallNew(LCallNew* instr) { | 2739 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 2611 ASSERT(ToRegister(instr->InputAt(0)).is(edi)); | 2740 ASSERT(ToRegister(instr->context()).is(esi)); |
| 2741 ASSERT(ToRegister(instr->constructor()).is(edi)); |
| 2612 ASSERT(ToRegister(instr->result()).is(eax)); | 2742 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2613 | 2743 |
| 2614 Handle<Code> builtin(Isolate::Current()->builtins()->builtin( | 2744 Handle<Code> builtin(Isolate::Current()->builtins()->builtin( |
| 2615 Builtins::JSConstructCall)); | 2745 Builtins::JSConstructCall)); |
| 2616 __ Set(eax, Immediate(instr->arity())); | 2746 __ Set(eax, Immediate(instr->arity())); |
| 2617 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); | 2747 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); |
| 2618 } | 2748 } |
| 2619 | 2749 |
| 2620 | 2750 |
| 2621 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 2751 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 2622 CallRuntime(instr->function(), instr->arity(), instr); | 2752 CallRuntime(instr->function(), instr->arity(), instr, false); |
| 2623 } | 2753 } |
| 2624 | 2754 |
| 2625 | 2755 |
| 2626 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { | 2756 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
| 2627 Register object = ToRegister(instr->object()); | 2757 Register object = ToRegister(instr->object()); |
| 2628 Register value = ToRegister(instr->value()); | 2758 Register value = ToRegister(instr->value()); |
| 2629 int offset = instr->offset(); | 2759 int offset = instr->offset(); |
| 2630 | 2760 |
| 2631 if (!instr->transition().is_null()) { | 2761 if (!instr->transition().is_null()) { |
| 2632 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); | 2762 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); |
| (...skipping 14 matching lines...) Expand all Loading... |
| 2647 if (instr->needs_write_barrier()) { | 2777 if (instr->needs_write_barrier()) { |
| 2648 // Update the write barrier for the properties array. | 2778 // Update the write barrier for the properties array. |
| 2649 // object is used as a scratch register. | 2779 // object is used as a scratch register. |
| 2650 __ RecordWrite(temp, offset, value, object); | 2780 __ RecordWrite(temp, offset, value, object); |
| 2651 } | 2781 } |
| 2652 } | 2782 } |
| 2653 } | 2783 } |
| 2654 | 2784 |
| 2655 | 2785 |
| 2656 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 2786 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
| 2787 ASSERT(ToRegister(instr->context()).is(esi)); |
| 2657 ASSERT(ToRegister(instr->object()).is(edx)); | 2788 ASSERT(ToRegister(instr->object()).is(edx)); |
| 2658 ASSERT(ToRegister(instr->value()).is(eax)); | 2789 ASSERT(ToRegister(instr->value()).is(eax)); |
| 2659 | 2790 |
| 2660 __ mov(ecx, instr->name()); | 2791 __ mov(ecx, instr->name()); |
| 2661 Handle<Code> ic(Isolate::Current()->builtins()->builtin( | 2792 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2662 Builtins::StoreIC_Initialize)); | 2793 info_->is_strict() ? Builtins::StoreIC_Initialize_Strict |
| 2794 : Builtins::StoreIC_Initialize)); |
| 2663 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2795 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2664 } | 2796 } |
| 2665 | 2797 |
| 2666 | 2798 |
| 2667 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 2799 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
| 2668 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); | 2800 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); |
| 2669 DeoptimizeIf(above_equal, instr->environment()); | 2801 DeoptimizeIf(above_equal, instr->environment()); |
| 2670 } | 2802 } |
| 2671 | 2803 |
| 2672 | 2804 |
| 2805 void LCodeGen::DoStorePixelArrayElement(LStorePixelArrayElement* instr) { |
| 2806 Register external_pointer = ToRegister(instr->external_pointer()); |
| 2807 Register key = ToRegister(instr->key()); |
| 2808 Register value = ToRegister(instr->value()); |
| 2809 ASSERT(ToRegister(instr->TempAt(0)).is(eax)); |
| 2810 |
| 2811 __ mov(eax, value); |
| 2812 { // Clamp the value to [0..255]. |
| 2813 NearLabel done; |
| 2814 __ test(eax, Immediate(0xFFFFFF00)); |
| 2815 __ j(zero, &done); |
| 2816 __ setcc(negative, eax); // 1 if negative, 0 if positive. |
| 2817 __ dec_b(eax); // 0 if negative, 255 if positive. |
| 2818 __ bind(&done); |
| 2819 } |
| 2820 __ mov_b(Operand(external_pointer, key, times_1, 0), eax); |
| 2821 } |
| 2822 |
| 2823 |
| 2673 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { | 2824 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { |
| 2674 Register value = ToRegister(instr->value()); | 2825 Register value = ToRegister(instr->value()); |
| 2675 Register elements = ToRegister(instr->object()); | 2826 Register elements = ToRegister(instr->object()); |
| 2676 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; | 2827 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; |
| 2677 | 2828 |
| 2678 // Do the store. | 2829 // Do the store. |
| 2679 if (instr->key()->IsConstantOperand()) { | 2830 if (instr->key()->IsConstantOperand()) { |
| 2680 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 2831 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); |
| 2681 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); | 2832 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); |
| 2682 int offset = | 2833 int offset = |
| (...skipping 13 matching lines...) Expand all Loading... |
| 2696 FieldOperand(elements, | 2847 FieldOperand(elements, |
| 2697 key, | 2848 key, |
| 2698 times_pointer_size, | 2849 times_pointer_size, |
| 2699 FixedArray::kHeaderSize)); | 2850 FixedArray::kHeaderSize)); |
| 2700 __ RecordWrite(elements, key, value); | 2851 __ RecordWrite(elements, key, value); |
| 2701 } | 2852 } |
| 2702 } | 2853 } |
| 2703 | 2854 |
| 2704 | 2855 |
| 2705 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 2856 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 2857 ASSERT(ToRegister(instr->context()).is(esi)); |
| 2706 ASSERT(ToRegister(instr->object()).is(edx)); | 2858 ASSERT(ToRegister(instr->object()).is(edx)); |
| 2707 ASSERT(ToRegister(instr->key()).is(ecx)); | 2859 ASSERT(ToRegister(instr->key()).is(ecx)); |
| 2708 ASSERT(ToRegister(instr->value()).is(eax)); | 2860 ASSERT(ToRegister(instr->value()).is(eax)); |
| 2709 | 2861 |
| 2710 Handle<Code> ic(Isolate::Current()->builtins()->builtin( | 2862 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2711 Builtins::KeyedStoreIC_Initialize)); | 2863 info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict |
| 2864 : Builtins::KeyedStoreIC_Initialize)); |
| 2712 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2865 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2713 } | 2866 } |
| 2714 | 2867 |
| 2715 | 2868 |
| 2716 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 2869 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 2717 class DeferredStringCharCodeAt: public LDeferredCode { | 2870 class DeferredStringCharCodeAt: public LDeferredCode { |
| 2718 public: | 2871 public: |
| 2719 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 2872 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
| 2720 : LDeferredCode(codegen), instr_(instr) { } | 2873 : LDeferredCode(codegen), instr_(instr) { } |
| 2721 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } | 2874 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2772 __ j(not_equal, deferred->entry()); | 2925 __ j(not_equal, deferred->entry()); |
| 2773 // Get the first of the two strings and load its instance type. | 2926 // Get the first of the two strings and load its instance type. |
| 2774 __ mov(string, FieldOperand(string, ConsString::kFirstOffset)); | 2927 __ mov(string, FieldOperand(string, ConsString::kFirstOffset)); |
| 2775 __ mov(result, FieldOperand(string, HeapObject::kMapOffset)); | 2928 __ mov(result, FieldOperand(string, HeapObject::kMapOffset)); |
| 2776 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset)); | 2929 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset)); |
| 2777 // If the first cons component is also non-flat, then go to runtime. | 2930 // If the first cons component is also non-flat, then go to runtime. |
| 2778 STATIC_ASSERT(kSeqStringTag == 0); | 2931 STATIC_ASSERT(kSeqStringTag == 0); |
| 2779 __ test(result, Immediate(kStringRepresentationMask)); | 2932 __ test(result, Immediate(kStringRepresentationMask)); |
| 2780 __ j(not_zero, deferred->entry()); | 2933 __ j(not_zero, deferred->entry()); |
| 2781 | 2934 |
| 2782 // Check for 1-byte or 2-byte string. | 2935 // Check for ASCII or two-byte string. |
| 2783 __ bind(&flat_string); | 2936 __ bind(&flat_string); |
| 2784 STATIC_ASSERT(kAsciiStringTag != 0); | 2937 STATIC_ASSERT(kAsciiStringTag != 0); |
| 2785 __ test(result, Immediate(kStringEncodingMask)); | 2938 __ test(result, Immediate(kStringEncodingMask)); |
| 2786 __ j(not_zero, &ascii_string); | 2939 __ j(not_zero, &ascii_string); |
| 2787 | 2940 |
| 2788 // 2-byte string. | 2941 // Two-byte string. |
| 2789 // Load the 2-byte character code into the result register. | 2942 // Load the two-byte character code into the result register. |
| 2790 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 2943 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
| 2791 if (instr->index()->IsConstantOperand()) { | 2944 if (instr->index()->IsConstantOperand()) { |
| 2792 __ movzx_w(result, | 2945 __ movzx_w(result, |
| 2793 FieldOperand(string, | 2946 FieldOperand(string, |
| 2794 SeqTwoByteString::kHeaderSize + 2 * const_index)); | 2947 SeqTwoByteString::kHeaderSize + |
| 2948 (kUC16Size * const_index))); |
| 2795 } else { | 2949 } else { |
| 2796 __ movzx_w(result, FieldOperand(string, | 2950 __ movzx_w(result, FieldOperand(string, |
| 2797 index, | 2951 index, |
| 2798 times_2, | 2952 times_2, |
| 2799 SeqTwoByteString::kHeaderSize)); | 2953 SeqTwoByteString::kHeaderSize)); |
| 2800 } | 2954 } |
| 2801 __ jmp(&done); | 2955 __ jmp(&done); |
| 2802 | 2956 |
| 2803 // ASCII string. | 2957 // ASCII string. |
| 2804 // Load the byte into the result register. | 2958 // Load the byte into the result register. |
| (...skipping 27 matching lines...) Expand all Loading... |
| 2832 // DoStringCharCodeAt above. | 2986 // DoStringCharCodeAt above. |
| 2833 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); | 2987 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); |
| 2834 if (instr->index()->IsConstantOperand()) { | 2988 if (instr->index()->IsConstantOperand()) { |
| 2835 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); | 2989 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
| 2836 __ push(Immediate(Smi::FromInt(const_index))); | 2990 __ push(Immediate(Smi::FromInt(const_index))); |
| 2837 } else { | 2991 } else { |
| 2838 Register index = ToRegister(instr->index()); | 2992 Register index = ToRegister(instr->index()); |
| 2839 __ SmiTag(index); | 2993 __ SmiTag(index); |
| 2840 __ push(index); | 2994 __ push(index); |
| 2841 } | 2995 } |
| 2996 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2842 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt); | 2997 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt); |
| 2843 RecordSafepointWithRegisters( | 2998 RecordSafepointWithRegisters( |
| 2844 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex); | 2999 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex); |
| 2845 if (FLAG_debug_code) { | 3000 if (FLAG_debug_code) { |
| 2846 __ AbortIfNotSmi(eax); | 3001 __ AbortIfNotSmi(eax); |
| 2847 } | 3002 } |
| 2848 __ SmiUntag(eax); | 3003 __ SmiUntag(eax); |
| 2849 __ mov(Operand(esp, EspIndexForPushAll(result) * kPointerSize), eax); | 3004 __ StoreToSafepointRegisterSlot(result, eax); |
| 2850 __ PopSafepointRegisters(); | 3005 __ PopSafepointRegisters(); |
| 2851 } | 3006 } |
| 2852 | 3007 |
| 2853 | 3008 |
| 2854 void LCodeGen::DoStringLength(LStringLength* instr) { | 3009 void LCodeGen::DoStringLength(LStringLength* instr) { |
| 2855 Register string = ToRegister(instr->string()); | 3010 Register string = ToRegister(instr->string()); |
| 2856 Register result = ToRegister(instr->result()); | 3011 Register result = ToRegister(instr->result()); |
| 2857 __ mov(result, FieldOperand(string, String::kLengthOffset)); | 3012 __ mov(result, FieldOperand(string, String::kLengthOffset)); |
| 2858 } | 3013 } |
| 2859 | 3014 |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2907 __ AllocateHeapNumber(reg, tmp, no_reg, &slow); | 3062 __ AllocateHeapNumber(reg, tmp, no_reg, &slow); |
| 2908 __ jmp(&done); | 3063 __ jmp(&done); |
| 2909 } | 3064 } |
| 2910 | 3065 |
| 2911 // Slow case: Call the runtime system to do the number allocation. | 3066 // Slow case: Call the runtime system to do the number allocation. |
| 2912 __ bind(&slow); | 3067 __ bind(&slow); |
| 2913 | 3068 |
| 2914 // TODO(3095996): Put a valid pointer value in the stack slot where the result | 3069 // TODO(3095996): Put a valid pointer value in the stack slot where the result |
| 2915 // register is stored, as this register is in the pointer map, but contains an | 3070 // register is stored, as this register is in the pointer map, but contains an |
| 2916 // integer value. | 3071 // integer value. |
| 2917 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0)); | 3072 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); |
| 2918 | 3073 |
| 3074 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2919 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 3075 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 2920 RecordSafepointWithRegisters( | 3076 RecordSafepointWithRegisters( |
| 2921 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 3077 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 2922 if (!reg.is(eax)) __ mov(reg, eax); | 3078 if (!reg.is(eax)) __ mov(reg, eax); |
| 2923 | 3079 |
| 2924 // Done. Put the value in xmm0 into the value of the allocated heap | 3080 // Done. Put the value in xmm0 into the value of the allocated heap |
| 2925 // number. | 3081 // number. |
| 2926 __ bind(&done); | 3082 __ bind(&done); |
| 2927 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); | 3083 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); |
| 2928 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), reg); | 3084 __ StoreToSafepointRegisterSlot(reg, reg); |
| 2929 __ PopSafepointRegisters(); | 3085 __ PopSafepointRegisters(); |
| 2930 } | 3086 } |
| 2931 | 3087 |
| 2932 | 3088 |
| 2933 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { | 3089 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { |
| 2934 class DeferredNumberTagD: public LDeferredCode { | 3090 class DeferredNumberTagD: public LDeferredCode { |
| 2935 public: | 3091 public: |
| 2936 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) | 3092 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) |
| 2937 : LDeferredCode(codegen), instr_(instr) { } | 3093 : LDeferredCode(codegen), instr_(instr) { } |
| 2938 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } | 3094 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } |
| (...skipping 17 matching lines...) Expand all Loading... |
| 2956 | 3112 |
| 2957 | 3113 |
| 2958 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 3114 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
| 2959 // TODO(3095996): Get rid of this. For now, we need to make the | 3115 // TODO(3095996): Get rid of this. For now, we need to make the |
| 2960 // result register contain a valid pointer because it is already | 3116 // result register contain a valid pointer because it is already |
| 2961 // contained in the register pointer map. | 3117 // contained in the register pointer map. |
| 2962 Register reg = ToRegister(instr->result()); | 3118 Register reg = ToRegister(instr->result()); |
| 2963 __ Set(reg, Immediate(0)); | 3119 __ Set(reg, Immediate(0)); |
| 2964 | 3120 |
| 2965 __ PushSafepointRegisters(); | 3121 __ PushSafepointRegisters(); |
| 3122 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2966 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 3123 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 2967 RecordSafepointWithRegisters( | 3124 RecordSafepointWithRegisters( |
| 2968 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 3125 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 2969 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), eax); | 3126 __ StoreToSafepointRegisterSlot(reg, eax); |
| 2970 __ PopSafepointRegisters(); | 3127 __ PopSafepointRegisters(); |
| 2971 } | 3128 } |
| 2972 | 3129 |
| 2973 | 3130 |
| 2974 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 3131 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 2975 LOperand* input = instr->InputAt(0); | 3132 LOperand* input = instr->InputAt(0); |
| 2976 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 3133 ASSERT(input->IsRegister() && input->Equals(instr->result())); |
| 2977 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 3134 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
| 2978 __ SmiTag(ToRegister(input)); | 3135 __ SmiTag(ToRegister(input)); |
| 2979 } | 3136 } |
| (...skipping 391 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3371 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3528 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 3372 __ push(Immediate(instr->hydrogen()->constant_elements())); | 3529 __ push(Immediate(instr->hydrogen()->constant_elements())); |
| 3373 | 3530 |
| 3374 // Pick the right runtime function or stub to call. | 3531 // Pick the right runtime function or stub to call. |
| 3375 int length = instr->hydrogen()->length(); | 3532 int length = instr->hydrogen()->length(); |
| 3376 if (instr->hydrogen()->IsCopyOnWrite()) { | 3533 if (instr->hydrogen()->IsCopyOnWrite()) { |
| 3377 ASSERT(instr->hydrogen()->depth() == 1); | 3534 ASSERT(instr->hydrogen()->depth() == 1); |
| 3378 FastCloneShallowArrayStub::Mode mode = | 3535 FastCloneShallowArrayStub::Mode mode = |
| 3379 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; | 3536 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
| 3380 FastCloneShallowArrayStub stub(mode, length); | 3537 FastCloneShallowArrayStub stub(mode, length); |
| 3381 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3538 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 3382 } else if (instr->hydrogen()->depth() > 1) { | 3539 } else if (instr->hydrogen()->depth() > 1) { |
| 3383 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); | 3540 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false); |
| 3384 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 3541 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
| 3385 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); | 3542 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false); |
| 3386 } else { | 3543 } else { |
| 3387 FastCloneShallowArrayStub::Mode mode = | 3544 FastCloneShallowArrayStub::Mode mode = |
| 3388 FastCloneShallowArrayStub::CLONE_ELEMENTS; | 3545 FastCloneShallowArrayStub::CLONE_ELEMENTS; |
| 3389 FastCloneShallowArrayStub stub(mode, length); | 3546 FastCloneShallowArrayStub stub(mode, length); |
| 3390 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3547 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 3391 } | 3548 } |
| 3392 } | 3549 } |
| 3393 | 3550 |
| 3394 | 3551 |
| 3395 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 3552 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
| 3553 ASSERT(ToRegister(instr->context()).is(esi)); |
| 3396 // Setup the parameters to the stub/runtime call. | 3554 // Setup the parameters to the stub/runtime call. |
| 3397 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3555 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3398 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); | 3556 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); |
| 3399 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3557 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 3400 __ push(Immediate(instr->hydrogen()->constant_properties())); | 3558 __ push(Immediate(instr->hydrogen()->constant_properties())); |
| 3401 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0))); | 3559 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0))); |
| 3402 | 3560 |
| 3403 // Pick the right runtime function to call. | 3561 // Pick the right runtime function to call. |
| 3404 if (instr->hydrogen()->depth() > 1) { | 3562 if (instr->hydrogen()->depth() > 1) { |
| 3405 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); | 3563 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 3423 __ mov(ebx, FieldOperand(ecx, literal_offset)); | 3581 __ mov(ebx, FieldOperand(ecx, literal_offset)); |
| 3424 __ cmp(ebx, FACTORY->undefined_value()); | 3582 __ cmp(ebx, FACTORY->undefined_value()); |
| 3425 __ j(not_equal, &materialized); | 3583 __ j(not_equal, &materialized); |
| 3426 | 3584 |
| 3427 // Create regexp literal using runtime function | 3585 // Create regexp literal using runtime function |
| 3428 // Result will be in eax. | 3586 // Result will be in eax. |
| 3429 __ push(ecx); | 3587 __ push(ecx); |
| 3430 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3588 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 3431 __ push(Immediate(instr->hydrogen()->pattern())); | 3589 __ push(Immediate(instr->hydrogen()->pattern())); |
| 3432 __ push(Immediate(instr->hydrogen()->flags())); | 3590 __ push(Immediate(instr->hydrogen()->flags())); |
| 3433 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 3591 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false); |
| 3434 __ mov(ebx, eax); | 3592 __ mov(ebx, eax); |
| 3435 | 3593 |
| 3436 __ bind(&materialized); | 3594 __ bind(&materialized); |
| 3437 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 3595 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 3438 Label allocated, runtime_allocate; | 3596 Label allocated, runtime_allocate; |
| 3439 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | 3597 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); |
| 3440 __ jmp(&allocated); | 3598 __ jmp(&allocated); |
| 3441 | 3599 |
| 3442 __ bind(&runtime_allocate); | 3600 __ bind(&runtime_allocate); |
| 3443 __ push(ebx); | 3601 __ push(ebx); |
| 3444 __ push(Immediate(Smi::FromInt(size))); | 3602 __ push(Immediate(Smi::FromInt(size))); |
| 3445 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 3603 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false); |
| 3446 __ pop(ebx); | 3604 __ pop(ebx); |
| 3447 | 3605 |
| 3448 __ bind(&allocated); | 3606 __ bind(&allocated); |
| 3449 // Copy the content into the newly allocated memory. | 3607 // Copy the content into the newly allocated memory. |
| 3450 // (Unroll copy loop once for better throughput). | 3608 // (Unroll copy loop once for better throughput). |
| 3451 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { | 3609 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { |
| 3452 __ mov(edx, FieldOperand(ebx, i)); | 3610 __ mov(edx, FieldOperand(ebx, i)); |
| 3453 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); | 3611 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); |
| 3454 __ mov(FieldOperand(eax, i), edx); | 3612 __ mov(FieldOperand(eax, i), edx); |
| 3455 __ mov(FieldOperand(eax, i + kPointerSize), ecx); | 3613 __ mov(FieldOperand(eax, i + kPointerSize), ecx); |
| 3456 } | 3614 } |
| 3457 if ((size % (2 * kPointerSize)) != 0) { | 3615 if ((size % (2 * kPointerSize)) != 0) { |
| 3458 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); | 3616 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); |
| 3459 __ mov(FieldOperand(eax, size - kPointerSize), edx); | 3617 __ mov(FieldOperand(eax, size - kPointerSize), edx); |
| 3460 } | 3618 } |
| 3461 } | 3619 } |
| 3462 | 3620 |
| 3463 | 3621 |
| 3464 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 3622 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 3465 // Use the fast case closure allocation code that allocates in new | 3623 // Use the fast case closure allocation code that allocates in new |
| 3466 // space for nested functions that don't need literals cloning. | 3624 // space for nested functions that don't need literals cloning. |
| 3467 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); | 3625 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); |
| 3468 bool pretenure = instr->hydrogen()->pretenure(); | 3626 bool pretenure = instr->hydrogen()->pretenure(); |
| 3469 if (shared_info->num_literals() == 0 && !pretenure) { | 3627 if (shared_info->num_literals() == 0 && !pretenure) { |
| 3470 FastNewClosureStub stub; | 3628 FastNewClosureStub stub; |
| 3471 __ push(Immediate(shared_info)); | 3629 __ push(Immediate(shared_info)); |
| 3472 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3630 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 3473 } else { | 3631 } else { |
| 3474 __ push(esi); | 3632 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 3475 __ push(Immediate(shared_info)); | 3633 __ push(Immediate(shared_info)); |
| 3476 __ push(Immediate(pretenure | 3634 __ push(Immediate(pretenure |
| 3477 ? FACTORY->true_value() | 3635 ? FACTORY->true_value() |
| 3478 : FACTORY->false_value())); | 3636 : FACTORY->false_value())); |
| 3479 CallRuntime(Runtime::kNewClosure, 3, instr); | 3637 CallRuntime(Runtime::kNewClosure, 3, instr, false); |
| 3480 } | 3638 } |
| 3481 } | 3639 } |
| 3482 | 3640 |
| 3483 | 3641 |
| 3484 void LCodeGen::DoTypeof(LTypeof* instr) { | 3642 void LCodeGen::DoTypeof(LTypeof* instr) { |
| 3485 LOperand* input = instr->InputAt(0); | 3643 LOperand* input = instr->InputAt(0); |
| 3486 if (input->IsConstantOperand()) { | 3644 if (input->IsConstantOperand()) { |
| 3487 __ push(ToImmediate(input)); | 3645 __ push(ToImmediate(input)); |
| 3488 } else { | 3646 } else { |
| 3489 __ push(ToOperand(input)); | 3647 __ push(ToOperand(input)); |
| 3490 } | 3648 } |
| 3491 CallRuntime(Runtime::kTypeof, 1, instr); | 3649 CallRuntime(Runtime::kTypeof, 1, instr, false); |
| 3492 } | 3650 } |
| 3493 | 3651 |
| 3494 | 3652 |
| 3495 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { | 3653 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { |
| 3496 Register input = ToRegister(instr->InputAt(0)); | 3654 Register input = ToRegister(instr->InputAt(0)); |
| 3497 Register result = ToRegister(instr->result()); | 3655 Register result = ToRegister(instr->result()); |
| 3498 Label true_label; | 3656 Label true_label; |
| 3499 Label false_label; | 3657 Label false_label; |
| 3500 NearLabel done; | 3658 NearLabel done; |
| 3501 | 3659 |
| (...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3672 if (key->IsConstantOperand()) { | 3830 if (key->IsConstantOperand()) { |
| 3673 __ push(ToImmediate(key)); | 3831 __ push(ToImmediate(key)); |
| 3674 } else { | 3832 } else { |
| 3675 __ push(ToOperand(key)); | 3833 __ push(ToOperand(key)); |
| 3676 } | 3834 } |
| 3677 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 3835 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
| 3678 LPointerMap* pointers = instr->pointer_map(); | 3836 LPointerMap* pointers = instr->pointer_map(); |
| 3679 LEnvironment* env = instr->deoptimization_environment(); | 3837 LEnvironment* env = instr->deoptimization_environment(); |
| 3680 RecordPosition(pointers->position()); | 3838 RecordPosition(pointers->position()); |
| 3681 RegisterEnvironmentForDeoptimization(env); | 3839 RegisterEnvironmentForDeoptimization(env); |
| 3840 // Create safepoint generator that will also ensure enough space in the |
| 3841 // reloc info for patching in deoptimization (since this is invoking a |
| 3842 // builtin) |
| 3682 SafepointGenerator safepoint_generator(this, | 3843 SafepointGenerator safepoint_generator(this, |
| 3683 pointers, | 3844 pointers, |
| 3684 env->deoptimization_index()); | 3845 env->deoptimization_index(), |
| 3846 true); |
| 3847 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 3848 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); |
| 3685 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); | 3849 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); |
| 3686 } | 3850 } |
| 3687 | 3851 |
| 3688 | 3852 |
| 3689 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 3853 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
| 3690 // Perform stack overflow check. | 3854 // Perform stack overflow check. |
| 3691 NearLabel done; | 3855 NearLabel done; |
| 3692 ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); | 3856 ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); |
| 3693 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 3857 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
| 3694 __ j(above_equal, &done); | 3858 __ j(above_equal, &done); |
| 3695 | 3859 |
| 3696 StackCheckStub stub; | 3860 StackCheckStub stub; |
| 3697 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3861 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 3698 __ bind(&done); | 3862 __ bind(&done); |
| 3699 } | 3863 } |
| 3700 | 3864 |
| 3701 | 3865 |
| 3702 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 3866 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
| 3703 // This is a pseudo-instruction that ensures that the environment here is | 3867 // This is a pseudo-instruction that ensures that the environment here is |
| 3704 // properly registered for deoptimization and records the assembler's PC | 3868 // properly registered for deoptimization and records the assembler's PC |
| 3705 // offset. | 3869 // offset. |
| 3706 LEnvironment* environment = instr->environment(); | 3870 LEnvironment* environment = instr->environment(); |
| 3707 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), | 3871 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), |
| 3708 instr->SpilledDoubleRegisterArray()); | 3872 instr->SpilledDoubleRegisterArray()); |
| 3709 | 3873 |
| 3710 // If the environment were already registered, we would have no way of | 3874 // If the environment were already registered, we would have no way of |
| 3711 // backpatching it with the spill slot operands. | 3875 // backpatching it with the spill slot operands. |
| 3712 ASSERT(!environment->HasBeenRegistered()); | 3876 ASSERT(!environment->HasBeenRegistered()); |
| 3713 RegisterEnvironmentForDeoptimization(environment); | 3877 RegisterEnvironmentForDeoptimization(environment); |
| 3714 ASSERT(osr_pc_offset_ == -1); | 3878 ASSERT(osr_pc_offset_ == -1); |
| 3715 osr_pc_offset_ = masm()->pc_offset(); | 3879 osr_pc_offset_ = masm()->pc_offset(); |
| 3716 } | 3880 } |
| 3717 | 3881 |
| 3718 | 3882 |
| 3719 #undef __ | 3883 #undef __ |
| 3720 | 3884 |
| 3721 } } // namespace v8::internal | 3885 } } // namespace v8::internal |
| 3722 | 3886 |
| 3723 #endif // V8_TARGET_ARCH_IA32 | 3887 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |