| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
| 15 // | 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" |
| 29 |
| 30 #if defined(V8_TARGET_ARCH_IA32) |
| 31 |
| 28 #include "ia32/lithium-codegen-ia32.h" | 32 #include "ia32/lithium-codegen-ia32.h" |
| 29 #include "code-stubs.h" | 33 #include "code-stubs.h" |
| 30 #include "stub-cache.h" | 34 #include "stub-cache.h" |
| 31 | 35 |
| 32 namespace v8 { | 36 namespace v8 { |
| 33 namespace internal { | 37 namespace internal { |
| 34 | 38 |
| 35 | 39 |
| 40 // When invoking builtins, we need to record the safepoint in the middle of |
| 41 // the invoke instruction sequence generated by the macro assembler. |
| 36 class SafepointGenerator : public PostCallGenerator { | 42 class SafepointGenerator : public PostCallGenerator { |
| 37 public: | 43 public: |
| 38 SafepointGenerator(LCodeGen* codegen, | 44 SafepointGenerator(LCodeGen* codegen, |
| 39 LPointerMap* pointers, | 45 LPointerMap* pointers, |
| 40 int deoptimization_index) | 46 int deoptimization_index, |
| 47 bool ensure_reloc_space = false) |
| 41 : codegen_(codegen), | 48 : codegen_(codegen), |
| 42 pointers_(pointers), | 49 pointers_(pointers), |
| 43 deoptimization_index_(deoptimization_index) { } | 50 deoptimization_index_(deoptimization_index), |
| 51 ensure_reloc_space_(ensure_reloc_space) { } |
| 44 virtual ~SafepointGenerator() { } | 52 virtual ~SafepointGenerator() { } |
| 45 | 53 |
| 46 virtual void Generate() { | 54 virtual void Generate() { |
| 55 // Ensure that we have enough space in the reloc info to patch |
| 56 // this with calls when doing deoptimization. |
| 57 if (ensure_reloc_space_) { |
| 58 codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true); |
| 59 } |
| 47 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 60 codegen_->RecordSafepoint(pointers_, deoptimization_index_); |
| 48 } | 61 } |
| 49 | 62 |
| 50 private: | 63 private: |
| 51 LCodeGen* codegen_; | 64 LCodeGen* codegen_; |
| 52 LPointerMap* pointers_; | 65 LPointerMap* pointers_; |
| 53 int deoptimization_index_; | 66 int deoptimization_index_; |
| 67 bool ensure_reloc_space_; |
| 54 }; | 68 }; |
| 55 | 69 |
| 56 | 70 |
| 57 #define __ masm()-> | 71 #define __ masm()-> |
| 58 | 72 |
| 59 bool LCodeGen::GenerateCode() { | 73 bool LCodeGen::GenerateCode() { |
| 60 HPhase phase("Code generation", chunk()); | 74 HPhase phase("Code generation", chunk()); |
| 61 ASSERT(is_unused()); | 75 ASSERT(is_unused()); |
| 62 status_ = GENERATING; | 76 status_ = GENERATING; |
| 63 CpuFeatures::Scope scope(SSE2); | 77 CpuFeatures::Scope scope(SSE2); |
| 64 return GeneratePrologue() && | 78 return GeneratePrologue() && |
| 65 GenerateBody() && | 79 GenerateBody() && |
| 66 GenerateDeferredCode() && | 80 GenerateDeferredCode() && |
| 67 GenerateSafepointTable(); | 81 GenerateSafepointTable(); |
| 68 } | 82 } |
| 69 | 83 |
| 70 | 84 |
| 71 void LCodeGen::FinishCode(Handle<Code> code) { | 85 void LCodeGen::FinishCode(Handle<Code> code) { |
| 72 ASSERT(is_done()); | 86 ASSERT(is_done()); |
| 73 code->set_stack_slots(StackSlotCount()); | 87 code->set_stack_slots(StackSlotCount()); |
| 74 code->set_safepoint_table_start(safepoints_.GetCodeOffset()); | 88 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 75 PopulateDeoptimizationData(code); | 89 PopulateDeoptimizationData(code); |
| 76 } | 90 } |
| 77 | 91 |
| 78 | 92 |
| 79 void LCodeGen::Abort(const char* format, ...) { | 93 void LCodeGen::Abort(const char* format, ...) { |
| 80 if (FLAG_trace_bailout) { | 94 if (FLAG_trace_bailout) { |
| 81 SmartPointer<char> debug_name = graph()->debug_name()->ToCString(); | 95 SmartPointer<char> debug_name = graph()->debug_name()->ToCString(); |
| 82 PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name); | 96 PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name); |
| 83 va_list arguments; | 97 va_list arguments; |
| 84 va_start(arguments, format); | 98 va_start(arguments, format); |
| (...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 128 if (slots > 0) { | 142 if (slots > 0) { |
| 129 if (FLAG_debug_code) { | 143 if (FLAG_debug_code) { |
| 130 __ mov(Operand(eax), Immediate(slots)); | 144 __ mov(Operand(eax), Immediate(slots)); |
| 131 Label loop; | 145 Label loop; |
| 132 __ bind(&loop); | 146 __ bind(&loop); |
| 133 __ push(Immediate(kSlotsZapValue)); | 147 __ push(Immediate(kSlotsZapValue)); |
| 134 __ dec(eax); | 148 __ dec(eax); |
| 135 __ j(not_zero, &loop); | 149 __ j(not_zero, &loop); |
| 136 } else { | 150 } else { |
| 137 __ sub(Operand(esp), Immediate(slots * kPointerSize)); | 151 __ sub(Operand(esp), Immediate(slots * kPointerSize)); |
| 152 #ifdef _MSC_VER |
| 153 // On windows, you may not access the stack more than one page below |
| 154 // the most recently mapped page. To make the allocated area randomly |
| 155 // accessible, we write to each page in turn (the value is irrelevant). |
| 156 const int kPageSize = 4 * KB; |
| 157 for (int offset = slots * kPointerSize - kPageSize; |
| 158 offset > 0; |
| 159 offset -= kPageSize) { |
| 160 __ mov(Operand(esp, offset), eax); |
| 161 } |
| 162 #endif |
| 138 } | 163 } |
| 139 } | 164 } |
| 140 | 165 |
| 141 // Trace the call. | 166 // Trace the call. |
| 142 if (FLAG_trace) { | 167 if (FLAG_trace) { |
| 168 // We have not executed any compiled code yet, so esi still holds the |
| 169 // incoming context. |
| 143 __ CallRuntime(Runtime::kTraceEnter, 0); | 170 __ CallRuntime(Runtime::kTraceEnter, 0); |
| 144 } | 171 } |
| 145 return !is_aborted(); | 172 return !is_aborted(); |
| 146 } | 173 } |
| 147 | 174 |
| 148 | 175 |
| 149 bool LCodeGen::GenerateBody() { | 176 bool LCodeGen::GenerateBody() { |
| 150 ASSERT(is_generating()); | 177 ASSERT(is_generating()); |
| 151 bool emit_instructions = true; | 178 bool emit_instructions = true; |
| 152 for (current_instruction_ = 0; | 179 for (current_instruction_ = 0; |
| (...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 254 // Local or spill slot. Skip the frame pointer, function, and | 281 // Local or spill slot. Skip the frame pointer, function, and |
| 255 // context in the fixed part of the frame. | 282 // context in the fixed part of the frame. |
| 256 return Operand(ebp, -(index + 3) * kPointerSize); | 283 return Operand(ebp, -(index + 3) * kPointerSize); |
| 257 } else { | 284 } else { |
| 258 // Incoming parameter. Skip the return address. | 285 // Incoming parameter. Skip the return address. |
| 259 return Operand(ebp, -(index - 1) * kPointerSize); | 286 return Operand(ebp, -(index - 1) * kPointerSize); |
| 260 } | 287 } |
| 261 } | 288 } |
| 262 | 289 |
| 263 | 290 |
| 291 Operand LCodeGen::HighOperand(LOperand* op) { |
| 292 ASSERT(op->IsDoubleStackSlot()); |
| 293 int index = op->index(); |
| 294 int offset = (index >= 0) ? index + 3 : index - 1; |
| 295 return Operand(ebp, -offset * kPointerSize); |
| 296 } |
| 297 |
| 298 |
| 299 void LCodeGen::WriteTranslation(LEnvironment* environment, |
| 300 Translation* translation) { |
| 301 if (environment == NULL) return; |
| 302 |
| 303 // The translation includes one command per value in the environment. |
| 304 int translation_size = environment->values()->length(); |
| 305 // The output frame height does not include the parameters. |
| 306 int height = translation_size - environment->parameter_count(); |
| 307 |
| 308 WriteTranslation(environment->outer(), translation); |
| 309 int closure_id = DefineDeoptimizationLiteral(environment->closure()); |
| 310 translation->BeginFrame(environment->ast_id(), closure_id, height); |
| 311 for (int i = 0; i < translation_size; ++i) { |
| 312 LOperand* value = environment->values()->at(i); |
| 313 // spilled_registers_ and spilled_double_registers_ are either |
| 314 // both NULL or both set. |
| 315 if (environment->spilled_registers() != NULL && value != NULL) { |
| 316 if (value->IsRegister() && |
| 317 environment->spilled_registers()[value->index()] != NULL) { |
| 318 translation->MarkDuplicate(); |
| 319 AddToTranslation(translation, |
| 320 environment->spilled_registers()[value->index()], |
| 321 environment->HasTaggedValueAt(i)); |
| 322 } else if ( |
| 323 value->IsDoubleRegister() && |
| 324 environment->spilled_double_registers()[value->index()] != NULL) { |
| 325 translation->MarkDuplicate(); |
| 326 AddToTranslation( |
| 327 translation, |
| 328 environment->spilled_double_registers()[value->index()], |
| 329 false); |
| 330 } |
| 331 } |
| 332 |
| 333 AddToTranslation(translation, value, environment->HasTaggedValueAt(i)); |
| 334 } |
| 335 } |
| 336 |
| 337 |
| 264 void LCodeGen::AddToTranslation(Translation* translation, | 338 void LCodeGen::AddToTranslation(Translation* translation, |
| 265 LOperand* op, | 339 LOperand* op, |
| 266 bool is_tagged) { | 340 bool is_tagged) { |
| 267 if (op == NULL) { | 341 if (op == NULL) { |
| 268 // TODO(twuerthinger): Introduce marker operands to indicate that this value | 342 // TODO(twuerthinger): Introduce marker operands to indicate that this value |
| 269 // is not present and must be reconstructed from the deoptimizer. Currently | 343 // is not present and must be reconstructed from the deoptimizer. Currently |
| 270 // this is only used for the arguments object. | 344 // this is only used for the arguments object. |
| 271 translation->StoreArgumentsObject(); | 345 translation->StoreArgumentsObject(); |
| 272 } else if (op->IsStackSlot()) { | 346 } else if (op->IsStackSlot()) { |
| 273 if (is_tagged) { | 347 if (is_tagged) { |
| (...skipping 22 matching lines...) Expand all Loading... |
| 296 int src_index = DefineDeoptimizationLiteral(literal); | 370 int src_index = DefineDeoptimizationLiteral(literal); |
| 297 translation->StoreLiteral(src_index); | 371 translation->StoreLiteral(src_index); |
| 298 } else { | 372 } else { |
| 299 UNREACHABLE(); | 373 UNREACHABLE(); |
| 300 } | 374 } |
| 301 } | 375 } |
| 302 | 376 |
| 303 | 377 |
| 304 void LCodeGen::CallCode(Handle<Code> code, | 378 void LCodeGen::CallCode(Handle<Code> code, |
| 305 RelocInfo::Mode mode, | 379 RelocInfo::Mode mode, |
| 306 LInstruction* instr) { | 380 LInstruction* instr, |
| 307 if (instr != NULL) { | 381 bool adjusted) { |
| 308 LPointerMap* pointers = instr->pointer_map(); | 382 ASSERT(instr != NULL); |
| 309 RecordPosition(pointers->position()); | 383 LPointerMap* pointers = instr->pointer_map(); |
| 310 __ call(code, mode); | 384 RecordPosition(pointers->position()); |
| 311 RegisterLazyDeoptimization(instr); | 385 if (!adjusted) { |
| 312 } else { | 386 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 313 LPointerMap no_pointers(0); | |
| 314 RecordPosition(no_pointers.position()); | |
| 315 __ call(code, mode); | |
| 316 RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex); | |
| 317 } | 387 } |
| 388 __ call(code, mode); |
| 389 RegisterLazyDeoptimization(instr); |
| 318 | 390 |
| 319 // Signal that we don't inline smi code before these stubs in the | 391 // Signal that we don't inline smi code before these stubs in the |
| 320 // optimizing code generator. | 392 // optimizing code generator. |
| 321 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 393 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
| 322 code->kind() == Code::COMPARE_IC) { | 394 code->kind() == Code::COMPARE_IC) { |
| 323 __ nop(); | 395 __ nop(); |
| 324 } | 396 } |
| 325 } | 397 } |
| 326 | 398 |
| 327 | 399 |
| 328 void LCodeGen::CallRuntime(Runtime::Function* function, | 400 void LCodeGen::CallRuntime(Runtime::Function* fun, |
| 329 int num_arguments, | 401 int argc, |
| 330 LInstruction* instr) { | 402 LInstruction* instr, |
| 403 bool adjusted) { |
| 331 ASSERT(instr != NULL); | 404 ASSERT(instr != NULL); |
| 405 ASSERT(instr->HasPointerMap()); |
| 332 LPointerMap* pointers = instr->pointer_map(); | 406 LPointerMap* pointers = instr->pointer_map(); |
| 333 ASSERT(pointers != NULL); | |
| 334 RecordPosition(pointers->position()); | 407 RecordPosition(pointers->position()); |
| 335 | 408 |
| 336 __ CallRuntime(function, num_arguments); | 409 if (!adjusted) { |
| 337 // Runtime calls to Throw are not supposed to ever return at the | 410 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 338 // call site, so don't register lazy deoptimization for these. We do | |
| 339 // however have to record a safepoint since throwing exceptions can | |
| 340 // cause garbage collections. | |
| 341 // BUG(3243555): register a lazy deoptimization point at throw. We need | |
| 342 // it to be able to inline functions containing a throw statement. | |
| 343 if (!instr->IsThrow()) { | |
| 344 RegisterLazyDeoptimization(instr); | |
| 345 } else { | |
| 346 RecordSafepoint(instr->pointer_map(), Safepoint::kNoDeoptimizationIndex); | |
| 347 } | 411 } |
| 412 __ CallRuntime(fun, argc); |
| 413 RegisterLazyDeoptimization(instr); |
| 348 } | 414 } |
| 349 | 415 |
| 350 | 416 |
| 351 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 417 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { |
| 352 // Create the environment to bailout to. If the call has side effects | 418 // Create the environment to bailout to. If the call has side effects |
| 353 // execution has to continue after the call otherwise execution can continue | 419 // execution has to continue after the call otherwise execution can continue |
| 354 // from a previous bailout point repeating the call. | 420 // from a previous bailout point repeating the call. |
| 355 LEnvironment* deoptimization_environment; | 421 LEnvironment* deoptimization_environment; |
| 356 if (instr->HasDeoptimizationEnvironment()) { | 422 if (instr->HasDeoptimizationEnvironment()) { |
| 357 deoptimization_environment = instr->deoptimization_environment(); | 423 deoptimization_environment = instr->deoptimization_environment(); |
| (...skipping 20 matching lines...) Expand all Loading... |
| 378 // Layout of the translation: | 444 // Layout of the translation: |
| 379 // 0 ........................................................ size - 1 + 4 | 445 // 0 ........................................................ size - 1 + 4 |
| 380 // [expression stack including arguments] [locals] [4 words] [parameters] | 446 // [expression stack including arguments] [locals] [4 words] [parameters] |
| 381 // |>------------ translation_size ------------<| | 447 // |>------------ translation_size ------------<| |
| 382 | 448 |
| 383 int frame_count = 0; | 449 int frame_count = 0; |
| 384 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { | 450 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { |
| 385 ++frame_count; | 451 ++frame_count; |
| 386 } | 452 } |
| 387 Translation translation(&translations_, frame_count); | 453 Translation translation(&translations_, frame_count); |
| 388 environment->WriteTranslation(this, &translation); | 454 WriteTranslation(environment, &translation); |
| 389 int deoptimization_index = deoptimizations_.length(); | 455 int deoptimization_index = deoptimizations_.length(); |
| 390 environment->Register(deoptimization_index, translation.index()); | 456 environment->Register(deoptimization_index, translation.index()); |
| 391 deoptimizations_.Add(environment); | 457 deoptimizations_.Add(environment); |
| 392 } | 458 } |
| 393 } | 459 } |
| 394 | 460 |
| 395 | 461 |
| 396 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 462 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { |
| 397 RegisterEnvironmentForDeoptimization(environment); | 463 RegisterEnvironmentForDeoptimization(environment); |
| 398 ASSERT(environment->HasBeenRegistered()); | 464 ASSERT(environment->HasBeenRegistered()); |
| (...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 497 for (int i = 0, length = inlined_closures->length(); | 563 for (int i = 0, length = inlined_closures->length(); |
| 498 i < length; | 564 i < length; |
| 499 i++) { | 565 i++) { |
| 500 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 566 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
| 501 } | 567 } |
| 502 | 568 |
| 503 inlined_function_count_ = deoptimization_literals_.length(); | 569 inlined_function_count_ = deoptimization_literals_.length(); |
| 504 } | 570 } |
| 505 | 571 |
| 506 | 572 |
| 573 void LCodeGen::RecordSafepoint( |
| 574 LPointerMap* pointers, |
| 575 Safepoint::Kind kind, |
| 576 int arguments, |
| 577 int deoptimization_index) { |
| 578 const ZoneList<LOperand*>* operands = pointers->operands(); |
| 579 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 580 kind, arguments, deoptimization_index); |
| 581 for (int i = 0; i < operands->length(); i++) { |
| 582 LOperand* pointer = operands->at(i); |
| 583 if (pointer->IsStackSlot()) { |
| 584 safepoint.DefinePointerSlot(pointer->index()); |
| 585 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 586 safepoint.DefinePointerRegister(ToRegister(pointer)); |
| 587 } |
| 588 } |
| 589 } |
| 590 |
| 591 |
| 507 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 592 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
| 508 int deoptimization_index) { | 593 int deoptimization_index) { |
| 509 const ZoneList<LOperand*>* operands = pointers->operands(); | 594 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); |
| 510 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | |
| 511 deoptimization_index); | |
| 512 for (int i = 0; i < operands->length(); i++) { | |
| 513 LOperand* pointer = operands->at(i); | |
| 514 if (pointer->IsStackSlot()) { | |
| 515 safepoint.DefinePointerSlot(pointer->index()); | |
| 516 } | |
| 517 } | |
| 518 } | 595 } |
| 519 | 596 |
| 520 | 597 |
| 521 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 598 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, |
| 522 int arguments, | 599 int arguments, |
| 523 int deoptimization_index) { | 600 int deoptimization_index) { |
| 524 const ZoneList<LOperand*>* operands = pointers->operands(); | 601 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, |
| 525 Safepoint safepoint = | 602 deoptimization_index); |
| 526 safepoints_.DefineSafepointWithRegisters( | |
| 527 masm(), arguments, deoptimization_index); | |
| 528 for (int i = 0; i < operands->length(); i++) { | |
| 529 LOperand* pointer = operands->at(i); | |
| 530 if (pointer->IsStackSlot()) { | |
| 531 safepoint.DefinePointerSlot(pointer->index()); | |
| 532 } else if (pointer->IsRegister()) { | |
| 533 safepoint.DefinePointerRegister(ToRegister(pointer)); | |
| 534 } | |
| 535 } | |
| 536 // Register esi always contains a pointer to the context. | |
| 537 safepoint.DefinePointerRegister(esi); | |
| 538 } | 603 } |
| 539 | 604 |
| 540 | 605 |
| 541 void LCodeGen::RecordPosition(int position) { | 606 void LCodeGen::RecordPosition(int position) { |
| 542 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; | 607 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; |
| 543 masm()->positions_recorder()->RecordPosition(position); | 608 masm()->positions_recorder()->RecordPosition(position); |
| 544 } | 609 } |
| 545 | 610 |
| 546 | 611 |
| 547 void LCodeGen::DoLabel(LLabel* label) { | 612 void LCodeGen::DoLabel(LLabel* label) { |
| 548 if (label->is_loop_header()) { | 613 if (label->is_loop_header()) { |
| 549 Comment(";;; B%d - LOOP entry", label->block_id()); | 614 Comment(";;; B%d - LOOP entry", label->block_id()); |
| 550 } else { | 615 } else { |
| 551 Comment(";;; B%d", label->block_id()); | 616 Comment(";;; B%d", label->block_id()); |
| 552 } | 617 } |
| 553 __ bind(label->label()); | 618 __ bind(label->label()); |
| 554 current_block_ = label->block_id(); | 619 current_block_ = label->block_id(); |
| 555 LCodeGen::DoGap(label); | 620 LCodeGen::DoGap(label); |
| 556 } | 621 } |
| 557 | 622 |
| 558 | 623 |
| 559 void LCodeGen::DoParallelMove(LParallelMove* move) { | 624 void LCodeGen::DoParallelMove(LParallelMove* move) { |
| 560 // xmm0 must always be a scratch register. | 625 resolver_.Resolve(move); |
| 561 XMMRegister xmm_scratch = xmm0; | |
| 562 LUnallocated marker_operand(LUnallocated::NONE); | |
| 563 | |
| 564 Register cpu_scratch = esi; | |
| 565 bool destroys_cpu_scratch = false; | |
| 566 | |
| 567 LGapResolver resolver(move->move_operands(), &marker_operand); | |
| 568 const ZoneList<LMoveOperands>* moves = resolver.ResolveInReverseOrder(); | |
| 569 for (int i = moves->length() - 1; i >= 0; --i) { | |
| 570 LMoveOperands move = moves->at(i); | |
| 571 LOperand* from = move.from(); | |
| 572 LOperand* to = move.to(); | |
| 573 ASSERT(!from->IsDoubleRegister() || | |
| 574 !ToDoubleRegister(from).is(xmm_scratch)); | |
| 575 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); | |
| 576 ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); | |
| 577 ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); | |
| 578 if (from->IsConstantOperand()) { | |
| 579 __ mov(ToOperand(to), ToImmediate(from)); | |
| 580 } else if (from == &marker_operand) { | |
| 581 if (to->IsRegister() || to->IsStackSlot()) { | |
| 582 __ mov(ToOperand(to), cpu_scratch); | |
| 583 ASSERT(destroys_cpu_scratch); | |
| 584 } else { | |
| 585 ASSERT(to->IsDoubleRegister() || to->IsDoubleStackSlot()); | |
| 586 __ movdbl(ToOperand(to), xmm_scratch); | |
| 587 } | |
| 588 } else if (to == &marker_operand) { | |
| 589 if (from->IsRegister() || from->IsStackSlot()) { | |
| 590 __ mov(cpu_scratch, ToOperand(from)); | |
| 591 destroys_cpu_scratch = true; | |
| 592 } else { | |
| 593 ASSERT(from->IsDoubleRegister() || from->IsDoubleStackSlot()); | |
| 594 __ movdbl(xmm_scratch, ToOperand(from)); | |
| 595 } | |
| 596 } else if (from->IsRegister()) { | |
| 597 __ mov(ToOperand(to), ToRegister(from)); | |
| 598 } else if (to->IsRegister()) { | |
| 599 __ mov(ToRegister(to), ToOperand(from)); | |
| 600 } else if (from->IsStackSlot()) { | |
| 601 ASSERT(to->IsStackSlot()); | |
| 602 __ push(eax); | |
| 603 __ mov(eax, ToOperand(from)); | |
| 604 __ mov(ToOperand(to), eax); | |
| 605 __ pop(eax); | |
| 606 } else if (from->IsDoubleRegister()) { | |
| 607 __ movdbl(ToOperand(to), ToDoubleRegister(from)); | |
| 608 } else if (to->IsDoubleRegister()) { | |
| 609 __ movdbl(ToDoubleRegister(to), ToOperand(from)); | |
| 610 } else { | |
| 611 ASSERT(to->IsDoubleStackSlot() && from->IsDoubleStackSlot()); | |
| 612 __ movdbl(xmm_scratch, ToOperand(from)); | |
| 613 __ movdbl(ToOperand(to), xmm_scratch); | |
| 614 } | |
| 615 } | |
| 616 | |
| 617 if (destroys_cpu_scratch) { | |
| 618 __ mov(cpu_scratch, Operand(ebp, -kPointerSize)); | |
| 619 } | |
| 620 } | 626 } |
| 621 | 627 |
| 622 | 628 |
| 623 void LCodeGen::DoGap(LGap* gap) { | 629 void LCodeGen::DoGap(LGap* gap) { |
| 624 for (int i = LGap::FIRST_INNER_POSITION; | 630 for (int i = LGap::FIRST_INNER_POSITION; |
| 625 i <= LGap::LAST_INNER_POSITION; | 631 i <= LGap::LAST_INNER_POSITION; |
| 626 i++) { | 632 i++) { |
| 627 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 633 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); |
| 628 LParallelMove* move = gap->GetParallelMove(inner_pos); | 634 LParallelMove* move = gap->GetParallelMove(inner_pos); |
| 629 if (move != NULL) DoParallelMove(move); | 635 if (move != NULL) DoParallelMove(move); |
| 630 } | 636 } |
| 631 | 637 |
| 632 LInstruction* next = GetNextInstruction(); | 638 LInstruction* next = GetNextInstruction(); |
| 633 if (next != NULL && next->IsLazyBailout()) { | 639 if (next != NULL && next->IsLazyBailout()) { |
| 634 int pc = masm()->pc_offset(); | 640 int pc = masm()->pc_offset(); |
| 635 safepoints_.SetPcAfterGap(pc); | 641 safepoints_.SetPcAfterGap(pc); |
| 636 } | 642 } |
| 637 } | 643 } |
| 638 | 644 |
| 639 | 645 |
| 640 void LCodeGen::DoParameter(LParameter* instr) { | 646 void LCodeGen::DoParameter(LParameter* instr) { |
| 641 // Nothing to do. | 647 // Nothing to do. |
| 642 } | 648 } |
| 643 | 649 |
| 644 | 650 |
| 645 void LCodeGen::DoCallStub(LCallStub* instr) { | 651 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 652 ASSERT(ToRegister(instr->context()).is(esi)); |
| 646 ASSERT(ToRegister(instr->result()).is(eax)); | 653 ASSERT(ToRegister(instr->result()).is(eax)); |
| 647 switch (instr->hydrogen()->major_key()) { | 654 switch (instr->hydrogen()->major_key()) { |
| 648 case CodeStub::RegExpConstructResult: { | 655 case CodeStub::RegExpConstructResult: { |
| 649 RegExpConstructResultStub stub; | 656 RegExpConstructResultStub stub; |
| 650 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 657 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 651 break; | 658 break; |
| 652 } | 659 } |
| 653 case CodeStub::RegExpExec: { | 660 case CodeStub::RegExpExec: { |
| 654 RegExpExecStub stub; | 661 RegExpExecStub stub; |
| 655 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 662 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 696 } | 703 } |
| 697 } | 704 } |
| 698 | 705 |
| 699 | 706 |
| 700 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 707 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
| 701 // Nothing to do. | 708 // Nothing to do. |
| 702 } | 709 } |
| 703 | 710 |
| 704 | 711 |
| 705 void LCodeGen::DoModI(LModI* instr) { | 712 void LCodeGen::DoModI(LModI* instr) { |
| 706 LOperand* right = instr->right(); | 713 LOperand* right = instr->InputAt(1); |
| 707 ASSERT(ToRegister(instr->result()).is(edx)); | 714 ASSERT(ToRegister(instr->result()).is(edx)); |
| 708 ASSERT(ToRegister(instr->left()).is(eax)); | 715 ASSERT(ToRegister(instr->InputAt(0)).is(eax)); |
| 709 ASSERT(!ToRegister(instr->right()).is(eax)); | 716 ASSERT(!ToRegister(instr->InputAt(1)).is(eax)); |
| 710 ASSERT(!ToRegister(instr->right()).is(edx)); | 717 ASSERT(!ToRegister(instr->InputAt(1)).is(edx)); |
| 711 | 718 |
| 712 Register right_reg = ToRegister(right); | 719 Register right_reg = ToRegister(right); |
| 713 | 720 |
| 714 // Check for x % 0. | 721 // Check for x % 0. |
| 715 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { | 722 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { |
| 716 __ test(right_reg, ToOperand(right)); | 723 __ test(right_reg, ToOperand(right)); |
| 717 DeoptimizeIf(zero, instr->environment()); | 724 DeoptimizeIf(zero, instr->environment()); |
| 718 } | 725 } |
| 719 | 726 |
| 720 // Sign extend to edx. | 727 // Sign extend to edx. |
| (...skipping 15 matching lines...) Expand all Loading... |
| 736 __ bind(&positive_left); | 743 __ bind(&positive_left); |
| 737 __ idiv(right_reg); | 744 __ idiv(right_reg); |
| 738 __ bind(&done); | 745 __ bind(&done); |
| 739 } else { | 746 } else { |
| 740 __ idiv(right_reg); | 747 __ idiv(right_reg); |
| 741 } | 748 } |
| 742 } | 749 } |
| 743 | 750 |
| 744 | 751 |
| 745 void LCodeGen::DoDivI(LDivI* instr) { | 752 void LCodeGen::DoDivI(LDivI* instr) { |
| 746 LOperand* right = instr->right(); | 753 LOperand* right = instr->InputAt(1); |
| 747 ASSERT(ToRegister(instr->result()).is(eax)); | 754 ASSERT(ToRegister(instr->result()).is(eax)); |
| 748 ASSERT(ToRegister(instr->left()).is(eax)); | 755 ASSERT(ToRegister(instr->InputAt(0)).is(eax)); |
| 749 ASSERT(!ToRegister(instr->right()).is(eax)); | 756 ASSERT(!ToRegister(instr->InputAt(1)).is(eax)); |
| 750 ASSERT(!ToRegister(instr->right()).is(edx)); | 757 ASSERT(!ToRegister(instr->InputAt(1)).is(edx)); |
| 751 | 758 |
| 752 Register left_reg = eax; | 759 Register left_reg = eax; |
| 753 | 760 |
| 754 // Check for x / 0. | 761 // Check for x / 0. |
| 755 Register right_reg = ToRegister(right); | 762 Register right_reg = ToRegister(right); |
| 756 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { | 763 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { |
| 757 __ test(right_reg, ToOperand(right)); | 764 __ test(right_reg, ToOperand(right)); |
| 758 DeoptimizeIf(zero, instr->environment()); | 765 DeoptimizeIf(zero, instr->environment()); |
| 759 } | 766 } |
| 760 | 767 |
| (...skipping 21 matching lines...) Expand all Loading... |
| 782 __ cdq(); | 789 __ cdq(); |
| 783 __ idiv(right_reg); | 790 __ idiv(right_reg); |
| 784 | 791 |
| 785 // Deoptimize if remainder is not 0. | 792 // Deoptimize if remainder is not 0. |
| 786 __ test(edx, Operand(edx)); | 793 __ test(edx, Operand(edx)); |
| 787 DeoptimizeIf(not_zero, instr->environment()); | 794 DeoptimizeIf(not_zero, instr->environment()); |
| 788 } | 795 } |
| 789 | 796 |
| 790 | 797 |
| 791 void LCodeGen::DoMulI(LMulI* instr) { | 798 void LCodeGen::DoMulI(LMulI* instr) { |
| 792 Register left = ToRegister(instr->left()); | 799 Register left = ToRegister(instr->InputAt(0)); |
| 793 LOperand* right = instr->right(); | 800 LOperand* right = instr->InputAt(1); |
| 794 | 801 |
| 795 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 802 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 796 __ mov(ToRegister(instr->temp()), left); | 803 __ mov(ToRegister(instr->TempAt(0)), left); |
| 797 } | 804 } |
| 798 | 805 |
| 799 if (right->IsConstantOperand()) { | 806 if (right->IsConstantOperand()) { |
| 800 __ imul(left, left, ToInteger32(LConstantOperand::cast(right))); | 807 __ imul(left, left, ToInteger32(LConstantOperand::cast(right))); |
| 801 } else { | 808 } else { |
| 802 __ imul(left, ToOperand(right)); | 809 __ imul(left, ToOperand(right)); |
| 803 } | 810 } |
| 804 | 811 |
| 805 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 812 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 806 DeoptimizeIf(overflow, instr->environment()); | 813 DeoptimizeIf(overflow, instr->environment()); |
| 807 } | 814 } |
| 808 | 815 |
| 809 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 816 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 810 // Bail out if the result is supposed to be negative zero. | 817 // Bail out if the result is supposed to be negative zero. |
| 811 NearLabel done; | 818 NearLabel done; |
| 812 __ test(left, Operand(left)); | 819 __ test(left, Operand(left)); |
| 813 __ j(not_zero, &done); | 820 __ j(not_zero, &done); |
| 814 if (right->IsConstantOperand()) { | 821 if (right->IsConstantOperand()) { |
| 815 if (ToInteger32(LConstantOperand::cast(right)) < 0) { | 822 if (ToInteger32(LConstantOperand::cast(right)) <= 0) { |
| 816 DeoptimizeIf(no_condition, instr->environment()); | 823 DeoptimizeIf(no_condition, instr->environment()); |
| 817 } | 824 } |
| 818 } else { | 825 } else { |
| 819 // Test the non-zero operand for negative sign. | 826 // Test the non-zero operand for negative sign. |
| 820 __ or_(ToRegister(instr->temp()), ToOperand(right)); | 827 __ or_(ToRegister(instr->TempAt(0)), ToOperand(right)); |
| 821 DeoptimizeIf(sign, instr->environment()); | 828 DeoptimizeIf(sign, instr->environment()); |
| 822 } | 829 } |
| 823 __ bind(&done); | 830 __ bind(&done); |
| 824 } | 831 } |
| 825 } | 832 } |
| 826 | 833 |
| 827 | 834 |
| 828 void LCodeGen::DoBitI(LBitI* instr) { | 835 void LCodeGen::DoBitI(LBitI* instr) { |
| 829 LOperand* left = instr->left(); | 836 LOperand* left = instr->InputAt(0); |
| 830 LOperand* right = instr->right(); | 837 LOperand* right = instr->InputAt(1); |
| 831 ASSERT(left->Equals(instr->result())); | 838 ASSERT(left->Equals(instr->result())); |
| 832 ASSERT(left->IsRegister()); | 839 ASSERT(left->IsRegister()); |
| 833 | 840 |
| 834 if (right->IsConstantOperand()) { | 841 if (right->IsConstantOperand()) { |
| 835 int right_operand = ToInteger32(LConstantOperand::cast(right)); | 842 int right_operand = ToInteger32(LConstantOperand::cast(right)); |
| 836 switch (instr->op()) { | 843 switch (instr->op()) { |
| 837 case Token::BIT_AND: | 844 case Token::BIT_AND: |
| 838 __ and_(ToRegister(left), right_operand); | 845 __ and_(ToRegister(left), right_operand); |
| 839 break; | 846 break; |
| 840 case Token::BIT_OR: | 847 case Token::BIT_OR: |
| (...skipping 19 matching lines...) Expand all Loading... |
| 860 break; | 867 break; |
| 861 default: | 868 default: |
| 862 UNREACHABLE(); | 869 UNREACHABLE(); |
| 863 break; | 870 break; |
| 864 } | 871 } |
| 865 } | 872 } |
| 866 } | 873 } |
| 867 | 874 |
| 868 | 875 |
| 869 void LCodeGen::DoShiftI(LShiftI* instr) { | 876 void LCodeGen::DoShiftI(LShiftI* instr) { |
| 870 LOperand* left = instr->left(); | 877 LOperand* left = instr->InputAt(0); |
| 871 LOperand* right = instr->right(); | 878 LOperand* right = instr->InputAt(1); |
| 872 ASSERT(left->Equals(instr->result())); | 879 ASSERT(left->Equals(instr->result())); |
| 873 ASSERT(left->IsRegister()); | 880 ASSERT(left->IsRegister()); |
| 874 if (right->IsRegister()) { | 881 if (right->IsRegister()) { |
| 875 ASSERT(ToRegister(right).is(ecx)); | 882 ASSERT(ToRegister(right).is(ecx)); |
| 876 | 883 |
| 877 switch (instr->op()) { | 884 switch (instr->op()) { |
| 878 case Token::SAR: | 885 case Token::SAR: |
| 879 __ sar_cl(ToRegister(left)); | 886 __ sar_cl(ToRegister(left)); |
| 880 break; | 887 break; |
| 881 case Token::SHR: | 888 case Token::SHR: |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 916 break; | 923 break; |
| 917 default: | 924 default: |
| 918 UNREACHABLE(); | 925 UNREACHABLE(); |
| 919 break; | 926 break; |
| 920 } | 927 } |
| 921 } | 928 } |
| 922 } | 929 } |
| 923 | 930 |
| 924 | 931 |
| 925 void LCodeGen::DoSubI(LSubI* instr) { | 932 void LCodeGen::DoSubI(LSubI* instr) { |
| 926 LOperand* left = instr->left(); | 933 LOperand* left = instr->InputAt(0); |
| 927 LOperand* right = instr->right(); | 934 LOperand* right = instr->InputAt(1); |
| 928 ASSERT(left->Equals(instr->result())); | 935 ASSERT(left->Equals(instr->result())); |
| 929 | 936 |
| 930 if (right->IsConstantOperand()) { | 937 if (right->IsConstantOperand()) { |
| 931 __ sub(ToOperand(left), ToImmediate(right)); | 938 __ sub(ToOperand(left), ToImmediate(right)); |
| 932 } else { | 939 } else { |
| 933 __ sub(ToRegister(left), ToOperand(right)); | 940 __ sub(ToRegister(left), ToOperand(right)); |
| 934 } | 941 } |
| 935 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 942 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 936 DeoptimizeIf(overflow, instr->environment()); | 943 DeoptimizeIf(overflow, instr->environment()); |
| 937 } | 944 } |
| 938 } | 945 } |
| 939 | 946 |
| 940 | 947 |
| 941 void LCodeGen::DoConstantI(LConstantI* instr) { | 948 void LCodeGen::DoConstantI(LConstantI* instr) { |
| 942 ASSERT(instr->result()->IsRegister()); | 949 ASSERT(instr->result()->IsRegister()); |
| 943 __ Set(ToRegister(instr->result()), Immediate(instr->value())); | 950 __ Set(ToRegister(instr->result()), Immediate(instr->value())); |
| 944 } | 951 } |
| 945 | 952 |
| 946 | 953 |
| 947 void LCodeGen::DoConstantD(LConstantD* instr) { | 954 void LCodeGen::DoConstantD(LConstantD* instr) { |
| 948 ASSERT(instr->result()->IsDoubleRegister()); | 955 ASSERT(instr->result()->IsDoubleRegister()); |
| 949 XMMRegister res = ToDoubleRegister(instr->result()); | 956 XMMRegister res = ToDoubleRegister(instr->result()); |
| 950 double v = instr->value(); | 957 double v = instr->value(); |
| 951 // Use xor to produce +0.0 in a fast and compact way, but avoid to | 958 // Use xor to produce +0.0 in a fast and compact way, but avoid to |
| 952 // do so if the constant is -0.0. | 959 // do so if the constant is -0.0. |
| 953 if (BitCast<uint64_t, double>(v) == 0) { | 960 if (BitCast<uint64_t, double>(v) == 0) { |
| 954 __ xorpd(res, res); | 961 __ xorpd(res, res); |
| 955 } else { | 962 } else { |
| 956 int32_t v_int32 = static_cast<int32_t>(v); | 963 Register temp = ToRegister(instr->TempAt(0)); |
| 957 if (static_cast<double>(v_int32) == v) { | 964 uint64_t int_val = BitCast<uint64_t, double>(v); |
| 958 __ push_imm32(v_int32); | 965 int32_t lower = static_cast<int32_t>(int_val); |
| 959 __ cvtsi2sd(res, Operand(esp, 0)); | 966 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); |
| 960 __ add(Operand(esp), Immediate(kPointerSize)); | 967 if (CpuFeatures::IsSupported(SSE4_1)) { |
| 968 CpuFeatures::Scope scope(SSE4_1); |
| 969 if (lower != 0) { |
| 970 __ Set(temp, Immediate(lower)); |
| 971 __ movd(res, Operand(temp)); |
| 972 __ Set(temp, Immediate(upper)); |
| 973 __ pinsrd(res, Operand(temp), 1); |
| 974 } else { |
| 975 __ xorpd(res, res); |
| 976 __ Set(temp, Immediate(upper)); |
| 977 __ pinsrd(res, Operand(temp), 1); |
| 978 } |
| 961 } else { | 979 } else { |
| 962 uint64_t int_val = BitCast<uint64_t, double>(v); | 980 __ Set(temp, Immediate(upper)); |
| 963 int32_t lower = static_cast<int32_t>(int_val); | 981 __ movd(res, Operand(temp)); |
| 964 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); | 982 __ psllq(res, 32); |
| 965 __ push_imm32(upper); | 983 if (lower != 0) { |
| 966 __ push_imm32(lower); | 984 __ Set(temp, Immediate(lower)); |
| 967 __ movdbl(res, Operand(esp, 0)); | 985 __ movd(xmm0, Operand(temp)); |
| 968 __ add(Operand(esp), Immediate(2 * kPointerSize)); | 986 __ por(res, xmm0); |
| 987 } |
| 969 } | 988 } |
| 970 } | 989 } |
| 971 } | 990 } |
| 972 | 991 |
| 973 | 992 |
| 974 void LCodeGen::DoConstantT(LConstantT* instr) { | 993 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 975 ASSERT(instr->result()->IsRegister()); | 994 ASSERT(instr->result()->IsRegister()); |
| 976 __ Set(ToRegister(instr->result()), Immediate(instr->value())); | 995 __ Set(ToRegister(instr->result()), Immediate(instr->value())); |
| 977 } | 996 } |
| 978 | 997 |
| 979 | 998 |
| 980 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { | 999 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { |
| 981 Register result = ToRegister(instr->result()); | 1000 Register result = ToRegister(instr->result()); |
| 982 Register array = ToRegister(instr->input()); | 1001 Register array = ToRegister(instr->InputAt(0)); |
| 983 __ mov(result, FieldOperand(array, JSArray::kLengthOffset)); | 1002 __ mov(result, FieldOperand(array, JSArray::kLengthOffset)); |
| 984 } | 1003 } |
| 985 | 1004 |
| 986 | 1005 |
| 987 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { | 1006 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { |
| 988 Register result = ToRegister(instr->result()); | 1007 Register result = ToRegister(instr->result()); |
| 989 Register array = ToRegister(instr->input()); | 1008 Register array = ToRegister(instr->InputAt(0)); |
| 990 __ mov(result, FieldOperand(array, FixedArray::kLengthOffset)); | 1009 __ mov(result, FieldOperand(array, FixedArray::kLengthOffset)); |
| 991 } | 1010 } |
| 992 | 1011 |
| 993 | 1012 |
| 1013 void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) { |
| 1014 Register result = ToRegister(instr->result()); |
| 1015 Register array = ToRegister(instr->InputAt(0)); |
| 1016 __ mov(result, FieldOperand(array, PixelArray::kLengthOffset)); |
| 1017 } |
| 1018 |
| 1019 |
| 994 void LCodeGen::DoValueOf(LValueOf* instr) { | 1020 void LCodeGen::DoValueOf(LValueOf* instr) { |
| 995 Register input = ToRegister(instr->input()); | 1021 Register input = ToRegister(instr->InputAt(0)); |
| 996 Register result = ToRegister(instr->result()); | 1022 Register result = ToRegister(instr->result()); |
| 997 Register map = ToRegister(instr->temporary()); | 1023 Register map = ToRegister(instr->TempAt(0)); |
| 998 ASSERT(input.is(result)); | 1024 ASSERT(input.is(result)); |
| 999 NearLabel done; | 1025 NearLabel done; |
| 1000 // If the object is a smi return the object. | 1026 // If the object is a smi return the object. |
| 1001 __ test(input, Immediate(kSmiTagMask)); | 1027 __ test(input, Immediate(kSmiTagMask)); |
| 1002 __ j(zero, &done); | 1028 __ j(zero, &done); |
| 1003 | 1029 |
| 1004 // If the object is not a value type, return the object. | 1030 // If the object is not a value type, return the object. |
| 1005 __ CmpObjectType(input, JS_VALUE_TYPE, map); | 1031 __ CmpObjectType(input, JS_VALUE_TYPE, map); |
| 1006 __ j(not_equal, &done); | 1032 __ j(not_equal, &done); |
| 1007 __ mov(result, FieldOperand(input, JSValue::kValueOffset)); | 1033 __ mov(result, FieldOperand(input, JSValue::kValueOffset)); |
| 1008 | 1034 |
| 1009 __ bind(&done); | 1035 __ bind(&done); |
| 1010 } | 1036 } |
| 1011 | 1037 |
| 1012 | 1038 |
| 1013 void LCodeGen::DoBitNotI(LBitNotI* instr) { | 1039 void LCodeGen::DoBitNotI(LBitNotI* instr) { |
| 1014 LOperand* input = instr->input(); | 1040 LOperand* input = instr->InputAt(0); |
| 1015 ASSERT(input->Equals(instr->result())); | 1041 ASSERT(input->Equals(instr->result())); |
| 1016 __ not_(ToRegister(input)); | 1042 __ not_(ToRegister(input)); |
| 1017 } | 1043 } |
| 1018 | 1044 |
| 1019 | 1045 |
| 1020 void LCodeGen::DoThrow(LThrow* instr) { | 1046 void LCodeGen::DoThrow(LThrow* instr) { |
| 1021 __ push(ToOperand(instr->input())); | 1047 __ push(ToOperand(instr->InputAt(0))); |
| 1022 CallRuntime(Runtime::kThrow, 1, instr); | 1048 CallRuntime(Runtime::kThrow, 1, instr, false); |
| 1023 | 1049 |
| 1024 if (FLAG_debug_code) { | 1050 if (FLAG_debug_code) { |
| 1025 Comment("Unreachable code."); | 1051 Comment("Unreachable code."); |
| 1026 __ int3(); | 1052 __ int3(); |
| 1027 } | 1053 } |
| 1028 } | 1054 } |
| 1029 | 1055 |
| 1030 | 1056 |
| 1031 void LCodeGen::DoAddI(LAddI* instr) { | 1057 void LCodeGen::DoAddI(LAddI* instr) { |
| 1032 LOperand* left = instr->left(); | 1058 LOperand* left = instr->InputAt(0); |
| 1033 LOperand* right = instr->right(); | 1059 LOperand* right = instr->InputAt(1); |
| 1034 ASSERT(left->Equals(instr->result())); | 1060 ASSERT(left->Equals(instr->result())); |
| 1035 | 1061 |
| 1036 if (right->IsConstantOperand()) { | 1062 if (right->IsConstantOperand()) { |
| 1037 __ add(ToOperand(left), ToImmediate(right)); | 1063 __ add(ToOperand(left), ToImmediate(right)); |
| 1038 } else { | 1064 } else { |
| 1039 __ add(ToRegister(left), ToOperand(right)); | 1065 __ add(ToRegister(left), ToOperand(right)); |
| 1040 } | 1066 } |
| 1041 | 1067 |
| 1042 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1068 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1043 DeoptimizeIf(overflow, instr->environment()); | 1069 DeoptimizeIf(overflow, instr->environment()); |
| 1044 } | 1070 } |
| 1045 } | 1071 } |
| 1046 | 1072 |
| 1047 | 1073 |
| 1048 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { | 1074 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { |
| 1049 LOperand* left = instr->left(); | 1075 LOperand* left = instr->InputAt(0); |
| 1050 LOperand* right = instr->right(); | 1076 LOperand* right = instr->InputAt(1); |
| 1051 // Modulo uses a fixed result register. | 1077 // Modulo uses a fixed result register. |
| 1052 ASSERT(instr->op() == Token::MOD || left->Equals(instr->result())); | 1078 ASSERT(instr->op() == Token::MOD || left->Equals(instr->result())); |
| 1053 switch (instr->op()) { | 1079 switch (instr->op()) { |
| 1054 case Token::ADD: | 1080 case Token::ADD: |
| 1055 __ addsd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1081 __ addsd(ToDoubleRegister(left), ToDoubleRegister(right)); |
| 1056 break; | 1082 break; |
| 1057 case Token::SUB: | 1083 case Token::SUB: |
| 1058 __ subsd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1084 __ subsd(ToDoubleRegister(left), ToDoubleRegister(right)); |
| 1059 break; | 1085 break; |
| 1060 case Token::MUL: | 1086 case Token::MUL: |
| (...skipping 18 matching lines...) Expand all Loading... |
| 1079 break; | 1105 break; |
| 1080 } | 1106 } |
| 1081 default: | 1107 default: |
| 1082 UNREACHABLE(); | 1108 UNREACHABLE(); |
| 1083 break; | 1109 break; |
| 1084 } | 1110 } |
| 1085 } | 1111 } |
| 1086 | 1112 |
| 1087 | 1113 |
| 1088 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1114 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1089 ASSERT(ToRegister(instr->left()).is(edx)); | 1115 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); |
| 1090 ASSERT(ToRegister(instr->right()).is(eax)); | 1116 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); |
| 1091 ASSERT(ToRegister(instr->result()).is(eax)); | 1117 ASSERT(ToRegister(instr->result()).is(eax)); |
| 1092 | 1118 |
| 1093 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1119 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 1094 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1120 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 1095 } | 1121 } |
| 1096 | 1122 |
| 1097 | 1123 |
| 1098 int LCodeGen::GetNextEmittedBlock(int block) { | 1124 int LCodeGen::GetNextEmittedBlock(int block) { |
| 1099 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 1125 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
| 1100 LLabel* label = chunk_->GetLabel(i); | 1126 LLabel* label = chunk_->GetLabel(i); |
| 1101 if (!label->HasReplacement()) return i; | 1127 if (!label->HasReplacement()) return i; |
| 1102 } | 1128 } |
| 1103 return -1; | 1129 return -1; |
| 1104 } | 1130 } |
| (...skipping 16 matching lines...) Expand all Loading... |
| 1121 } | 1147 } |
| 1122 } | 1148 } |
| 1123 | 1149 |
| 1124 | 1150 |
| 1125 void LCodeGen::DoBranch(LBranch* instr) { | 1151 void LCodeGen::DoBranch(LBranch* instr) { |
| 1126 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1152 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1127 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1153 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1128 | 1154 |
| 1129 Representation r = instr->hydrogen()->representation(); | 1155 Representation r = instr->hydrogen()->representation(); |
| 1130 if (r.IsInteger32()) { | 1156 if (r.IsInteger32()) { |
| 1131 Register reg = ToRegister(instr->input()); | 1157 Register reg = ToRegister(instr->InputAt(0)); |
| 1132 __ test(reg, Operand(reg)); | 1158 __ test(reg, Operand(reg)); |
| 1133 EmitBranch(true_block, false_block, not_zero); | 1159 EmitBranch(true_block, false_block, not_zero); |
| 1134 } else if (r.IsDouble()) { | 1160 } else if (r.IsDouble()) { |
| 1135 XMMRegister reg = ToDoubleRegister(instr->input()); | 1161 XMMRegister reg = ToDoubleRegister(instr->InputAt(0)); |
| 1136 __ xorpd(xmm0, xmm0); | 1162 __ xorpd(xmm0, xmm0); |
| 1137 __ ucomisd(reg, xmm0); | 1163 __ ucomisd(reg, xmm0); |
| 1138 EmitBranch(true_block, false_block, not_equal); | 1164 EmitBranch(true_block, false_block, not_equal); |
| 1139 } else { | 1165 } else { |
| 1140 ASSERT(r.IsTagged()); | 1166 ASSERT(r.IsTagged()); |
| 1141 Register reg = ToRegister(instr->input()); | 1167 Register reg = ToRegister(instr->InputAt(0)); |
| 1142 if (instr->hydrogen()->type().IsBoolean()) { | 1168 if (instr->hydrogen()->type().IsBoolean()) { |
| 1143 __ cmp(reg, Factory::true_value()); | 1169 __ cmp(reg, Factory::true_value()); |
| 1144 EmitBranch(true_block, false_block, equal); | 1170 EmitBranch(true_block, false_block, equal); |
| 1145 } else { | 1171 } else { |
| 1146 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 1172 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 1147 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1173 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 1148 | 1174 |
| 1149 __ cmp(reg, Factory::undefined_value()); | 1175 __ cmp(reg, Factory::undefined_value()); |
| 1150 __ j(equal, false_label); | 1176 __ j(equal, false_label); |
| 1151 __ cmp(reg, Factory::true_value()); | 1177 __ cmp(reg, Factory::true_value()); |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1197 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); | 1223 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); |
| 1198 } else { | 1224 } else { |
| 1199 __ jmp(chunk_->GetAssemblyLabel(block)); | 1225 __ jmp(chunk_->GetAssemblyLabel(block)); |
| 1200 } | 1226 } |
| 1201 } | 1227 } |
| 1202 } | 1228 } |
| 1203 | 1229 |
| 1204 | 1230 |
| 1205 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { | 1231 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { |
| 1206 __ pushad(); | 1232 __ pushad(); |
| 1233 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 1207 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 1234 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 1208 RecordSafepointWithRegisters( | 1235 RecordSafepointWithRegisters( |
| 1209 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 1236 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 1210 __ popad(); | 1237 __ popad(); |
| 1211 } | 1238 } |
| 1212 | 1239 |
| 1213 void LCodeGen::DoGoto(LGoto* instr) { | 1240 void LCodeGen::DoGoto(LGoto* instr) { |
| 1214 class DeferredStackCheck: public LDeferredCode { | 1241 class DeferredStackCheck: public LDeferredCode { |
| 1215 public: | 1242 public: |
| 1216 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) | 1243 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1259 void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) { | 1286 void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) { |
| 1260 if (right->IsConstantOperand()) { | 1287 if (right->IsConstantOperand()) { |
| 1261 __ cmp(ToOperand(left), ToImmediate(right)); | 1288 __ cmp(ToOperand(left), ToImmediate(right)); |
| 1262 } else { | 1289 } else { |
| 1263 __ cmp(ToRegister(left), ToOperand(right)); | 1290 __ cmp(ToRegister(left), ToOperand(right)); |
| 1264 } | 1291 } |
| 1265 } | 1292 } |
| 1266 | 1293 |
| 1267 | 1294 |
| 1268 void LCodeGen::DoCmpID(LCmpID* instr) { | 1295 void LCodeGen::DoCmpID(LCmpID* instr) { |
| 1269 LOperand* left = instr->left(); | 1296 LOperand* left = instr->InputAt(0); |
| 1270 LOperand* right = instr->right(); | 1297 LOperand* right = instr->InputAt(1); |
| 1271 LOperand* result = instr->result(); | 1298 LOperand* result = instr->result(); |
| 1272 | 1299 |
| 1273 NearLabel unordered; | 1300 NearLabel unordered; |
| 1274 if (instr->is_double()) { | 1301 if (instr->is_double()) { |
| 1275 // Don't base result on EFLAGS when a NaN is involved. Instead | 1302 // Don't base result on EFLAGS when a NaN is involved. Instead |
| 1276 // jump to the unordered case, which produces a false value. | 1303 // jump to the unordered case, which produces a false value. |
| 1277 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1304 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); |
| 1278 __ j(parity_even, &unordered, not_taken); | 1305 __ j(parity_even, &unordered, not_taken); |
| 1279 } else { | 1306 } else { |
| 1280 EmitCmpI(left, right); | 1307 EmitCmpI(left, right); |
| 1281 } | 1308 } |
| 1282 | 1309 |
| 1283 NearLabel done; | 1310 NearLabel done; |
| 1284 Condition cc = TokenToCondition(instr->op(), instr->is_double()); | 1311 Condition cc = TokenToCondition(instr->op(), instr->is_double()); |
| 1285 __ mov(ToRegister(result), Handle<Object>(Heap::true_value())); | 1312 __ mov(ToRegister(result), Factory::true_value()); |
| 1286 __ j(cc, &done); | 1313 __ j(cc, &done); |
| 1287 | 1314 |
| 1288 __ bind(&unordered); | 1315 __ bind(&unordered); |
| 1289 __ mov(ToRegister(result), Handle<Object>(Heap::false_value())); | 1316 __ mov(ToRegister(result), Factory::false_value()); |
| 1290 __ bind(&done); | 1317 __ bind(&done); |
| 1291 } | 1318 } |
| 1292 | 1319 |
| 1293 | 1320 |
| 1294 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { | 1321 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { |
| 1295 LOperand* left = instr->left(); | 1322 LOperand* left = instr->InputAt(0); |
| 1296 LOperand* right = instr->right(); | 1323 LOperand* right = instr->InputAt(1); |
| 1297 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1324 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1298 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1325 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1299 | 1326 |
| 1300 if (instr->is_double()) { | 1327 if (instr->is_double()) { |
| 1301 // Don't base result on EFLAGS when a NaN is involved. Instead | 1328 // Don't base result on EFLAGS when a NaN is involved. Instead |
| 1302 // jump to the false block. | 1329 // jump to the false block. |
| 1303 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1330 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); |
| 1304 __ j(parity_even, chunk_->GetAssemblyLabel(false_block)); | 1331 __ j(parity_even, chunk_->GetAssemblyLabel(false_block)); |
| 1305 } else { | 1332 } else { |
| 1306 EmitCmpI(left, right); | 1333 EmitCmpI(left, right); |
| 1307 } | 1334 } |
| 1308 | 1335 |
| 1309 Condition cc = TokenToCondition(instr->op(), instr->is_double()); | 1336 Condition cc = TokenToCondition(instr->op(), instr->is_double()); |
| 1310 EmitBranch(true_block, false_block, cc); | 1337 EmitBranch(true_block, false_block, cc); |
| 1311 } | 1338 } |
| 1312 | 1339 |
| 1313 | 1340 |
| 1314 void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) { | 1341 void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) { |
| 1315 Register left = ToRegister(instr->left()); | 1342 Register left = ToRegister(instr->InputAt(0)); |
| 1316 Register right = ToRegister(instr->right()); | 1343 Register right = ToRegister(instr->InputAt(1)); |
| 1317 Register result = ToRegister(instr->result()); | 1344 Register result = ToRegister(instr->result()); |
| 1318 | 1345 |
| 1319 __ cmp(left, Operand(right)); | 1346 __ cmp(left, Operand(right)); |
| 1320 __ mov(result, Handle<Object>(Heap::true_value())); | 1347 __ mov(result, Factory::true_value()); |
| 1321 NearLabel done; | 1348 NearLabel done; |
| 1322 __ j(equal, &done); | 1349 __ j(equal, &done); |
| 1323 __ mov(result, Handle<Object>(Heap::false_value())); | 1350 __ mov(result, Factory::false_value()); |
| 1324 __ bind(&done); | 1351 __ bind(&done); |
| 1325 } | 1352 } |
| 1326 | 1353 |
| 1327 | 1354 |
| 1328 void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) { | 1355 void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) { |
| 1329 Register left = ToRegister(instr->left()); | 1356 Register left = ToRegister(instr->InputAt(0)); |
| 1330 Register right = ToRegister(instr->right()); | 1357 Register right = ToRegister(instr->InputAt(1)); |
| 1331 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1358 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1332 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1359 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1333 | 1360 |
| 1334 __ cmp(left, Operand(right)); | 1361 __ cmp(left, Operand(right)); |
| 1335 EmitBranch(true_block, false_block, equal); | 1362 EmitBranch(true_block, false_block, equal); |
| 1336 } | 1363 } |
| 1337 | 1364 |
| 1338 | 1365 |
| 1339 void LCodeGen::DoIsNull(LIsNull* instr) { | 1366 void LCodeGen::DoIsNull(LIsNull* instr) { |
| 1340 Register reg = ToRegister(instr->input()); | 1367 Register reg = ToRegister(instr->InputAt(0)); |
| 1341 Register result = ToRegister(instr->result()); | 1368 Register result = ToRegister(instr->result()); |
| 1342 | 1369 |
| 1343 // TODO(fsc): If the expression is known to be a smi, then it's | 1370 // TODO(fsc): If the expression is known to be a smi, then it's |
| 1344 // definitely not null. Materialize false. | 1371 // definitely not null. Materialize false. |
| 1345 | 1372 |
| 1346 __ cmp(reg, Factory::null_value()); | 1373 __ cmp(reg, Factory::null_value()); |
| 1347 if (instr->is_strict()) { | 1374 if (instr->is_strict()) { |
| 1348 __ mov(result, Handle<Object>(Heap::true_value())); | 1375 __ mov(result, Factory::true_value()); |
| 1349 NearLabel done; | 1376 NearLabel done; |
| 1350 __ j(equal, &done); | 1377 __ j(equal, &done); |
| 1351 __ mov(result, Handle<Object>(Heap::false_value())); | 1378 __ mov(result, Factory::false_value()); |
| 1352 __ bind(&done); | 1379 __ bind(&done); |
| 1353 } else { | 1380 } else { |
| 1354 NearLabel true_value, false_value, done; | 1381 NearLabel true_value, false_value, done; |
| 1355 __ j(equal, &true_value); | 1382 __ j(equal, &true_value); |
| 1356 __ cmp(reg, Factory::undefined_value()); | 1383 __ cmp(reg, Factory::undefined_value()); |
| 1357 __ j(equal, &true_value); | 1384 __ j(equal, &true_value); |
| 1358 __ test(reg, Immediate(kSmiTagMask)); | 1385 __ test(reg, Immediate(kSmiTagMask)); |
| 1359 __ j(zero, &false_value); | 1386 __ j(zero, &false_value); |
| 1360 // Check for undetectable objects by looking in the bit field in | 1387 // Check for undetectable objects by looking in the bit field in |
| 1361 // the map. The object has already been smi checked. | 1388 // the map. The object has already been smi checked. |
| 1362 Register scratch = result; | 1389 Register scratch = result; |
| 1363 __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset)); | 1390 __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset)); |
| 1364 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset)); | 1391 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset)); |
| 1365 __ test(scratch, Immediate(1 << Map::kIsUndetectable)); | 1392 __ test(scratch, Immediate(1 << Map::kIsUndetectable)); |
| 1366 __ j(not_zero, &true_value); | 1393 __ j(not_zero, &true_value); |
| 1367 __ bind(&false_value); | 1394 __ bind(&false_value); |
| 1368 __ mov(result, Handle<Object>(Heap::false_value())); | 1395 __ mov(result, Factory::false_value()); |
| 1369 __ jmp(&done); | 1396 __ jmp(&done); |
| 1370 __ bind(&true_value); | 1397 __ bind(&true_value); |
| 1371 __ mov(result, Handle<Object>(Heap::true_value())); | 1398 __ mov(result, Factory::true_value()); |
| 1372 __ bind(&done); | 1399 __ bind(&done); |
| 1373 } | 1400 } |
| 1374 } | 1401 } |
| 1375 | 1402 |
| 1376 | 1403 |
| 1377 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) { | 1404 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) { |
| 1378 Register reg = ToRegister(instr->input()); | 1405 Register reg = ToRegister(instr->InputAt(0)); |
| 1379 | 1406 |
| 1380 // TODO(fsc): If the expression is known to be a smi, then it's | 1407 // TODO(fsc): If the expression is known to be a smi, then it's |
| 1381 // definitely not null. Jump to the false block. | 1408 // definitely not null. Jump to the false block. |
| 1382 | 1409 |
| 1383 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1410 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1384 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1411 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1385 | 1412 |
| 1386 __ cmp(reg, Factory::null_value()); | 1413 __ cmp(reg, Factory::null_value()); |
| 1387 if (instr->is_strict()) { | 1414 if (instr->is_strict()) { |
| 1388 EmitBranch(true_block, false_block, equal); | 1415 EmitBranch(true_block, false_block, equal); |
| 1389 } else { | 1416 } else { |
| 1390 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 1417 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 1391 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1418 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 1392 __ j(equal, true_label); | 1419 __ j(equal, true_label); |
| 1393 __ cmp(reg, Factory::undefined_value()); | 1420 __ cmp(reg, Factory::undefined_value()); |
| 1394 __ j(equal, true_label); | 1421 __ j(equal, true_label); |
| 1395 __ test(reg, Immediate(kSmiTagMask)); | 1422 __ test(reg, Immediate(kSmiTagMask)); |
| 1396 __ j(zero, false_label); | 1423 __ j(zero, false_label); |
| 1397 // Check for undetectable objects by looking in the bit field in | 1424 // Check for undetectable objects by looking in the bit field in |
| 1398 // the map. The object has already been smi checked. | 1425 // the map. The object has already been smi checked. |
| 1399 Register scratch = ToRegister(instr->temp()); | 1426 Register scratch = ToRegister(instr->TempAt(0)); |
| 1400 __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset)); | 1427 __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset)); |
| 1401 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset)); | 1428 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset)); |
| 1402 __ test(scratch, Immediate(1 << Map::kIsUndetectable)); | 1429 __ test(scratch, Immediate(1 << Map::kIsUndetectable)); |
| 1403 EmitBranch(true_block, false_block, not_zero); | 1430 EmitBranch(true_block, false_block, not_zero); |
| 1404 } | 1431 } |
| 1405 } | 1432 } |
| 1406 | 1433 |
| 1407 | 1434 |
| 1408 Condition LCodeGen::EmitIsObject(Register input, | 1435 Condition LCodeGen::EmitIsObject(Register input, |
| 1409 Register temp1, | 1436 Register temp1, |
| (...skipping 18 matching lines...) Expand all Loading... |
| 1428 | 1455 |
| 1429 __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset)); | 1456 __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset)); |
| 1430 __ cmp(temp2, FIRST_JS_OBJECT_TYPE); | 1457 __ cmp(temp2, FIRST_JS_OBJECT_TYPE); |
| 1431 __ j(below, is_not_object); | 1458 __ j(below, is_not_object); |
| 1432 __ cmp(temp2, LAST_JS_OBJECT_TYPE); | 1459 __ cmp(temp2, LAST_JS_OBJECT_TYPE); |
| 1433 return below_equal; | 1460 return below_equal; |
| 1434 } | 1461 } |
| 1435 | 1462 |
| 1436 | 1463 |
| 1437 void LCodeGen::DoIsObject(LIsObject* instr) { | 1464 void LCodeGen::DoIsObject(LIsObject* instr) { |
| 1438 Register reg = ToRegister(instr->input()); | 1465 Register reg = ToRegister(instr->InputAt(0)); |
| 1439 Register result = ToRegister(instr->result()); | 1466 Register result = ToRegister(instr->result()); |
| 1440 Register temp = ToRegister(instr->temp()); | 1467 Register temp = ToRegister(instr->TempAt(0)); |
| 1441 Label is_false, is_true, done; | 1468 Label is_false, is_true, done; |
| 1442 | 1469 |
| 1443 Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true); | 1470 Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true); |
| 1444 __ j(true_cond, &is_true); | 1471 __ j(true_cond, &is_true); |
| 1445 | 1472 |
| 1446 __ bind(&is_false); | 1473 __ bind(&is_false); |
| 1447 __ mov(result, Handle<Object>(Heap::false_value())); | 1474 __ mov(result, Factory::false_value()); |
| 1448 __ jmp(&done); | 1475 __ jmp(&done); |
| 1449 | 1476 |
| 1450 __ bind(&is_true); | 1477 __ bind(&is_true); |
| 1451 __ mov(result, Handle<Object>(Heap::true_value())); | 1478 __ mov(result, Factory::true_value()); |
| 1452 | 1479 |
| 1453 __ bind(&done); | 1480 __ bind(&done); |
| 1454 } | 1481 } |
| 1455 | 1482 |
| 1456 | 1483 |
| 1457 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) { | 1484 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) { |
| 1458 Register reg = ToRegister(instr->input()); | 1485 Register reg = ToRegister(instr->InputAt(0)); |
| 1459 Register temp = ToRegister(instr->temp()); | 1486 Register temp = ToRegister(instr->TempAt(0)); |
| 1460 Register temp2 = ToRegister(instr->temp2()); | 1487 Register temp2 = ToRegister(instr->TempAt(1)); |
| 1461 | 1488 |
| 1462 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1489 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1463 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1490 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1464 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 1491 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 1465 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1492 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 1466 | 1493 |
| 1467 Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label); | 1494 Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label); |
| 1468 | 1495 |
| 1469 EmitBranch(true_block, false_block, true_cond); | 1496 EmitBranch(true_block, false_block, true_cond); |
| 1470 } | 1497 } |
| 1471 | 1498 |
| 1472 | 1499 |
| 1473 void LCodeGen::DoIsSmi(LIsSmi* instr) { | 1500 void LCodeGen::DoIsSmi(LIsSmi* instr) { |
| 1474 Operand input = ToOperand(instr->input()); | 1501 Operand input = ToOperand(instr->InputAt(0)); |
| 1475 Register result = ToRegister(instr->result()); | 1502 Register result = ToRegister(instr->result()); |
| 1476 | 1503 |
| 1477 ASSERT(instr->hydrogen()->value()->representation().IsTagged()); | 1504 ASSERT(instr->hydrogen()->value()->representation().IsTagged()); |
| 1478 __ test(input, Immediate(kSmiTagMask)); | 1505 __ test(input, Immediate(kSmiTagMask)); |
| 1479 __ mov(result, Handle<Object>(Heap::true_value())); | 1506 __ mov(result, Factory::true_value()); |
| 1480 NearLabel done; | 1507 NearLabel done; |
| 1481 __ j(zero, &done); | 1508 __ j(zero, &done); |
| 1482 __ mov(result, Handle<Object>(Heap::false_value())); | 1509 __ mov(result, Factory::false_value()); |
| 1483 __ bind(&done); | 1510 __ bind(&done); |
| 1484 } | 1511 } |
| 1485 | 1512 |
| 1486 | 1513 |
| 1487 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { | 1514 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { |
| 1488 Operand input = ToOperand(instr->input()); | 1515 Operand input = ToOperand(instr->InputAt(0)); |
| 1489 | 1516 |
| 1490 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1517 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1491 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1518 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1492 | 1519 |
| 1493 __ test(input, Immediate(kSmiTagMask)); | 1520 __ test(input, Immediate(kSmiTagMask)); |
| 1494 EmitBranch(true_block, false_block, zero); | 1521 EmitBranch(true_block, false_block, zero); |
| 1495 } | 1522 } |
| 1496 | 1523 |
| 1497 | 1524 |
| 1498 InstanceType LHasInstanceType::TestType() { | 1525 static InstanceType TestType(HHasInstanceType* instr) { |
| 1499 InstanceType from = hydrogen()->from(); | 1526 InstanceType from = instr->from(); |
| 1500 InstanceType to = hydrogen()->to(); | 1527 InstanceType to = instr->to(); |
| 1501 if (from == FIRST_TYPE) return to; | 1528 if (from == FIRST_TYPE) return to; |
| 1502 ASSERT(from == to || to == LAST_TYPE); | 1529 ASSERT(from == to || to == LAST_TYPE); |
| 1503 return from; | 1530 return from; |
| 1504 } | 1531 } |
| 1505 | 1532 |
| 1506 | 1533 |
| 1507 | 1534 static Condition BranchCondition(HHasInstanceType* instr) { |
| 1508 Condition LHasInstanceType::BranchCondition() { | 1535 InstanceType from = instr->from(); |
| 1509 InstanceType from = hydrogen()->from(); | 1536 InstanceType to = instr->to(); |
| 1510 InstanceType to = hydrogen()->to(); | |
| 1511 if (from == to) return equal; | 1537 if (from == to) return equal; |
| 1512 if (to == LAST_TYPE) return above_equal; | 1538 if (to == LAST_TYPE) return above_equal; |
| 1513 if (from == FIRST_TYPE) return below_equal; | 1539 if (from == FIRST_TYPE) return below_equal; |
| 1514 UNREACHABLE(); | 1540 UNREACHABLE(); |
| 1515 return equal; | 1541 return equal; |
| 1516 } | 1542 } |
| 1517 | 1543 |
| 1518 | 1544 |
| 1519 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) { | 1545 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) { |
| 1520 Register input = ToRegister(instr->input()); | 1546 Register input = ToRegister(instr->InputAt(0)); |
| 1521 Register result = ToRegister(instr->result()); | 1547 Register result = ToRegister(instr->result()); |
| 1522 | 1548 |
| 1523 ASSERT(instr->hydrogen()->value()->representation().IsTagged()); | 1549 ASSERT(instr->hydrogen()->value()->representation().IsTagged()); |
| 1524 __ test(input, Immediate(kSmiTagMask)); | 1550 __ test(input, Immediate(kSmiTagMask)); |
| 1525 NearLabel done, is_false; | 1551 NearLabel done, is_false; |
| 1526 __ j(zero, &is_false); | 1552 __ j(zero, &is_false); |
| 1527 __ CmpObjectType(input, instr->TestType(), result); | 1553 __ CmpObjectType(input, TestType(instr->hydrogen()), result); |
| 1528 __ j(NegateCondition(instr->BranchCondition()), &is_false); | 1554 __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false); |
| 1529 __ mov(result, Handle<Object>(Heap::true_value())); | 1555 __ mov(result, Factory::true_value()); |
| 1530 __ jmp(&done); | 1556 __ jmp(&done); |
| 1531 __ bind(&is_false); | 1557 __ bind(&is_false); |
| 1532 __ mov(result, Handle<Object>(Heap::false_value())); | 1558 __ mov(result, Factory::false_value()); |
| 1533 __ bind(&done); | 1559 __ bind(&done); |
| 1534 } | 1560 } |
| 1535 | 1561 |
| 1536 | 1562 |
| 1537 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) { | 1563 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) { |
| 1538 Register input = ToRegister(instr->input()); | 1564 Register input = ToRegister(instr->InputAt(0)); |
| 1539 Register temp = ToRegister(instr->temp()); | 1565 Register temp = ToRegister(instr->TempAt(0)); |
| 1540 | 1566 |
| 1541 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1567 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1542 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1568 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1543 | 1569 |
| 1544 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1570 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 1545 | 1571 |
| 1546 __ test(input, Immediate(kSmiTagMask)); | 1572 __ test(input, Immediate(kSmiTagMask)); |
| 1547 __ j(zero, false_label); | 1573 __ j(zero, false_label); |
| 1548 | 1574 |
| 1549 __ CmpObjectType(input, instr->TestType(), temp); | 1575 __ CmpObjectType(input, TestType(instr->hydrogen()), temp); |
| 1550 EmitBranch(true_block, false_block, instr->BranchCondition()); | 1576 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen())); |
| 1551 } | 1577 } |
| 1552 | 1578 |
| 1553 | 1579 |
| 1554 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) { | 1580 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) { |
| 1555 Register input = ToRegister(instr->input()); | 1581 Register input = ToRegister(instr->InputAt(0)); |
| 1556 Register result = ToRegister(instr->result()); | 1582 Register result = ToRegister(instr->result()); |
| 1557 | 1583 |
| 1558 ASSERT(instr->hydrogen()->value()->representation().IsTagged()); | 1584 ASSERT(instr->hydrogen()->value()->representation().IsTagged()); |
| 1559 __ mov(result, Handle<Object>(Heap::true_value())); | 1585 __ mov(result, Factory::true_value()); |
| 1560 __ test(FieldOperand(input, String::kHashFieldOffset), | 1586 __ test(FieldOperand(input, String::kHashFieldOffset), |
| 1561 Immediate(String::kContainsCachedArrayIndexMask)); | 1587 Immediate(String::kContainsCachedArrayIndexMask)); |
| 1562 NearLabel done; | 1588 NearLabel done; |
| 1563 __ j(not_zero, &done); | 1589 __ j(not_zero, &done); |
| 1564 __ mov(result, Handle<Object>(Heap::false_value())); | 1590 __ mov(result, Factory::false_value()); |
| 1565 __ bind(&done); | 1591 __ bind(&done); |
| 1566 } | 1592 } |
| 1567 | 1593 |
| 1568 | 1594 |
| 1569 void LCodeGen::DoHasCachedArrayIndexAndBranch( | 1595 void LCodeGen::DoHasCachedArrayIndexAndBranch( |
| 1570 LHasCachedArrayIndexAndBranch* instr) { | 1596 LHasCachedArrayIndexAndBranch* instr) { |
| 1571 Register input = ToRegister(instr->input()); | 1597 Register input = ToRegister(instr->InputAt(0)); |
| 1572 | 1598 |
| 1573 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1599 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1574 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1600 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1575 | 1601 |
| 1576 __ test(FieldOperand(input, String::kHashFieldOffset), | 1602 __ test(FieldOperand(input, String::kHashFieldOffset), |
| 1577 Immediate(String::kContainsCachedArrayIndexMask)); | 1603 Immediate(String::kContainsCachedArrayIndexMask)); |
| 1578 EmitBranch(true_block, false_block, not_equal); | 1604 EmitBranch(true_block, false_block, not_equal); |
| 1579 } | 1605 } |
| 1580 | 1606 |
| 1581 | 1607 |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1630 // booted. This routine isn't expected to work for random API-created | 1656 // booted. This routine isn't expected to work for random API-created |
| 1631 // classes and it doesn't have to because you can't access it with natives | 1657 // classes and it doesn't have to because you can't access it with natives |
| 1632 // syntax. Since both sides are symbols it is sufficient to use an identity | 1658 // syntax. Since both sides are symbols it is sufficient to use an identity |
| 1633 // comparison. | 1659 // comparison. |
| 1634 __ cmp(temp, class_name); | 1660 __ cmp(temp, class_name); |
| 1635 // End with the answer in the z flag. | 1661 // End with the answer in the z flag. |
| 1636 } | 1662 } |
| 1637 | 1663 |
| 1638 | 1664 |
| 1639 void LCodeGen::DoClassOfTest(LClassOfTest* instr) { | 1665 void LCodeGen::DoClassOfTest(LClassOfTest* instr) { |
| 1640 Register input = ToRegister(instr->input()); | 1666 Register input = ToRegister(instr->InputAt(0)); |
| 1641 Register result = ToRegister(instr->result()); | 1667 Register result = ToRegister(instr->result()); |
| 1642 ASSERT(input.is(result)); | 1668 ASSERT(input.is(result)); |
| 1643 Register temp = ToRegister(instr->temporary()); | 1669 Register temp = ToRegister(instr->TempAt(0)); |
| 1644 Handle<String> class_name = instr->hydrogen()->class_name(); | 1670 Handle<String> class_name = instr->hydrogen()->class_name(); |
| 1645 NearLabel done; | 1671 NearLabel done; |
| 1646 Label is_true, is_false; | 1672 Label is_true, is_false; |
| 1647 | 1673 |
| 1648 EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input); | 1674 EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input); |
| 1649 | 1675 |
| 1650 __ j(not_equal, &is_false); | 1676 __ j(not_equal, &is_false); |
| 1651 | 1677 |
| 1652 __ bind(&is_true); | 1678 __ bind(&is_true); |
| 1653 __ mov(result, Handle<Object>(Heap::true_value())); | 1679 __ mov(result, Factory::true_value()); |
| 1654 __ jmp(&done); | 1680 __ jmp(&done); |
| 1655 | 1681 |
| 1656 __ bind(&is_false); | 1682 __ bind(&is_false); |
| 1657 __ mov(result, Handle<Object>(Heap::false_value())); | 1683 __ mov(result, Factory::false_value()); |
| 1658 __ bind(&done); | 1684 __ bind(&done); |
| 1659 } | 1685 } |
| 1660 | 1686 |
| 1661 | 1687 |
| 1662 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) { | 1688 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) { |
| 1663 Register input = ToRegister(instr->input()); | 1689 Register input = ToRegister(instr->InputAt(0)); |
| 1664 Register temp = ToRegister(instr->temporary()); | 1690 Register temp = ToRegister(instr->TempAt(0)); |
| 1665 Register temp2 = ToRegister(instr->temporary2()); | 1691 Register temp2 = ToRegister(instr->TempAt(1)); |
| 1666 if (input.is(temp)) { | 1692 if (input.is(temp)) { |
| 1667 // Swap. | 1693 // Swap. |
| 1668 Register swapper = temp; | 1694 Register swapper = temp; |
| 1669 temp = temp2; | 1695 temp = temp2; |
| 1670 temp2 = swapper; | 1696 temp2 = swapper; |
| 1671 } | 1697 } |
| 1672 Handle<String> class_name = instr->hydrogen()->class_name(); | 1698 Handle<String> class_name = instr->hydrogen()->class_name(); |
| 1673 | 1699 |
| 1674 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1700 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1675 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1701 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1676 | 1702 |
| 1677 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 1703 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 1678 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1704 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 1679 | 1705 |
| 1680 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2); | 1706 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2); |
| 1681 | 1707 |
| 1682 EmitBranch(true_block, false_block, equal); | 1708 EmitBranch(true_block, false_block, equal); |
| 1683 } | 1709 } |
| 1684 | 1710 |
| 1685 | 1711 |
| 1686 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { | 1712 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
| 1687 Register reg = ToRegister(instr->input()); | 1713 Register reg = ToRegister(instr->InputAt(0)); |
| 1688 int true_block = instr->true_block_id(); | 1714 int true_block = instr->true_block_id(); |
| 1689 int false_block = instr->false_block_id(); | 1715 int false_block = instr->false_block_id(); |
| 1690 | 1716 |
| 1691 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 1717 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
| 1692 EmitBranch(true_block, false_block, equal); | 1718 EmitBranch(true_block, false_block, equal); |
| 1693 } | 1719 } |
| 1694 | 1720 |
| 1695 | 1721 |
| 1696 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 1722 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 1697 // Object and function are in fixed registers eax and edx. | 1723 // Object and function are in fixed registers defined by the stub. |
| 1724 ASSERT(ToRegister(instr->context()).is(esi)); |
| 1698 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1725 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
| 1699 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1726 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1700 | 1727 |
| 1701 NearLabel true_value, done; | 1728 NearLabel true_value, done; |
| 1702 __ test(eax, Operand(eax)); | 1729 __ test(eax, Operand(eax)); |
| 1703 __ j(zero, &true_value); | 1730 __ j(zero, &true_value); |
| 1704 __ mov(ToRegister(instr->result()), Factory::false_value()); | 1731 __ mov(ToRegister(instr->result()), Factory::false_value()); |
| 1705 __ jmp(&done); | 1732 __ jmp(&done); |
| 1706 __ bind(&true_value); | 1733 __ bind(&true_value); |
| 1707 __ mov(ToRegister(instr->result()), Factory::true_value()); | 1734 __ mov(ToRegister(instr->result()), Factory::true_value()); |
| 1708 __ bind(&done); | 1735 __ bind(&done); |
| 1709 } | 1736 } |
| 1710 | 1737 |
| 1711 | 1738 |
| 1712 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { | 1739 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { |
| 1740 ASSERT(ToRegister(instr->context()).is(esi)); |
| 1713 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1741 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1714 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1742 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1715 | 1743 |
| 1716 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1744 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
| 1717 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1745 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1718 __ test(eax, Operand(eax)); | 1746 __ test(eax, Operand(eax)); |
| 1719 EmitBranch(true_block, false_block, zero); | 1747 EmitBranch(true_block, false_block, zero); |
| 1720 } | 1748 } |
| 1721 | 1749 |
| 1722 | 1750 |
| 1751 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
| 1752 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
| 1753 public: |
| 1754 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
| 1755 LInstanceOfKnownGlobal* instr) |
| 1756 : LDeferredCode(codegen), instr_(instr) { } |
| 1757 virtual void Generate() { |
| 1758 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); |
| 1759 } |
| 1760 |
| 1761 Label* map_check() { return &map_check_; } |
| 1762 |
| 1763 private: |
| 1764 LInstanceOfKnownGlobal* instr_; |
| 1765 Label map_check_; |
| 1766 }; |
| 1767 |
| 1768 DeferredInstanceOfKnownGlobal* deferred; |
| 1769 deferred = new DeferredInstanceOfKnownGlobal(this, instr); |
| 1770 |
| 1771 Label done, false_result; |
| 1772 Register object = ToRegister(instr->InputAt(0)); |
| 1773 Register temp = ToRegister(instr->TempAt(0)); |
| 1774 |
| 1775 // A Smi is not an instance of anything. |
| 1776 __ test(object, Immediate(kSmiTagMask)); |
| 1777 __ j(zero, &false_result, not_taken); |
| 1778 |
| 1779 // This is the inlined call site instanceof cache. The two occurences of the |
| 1780 // hole value will be patched to the last map/result pair generated by the |
| 1781 // instanceof stub. |
| 1782 NearLabel cache_miss; |
| 1783 Register map = ToRegister(instr->TempAt(0)); |
| 1784 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); |
| 1785 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 1786 __ cmp(map, Factory::the_hole_value()); // Patched to cached map. |
| 1787 __ j(not_equal, &cache_miss, not_taken); |
| 1788 __ mov(eax, Factory::the_hole_value()); // Patched to either true or false. |
| 1789 __ jmp(&done); |
| 1790 |
| 1791 // The inlined call site cache did not match. Check for null and string |
| 1792 // before calling the deferred code. |
| 1793 __ bind(&cache_miss); |
| 1794 // Null is not an instance of anything. |
| 1795 __ cmp(object, Factory::null_value()); |
| 1796 __ j(equal, &false_result); |
| 1797 |
| 1798 // String values are not instances of anything. |
| 1799 Condition is_string = masm_->IsObjectStringType(object, temp, temp); |
| 1800 __ j(is_string, &false_result); |
| 1801 |
| 1802 // Go to the deferred code. |
| 1803 __ jmp(deferred->entry()); |
| 1804 |
| 1805 __ bind(&false_result); |
| 1806 __ mov(ToRegister(instr->result()), Factory::false_value()); |
| 1807 |
| 1808 // Here result has either true or false. Deferred code also produces true or |
| 1809 // false object. |
| 1810 __ bind(deferred->exit()); |
| 1811 __ bind(&done); |
| 1812 } |
| 1813 |
| 1814 |
| 1815 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
| 1816 Label* map_check) { |
| 1817 __ PushSafepointRegisters(); |
| 1818 |
| 1819 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
| 1820 flags = static_cast<InstanceofStub::Flags>( |
| 1821 flags | InstanceofStub::kArgsInRegisters); |
| 1822 flags = static_cast<InstanceofStub::Flags>( |
| 1823 flags | InstanceofStub::kCallSiteInlineCheck); |
| 1824 flags = static_cast<InstanceofStub::Flags>( |
| 1825 flags | InstanceofStub::kReturnTrueFalseObject); |
| 1826 InstanceofStub stub(flags); |
| 1827 |
| 1828 // Get the temp register reserved by the instruction. This needs to be edi as |
| 1829 // its slot of the pushing of safepoint registers is used to communicate the |
| 1830 // offset to the location of the map check. |
| 1831 Register temp = ToRegister(instr->TempAt(0)); |
| 1832 ASSERT(temp.is(edi)); |
| 1833 __ mov(InstanceofStub::right(), Immediate(instr->function())); |
| 1834 static const int kAdditionalDelta = 16; |
| 1835 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
| 1836 Label before_push_delta; |
| 1837 __ bind(&before_push_delta); |
| 1838 __ mov(temp, Immediate(delta)); |
| 1839 __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp); |
| 1840 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 1841 __ call(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 1842 ASSERT_EQ(kAdditionalDelta, |
| 1843 masm_->SizeOfCodeGeneratedSince(&before_push_delta)); |
| 1844 RecordSafepointWithRegisters( |
| 1845 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 1846 // Put the result value into the eax slot and restore all registers. |
| 1847 __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax); |
| 1848 |
| 1849 __ PopSafepointRegisters(); |
| 1850 } |
| 1851 |
| 1852 |
| 1723 static Condition ComputeCompareCondition(Token::Value op) { | 1853 static Condition ComputeCompareCondition(Token::Value op) { |
| 1724 switch (op) { | 1854 switch (op) { |
| 1725 case Token::EQ_STRICT: | 1855 case Token::EQ_STRICT: |
| 1726 case Token::EQ: | 1856 case Token::EQ: |
| 1727 return equal; | 1857 return equal; |
| 1728 case Token::LT: | 1858 case Token::LT: |
| 1729 return less; | 1859 return less; |
| 1730 case Token::GT: | 1860 case Token::GT: |
| 1731 return greater; | 1861 return greater; |
| 1732 case Token::LTE: | 1862 case Token::LTE: |
| 1733 return less_equal; | 1863 return less_equal; |
| 1734 case Token::GTE: | 1864 case Token::GTE: |
| 1735 return greater_equal; | 1865 return greater_equal; |
| 1736 default: | 1866 default: |
| 1737 UNREACHABLE(); | 1867 UNREACHABLE(); |
| 1738 return no_condition; | 1868 return no_condition; |
| 1739 } | 1869 } |
| 1740 } | 1870 } |
| 1741 | 1871 |
| 1742 | 1872 |
| 1743 void LCodeGen::DoCmpT(LCmpT* instr) { | 1873 void LCodeGen::DoCmpT(LCmpT* instr) { |
| 1744 Token::Value op = instr->op(); | 1874 Token::Value op = instr->op(); |
| 1745 | 1875 |
| 1746 Handle<Code> ic = CompareIC::GetUninitialized(op); | 1876 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 1747 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 1877 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); |
| 1748 | 1878 |
| 1749 Condition condition = ComputeCompareCondition(op); | 1879 Condition condition = ComputeCompareCondition(op); |
| 1750 if (op == Token::GT || op == Token::LTE) { | 1880 if (op == Token::GT || op == Token::LTE) { |
| 1751 condition = ReverseCondition(condition); | 1881 condition = ReverseCondition(condition); |
| 1752 } | 1882 } |
| 1753 NearLabel true_value, done; | 1883 NearLabel true_value, done; |
| 1754 __ test(eax, Operand(eax)); | 1884 __ test(eax, Operand(eax)); |
| 1755 __ j(condition, &true_value); | 1885 __ j(condition, &true_value); |
| 1756 __ mov(ToRegister(instr->result()), Factory::false_value()); | 1886 __ mov(ToRegister(instr->result()), Factory::false_value()); |
| 1757 __ jmp(&done); | 1887 __ jmp(&done); |
| 1758 __ bind(&true_value); | 1888 __ bind(&true_value); |
| 1759 __ mov(ToRegister(instr->result()), Factory::true_value()); | 1889 __ mov(ToRegister(instr->result()), Factory::true_value()); |
| 1760 __ bind(&done); | 1890 __ bind(&done); |
| 1761 } | 1891 } |
| 1762 | 1892 |
| 1763 | 1893 |
| 1764 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { | 1894 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { |
| 1765 Token::Value op = instr->op(); | 1895 Token::Value op = instr->op(); |
| 1766 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1896 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1767 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1897 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1768 | 1898 |
| 1769 Handle<Code> ic = CompareIC::GetUninitialized(op); | 1899 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 1770 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 1900 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); |
| 1771 | 1901 |
| 1772 // The compare stub expects compare condition and the input operands | 1902 // The compare stub expects compare condition and the input operands |
| 1773 // reversed for GT and LTE. | 1903 // reversed for GT and LTE. |
| 1774 Condition condition = ComputeCompareCondition(op); | 1904 Condition condition = ComputeCompareCondition(op); |
| 1775 if (op == Token::GT || op == Token::LTE) { | 1905 if (op == Token::GT || op == Token::LTE) { |
| 1776 condition = ReverseCondition(condition); | 1906 condition = ReverseCondition(condition); |
| 1777 } | 1907 } |
| 1778 __ test(eax, Operand(eax)); | 1908 __ test(eax, Operand(eax)); |
| 1779 EmitBranch(true_block, false_block, condition); | 1909 EmitBranch(true_block, false_block, condition); |
| 1780 } | 1910 } |
| 1781 | 1911 |
| 1782 | 1912 |
| 1783 void LCodeGen::DoReturn(LReturn* instr) { | 1913 void LCodeGen::DoReturn(LReturn* instr) { |
| 1784 if (FLAG_trace) { | 1914 if (FLAG_trace) { |
| 1785 // Preserve the return value on the stack and rely on the runtime | 1915 // Preserve the return value on the stack and rely on the runtime call |
| 1786 // call to return the value in the same register. | 1916 // to return the value in the same register. We're leaving the code |
| 1917 // managed by the register allocator and tearing down the frame, it's |
| 1918 // safe to write to the context register. |
| 1787 __ push(eax); | 1919 __ push(eax); |
| 1920 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 1788 __ CallRuntime(Runtime::kTraceExit, 1); | 1921 __ CallRuntime(Runtime::kTraceExit, 1); |
| 1789 } | 1922 } |
| 1790 __ mov(esp, ebp); | 1923 __ mov(esp, ebp); |
| 1791 __ pop(ebp); | 1924 __ pop(ebp); |
| 1792 __ ret((ParameterCount() + 1) * kPointerSize); | 1925 __ Ret((ParameterCount() + 1) * kPointerSize, ecx); |
| 1793 } | 1926 } |
| 1794 | 1927 |
| 1795 | 1928 |
| 1796 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { | 1929 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { |
| 1797 Register result = ToRegister(instr->result()); | 1930 Register result = ToRegister(instr->result()); |
| 1798 __ mov(result, Operand::Cell(instr->hydrogen()->cell())); | 1931 __ mov(result, Operand::Cell(instr->hydrogen()->cell())); |
| 1799 if (instr->hydrogen()->check_hole_value()) { | 1932 if (instr->hydrogen()->check_hole_value()) { |
| 1800 __ cmp(result, Factory::the_hole_value()); | 1933 __ cmp(result, Factory::the_hole_value()); |
| 1801 DeoptimizeIf(equal, instr->environment()); | 1934 DeoptimizeIf(equal, instr->environment()); |
| 1802 } | 1935 } |
| 1803 } | 1936 } |
| 1804 | 1937 |
| 1805 | 1938 |
| 1806 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { | 1939 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { |
| 1807 Register value = ToRegister(instr->input()); | 1940 Register value = ToRegister(instr->InputAt(0)); |
| 1808 __ mov(Operand::Cell(instr->hydrogen()->cell()), value); | 1941 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell()); |
| 1942 |
| 1943 // If the cell we are storing to contains the hole it could have |
| 1944 // been deleted from the property dictionary. In that case, we need |
| 1945 // to update the property details in the property dictionary to mark |
| 1946 // it as no longer deleted. We deoptimize in that case. |
| 1947 if (instr->hydrogen()->check_hole_value()) { |
| 1948 __ cmp(cell_operand, Factory::the_hole_value()); |
| 1949 DeoptimizeIf(equal, instr->environment()); |
| 1950 } |
| 1951 |
| 1952 // Store the value. |
| 1953 __ mov(cell_operand, value); |
| 1954 } |
| 1955 |
| 1956 |
| 1957 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
| 1958 Register context = ToRegister(instr->context()); |
| 1959 Register result = ToRegister(instr->result()); |
| 1960 __ mov(result, ContextOperand(context, instr->slot_index())); |
| 1961 } |
| 1962 |
| 1963 |
| 1964 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
| 1965 Register context = ToRegister(instr->context()); |
| 1966 Register value = ToRegister(instr->value()); |
| 1967 __ mov(ContextOperand(context, instr->slot_index()), value); |
| 1968 if (instr->needs_write_barrier()) { |
| 1969 Register temp = ToRegister(instr->TempAt(0)); |
| 1970 int offset = Context::SlotOffset(instr->slot_index()); |
| 1971 __ RecordWrite(context, offset, value, temp, kSaveFPRegs); |
| 1972 } |
| 1809 } | 1973 } |
| 1810 | 1974 |
| 1811 | 1975 |
| 1812 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { | 1976 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| 1813 Register object = ToRegister(instr->input()); | 1977 Register object = ToRegister(instr->InputAt(0)); |
| 1814 Register result = ToRegister(instr->result()); | 1978 Register result = ToRegister(instr->result()); |
| 1815 if (instr->hydrogen()->is_in_object()) { | 1979 if (instr->hydrogen()->is_in_object()) { |
| 1816 __ mov(result, FieldOperand(object, instr->hydrogen()->offset())); | 1980 __ mov(result, FieldOperand(object, instr->hydrogen()->offset())); |
| 1817 } else { | 1981 } else { |
| 1818 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 1982 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 1819 __ mov(result, FieldOperand(result, instr->hydrogen()->offset())); | 1983 __ mov(result, FieldOperand(result, instr->hydrogen()->offset())); |
| 1820 } | 1984 } |
| 1821 } | 1985 } |
| 1822 | 1986 |
| 1823 | 1987 |
| 1824 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 1988 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 1989 ASSERT(ToRegister(instr->context()).is(esi)); |
| 1825 ASSERT(ToRegister(instr->object()).is(eax)); | 1990 ASSERT(ToRegister(instr->object()).is(eax)); |
| 1826 ASSERT(ToRegister(instr->result()).is(eax)); | 1991 ASSERT(ToRegister(instr->result()).is(eax)); |
| 1827 | 1992 |
| 1828 __ mov(ecx, instr->name()); | 1993 __ mov(ecx, instr->name()); |
| 1829 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 1994 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
| 1830 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 1995 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 1831 } | 1996 } |
| 1832 | 1997 |
| 1833 | 1998 |
| 1834 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 1999 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
| 1835 Register function = ToRegister(instr->function()); | 2000 Register function = ToRegister(instr->function()); |
| 1836 Register temp = ToRegister(instr->temporary()); | 2001 Register temp = ToRegister(instr->TempAt(0)); |
| 1837 Register result = ToRegister(instr->result()); | 2002 Register result = ToRegister(instr->result()); |
| 1838 | 2003 |
| 1839 // Check that the function really is a function. | 2004 // Check that the function really is a function. |
| 1840 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); | 2005 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); |
| 1841 DeoptimizeIf(not_equal, instr->environment()); | 2006 DeoptimizeIf(not_equal, instr->environment()); |
| 1842 | 2007 |
| 1843 // Check whether the function has an instance prototype. | 2008 // Check whether the function has an instance prototype. |
| 1844 NearLabel non_instance; | 2009 NearLabel non_instance; |
| 1845 __ test_b(FieldOperand(result, Map::kBitFieldOffset), | 2010 __ test_b(FieldOperand(result, Map::kBitFieldOffset), |
| 1846 1 << Map::kHasNonInstancePrototype); | 2011 1 << Map::kHasNonInstancePrototype); |
| (...skipping 20 matching lines...) Expand all Loading... |
| 1867 // in the function's map. | 2032 // in the function's map. |
| 1868 __ bind(&non_instance); | 2033 __ bind(&non_instance); |
| 1869 __ mov(result, FieldOperand(result, Map::kConstructorOffset)); | 2034 __ mov(result, FieldOperand(result, Map::kConstructorOffset)); |
| 1870 | 2035 |
| 1871 // All done. | 2036 // All done. |
| 1872 __ bind(&done); | 2037 __ bind(&done); |
| 1873 } | 2038 } |
| 1874 | 2039 |
| 1875 | 2040 |
| 1876 void LCodeGen::DoLoadElements(LLoadElements* instr) { | 2041 void LCodeGen::DoLoadElements(LLoadElements* instr) { |
| 1877 ASSERT(instr->result()->Equals(instr->input())); | 2042 Register result = ToRegister(instr->result()); |
| 1878 Register reg = ToRegister(instr->input()); | 2043 Register input = ToRegister(instr->InputAt(0)); |
| 1879 __ mov(reg, FieldOperand(reg, JSObject::kElementsOffset)); | 2044 __ mov(result, FieldOperand(input, JSObject::kElementsOffset)); |
| 1880 if (FLAG_debug_code) { | 2045 if (FLAG_debug_code) { |
| 1881 NearLabel done; | 2046 NearLabel done; |
| 1882 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 2047 __ cmp(FieldOperand(result, HeapObject::kMapOffset), |
| 1883 Immediate(Factory::fixed_array_map())); | 2048 Immediate(Factory::fixed_array_map())); |
| 1884 __ j(equal, &done); | 2049 __ j(equal, &done); |
| 1885 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 2050 __ cmp(FieldOperand(result, HeapObject::kMapOffset), |
| 2051 Immediate(Factory::pixel_array_map())); |
| 2052 __ j(equal, &done); |
| 2053 __ cmp(FieldOperand(result, HeapObject::kMapOffset), |
| 1886 Immediate(Factory::fixed_cow_array_map())); | 2054 Immediate(Factory::fixed_cow_array_map())); |
| 1887 __ Check(equal, "Check for fast elements failed."); | 2055 __ Check(equal, "Check for fast elements or pixel array failed."); |
| 1888 __ bind(&done); | 2056 __ bind(&done); |
| 1889 } | 2057 } |
| 1890 } | 2058 } |
| 1891 | 2059 |
| 1892 | 2060 |
| 2061 void LCodeGen::DoLoadPixelArrayExternalPointer( |
| 2062 LLoadPixelArrayExternalPointer* instr) { |
| 2063 Register result = ToRegister(instr->result()); |
| 2064 Register input = ToRegister(instr->InputAt(0)); |
| 2065 __ mov(result, FieldOperand(input, PixelArray::kExternalPointerOffset)); |
| 2066 } |
| 2067 |
| 2068 |
| 1893 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { | 2069 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { |
| 1894 Register arguments = ToRegister(instr->arguments()); | 2070 Register arguments = ToRegister(instr->arguments()); |
| 1895 Register length = ToRegister(instr->length()); | 2071 Register length = ToRegister(instr->length()); |
| 1896 Operand index = ToOperand(instr->index()); | 2072 Operand index = ToOperand(instr->index()); |
| 1897 Register result = ToRegister(instr->result()); | 2073 Register result = ToRegister(instr->result()); |
| 1898 | 2074 |
| 1899 __ sub(length, index); | 2075 __ sub(length, index); |
| 1900 DeoptimizeIf(below_equal, instr->environment()); | 2076 DeoptimizeIf(below_equal, instr->environment()); |
| 1901 | 2077 |
| 2078 // There are two words between the frame pointer and the last argument. |
| 2079 // Subtracting from length accounts for one of them add one more. |
| 1902 __ mov(result, Operand(arguments, length, times_4, kPointerSize)); | 2080 __ mov(result, Operand(arguments, length, times_4, kPointerSize)); |
| 1903 } | 2081 } |
| 1904 | 2082 |
| 1905 | 2083 |
| 1906 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { | 2084 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { |
| 1907 Register elements = ToRegister(instr->elements()); | 2085 Register elements = ToRegister(instr->elements()); |
| 1908 Register key = ToRegister(instr->key()); | 2086 Register key = ToRegister(instr->key()); |
| 1909 Register result; | 2087 Register result = ToRegister(instr->result()); |
| 1910 if (instr->load_result() != NULL) { | 2088 ASSERT(result.is(elements)); |
| 1911 result = ToRegister(instr->load_result()); | |
| 1912 } else { | |
| 1913 result = ToRegister(instr->result()); | |
| 1914 ASSERT(result.is(elements)); | |
| 1915 } | |
| 1916 | 2089 |
| 1917 // Load the result. | 2090 // Load the result. |
| 1918 __ mov(result, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize)); | 2091 __ mov(result, FieldOperand(elements, |
| 2092 key, |
| 2093 times_pointer_size, |
| 2094 FixedArray::kHeaderSize)); |
| 1919 | 2095 |
| 1920 Representation r = instr->hydrogen()->representation(); | 2096 // Check for the hole value. |
| 1921 if (r.IsInteger32()) { | 2097 __ cmp(result, Factory::the_hole_value()); |
| 1922 // Untag and check for smi. | 2098 DeoptimizeIf(equal, instr->environment()); |
| 1923 __ SmiUntag(result); | 2099 } |
| 1924 DeoptimizeIf(carry, instr->environment()); | 2100 |
| 1925 } else if (r.IsDouble()) { | 2101 |
| 1926 EmitNumberUntagD(result, | 2102 void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) { |
| 1927 ToDoubleRegister(instr->result()), | 2103 Register external_elements = ToRegister(instr->external_pointer()); |
| 1928 instr->environment()); | 2104 Register key = ToRegister(instr->key()); |
| 1929 } else { | 2105 Register result = ToRegister(instr->result()); |
| 1930 // Check for the hole value. | 2106 ASSERT(result.is(external_elements)); |
| 1931 ASSERT(r.IsTagged()); | 2107 |
| 1932 __ cmp(result, Factory::the_hole_value()); | 2108 // Load the result. |
| 1933 DeoptimizeIf(equal, instr->environment()); | 2109 __ movzx_b(result, Operand(external_elements, key, times_1, 0)); |
| 1934 } | |
| 1935 } | 2110 } |
| 1936 | 2111 |
| 1937 | 2112 |
| 1938 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 2113 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 2114 ASSERT(ToRegister(instr->context()).is(esi)); |
| 1939 ASSERT(ToRegister(instr->object()).is(edx)); | 2115 ASSERT(ToRegister(instr->object()).is(edx)); |
| 1940 ASSERT(ToRegister(instr->key()).is(eax)); | 2116 ASSERT(ToRegister(instr->key()).is(eax)); |
| 1941 | 2117 |
| 1942 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 2118 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
| 1943 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2119 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 1944 } | 2120 } |
| 1945 | 2121 |
| 1946 | 2122 |
| 1947 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 2123 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| 1948 Register result = ToRegister(instr->result()); | 2124 Register result = ToRegister(instr->result()); |
| 1949 | 2125 |
| 1950 // Check for arguments adapter frame. | 2126 // Check for arguments adapter frame. |
| 1951 Label done, adapted; | 2127 NearLabel done, adapted; |
| 1952 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2128 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
| 1953 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); | 2129 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); |
| 1954 __ cmp(Operand(result), | 2130 __ cmp(Operand(result), |
| 1955 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 2131 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1956 __ j(equal, &adapted); | 2132 __ j(equal, &adapted); |
| 1957 | 2133 |
| 1958 // No arguments adaptor frame. | 2134 // No arguments adaptor frame. |
| 1959 __ mov(result, Operand(ebp)); | 2135 __ mov(result, Operand(ebp)); |
| 1960 __ jmp(&done); | 2136 __ jmp(&done); |
| 1961 | 2137 |
| 1962 // Arguments adaptor frame present. | 2138 // Arguments adaptor frame present. |
| 1963 __ bind(&adapted); | 2139 __ bind(&adapted); |
| 1964 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2140 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
| 1965 | 2141 |
| 1966 // Done. Pointer to topmost argument is in result. | 2142 // Result is the frame pointer for the frame if not adapted and for the real |
| 2143 // frame below the adaptor frame if adapted. |
| 1967 __ bind(&done); | 2144 __ bind(&done); |
| 1968 } | 2145 } |
| 1969 | 2146 |
| 1970 | 2147 |
| 1971 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { | 2148 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { |
| 1972 Operand elem = ToOperand(instr->input()); | 2149 Operand elem = ToOperand(instr->InputAt(0)); |
| 1973 Register result = ToRegister(instr->result()); | 2150 Register result = ToRegister(instr->result()); |
| 1974 | 2151 |
| 1975 Label done; | 2152 NearLabel done; |
| 1976 | 2153 |
| 1977 // No arguments adaptor frame. Number of arguments is fixed. | 2154 // If no arguments adaptor frame the number of arguments is fixed. |
| 1978 __ cmp(ebp, elem); | 2155 __ cmp(ebp, elem); |
| 1979 __ mov(result, Immediate(scope()->num_parameters())); | 2156 __ mov(result, Immediate(scope()->num_parameters())); |
| 1980 __ j(equal, &done); | 2157 __ j(equal, &done); |
| 1981 | 2158 |
| 1982 // Arguments adaptor frame present. Get argument length from there. | 2159 // Arguments adaptor frame present. Get argument length from there. |
| 1983 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2160 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
| 1984 __ mov(result, Operand(result, | 2161 __ mov(result, Operand(result, |
| 1985 ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2162 ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1986 __ SmiUntag(result); | 2163 __ SmiUntag(result); |
| 1987 | 2164 |
| 1988 // Done. Argument length is in result register. | 2165 // Argument length is in result register. |
| 1989 __ bind(&done); | 2166 __ bind(&done); |
| 1990 } | 2167 } |
| 1991 | 2168 |
| 1992 | 2169 |
| 1993 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 2170 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 1994 Register receiver = ToRegister(instr->receiver()); | 2171 Register receiver = ToRegister(instr->receiver()); |
| 1995 ASSERT(ToRegister(instr->function()).is(edi)); | 2172 Register function = ToRegister(instr->function()); |
| 2173 Register length = ToRegister(instr->length()); |
| 2174 Register elements = ToRegister(instr->elements()); |
| 2175 Register scratch = ToRegister(instr->TempAt(0)); |
| 2176 ASSERT(receiver.is(eax)); // Used for parameter count. |
| 2177 ASSERT(function.is(edi)); // Required by InvokeFunction. |
| 1996 ASSERT(ToRegister(instr->result()).is(eax)); | 2178 ASSERT(ToRegister(instr->result()).is(eax)); |
| 1997 | 2179 |
| 1998 // If the receiver is null or undefined, we have to pass the | 2180 // If the receiver is null or undefined, we have to pass the global object |
| 1999 // global object as a receiver. | 2181 // as a receiver. |
| 2000 NearLabel global_receiver, receiver_ok; | 2182 NearLabel global_object, receiver_ok; |
| 2001 __ cmp(receiver, Factory::null_value()); | 2183 __ cmp(receiver, Factory::null_value()); |
| 2002 __ j(equal, &global_receiver); | 2184 __ j(equal, &global_object); |
| 2003 __ cmp(receiver, Factory::undefined_value()); | 2185 __ cmp(receiver, Factory::undefined_value()); |
| 2004 __ j(not_equal, &receiver_ok); | 2186 __ j(equal, &global_object); |
| 2005 __ bind(&global_receiver); | 2187 |
| 2006 __ mov(receiver, GlobalObjectOperand()); | 2188 // The receiver should be a JS object. |
| 2189 __ test(receiver, Immediate(kSmiTagMask)); |
| 2190 DeoptimizeIf(equal, instr->environment()); |
| 2191 __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, scratch); |
| 2192 DeoptimizeIf(below, instr->environment()); |
| 2193 __ jmp(&receiver_ok); |
| 2194 |
| 2195 __ bind(&global_object); |
| 2196 // TODO(kmillikin): We have a hydrogen value for the global object. See |
| 2197 // if it's better to use it than to explicitly fetch it from the context |
| 2198 // here. |
| 2199 __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2200 __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX)); |
| 2007 __ bind(&receiver_ok); | 2201 __ bind(&receiver_ok); |
| 2008 | 2202 |
| 2009 Register length = ToRegister(instr->length()); | |
| 2010 Register elements = ToRegister(instr->elements()); | |
| 2011 | |
| 2012 Label invoke; | |
| 2013 | |
| 2014 // Copy the arguments to this function possibly from the | 2203 // Copy the arguments to this function possibly from the |
| 2015 // adaptor frame below it. | 2204 // adaptor frame below it. |
| 2016 const uint32_t kArgumentsLimit = 1 * KB; | 2205 const uint32_t kArgumentsLimit = 1 * KB; |
| 2017 __ cmp(length, kArgumentsLimit); | 2206 __ cmp(length, kArgumentsLimit); |
| 2018 DeoptimizeIf(above, instr->environment()); | 2207 DeoptimizeIf(above, instr->environment()); |
| 2019 | 2208 |
| 2020 __ push(receiver); | 2209 __ push(receiver); |
| 2021 __ mov(receiver, length); | 2210 __ mov(receiver, length); |
| 2022 | 2211 |
| 2023 // Loop through the arguments pushing them onto the execution | 2212 // Loop through the arguments pushing them onto the execution |
| 2024 // stack. | 2213 // stack. |
| 2025 Label loop; | 2214 NearLabel invoke, loop; |
| 2026 // length is a small non-negative integer, due to the test above. | 2215 // length is a small non-negative integer, due to the test above. |
| 2027 __ test(length, Operand(length)); | 2216 __ test(length, Operand(length)); |
| 2028 __ j(zero, &invoke); | 2217 __ j(zero, &invoke); |
| 2029 __ bind(&loop); | 2218 __ bind(&loop); |
| 2030 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 2219 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); |
| 2031 __ dec(length); | 2220 __ dec(length); |
| 2032 __ j(not_zero, &loop); | 2221 __ j(not_zero, &loop); |
| 2033 | 2222 |
| 2034 // Invoke the function. | 2223 // Invoke the function. |
| 2035 __ bind(&invoke); | 2224 __ bind(&invoke); |
| 2036 ASSERT(receiver.is(eax)); | 2225 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
| 2226 LPointerMap* pointers = instr->pointer_map(); |
| 2227 LEnvironment* env = instr->deoptimization_environment(); |
| 2228 RecordPosition(pointers->position()); |
| 2229 RegisterEnvironmentForDeoptimization(env); |
| 2230 SafepointGenerator safepoint_generator(this, |
| 2231 pointers, |
| 2232 env->deoptimization_index(), |
| 2233 true); |
| 2037 v8::internal::ParameterCount actual(eax); | 2234 v8::internal::ParameterCount actual(eax); |
| 2038 SafepointGenerator safepoint_generator(this, | 2235 __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator); |
| 2039 instr->pointer_map(), | |
| 2040 Safepoint::kNoDeoptimizationIndex); | |
| 2041 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator); | |
| 2042 } | 2236 } |
| 2043 | 2237 |
| 2044 | 2238 |
| 2045 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2239 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 2046 LOperand* argument = instr->input(); | 2240 LOperand* argument = instr->InputAt(0); |
| 2047 if (argument->IsConstantOperand()) { | 2241 if (argument->IsConstantOperand()) { |
| 2048 __ push(ToImmediate(argument)); | 2242 __ push(ToImmediate(argument)); |
| 2049 } else { | 2243 } else { |
| 2050 __ push(ToOperand(argument)); | 2244 __ push(ToOperand(argument)); |
| 2051 } | 2245 } |
| 2052 } | 2246 } |
| 2053 | 2247 |
| 2054 | 2248 |
| 2249 void LCodeGen::DoContext(LContext* instr) { |
| 2250 Register result = ToRegister(instr->result()); |
| 2251 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2252 } |
| 2253 |
| 2254 |
| 2255 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| 2256 Register context = ToRegister(instr->context()); |
| 2257 Register result = ToRegister(instr->result()); |
| 2258 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX))); |
| 2259 __ mov(result, FieldOperand(result, JSFunction::kContextOffset)); |
| 2260 } |
| 2261 |
| 2262 |
| 2055 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 2263 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 2264 Register context = ToRegister(instr->context()); |
| 2056 Register result = ToRegister(instr->result()); | 2265 Register result = ToRegister(instr->result()); |
| 2057 __ mov(result, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 2266 __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 2058 } | 2267 } |
| 2059 | 2268 |
| 2060 | 2269 |
| 2061 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { | 2270 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { |
| 2271 Register global = ToRegister(instr->global()); |
| 2062 Register result = ToRegister(instr->result()); | 2272 Register result = ToRegister(instr->result()); |
| 2063 __ mov(result, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 2273 __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); |
| 2064 __ mov(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset)); | |
| 2065 } | 2274 } |
| 2066 | 2275 |
| 2067 | 2276 |
| 2068 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 2277 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 2069 int arity, | 2278 int arity, |
| 2070 LInstruction* instr) { | 2279 LInstruction* instr) { |
| 2071 // Change context if needed. | 2280 // Change context if needed. |
| 2072 bool change_context = | 2281 bool change_context = |
| 2073 (graph()->info()->closure()->context() != function->context()) || | 2282 (graph()->info()->closure()->context() != function->context()) || |
| 2074 scope()->contains_with() || | 2283 scope()->contains_with() || |
| 2075 (scope()->num_heap_slots() > 0); | 2284 (scope()->num_heap_slots() > 0); |
| 2076 if (change_context) { | 2285 if (change_context) { |
| 2077 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | 2286 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); |
| 2287 } else { |
| 2288 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2078 } | 2289 } |
| 2079 | 2290 |
| 2080 // Set eax to arguments count if adaption is not needed. Assumes that eax | 2291 // Set eax to arguments count if adaption is not needed. Assumes that eax |
| 2081 // is available to write to at this point. | 2292 // is available to write to at this point. |
| 2082 if (!function->NeedsArgumentsAdaption()) { | 2293 if (!function->NeedsArgumentsAdaption()) { |
| 2083 __ mov(eax, arity); | 2294 __ mov(eax, arity); |
| 2084 } | 2295 } |
| 2085 | 2296 |
| 2086 LPointerMap* pointers = instr->pointer_map(); | 2297 LPointerMap* pointers = instr->pointer_map(); |
| 2087 RecordPosition(pointers->position()); | 2298 RecordPosition(pointers->position()); |
| 2088 | 2299 |
| 2089 // Invoke function. | 2300 // Invoke function. |
| 2090 if (*function == *graph()->info()->closure()) { | 2301 if (*function == *graph()->info()->closure()) { |
| 2091 __ CallSelf(); | 2302 __ CallSelf(); |
| 2092 } else { | 2303 } else { |
| 2304 // This is an indirect call and will not be recorded in the reloc info. |
| 2305 // Add a comment to the reloc info in case we need to patch this during |
| 2306 // deoptimization. |
| 2307 __ RecordComment(RelocInfo::kFillerCommentString, true); |
| 2093 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 2308 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); |
| 2094 } | 2309 } |
| 2095 | 2310 |
| 2096 // Setup deoptimization. | 2311 // Setup deoptimization. |
| 2097 RegisterLazyDeoptimization(instr); | 2312 RegisterLazyDeoptimization(instr); |
| 2098 | |
| 2099 // Restore context. | |
| 2100 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 2101 } | 2313 } |
| 2102 | 2314 |
| 2103 | 2315 |
| 2104 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2316 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 2105 ASSERT(ToRegister(instr->result()).is(eax)); | 2317 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2106 __ mov(edi, instr->function()); | 2318 __ mov(edi, instr->function()); |
| 2107 CallKnownFunction(instr->function(), instr->arity(), instr); | 2319 CallKnownFunction(instr->function(), instr->arity(), instr); |
| 2108 } | 2320 } |
| 2109 | 2321 |
| 2110 | 2322 |
| 2111 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { | 2323 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { |
| 2112 Register input_reg = ToRegister(instr->input()); | 2324 Register input_reg = ToRegister(instr->InputAt(0)); |
| 2113 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 2325 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 2114 Factory::heap_number_map()); | 2326 Factory::heap_number_map()); |
| 2115 DeoptimizeIf(not_equal, instr->environment()); | 2327 DeoptimizeIf(not_equal, instr->environment()); |
| 2116 | 2328 |
| 2117 Label done; | 2329 Label done; |
| 2118 Register tmp = input_reg.is(eax) ? ecx : eax; | 2330 Register tmp = input_reg.is(eax) ? ecx : eax; |
| 2119 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; | 2331 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; |
| 2120 | 2332 |
| 2121 // Preserve the value of all registers. | 2333 // Preserve the value of all registers. |
| 2122 __ PushSafepointRegisters(); | 2334 __ PushSafepointRegisters(); |
| 2123 | 2335 |
| 2124 Label negative; | 2336 Label negative; |
| 2125 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 2337 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
| 2126 // Check the sign of the argument. If the argument is positive, | 2338 // Check the sign of the argument. If the argument is positive, just |
| 2127 // just return it. | 2339 // return it. We do not need to patch the stack since |input| and |
| 2340 // |result| are the same register and |input| will be restored |
| 2341 // unchanged by popping safepoint registers. |
| 2128 __ test(tmp, Immediate(HeapNumber::kSignMask)); | 2342 __ test(tmp, Immediate(HeapNumber::kSignMask)); |
| 2129 __ j(not_zero, &negative); | 2343 __ j(not_zero, &negative); |
| 2130 __ mov(tmp, input_reg); | |
| 2131 __ jmp(&done); | 2344 __ jmp(&done); |
| 2132 | 2345 |
| 2133 __ bind(&negative); | 2346 __ bind(&negative); |
| 2134 | 2347 |
| 2135 Label allocated, slow; | 2348 Label allocated, slow; |
| 2136 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); | 2349 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); |
| 2137 __ jmp(&allocated); | 2350 __ jmp(&allocated); |
| 2138 | 2351 |
| 2139 // Slow case: Call the runtime system to do the number allocation. | 2352 // Slow case: Call the runtime system to do the number allocation. |
| 2140 __ bind(&slow); | 2353 __ bind(&slow); |
| 2141 | 2354 |
| 2355 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2142 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 2356 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 2143 RecordSafepointWithRegisters( | 2357 RecordSafepointWithRegisters( |
| 2144 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 2358 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 2145 // Set the pointer to the new heap number in tmp. | 2359 // Set the pointer to the new heap number in tmp. |
| 2146 if (!tmp.is(eax)) __ mov(tmp, eax); | 2360 if (!tmp.is(eax)) __ mov(tmp, eax); |
| 2147 | 2361 |
| 2148 // Restore input_reg after call to runtime. | 2362 // Restore input_reg after call to runtime. |
| 2149 __ mov(input_reg, Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize)); | 2363 __ mov(input_reg, Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize)); |
| 2150 | 2364 |
| 2151 __ bind(&allocated); | 2365 __ bind(&allocated); |
| 2152 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 2366 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
| 2153 __ and_(tmp2, ~HeapNumber::kSignMask); | 2367 __ and_(tmp2, ~HeapNumber::kSignMask); |
| 2154 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); | 2368 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); |
| 2155 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); | 2369 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); |
| 2156 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); | 2370 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); |
| 2371 __ mov(Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize), tmp); |
| 2157 | 2372 |
| 2158 __ bind(&done); | 2373 __ bind(&done); |
| 2159 __ mov(Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize), tmp); | 2374 __ PopSafepointRegisters(); |
| 2375 } |
| 2160 | 2376 |
| 2161 __ PopSafepointRegisters(); | 2377 |
| 2378 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { |
| 2379 Register input_reg = ToRegister(instr->InputAt(0)); |
| 2380 __ test(input_reg, Operand(input_reg)); |
| 2381 Label is_positive; |
| 2382 __ j(not_sign, &is_positive); |
| 2383 __ neg(input_reg); |
| 2384 __ test(input_reg, Operand(input_reg)); |
| 2385 DeoptimizeIf(negative, instr->environment()); |
| 2386 __ bind(&is_positive); |
| 2162 } | 2387 } |
| 2163 | 2388 |
| 2164 | 2389 |
| 2165 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) { | 2390 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) { |
| 2166 // Class for deferred case. | 2391 // Class for deferred case. |
| 2167 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode { | 2392 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode { |
| 2168 public: | 2393 public: |
| 2169 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, | 2394 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, |
| 2170 LUnaryMathOperation* instr) | 2395 LUnaryMathOperation* instr) |
| 2171 : LDeferredCode(codegen), instr_(instr) { } | 2396 : LDeferredCode(codegen), instr_(instr) { } |
| 2172 virtual void Generate() { | 2397 virtual void Generate() { |
| 2173 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); | 2398 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); |
| 2174 } | 2399 } |
| 2175 private: | 2400 private: |
| 2176 LUnaryMathOperation* instr_; | 2401 LUnaryMathOperation* instr_; |
| 2177 }; | 2402 }; |
| 2178 | 2403 |
| 2179 ASSERT(instr->input()->Equals(instr->result())); | 2404 ASSERT(instr->InputAt(0)->Equals(instr->result())); |
| 2180 Representation r = instr->hydrogen()->value()->representation(); | 2405 Representation r = instr->hydrogen()->value()->representation(); |
| 2181 | 2406 |
| 2182 if (r.IsDouble()) { | 2407 if (r.IsDouble()) { |
| 2183 XMMRegister scratch = xmm0; | 2408 XMMRegister scratch = xmm0; |
| 2184 XMMRegister input_reg = ToDoubleRegister(instr->input()); | 2409 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); |
| 2185 __ pxor(scratch, scratch); | 2410 __ pxor(scratch, scratch); |
| 2186 __ subsd(scratch, input_reg); | 2411 __ subsd(scratch, input_reg); |
| 2187 __ pand(input_reg, scratch); | 2412 __ pand(input_reg, scratch); |
| 2188 } else if (r.IsInteger32()) { | 2413 } else if (r.IsInteger32()) { |
| 2189 Register input_reg = ToRegister(instr->input()); | 2414 EmitIntegerMathAbs(instr); |
| 2190 __ test(input_reg, Operand(input_reg)); | |
| 2191 Label is_positive; | |
| 2192 __ j(not_sign, &is_positive); | |
| 2193 __ neg(input_reg); | |
| 2194 __ test(input_reg, Operand(input_reg)); | |
| 2195 DeoptimizeIf(negative, instr->environment()); | |
| 2196 __ bind(&is_positive); | |
| 2197 } else { // Tagged case. | 2415 } else { // Tagged case. |
| 2198 DeferredMathAbsTaggedHeapNumber* deferred = | 2416 DeferredMathAbsTaggedHeapNumber* deferred = |
| 2199 new DeferredMathAbsTaggedHeapNumber(this, instr); | 2417 new DeferredMathAbsTaggedHeapNumber(this, instr); |
| 2200 Label not_smi; | 2418 Register input_reg = ToRegister(instr->InputAt(0)); |
| 2201 Register input_reg = ToRegister(instr->input()); | |
| 2202 // Smi check. | 2419 // Smi check. |
| 2203 __ test(input_reg, Immediate(kSmiTagMask)); | 2420 __ test(input_reg, Immediate(kSmiTagMask)); |
| 2204 __ j(not_zero, deferred->entry()); | 2421 __ j(not_zero, deferred->entry()); |
| 2205 __ test(input_reg, Operand(input_reg)); | 2422 EmitIntegerMathAbs(instr); |
| 2206 Label is_positive; | |
| 2207 __ j(not_sign, &is_positive); | |
| 2208 __ neg(input_reg); | |
| 2209 | |
| 2210 __ test(input_reg, Operand(input_reg)); | |
| 2211 DeoptimizeIf(negative, instr->environment()); | |
| 2212 | |
| 2213 __ bind(&is_positive); | |
| 2214 __ bind(deferred->exit()); | 2423 __ bind(deferred->exit()); |
| 2215 } | 2424 } |
| 2216 } | 2425 } |
| 2217 | 2426 |
| 2218 | 2427 |
// Emits Math.floor for an untagged double input, producing an int32.
// Only non-negative inputs are handled inline: any input that compares
// below zero (which, for ucomisd, includes NaN because an unordered
// compare sets CF) deoptimizes, as does a result outside int32 range.
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
  __ ucomisd(input_reg, xmm_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // ucomisd cannot distinguish -0 from +0, so conservatively deopt
    // on input <= 0 when a -0 result must be caught.
    DeoptimizeIf(below_equal, instr->environment());
  } else {
    // Otherwise only negative (and NaN) inputs go to the deopt path.
    DeoptimizeIf(below, instr->environment());
  }

  // Use truncating instruction (OK because input is positive).
  __ cvttsd2si(output_reg, Operand(input_reg));

  // Overflow is signalled with minint.
  __ cmp(output_reg, 0x80000000u);
  DeoptimizeIf(equal, instr->environment());
}
| 2239 | 2448 |
| 2240 | 2449 |
| 2241 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) { | 2450 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) { |
| 2242 XMMRegister xmm_scratch = xmm0; | 2451 XMMRegister xmm_scratch = xmm0; |
| 2243 Register output_reg = ToRegister(instr->result()); | 2452 Register output_reg = ToRegister(instr->result()); |
| 2244 XMMRegister input_reg = ToDoubleRegister(instr->input()); | 2453 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); |
| 2245 | 2454 |
| 2246 // xmm_scratch = 0.5 | 2455 // xmm_scratch = 0.5 |
| 2247 ExternalReference one_half = ExternalReference::address_of_one_half(); | 2456 ExternalReference one_half = ExternalReference::address_of_one_half(); |
| 2248 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half)); | 2457 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half)); |
| 2249 | 2458 |
| 2250 // input = input + 0.5 | 2459 // input = input + 0.5 |
| 2251 __ addsd(input_reg, xmm_scratch); | 2460 __ addsd(input_reg, xmm_scratch); |
| 2252 | 2461 |
| 2253 // We need to return -0 for the input range [-0.5, 0[, otherwise | 2462 // We need to return -0 for the input range [-0.5, 0[, otherwise |
| 2254 // compute Math.floor(value + 0.5). | 2463 // compute Math.floor(value + 0.5). |
| (...skipping 12 matching lines...) Expand all Loading... |
| 2267 // Use truncating instruction (OK because input is positive). | 2476 // Use truncating instruction (OK because input is positive). |
| 2268 __ cvttsd2si(output_reg, Operand(input_reg)); | 2477 __ cvttsd2si(output_reg, Operand(input_reg)); |
| 2269 | 2478 |
| 2270 // Overflow is signalled with minint. | 2479 // Overflow is signalled with minint. |
| 2271 __ cmp(output_reg, 0x80000000u); | 2480 __ cmp(output_reg, 0x80000000u); |
| 2272 DeoptimizeIf(equal, instr->environment()); | 2481 DeoptimizeIf(equal, instr->environment()); |
| 2273 } | 2482 } |
| 2274 | 2483 |
| 2275 | 2484 |
| 2276 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) { | 2485 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) { |
| 2277 XMMRegister input_reg = ToDoubleRegister(instr->input()); | 2486 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); |
| 2278 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 2487 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
| 2279 __ sqrtsd(input_reg, input_reg); | 2488 __ sqrtsd(input_reg, input_reg); |
| 2280 } | 2489 } |
| 2281 | 2490 |
| 2282 | 2491 |
// Emits Math.pow(x, 0.5) using sqrtsd, in place on the input register.
// An input of -Infinity deoptimizes: sqrt(-Inf) would be NaN, while the
// JS result of pow(-Inf, 0.5) is +Infinity, so the runtime handles it.
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
  ExternalReference negative_infinity =
      ExternalReference::address_of_negative_infinity();
  __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity));
  __ ucomisd(xmm_scratch, input_reg);
  DeoptimizeIf(equal, instr->environment());
  // Adding +0 turns a -0 input into +0, so sqrt yields +0 (not -0).
  __ xorpd(xmm_scratch, xmm_scratch);
  __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
  __ sqrtsd(input_reg, input_reg);
}
| 2294 | 2505 |
| 2295 | 2506 |
| 2296 void LCodeGen::DoPower(LPower* instr) { | 2507 void LCodeGen::DoPower(LPower* instr) { |
| 2297 LOperand* left = instr->left(); | 2508 LOperand* left = instr->InputAt(0); |
| 2298 LOperand* right = instr->right(); | 2509 LOperand* right = instr->InputAt(1); |
| 2299 DoubleRegister result_reg = ToDoubleRegister(instr->result()); | 2510 DoubleRegister result_reg = ToDoubleRegister(instr->result()); |
| 2300 Representation exponent_type = instr->hydrogen()->right()->representation(); | 2511 Representation exponent_type = instr->hydrogen()->right()->representation(); |
| 2301 if (exponent_type.IsDouble()) { | 2512 if (exponent_type.IsDouble()) { |
| 2302 // It is safe to use ebx directly since the instruction is marked | 2513 // It is safe to use ebx directly since the instruction is marked |
| 2303 // as a call. | 2514 // as a call. |
| 2304 __ PrepareCallCFunction(4, ebx); | 2515 __ PrepareCallCFunction(4, ebx); |
| 2305 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | 2516 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); |
| 2306 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right)); | 2517 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right)); |
| 2307 __ CallCFunction(ExternalReference::power_double_double_function(), 4); | 2518 __ CallCFunction(ExternalReference::power_double_double_function(), 4); |
| 2308 } else if (exponent_type.IsInteger32()) { | 2519 } else if (exponent_type.IsInteger32()) { |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2346 __ fstp_d(Operand(esp, 0)); | 2557 __ fstp_d(Operand(esp, 0)); |
| 2347 __ movdbl(result_reg, Operand(esp, 0)); | 2558 __ movdbl(result_reg, Operand(esp, 0)); |
| 2348 __ add(Operand(esp), Immediate(kDoubleSize)); | 2559 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 2349 } | 2560 } |
| 2350 | 2561 |
| 2351 | 2562 |
// Emits Math.log via the untagged transcendental cache stub; the
// double argument and result are constrained to xmm1.
// NOTE(review): the trailing 'false' flag passed to CallCode is
// forwarded as-is — confirm its meaning against CallCode's declaration.
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
| 2358 | 2569 |
| 2359 | 2570 |
// Emits Math.cos via the untagged transcendental cache stub; the
// double argument and result are constrained to xmm1.
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
| 2366 | 2577 |
| 2367 | 2578 |
// Emits Math.sin via the untagged transcendental cache stub; the
// double argument and result are constrained to xmm1.
void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
| 2374 | 2585 |
| 2375 | 2586 |
| 2376 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { | 2587 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { |
| 2377 switch (instr->op()) { | 2588 switch (instr->op()) { |
| 2378 case kMathAbs: | 2589 case kMathAbs: |
| 2379 DoMathAbs(instr); | 2590 DoMathAbs(instr); |
| 2380 break; | 2591 break; |
| 2381 case kMathFloor: | 2592 case kMathFloor: |
| 2382 DoMathFloor(instr); | 2593 DoMathFloor(instr); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 2400 DoMathLog(instr); | 2611 DoMathLog(instr); |
| 2401 break; | 2612 break; |
| 2402 | 2613 |
| 2403 default: | 2614 default: |
| 2404 UNREACHABLE(); | 2615 UNREACHABLE(); |
| 2405 } | 2616 } |
| 2406 } | 2617 } |
| 2407 | 2618 |
| 2408 | 2619 |
// Emits a keyed call (o[k](...)) through a keyed-call IC stub.
// Fixed-register contract: context in esi, key in ecx, result in eax.
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->key()).is(ecx));
  ASSERT(ToRegister(instr->result()).is(eax));

  int arity = instr->arity();
  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
| 2417 | 2629 |
| 2418 | 2630 |
// Emits a named call (o.f(...)) through a call IC stub.
// Fixed-register contract: context in esi, result in eax; the IC
// expects the property name in ecx, loaded explicitly below.
void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->result()).is(eax));

  int arity = instr->arity();
  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
  __ mov(ecx, instr->name());
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
| 2428 | 2640 |
| 2429 | 2641 |
// Emits a call to a function value through CallFunctionStub.
// Fixed-register contract: context in esi, result in eax.
void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->result()).is(eax));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  // One stack slot is popped after the stub returns — presumably the
  // function operand left on the stack; confirm against CallFunctionStub.
  __ Drop(1);
}
| 2439 | 2651 |
| 2440 | 2652 |
// Emits a call to a global-object property through a call IC, using
// CODE_TARGET_CONTEXT relocation (the IC loads the receiver from the
// context rather than the stack).
// Fixed-register contract: context in esi, result in eax, name in ecx.
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->result()).is(eax));

  int arity = instr->arity();
  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
  __ mov(ecx, instr->name());
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}
| 2450 | 2662 |
| 2451 | 2663 |
| 2452 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 2664 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
| 2453 ASSERT(ToRegister(instr->result()).is(eax)); | 2665 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2454 __ mov(edi, instr->target()); | 2666 __ mov(edi, instr->target()); |
| 2455 CallKnownFunction(instr->target(), instr->arity(), instr); | 2667 CallKnownFunction(instr->target(), instr->arity(), instr); |
| 2456 } | 2668 } |
| 2457 | 2669 |
| 2458 | 2670 |
// Emits a constructor call ('new f(...)') through the JSConstructCall
// builtin. Fixed-register contract: context in esi, constructor in edi,
// result in eax; the builtin takes the argument count in eax.
void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->constructor()).is(edi));
  ASSERT(ToRegister(instr->result()).is(eax));

  Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
  __ Set(eax, Immediate(instr->arity()));
  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
}
| 2467 | 2680 |
| 2468 | 2681 |
// Forwards a runtime call to the common CallRuntime helper.
// NOTE(review): the trailing 'false' flag is forwarded unchanged —
// confirm its meaning against CallRuntime's declaration.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr, false);
}
| 2472 | 2685 |
| 2473 | 2686 |
// Stores a value into a named property at a known field offset,
// optionally writing a new map (a representation transition) first,
// and emitting a write barrier when the stored value may be a pointer.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  int offset = instr->offset();

  if (!instr->transition().is_null()) {
    // Install the transition map before the field store.
    __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ mov(FieldOperand(object, offset), value);
    if (instr->needs_write_barrier()) {
      Register temp = ToRegister(instr->TempAt(0));
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, offset, value, temp, kSaveFPRegs);
    }
  } else {
    // Out-of-object property: indirect through the properties array.
    Register temp = ToRegister(instr->TempAt(0));
    __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(temp, offset), value);
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(temp, offset, value, object, kSaveFPRegs);
    }
  }
}
| 2502 | 2715 |
| 2503 | 2716 |
// Stores a named property through the generic StoreIC stub.
// Fixed-register contract: context in esi, receiver in edx, value in
// eax; the IC expects the property name in ecx, loaded below.
void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->object()).is(edx));
  ASSERT(ToRegister(instr->value()).is(eax));

  __ mov(ecx, instr->name());
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
| 2512 | 2726 |
| 2513 | 2727 |
// Deoptimizes unless index < length. The unsigned condition
// (above_equal) also catches negative int32 indices, which compare
// as large unsigned values.
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
  DeoptimizeIf(above_equal, instr->environment());
}
| 2518 | 2732 |
| 2519 | 2733 |
// Stores a value into a fast-elements backing store at a constant or
// register-held key, emitting a write barrier when needed. When the
// barrier runs, the key register is clobbered (it is overwritten with
// the address of the modified slot).
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // A constant key is only allocated when no barrier is required,
    // since the barrier path below needs the key in a register.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements, offset), value);
  } else {
    __ mov(FieldOperand(elements,
                        key,
                        times_pointer_size,
                        FixedArray::kHeaderSize),
           value);
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    __ lea(key,
           FieldOperand(elements,
                        key,
                        times_pointer_size,
                        FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value, kSaveFPRegs);
  }
}
| 2544 | 2764 |
| 2545 | 2765 |
// Stores a keyed property through the generic KeyedStoreIC stub.
// Fixed-register contract: context in esi, receiver in edx, key in
// ecx, value in eax.
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->object()).is(edx));
  ASSERT(ToRegister(instr->key()).is(ecx));
  ASSERT(ToRegister(instr->value()).is(eax));

  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
| 2554 | 2775 |
| 2555 | 2776 |
// Loads the character code at a (constant or register) index of a
// string. Sequential ASCII and two-byte strings, plus cons strings
// with an empty second part, are handled inline; everything else
// falls through to the deferred runtime path below.
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  // Deferred code object that calls DoDeferredStringCharCodeAt for
  // string representations not handled inline.
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register string = ToRegister(instr->string());
  Register index = no_reg;
  int const_index = -1;
  if (instr->index()->IsConstantOperand()) {
    const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
    if (!Smi::IsValid(const_index)) {
      // Guaranteed to be out of bounds because of the assert above.
      // So the bounds check that must dominate this instruction must
      // have deoptimized already.
      if (FLAG_debug_code) {
        __ Abort("StringCharCodeAt: out of bounds index.");
      }
      // No code needs to be generated.
      return;
    }
  } else {
    index = ToRegister(instr->index());
  }
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  NearLabel flat_string, ascii_string, done;

  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle non-flat strings. Non-cons representations go to the runtime.
  __ test(result, Immediate(kIsConsStringMask));
  __ j(zero, deferred->entry());

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(Factory::empty_string()));
  __ j(not_equal, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(not_zero, deferred->entry());

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  if (instr->index()->IsConstantOperand()) {
    __ movzx_w(result,
               FieldOperand(string,
                            SeqTwoByteString::kHeaderSize + 2 * const_index));
  } else {
    __ movzx_w(result, FieldOperand(string,
                                    index,
                                    times_2,
                                    SeqTwoByteString::kHeaderSize));
  }
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  if (instr->index()->IsConstantOperand()) {
    __ movzx_b(result, FieldOperand(string,
                                    SeqAsciiString::kHeaderSize + const_index));
  } else {
    __ movzx_b(result, FieldOperand(string,
                                    index,
                                    times_1,
                                    SeqAsciiString::kHeaderSize));
  }
  __ bind(&done);
  __ bind(deferred->exit());
}
| 2879 |
| 2880 |
// Slow path for DoStringCharCodeAt: calls Runtime::kStringCharCodeAt
// with the string and a smi-tagged index, untags the smi result, and
// writes it into the safepoint stack slot of the result register so
// it is restored by PopSafepointRegisters.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, Immediate(0));

  __ PushSafepointRegisters();
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ push(Immediate(Smi::FromInt(const_index)));
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  // Restore the context from the frame before calling the runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(eax);
  }
  __ SmiUntag(eax);
  __ mov(Operand(esp, EspIndexForPushAll(result) * kPointerSize), eax);
  __ PopSafepointRegisters();
}
| 2914 |
| 2915 |
| 2916 void LCodeGen::DoStringLength(LStringLength* instr) { |
| 2917 Register string = ToRegister(instr->string()); |
| 2918 Register result = ToRegister(instr->result()); |
| 2919 __ mov(result, FieldOperand(string, String::kLengthOffset)); |
| 2920 } |
| 2921 |
| 2922 |
| 2556 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 2923 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
| 2557 LOperand* input = instr->input(); | 2924 LOperand* input = instr->InputAt(0); |
| 2558 ASSERT(input->IsRegister() || input->IsStackSlot()); | 2925 ASSERT(input->IsRegister() || input->IsStackSlot()); |
| 2559 LOperand* output = instr->result(); | 2926 LOperand* output = instr->result(); |
| 2560 ASSERT(output->IsDoubleRegister()); | 2927 ASSERT(output->IsDoubleRegister()); |
| 2561 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input)); | 2928 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input)); |
| 2562 } | 2929 } |
| 2563 | 2930 |
| 2564 | 2931 |
// Tags an int32 as a smi in place. If tagging overflows (the value
// does not fit in 31 bits), the deferred path allocates a heap number
// instead.
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  // Deferred code object for the heap-number allocation slow path.
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  // The result is constrained to the same register as the input.
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  __ SmiTag(reg);
  __ j(overflow, deferred->entry());
  __ bind(deferred->exit());
}
| 2584 | 2951 |
| 2585 | 2952 |
// Slow path for DoNumberTagI: recovers the original int32 from the
// overflowed smi-tag, allocates a heap number (inline or via the
// runtime) and stores the value into it, writing the result back to
// the safepoint stack slot of the target register.
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  Register tmp = reg.is(eax) ? ecx : eax;

  // Preserve the value of all registers.
  __ PushSafepointRegisters();

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  NearLabel done;
  // Undo the smi tag, then flip bit 31 to restore the original value
  // (the shifted-out sign bit is the complement of bit 30).
  __ SmiUntag(reg);
  __ xor_(reg, 0x80000000);
  __ cvtsi2sd(xmm0, Operand(reg));
  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
    __ jmp(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0));

  // Restore the context from the frame before calling the runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  if (!reg.is(eax)) __ mov(reg, eax);

  // Done. Put the value in xmm0 into the value of the allocated heap
  // number.
  __ bind(&done);
  __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
  __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), reg);
  __ PopSafepointRegisters();
}
| 2626 | 2994 |
| 2627 | 2995 |
| 2628 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { | 2996 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { |
| 2629 class DeferredNumberTagD: public LDeferredCode { | 2997 class DeferredNumberTagD: public LDeferredCode { |
| 2630 public: | 2998 public: |
| 2631 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) | 2999 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) |
| 2632 : LDeferredCode(codegen), instr_(instr) { } | 3000 : LDeferredCode(codegen), instr_(instr) { } |
| 2633 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } | 3001 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } |
| 2634 private: | 3002 private: |
| 2635 LNumberTagD* instr_; | 3003 LNumberTagD* instr_; |
| 2636 }; | 3004 }; |
| 2637 | 3005 |
| 2638 XMMRegister input_reg = ToDoubleRegister(instr->input()); | 3006 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); |
| 2639 Register reg = ToRegister(instr->result()); | 3007 Register reg = ToRegister(instr->result()); |
| 2640 Register tmp = ToRegister(instr->temp()); | 3008 Register tmp = ToRegister(instr->TempAt(0)); |
| 2641 | 3009 |
| 2642 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr); | 3010 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr); |
| 2643 if (FLAG_inline_new) { | 3011 if (FLAG_inline_new) { |
| 2644 __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry()); | 3012 __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry()); |
| 2645 } else { | 3013 } else { |
| 2646 __ jmp(deferred->entry()); | 3014 __ jmp(deferred->entry()); |
| 2647 } | 3015 } |
| 2648 __ bind(deferred->exit()); | 3016 __ bind(deferred->exit()); |
| 2649 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg); | 3017 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg); |
| 2650 } | 3018 } |
| 2651 | 3019 |
| 2652 | 3020 |
| 2653 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 3021 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
| 2654 // TODO(3095996): Get rid of this. For now, we need to make the | 3022 // TODO(3095996): Get rid of this. For now, we need to make the |
| 2655 // result register contain a valid pointer because it is already | 3023 // result register contain a valid pointer because it is already |
| 2656 // contained in the register pointer map. | 3024 // contained in the register pointer map. |
| 2657 Register reg = ToRegister(instr->result()); | 3025 Register reg = ToRegister(instr->result()); |
| 2658 __ Set(reg, Immediate(0)); | 3026 __ Set(reg, Immediate(0)); |
| 2659 | 3027 |
| 2660 __ PushSafepointRegisters(); | 3028 __ PushSafepointRegisters(); |
| 3029 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2661 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 3030 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 2662 RecordSafepointWithRegisters( | 3031 RecordSafepointWithRegisters( |
| 2663 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 3032 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 2664 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), eax); | 3033 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), eax); |
| 2665 __ PopSafepointRegisters(); | 3034 __ PopSafepointRegisters(); |
| 2666 } | 3035 } |
| 2667 | 3036 |
| 2668 | 3037 |
| 2669 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 3038 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 2670 LOperand* input = instr->input(); | 3039 LOperand* input = instr->InputAt(0); |
| 2671 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 3040 ASSERT(input->IsRegister() && input->Equals(instr->result())); |
| 2672 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 3041 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
| 2673 __ SmiTag(ToRegister(input)); | 3042 __ SmiTag(ToRegister(input)); |
| 2674 } | 3043 } |
| 2675 | 3044 |
| 2676 | 3045 |
| 2677 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 3046 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
| 2678 LOperand* input = instr->input(); | 3047 LOperand* input = instr->InputAt(0); |
| 2679 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 3048 ASSERT(input->IsRegister() && input->Equals(instr->result())); |
| 2680 if (instr->needs_check()) { | 3049 if (instr->needs_check()) { |
| 2681 __ test(ToRegister(input), Immediate(kSmiTagMask)); | 3050 __ test(ToRegister(input), Immediate(kSmiTagMask)); |
| 2682 DeoptimizeIf(not_zero, instr->environment()); | 3051 DeoptimizeIf(not_zero, instr->environment()); |
| 2683 } | 3052 } |
| 2684 __ SmiUntag(ToRegister(input)); | 3053 __ SmiUntag(ToRegister(input)); |
| 2685 } | 3054 } |
| 2686 | 3055 |
| 2687 | 3056 |
| 2688 void LCodeGen::EmitNumberUntagD(Register input_reg, | 3057 void LCodeGen::EmitNumberUntagD(Register input_reg, |
| (...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2728 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) | 3097 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) |
| 2729 : LDeferredCode(codegen), instr_(instr) { } | 3098 : LDeferredCode(codegen), instr_(instr) { } |
| 2730 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); } | 3099 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); } |
| 2731 private: | 3100 private: |
| 2732 LTaggedToI* instr_; | 3101 LTaggedToI* instr_; |
| 2733 }; | 3102 }; |
| 2734 | 3103 |
| 2735 | 3104 |
| 2736 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { | 3105 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { |
| 2737 NearLabel done, heap_number; | 3106 NearLabel done, heap_number; |
| 2738 Register input_reg = ToRegister(instr->input()); | 3107 Register input_reg = ToRegister(instr->InputAt(0)); |
| 2739 | 3108 |
| 2740 // Heap number map check. | 3109 // Heap number map check. |
| 2741 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 3110 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 2742 Factory::heap_number_map()); | 3111 Factory::heap_number_map()); |
| 2743 | 3112 |
| 2744 if (instr->truncating()) { | 3113 if (instr->truncating()) { |
| 2745 __ j(equal, &heap_number); | 3114 __ j(equal, &heap_number); |
| 2746 // Check for undefined. Undefined is converted to zero for truncating | 3115 // Check for undefined. Undefined is converted to zero for truncating |
| 2747 // conversions. | 3116 // conversions. |
| 2748 __ cmp(input_reg, Factory::undefined_value()); | 3117 __ cmp(input_reg, Factory::undefined_value()); |
| (...skipping 22 matching lines...) Expand all Loading... |
| 2771 | 3140 |
| 2772 // Reserve space for 64 bit answer. | 3141 // Reserve space for 64 bit answer. |
| 2773 __ bind(&convert); | 3142 __ bind(&convert); |
| 2774 __ sub(Operand(esp), Immediate(kDoubleSize)); | 3143 __ sub(Operand(esp), Immediate(kDoubleSize)); |
| 2775 // Do conversion, which cannot fail because we checked the exponent. | 3144 // Do conversion, which cannot fail because we checked the exponent. |
| 2776 __ fisttp_d(Operand(esp, 0)); | 3145 __ fisttp_d(Operand(esp, 0)); |
| 2777 __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result. | 3146 __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result. |
| 2778 __ add(Operand(esp), Immediate(kDoubleSize)); | 3147 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 2779 } else { | 3148 } else { |
| 2780 NearLabel deopt; | 3149 NearLabel deopt; |
| 2781 XMMRegister xmm_temp = ToDoubleRegister(instr->temp()); | 3150 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0)); |
| 2782 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 3151 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 2783 __ cvttsd2si(input_reg, Operand(xmm0)); | 3152 __ cvttsd2si(input_reg, Operand(xmm0)); |
| 2784 __ cmp(input_reg, 0x80000000u); | 3153 __ cmp(input_reg, 0x80000000u); |
| 2785 __ j(not_equal, &done); | 3154 __ j(not_equal, &done); |
| 2786 // Check if the input was 0x8000000 (kMinInt). | 3155 // Check if the input was 0x8000000 (kMinInt). |
| 2787 // If no, then we got an overflow and we deoptimize. | 3156 // If no, then we got an overflow and we deoptimize. |
| 2788 ExternalReference min_int = ExternalReference::address_of_min_int(); | 3157 ExternalReference min_int = ExternalReference::address_of_min_int(); |
| 2789 __ movdbl(xmm_temp, Operand::StaticVariable(min_int)); | 3158 __ movdbl(xmm_temp, Operand::StaticVariable(min_int)); |
| 2790 __ ucomisd(xmm_temp, xmm0); | 3159 __ ucomisd(xmm_temp, xmm0); |
| 2791 DeoptimizeIf(not_equal, instr->environment()); | 3160 DeoptimizeIf(not_equal, instr->environment()); |
| 2792 DeoptimizeIf(parity_even, instr->environment()); // NaN. | 3161 DeoptimizeIf(parity_even, instr->environment()); // NaN. |
| 2793 } | 3162 } |
| 2794 } else { | 3163 } else { |
| 2795 // Deoptimize if we don't have a heap number. | 3164 // Deoptimize if we don't have a heap number. |
| 2796 DeoptimizeIf(not_equal, instr->environment()); | 3165 DeoptimizeIf(not_equal, instr->environment()); |
| 2797 | 3166 |
| 2798 XMMRegister xmm_temp = ToDoubleRegister(instr->temp()); | 3167 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0)); |
| 2799 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 3168 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 2800 __ cvttsd2si(input_reg, Operand(xmm0)); | 3169 __ cvttsd2si(input_reg, Operand(xmm0)); |
| 2801 __ cvtsi2sd(xmm_temp, Operand(input_reg)); | 3170 __ cvtsi2sd(xmm_temp, Operand(input_reg)); |
| 2802 __ ucomisd(xmm0, xmm_temp); | 3171 __ ucomisd(xmm0, xmm_temp); |
| 2803 DeoptimizeIf(not_equal, instr->environment()); | 3172 DeoptimizeIf(not_equal, instr->environment()); |
| 2804 DeoptimizeIf(parity_even, instr->environment()); // NaN. | 3173 DeoptimizeIf(parity_even, instr->environment()); // NaN. |
| 2805 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3174 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 2806 __ test(input_reg, Operand(input_reg)); | 3175 __ test(input_reg, Operand(input_reg)); |
| 2807 __ j(not_zero, &done); | 3176 __ j(not_zero, &done); |
| 2808 __ movmskpd(input_reg, xmm0); | 3177 __ movmskpd(input_reg, xmm0); |
| 2809 __ and_(input_reg, 1); | 3178 __ and_(input_reg, 1); |
| 2810 DeoptimizeIf(not_zero, instr->environment()); | 3179 DeoptimizeIf(not_zero, instr->environment()); |
| 2811 } | 3180 } |
| 2812 } | 3181 } |
| 2813 __ bind(&done); | 3182 __ bind(&done); |
| 2814 } | 3183 } |
| 2815 | 3184 |
| 2816 | 3185 |
| 2817 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 3186 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
| 2818 LOperand* input = instr->input(); | 3187 LOperand* input = instr->InputAt(0); |
| 2819 ASSERT(input->IsRegister()); | 3188 ASSERT(input->IsRegister()); |
| 2820 ASSERT(input->Equals(instr->result())); | 3189 ASSERT(input->Equals(instr->result())); |
| 2821 | 3190 |
| 2822 Register input_reg = ToRegister(input); | 3191 Register input_reg = ToRegister(input); |
| 2823 | 3192 |
| 2824 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr); | 3193 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr); |
| 2825 | 3194 |
| 2826 // Smi check. | 3195 // Smi check. |
| 2827 __ test(input_reg, Immediate(kSmiTagMask)); | 3196 __ test(input_reg, Immediate(kSmiTagMask)); |
| 2828 __ j(not_zero, deferred->entry()); | 3197 __ j(not_zero, deferred->entry()); |
| 2829 | 3198 |
| 2830 // Smi to int32 conversion | 3199 // Smi to int32 conversion |
| 2831 __ SmiUntag(input_reg); // Untag smi. | 3200 __ SmiUntag(input_reg); // Untag smi. |
| 2832 | 3201 |
| 2833 __ bind(deferred->exit()); | 3202 __ bind(deferred->exit()); |
| 2834 } | 3203 } |
| 2835 | 3204 |
| 2836 | 3205 |
| 2837 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { | 3206 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { |
| 2838 LOperand* input = instr->input(); | 3207 LOperand* input = instr->InputAt(0); |
| 2839 ASSERT(input->IsRegister()); | 3208 ASSERT(input->IsRegister()); |
| 2840 LOperand* result = instr->result(); | 3209 LOperand* result = instr->result(); |
| 2841 ASSERT(result->IsDoubleRegister()); | 3210 ASSERT(result->IsDoubleRegister()); |
| 2842 | 3211 |
| 2843 Register input_reg = ToRegister(input); | 3212 Register input_reg = ToRegister(input); |
| 2844 XMMRegister result_reg = ToDoubleRegister(result); | 3213 XMMRegister result_reg = ToDoubleRegister(result); |
| 2845 | 3214 |
| 2846 EmitNumberUntagD(input_reg, result_reg, instr->environment()); | 3215 EmitNumberUntagD(input_reg, result_reg, instr->environment()); |
| 2847 } | 3216 } |
| 2848 | 3217 |
| 2849 | 3218 |
| 2850 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { | 3219 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { |
| 2851 LOperand* input = instr->input(); | 3220 LOperand* input = instr->InputAt(0); |
| 2852 ASSERT(input->IsDoubleRegister()); | 3221 ASSERT(input->IsDoubleRegister()); |
| 2853 LOperand* result = instr->result(); | 3222 LOperand* result = instr->result(); |
| 2854 ASSERT(result->IsRegister()); | 3223 ASSERT(result->IsRegister()); |
| 2855 | 3224 |
| 2856 XMMRegister input_reg = ToDoubleRegister(input); | 3225 XMMRegister input_reg = ToDoubleRegister(input); |
| 2857 Register result_reg = ToRegister(result); | 3226 Register result_reg = ToRegister(result); |
| 2858 | 3227 |
| 2859 if (instr->truncating()) { | 3228 if (instr->truncating()) { |
| 2860 // Performs a truncating conversion of a floating point number as used by | 3229 // Performs a truncating conversion of a floating point number as used by |
| 2861 // the JS bitwise operations. | 3230 // the JS bitwise operations. |
| (...skipping 16 matching lines...) Expand all Loading... |
| 2878 __ add(Operand(esp), Immediate(kDoubleSize)); | 3247 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 2879 DeoptimizeIf(no_condition, instr->environment()); | 3248 DeoptimizeIf(no_condition, instr->environment()); |
| 2880 __ bind(&convert); | 3249 __ bind(&convert); |
| 2881 // Do conversion, which cannot fail because we checked the exponent. | 3250 // Do conversion, which cannot fail because we checked the exponent. |
| 2882 __ fld_d(Operand(esp, 0)); | 3251 __ fld_d(Operand(esp, 0)); |
| 2883 __ fisttp_d(Operand(esp, 0)); | 3252 __ fisttp_d(Operand(esp, 0)); |
| 2884 __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result. | 3253 __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result. |
| 2885 __ add(Operand(esp), Immediate(kDoubleSize)); | 3254 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 2886 __ bind(&done); | 3255 __ bind(&done); |
| 2887 } else { | 3256 } else { |
| 2888 // This will bail out if the input was not in the int32 range (or, | 3257 NearLabel done; |
| 2889 // unfortunately, if the input was 0x80000000). | 3258 Register temp_reg = ToRegister(instr->TempAt(0)); |
| 2890 DeoptimizeIf(equal, instr->environment()); | 3259 XMMRegister xmm_scratch = xmm0; |
| 3260 |
| 3261 // If cvttsd2si succeeded, we're done. Otherwise, we attempt |
| 3262 // manual conversion. |
| 3263 __ j(not_equal, &done); |
| 3264 |
| 3265 // Get high 32 bits of the input in result_reg and temp_reg. |
| 3266 __ pshufd(xmm_scratch, input_reg, 1); |
| 3267 __ movd(Operand(temp_reg), xmm_scratch); |
| 3268 __ mov(result_reg, temp_reg); |
| 3269 |
| 3270 // Prepare negation mask in temp_reg. |
| 3271 __ sar(temp_reg, kBitsPerInt - 1); |
| 3272 |
| 3273 // Extract the exponent from result_reg and subtract adjusted |
| 3274 // bias from it. The adjustment is selected in a way such that |
| 3275 // when the difference is zero, the answer is in the low 32 bits |
| 3276 // of the input, otherwise a shift has to be performed. |
| 3277 __ shr(result_reg, HeapNumber::kExponentShift); |
| 3278 __ and_(result_reg, |
| 3279 HeapNumber::kExponentMask >> HeapNumber::kExponentShift); |
| 3280 __ sub(Operand(result_reg), |
| 3281 Immediate(HeapNumber::kExponentBias + |
| 3282 HeapNumber::kExponentBits + |
| 3283 HeapNumber::kMantissaBits)); |
| 3284 // Don't handle big (> kMantissaBits + kExponentBits == 63) or |
| 3285 // special exponents. |
| 3286 DeoptimizeIf(greater, instr->environment()); |
| 3287 |
| 3288 // Zero out the sign and the exponent in the input (by shifting |
| 3289 // it to the left) and restore the implicit mantissa bit, |
| 3290 // i.e. convert the input to unsigned int64 shifted left by |
| 3291 // kExponentBits. |
| 3292 ExternalReference minus_zero = ExternalReference::address_of_minus_zero(); |
| 3293 // Minus zero has the most significant bit set and the other |
| 3294 // bits cleared. |
| 3295 __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero)); |
| 3296 __ psllq(input_reg, HeapNumber::kExponentBits); |
| 3297 __ por(input_reg, xmm_scratch); |
| 3298 |
| 3299 // Get the amount to shift the input right in xmm_scratch. |
| 3300 __ neg(result_reg); |
| 3301 __ movd(xmm_scratch, Operand(result_reg)); |
| 3302 |
| 3303 // Shift the input right and extract low 32 bits. |
| 3304 __ psrlq(input_reg, xmm_scratch); |
| 3305 __ movd(Operand(result_reg), input_reg); |
| 3306 |
| 3307 // Use the prepared mask in temp_reg to negate the result if necessary. |
| 3308 __ xor_(result_reg, Operand(temp_reg)); |
| 3309 __ sub(result_reg, Operand(temp_reg)); |
| 3310 __ bind(&done); |
| 2891 } | 3311 } |
| 2892 } else { | 3312 } else { |
| 2893 NearLabel done; | 3313 NearLabel done; |
| 2894 __ cvttsd2si(result_reg, Operand(input_reg)); | 3314 __ cvttsd2si(result_reg, Operand(input_reg)); |
| 2895 __ cvtsi2sd(xmm0, Operand(result_reg)); | 3315 __ cvtsi2sd(xmm0, Operand(result_reg)); |
| 2896 __ ucomisd(xmm0, input_reg); | 3316 __ ucomisd(xmm0, input_reg); |
| 2897 DeoptimizeIf(not_equal, instr->environment()); | 3317 DeoptimizeIf(not_equal, instr->environment()); |
| 2898 DeoptimizeIf(parity_even, instr->environment()); // NaN. | 3318 DeoptimizeIf(parity_even, instr->environment()); // NaN. |
| 2899 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3319 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 2900 // The integer converted back is equal to the original. We | 3320 // The integer converted back is equal to the original. We |
| 2901 // only have to test if we got -0 as an input. | 3321 // only have to test if we got -0 as an input. |
| 2902 __ test(result_reg, Operand(result_reg)); | 3322 __ test(result_reg, Operand(result_reg)); |
| 2903 __ j(not_zero, &done); | 3323 __ j(not_zero, &done); |
| 2904 __ movmskpd(result_reg, input_reg); | 3324 __ movmskpd(result_reg, input_reg); |
| 2905 // Bit 0 contains the sign of the double in input_reg. | 3325 // Bit 0 contains the sign of the double in input_reg. |
| 2906 // If input was positive, we are ok and return 0, otherwise | 3326 // If input was positive, we are ok and return 0, otherwise |
| 2907 // deoptimize. | 3327 // deoptimize. |
| 2908 __ and_(result_reg, 1); | 3328 __ and_(result_reg, 1); |
| 2909 DeoptimizeIf(not_zero, instr->environment()); | 3329 DeoptimizeIf(not_zero, instr->environment()); |
| 2910 } | 3330 } |
| 2911 __ bind(&done); | 3331 __ bind(&done); |
| 2912 } | 3332 } |
| 2913 } | 3333 } |
| 2914 | 3334 |
| 2915 | 3335 |
| 2916 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 3336 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
| 2917 LOperand* input = instr->input(); | 3337 LOperand* input = instr->InputAt(0); |
| 2918 ASSERT(input->IsRegister()); | 3338 ASSERT(input->IsRegister()); |
| 2919 __ test(ToRegister(input), Immediate(kSmiTagMask)); | 3339 __ test(ToRegister(input), Immediate(kSmiTagMask)); |
| 2920 DeoptimizeIf(instr->condition(), instr->environment()); | 3340 DeoptimizeIf(instr->condition(), instr->environment()); |
| 2921 } | 3341 } |
| 2922 | 3342 |
| 2923 | 3343 |
| 2924 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 3344 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
| 2925 Register input = ToRegister(instr->input()); | 3345 Register input = ToRegister(instr->InputAt(0)); |
| 2926 Register temp = ToRegister(instr->temp()); | 3346 Register temp = ToRegister(instr->TempAt(0)); |
| 2927 InstanceType first = instr->hydrogen()->first(); | 3347 InstanceType first = instr->hydrogen()->first(); |
| 2928 InstanceType last = instr->hydrogen()->last(); | 3348 InstanceType last = instr->hydrogen()->last(); |
| 2929 | 3349 |
| 2930 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); | 3350 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); |
| 2931 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), | |
| 2932 static_cast<int8_t>(first)); | |
| 2933 | 3351 |
| 2934 // If there is only one type in the interval check for equality. | 3352 // If there is only one type in the interval check for equality. |
| 2935 if (first == last) { | 3353 if (first == last) { |
| 3354 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), |
| 3355 static_cast<int8_t>(first)); |
| 2936 DeoptimizeIf(not_equal, instr->environment()); | 3356 DeoptimizeIf(not_equal, instr->environment()); |
| 2937 } else { | 3357 } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) { |
| 3358 // String has a dedicated bit in instance type. |
| 3359 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), kIsNotStringMask); |
| 3360 DeoptimizeIf(not_zero, instr->environment()); |
| 3361 } else { |
| 3362 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), |
| 3363 static_cast<int8_t>(first)); |
| 2938 DeoptimizeIf(below, instr->environment()); | 3364 DeoptimizeIf(below, instr->environment()); |
| 2939 // Omit check for the last type. | 3365 // Omit check for the last type. |
| 2940 if (last != LAST_TYPE) { | 3366 if (last != LAST_TYPE) { |
| 2941 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), | 3367 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), |
| 2942 static_cast<int8_t>(last)); | 3368 static_cast<int8_t>(last)); |
| 2943 DeoptimizeIf(above, instr->environment()); | 3369 DeoptimizeIf(above, instr->environment()); |
| 2944 } | 3370 } |
| 2945 } | 3371 } |
| 2946 } | 3372 } |
| 2947 | 3373 |
| 2948 | 3374 |
| 2949 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { | 3375 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { |
| 2950 ASSERT(instr->input()->IsRegister()); | 3376 ASSERT(instr->InputAt(0)->IsRegister()); |
| 2951 Register reg = ToRegister(instr->input()); | 3377 Register reg = ToRegister(instr->InputAt(0)); |
| 2952 __ cmp(reg, instr->hydrogen()->target()); | 3378 __ cmp(reg, instr->hydrogen()->target()); |
| 2953 DeoptimizeIf(not_equal, instr->environment()); | 3379 DeoptimizeIf(not_equal, instr->environment()); |
| 2954 } | 3380 } |
| 2955 | 3381 |
| 2956 | 3382 |
| 2957 void LCodeGen::DoCheckMap(LCheckMap* instr) { | 3383 void LCodeGen::DoCheckMap(LCheckMap* instr) { |
| 2958 LOperand* input = instr->input(); | 3384 LOperand* input = instr->InputAt(0); |
| 2959 ASSERT(input->IsRegister()); | 3385 ASSERT(input->IsRegister()); |
| 2960 Register reg = ToRegister(input); | 3386 Register reg = ToRegister(input); |
| 2961 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 3387 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 2962 instr->hydrogen()->map()); | 3388 instr->hydrogen()->map()); |
| 2963 DeoptimizeIf(not_equal, instr->environment()); | 3389 DeoptimizeIf(not_equal, instr->environment()); |
| 2964 } | 3390 } |
| 2965 | 3391 |
| 2966 | 3392 |
| 2967 void LCodeGen::LoadPrototype(Register result, Handle<JSObject> prototype) { | 3393 void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) { |
| 2968 if (Heap::InNewSpace(*prototype)) { | 3394 if (Heap::InNewSpace(*object)) { |
| 2969 Handle<JSGlobalPropertyCell> cell = | 3395 Handle<JSGlobalPropertyCell> cell = |
| 2970 Factory::NewJSGlobalPropertyCell(prototype); | 3396 Factory::NewJSGlobalPropertyCell(object); |
| 2971 __ mov(result, Operand::Cell(cell)); | 3397 __ mov(result, Operand::Cell(cell)); |
| 2972 } else { | 3398 } else { |
| 2973 __ mov(result, prototype); | 3399 __ mov(result, object); |
| 2974 } | 3400 } |
| 2975 } | 3401 } |
| 2976 | 3402 |
| 2977 | 3403 |
| 2978 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { | 3404 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
| 2979 Register reg = ToRegister(instr->temp()); | 3405 Register reg = ToRegister(instr->TempAt(0)); |
| 2980 | 3406 |
| 2981 Handle<JSObject> holder = instr->holder(); | 3407 Handle<JSObject> holder = instr->holder(); |
| 2982 Handle<Map> receiver_map = instr->receiver_map(); | 3408 Handle<JSObject> current_prototype = instr->prototype(); |
| 2983 Handle<JSObject> current_prototype(JSObject::cast(receiver_map->prototype())); | |
| 2984 | 3409 |
| 2985 // Load prototype object. | 3410 // Load prototype object. |
| 2986 LoadPrototype(reg, current_prototype); | 3411 LoadHeapObject(reg, current_prototype); |
| 2987 | 3412 |
| 2988 // Check prototype maps up to the holder. | 3413 // Check prototype maps up to the holder. |
| 2989 while (!current_prototype.is_identical_to(holder)) { | 3414 while (!current_prototype.is_identical_to(holder)) { |
| 2990 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 3415 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 2991 Handle<Map>(current_prototype->map())); | 3416 Handle<Map>(current_prototype->map())); |
| 2992 DeoptimizeIf(not_equal, instr->environment()); | 3417 DeoptimizeIf(not_equal, instr->environment()); |
| 2993 current_prototype = | 3418 current_prototype = |
| 2994 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); | 3419 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); |
| 2995 // Load next prototype object. | 3420 // Load next prototype object. |
| 2996 LoadPrototype(reg, current_prototype); | 3421 LoadHeapObject(reg, current_prototype); |
| 2997 } | 3422 } |
| 2998 | 3423 |
| 2999 // Check the holder map. | 3424 // Check the holder map. |
| 3000 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 3425 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 3001 Handle<Map>(current_prototype->map())); | 3426 Handle<Map>(current_prototype->map())); |
| 3002 DeoptimizeIf(not_equal, instr->environment()); | 3427 DeoptimizeIf(not_equal, instr->environment()); |
| 3003 } | 3428 } |
| 3004 | 3429 |
| 3005 | 3430 |
| 3006 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { | 3431 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { |
| 3007 // Setup the parameters to the stub/runtime call. | 3432 // Setup the parameters to the stub/runtime call. |
| 3008 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3433 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3009 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); | 3434 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); |
| 3010 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3435 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 3011 __ push(Immediate(instr->hydrogen()->constant_elements())); | 3436 __ push(Immediate(instr->hydrogen()->constant_elements())); |
| 3012 | 3437 |
| 3013 // Pick the right runtime function or stub to call. | 3438 // Pick the right runtime function or stub to call. |
| 3014 int length = instr->hydrogen()->length(); | 3439 int length = instr->hydrogen()->length(); |
| 3015 if (instr->hydrogen()->IsCopyOnWrite()) { | 3440 if (instr->hydrogen()->IsCopyOnWrite()) { |
| 3016 ASSERT(instr->hydrogen()->depth() == 1); | 3441 ASSERT(instr->hydrogen()->depth() == 1); |
| 3017 FastCloneShallowArrayStub::Mode mode = | 3442 FastCloneShallowArrayStub::Mode mode = |
| 3018 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; | 3443 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
| 3019 FastCloneShallowArrayStub stub(mode, length); | 3444 FastCloneShallowArrayStub stub(mode, length); |
| 3020 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3445 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 3021 } else if (instr->hydrogen()->depth() > 1) { | 3446 } else if (instr->hydrogen()->depth() > 1) { |
| 3022 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); | 3447 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false); |
| 3023 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 3448 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
| 3024 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); | 3449 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false); |
| 3025 } else { | 3450 } else { |
| 3026 FastCloneShallowArrayStub::Mode mode = | 3451 FastCloneShallowArrayStub::Mode mode = |
| 3027 FastCloneShallowArrayStub::CLONE_ELEMENTS; | 3452 FastCloneShallowArrayStub::CLONE_ELEMENTS; |
| 3028 FastCloneShallowArrayStub stub(mode, length); | 3453 FastCloneShallowArrayStub stub(mode, length); |
| 3029 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3454 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
| 3030 } | 3455 } |
| 3031 } | 3456 } |
| 3032 | 3457 |
| 3033 | 3458 |
| 3034 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 3459 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
| 3460 ASSERT(ToRegister(instr->context()).is(esi)); |
| 3035 // Setup the parameters to the stub/runtime call. | 3461 // Setup the parameters to the stub/runtime call. |
| 3036 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3462 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3037 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); | 3463 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); |
| 3038 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3464 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 3039 __ push(Immediate(instr->hydrogen()->constant_properties())); | 3465 __ push(Immediate(instr->hydrogen()->constant_properties())); |
| 3040 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0))); | 3466 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0))); |
| 3041 | 3467 |
| 3042 // Pick the right runtime function to call. | 3468 // Pick the right runtime function to call. |
| 3043 if (instr->hydrogen()->depth() > 1) { | 3469 if (instr->hydrogen()->depth() > 1) { |
| 3044 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); | 3470 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 3062 __ mov(ebx, FieldOperand(ecx, literal_offset)); | 3488 __ mov(ebx, FieldOperand(ecx, literal_offset)); |
| 3063 __ cmp(ebx, Factory::undefined_value()); | 3489 __ cmp(ebx, Factory::undefined_value()); |
| 3064 __ j(not_equal, &materialized); | 3490 __ j(not_equal, &materialized); |
| 3065 | 3491 |
| 3066 // Create regexp literal using runtime function | 3492 // Create regexp literal using runtime function |
| 3067 // Result will be in eax. | 3493 // Result will be in eax. |
| 3068 __ push(ecx); | 3494 __ push(ecx); |
| 3069 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3495 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 3070 __ push(Immediate(instr->hydrogen()->pattern())); | 3496 __ push(Immediate(instr->hydrogen()->pattern())); |
| 3071 __ push(Immediate(instr->hydrogen()->flags())); | 3497 __ push(Immediate(instr->hydrogen()->flags())); |
| 3072 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 3498 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false); |
| 3073 __ mov(ebx, eax); | 3499 __ mov(ebx, eax); |
| 3074 | 3500 |
| 3075 __ bind(&materialized); | 3501 __ bind(&materialized); |
| 3076 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 3502 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 3077 Label allocated, runtime_allocate; | 3503 Label allocated, runtime_allocate; |
| 3078 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | 3504 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); |
| 3079 __ jmp(&allocated); | 3505 __ jmp(&allocated); |
| 3080 | 3506 |
| 3081 __ bind(&runtime_allocate); | 3507 __ bind(&runtime_allocate); |
| 3082 __ push(ebx); | 3508 __ push(ebx); |
| 3083 __ push(Immediate(Smi::FromInt(size))); | 3509 __ push(Immediate(Smi::FromInt(size))); |
| 3084 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 3510 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false); |
| 3085 __ pop(ebx); | 3511 __ pop(ebx); |
| 3086 | 3512 |
| 3087 __ bind(&allocated); | 3513 __ bind(&allocated); |
| 3088 // Copy the content into the newly allocated memory. | 3514 // Copy the content into the newly allocated memory. |
| 3089 // (Unroll copy loop once for better throughput). | 3515 // (Unroll copy loop once for better throughput). |
| 3090 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { | 3516 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { |
| 3091 __ mov(edx, FieldOperand(ebx, i)); | 3517 __ mov(edx, FieldOperand(ebx, i)); |
| 3092 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); | 3518 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); |
| 3093 __ mov(FieldOperand(eax, i), edx); | 3519 __ mov(FieldOperand(eax, i), edx); |
| 3094 __ mov(FieldOperand(eax, i + kPointerSize), ecx); | 3520 __ mov(FieldOperand(eax, i + kPointerSize), ecx); |
| 3095 } | 3521 } |
| 3096 if ((size % (2 * kPointerSize)) != 0) { | 3522 if ((size % (2 * kPointerSize)) != 0) { |
| 3097 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); | 3523 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); |
| 3098 __ mov(FieldOperand(eax, size - kPointerSize), edx); | 3524 __ mov(FieldOperand(eax, size - kPointerSize), edx); |
| 3099 } | 3525 } |
| 3100 } | 3526 } |
| 3101 | 3527 |
| 3102 | 3528 |
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Materializes a closure for the given shared function info, leaving the
  // result where CallCode/CallRuntime leave it (eax by calling convention
  // elsewhere in this file -- NOTE(review): confirm against callers).
  //
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (shared_info->num_literals() == 0 && !pretenure) {
    // Fast path: the stub takes only the shared function info on the stack.
    FastNewClosureStub stub;
    __ push(Immediate(shared_info));
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
  } else {
    // Slow path: Runtime::kNewClosure takes (context, shared info,
    // pretenure flag).  The context is loaded from the frame rather than
    // from esi, which may be clobbered at this point.
    __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
    __ push(Immediate(shared_info));
    __ push(Immediate(pretenure
                      ? Factory::true_value()
                      : Factory::false_value()));
    CallRuntime(Runtime::kNewClosure, 3, instr, false);
  }
}
| 3121 | 3547 |
| 3122 | 3548 |
| 3123 void LCodeGen::DoTypeof(LTypeof* instr) { | 3549 void LCodeGen::DoTypeof(LTypeof* instr) { |
| 3124 LOperand* input = instr->input(); | 3550 LOperand* input = instr->InputAt(0); |
| 3125 if (input->IsConstantOperand()) { | 3551 if (input->IsConstantOperand()) { |
| 3126 __ push(ToImmediate(input)); | 3552 __ push(ToImmediate(input)); |
| 3127 } else { | 3553 } else { |
| 3128 __ push(ToOperand(input)); | 3554 __ push(ToOperand(input)); |
| 3129 } | 3555 } |
| 3130 CallRuntime(Runtime::kTypeof, 1, instr); | 3556 CallRuntime(Runtime::kTypeof, 1, instr, false); |
| 3131 } | 3557 } |
| 3132 | 3558 |
| 3133 | 3559 |
| 3134 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { | 3560 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { |
| 3135 Register input = ToRegister(instr->input()); | 3561 Register input = ToRegister(instr->InputAt(0)); |
| 3136 Register result = ToRegister(instr->result()); | 3562 Register result = ToRegister(instr->result()); |
| 3137 Label true_label; | 3563 Label true_label; |
| 3138 Label false_label; | 3564 Label false_label; |
| 3139 NearLabel done; | 3565 NearLabel done; |
| 3140 | 3566 |
| 3141 Condition final_branch_condition = EmitTypeofIs(&true_label, | 3567 Condition final_branch_condition = EmitTypeofIs(&true_label, |
| 3142 &false_label, | 3568 &false_label, |
| 3143 input, | 3569 input, |
| 3144 instr->type_literal()); | 3570 instr->type_literal()); |
| 3145 __ j(final_branch_condition, &true_label); | 3571 __ j(final_branch_condition, &true_label); |
| 3146 __ bind(&false_label); | 3572 __ bind(&false_label); |
| 3147 __ mov(result, Handle<Object>(Heap::false_value())); | 3573 __ mov(result, Factory::false_value()); |
| 3148 __ jmp(&done); | 3574 __ jmp(&done); |
| 3149 | 3575 |
| 3150 __ bind(&true_label); | 3576 __ bind(&true_label); |
| 3151 __ mov(result, Handle<Object>(Heap::true_value())); | 3577 __ mov(result, Factory::true_value()); |
| 3152 | 3578 |
| 3153 __ bind(&done); | 3579 __ bind(&done); |
| 3154 } | 3580 } |
| 3155 | 3581 |
| 3156 | 3582 |
| 3157 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { | 3583 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { |
| 3158 Register input = ToRegister(instr->input()); | 3584 Register input = ToRegister(instr->InputAt(0)); |
| 3159 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 3585 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 3160 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 3586 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 3161 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 3587 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 3162 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 3588 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 3163 | 3589 |
| 3164 Condition final_branch_condition = EmitTypeofIs(true_label, | 3590 Condition final_branch_condition = EmitTypeofIs(true_label, |
| 3165 false_label, | 3591 false_label, |
| 3166 input, | 3592 input, |
| 3167 instr->type_literal()); | 3593 instr->type_literal()); |
| 3168 | 3594 |
| (...skipping 17 matching lines...) Expand all Loading... |
| 3186 __ test(input, Immediate(kSmiTagMask)); | 3612 __ test(input, Immediate(kSmiTagMask)); |
| 3187 __ j(zero, false_label); | 3613 __ j(zero, false_label); |
| 3188 __ mov(input, FieldOperand(input, HeapObject::kMapOffset)); | 3614 __ mov(input, FieldOperand(input, HeapObject::kMapOffset)); |
| 3189 __ test_b(FieldOperand(input, Map::kBitFieldOffset), | 3615 __ test_b(FieldOperand(input, Map::kBitFieldOffset), |
| 3190 1 << Map::kIsUndetectable); | 3616 1 << Map::kIsUndetectable); |
| 3191 __ j(not_zero, false_label); | 3617 __ j(not_zero, false_label); |
| 3192 __ CmpInstanceType(input, FIRST_NONSTRING_TYPE); | 3618 __ CmpInstanceType(input, FIRST_NONSTRING_TYPE); |
| 3193 final_branch_condition = below; | 3619 final_branch_condition = below; |
| 3194 | 3620 |
| 3195 } else if (type_name->Equals(Heap::boolean_symbol())) { | 3621 } else if (type_name->Equals(Heap::boolean_symbol())) { |
| 3196 __ cmp(input, Handle<Object>(Heap::true_value())); | 3622 __ cmp(input, Factory::true_value()); |
| 3197 __ j(equal, true_label); | 3623 __ j(equal, true_label); |
| 3198 __ cmp(input, Handle<Object>(Heap::false_value())); | 3624 __ cmp(input, Factory::false_value()); |
| 3199 final_branch_condition = equal; | 3625 final_branch_condition = equal; |
| 3200 | 3626 |
| 3201 } else if (type_name->Equals(Heap::undefined_symbol())) { | 3627 } else if (type_name->Equals(Heap::undefined_symbol())) { |
| 3202 __ cmp(input, Factory::undefined_value()); | 3628 __ cmp(input, Factory::undefined_value()); |
| 3203 __ j(equal, true_label); | 3629 __ j(equal, true_label); |
| 3204 __ test(input, Immediate(kSmiTagMask)); | 3630 __ test(input, Immediate(kSmiTagMask)); |
| 3205 __ j(zero, false_label); | 3631 __ j(zero, false_label); |
| 3206 // Check for undetectable objects => true. | 3632 // Check for undetectable objects => true. |
| 3207 __ mov(input, FieldOperand(input, HeapObject::kMapOffset)); | 3633 __ mov(input, FieldOperand(input, HeapObject::kMapOffset)); |
| 3208 __ test_b(FieldOperand(input, Map::kBitFieldOffset), | 3634 __ test_b(FieldOperand(input, Map::kBitFieldOffset), |
| (...skipping 30 matching lines...) Expand all Loading... |
| 3239 } else { | 3665 } else { |
| 3240 final_branch_condition = not_equal; | 3666 final_branch_condition = not_equal; |
| 3241 __ jmp(false_label); | 3667 __ jmp(false_label); |
| 3242 // A dead branch instruction will be generated after this point. | 3668 // A dead branch instruction will be generated after this point. |
| 3243 } | 3669 } |
| 3244 | 3670 |
| 3245 return final_branch_condition; | 3671 return final_branch_condition; |
| 3246 } | 3672 } |
| 3247 | 3673 |
| 3248 | 3674 |
| 3675 void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) { |
| 3676 Register result = ToRegister(instr->result()); |
| 3677 NearLabel true_label; |
| 3678 NearLabel false_label; |
| 3679 NearLabel done; |
| 3680 |
| 3681 EmitIsConstructCall(result); |
| 3682 __ j(equal, &true_label); |
| 3683 |
| 3684 __ mov(result, Factory::false_value()); |
| 3685 __ jmp(&done); |
| 3686 |
| 3687 __ bind(&true_label); |
| 3688 __ mov(result, Factory::true_value()); |
| 3689 |
| 3690 __ bind(&done); |
| 3691 } |
| 3692 |
| 3693 |
| 3694 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { |
| 3695 Register temp = ToRegister(instr->TempAt(0)); |
| 3696 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 3697 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 3698 |
| 3699 EmitIsConstructCall(temp); |
| 3700 EmitBranch(true_block, false_block, equal); |
| 3701 } |
| 3702 |
| 3703 |
void LCodeGen::EmitIsConstructCall(Register temp) {
  // Sets the condition flags so that 'equal' holds iff the frame that
  // called the current function is a construct frame.  Clobbers |temp|.
  //
  // Get the frame pointer for the calling frame.
  __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists: an adaptor frame stores
  // the ARGUMENTS_ADAPTOR sentinel in its context slot, in which case the
  // real caller is one frame further up.
  NearLabel check_frame_marker;
  __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame; the caller reads the resulting
  // flags (equal <=> CONSTRUCT).
  __ bind(&check_frame_marker);
  __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
}
| 3720 |
| 3721 |
| 3249 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 3722 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 3250 // No code for lazy bailout instruction. Used to capture environment after a | 3723 // No code for lazy bailout instruction. Used to capture environment after a |
| 3251 // call for populating the safepoint data with deoptimization data. | 3724 // call for populating the safepoint data with deoptimization data. |
| 3252 } | 3725 } |
| 3253 | 3726 |
| 3254 | 3727 |
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  // Unconditionally bail out of the optimized code using the instruction's
  // recorded environment.
  DeoptimizeIf(no_condition, instr->environment());
}
| 3258 | 3731 |
| 3259 | 3732 |
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  // Implements 'delete obj[key]' by invoking the DELETE builtin with
  // (object, key, strict-mode flag) on the stack.
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  __ push(ToOperand(obj));
  if (key->IsConstantOperand()) {
    __ push(ToImmediate(key));
  } else {
    __ push(ToOperand(key));
  }
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  // Create a safepoint generator that will also ensure enough space in the
  // reloc info for patching in deoptimization (since this is invoking a
  // builtin).
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index(),
                                         true);
  // Reload the context register from the frame before the builtin call and
  // push the strict-mode flag as the builtin's extra argument.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ push(Immediate(Smi::FromInt(strict_mode_flag())));
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
}
| 3275 | 3758 |
| 3276 | 3759 |
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check: compare esp against the global stack
  // limit and skip the stub call while there is still headroom.
  NearLabel done;
  ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &done);

  // esp is below the limit: call the stack check stub.  NOTE(review):
  // presumably this services both genuine overflow and interrupt requests
  // signalled via a lowered limit -- confirm against StackCheckStub.
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
  __ bind(&done);
}
| 3288 | 3771 |
| 3289 | 3772 |
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset (the on-stack-replacement entry point); it emits no code itself.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  // Only one OSR entry is expected per compilation (osr_pc_offset_ starts
  // out as -1).
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
| 3305 | 3788 |
| 3306 | 3789 |
| 3307 #undef __ | 3790 #undef __ |
| 3308 | 3791 |
| 3309 } } // namespace v8::internal | 3792 } } // namespace v8::internal |
| 3793 |
| 3794 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |