| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X87 | 7 #if V8_TARGET_ARCH_X87 |
| 8 | 8 |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 29 matching lines...) Expand all Loading... |
| 40 LCodeGen* codegen_; | 40 LCodeGen* codegen_; |
| 41 LPointerMap* pointers_; | 41 LPointerMap* pointers_; |
| 42 Safepoint::DeoptMode deopt_mode_; | 42 Safepoint::DeoptMode deopt_mode_; |
| 43 }; | 43 }; |
| 44 | 44 |
| 45 | 45 |
| 46 #define __ masm()-> | 46 #define __ masm()-> |
| 47 | 47 |
| 48 bool LCodeGen::GenerateCode() { | 48 bool LCodeGen::GenerateCode() { |
| 49 LPhase phase("Z_Code generation", chunk()); | 49 LPhase phase("Z_Code generation", chunk()); |
| 50 ASSERT(is_unused()); | 50 DCHECK(is_unused()); |
| 51 status_ = GENERATING; | 51 status_ = GENERATING; |
| 52 | 52 |
| 53 // Open a frame scope to indicate that there is a frame on the stack. The | 53 // Open a frame scope to indicate that there is a frame on the stack. The |
| 54 // MANUAL indicates that the scope shouldn't actually generate code to set up | 54 // MANUAL indicates that the scope shouldn't actually generate code to set up |
| 55 // the frame (that is done in GeneratePrologue). | 55 // the frame (that is done in GeneratePrologue). |
| 56 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 56 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
| 57 | 57 |
| 58 support_aligned_spilled_doubles_ = info()->IsOptimizing(); | 58 support_aligned_spilled_doubles_ = info()->IsOptimizing(); |
| 59 | 59 |
| 60 dynamic_frame_alignment_ = info()->IsOptimizing() && | 60 dynamic_frame_alignment_ = info()->IsOptimizing() && |
| 61 ((chunk()->num_double_slots() > 2 && | 61 ((chunk()->num_double_slots() > 2 && |
| 62 !chunk()->graph()->is_recursive()) || | 62 !chunk()->graph()->is_recursive()) || |
| 63 !info()->osr_ast_id().IsNone()); | 63 !info()->osr_ast_id().IsNone()); |
| 64 | 64 |
| 65 return GeneratePrologue() && | 65 return GeneratePrologue() && |
| 66 GenerateBody() && | 66 GenerateBody() && |
| 67 GenerateDeferredCode() && | 67 GenerateDeferredCode() && |
| 68 GenerateJumpTable() && | 68 GenerateJumpTable() && |
| 69 GenerateSafepointTable(); | 69 GenerateSafepointTable(); |
| 70 } | 70 } |
| 71 | 71 |
| 72 | 72 |
| 73 void LCodeGen::FinishCode(Handle<Code> code) { | 73 void LCodeGen::FinishCode(Handle<Code> code) { |
| 74 ASSERT(is_done()); | 74 DCHECK(is_done()); |
| 75 code->set_stack_slots(GetStackSlotCount()); | 75 code->set_stack_slots(GetStackSlotCount()); |
| 76 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 76 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 77 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); | 77 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); |
| 78 PopulateDeoptimizationData(code); | 78 PopulateDeoptimizationData(code); |
| 79 if (!info()->IsStub()) { | 79 if (!info()->IsStub()) { |
| 80 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); | 80 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); |
| 81 } | 81 } |
| 82 } | 82 } |
| 83 | 83 |
| 84 | 84 |
| 85 #ifdef _MSC_VER | 85 #ifdef _MSC_VER |
| 86 void LCodeGen::MakeSureStackPagesMapped(int offset) { | 86 void LCodeGen::MakeSureStackPagesMapped(int offset) { |
| 87 const int kPageSize = 4 * KB; | 87 const int kPageSize = 4 * KB; |
| 88 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { | 88 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { |
| 89 __ mov(Operand(esp, offset), eax); | 89 __ mov(Operand(esp, offset), eax); |
| 90 } | 90 } |
| 91 } | 91 } |
| 92 #endif | 92 #endif |
| 93 | 93 |
| 94 | 94 |
| 95 bool LCodeGen::GeneratePrologue() { | 95 bool LCodeGen::GeneratePrologue() { |
| 96 ASSERT(is_generating()); | 96 DCHECK(is_generating()); |
| 97 | 97 |
| 98 if (info()->IsOptimizing()) { | 98 if (info()->IsOptimizing()) { |
| 99 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 99 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
| 100 | 100 |
| 101 #ifdef DEBUG | 101 #ifdef DEBUG |
| 102 if (strlen(FLAG_stop_at) > 0 && | 102 if (strlen(FLAG_stop_at) > 0 && |
| 103 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { | 103 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { |
| 104 __ int3(); | 104 __ int3(); |
| 105 } | 105 } |
| 106 #endif | 106 #endif |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 148 __ add(Operand(ebx), Immediate(kPointerSize)); | 148 __ add(Operand(ebx), Immediate(kPointerSize)); |
| 149 __ dec(ecx); | 149 __ dec(ecx); |
| 150 __ j(not_zero, &align_loop, Label::kNear); | 150 __ j(not_zero, &align_loop, Label::kNear); |
| 151 __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue)); | 151 __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue)); |
| 152 __ bind(&do_not_pad); | 152 __ bind(&do_not_pad); |
| 153 } | 153 } |
| 154 } | 154 } |
| 155 | 155 |
| 156 info()->set_prologue_offset(masm_->pc_offset()); | 156 info()->set_prologue_offset(masm_->pc_offset()); |
| 157 if (NeedsEagerFrame()) { | 157 if (NeedsEagerFrame()) { |
| 158 ASSERT(!frame_is_built_); | 158 DCHECK(!frame_is_built_); |
| 159 frame_is_built_ = true; | 159 frame_is_built_ = true; |
| 160 if (info()->IsStub()) { | 160 if (info()->IsStub()) { |
| 161 __ StubPrologue(); | 161 __ StubPrologue(); |
| 162 } else { | 162 } else { |
| 163 __ Prologue(info()->IsCodePreAgingActive()); | 163 __ Prologue(info()->IsCodePreAgingActive()); |
| 164 } | 164 } |
| 165 info()->AddNoFrameRange(0, masm_->pc_offset()); | 165 info()->AddNoFrameRange(0, masm_->pc_offset()); |
| 166 } | 166 } |
| 167 | 167 |
| 168 if (info()->IsOptimizing() && | 168 if (info()->IsOptimizing() && |
| 169 dynamic_frame_alignment_ && | 169 dynamic_frame_alignment_ && |
| 170 FLAG_debug_code) { | 170 FLAG_debug_code) { |
| 171 __ test(esp, Immediate(kPointerSize)); | 171 __ test(esp, Immediate(kPointerSize)); |
| 172 __ Assert(zero, kFrameIsExpectedToBeAligned); | 172 __ Assert(zero, kFrameIsExpectedToBeAligned); |
| 173 } | 173 } |
| 174 | 174 |
| 175 // Reserve space for the stack slots needed by the code. | 175 // Reserve space for the stack slots needed by the code. |
| 176 int slots = GetStackSlotCount(); | 176 int slots = GetStackSlotCount(); |
| 177 ASSERT(slots != 0 || !info()->IsOptimizing()); | 177 DCHECK(slots != 0 || !info()->IsOptimizing()); |
| 178 if (slots > 0) { | 178 if (slots > 0) { |
| 179 if (slots == 1) { | 179 if (slots == 1) { |
| 180 if (dynamic_frame_alignment_) { | 180 if (dynamic_frame_alignment_) { |
| 181 __ push(edx); | 181 __ push(edx); |
| 182 } else { | 182 } else { |
| 183 __ push(Immediate(kNoAlignmentPadding)); | 183 __ push(Immediate(kNoAlignmentPadding)); |
| 184 } | 184 } |
| 185 } else { | 185 } else { |
| 186 if (FLAG_debug_code) { | 186 if (FLAG_debug_code) { |
| 187 __ sub(Operand(esp), Immediate(slots * kPointerSize)); | 187 __ sub(Operand(esp), Immediate(slots * kPointerSize)); |
| (...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 316 // Save the first local, which is overwritten by the alignment state. | 316 // Save the first local, which is overwritten by the alignment state. |
| 317 Operand alignment_loc = MemOperand(ebp, -3 * kPointerSize); | 317 Operand alignment_loc = MemOperand(ebp, -3 * kPointerSize); |
| 318 __ push(alignment_loc); | 318 __ push(alignment_loc); |
| 319 | 319 |
| 320 // Set the dynamic frame alignment state. | 320 // Set the dynamic frame alignment state. |
| 321 __ mov(alignment_loc, edx); | 321 __ mov(alignment_loc, edx); |
| 322 | 322 |
| 323 // Adjust the frame size, subsuming the unoptimized frame into the | 323 // Adjust the frame size, subsuming the unoptimized frame into the |
| 324 // optimized frame. | 324 // optimized frame. |
| 325 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 325 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); |
| 326 ASSERT(slots >= 1); | 326 DCHECK(slots >= 1); |
| 327 __ sub(esp, Immediate((slots - 1) * kPointerSize)); | 327 __ sub(esp, Immediate((slots - 1) * kPointerSize)); |
| 328 } | 328 } |
| 329 | 329 |
| 330 | 330 |
| 331 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 331 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { |
| 332 if (instr->IsCall()) { | 332 if (instr->IsCall()) { |
| 333 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 333 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 334 } | 334 } |
| 335 if (!instr->IsLazyBailout() && !instr->IsGap()) { | 335 if (!instr->IsLazyBailout() && !instr->IsGap()) { |
| 336 safepoints_.BumpLastLazySafepointIndex(); | 336 safepoints_.BumpLastLazySafepointIndex(); |
| 337 } | 337 } |
| 338 FlushX87StackIfNecessary(instr); | 338 FlushX87StackIfNecessary(instr); |
| 339 } | 339 } |
| 340 | 340 |
| 341 | 341 |
| 342 void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) { | 342 void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) { |
| 343 if (instr->IsGoto()) { | 343 if (instr->IsGoto()) { |
| 344 x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr)); | 344 x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr)); |
| 345 } else if (FLAG_debug_code && FLAG_enable_slow_asserts && | 345 } else if (FLAG_debug_code && FLAG_enable_slow_asserts && |
| 346 !instr->IsGap() && !instr->IsReturn()) { | 346 !instr->IsGap() && !instr->IsReturn()) { |
| 347 if (instr->ClobbersDoubleRegisters(isolate())) { | 347 if (instr->ClobbersDoubleRegisters(isolate())) { |
| 348 if (instr->HasDoubleRegisterResult()) { | 348 if (instr->HasDoubleRegisterResult()) { |
| 349 ASSERT_EQ(1, x87_stack_.depth()); | 349 DCHECK_EQ(1, x87_stack_.depth()); |
| 350 } else { | 350 } else { |
| 351 ASSERT_EQ(0, x87_stack_.depth()); | 351 DCHECK_EQ(0, x87_stack_.depth()); |
| 352 } | 352 } |
| 353 } | 353 } |
| 354 __ VerifyX87StackDepth(x87_stack_.depth()); | 354 __ VerifyX87StackDepth(x87_stack_.depth()); |
| 355 } | 355 } |
| 356 } | 356 } |
| 357 | 357 |
| 358 | 358 |
| 359 bool LCodeGen::GenerateJumpTable() { | 359 bool LCodeGen::GenerateJumpTable() { |
| 360 Label needs_frame; | 360 Label needs_frame; |
| 361 if (jump_table_.length() > 0) { | 361 if (jump_table_.length() > 0) { |
| 362 Comment(";;; -------------------- Jump table --------------------"); | 362 Comment(";;; -------------------- Jump table --------------------"); |
| 363 } | 363 } |
| 364 for (int i = 0; i < jump_table_.length(); i++) { | 364 for (int i = 0; i < jump_table_.length(); i++) { |
| 365 __ bind(&jump_table_[i].label); | 365 __ bind(&jump_table_[i].label); |
| 366 Address entry = jump_table_[i].address; | 366 Address entry = jump_table_[i].address; |
| 367 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; | 367 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; |
| 368 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 368 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
| 369 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 369 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
| 370 Comment(";;; jump table entry %d.", i); | 370 Comment(";;; jump table entry %d.", i); |
| 371 } else { | 371 } else { |
| 372 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 372 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 373 } | 373 } |
| 374 if (jump_table_[i].needs_frame) { | 374 if (jump_table_[i].needs_frame) { |
| 375 ASSERT(!info()->saves_caller_doubles()); | 375 DCHECK(!info()->saves_caller_doubles()); |
| 376 __ push(Immediate(ExternalReference::ForDeoptEntry(entry))); | 376 __ push(Immediate(ExternalReference::ForDeoptEntry(entry))); |
| 377 if (needs_frame.is_bound()) { | 377 if (needs_frame.is_bound()) { |
| 378 __ jmp(&needs_frame); | 378 __ jmp(&needs_frame); |
| 379 } else { | 379 } else { |
| 380 __ bind(&needs_frame); | 380 __ bind(&needs_frame); |
| 381 __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset)); | 381 __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset)); |
| 382 // This variant of deopt can only be used with stubs. Since we don't | 382 // This variant of deopt can only be used with stubs. Since we don't |
| 383 // have a function pointer to install in the stack frame that we're | 383 // have a function pointer to install in the stack frame that we're |
| 384 // building, install a special marker there instead. | 384 // building, install a special marker there instead. |
| 385 ASSERT(info()->IsStub()); | 385 DCHECK(info()->IsStub()); |
| 386 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); | 386 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); |
| 387 // Push a PC inside the function so that the deopt code can find where | 387 // Push a PC inside the function so that the deopt code can find where |
| 388 // the deopt comes from. It doesn't have to be the precise return | 388 // the deopt comes from. It doesn't have to be the precise return |
| 389 // address of a "calling" LAZY deopt, it only has to be somewhere | 389 // address of a "calling" LAZY deopt, it only has to be somewhere |
| 390 // inside the code body. | 390 // inside the code body. |
| 391 Label push_approx_pc; | 391 Label push_approx_pc; |
| 392 __ call(&push_approx_pc); | 392 __ call(&push_approx_pc); |
| 393 __ bind(&push_approx_pc); | 393 __ bind(&push_approx_pc); |
| 394 // Push the continuation which was stashed were the ebp should | 394 // Push the continuation which was stashed were the ebp should |
| 395 // be. Replace it with the saved ebp. | 395 // be. Replace it with the saved ebp. |
| 396 __ push(MemOperand(esp, 3 * kPointerSize)); | 396 __ push(MemOperand(esp, 3 * kPointerSize)); |
| 397 __ mov(MemOperand(esp, 4 * kPointerSize), ebp); | 397 __ mov(MemOperand(esp, 4 * kPointerSize), ebp); |
| 398 __ lea(ebp, MemOperand(esp, 4 * kPointerSize)); | 398 __ lea(ebp, MemOperand(esp, 4 * kPointerSize)); |
| 399 __ ret(0); // Call the continuation without clobbering registers. | 399 __ ret(0); // Call the continuation without clobbering registers. |
| 400 } | 400 } |
| 401 } else { | 401 } else { |
| 402 __ call(entry, RelocInfo::RUNTIME_ENTRY); | 402 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
| 403 } | 403 } |
| 404 } | 404 } |
| 405 return !is_aborted(); | 405 return !is_aborted(); |
| 406 } | 406 } |
| 407 | 407 |
| 408 | 408 |
| 409 bool LCodeGen::GenerateDeferredCode() { | 409 bool LCodeGen::GenerateDeferredCode() { |
| 410 ASSERT(is_generating()); | 410 DCHECK(is_generating()); |
| 411 if (deferred_.length() > 0) { | 411 if (deferred_.length() > 0) { |
| 412 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 412 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
| 413 LDeferredCode* code = deferred_[i]; | 413 LDeferredCode* code = deferred_[i]; |
| 414 X87Stack copy(code->x87_stack()); | 414 X87Stack copy(code->x87_stack()); |
| 415 x87_stack_ = copy; | 415 x87_stack_ = copy; |
| 416 | 416 |
| 417 HValue* value = | 417 HValue* value = |
| 418 instructions_->at(code->instruction_index())->hydrogen_value(); | 418 instructions_->at(code->instruction_index())->hydrogen_value(); |
| 419 RecordAndWritePosition( | 419 RecordAndWritePosition( |
| 420 chunk()->graph()->SourcePositionToScriptPosition(value->position())); | 420 chunk()->graph()->SourcePositionToScriptPosition(value->position())); |
| 421 | 421 |
| 422 Comment(";;; <@%d,#%d> " | 422 Comment(";;; <@%d,#%d> " |
| 423 "-------------------- Deferred %s --------------------", | 423 "-------------------- Deferred %s --------------------", |
| 424 code->instruction_index(), | 424 code->instruction_index(), |
| 425 code->instr()->hydrogen_value()->id(), | 425 code->instr()->hydrogen_value()->id(), |
| 426 code->instr()->Mnemonic()); | 426 code->instr()->Mnemonic()); |
| 427 __ bind(code->entry()); | 427 __ bind(code->entry()); |
| 428 if (NeedsDeferredFrame()) { | 428 if (NeedsDeferredFrame()) { |
| 429 Comment(";;; Build frame"); | 429 Comment(";;; Build frame"); |
| 430 ASSERT(!frame_is_built_); | 430 DCHECK(!frame_is_built_); |
| 431 ASSERT(info()->IsStub()); | 431 DCHECK(info()->IsStub()); |
| 432 frame_is_built_ = true; | 432 frame_is_built_ = true; |
| 433 // Build the frame in such a way that esi isn't trashed. | 433 // Build the frame in such a way that esi isn't trashed. |
| 434 __ push(ebp); // Caller's frame pointer. | 434 __ push(ebp); // Caller's frame pointer. |
| 435 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); | 435 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 436 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); | 436 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); |
| 437 __ lea(ebp, Operand(esp, 2 * kPointerSize)); | 437 __ lea(ebp, Operand(esp, 2 * kPointerSize)); |
| 438 Comment(";;; Deferred code"); | 438 Comment(";;; Deferred code"); |
| 439 } | 439 } |
| 440 code->Generate(); | 440 code->Generate(); |
| 441 if (NeedsDeferredFrame()) { | 441 if (NeedsDeferredFrame()) { |
| 442 __ bind(code->done()); | 442 __ bind(code->done()); |
| 443 Comment(";;; Destroy frame"); | 443 Comment(";;; Destroy frame"); |
| 444 ASSERT(frame_is_built_); | 444 DCHECK(frame_is_built_); |
| 445 frame_is_built_ = false; | 445 frame_is_built_ = false; |
| 446 __ mov(esp, ebp); | 446 __ mov(esp, ebp); |
| 447 __ pop(ebp); | 447 __ pop(ebp); |
| 448 } | 448 } |
| 449 __ jmp(code->exit()); | 449 __ jmp(code->exit()); |
| 450 } | 450 } |
| 451 } | 451 } |
| 452 | 452 |
| 453 // Deferred code is the last part of the instruction sequence. Mark | 453 // Deferred code is the last part of the instruction sequence. Mark |
| 454 // the generated code as done unless we bailed out. | 454 // the generated code as done unless we bailed out. |
| 455 if (!is_aborted()) status_ = DONE; | 455 if (!is_aborted()) status_ = DONE; |
| 456 return !is_aborted(); | 456 return !is_aborted(); |
| 457 } | 457 } |
| 458 | 458 |
| 459 | 459 |
| 460 bool LCodeGen::GenerateSafepointTable() { | 460 bool LCodeGen::GenerateSafepointTable() { |
| 461 ASSERT(is_done()); | 461 DCHECK(is_done()); |
| 462 if (!info()->IsStub()) { | 462 if (!info()->IsStub()) { |
| 463 // For lazy deoptimization we need space to patch a call after every call. | 463 // For lazy deoptimization we need space to patch a call after every call. |
| 464 // Ensure there is always space for such patching, even if the code ends | 464 // Ensure there is always space for such patching, even if the code ends |
| 465 // in a call. | 465 // in a call. |
| 466 int target_offset = masm()->pc_offset() + Deoptimizer::patch_size(); | 466 int target_offset = masm()->pc_offset() + Deoptimizer::patch_size(); |
| 467 while (masm()->pc_offset() < target_offset) { | 467 while (masm()->pc_offset() < target_offset) { |
| 468 masm()->nop(); | 468 masm()->nop(); |
| 469 } | 469 } |
| 470 } | 470 } |
| 471 safepoints_.Emit(masm(), GetStackSlotCount()); | 471 safepoints_.Emit(masm(), GetStackSlotCount()); |
| 472 return !is_aborted(); | 472 return !is_aborted(); |
| 473 } | 473 } |
| 474 | 474 |
| 475 | 475 |
| 476 Register LCodeGen::ToRegister(int index) const { | 476 Register LCodeGen::ToRegister(int index) const { |
| 477 return Register::FromAllocationIndex(index); | 477 return Register::FromAllocationIndex(index); |
| 478 } | 478 } |
| 479 | 479 |
| 480 | 480 |
| 481 X87Register LCodeGen::ToX87Register(int index) const { | 481 X87Register LCodeGen::ToX87Register(int index) const { |
| 482 return X87Register::FromAllocationIndex(index); | 482 return X87Register::FromAllocationIndex(index); |
| 483 } | 483 } |
| 484 | 484 |
| 485 | 485 |
| 486 void LCodeGen::X87LoadForUsage(X87Register reg) { | 486 void LCodeGen::X87LoadForUsage(X87Register reg) { |
| 487 ASSERT(x87_stack_.Contains(reg)); | 487 DCHECK(x87_stack_.Contains(reg)); |
| 488 x87_stack_.Fxch(reg); | 488 x87_stack_.Fxch(reg); |
| 489 x87_stack_.pop(); | 489 x87_stack_.pop(); |
| 490 } | 490 } |
| 491 | 491 |
| 492 | 492 |
| 493 void LCodeGen::X87LoadForUsage(X87Register reg1, X87Register reg2) { | 493 void LCodeGen::X87LoadForUsage(X87Register reg1, X87Register reg2) { |
| 494 ASSERT(x87_stack_.Contains(reg1)); | 494 DCHECK(x87_stack_.Contains(reg1)); |
| 495 ASSERT(x87_stack_.Contains(reg2)); | 495 DCHECK(x87_stack_.Contains(reg2)); |
| 496 x87_stack_.Fxch(reg1, 1); | 496 x87_stack_.Fxch(reg1, 1); |
| 497 x87_stack_.Fxch(reg2); | 497 x87_stack_.Fxch(reg2); |
| 498 x87_stack_.pop(); | 498 x87_stack_.pop(); |
| 499 x87_stack_.pop(); | 499 x87_stack_.pop(); |
| 500 } | 500 } |
| 501 | 501 |
| 502 | 502 |
| 503 void LCodeGen::X87Stack::Fxch(X87Register reg, int other_slot) { | 503 void LCodeGen::X87Stack::Fxch(X87Register reg, int other_slot) { |
| 504 ASSERT(is_mutable_); | 504 DCHECK(is_mutable_); |
| 505 ASSERT(Contains(reg) && stack_depth_ > other_slot); | 505 DCHECK(Contains(reg) && stack_depth_ > other_slot); |
| 506 int i = ArrayIndex(reg); | 506 int i = ArrayIndex(reg); |
| 507 int st = st2idx(i); | 507 int st = st2idx(i); |
| 508 if (st != other_slot) { | 508 if (st != other_slot) { |
| 509 int other_i = st2idx(other_slot); | 509 int other_i = st2idx(other_slot); |
| 510 X87Register other = stack_[other_i]; | 510 X87Register other = stack_[other_i]; |
| 511 stack_[other_i] = reg; | 511 stack_[other_i] = reg; |
| 512 stack_[i] = other; | 512 stack_[i] = other; |
| 513 if (st == 0) { | 513 if (st == 0) { |
| 514 __ fxch(other_slot); | 514 __ fxch(other_slot); |
| 515 } else if (other_slot == 0) { | 515 } else if (other_slot == 0) { |
| (...skipping 23 matching lines...) Expand all Loading... |
| 539 | 539 |
| 540 bool LCodeGen::X87Stack::Contains(X87Register reg) { | 540 bool LCodeGen::X87Stack::Contains(X87Register reg) { |
| 541 for (int i = 0; i < stack_depth_; i++) { | 541 for (int i = 0; i < stack_depth_; i++) { |
| 542 if (stack_[i].is(reg)) return true; | 542 if (stack_[i].is(reg)) return true; |
| 543 } | 543 } |
| 544 return false; | 544 return false; |
| 545 } | 545 } |
| 546 | 546 |
| 547 | 547 |
| 548 void LCodeGen::X87Stack::Free(X87Register reg) { | 548 void LCodeGen::X87Stack::Free(X87Register reg) { |
| 549 ASSERT(is_mutable_); | 549 DCHECK(is_mutable_); |
| 550 ASSERT(Contains(reg)); | 550 DCHECK(Contains(reg)); |
| 551 int i = ArrayIndex(reg); | 551 int i = ArrayIndex(reg); |
| 552 int st = st2idx(i); | 552 int st = st2idx(i); |
| 553 if (st > 0) { | 553 if (st > 0) { |
| 554 // keep track of how fstp(i) changes the order of elements | 554 // keep track of how fstp(i) changes the order of elements |
| 555 int tos_i = st2idx(0); | 555 int tos_i = st2idx(0); |
| 556 stack_[i] = stack_[tos_i]; | 556 stack_[i] = stack_[tos_i]; |
| 557 } | 557 } |
| 558 pop(); | 558 pop(); |
| 559 __ fstp(st); | 559 __ fstp(st); |
| 560 } | 560 } |
| 561 | 561 |
| 562 | 562 |
| 563 void LCodeGen::X87Mov(X87Register dst, Operand src, X87OperandType opts) { | 563 void LCodeGen::X87Mov(X87Register dst, Operand src, X87OperandType opts) { |
| 564 if (x87_stack_.Contains(dst)) { | 564 if (x87_stack_.Contains(dst)) { |
| 565 x87_stack_.Fxch(dst); | 565 x87_stack_.Fxch(dst); |
| 566 __ fstp(0); | 566 __ fstp(0); |
| 567 } else { | 567 } else { |
| 568 x87_stack_.push(dst); | 568 x87_stack_.push(dst); |
| 569 } | 569 } |
| 570 X87Fld(src, opts); | 570 X87Fld(src, opts); |
| 571 } | 571 } |
| 572 | 572 |
| 573 | 573 |
| 574 void LCodeGen::X87Fld(Operand src, X87OperandType opts) { | 574 void LCodeGen::X87Fld(Operand src, X87OperandType opts) { |
| 575 ASSERT(!src.is_reg_only()); | 575 DCHECK(!src.is_reg_only()); |
| 576 switch (opts) { | 576 switch (opts) { |
| 577 case kX87DoubleOperand: | 577 case kX87DoubleOperand: |
| 578 __ fld_d(src); | 578 __ fld_d(src); |
| 579 break; | 579 break; |
| 580 case kX87FloatOperand: | 580 case kX87FloatOperand: |
| 581 __ fld_s(src); | 581 __ fld_s(src); |
| 582 break; | 582 break; |
| 583 case kX87IntOperand: | 583 case kX87IntOperand: |
| 584 __ fild_s(src); | 584 __ fild_s(src); |
| 585 break; | 585 break; |
| 586 default: | 586 default: |
| 587 UNREACHABLE(); | 587 UNREACHABLE(); |
| 588 } | 588 } |
| 589 } | 589 } |
| 590 | 590 |
| 591 | 591 |
| 592 void LCodeGen::X87Mov(Operand dst, X87Register src, X87OperandType opts) { | 592 void LCodeGen::X87Mov(Operand dst, X87Register src, X87OperandType opts) { |
| 593 ASSERT(!dst.is_reg_only()); | 593 DCHECK(!dst.is_reg_only()); |
| 594 x87_stack_.Fxch(src); | 594 x87_stack_.Fxch(src); |
| 595 switch (opts) { | 595 switch (opts) { |
| 596 case kX87DoubleOperand: | 596 case kX87DoubleOperand: |
| 597 __ fst_d(dst); | 597 __ fst_d(dst); |
| 598 break; | 598 break; |
| 599 case kX87IntOperand: | 599 case kX87IntOperand: |
| 600 __ fist_s(dst); | 600 __ fist_s(dst); |
| 601 break; | 601 break; |
| 602 default: | 602 default: |
| 603 UNREACHABLE(); | 603 UNREACHABLE(); |
| 604 } | 604 } |
| 605 } | 605 } |
| 606 | 606 |
| 607 | 607 |
| 608 void LCodeGen::X87Stack::PrepareToWrite(X87Register reg) { | 608 void LCodeGen::X87Stack::PrepareToWrite(X87Register reg) { |
| 609 ASSERT(is_mutable_); | 609 DCHECK(is_mutable_); |
| 610 if (Contains(reg)) { | 610 if (Contains(reg)) { |
| 611 Free(reg); | 611 Free(reg); |
| 612 } | 612 } |
| 613 // Mark this register as the next register to write to | 613 // Mark this register as the next register to write to |
| 614 stack_[stack_depth_] = reg; | 614 stack_[stack_depth_] = reg; |
| 615 } | 615 } |
| 616 | 616 |
| 617 | 617 |
| 618 void LCodeGen::X87Stack::CommitWrite(X87Register reg) { | 618 void LCodeGen::X87Stack::CommitWrite(X87Register reg) { |
| 619 ASSERT(is_mutable_); | 619 DCHECK(is_mutable_); |
| 620 // Assert the reg is prepared to write, but not on the virtual stack yet | 620 // Assert the reg is prepared to write, but not on the virtual stack yet |
| 621 ASSERT(!Contains(reg) && stack_[stack_depth_].is(reg) && | 621 DCHECK(!Contains(reg) && stack_[stack_depth_].is(reg) && |
| 622 stack_depth_ < X87Register::kMaxNumAllocatableRegisters); | 622 stack_depth_ < X87Register::kMaxNumAllocatableRegisters); |
| 623 stack_depth_++; | 623 stack_depth_++; |
| 624 } | 624 } |
| 625 | 625 |
| 626 | 626 |
| 627 void LCodeGen::X87PrepareBinaryOp( | 627 void LCodeGen::X87PrepareBinaryOp( |
| 628 X87Register left, X87Register right, X87Register result) { | 628 X87Register left, X87Register right, X87Register result) { |
| 629 // You need to use DefineSameAsFirst for x87 instructions | 629 // You need to use DefineSameAsFirst for x87 instructions |
| 630 ASSERT(result.is(left)); | 630 DCHECK(result.is(left)); |
| 631 x87_stack_.Fxch(right, 1); | 631 x87_stack_.Fxch(right, 1); |
| 632 x87_stack_.Fxch(left); | 632 x87_stack_.Fxch(left); |
| 633 } | 633 } |
| 634 | 634 |
| 635 | 635 |
| 636 void LCodeGen::X87Stack::FlushIfNecessary(LInstruction* instr, LCodeGen* cgen) { | 636 void LCodeGen::X87Stack::FlushIfNecessary(LInstruction* instr, LCodeGen* cgen) { |
| 637 if (stack_depth_ > 0 && instr->ClobbersDoubleRegisters(isolate())) { | 637 if (stack_depth_ > 0 && instr->ClobbersDoubleRegisters(isolate())) { |
| 638 bool double_inputs = instr->HasDoubleRegisterInput(); | 638 bool double_inputs = instr->HasDoubleRegisterInput(); |
| 639 | 639 |
| 640 // Flush stack from tos down, since FreeX87() will mess with tos | 640 // Flush stack from tos down, since FreeX87() will mess with tos |
| (...skipping 12 matching lines...) Expand all Loading... |
| 653 while (stack_depth_ > 0) { | 653 while (stack_depth_ > 0) { |
| 654 __ fstp(0); | 654 __ fstp(0); |
| 655 stack_depth_--; | 655 stack_depth_--; |
| 656 } | 656 } |
| 657 if (FLAG_debug_code && FLAG_enable_slow_asserts) __ VerifyX87StackDepth(0); | 657 if (FLAG_debug_code && FLAG_enable_slow_asserts) __ VerifyX87StackDepth(0); |
| 658 } | 658 } |
| 659 } | 659 } |
| 660 | 660 |
| 661 | 661 |
| 662 void LCodeGen::X87Stack::LeavingBlock(int current_block_id, LGoto* goto_instr) { | 662 void LCodeGen::X87Stack::LeavingBlock(int current_block_id, LGoto* goto_instr) { |
| 663 ASSERT(stack_depth_ <= 1); | 663 DCHECK(stack_depth_ <= 1); |
| 664 // If ever used for new stubs producing two pairs of doubles joined into two | 664 // If ever used for new stubs producing two pairs of doubles joined into two |
| 665 // phis this assert hits. That situation is not handled, since the two stacks | 665 // phis this assert hits. That situation is not handled, since the two stacks |
| 666 // might have st0 and st1 swapped. | 666 // might have st0 and st1 swapped. |
| 667 if (current_block_id + 1 != goto_instr->block_id()) { | 667 if (current_block_id + 1 != goto_instr->block_id()) { |
| 668 // If we have a value on the x87 stack on leaving a block, it must be a | 668 // If we have a value on the x87 stack on leaving a block, it must be a |
| 669 // phi input. If the next block we compile is not the join block, we have | 669 // phi input. If the next block we compile is not the join block, we have |
| 670 // to discard the stack state. | 670 // to discard the stack state. |
| 671 stack_depth_ = 0; | 671 stack_depth_ = 0; |
| 672 } | 672 } |
| 673 } | 673 } |
| 674 | 674 |
| 675 | 675 |
| 676 void LCodeGen::EmitFlushX87ForDeopt() { | 676 void LCodeGen::EmitFlushX87ForDeopt() { |
| 677 // The deoptimizer does not support X87 Registers. But as long as we | 677 // The deoptimizer does not support X87 Registers. But as long as we |
| 678 // deopt from a stub its not a problem, since we will re-materialize the | 678 // deopt from a stub its not a problem, since we will re-materialize the |
| 679 // original stub inputs, which can't be double registers. | 679 // original stub inputs, which can't be double registers. |
| 680 ASSERT(info()->IsStub()); | 680 DCHECK(info()->IsStub()); |
| 681 if (FLAG_debug_code && FLAG_enable_slow_asserts) { | 681 if (FLAG_debug_code && FLAG_enable_slow_asserts) { |
| 682 __ pushfd(); | 682 __ pushfd(); |
| 683 __ VerifyX87StackDepth(x87_stack_.depth()); | 683 __ VerifyX87StackDepth(x87_stack_.depth()); |
| 684 __ popfd(); | 684 __ popfd(); |
| 685 } | 685 } |
| 686 for (int i = 0; i < x87_stack_.depth(); i++) __ fstp(0); | 686 for (int i = 0; i < x87_stack_.depth(); i++) __ fstp(0); |
| 687 } | 687 } |
| 688 | 688 |
| 689 | 689 |
| 690 Register LCodeGen::ToRegister(LOperand* op) const { | 690 Register LCodeGen::ToRegister(LOperand* op) const { |
| 691 ASSERT(op->IsRegister()); | 691 DCHECK(op->IsRegister()); |
| 692 return ToRegister(op->index()); | 692 return ToRegister(op->index()); |
| 693 } | 693 } |
| 694 | 694 |
| 695 | 695 |
| 696 X87Register LCodeGen::ToX87Register(LOperand* op) const { | 696 X87Register LCodeGen::ToX87Register(LOperand* op) const { |
| 697 ASSERT(op->IsDoubleRegister()); | 697 DCHECK(op->IsDoubleRegister()); |
| 698 return ToX87Register(op->index()); | 698 return ToX87Register(op->index()); |
| 699 } | 699 } |
| 700 | 700 |
| 701 | 701 |
| 702 int32_t LCodeGen::ToInteger32(LConstantOperand* op) const { | 702 int32_t LCodeGen::ToInteger32(LConstantOperand* op) const { |
| 703 return ToRepresentation(op, Representation::Integer32()); | 703 return ToRepresentation(op, Representation::Integer32()); |
| 704 } | 704 } |
| 705 | 705 |
| 706 | 706 |
| 707 int32_t LCodeGen::ToRepresentation(LConstantOperand* op, | 707 int32_t LCodeGen::ToRepresentation(LConstantOperand* op, |
| 708 const Representation& r) const { | 708 const Representation& r) const { |
| 709 HConstant* constant = chunk_->LookupConstant(op); | 709 HConstant* constant = chunk_->LookupConstant(op); |
| 710 int32_t value = constant->Integer32Value(); | 710 int32_t value = constant->Integer32Value(); |
| 711 if (r.IsInteger32()) return value; | 711 if (r.IsInteger32()) return value; |
| 712 ASSERT(r.IsSmiOrTagged()); | 712 DCHECK(r.IsSmiOrTagged()); |
| 713 return reinterpret_cast<int32_t>(Smi::FromInt(value)); | 713 return reinterpret_cast<int32_t>(Smi::FromInt(value)); |
| 714 } | 714 } |
| 715 | 715 |
| 716 | 716 |
// Returns the heap-object handle for constant |op|.
Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  // Handles exist only for smi/tagged literals, never for raw doubles or
  // external references.
  DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
  return constant->handle(isolate());
}
| 722 | 722 |
| 723 | 723 |
// Returns the double value of constant |op|; the constant must hold one.
double LCodeGen::ToDouble(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(constant->HasDoubleValue());
  return constant->DoubleValue();
}
| 729 | 729 |
| 730 | 730 |
// Returns the external reference held by constant |op|; the constant must
// hold one.
ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(constant->HasExternalReferenceValue());
  return constant->ExternalReferenceValue();
}
| 736 | 736 |
| 737 | 737 |
// True if constant |op| is representable as a smi or a 32-bit integer.
bool LCodeGen::IsInteger32(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
}
| 741 | 741 |
| 742 | 742 |
// True if constant |op| has smi representation.
bool LCodeGen::IsSmi(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsSmi();
}
| 746 | 746 |
| 747 | 747 |
| 748 static int ArgumentsOffsetWithoutFrame(int index) { | 748 static int ArgumentsOffsetWithoutFrame(int index) { |
| 749 ASSERT(index < 0); | 749 DCHECK(index < 0); |
| 750 return -(index + 1) * kPointerSize + kPCOnStackSize; | 750 return -(index + 1) * kPointerSize + kPCOnStackSize; |
| 751 } | 751 } |
| 752 | 752 |
| 753 | 753 |
| 754 Operand LCodeGen::ToOperand(LOperand* op) const { | 754 Operand LCodeGen::ToOperand(LOperand* op) const { |
| 755 if (op->IsRegister()) return Operand(ToRegister(op)); | 755 if (op->IsRegister()) return Operand(ToRegister(op)); |
| 756 ASSERT(!op->IsDoubleRegister()); | 756 DCHECK(!op->IsDoubleRegister()); |
| 757 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); | 757 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
| 758 if (NeedsEagerFrame()) { | 758 if (NeedsEagerFrame()) { |
| 759 return Operand(ebp, StackSlotOffset(op->index())); | 759 return Operand(ebp, StackSlotOffset(op->index())); |
| 760 } else { | 760 } else { |
| 761 // Retrieve parameter without eager stack-frame relative to the | 761 // Retrieve parameter without eager stack-frame relative to the |
| 762 // stack-pointer. | 762 // stack-pointer. |
| 763 return Operand(esp, ArgumentsOffsetWithoutFrame(op->index())); | 763 return Operand(esp, ArgumentsOffsetWithoutFrame(op->index())); |
| 764 } | 764 } |
| 765 } | 765 } |
| 766 | 766 |
| 767 | 767 |
// Returns an operand addressing the high (second) word of the double
// stack slot |op|.
Operand LCodeGen::HighOperand(LOperand* op) {
  DCHECK(op->IsDoubleStackSlot());
  if (NeedsEagerFrame()) {
    return Operand(ebp, StackSlotOffset(op->index()) + kPointerSize);
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return Operand(
        esp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize);
  }
}
| 779 | 779 |
| (...skipping 14 matching lines...) Expand all Loading... |
| 794 ? DefineDeoptimizationLiteral(environment->closure()) | 794 ? DefineDeoptimizationLiteral(environment->closure()) |
| 795 : Translation::kSelfLiteralId; | 795 : Translation::kSelfLiteralId; |
| 796 switch (environment->frame_type()) { | 796 switch (environment->frame_type()) { |
| 797 case JS_FUNCTION: | 797 case JS_FUNCTION: |
| 798 translation->BeginJSFrame(environment->ast_id(), closure_id, height); | 798 translation->BeginJSFrame(environment->ast_id(), closure_id, height); |
| 799 break; | 799 break; |
| 800 case JS_CONSTRUCT: | 800 case JS_CONSTRUCT: |
| 801 translation->BeginConstructStubFrame(closure_id, translation_size); | 801 translation->BeginConstructStubFrame(closure_id, translation_size); |
| 802 break; | 802 break; |
| 803 case JS_GETTER: | 803 case JS_GETTER: |
| 804 ASSERT(translation_size == 1); | 804 DCHECK(translation_size == 1); |
| 805 ASSERT(height == 0); | 805 DCHECK(height == 0); |
| 806 translation->BeginGetterStubFrame(closure_id); | 806 translation->BeginGetterStubFrame(closure_id); |
| 807 break; | 807 break; |
| 808 case JS_SETTER: | 808 case JS_SETTER: |
| 809 ASSERT(translation_size == 2); | 809 DCHECK(translation_size == 2); |
| 810 ASSERT(height == 0); | 810 DCHECK(height == 0); |
| 811 translation->BeginSetterStubFrame(closure_id); | 811 translation->BeginSetterStubFrame(closure_id); |
| 812 break; | 812 break; |
| 813 case ARGUMENTS_ADAPTOR: | 813 case ARGUMENTS_ADAPTOR: |
| 814 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size); | 814 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size); |
| 815 break; | 815 break; |
| 816 case STUB: | 816 case STUB: |
| 817 translation->BeginCompiledStubFrame(); | 817 translation->BeginCompiledStubFrame(); |
| 818 break; | 818 break; |
| 819 default: | 819 default: |
| 820 UNREACHABLE(); | 820 UNREACHABLE(); |
| (...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 897 } else { | 897 } else { |
| 898 UNREACHABLE(); | 898 UNREACHABLE(); |
| 899 } | 899 } |
| 900 } | 900 } |
| 901 | 901 |
| 902 | 902 |
// Emits a call to |code| and records the safepoint needed so the call site
// can be lazily deoptimized.
void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  DCHECK(instr != NULL);
  __ call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}
| 918 | 918 |
| 919 | 919 |
// Convenience wrapper around CallCodeGeneric that records a simple
// (register-free) safepoint.
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}
| 925 | 925 |
| 926 | 926 |
// Calls runtime function |fun| with |argc| arguments and records the
// lazy-deopt safepoint for the call.
void LCodeGen::CallRuntime(const Runtime::Function* fun,
                           int argc,
                           LInstruction* instr) {
  DCHECK(instr != NULL);
  // A pointer map is required to build the safepoint below.
  DCHECK(instr->HasPointerMap());

  __ CallRuntime(fun, argc);

  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);

  DCHECK(info()->is_calling());
}
| 939 | 939 |
| 940 | 940 |
| 941 void LCodeGen::LoadContextFromDeferred(LOperand* context) { | 941 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
| 942 if (context->IsRegister()) { | 942 if (context->IsRegister()) { |
| 943 if (!ToRegister(context).is(esi)) { | 943 if (!ToRegister(context).is(esi)) { |
| 944 __ mov(esi, ToRegister(context)); | 944 __ mov(esi, ToRegister(context)); |
| 945 } | 945 } |
| 946 } else if (context->IsStackSlot()) { | 946 } else if (context->IsStackSlot()) { |
| 947 __ mov(esi, ToOperand(context)); | 947 __ mov(esi, ToOperand(context)); |
| 948 } else if (context->IsConstantOperand()) { | 948 } else if (context->IsConstantOperand()) { |
| 949 HConstant* constant = | 949 HConstant* constant = |
| 950 chunk_->LookupConstant(LConstantOperand::cast(context)); | 950 chunk_->LookupConstant(LConstantOperand::cast(context)); |
| 951 __ LoadObject(esi, Handle<Object>::cast(constant->handle(isolate()))); | 951 __ LoadObject(esi, Handle<Object>::cast(constant->handle(isolate()))); |
| 952 } else { | 952 } else { |
| 953 UNREACHABLE(); | 953 UNREACHABLE(); |
| 954 } | 954 } |
| 955 } | 955 } |
| 956 | 956 |
// Calls runtime function |id| from deferred code: restores the context into
// esi first, then records a safepoint with registers (deferred code runs
// with live registers saved).
void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr,
                                       LOperand* context) {
  LoadContextFromDeferred(context);

  __ CallRuntime(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);

  DCHECK(info()->is_calling());
}
| 969 | 969 |
| 970 | 970 |
| 971 void LCodeGen::RegisterEnvironmentForDeoptimization( | 971 void LCodeGen::RegisterEnvironmentForDeoptimization( |
| 972 LEnvironment* environment, Safepoint::DeoptMode mode) { | 972 LEnvironment* environment, Safepoint::DeoptMode mode) { |
| 973 environment->set_has_been_used(); | 973 environment->set_has_been_used(); |
| 974 if (!environment->HasBeenRegistered()) { | 974 if (!environment->HasBeenRegistered()) { |
| 975 // Physical stack frame layout: | 975 // Physical stack frame layout: |
| 976 // -x ............. -4 0 ..................................... y | 976 // -x ............. -4 0 ..................................... y |
| 977 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 977 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
| (...skipping 24 matching lines...) Expand all Loading... |
| 1002 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 1002 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
| 1003 deoptimizations_.Add(environment, zone()); | 1003 deoptimizations_.Add(environment, zone()); |
| 1004 } | 1004 } |
| 1005 } | 1005 } |
| 1006 | 1006 |
| 1007 | 1007 |
| 1008 void LCodeGen::DeoptimizeIf(Condition cc, | 1008 void LCodeGen::DeoptimizeIf(Condition cc, |
| 1009 LEnvironment* environment, | 1009 LEnvironment* environment, |
| 1010 Deoptimizer::BailoutType bailout_type) { | 1010 Deoptimizer::BailoutType bailout_type) { |
| 1011 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 1011 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 1012 ASSERT(environment->HasBeenRegistered()); | 1012 DCHECK(environment->HasBeenRegistered()); |
| 1013 int id = environment->deoptimization_index(); | 1013 int id = environment->deoptimization_index(); |
| 1014 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 1014 DCHECK(info()->IsOptimizing() || info()->IsStub()); |
| 1015 Address entry = | 1015 Address entry = |
| 1016 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 1016 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 1017 if (entry == NULL) { | 1017 if (entry == NULL) { |
| 1018 Abort(kBailoutWasNotPrepared); | 1018 Abort(kBailoutWasNotPrepared); |
| 1019 return; | 1019 return; |
| 1020 } | 1020 } |
| 1021 | 1021 |
| 1022 if (DeoptEveryNTimes()) { | 1022 if (DeoptEveryNTimes()) { |
| 1023 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); | 1023 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); |
| 1024 Label no_deopt; | 1024 Label no_deopt; |
| 1025 __ pushfd(); | 1025 __ pushfd(); |
| 1026 __ push(eax); | 1026 __ push(eax); |
| 1027 __ mov(eax, Operand::StaticVariable(count)); | 1027 __ mov(eax, Operand::StaticVariable(count)); |
| 1028 __ sub(eax, Immediate(1)); | 1028 __ sub(eax, Immediate(1)); |
| 1029 __ j(not_zero, &no_deopt, Label::kNear); | 1029 __ j(not_zero, &no_deopt, Label::kNear); |
| 1030 if (FLAG_trap_on_deopt) __ int3(); | 1030 if (FLAG_trap_on_deopt) __ int3(); |
| 1031 __ mov(eax, Immediate(FLAG_deopt_every_n_times)); | 1031 __ mov(eax, Immediate(FLAG_deopt_every_n_times)); |
| 1032 __ mov(Operand::StaticVariable(count), eax); | 1032 __ mov(Operand::StaticVariable(count), eax); |
| 1033 __ pop(eax); | 1033 __ pop(eax); |
| 1034 __ popfd(); | 1034 __ popfd(); |
| 1035 ASSERT(frame_is_built_); | 1035 DCHECK(frame_is_built_); |
| 1036 __ call(entry, RelocInfo::RUNTIME_ENTRY); | 1036 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
| 1037 __ bind(&no_deopt); | 1037 __ bind(&no_deopt); |
| 1038 __ mov(Operand::StaticVariable(count), eax); | 1038 __ mov(Operand::StaticVariable(count), eax); |
| 1039 __ pop(eax); | 1039 __ pop(eax); |
| 1040 __ popfd(); | 1040 __ popfd(); |
| 1041 } | 1041 } |
| 1042 | 1042 |
| 1043 // Before Instructions which can deopt, we normally flush the x87 stack. But | 1043 // Before Instructions which can deopt, we normally flush the x87 stack. But |
| 1044 // we can have inputs or outputs of the current instruction on the stack, | 1044 // we can have inputs or outputs of the current instruction on the stack, |
| 1045 // thus we need to flush them here from the physical stack to leave it in a | 1045 // thus we need to flush them here from the physical stack to leave it in a |
| 1046 // consistent state. | 1046 // consistent state. |
| 1047 if (x87_stack_.depth() > 0) { | 1047 if (x87_stack_.depth() > 0) { |
| 1048 Label done; | 1048 Label done; |
| 1049 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); | 1049 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); |
| 1050 EmitFlushX87ForDeopt(); | 1050 EmitFlushX87ForDeopt(); |
| 1051 __ bind(&done); | 1051 __ bind(&done); |
| 1052 } | 1052 } |
| 1053 | 1053 |
| 1054 if (info()->ShouldTrapOnDeopt()) { | 1054 if (info()->ShouldTrapOnDeopt()) { |
| 1055 Label done; | 1055 Label done; |
| 1056 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); | 1056 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); |
| 1057 __ int3(); | 1057 __ int3(); |
| 1058 __ bind(&done); | 1058 __ bind(&done); |
| 1059 } | 1059 } |
| 1060 | 1060 |
| 1061 ASSERT(info()->IsStub() || frame_is_built_); | 1061 DCHECK(info()->IsStub() || frame_is_built_); |
| 1062 if (cc == no_condition && frame_is_built_) { | 1062 if (cc == no_condition && frame_is_built_) { |
| 1063 __ call(entry, RelocInfo::RUNTIME_ENTRY); | 1063 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
| 1064 } else { | 1064 } else { |
| 1065 // We often have several deopts to the same entry, reuse the last | 1065 // We often have several deopts to the same entry, reuse the last |
| 1066 // jump entry if this is the case. | 1066 // jump entry if this is the case. |
| 1067 if (jump_table_.is_empty() || | 1067 if (jump_table_.is_empty() || |
| 1068 jump_table_.last().address != entry || | 1068 jump_table_.last().address != entry || |
| 1069 jump_table_.last().needs_frame != !frame_is_built_ || | 1069 jump_table_.last().needs_frame != !frame_is_built_ || |
| 1070 jump_table_.last().bailout_type != bailout_type) { | 1070 jump_table_.last().bailout_type != bailout_type) { |
| 1071 Deoptimizer::JumpTableEntry table_entry(entry, | 1071 Deoptimizer::JumpTableEntry table_entry(entry, |
| (...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1139 int result = deoptimization_literals_.length(); | 1139 int result = deoptimization_literals_.length(); |
| 1140 for (int i = 0; i < deoptimization_literals_.length(); ++i) { | 1140 for (int i = 0; i < deoptimization_literals_.length(); ++i) { |
| 1141 if (deoptimization_literals_[i].is_identical_to(literal)) return i; | 1141 if (deoptimization_literals_[i].is_identical_to(literal)) return i; |
| 1142 } | 1142 } |
| 1143 deoptimization_literals_.Add(literal, zone()); | 1143 deoptimization_literals_.Add(literal, zone()); |
| 1144 return result; | 1144 return result; |
| 1145 } | 1145 } |
| 1146 | 1146 |
| 1147 | 1147 |
| 1148 void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() { | 1148 void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() { |
| 1149 ASSERT(deoptimization_literals_.length() == 0); | 1149 DCHECK(deoptimization_literals_.length() == 0); |
| 1150 | 1150 |
| 1151 const ZoneList<Handle<JSFunction> >* inlined_closures = | 1151 const ZoneList<Handle<JSFunction> >* inlined_closures = |
| 1152 chunk()->inlined_closures(); | 1152 chunk()->inlined_closures(); |
| 1153 | 1153 |
| 1154 for (int i = 0, length = inlined_closures->length(); | 1154 for (int i = 0, length = inlined_closures->length(); |
| 1155 i < length; | 1155 i < length; |
| 1156 i++) { | 1156 i++) { |
| 1157 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 1157 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
| 1158 } | 1158 } |
| 1159 | 1159 |
| 1160 inlined_function_count_ = deoptimization_literals_.length(); | 1160 inlined_function_count_ = deoptimization_literals_.length(); |
| 1161 } | 1161 } |
| 1162 | 1162 |
| 1163 | 1163 |
// Records a lazy-deopt safepoint for |instr|, either a simple one or one
// that also describes saved registers, depending on |safepoint_mode|.
void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
    DCHECK(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kLazyDeopt);
  }
}
| 1174 | 1174 |
| 1175 | 1175 |
| 1176 void LCodeGen::RecordSafepoint( | 1176 void LCodeGen::RecordSafepoint( |
| 1177 LPointerMap* pointers, | 1177 LPointerMap* pointers, |
| 1178 Safepoint::Kind kind, | 1178 Safepoint::Kind kind, |
| 1179 int arguments, | 1179 int arguments, |
| 1180 Safepoint::DeoptMode deopt_mode) { | 1180 Safepoint::DeoptMode deopt_mode) { |
| 1181 ASSERT(kind == expected_safepoint_kind_); | 1181 DCHECK(kind == expected_safepoint_kind_); |
| 1182 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); | 1182 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); |
| 1183 Safepoint safepoint = | 1183 Safepoint safepoint = |
| 1184 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode); | 1184 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode); |
| 1185 for (int i = 0; i < operands->length(); i++) { | 1185 for (int i = 0; i < operands->length(); i++) { |
| 1186 LOperand* pointer = operands->at(i); | 1186 LOperand* pointer = operands->at(i); |
| 1187 if (pointer->IsStackSlot()) { | 1187 if (pointer->IsStackSlot()) { |
| 1188 safepoint.DefinePointerSlot(pointer->index(), zone()); | 1188 safepoint.DefinePointerSlot(pointer->index(), zone()); |
| 1189 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 1189 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 1190 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); | 1190 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); |
| 1191 } | 1191 } |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1258 DoGap(instr); | 1258 DoGap(instr); |
| 1259 } | 1259 } |
| 1260 | 1260 |
| 1261 | 1261 |
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do: parameters are pushed by the caller and addressed
  // directly via their stack slots.
}
| 1265 | 1265 |
| 1266 | 1266 |
| 1267 void LCodeGen::DoCallStub(LCallStub* instr) { | 1267 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 1268 ASSERT(ToRegister(instr->context()).is(esi)); | 1268 DCHECK(ToRegister(instr->context()).is(esi)); |
| 1269 ASSERT(ToRegister(instr->result()).is(eax)); | 1269 DCHECK(ToRegister(instr->result()).is(eax)); |
| 1270 switch (instr->hydrogen()->major_key()) { | 1270 switch (instr->hydrogen()->major_key()) { |
| 1271 case CodeStub::RegExpExec: { | 1271 case CodeStub::RegExpExec: { |
| 1272 RegExpExecStub stub(isolate()); | 1272 RegExpExecStub stub(isolate()); |
| 1273 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1273 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1274 break; | 1274 break; |
| 1275 } | 1275 } |
| 1276 case CodeStub::SubString: { | 1276 case CodeStub::SubString: { |
| 1277 SubStringStub stub(isolate()); | 1277 SubStringStub stub(isolate()); |
| 1278 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1278 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1279 break; | 1279 break; |
| (...skipping 10 matching lines...) Expand all Loading... |
| 1290 | 1290 |
| 1291 | 1291 |
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // OSR values are already on the stack; just emit the OSR entry prologue
  // (emitted once, at the first such instruction).
  GenerateOsrPrologue();
}
| 1295 | 1295 |
| 1296 | 1296 |
| 1297 void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) { | 1297 void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) { |
| 1298 Register dividend = ToRegister(instr->dividend()); | 1298 Register dividend = ToRegister(instr->dividend()); |
| 1299 int32_t divisor = instr->divisor(); | 1299 int32_t divisor = instr->divisor(); |
| 1300 ASSERT(dividend.is(ToRegister(instr->result()))); | 1300 DCHECK(dividend.is(ToRegister(instr->result()))); |
| 1301 | 1301 |
| 1302 // Theoretically, a variation of the branch-free code for integer division by | 1302 // Theoretically, a variation of the branch-free code for integer division by |
| 1303 // a power of 2 (calculating the remainder via an additional multiplication | 1303 // a power of 2 (calculating the remainder via an additional multiplication |
| 1304 // (which gets simplified to an 'and') and subtraction) should be faster, and | 1304 // (which gets simplified to an 'and') and subtraction) should be faster, and |
| 1305 // this is exactly what GCC and clang emit. Nevertheless, benchmarks seem to | 1305 // this is exactly what GCC and clang emit. Nevertheless, benchmarks seem to |
| 1306 // indicate that positive dividends are heavily favored, so the branching | 1306 // indicate that positive dividends are heavily favored, so the branching |
| 1307 // version performs better. | 1307 // version performs better. |
| 1308 HMod* hmod = instr->hydrogen(); | 1308 HMod* hmod = instr->hydrogen(); |
| 1309 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1309 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
| 1310 Label dividend_is_not_negative, done; | 1310 Label dividend_is_not_negative, done; |
| (...skipping 12 matching lines...) Expand all Loading... |
| 1323 | 1323 |
| 1324 __ bind(÷nd_is_not_negative); | 1324 __ bind(÷nd_is_not_negative); |
| 1325 __ and_(dividend, mask); | 1325 __ and_(dividend, mask); |
| 1326 __ bind(&done); | 1326 __ bind(&done); |
| 1327 } | 1327 } |
| 1328 | 1328 |
| 1329 | 1329 |
// Computes dividend % |divisor| for a compile-time-constant divisor via
// TruncatingDiv (multiply-by-reciprocal), avoiding a hardware idiv.
void LCodeGen::DoModByConstI(LModByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(ToRegister(instr->result()).is(eax));

  if (divisor == 0) {
    // x % 0 has no integer result; always deoptimize.
    DeoptimizeIf(no_condition, instr->environment());
    return;
  }

  // edx = dividend / |divisor| (truncated), then
  // eax = dividend - edx * |divisor| = dividend % |divisor|.
  __ TruncatingDiv(dividend, Abs(divisor));
  __ imul(edx, edx, Abs(divisor));
  __ mov(eax, dividend);
  __ sub(eax, edx);

  // Check for negative zero.
  HMod* hmod = instr->hydrogen();
  if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label remainder_not_zero;
    // The jump consumes the flags set by the sub above (remainder in eax).
    __ j(not_zero, &remainder_not_zero, Label::kNear);
    // Zero remainder with a negative dividend would be -0; deoptimize.
    __ cmp(dividend, Immediate(0));
    DeoptimizeIf(less, instr->environment());
    __ bind(&remainder_not_zero);
  }
}
| 1355 | 1355 |
| 1356 | 1356 |
| 1357 void LCodeGen::DoModI(LModI* instr) { | 1357 void LCodeGen::DoModI(LModI* instr) { |
| 1358 HMod* hmod = instr->hydrogen(); | 1358 HMod* hmod = instr->hydrogen(); |
| 1359 | 1359 |
| 1360 Register left_reg = ToRegister(instr->left()); | 1360 Register left_reg = ToRegister(instr->left()); |
| 1361 ASSERT(left_reg.is(eax)); | 1361 DCHECK(left_reg.is(eax)); |
| 1362 Register right_reg = ToRegister(instr->right()); | 1362 Register right_reg = ToRegister(instr->right()); |
| 1363 ASSERT(!right_reg.is(eax)); | 1363 DCHECK(!right_reg.is(eax)); |
| 1364 ASSERT(!right_reg.is(edx)); | 1364 DCHECK(!right_reg.is(edx)); |
| 1365 Register result_reg = ToRegister(instr->result()); | 1365 Register result_reg = ToRegister(instr->result()); |
| 1366 ASSERT(result_reg.is(edx)); | 1366 DCHECK(result_reg.is(edx)); |
| 1367 | 1367 |
| 1368 Label done; | 1368 Label done; |
| 1369 // Check for x % 0, idiv would signal a divide error. We have to | 1369 // Check for x % 0, idiv would signal a divide error. We have to |
| 1370 // deopt in this case because we can't return a NaN. | 1370 // deopt in this case because we can't return a NaN. |
| 1371 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 1371 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { |
| 1372 __ test(right_reg, Operand(right_reg)); | 1372 __ test(right_reg, Operand(right_reg)); |
| 1373 DeoptimizeIf(zero, instr->environment()); | 1373 DeoptimizeIf(zero, instr->environment()); |
| 1374 } | 1374 } |
| 1375 | 1375 |
| 1376 // Check for kMinInt % -1, idiv would signal a divide error. We | 1376 // Check for kMinInt % -1, idiv would signal a divide error. We |
| (...skipping 29 matching lines...) Expand all Loading... |
| 1406 } | 1406 } |
| 1407 __ idiv(right_reg); | 1407 __ idiv(right_reg); |
| 1408 __ bind(&done); | 1408 __ bind(&done); |
| 1409 } | 1409 } |
| 1410 | 1410 |
| 1411 | 1411 |
| 1412 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 1412 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { |
| 1413 Register dividend = ToRegister(instr->dividend()); | 1413 Register dividend = ToRegister(instr->dividend()); |
| 1414 int32_t divisor = instr->divisor(); | 1414 int32_t divisor = instr->divisor(); |
| 1415 Register result = ToRegister(instr->result()); | 1415 Register result = ToRegister(instr->result()); |
| 1416 ASSERT(divisor == kMinInt || IsPowerOf2(Abs(divisor))); | 1416 DCHECK(divisor == kMinInt || IsPowerOf2(Abs(divisor))); |
| 1417 ASSERT(!result.is(dividend)); | 1417 DCHECK(!result.is(dividend)); |
| 1418 | 1418 |
| 1419 // Check for (0 / -x) that will produce negative zero. | 1419 // Check for (0 / -x) that will produce negative zero. |
| 1420 HDiv* hdiv = instr->hydrogen(); | 1420 HDiv* hdiv = instr->hydrogen(); |
| 1421 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1421 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 1422 __ test(dividend, dividend); | 1422 __ test(dividend, dividend); |
| 1423 DeoptimizeIf(zero, instr->environment()); | 1423 DeoptimizeIf(zero, instr->environment()); |
| 1424 } | 1424 } |
| 1425 // Check for (kMinInt / -1). | 1425 // Check for (kMinInt / -1). |
| 1426 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 1426 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { |
| 1427 __ cmp(dividend, kMinInt); | 1427 __ cmp(dividend, kMinInt); |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1443 __ add(result, dividend); | 1443 __ add(result, dividend); |
| 1444 __ sar(result, shift); | 1444 __ sar(result, shift); |
| 1445 } | 1445 } |
| 1446 if (divisor < 0) __ neg(result); | 1446 if (divisor < 0) __ neg(result); |
| 1447 } | 1447 } |
| 1448 | 1448 |
| 1449 | 1449 |
| 1450 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { | 1450 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { |
| 1451 Register dividend = ToRegister(instr->dividend()); | 1451 Register dividend = ToRegister(instr->dividend()); |
| 1452 int32_t divisor = instr->divisor(); | 1452 int32_t divisor = instr->divisor(); |
| 1453 ASSERT(ToRegister(instr->result()).is(edx)); | 1453 DCHECK(ToRegister(instr->result()).is(edx)); |
| 1454 | 1454 |
| 1455 if (divisor == 0) { | 1455 if (divisor == 0) { |
| 1456 DeoptimizeIf(no_condition, instr->environment()); | 1456 DeoptimizeIf(no_condition, instr->environment()); |
| 1457 return; | 1457 return; |
| 1458 } | 1458 } |
| 1459 | 1459 |
| 1460 // Check for (0 / -x) that will produce negative zero. | 1460 // Check for (0 / -x) that will produce negative zero. |
| 1461 HDiv* hdiv = instr->hydrogen(); | 1461 HDiv* hdiv = instr->hydrogen(); |
| 1462 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1462 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 1463 __ test(dividend, dividend); | 1463 __ test(dividend, dividend); |
| (...skipping 11 matching lines...) Expand all Loading... |
| 1475 } | 1475 } |
| 1476 } | 1476 } |
| 1477 | 1477 |
| 1478 | 1478 |
| 1479 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1479 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. |
| 1480 void LCodeGen::DoDivI(LDivI* instr) { | 1480 void LCodeGen::DoDivI(LDivI* instr) { |
| 1481 HBinaryOperation* hdiv = instr->hydrogen(); | 1481 HBinaryOperation* hdiv = instr->hydrogen(); |
| 1482 Register dividend = ToRegister(instr->dividend()); | 1482 Register dividend = ToRegister(instr->dividend()); |
| 1483 Register divisor = ToRegister(instr->divisor()); | 1483 Register divisor = ToRegister(instr->divisor()); |
| 1484 Register remainder = ToRegister(instr->temp()); | 1484 Register remainder = ToRegister(instr->temp()); |
| 1485 ASSERT(dividend.is(eax)); | 1485 DCHECK(dividend.is(eax)); |
| 1486 ASSERT(remainder.is(edx)); | 1486 DCHECK(remainder.is(edx)); |
| 1487 ASSERT(ToRegister(instr->result()).is(eax)); | 1487 DCHECK(ToRegister(instr->result()).is(eax)); |
| 1488 ASSERT(!divisor.is(eax)); | 1488 DCHECK(!divisor.is(eax)); |
| 1489 ASSERT(!divisor.is(edx)); | 1489 DCHECK(!divisor.is(edx)); |
| 1490 | 1490 |
| 1491 // Check for x / 0. | 1491 // Check for x / 0. |
| 1492 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1492 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
| 1493 __ test(divisor, divisor); | 1493 __ test(divisor, divisor); |
| 1494 DeoptimizeIf(zero, instr->environment()); | 1494 DeoptimizeIf(zero, instr->environment()); |
| 1495 } | 1495 } |
| 1496 | 1496 |
| 1497 // Check for (0 / -x) that will produce negative zero. | 1497 // Check for (0 / -x) that will produce negative zero. |
| 1498 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1498 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1499 Label dividend_not_zero; | 1499 Label dividend_not_zero; |
| (...skipping 22 matching lines...) Expand all Loading... |
| 1522 // Deoptimize if remainder is not 0. | 1522 // Deoptimize if remainder is not 0. |
| 1523 __ test(remainder, remainder); | 1523 __ test(remainder, remainder); |
| 1524 DeoptimizeIf(not_zero, instr->environment()); | 1524 DeoptimizeIf(not_zero, instr->environment()); |
| 1525 } | 1525 } |
| 1526 } | 1526 } |
| 1527 | 1527 |
| 1528 | 1528 |
| 1529 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { | 1529 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { |
| 1530 Register dividend = ToRegister(instr->dividend()); | 1530 Register dividend = ToRegister(instr->dividend()); |
| 1531 int32_t divisor = instr->divisor(); | 1531 int32_t divisor = instr->divisor(); |
| 1532 ASSERT(dividend.is(ToRegister(instr->result()))); | 1532 DCHECK(dividend.is(ToRegister(instr->result()))); |
| 1533 | 1533 |
| 1534 // If the divisor is positive, things are easy: There can be no deopts and we | 1534 // If the divisor is positive, things are easy: There can be no deopts and we |
| 1535 // can simply do an arithmetic right shift. | 1535 // can simply do an arithmetic right shift. |
| 1536 if (divisor == 1) return; | 1536 if (divisor == 1) return; |
| 1537 int32_t shift = WhichPowerOf2Abs(divisor); | 1537 int32_t shift = WhichPowerOf2Abs(divisor); |
| 1538 if (divisor > 1) { | 1538 if (divisor > 1) { |
| 1539 __ sar(dividend, shift); | 1539 __ sar(dividend, shift); |
| 1540 return; | 1540 return; |
| 1541 } | 1541 } |
| 1542 | 1542 |
| (...skipping 23 matching lines...) Expand all Loading... |
| 1566 __ jmp(&done, Label::kNear); | 1566 __ jmp(&done, Label::kNear); |
| 1567 __ bind(&not_kmin_int); | 1567 __ bind(&not_kmin_int); |
| 1568 __ sar(dividend, shift); | 1568 __ sar(dividend, shift); |
| 1569 __ bind(&done); | 1569 __ bind(&done); |
| 1570 } | 1570 } |
| 1571 | 1571 |
| 1572 | 1572 |
| 1573 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { | 1573 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { |
| 1574 Register dividend = ToRegister(instr->dividend()); | 1574 Register dividend = ToRegister(instr->dividend()); |
| 1575 int32_t divisor = instr->divisor(); | 1575 int32_t divisor = instr->divisor(); |
| 1576 ASSERT(ToRegister(instr->result()).is(edx)); | 1576 DCHECK(ToRegister(instr->result()).is(edx)); |
| 1577 | 1577 |
| 1578 if (divisor == 0) { | 1578 if (divisor == 0) { |
| 1579 DeoptimizeIf(no_condition, instr->environment()); | 1579 DeoptimizeIf(no_condition, instr->environment()); |
| 1580 return; | 1580 return; |
| 1581 } | 1581 } |
| 1582 | 1582 |
| 1583 // Check for (0 / -x) that will produce negative zero. | 1583 // Check for (0 / -x) that will produce negative zero. |
| 1584 HMathFloorOfDiv* hdiv = instr->hydrogen(); | 1584 HMathFloorOfDiv* hdiv = instr->hydrogen(); |
| 1585 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1585 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
| 1586 __ test(dividend, dividend); | 1586 __ test(dividend, dividend); |
| 1587 DeoptimizeIf(zero, instr->environment()); | 1587 DeoptimizeIf(zero, instr->environment()); |
| 1588 } | 1588 } |
| 1589 | 1589 |
| 1590 // Easy case: We need no dynamic check for the dividend and the flooring | 1590 // Easy case: We need no dynamic check for the dividend and the flooring |
| 1591 // division is the same as the truncating division. | 1591 // division is the same as the truncating division. |
| 1592 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 1592 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || |
| 1593 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 1593 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { |
| 1594 __ TruncatingDiv(dividend, Abs(divisor)); | 1594 __ TruncatingDiv(dividend, Abs(divisor)); |
| 1595 if (divisor < 0) __ neg(edx); | 1595 if (divisor < 0) __ neg(edx); |
| 1596 return; | 1596 return; |
| 1597 } | 1597 } |
| 1598 | 1598 |
| 1599 // In the general case we may need to adjust before and after the truncating | 1599 // In the general case we may need to adjust before and after the truncating |
| 1600 // division to get a flooring division. | 1600 // division to get a flooring division. |
| 1601 Register temp = ToRegister(instr->temp3()); | 1601 Register temp = ToRegister(instr->temp3()); |
| 1602 ASSERT(!temp.is(dividend) && !temp.is(eax) && !temp.is(edx)); | 1602 DCHECK(!temp.is(dividend) && !temp.is(eax) && !temp.is(edx)); |
| 1603 Label needs_adjustment, done; | 1603 Label needs_adjustment, done; |
| 1604 __ cmp(dividend, Immediate(0)); | 1604 __ cmp(dividend, Immediate(0)); |
| 1605 __ j(divisor > 0 ? less : greater, &needs_adjustment, Label::kNear); | 1605 __ j(divisor > 0 ? less : greater, &needs_adjustment, Label::kNear); |
| 1606 __ TruncatingDiv(dividend, Abs(divisor)); | 1606 __ TruncatingDiv(dividend, Abs(divisor)); |
| 1607 if (divisor < 0) __ neg(edx); | 1607 if (divisor < 0) __ neg(edx); |
| 1608 __ jmp(&done, Label::kNear); | 1608 __ jmp(&done, Label::kNear); |
| 1609 __ bind(&needs_adjustment); | 1609 __ bind(&needs_adjustment); |
| 1610 __ lea(temp, Operand(dividend, divisor > 0 ? 1 : -1)); | 1610 __ lea(temp, Operand(dividend, divisor > 0 ? 1 : -1)); |
| 1611 __ TruncatingDiv(temp, Abs(divisor)); | 1611 __ TruncatingDiv(temp, Abs(divisor)); |
| 1612 if (divisor < 0) __ neg(edx); | 1612 if (divisor < 0) __ neg(edx); |
| 1613 __ dec(edx); | 1613 __ dec(edx); |
| 1614 __ bind(&done); | 1614 __ bind(&done); |
| 1615 } | 1615 } |
| 1616 | 1616 |
| 1617 | 1617 |
| 1618 // TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. | 1618 // TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. |
| 1619 void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { | 1619 void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { |
| 1620 HBinaryOperation* hdiv = instr->hydrogen(); | 1620 HBinaryOperation* hdiv = instr->hydrogen(); |
| 1621 Register dividend = ToRegister(instr->dividend()); | 1621 Register dividend = ToRegister(instr->dividend()); |
| 1622 Register divisor = ToRegister(instr->divisor()); | 1622 Register divisor = ToRegister(instr->divisor()); |
| 1623 Register remainder = ToRegister(instr->temp()); | 1623 Register remainder = ToRegister(instr->temp()); |
| 1624 Register result = ToRegister(instr->result()); | 1624 Register result = ToRegister(instr->result()); |
| 1625 ASSERT(dividend.is(eax)); | 1625 DCHECK(dividend.is(eax)); |
| 1626 ASSERT(remainder.is(edx)); | 1626 DCHECK(remainder.is(edx)); |
| 1627 ASSERT(result.is(eax)); | 1627 DCHECK(result.is(eax)); |
| 1628 ASSERT(!divisor.is(eax)); | 1628 DCHECK(!divisor.is(eax)); |
| 1629 ASSERT(!divisor.is(edx)); | 1629 DCHECK(!divisor.is(edx)); |
| 1630 | 1630 |
| 1631 // Check for x / 0. | 1631 // Check for x / 0. |
| 1632 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1632 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
| 1633 __ test(divisor, divisor); | 1633 __ test(divisor, divisor); |
| 1634 DeoptimizeIf(zero, instr->environment()); | 1634 DeoptimizeIf(zero, instr->environment()); |
| 1635 } | 1635 } |
| 1636 | 1636 |
| 1637 // Check for (0 / -x) that will produce negative zero. | 1637 // Check for (0 / -x) that will produce negative zero. |
| 1638 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1638 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1639 Label dividend_not_zero; | 1639 Label dividend_not_zero; |
| (...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1749 DeoptimizeIf(sign, instr->environment()); | 1749 DeoptimizeIf(sign, instr->environment()); |
| 1750 } | 1750 } |
| 1751 __ bind(&done); | 1751 __ bind(&done); |
| 1752 } | 1752 } |
| 1753 } | 1753 } |
| 1754 | 1754 |
| 1755 | 1755 |
| 1756 void LCodeGen::DoBitI(LBitI* instr) { | 1756 void LCodeGen::DoBitI(LBitI* instr) { |
| 1757 LOperand* left = instr->left(); | 1757 LOperand* left = instr->left(); |
| 1758 LOperand* right = instr->right(); | 1758 LOperand* right = instr->right(); |
| 1759 ASSERT(left->Equals(instr->result())); | 1759 DCHECK(left->Equals(instr->result())); |
| 1760 ASSERT(left->IsRegister()); | 1760 DCHECK(left->IsRegister()); |
| 1761 | 1761 |
| 1762 if (right->IsConstantOperand()) { | 1762 if (right->IsConstantOperand()) { |
| 1763 int32_t right_operand = | 1763 int32_t right_operand = |
| 1764 ToRepresentation(LConstantOperand::cast(right), | 1764 ToRepresentation(LConstantOperand::cast(right), |
| 1765 instr->hydrogen()->representation()); | 1765 instr->hydrogen()->representation()); |
| 1766 switch (instr->op()) { | 1766 switch (instr->op()) { |
| 1767 case Token::BIT_AND: | 1767 case Token::BIT_AND: |
| 1768 __ and_(ToRegister(left), right_operand); | 1768 __ and_(ToRegister(left), right_operand); |
| 1769 break; | 1769 break; |
| 1770 case Token::BIT_OR: | 1770 case Token::BIT_OR: |
| (...skipping 25 matching lines...) Expand all Loading... |
| 1796 UNREACHABLE(); | 1796 UNREACHABLE(); |
| 1797 break; | 1797 break; |
| 1798 } | 1798 } |
| 1799 } | 1799 } |
| 1800 } | 1800 } |
| 1801 | 1801 |
| 1802 | 1802 |
| 1803 void LCodeGen::DoShiftI(LShiftI* instr) { | 1803 void LCodeGen::DoShiftI(LShiftI* instr) { |
| 1804 LOperand* left = instr->left(); | 1804 LOperand* left = instr->left(); |
| 1805 LOperand* right = instr->right(); | 1805 LOperand* right = instr->right(); |
| 1806 ASSERT(left->Equals(instr->result())); | 1806 DCHECK(left->Equals(instr->result())); |
| 1807 ASSERT(left->IsRegister()); | 1807 DCHECK(left->IsRegister()); |
| 1808 if (right->IsRegister()) { | 1808 if (right->IsRegister()) { |
| 1809 ASSERT(ToRegister(right).is(ecx)); | 1809 DCHECK(ToRegister(right).is(ecx)); |
| 1810 | 1810 |
| 1811 switch (instr->op()) { | 1811 switch (instr->op()) { |
| 1812 case Token::ROR: | 1812 case Token::ROR: |
| 1813 __ ror_cl(ToRegister(left)); | 1813 __ ror_cl(ToRegister(left)); |
| 1814 if (instr->can_deopt()) { | 1814 if (instr->can_deopt()) { |
| 1815 __ test(ToRegister(left), ToRegister(left)); | 1815 __ test(ToRegister(left), ToRegister(left)); |
| 1816 DeoptimizeIf(sign, instr->environment()); | 1816 DeoptimizeIf(sign, instr->environment()); |
| 1817 } | 1817 } |
| 1818 break; | 1818 break; |
| 1819 case Token::SAR: | 1819 case Token::SAR: |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1876 UNREACHABLE(); | 1876 UNREACHABLE(); |
| 1877 break; | 1877 break; |
| 1878 } | 1878 } |
| 1879 } | 1879 } |
| 1880 } | 1880 } |
| 1881 | 1881 |
| 1882 | 1882 |
| 1883 void LCodeGen::DoSubI(LSubI* instr) { | 1883 void LCodeGen::DoSubI(LSubI* instr) { |
| 1884 LOperand* left = instr->left(); | 1884 LOperand* left = instr->left(); |
| 1885 LOperand* right = instr->right(); | 1885 LOperand* right = instr->right(); |
| 1886 ASSERT(left->Equals(instr->result())); | 1886 DCHECK(left->Equals(instr->result())); |
| 1887 | 1887 |
| 1888 if (right->IsConstantOperand()) { | 1888 if (right->IsConstantOperand()) { |
| 1889 __ sub(ToOperand(left), | 1889 __ sub(ToOperand(left), |
| 1890 ToImmediate(right, instr->hydrogen()->representation())); | 1890 ToImmediate(right, instr->hydrogen()->representation())); |
| 1891 } else { | 1891 } else { |
| 1892 __ sub(ToRegister(left), ToOperand(right)); | 1892 __ sub(ToRegister(left), ToOperand(right)); |
| 1893 } | 1893 } |
| 1894 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1894 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1895 DeoptimizeIf(overflow, instr->environment()); | 1895 DeoptimizeIf(overflow, instr->environment()); |
| 1896 } | 1896 } |
| 1897 } | 1897 } |
| 1898 | 1898 |
| 1899 | 1899 |
| 1900 void LCodeGen::DoConstantI(LConstantI* instr) { | 1900 void LCodeGen::DoConstantI(LConstantI* instr) { |
| 1901 __ Move(ToRegister(instr->result()), Immediate(instr->value())); | 1901 __ Move(ToRegister(instr->result()), Immediate(instr->value())); |
| 1902 } | 1902 } |
| 1903 | 1903 |
| 1904 | 1904 |
| 1905 void LCodeGen::DoConstantS(LConstantS* instr) { | 1905 void LCodeGen::DoConstantS(LConstantS* instr) { |
| 1906 __ Move(ToRegister(instr->result()), Immediate(instr->value())); | 1906 __ Move(ToRegister(instr->result()), Immediate(instr->value())); |
| 1907 } | 1907 } |
| 1908 | 1908 |
| 1909 | 1909 |
| 1910 void LCodeGen::DoConstantD(LConstantD* instr) { | 1910 void LCodeGen::DoConstantD(LConstantD* instr) { |
| 1911 double v = instr->value(); | 1911 double v = instr->value(); |
| 1912 uint64_t int_val = BitCast<uint64_t, double>(v); | 1912 uint64_t int_val = BitCast<uint64_t, double>(v); |
| 1913 int32_t lower = static_cast<int32_t>(int_val); | 1913 int32_t lower = static_cast<int32_t>(int_val); |
| 1914 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); | 1914 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); |
| 1915 ASSERT(instr->result()->IsDoubleRegister()); | 1915 DCHECK(instr->result()->IsDoubleRegister()); |
| 1916 | 1916 |
| 1917 __ push(Immediate(upper)); | 1917 __ push(Immediate(upper)); |
| 1918 __ push(Immediate(lower)); | 1918 __ push(Immediate(lower)); |
| 1919 X87Register reg = ToX87Register(instr->result()); | 1919 X87Register reg = ToX87Register(instr->result()); |
| 1920 X87Mov(reg, Operand(esp, 0)); | 1920 X87Mov(reg, Operand(esp, 0)); |
| 1921 __ add(Operand(esp), Immediate(kDoubleSize)); | 1921 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 1922 } | 1922 } |
| 1923 | 1923 |
| 1924 | 1924 |
| 1925 void LCodeGen::DoConstantE(LConstantE* instr) { | 1925 void LCodeGen::DoConstantE(LConstantE* instr) { |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1941 __ EnumLength(result, map); | 1941 __ EnumLength(result, map); |
| 1942 } | 1942 } |
| 1943 | 1943 |
| 1944 | 1944 |
| 1945 void LCodeGen::DoDateField(LDateField* instr) { | 1945 void LCodeGen::DoDateField(LDateField* instr) { |
| 1946 Register object = ToRegister(instr->date()); | 1946 Register object = ToRegister(instr->date()); |
| 1947 Register result = ToRegister(instr->result()); | 1947 Register result = ToRegister(instr->result()); |
| 1948 Register scratch = ToRegister(instr->temp()); | 1948 Register scratch = ToRegister(instr->temp()); |
| 1949 Smi* index = instr->index(); | 1949 Smi* index = instr->index(); |
| 1950 Label runtime, done; | 1950 Label runtime, done; |
| 1951 ASSERT(object.is(result)); | 1951 DCHECK(object.is(result)); |
| 1952 ASSERT(object.is(eax)); | 1952 DCHECK(object.is(eax)); |
| 1953 | 1953 |
| 1954 __ test(object, Immediate(kSmiTagMask)); | 1954 __ test(object, Immediate(kSmiTagMask)); |
| 1955 DeoptimizeIf(zero, instr->environment()); | 1955 DeoptimizeIf(zero, instr->environment()); |
| 1956 __ CmpObjectType(object, JS_DATE_TYPE, scratch); | 1956 __ CmpObjectType(object, JS_DATE_TYPE, scratch); |
| 1957 DeoptimizeIf(not_equal, instr->environment()); | 1957 DeoptimizeIf(not_equal, instr->environment()); |
| 1958 | 1958 |
| 1959 if (index->value() == 0) { | 1959 if (index->value() == 0) { |
| 1960 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); | 1960 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); |
| 1961 } else { | 1961 } else { |
| 1962 if (index->value() < JSDate::kFirstUncachedField) { | 1962 if (index->value() < JSDate::kFirstUncachedField) { |
| (...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2037 int encoding_mask = | 2037 int encoding_mask = |
| 2038 instr->hydrogen()->encoding() == String::ONE_BYTE_ENCODING | 2038 instr->hydrogen()->encoding() == String::ONE_BYTE_ENCODING |
| 2039 ? one_byte_seq_type : two_byte_seq_type; | 2039 ? one_byte_seq_type : two_byte_seq_type; |
| 2040 __ EmitSeqStringSetCharCheck(string, index, value, encoding_mask); | 2040 __ EmitSeqStringSetCharCheck(string, index, value, encoding_mask); |
| 2041 } | 2041 } |
| 2042 | 2042 |
| 2043 Operand operand = BuildSeqStringOperand(string, instr->index(), encoding); | 2043 Operand operand = BuildSeqStringOperand(string, instr->index(), encoding); |
| 2044 if (instr->value()->IsConstantOperand()) { | 2044 if (instr->value()->IsConstantOperand()) { |
| 2045 int value = ToRepresentation(LConstantOperand::cast(instr->value()), | 2045 int value = ToRepresentation(LConstantOperand::cast(instr->value()), |
| 2046 Representation::Integer32()); | 2046 Representation::Integer32()); |
| 2047 ASSERT_LE(0, value); | 2047 DCHECK_LE(0, value); |
| 2048 if (encoding == String::ONE_BYTE_ENCODING) { | 2048 if (encoding == String::ONE_BYTE_ENCODING) { |
| 2049 ASSERT_LE(value, String::kMaxOneByteCharCode); | 2049 DCHECK_LE(value, String::kMaxOneByteCharCode); |
| 2050 __ mov_b(operand, static_cast<int8_t>(value)); | 2050 __ mov_b(operand, static_cast<int8_t>(value)); |
| 2051 } else { | 2051 } else { |
| 2052 ASSERT_LE(value, String::kMaxUtf16CodeUnit); | 2052 DCHECK_LE(value, String::kMaxUtf16CodeUnit); |
| 2053 __ mov_w(operand, static_cast<int16_t>(value)); | 2053 __ mov_w(operand, static_cast<int16_t>(value)); |
| 2054 } | 2054 } |
| 2055 } else { | 2055 } else { |
| 2056 Register value = ToRegister(instr->value()); | 2056 Register value = ToRegister(instr->value()); |
| 2057 if (encoding == String::ONE_BYTE_ENCODING) { | 2057 if (encoding == String::ONE_BYTE_ENCODING) { |
| 2058 __ mov_b(operand, value); | 2058 __ mov_b(operand, value); |
| 2059 } else { | 2059 } else { |
| 2060 __ mov_w(operand, value); | 2060 __ mov_w(operand, value); |
| 2061 } | 2061 } |
| 2062 } | 2062 } |
| (...skipping 23 matching lines...) Expand all Loading... |
| 2086 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 2086 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 2087 DeoptimizeIf(overflow, instr->environment()); | 2087 DeoptimizeIf(overflow, instr->environment()); |
| 2088 } | 2088 } |
| 2089 } | 2089 } |
| 2090 } | 2090 } |
| 2091 | 2091 |
| 2092 | 2092 |
| 2093 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 2093 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
| 2094 LOperand* left = instr->left(); | 2094 LOperand* left = instr->left(); |
| 2095 LOperand* right = instr->right(); | 2095 LOperand* right = instr->right(); |
| 2096 ASSERT(left->Equals(instr->result())); | 2096 DCHECK(left->Equals(instr->result())); |
| 2097 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 2097 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
| 2098 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { | 2098 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { |
| 2099 Label return_left; | 2099 Label return_left; |
| 2100 Condition condition = (operation == HMathMinMax::kMathMin) | 2100 Condition condition = (operation == HMathMinMax::kMathMin) |
| 2101 ? less_equal | 2101 ? less_equal |
| 2102 : greater_equal; | 2102 : greater_equal; |
| 2103 if (right->IsConstantOperand()) { | 2103 if (right->IsConstantOperand()) { |
| 2104 Operand left_op = ToOperand(left); | 2104 Operand left_op = ToOperand(left); |
| 2105 Immediate immediate = ToImmediate(LConstantOperand::cast(instr->right()), | 2105 Immediate immediate = ToImmediate(LConstantOperand::cast(instr->right()), |
| 2106 instr->hydrogen()->representation()); | 2106 instr->hydrogen()->representation()); |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2141 break; | 2141 break; |
| 2142 case Token::DIV: | 2142 case Token::DIV: |
| 2143 __ fdiv_i(1); | 2143 __ fdiv_i(1); |
| 2144 break; | 2144 break; |
| 2145 case Token::MOD: { | 2145 case Token::MOD: { |
| 2146 // Pass two doubles as arguments on the stack. | 2146 // Pass two doubles as arguments on the stack. |
| 2147 __ PrepareCallCFunction(4, eax); | 2147 __ PrepareCallCFunction(4, eax); |
| 2148 X87Mov(Operand(esp, 1 * kDoubleSize), right); | 2148 X87Mov(Operand(esp, 1 * kDoubleSize), right); |
| 2149 X87Mov(Operand(esp, 0), left); | 2149 X87Mov(Operand(esp, 0), left); |
| 2150 X87Free(right); | 2150 X87Free(right); |
| 2151 ASSERT(left.is(result)); | 2151 DCHECK(left.is(result)); |
| 2152 X87PrepareToWrite(result); | 2152 X87PrepareToWrite(result); |
| 2153 __ CallCFunction( | 2153 __ CallCFunction( |
| 2154 ExternalReference::mod_two_doubles_operation(isolate()), | 2154 ExternalReference::mod_two_doubles_operation(isolate()), |
| 2155 4); | 2155 4); |
| 2156 | 2156 |
| 2157 // Return value is in st(0) on ia32. | 2157 // Return value is in st(0) on ia32. |
| 2158 X87CommitWrite(result); | 2158 X87CommitWrite(result); |
| 2159 break; | 2159 break; |
| 2160 } | 2160 } |
| 2161 default: | 2161 default: |
| 2162 UNREACHABLE(); | 2162 UNREACHABLE(); |
| 2163 break; | 2163 break; |
| 2164 } | 2164 } |
| 2165 } | 2165 } |
| 2166 | 2166 |
| 2167 | 2167 |
| 2168 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 2168 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 2169 ASSERT(ToRegister(instr->context()).is(esi)); | 2169 DCHECK(ToRegister(instr->context()).is(esi)); |
| 2170 ASSERT(ToRegister(instr->left()).is(edx)); | 2170 DCHECK(ToRegister(instr->left()).is(edx)); |
| 2171 ASSERT(ToRegister(instr->right()).is(eax)); | 2171 DCHECK(ToRegister(instr->right()).is(eax)); |
| 2172 ASSERT(ToRegister(instr->result()).is(eax)); | 2172 DCHECK(ToRegister(instr->result()).is(eax)); |
| 2173 | 2173 |
| 2174 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); | 2174 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); |
| 2175 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2175 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 2176 } | 2176 } |
| 2177 | 2177 |
| 2178 | 2178 |
| 2179 template<class InstrType> | 2179 template<class InstrType> |
| 2180 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { | 2180 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { |
| 2181 int left_block = instr->TrueDestination(chunk_); | 2181 int left_block = instr->TrueDestination(chunk_); |
| 2182 int right_block = instr->FalseDestination(chunk_); | 2182 int right_block = instr->FalseDestination(chunk_); |
| (...skipping 26 matching lines...) Expand all Loading... |
| 2209 | 2209 |
| 2210 void LCodeGen::DoBranch(LBranch* instr) { | 2210 void LCodeGen::DoBranch(LBranch* instr) { |
| 2211 Representation r = instr->hydrogen()->value()->representation(); | 2211 Representation r = instr->hydrogen()->value()->representation(); |
| 2212 if (r.IsSmiOrInteger32()) { | 2212 if (r.IsSmiOrInteger32()) { |
| 2213 Register reg = ToRegister(instr->value()); | 2213 Register reg = ToRegister(instr->value()); |
| 2214 __ test(reg, Operand(reg)); | 2214 __ test(reg, Operand(reg)); |
| 2215 EmitBranch(instr, not_zero); | 2215 EmitBranch(instr, not_zero); |
| 2216 } else if (r.IsDouble()) { | 2216 } else if (r.IsDouble()) { |
| 2217 UNREACHABLE(); | 2217 UNREACHABLE(); |
| 2218 } else { | 2218 } else { |
| 2219 ASSERT(r.IsTagged()); | 2219 DCHECK(r.IsTagged()); |
| 2220 Register reg = ToRegister(instr->value()); | 2220 Register reg = ToRegister(instr->value()); |
| 2221 HType type = instr->hydrogen()->value()->type(); | 2221 HType type = instr->hydrogen()->value()->type(); |
| 2222 if (type.IsBoolean()) { | 2222 if (type.IsBoolean()) { |
| 2223 ASSERT(!info()->IsStub()); | 2223 DCHECK(!info()->IsStub()); |
| 2224 __ cmp(reg, factory()->true_value()); | 2224 __ cmp(reg, factory()->true_value()); |
| 2225 EmitBranch(instr, equal); | 2225 EmitBranch(instr, equal); |
| 2226 } else if (type.IsSmi()) { | 2226 } else if (type.IsSmi()) { |
| 2227 ASSERT(!info()->IsStub()); | 2227 DCHECK(!info()->IsStub()); |
| 2228 __ test(reg, Operand(reg)); | 2228 __ test(reg, Operand(reg)); |
| 2229 EmitBranch(instr, not_equal); | 2229 EmitBranch(instr, not_equal); |
| 2230 } else if (type.IsJSArray()) { | 2230 } else if (type.IsJSArray()) { |
| 2231 ASSERT(!info()->IsStub()); | 2231 DCHECK(!info()->IsStub()); |
| 2232 EmitBranch(instr, no_condition); | 2232 EmitBranch(instr, no_condition); |
| 2233 } else if (type.IsHeapNumber()) { | 2233 } else if (type.IsHeapNumber()) { |
| 2234 UNREACHABLE(); | 2234 UNREACHABLE(); |
| 2235 } else if (type.IsString()) { | 2235 } else if (type.IsString()) { |
| 2236 ASSERT(!info()->IsStub()); | 2236 DCHECK(!info()->IsStub()); |
| 2237 __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0)); | 2237 __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0)); |
| 2238 EmitBranch(instr, not_equal); | 2238 EmitBranch(instr, not_equal); |
| 2239 } else { | 2239 } else { |
| 2240 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); | 2240 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
| 2241 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); | 2241 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); |
| 2242 | 2242 |
| 2243 if (expected.Contains(ToBooleanStub::UNDEFINED)) { | 2243 if (expected.Contains(ToBooleanStub::UNDEFINED)) { |
| 2244 // undefined -> false. | 2244 // undefined -> false. |
| 2245 __ cmp(reg, factory()->undefined_value()); | 2245 __ cmp(reg, factory()->undefined_value()); |
| 2246 __ j(equal, instr->FalseLabel(chunk_)); | 2246 __ j(equal, instr->FalseLabel(chunk_)); |
| (...skipping 19 matching lines...) Expand all Loading... |
| 2266 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2266 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
| 2267 } else if (expected.NeedsMap()) { | 2267 } else if (expected.NeedsMap()) { |
| 2268 // If we need a map later and have a Smi -> deopt. | 2268 // If we need a map later and have a Smi -> deopt. |
| 2269 __ test(reg, Immediate(kSmiTagMask)); | 2269 __ test(reg, Immediate(kSmiTagMask)); |
| 2270 DeoptimizeIf(zero, instr->environment()); | 2270 DeoptimizeIf(zero, instr->environment()); |
| 2271 } | 2271 } |
| 2272 | 2272 |
| 2273 Register map = no_reg; // Keep the compiler happy. | 2273 Register map = no_reg; // Keep the compiler happy. |
| 2274 if (expected.NeedsMap()) { | 2274 if (expected.NeedsMap()) { |
| 2275 map = ToRegister(instr->temp()); | 2275 map = ToRegister(instr->temp()); |
| 2276 ASSERT(!map.is(reg)); | 2276 DCHECK(!map.is(reg)); |
| 2277 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); | 2277 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); |
| 2278 | 2278 |
| 2279 if (expected.CanBeUndetectable()) { | 2279 if (expected.CanBeUndetectable()) { |
| 2280 // Undetectable -> false. | 2280 // Undetectable -> false. |
| 2281 __ test_b(FieldOperand(map, Map::kBitFieldOffset), | 2281 __ test_b(FieldOperand(map, Map::kBitFieldOffset), |
| 2282 1 << Map::kIsUndetectable); | 2282 1 << Map::kIsUndetectable); |
| 2283 __ j(not_zero, instr->FalseLabel(chunk_)); | 2283 __ j(not_zero, instr->FalseLabel(chunk_)); |
| 2284 } | 2284 } |
| 2285 } | 2285 } |
| 2286 | 2286 |
| (...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2460 | 2460 |
| 2461 __ add(esp, Immediate(kDoubleSize)); | 2461 __ add(esp, Immediate(kDoubleSize)); |
| 2462 int offset = sizeof(kHoleNanUpper32); | 2462 int offset = sizeof(kHoleNanUpper32); |
| 2463 __ cmp(MemOperand(esp, -offset), Immediate(kHoleNanUpper32)); | 2463 __ cmp(MemOperand(esp, -offset), Immediate(kHoleNanUpper32)); |
| 2464 EmitBranch(instr, equal); | 2464 EmitBranch(instr, equal); |
| 2465 } | 2465 } |
| 2466 | 2466 |
| 2467 | 2467 |
| 2468 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) { | 2468 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) { |
| 2469 Representation rep = instr->hydrogen()->value()->representation(); | 2469 Representation rep = instr->hydrogen()->value()->representation(); |
| 2470 ASSERT(!rep.IsInteger32()); | 2470 DCHECK(!rep.IsInteger32()); |
| 2471 | 2471 |
| 2472 if (rep.IsDouble()) { | 2472 if (rep.IsDouble()) { |
| 2473 UNREACHABLE(); | 2473 UNREACHABLE(); |
| 2474 } else { | 2474 } else { |
| 2475 Register value = ToRegister(instr->value()); | 2475 Register value = ToRegister(instr->value()); |
| 2476 Handle<Map> map = masm()->isolate()->factory()->heap_number_map(); | 2476 Handle<Map> map = masm()->isolate()->factory()->heap_number_map(); |
| 2477 __ CheckMap(value, map, instr->FalseLabel(chunk()), DO_SMI_CHECK); | 2477 __ CheckMap(value, map, instr->FalseLabel(chunk()), DO_SMI_CHECK); |
| 2478 __ cmp(FieldOperand(value, HeapNumber::kExponentOffset), | 2478 __ cmp(FieldOperand(value, HeapNumber::kExponentOffset), |
| 2479 Immediate(0x1)); | 2479 Immediate(0x1)); |
| 2480 EmitFalseBranch(instr, no_overflow); | 2480 EmitFalseBranch(instr, no_overflow); |
| (...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2601 __ test(eax, Operand(eax)); | 2601 __ test(eax, Operand(eax)); |
| 2602 | 2602 |
| 2603 EmitBranch(instr, condition); | 2603 EmitBranch(instr, condition); |
| 2604 } | 2604 } |
| 2605 | 2605 |
| 2606 | 2606 |
| 2607 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) { | 2607 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) { |
| 2608 InstanceType from = instr->from(); | 2608 InstanceType from = instr->from(); |
| 2609 InstanceType to = instr->to(); | 2609 InstanceType to = instr->to(); |
| 2610 if (from == FIRST_TYPE) return to; | 2610 if (from == FIRST_TYPE) return to; |
| 2611 ASSERT(from == to || to == LAST_TYPE); | 2611 DCHECK(from == to || to == LAST_TYPE); |
| 2612 return from; | 2612 return from; |
| 2613 } | 2613 } |
| 2614 | 2614 |
| 2615 | 2615 |
| 2616 static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) { | 2616 static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) { |
| 2617 InstanceType from = instr->from(); | 2617 InstanceType from = instr->from(); |
| 2618 InstanceType to = instr->to(); | 2618 InstanceType to = instr->to(); |
| 2619 if (from == to) return equal; | 2619 if (from == to) return equal; |
| 2620 if (to == LAST_TYPE) return above_equal; | 2620 if (to == LAST_TYPE) return above_equal; |
| 2621 if (from == FIRST_TYPE) return below_equal; | 2621 if (from == FIRST_TYPE) return below_equal; |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2659 | 2659 |
| 2660 | 2660 |
| 2661 // Branches to a label or falls through with the answer in the z flag. Trashes | 2661 // Branches to a label or falls through with the answer in the z flag. Trashes |
| 2662 // the temp registers, but not the input. | 2662 // the temp registers, but not the input. |
| 2663 void LCodeGen::EmitClassOfTest(Label* is_true, | 2663 void LCodeGen::EmitClassOfTest(Label* is_true, |
| 2664 Label* is_false, | 2664 Label* is_false, |
| 2665 Handle<String>class_name, | 2665 Handle<String>class_name, |
| 2666 Register input, | 2666 Register input, |
| 2667 Register temp, | 2667 Register temp, |
| 2668 Register temp2) { | 2668 Register temp2) { |
| 2669 ASSERT(!input.is(temp)); | 2669 DCHECK(!input.is(temp)); |
| 2670 ASSERT(!input.is(temp2)); | 2670 DCHECK(!input.is(temp2)); |
| 2671 ASSERT(!temp.is(temp2)); | 2671 DCHECK(!temp.is(temp2)); |
| 2672 __ JumpIfSmi(input, is_false); | 2672 __ JumpIfSmi(input, is_false); |
| 2673 | 2673 |
| 2674 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Function"))) { | 2674 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Function"))) { |
| 2675 // Assuming the following assertions, we can use the same compares to test | 2675 // Assuming the following assertions, we can use the same compares to test |
| 2676 // for both being a function type and being in the object type range. | 2676 // for both being a function type and being in the object type range. |
| 2677 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | 2677 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); |
| 2678 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == | 2678 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == |
| 2679 FIRST_SPEC_OBJECT_TYPE + 1); | 2679 FIRST_SPEC_OBJECT_TYPE + 1); |
| 2680 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == | 2680 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == |
| 2681 LAST_SPEC_OBJECT_TYPE - 1); | 2681 LAST_SPEC_OBJECT_TYPE - 1); |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2739 | 2739 |
| 2740 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { | 2740 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
| 2741 Register reg = ToRegister(instr->value()); | 2741 Register reg = ToRegister(instr->value()); |
| 2742 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 2742 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
| 2743 EmitBranch(instr, equal); | 2743 EmitBranch(instr, equal); |
| 2744 } | 2744 } |
| 2745 | 2745 |
| 2746 | 2746 |
| 2747 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2747 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2748 // Object and function are in fixed registers defined by the stub. | 2748 // Object and function are in fixed registers defined by the stub. |
| 2749 ASSERT(ToRegister(instr->context()).is(esi)); | 2749 DCHECK(ToRegister(instr->context()).is(esi)); |
| 2750 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); | 2750 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); |
| 2751 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2751 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 2752 | 2752 |
| 2753 Label true_value, done; | 2753 Label true_value, done; |
| 2754 __ test(eax, Operand(eax)); | 2754 __ test(eax, Operand(eax)); |
| 2755 __ j(zero, &true_value, Label::kNear); | 2755 __ j(zero, &true_value, Label::kNear); |
| 2756 __ mov(ToRegister(instr->result()), factory()->false_value()); | 2756 __ mov(ToRegister(instr->result()), factory()->false_value()); |
| 2757 __ jmp(&done, Label::kNear); | 2757 __ jmp(&done, Label::kNear); |
| 2758 __ bind(&true_value); | 2758 __ bind(&true_value); |
| 2759 __ mov(ToRegister(instr->result()), factory()->true_value()); | 2759 __ mov(ToRegister(instr->result()), factory()->true_value()); |
| (...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2836 flags | InstanceofStub::kCallSiteInlineCheck); | 2836 flags | InstanceofStub::kCallSiteInlineCheck); |
| 2837 flags = static_cast<InstanceofStub::Flags>( | 2837 flags = static_cast<InstanceofStub::Flags>( |
| 2838 flags | InstanceofStub::kReturnTrueFalseObject); | 2838 flags | InstanceofStub::kReturnTrueFalseObject); |
| 2839 InstanceofStub stub(isolate(), flags); | 2839 InstanceofStub stub(isolate(), flags); |
| 2840 | 2840 |
| 2841 // Get the temp register reserved by the instruction. This needs to be a | 2841 // Get the temp register reserved by the instruction. This needs to be a |
| 2842 // register which is pushed last by PushSafepointRegisters as top of the | 2842 // register which is pushed last by PushSafepointRegisters as top of the |
| 2843 // stack is used to pass the offset to the location of the map check to | 2843 // stack is used to pass the offset to the location of the map check to |
| 2844 // the stub. | 2844 // the stub. |
| 2845 Register temp = ToRegister(instr->temp()); | 2845 Register temp = ToRegister(instr->temp()); |
| 2846 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); | 2846 DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); |
| 2847 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | 2847 __ LoadHeapObject(InstanceofStub::right(), instr->function()); |
| 2848 static const int kAdditionalDelta = 13; | 2848 static const int kAdditionalDelta = 13; |
| 2849 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 2849 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
| 2850 __ mov(temp, Immediate(delta)); | 2850 __ mov(temp, Immediate(delta)); |
| 2851 __ StoreToSafepointRegisterSlot(temp, temp); | 2851 __ StoreToSafepointRegisterSlot(temp, temp); |
| 2852 CallCodeGeneric(stub.GetCode(), | 2852 CallCodeGeneric(stub.GetCode(), |
| 2853 RelocInfo::CODE_TARGET, | 2853 RelocInfo::CODE_TARGET, |
| 2854 instr, | 2854 instr, |
| 2855 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 2855 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 2856 // Get the deoptimization index of the LLazyBailout-environment that | 2856 // Get the deoptimization index of the LLazyBailout-environment that |
| (...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2892 Immediate(kAlignmentZapValue)); | 2892 Immediate(kAlignmentZapValue)); |
| 2893 __ Assert(equal, kExpectedAlignmentMarker); | 2893 __ Assert(equal, kExpectedAlignmentMarker); |
| 2894 } | 2894 } |
| 2895 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); | 2895 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); |
| 2896 } else { | 2896 } else { |
| 2897 Register reg = ToRegister(instr->parameter_count()); | 2897 Register reg = ToRegister(instr->parameter_count()); |
| 2898 // The argument count parameter is a smi | 2898 // The argument count parameter is a smi |
| 2899 __ SmiUntag(reg); | 2899 __ SmiUntag(reg); |
| 2900 Register return_addr_reg = reg.is(ecx) ? ebx : ecx; | 2900 Register return_addr_reg = reg.is(ecx) ? ebx : ecx; |
| 2901 if (dynamic_frame_alignment && FLAG_debug_code) { | 2901 if (dynamic_frame_alignment && FLAG_debug_code) { |
| 2902 ASSERT(extra_value_count == 2); | 2902 DCHECK(extra_value_count == 2); |
| 2903 __ cmp(Operand(esp, reg, times_pointer_size, | 2903 __ cmp(Operand(esp, reg, times_pointer_size, |
| 2904 extra_value_count * kPointerSize), | 2904 extra_value_count * kPointerSize), |
| 2905 Immediate(kAlignmentZapValue)); | 2905 Immediate(kAlignmentZapValue)); |
| 2906 __ Assert(equal, kExpectedAlignmentMarker); | 2906 __ Assert(equal, kExpectedAlignmentMarker); |
| 2907 } | 2907 } |
| 2908 | 2908 |
| 2909 // emit code to restore stack based on instr->parameter_count() | 2909 // emit code to restore stack based on instr->parameter_count() |
| 2910 __ pop(return_addr_reg); // save return address | 2910 __ pop(return_addr_reg); // save return address |
| 2911 if (dynamic_frame_alignment) { | 2911 if (dynamic_frame_alignment) { |
| 2912 __ inc(reg); // 1 more for alignment | 2912 __ inc(reg); // 1 more for alignment |
| (...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2959 Register result = ToRegister(instr->result()); | 2959 Register result = ToRegister(instr->result()); |
| 2960 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle())); | 2960 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle())); |
| 2961 if (instr->hydrogen()->RequiresHoleCheck()) { | 2961 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2962 __ cmp(result, factory()->the_hole_value()); | 2962 __ cmp(result, factory()->the_hole_value()); |
| 2963 DeoptimizeIf(equal, instr->environment()); | 2963 DeoptimizeIf(equal, instr->environment()); |
| 2964 } | 2964 } |
| 2965 } | 2965 } |
| 2966 | 2966 |
| 2967 | 2967 |
| 2968 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { | 2968 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { |
| 2969 ASSERT(ToRegister(instr->context()).is(esi)); | 2969 DCHECK(ToRegister(instr->context()).is(esi)); |
| 2970 ASSERT(ToRegister(instr->global_object()).is(LoadIC::ReceiverRegister())); | 2970 DCHECK(ToRegister(instr->global_object()).is(LoadIC::ReceiverRegister())); |
| 2971 ASSERT(ToRegister(instr->result()).is(eax)); | 2971 DCHECK(ToRegister(instr->result()).is(eax)); |
| 2972 | 2972 |
| 2973 __ mov(LoadIC::NameRegister(), instr->name()); | 2973 __ mov(LoadIC::NameRegister(), instr->name()); |
| 2974 if (FLAG_vector_ics) { | 2974 if (FLAG_vector_ics) { |
| 2975 Register vector = ToRegister(instr->temp_vector()); | 2975 Register vector = ToRegister(instr->temp_vector()); |
| 2976 ASSERT(vector.is(LoadIC::VectorRegister())); | 2976 DCHECK(vector.is(LoadIC::VectorRegister())); |
| 2977 __ mov(vector, instr->hydrogen()->feedback_vector()); | 2977 __ mov(vector, instr->hydrogen()->feedback_vector()); |
| 2978 // No need to allocate this register. | 2978 // No need to allocate this register. |
| 2979 ASSERT(LoadIC::SlotRegister().is(eax)); | 2979 DCHECK(LoadIC::SlotRegister().is(eax)); |
| 2980 __ mov(LoadIC::SlotRegister(), | 2980 __ mov(LoadIC::SlotRegister(), |
| 2981 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); | 2981 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); |
| 2982 } | 2982 } |
| 2983 ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL; | 2983 ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL; |
| 2984 Handle<Code> ic = LoadIC::initialize_stub(isolate(), mode); | 2984 Handle<Code> ic = LoadIC::initialize_stub(isolate(), mode); |
| 2985 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2985 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2986 } | 2986 } |
| 2987 | 2987 |
| 2988 | 2988 |
| 2989 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { | 2989 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { |
| (...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3082 Register result = ToRegister(instr->result()); | 3082 Register result = ToRegister(instr->result()); |
| 3083 if (!access.IsInobject()) { | 3083 if (!access.IsInobject()) { |
| 3084 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 3084 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 3085 object = result; | 3085 object = result; |
| 3086 } | 3086 } |
| 3087 __ Load(result, FieldOperand(object, offset), access.representation()); | 3087 __ Load(result, FieldOperand(object, offset), access.representation()); |
| 3088 } | 3088 } |
| 3089 | 3089 |
| 3090 | 3090 |
| 3091 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { | 3091 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { |
| 3092 ASSERT(!operand->IsDoubleRegister()); | 3092 DCHECK(!operand->IsDoubleRegister()); |
| 3093 if (operand->IsConstantOperand()) { | 3093 if (operand->IsConstantOperand()) { |
| 3094 Handle<Object> object = ToHandle(LConstantOperand::cast(operand)); | 3094 Handle<Object> object = ToHandle(LConstantOperand::cast(operand)); |
| 3095 AllowDeferredHandleDereference smi_check; | 3095 AllowDeferredHandleDereference smi_check; |
| 3096 if (object->IsSmi()) { | 3096 if (object->IsSmi()) { |
| 3097 __ Push(Handle<Smi>::cast(object)); | 3097 __ Push(Handle<Smi>::cast(object)); |
| 3098 } else { | 3098 } else { |
| 3099 __ PushHeapObject(Handle<HeapObject>::cast(object)); | 3099 __ PushHeapObject(Handle<HeapObject>::cast(object)); |
| 3100 } | 3100 } |
| 3101 } else if (operand->IsRegister()) { | 3101 } else if (operand->IsRegister()) { |
| 3102 __ push(ToRegister(operand)); | 3102 __ push(ToRegister(operand)); |
| 3103 } else { | 3103 } else { |
| 3104 __ push(ToOperand(operand)); | 3104 __ push(ToOperand(operand)); |
| 3105 } | 3105 } |
| 3106 } | 3106 } |
| 3107 | 3107 |
| 3108 | 3108 |
| 3109 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 3109 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 3110 ASSERT(ToRegister(instr->context()).is(esi)); | 3110 DCHECK(ToRegister(instr->context()).is(esi)); |
| 3111 ASSERT(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); | 3111 DCHECK(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); |
| 3112 ASSERT(ToRegister(instr->result()).is(eax)); | 3112 DCHECK(ToRegister(instr->result()).is(eax)); |
| 3113 | 3113 |
| 3114 __ mov(LoadIC::NameRegister(), instr->name()); | 3114 __ mov(LoadIC::NameRegister(), instr->name()); |
| 3115 if (FLAG_vector_ics) { | 3115 if (FLAG_vector_ics) { |
| 3116 Register vector = ToRegister(instr->temp_vector()); | 3116 Register vector = ToRegister(instr->temp_vector()); |
| 3117 ASSERT(vector.is(LoadIC::VectorRegister())); | 3117 DCHECK(vector.is(LoadIC::VectorRegister())); |
| 3118 __ mov(vector, instr->hydrogen()->feedback_vector()); | 3118 __ mov(vector, instr->hydrogen()->feedback_vector()); |
| 3119 // No need to allocate this register. | 3119 // No need to allocate this register. |
| 3120 ASSERT(LoadIC::SlotRegister().is(eax)); | 3120 DCHECK(LoadIC::SlotRegister().is(eax)); |
| 3121 __ mov(LoadIC::SlotRegister(), | 3121 __ mov(LoadIC::SlotRegister(), |
| 3122 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); | 3122 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); |
| 3123 } | 3123 } |
| 3124 Handle<Code> ic = LoadIC::initialize_stub(isolate(), NOT_CONTEXTUAL); | 3124 Handle<Code> ic = LoadIC::initialize_stub(isolate(), NOT_CONTEXTUAL); |
| 3125 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3125 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3126 } | 3126 } |
| 3127 | 3127 |
| 3128 | 3128 |
| 3129 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 3129 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
| 3130 Register function = ToRegister(instr->function()); | 3130 Register function = ToRegister(instr->function()); |
| (...skipping 200 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3331 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); | 3331 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); |
| 3332 return Operand(elements_pointer_reg, | 3332 return Operand(elements_pointer_reg, |
| 3333 ToRegister(key), | 3333 ToRegister(key), |
| 3334 scale_factor, | 3334 scale_factor, |
| 3335 base_offset); | 3335 base_offset); |
| 3336 } | 3336 } |
| 3337 } | 3337 } |
| 3338 | 3338 |
| 3339 | 3339 |
| 3340 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 3340 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 3341 ASSERT(ToRegister(instr->context()).is(esi)); | 3341 DCHECK(ToRegister(instr->context()).is(esi)); |
| 3342 ASSERT(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); | 3342 DCHECK(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); |
| 3343 ASSERT(ToRegister(instr->key()).is(LoadIC::NameRegister())); | 3343 DCHECK(ToRegister(instr->key()).is(LoadIC::NameRegister())); |
| 3344 | 3344 |
| 3345 if (FLAG_vector_ics) { | 3345 if (FLAG_vector_ics) { |
| 3346 Register vector = ToRegister(instr->temp_vector()); | 3346 Register vector = ToRegister(instr->temp_vector()); |
| 3347 ASSERT(vector.is(LoadIC::VectorRegister())); | 3347 DCHECK(vector.is(LoadIC::VectorRegister())); |
| 3348 __ mov(vector, instr->hydrogen()->feedback_vector()); | 3348 __ mov(vector, instr->hydrogen()->feedback_vector()); |
| 3349 // No need to allocate this register. | 3349 // No need to allocate this register. |
| 3350 ASSERT(LoadIC::SlotRegister().is(eax)); | 3350 DCHECK(LoadIC::SlotRegister().is(eax)); |
| 3351 __ mov(LoadIC::SlotRegister(), | 3351 __ mov(LoadIC::SlotRegister(), |
| 3352 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); | 3352 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); |
| 3353 } | 3353 } |
| 3354 | 3354 |
| 3355 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | 3355 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
| 3356 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3356 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3357 } | 3357 } |
| 3358 | 3358 |
| 3359 | 3359 |
| 3360 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 3360 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| (...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3455 __ mov(receiver, FieldOperand(receiver, proxy_offset)); | 3455 __ mov(receiver, FieldOperand(receiver, proxy_offset)); |
| 3456 __ bind(&receiver_ok); | 3456 __ bind(&receiver_ok); |
| 3457 } | 3457 } |
| 3458 | 3458 |
| 3459 | 3459 |
| 3460 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 3460 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 3461 Register receiver = ToRegister(instr->receiver()); | 3461 Register receiver = ToRegister(instr->receiver()); |
| 3462 Register function = ToRegister(instr->function()); | 3462 Register function = ToRegister(instr->function()); |
| 3463 Register length = ToRegister(instr->length()); | 3463 Register length = ToRegister(instr->length()); |
| 3464 Register elements = ToRegister(instr->elements()); | 3464 Register elements = ToRegister(instr->elements()); |
| 3465 ASSERT(receiver.is(eax)); // Used for parameter count. | 3465 DCHECK(receiver.is(eax)); // Used for parameter count. |
| 3466 ASSERT(function.is(edi)); // Required by InvokeFunction. | 3466 DCHECK(function.is(edi)); // Required by InvokeFunction. |
| 3467 ASSERT(ToRegister(instr->result()).is(eax)); | 3467 DCHECK(ToRegister(instr->result()).is(eax)); |
| 3468 | 3468 |
| 3469 // Copy the arguments to this function possibly from the | 3469 // Copy the arguments to this function possibly from the |
| 3470 // adaptor frame below it. | 3470 // adaptor frame below it. |
| 3471 const uint32_t kArgumentsLimit = 1 * KB; | 3471 const uint32_t kArgumentsLimit = 1 * KB; |
| 3472 __ cmp(length, kArgumentsLimit); | 3472 __ cmp(length, kArgumentsLimit); |
| 3473 DeoptimizeIf(above, instr->environment()); | 3473 DeoptimizeIf(above, instr->environment()); |
| 3474 | 3474 |
| 3475 __ push(receiver); | 3475 __ push(receiver); |
| 3476 __ mov(receiver, length); | 3476 __ mov(receiver, length); |
| 3477 | 3477 |
| 3478 // Loop through the arguments pushing them onto the execution | 3478 // Loop through the arguments pushing them onto the execution |
| 3479 // stack. | 3479 // stack. |
| 3480 Label invoke, loop; | 3480 Label invoke, loop; |
| 3481 // length is a small non-negative integer, due to the test above. | 3481 // length is a small non-negative integer, due to the test above. |
| 3482 __ test(length, Operand(length)); | 3482 __ test(length, Operand(length)); |
| 3483 __ j(zero, &invoke, Label::kNear); | 3483 __ j(zero, &invoke, Label::kNear); |
| 3484 __ bind(&loop); | 3484 __ bind(&loop); |
| 3485 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 3485 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); |
| 3486 __ dec(length); | 3486 __ dec(length); |
| 3487 __ j(not_zero, &loop); | 3487 __ j(not_zero, &loop); |
| 3488 | 3488 |
| 3489 // Invoke the function. | 3489 // Invoke the function. |
| 3490 __ bind(&invoke); | 3490 __ bind(&invoke); |
| 3491 ASSERT(instr->HasPointerMap()); | 3491 DCHECK(instr->HasPointerMap()); |
| 3492 LPointerMap* pointers = instr->pointer_map(); | 3492 LPointerMap* pointers = instr->pointer_map(); |
| 3493 SafepointGenerator safepoint_generator( | 3493 SafepointGenerator safepoint_generator( |
| 3494 this, pointers, Safepoint::kLazyDeopt); | 3494 this, pointers, Safepoint::kLazyDeopt); |
| 3495 ParameterCount actual(eax); | 3495 ParameterCount actual(eax); |
| 3496 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator); | 3496 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator); |
| 3497 } | 3497 } |
| 3498 | 3498 |
| 3499 | 3499 |
| 3500 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { | 3500 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { |
| 3501 __ int3(); | 3501 __ int3(); |
| (...skipping 16 matching lines...) Expand all Loading... |
| 3518 __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3518 __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3519 } | 3519 } |
| 3520 | 3520 |
| 3521 | 3521 |
| 3522 void LCodeGen::DoContext(LContext* instr) { | 3522 void LCodeGen::DoContext(LContext* instr) { |
| 3523 Register result = ToRegister(instr->result()); | 3523 Register result = ToRegister(instr->result()); |
| 3524 if (info()->IsOptimizing()) { | 3524 if (info()->IsOptimizing()) { |
| 3525 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3525 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 3526 } else { | 3526 } else { |
| 3527 // If there is no frame, the context must be in esi. | 3527 // If there is no frame, the context must be in esi. |
| 3528 ASSERT(result.is(esi)); | 3528 DCHECK(result.is(esi)); |
| 3529 } | 3529 } |
| 3530 } | 3530 } |
| 3531 | 3531 |
| 3532 | 3532 |
| 3533 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3533 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
| 3534 ASSERT(ToRegister(instr->context()).is(esi)); | 3534 DCHECK(ToRegister(instr->context()).is(esi)); |
| 3535 __ push(esi); // The context is the first argument. | 3535 __ push(esi); // The context is the first argument. |
| 3536 __ push(Immediate(instr->hydrogen()->pairs())); | 3536 __ push(Immediate(instr->hydrogen()->pairs())); |
| 3537 __ push(Immediate(Smi::FromInt(instr->hydrogen()->flags()))); | 3537 __ push(Immediate(Smi::FromInt(instr->hydrogen()->flags()))); |
| 3538 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3538 CallRuntime(Runtime::kDeclareGlobals, 3, instr); |
| 3539 } | 3539 } |
| 3540 | 3540 |
| 3541 | 3541 |
| 3542 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 3542 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 3543 int formal_parameter_count, | 3543 int formal_parameter_count, |
| 3544 int arity, | 3544 int arity, |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3576 SafepointGenerator generator( | 3576 SafepointGenerator generator( |
| 3577 this, pointers, Safepoint::kLazyDeopt); | 3577 this, pointers, Safepoint::kLazyDeopt); |
| 3578 ParameterCount count(arity); | 3578 ParameterCount count(arity); |
| 3579 ParameterCount expected(formal_parameter_count); | 3579 ParameterCount expected(formal_parameter_count); |
| 3580 __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator); | 3580 __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator); |
| 3581 } | 3581 } |
| 3582 } | 3582 } |
| 3583 | 3583 |
| 3584 | 3584 |
| 3585 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { | 3585 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { |
| 3586 ASSERT(ToRegister(instr->result()).is(eax)); | 3586 DCHECK(ToRegister(instr->result()).is(eax)); |
| 3587 | 3587 |
| 3588 LPointerMap* pointers = instr->pointer_map(); | 3588 LPointerMap* pointers = instr->pointer_map(); |
| 3589 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3589 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
| 3590 | 3590 |
| 3591 if (instr->target()->IsConstantOperand()) { | 3591 if (instr->target()->IsConstantOperand()) { |
| 3592 LConstantOperand* target = LConstantOperand::cast(instr->target()); | 3592 LConstantOperand* target = LConstantOperand::cast(instr->target()); |
| 3593 Handle<Code> code = Handle<Code>::cast(ToHandle(target)); | 3593 Handle<Code> code = Handle<Code>::cast(ToHandle(target)); |
| 3594 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET)); | 3594 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET)); |
| 3595 __ call(code, RelocInfo::CODE_TARGET); | 3595 __ call(code, RelocInfo::CODE_TARGET); |
| 3596 } else { | 3596 } else { |
| 3597 ASSERT(instr->target()->IsRegister()); | 3597 DCHECK(instr->target()->IsRegister()); |
| 3598 Register target = ToRegister(instr->target()); | 3598 Register target = ToRegister(instr->target()); |
| 3599 generator.BeforeCall(__ CallSize(Operand(target))); | 3599 generator.BeforeCall(__ CallSize(Operand(target))); |
| 3600 __ add(target, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 3600 __ add(target, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| 3601 __ call(target); | 3601 __ call(target); |
| 3602 } | 3602 } |
| 3603 generator.AfterCall(); | 3603 generator.AfterCall(); |
| 3604 } | 3604 } |
| 3605 | 3605 |
| 3606 | 3606 |
| 3607 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { | 3607 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { |
| 3608 ASSERT(ToRegister(instr->function()).is(edi)); | 3608 DCHECK(ToRegister(instr->function()).is(edi)); |
| 3609 ASSERT(ToRegister(instr->result()).is(eax)); | 3609 DCHECK(ToRegister(instr->result()).is(eax)); |
| 3610 | 3610 |
| 3611 if (instr->hydrogen()->pass_argument_count()) { | 3611 if (instr->hydrogen()->pass_argument_count()) { |
| 3612 __ mov(eax, instr->arity()); | 3612 __ mov(eax, instr->arity()); |
| 3613 } | 3613 } |
| 3614 | 3614 |
| 3615 // Change context. | 3615 // Change context. |
| 3616 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | 3616 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); |
| 3617 | 3617 |
| 3618 bool is_self_call = false; | 3618 bool is_self_call = false; |
| 3619 if (instr->hydrogen()->function()->IsConstant()) { | 3619 if (instr->hydrogen()->function()->IsConstant()) { |
| (...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3698 const X87Stack& x87_stack) | 3698 const X87Stack& x87_stack) |
| 3699 : LDeferredCode(codegen, x87_stack), instr_(instr) { } | 3699 : LDeferredCode(codegen, x87_stack), instr_(instr) { } |
| 3700 virtual void Generate() V8_OVERRIDE { | 3700 virtual void Generate() V8_OVERRIDE { |
| 3701 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); | 3701 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); |
| 3702 } | 3702 } |
| 3703 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 3703 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
| 3704 private: | 3704 private: |
| 3705 LMathAbs* instr_; | 3705 LMathAbs* instr_; |
| 3706 }; | 3706 }; |
| 3707 | 3707 |
| 3708 ASSERT(instr->value()->Equals(instr->result())); | 3708 DCHECK(instr->value()->Equals(instr->result())); |
| 3709 Representation r = instr->hydrogen()->value()->representation(); | 3709 Representation r = instr->hydrogen()->value()->representation(); |
| 3710 | 3710 |
| 3711 if (r.IsDouble()) { | 3711 if (r.IsDouble()) { |
| 3712 UNIMPLEMENTED(); | 3712 UNIMPLEMENTED(); |
| 3713 } else if (r.IsSmiOrInteger32()) { | 3713 } else if (r.IsSmiOrInteger32()) { |
| 3714 EmitIntegerMathAbs(instr); | 3714 EmitIntegerMathAbs(instr); |
| 3715 } else { // Tagged case. | 3715 } else { // Tagged case. |
| 3716 DeferredMathAbsTaggedHeapNumber* deferred = | 3716 DeferredMathAbsTaggedHeapNumber* deferred = |
| 3717 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr, x87_stack_); | 3717 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr, x87_stack_); |
| 3718 Register input_reg = ToRegister(instr->value()); | 3718 Register input_reg = ToRegister(instr->value()); |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3763 UNIMPLEMENTED(); | 3763 UNIMPLEMENTED(); |
| 3764 } | 3764 } |
| 3765 | 3765 |
| 3766 | 3766 |
| 3767 void LCodeGen::DoMathExp(LMathExp* instr) { | 3767 void LCodeGen::DoMathExp(LMathExp* instr) { |
| 3768 UNIMPLEMENTED(); | 3768 UNIMPLEMENTED(); |
| 3769 } | 3769 } |
| 3770 | 3770 |
| 3771 | 3771 |
| 3772 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3772 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
| 3773 ASSERT(ToRegister(instr->context()).is(esi)); | 3773 DCHECK(ToRegister(instr->context()).is(esi)); |
| 3774 ASSERT(ToRegister(instr->function()).is(edi)); | 3774 DCHECK(ToRegister(instr->function()).is(edi)); |
| 3775 ASSERT(instr->HasPointerMap()); | 3775 DCHECK(instr->HasPointerMap()); |
| 3776 | 3776 |
| 3777 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); | 3777 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); |
| 3778 if (known_function.is_null()) { | 3778 if (known_function.is_null()) { |
| 3779 LPointerMap* pointers = instr->pointer_map(); | 3779 LPointerMap* pointers = instr->pointer_map(); |
| 3780 SafepointGenerator generator( | 3780 SafepointGenerator generator( |
| 3781 this, pointers, Safepoint::kLazyDeopt); | 3781 this, pointers, Safepoint::kLazyDeopt); |
| 3782 ParameterCount count(instr->arity()); | 3782 ParameterCount count(instr->arity()); |
| 3783 __ InvokeFunction(edi, count, CALL_FUNCTION, generator); | 3783 __ InvokeFunction(edi, count, CALL_FUNCTION, generator); |
| 3784 } else { | 3784 } else { |
| 3785 CallKnownFunction(known_function, | 3785 CallKnownFunction(known_function, |
| 3786 instr->hydrogen()->formal_parameter_count(), | 3786 instr->hydrogen()->formal_parameter_count(), |
| 3787 instr->arity(), | 3787 instr->arity(), |
| 3788 instr, | 3788 instr, |
| 3789 EDI_CONTAINS_TARGET); | 3789 EDI_CONTAINS_TARGET); |
| 3790 } | 3790 } |
| 3791 } | 3791 } |
| 3792 | 3792 |
| 3793 | 3793 |
| 3794 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 3794 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 3795 ASSERT(ToRegister(instr->context()).is(esi)); | 3795 DCHECK(ToRegister(instr->context()).is(esi)); |
| 3796 ASSERT(ToRegister(instr->function()).is(edi)); | 3796 DCHECK(ToRegister(instr->function()).is(edi)); |
| 3797 ASSERT(ToRegister(instr->result()).is(eax)); | 3797 DCHECK(ToRegister(instr->result()).is(eax)); |
| 3798 | 3798 |
| 3799 int arity = instr->arity(); | 3799 int arity = instr->arity(); |
| 3800 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); | 3800 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); |
| 3801 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3801 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 3802 } | 3802 } |
| 3803 | 3803 |
| 3804 | 3804 |
| 3805 void LCodeGen::DoCallNew(LCallNew* instr) { | 3805 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 3806 ASSERT(ToRegister(instr->context()).is(esi)); | 3806 DCHECK(ToRegister(instr->context()).is(esi)); |
| 3807 ASSERT(ToRegister(instr->constructor()).is(edi)); | 3807 DCHECK(ToRegister(instr->constructor()).is(edi)); |
| 3808 ASSERT(ToRegister(instr->result()).is(eax)); | 3808 DCHECK(ToRegister(instr->result()).is(eax)); |
| 3809 | 3809 |
| 3810 // No cell in ebx for construct type feedback in optimized code | 3810 // No cell in ebx for construct type feedback in optimized code |
| 3811 __ mov(ebx, isolate()->factory()->undefined_value()); | 3811 __ mov(ebx, isolate()->factory()->undefined_value()); |
| 3812 CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); | 3812 CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); |
| 3813 __ Move(eax, Immediate(instr->arity())); | 3813 __ Move(eax, Immediate(instr->arity())); |
| 3814 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 3814 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
| 3815 } | 3815 } |
| 3816 | 3816 |
| 3817 | 3817 |
| 3818 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 3818 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
| 3819 ASSERT(ToRegister(instr->context()).is(esi)); | 3819 DCHECK(ToRegister(instr->context()).is(esi)); |
| 3820 ASSERT(ToRegister(instr->constructor()).is(edi)); | 3820 DCHECK(ToRegister(instr->constructor()).is(edi)); |
| 3821 ASSERT(ToRegister(instr->result()).is(eax)); | 3821 DCHECK(ToRegister(instr->result()).is(eax)); |
| 3822 | 3822 |
| 3823 __ Move(eax, Immediate(instr->arity())); | 3823 __ Move(eax, Immediate(instr->arity())); |
| 3824 __ mov(ebx, isolate()->factory()->undefined_value()); | 3824 __ mov(ebx, isolate()->factory()->undefined_value()); |
| 3825 ElementsKind kind = instr->hydrogen()->elements_kind(); | 3825 ElementsKind kind = instr->hydrogen()->elements_kind(); |
| 3826 AllocationSiteOverrideMode override_mode = | 3826 AllocationSiteOverrideMode override_mode = |
| 3827 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) | 3827 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) |
| 3828 ? DISABLE_ALLOCATION_SITES | 3828 ? DISABLE_ALLOCATION_SITES |
| 3829 : DONT_OVERRIDE; | 3829 : DONT_OVERRIDE; |
| 3830 | 3830 |
| 3831 if (instr->arity() == 0) { | 3831 if (instr->arity() == 0) { |
| (...skipping 22 matching lines...) Expand all Loading... |
| 3854 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 3854 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
| 3855 __ bind(&done); | 3855 __ bind(&done); |
| 3856 } else { | 3856 } else { |
| 3857 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); | 3857 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); |
| 3858 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 3858 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
| 3859 } | 3859 } |
| 3860 } | 3860 } |
| 3861 | 3861 |
| 3862 | 3862 |
// Calls a C++ runtime function; the context must already be in esi.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  DCHECK(ToRegister(instr->context()).is(esi));
  CallRuntime(instr->function(), instr->arity(), instr);
}
| 3867 | 3867 |
| 3868 | 3868 |
| 3869 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { | 3869 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { |
| 3870 Register function = ToRegister(instr->function()); | 3870 Register function = ToRegister(instr->function()); |
| 3871 Register code_object = ToRegister(instr->code_object()); | 3871 Register code_object = ToRegister(instr->code_object()); |
| 3872 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); | 3872 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); |
| 3873 __ mov(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); | 3873 __ mov(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); |
| 3874 } | 3874 } |
| (...skipping 12 matching lines...) Expand all Loading... |
| 3887 } | 3887 } |
| 3888 | 3888 |
| 3889 | 3889 |
| 3890 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { | 3890 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
| 3891 Representation representation = instr->hydrogen()->field_representation(); | 3891 Representation representation = instr->hydrogen()->field_representation(); |
| 3892 | 3892 |
| 3893 HObjectAccess access = instr->hydrogen()->access(); | 3893 HObjectAccess access = instr->hydrogen()->access(); |
| 3894 int offset = access.offset(); | 3894 int offset = access.offset(); |
| 3895 | 3895 |
| 3896 if (access.IsExternalMemory()) { | 3896 if (access.IsExternalMemory()) { |
| 3897 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 3897 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); |
| 3898 MemOperand operand = instr->object()->IsConstantOperand() | 3898 MemOperand operand = instr->object()->IsConstantOperand() |
| 3899 ? MemOperand::StaticVariable( | 3899 ? MemOperand::StaticVariable( |
| 3900 ToExternalReference(LConstantOperand::cast(instr->object()))) | 3900 ToExternalReference(LConstantOperand::cast(instr->object()))) |
| 3901 : MemOperand(ToRegister(instr->object()), offset); | 3901 : MemOperand(ToRegister(instr->object()), offset); |
| 3902 if (instr->value()->IsConstantOperand()) { | 3902 if (instr->value()->IsConstantOperand()) { |
| 3903 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 3903 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 3904 __ mov(operand, Immediate(ToInteger32(operand_value))); | 3904 __ mov(operand, Immediate(ToInteger32(operand_value))); |
| 3905 } else { | 3905 } else { |
| 3906 Register value = ToRegister(instr->value()); | 3906 Register value = ToRegister(instr->value()); |
| 3907 __ Store(value, operand, representation); | 3907 __ Store(value, operand, representation); |
| 3908 } | 3908 } |
| 3909 return; | 3909 return; |
| 3910 } | 3910 } |
| 3911 | 3911 |
| 3912 Register object = ToRegister(instr->object()); | 3912 Register object = ToRegister(instr->object()); |
| 3913 __ AssertNotSmi(object); | 3913 __ AssertNotSmi(object); |
| 3914 ASSERT(!representation.IsSmi() || | 3914 DCHECK(!representation.IsSmi() || |
| 3915 !instr->value()->IsConstantOperand() || | 3915 !instr->value()->IsConstantOperand() || |
| 3916 IsSmi(LConstantOperand::cast(instr->value()))); | 3916 IsSmi(LConstantOperand::cast(instr->value()))); |
| 3917 if (representation.IsDouble()) { | 3917 if (representation.IsDouble()) { |
| 3918 ASSERT(access.IsInobject()); | 3918 DCHECK(access.IsInobject()); |
| 3919 ASSERT(!instr->hydrogen()->has_transition()); | 3919 DCHECK(!instr->hydrogen()->has_transition()); |
| 3920 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 3920 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); |
| 3921 X87Register value = ToX87Register(instr->value()); | 3921 X87Register value = ToX87Register(instr->value()); |
| 3922 X87Mov(FieldOperand(object, offset), value); | 3922 X87Mov(FieldOperand(object, offset), value); |
| 3923 return; | 3923 return; |
| 3924 } | 3924 } |
| 3925 | 3925 |
| 3926 if (instr->hydrogen()->has_transition()) { | 3926 if (instr->hydrogen()->has_transition()) { |
| 3927 Handle<Map> transition = instr->hydrogen()->transition_map(); | 3927 Handle<Map> transition = instr->hydrogen()->transition_map(); |
| 3928 AddDeprecationDependency(transition); | 3928 AddDeprecationDependency(transition); |
| 3929 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition); | 3929 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition); |
| 3930 if (instr->hydrogen()->NeedsWriteBarrierForMap()) { | 3930 if (instr->hydrogen()->NeedsWriteBarrierForMap()) { |
| (...skipping 14 matching lines...) Expand all Loading... |
| 3945 } | 3945 } |
| 3946 | 3946 |
| 3947 MemOperand operand = FieldOperand(write_register, offset); | 3947 MemOperand operand = FieldOperand(write_register, offset); |
| 3948 if (instr->value()->IsConstantOperand()) { | 3948 if (instr->value()->IsConstantOperand()) { |
| 3949 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 3949 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 3950 if (operand_value->IsRegister()) { | 3950 if (operand_value->IsRegister()) { |
| 3951 Register value = ToRegister(operand_value); | 3951 Register value = ToRegister(operand_value); |
| 3952 __ Store(value, operand, representation); | 3952 __ Store(value, operand, representation); |
| 3953 } else if (representation.IsInteger32()) { | 3953 } else if (representation.IsInteger32()) { |
| 3954 Immediate immediate = ToImmediate(operand_value, representation); | 3954 Immediate immediate = ToImmediate(operand_value, representation); |
| 3955 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 3955 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); |
| 3956 __ mov(operand, immediate); | 3956 __ mov(operand, immediate); |
| 3957 } else { | 3957 } else { |
| 3958 Handle<Object> handle_value = ToHandle(operand_value); | 3958 Handle<Object> handle_value = ToHandle(operand_value); |
| 3959 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 3959 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); |
| 3960 __ mov(operand, handle_value); | 3960 __ mov(operand, handle_value); |
| 3961 } | 3961 } |
| 3962 } else { | 3962 } else { |
| 3963 Register value = ToRegister(instr->value()); | 3963 Register value = ToRegister(instr->value()); |
| 3964 __ Store(value, operand, representation); | 3964 __ Store(value, operand, representation); |
| 3965 } | 3965 } |
| 3966 | 3966 |
| 3967 if (instr->hydrogen()->NeedsWriteBarrier()) { | 3967 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 3968 Register value = ToRegister(instr->value()); | 3968 Register value = ToRegister(instr->value()); |
| 3969 Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object; | 3969 Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object; |
| 3970 // Update the write barrier for the object for in-object properties. | 3970 // Update the write barrier for the object for in-object properties. |
| 3971 __ RecordWriteField(write_register, | 3971 __ RecordWriteField(write_register, |
| 3972 offset, | 3972 offset, |
| 3973 value, | 3973 value, |
| 3974 temp, | 3974 temp, |
| 3975 EMIT_REMEMBERED_SET, | 3975 EMIT_REMEMBERED_SET, |
| 3976 instr->hydrogen()->SmiCheckForWriteBarrier(), | 3976 instr->hydrogen()->SmiCheckForWriteBarrier(), |
| 3977 instr->hydrogen()->PointersToHereCheckForValue()); | 3977 instr->hydrogen()->PointersToHereCheckForValue()); |
| 3978 } | 3978 } |
| 3979 } | 3979 } |
| 3980 | 3980 |
| 3981 | 3981 |
| 3982 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 3982 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
| 3983 ASSERT(ToRegister(instr->context()).is(esi)); | 3983 DCHECK(ToRegister(instr->context()).is(esi)); |
| 3984 ASSERT(ToRegister(instr->object()).is(StoreIC::ReceiverRegister())); | 3984 DCHECK(ToRegister(instr->object()).is(StoreIC::ReceiverRegister())); |
| 3985 ASSERT(ToRegister(instr->value()).is(StoreIC::ValueRegister())); | 3985 DCHECK(ToRegister(instr->value()).is(StoreIC::ValueRegister())); |
| 3986 | 3986 |
| 3987 __ mov(StoreIC::NameRegister(), instr->name()); | 3987 __ mov(StoreIC::NameRegister(), instr->name()); |
| 3988 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode()); | 3988 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode()); |
| 3989 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3989 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3990 } | 3990 } |
| 3991 | 3991 |
| 3992 | 3992 |
| 3993 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 3993 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
| 3994 Condition cc = instr->hydrogen()->allow_equality() ? above : above_equal; | 3994 Condition cc = instr->hydrogen()->allow_equality() ? above : above_equal; |
| 3995 if (instr->index()->IsConstantOperand()) { | 3995 if (instr->index()->IsConstantOperand()) { |
| (...skipping 150 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4146 FAST_ELEMENTS, | 4146 FAST_ELEMENTS, |
| 4147 instr->base_offset()); | 4147 instr->base_offset()); |
| 4148 if (instr->value()->IsRegister()) { | 4148 if (instr->value()->IsRegister()) { |
| 4149 __ mov(operand, ToRegister(instr->value())); | 4149 __ mov(operand, ToRegister(instr->value())); |
| 4150 } else { | 4150 } else { |
| 4151 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 4151 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 4152 if (IsSmi(operand_value)) { | 4152 if (IsSmi(operand_value)) { |
| 4153 Immediate immediate = ToImmediate(operand_value, Representation::Smi()); | 4153 Immediate immediate = ToImmediate(operand_value, Representation::Smi()); |
| 4154 __ mov(operand, immediate); | 4154 __ mov(operand, immediate); |
| 4155 } else { | 4155 } else { |
| 4156 ASSERT(!IsInteger32(operand_value)); | 4156 DCHECK(!IsInteger32(operand_value)); |
| 4157 Handle<Object> handle_value = ToHandle(operand_value); | 4157 Handle<Object> handle_value = ToHandle(operand_value); |
| 4158 __ mov(operand, handle_value); | 4158 __ mov(operand, handle_value); |
| 4159 } | 4159 } |
| 4160 } | 4160 } |
| 4161 | 4161 |
| 4162 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4162 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 4163 ASSERT(instr->value()->IsRegister()); | 4163 DCHECK(instr->value()->IsRegister()); |
| 4164 Register value = ToRegister(instr->value()); | 4164 Register value = ToRegister(instr->value()); |
| 4165 ASSERT(!instr->key()->IsConstantOperand()); | 4165 DCHECK(!instr->key()->IsConstantOperand()); |
| 4166 SmiCheck check_needed = | 4166 SmiCheck check_needed = |
| 4167 instr->hydrogen()->value()->type().IsHeapObject() | 4167 instr->hydrogen()->value()->type().IsHeapObject() |
| 4168 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 4168 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 4169 // Compute address of modified element and store it into key register. | 4169 // Compute address of modified element and store it into key register. |
| 4170 __ lea(key, operand); | 4170 __ lea(key, operand); |
| 4171 __ RecordWrite(elements, | 4171 __ RecordWrite(elements, |
| 4172 key, | 4172 key, |
| 4173 value, | 4173 value, |
| 4174 EMIT_REMEMBERED_SET, | 4174 EMIT_REMEMBERED_SET, |
| 4175 check_needed, | 4175 check_needed, |
| 4176 instr->hydrogen()->PointersToHereCheckForValue()); | 4176 instr->hydrogen()->PointersToHereCheckForValue()); |
| 4177 } | 4177 } |
| 4178 } | 4178 } |
| 4179 | 4179 |
| 4180 | 4180 |
| 4181 void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) { | 4181 void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) { |
| 4182 // By cases...external, fast-double, fast | 4182 // By cases...external, fast-double, fast |
| 4183 if (instr->is_typed_elements()) { | 4183 if (instr->is_typed_elements()) { |
| 4184 DoStoreKeyedExternalArray(instr); | 4184 DoStoreKeyedExternalArray(instr); |
| 4185 } else if (instr->hydrogen()->value()->representation().IsDouble()) { | 4185 } else if (instr->hydrogen()->value()->representation().IsDouble()) { |
| 4186 DoStoreKeyedFixedDoubleArray(instr); | 4186 DoStoreKeyedFixedDoubleArray(instr); |
| 4187 } else { | 4187 } else { |
| 4188 DoStoreKeyedFixedArray(instr); | 4188 DoStoreKeyedFixedArray(instr); |
| 4189 } | 4189 } |
| 4190 } | 4190 } |
| 4191 | 4191 |
| 4192 | 4192 |
| 4193 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 4193 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 4194 ASSERT(ToRegister(instr->context()).is(esi)); | 4194 DCHECK(ToRegister(instr->context()).is(esi)); |
| 4195 ASSERT(ToRegister(instr->object()).is(KeyedStoreIC::ReceiverRegister())); | 4195 DCHECK(ToRegister(instr->object()).is(KeyedStoreIC::ReceiverRegister())); |
| 4196 ASSERT(ToRegister(instr->key()).is(KeyedStoreIC::NameRegister())); | 4196 DCHECK(ToRegister(instr->key()).is(KeyedStoreIC::NameRegister())); |
| 4197 ASSERT(ToRegister(instr->value()).is(KeyedStoreIC::ValueRegister())); | 4197 DCHECK(ToRegister(instr->value()).is(KeyedStoreIC::ValueRegister())); |
| 4198 | 4198 |
| 4199 Handle<Code> ic = instr->strict_mode() == STRICT | 4199 Handle<Code> ic = instr->strict_mode() == STRICT |
| 4200 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() | 4200 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 4201 : isolate()->builtins()->KeyedStoreIC_Initialize(); | 4201 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 4202 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4202 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 4203 } | 4203 } |
| 4204 | 4204 |
| 4205 | 4205 |
| 4206 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4206 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 4207 Register object = ToRegister(instr->object()); | 4207 Register object = ToRegister(instr->object()); |
| (...skipping 18 matching lines...) Expand all Loading... |
| 4226 IsSimpleMapChangeTransition(from_kind, to_kind); | 4226 IsSimpleMapChangeTransition(from_kind, to_kind); |
| 4227 Label::Distance branch_distance = | 4227 Label::Distance branch_distance = |
| 4228 is_simple_map_transition ? Label::kNear : Label::kFar; | 4228 is_simple_map_transition ? Label::kNear : Label::kFar; |
| 4229 __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); | 4229 __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); |
| 4230 __ j(not_equal, ¬_applicable, branch_distance); | 4230 __ j(not_equal, ¬_applicable, branch_distance); |
| 4231 if (is_simple_map_transition) { | 4231 if (is_simple_map_transition) { |
| 4232 Register new_map_reg = ToRegister(instr->new_map_temp()); | 4232 Register new_map_reg = ToRegister(instr->new_map_temp()); |
| 4233 __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), | 4233 __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), |
| 4234 Immediate(to_map)); | 4234 Immediate(to_map)); |
| 4235 // Write barrier. | 4235 // Write barrier. |
| 4236 ASSERT_NE(instr->temp(), NULL); | 4236 DCHECK_NE(instr->temp(), NULL); |
| 4237 __ RecordWriteForMap(object_reg, to_map, new_map_reg, | 4237 __ RecordWriteForMap(object_reg, to_map, new_map_reg, |
| 4238 ToRegister(instr->temp())); | 4238 ToRegister(instr->temp())); |
| 4239 } else { | 4239 } else { |
| 4240 ASSERT(ToRegister(instr->context()).is(esi)); | 4240 DCHECK(ToRegister(instr->context()).is(esi)); |
| 4241 ASSERT(object_reg.is(eax)); | 4241 DCHECK(object_reg.is(eax)); |
| 4242 PushSafepointRegistersScope scope(this); | 4242 PushSafepointRegistersScope scope(this); |
| 4243 __ mov(ebx, to_map); | 4243 __ mov(ebx, to_map); |
| 4244 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; | 4244 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; |
| 4245 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); | 4245 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); |
| 4246 __ CallStub(&stub); | 4246 __ CallStub(&stub); |
| 4247 RecordSafepointWithLazyDeopt(instr, | 4247 RecordSafepointWithLazyDeopt(instr, |
| 4248 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 4248 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 4249 } | 4249 } |
| 4250 __ bind(¬_applicable); | 4250 __ bind(¬_applicable); |
| 4251 } | 4251 } |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4321 codegen()->DoDeferredStringCharFromCode(instr_); | 4321 codegen()->DoDeferredStringCharFromCode(instr_); |
| 4322 } | 4322 } |
| 4323 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 4323 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
| 4324 private: | 4324 private: |
| 4325 LStringCharFromCode* instr_; | 4325 LStringCharFromCode* instr_; |
| 4326 }; | 4326 }; |
| 4327 | 4327 |
| 4328 DeferredStringCharFromCode* deferred = | 4328 DeferredStringCharFromCode* deferred = |
| 4329 new(zone()) DeferredStringCharFromCode(this, instr, x87_stack_); | 4329 new(zone()) DeferredStringCharFromCode(this, instr, x87_stack_); |
| 4330 | 4330 |
| 4331 ASSERT(instr->hydrogen()->value()->representation().IsInteger32()); | 4331 DCHECK(instr->hydrogen()->value()->representation().IsInteger32()); |
| 4332 Register char_code = ToRegister(instr->char_code()); | 4332 Register char_code = ToRegister(instr->char_code()); |
| 4333 Register result = ToRegister(instr->result()); | 4333 Register result = ToRegister(instr->result()); |
| 4334 ASSERT(!char_code.is(result)); | 4334 DCHECK(!char_code.is(result)); |
| 4335 | 4335 |
| 4336 __ cmp(char_code, String::kMaxOneByteCharCode); | 4336 __ cmp(char_code, String::kMaxOneByteCharCode); |
| 4337 __ j(above, deferred->entry()); | 4337 __ j(above, deferred->entry()); |
| 4338 __ Move(result, Immediate(factory()->single_character_string_cache())); | 4338 __ Move(result, Immediate(factory()->single_character_string_cache())); |
| 4339 __ mov(result, FieldOperand(result, | 4339 __ mov(result, FieldOperand(result, |
| 4340 char_code, times_pointer_size, | 4340 char_code, times_pointer_size, |
| 4341 FixedArray::kHeaderSize)); | 4341 FixedArray::kHeaderSize)); |
| 4342 __ cmp(result, factory()->undefined_value()); | 4342 __ cmp(result, factory()->undefined_value()); |
| 4343 __ j(equal, deferred->entry()); | 4343 __ j(equal, deferred->entry()); |
| 4344 __ bind(deferred->exit()); | 4344 __ bind(deferred->exit()); |
| (...skipping 11 matching lines...) Expand all Loading... |
| 4356 | 4356 |
| 4357 PushSafepointRegistersScope scope(this); | 4357 PushSafepointRegistersScope scope(this); |
| 4358 __ SmiTag(char_code); | 4358 __ SmiTag(char_code); |
| 4359 __ push(char_code); | 4359 __ push(char_code); |
| 4360 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); | 4360 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); |
| 4361 __ StoreToSafepointRegisterSlot(result, eax); | 4361 __ StoreToSafepointRegisterSlot(result, eax); |
| 4362 } | 4362 } |
| 4363 | 4363 |
| 4364 | 4364 |
| 4365 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4365 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4366 ASSERT(ToRegister(instr->context()).is(esi)); | 4366 DCHECK(ToRegister(instr->context()).is(esi)); |
| 4367 ASSERT(ToRegister(instr->left()).is(edx)); | 4367 DCHECK(ToRegister(instr->left()).is(edx)); |
| 4368 ASSERT(ToRegister(instr->right()).is(eax)); | 4368 DCHECK(ToRegister(instr->right()).is(eax)); |
| 4369 StringAddStub stub(isolate(), | 4369 StringAddStub stub(isolate(), |
| 4370 instr->hydrogen()->flags(), | 4370 instr->hydrogen()->flags(), |
| 4371 instr->hydrogen()->pretenure_flag()); | 4371 instr->hydrogen()->pretenure_flag()); |
| 4372 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4372 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 4373 } | 4373 } |
| 4374 | 4374 |
| 4375 | 4375 |
| 4376 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 4376 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
| 4377 LOperand* input = instr->value(); | 4377 LOperand* input = instr->value(); |
| 4378 LOperand* output = instr->result(); | 4378 LOperand* output = instr->result(); |
| 4379 ASSERT(input->IsRegister() || input->IsStackSlot()); | 4379 DCHECK(input->IsRegister() || input->IsStackSlot()); |
| 4380 ASSERT(output->IsDoubleRegister()); | 4380 DCHECK(output->IsDoubleRegister()); |
| 4381 if (input->IsRegister()) { | 4381 if (input->IsRegister()) { |
| 4382 Register input_reg = ToRegister(input); | 4382 Register input_reg = ToRegister(input); |
| 4383 __ push(input_reg); | 4383 __ push(input_reg); |
| 4384 X87Mov(ToX87Register(output), Operand(esp, 0), kX87IntOperand); | 4384 X87Mov(ToX87Register(output), Operand(esp, 0), kX87IntOperand); |
| 4385 __ pop(input_reg); | 4385 __ pop(input_reg); |
| 4386 } else { | 4386 } else { |
| 4387 X87Mov(ToX87Register(output), ToOperand(input), kX87IntOperand); | 4387 X87Mov(ToX87Register(output), ToOperand(input), kX87IntOperand); |
| 4388 } | 4388 } |
| 4389 } | 4389 } |
| 4390 | 4390 |
| (...skipping 18 matching lines...) Expand all Loading... |
| 4409 virtual void Generate() V8_OVERRIDE { | 4409 virtual void Generate() V8_OVERRIDE { |
| 4410 codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp(), | 4410 codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp(), |
| 4411 SIGNED_INT32); | 4411 SIGNED_INT32); |
| 4412 } | 4412 } |
| 4413 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 4413 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
| 4414 private: | 4414 private: |
| 4415 LNumberTagI* instr_; | 4415 LNumberTagI* instr_; |
| 4416 }; | 4416 }; |
| 4417 | 4417 |
| 4418 LOperand* input = instr->value(); | 4418 LOperand* input = instr->value(); |
| 4419 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 4419 DCHECK(input->IsRegister() && input->Equals(instr->result())); |
| 4420 Register reg = ToRegister(input); | 4420 Register reg = ToRegister(input); |
| 4421 | 4421 |
| 4422 DeferredNumberTagI* deferred = | 4422 DeferredNumberTagI* deferred = |
| 4423 new(zone()) DeferredNumberTagI(this, instr, x87_stack_); | 4423 new(zone()) DeferredNumberTagI(this, instr, x87_stack_); |
| 4424 __ SmiTag(reg); | 4424 __ SmiTag(reg); |
| 4425 __ j(overflow, deferred->entry()); | 4425 __ j(overflow, deferred->entry()); |
| 4426 __ bind(deferred->exit()); | 4426 __ bind(deferred->exit()); |
| 4427 } | 4427 } |
| 4428 | 4428 |
| 4429 | 4429 |
// Tags a uint32 value as a smi in place (input register equals result).
// Values above Smi::kMaxValue cannot be represented as smis, so they are
// boxed as heap numbers in deferred code.
void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
  // Deferred path: allocates a heap number for an out-of-smi-range uint32.
  class DeferredNumberTagU V8_FINAL : public LDeferredCode {
   public:
    DeferredNumberTagU(LCodeGen* codegen,
                       LNumberTagU* instr,
                       const X87Stack& x87_stack)
        : LDeferredCode(codegen, x87_stack), instr_(instr) { }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp(),
                                       UNSIGNED_INT32);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
   private:
    LNumberTagU* instr_;
  };

  LOperand* input = instr->value();
  DCHECK(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagU* deferred =
      new(zone()) DeferredNumberTagU(this, instr, x87_stack_);
  // Unsigned compare: values above Smi::kMaxValue go to the deferred
  // heap-number allocation instead of being smi-tagged.
  __ cmp(reg, Immediate(Smi::kMaxValue));
  __ j(above, deferred->entry());
  __ SmiTag(reg);
  __ bind(deferred->exit());
}
| 4457 | 4457 |
| (...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4583 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4583 if (hchange->CheckFlag(HValue::kCanOverflow) && |
| 4584 !hchange->value()->CheckFlag(HValue::kUint32)) { | 4584 !hchange->value()->CheckFlag(HValue::kUint32)) { |
| 4585 DeoptimizeIf(overflow, instr->environment()); | 4585 DeoptimizeIf(overflow, instr->environment()); |
| 4586 } | 4586 } |
| 4587 } | 4587 } |
| 4588 | 4588 |
| 4589 | 4589 |
| 4590 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4590 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
| 4591 LOperand* input = instr->value(); | 4591 LOperand* input = instr->value(); |
| 4592 Register result = ToRegister(input); | 4592 Register result = ToRegister(input); |
| 4593 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 4593 DCHECK(input->IsRegister() && input->Equals(instr->result())); |
| 4594 if (instr->needs_check()) { | 4594 if (instr->needs_check()) { |
| 4595 __ test(result, Immediate(kSmiTagMask)); | 4595 __ test(result, Immediate(kSmiTagMask)); |
| 4596 DeoptimizeIf(not_zero, instr->environment()); | 4596 DeoptimizeIf(not_zero, instr->environment()); |
| 4597 } else { | 4597 } else { |
| 4598 __ AssertSmi(result); | 4598 __ AssertSmi(result); |
| 4599 } | 4599 } |
| 4600 __ SmiUntag(result); | 4600 __ SmiUntag(result); |
| 4601 } | 4601 } |
| 4602 | 4602 |
| 4603 | 4603 |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4648 __ mov(temp_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 4648 __ mov(temp_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
| 4649 __ test(temp_reg, Immediate(HeapNumber::kSignMask)); | 4649 __ test(temp_reg, Immediate(HeapNumber::kSignMask)); |
| 4650 __ j(zero, &done, Label::kNear); | 4650 __ j(zero, &done, Label::kNear); |
| 4651 | 4651 |
| 4652 // Pop FPU stack before deoptimizing. | 4652 // Pop FPU stack before deoptimizing. |
| 4653 __ fstp(0); | 4653 __ fstp(0); |
| 4654 DeoptimizeIf(not_zero, env); | 4654 DeoptimizeIf(not_zero, env); |
| 4655 } | 4655 } |
| 4656 __ jmp(&done, Label::kNear); | 4656 __ jmp(&done, Label::kNear); |
| 4657 } else { | 4657 } else { |
| 4658 ASSERT(mode == NUMBER_CANDIDATE_IS_SMI); | 4658 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
| 4659 } | 4659 } |
| 4660 | 4660 |
| 4661 __ bind(&load_smi); | 4661 __ bind(&load_smi); |
| 4662 // Clobbering a temp is faster than re-tagging the | 4662 // Clobbering a temp is faster than re-tagging the |
| 4663 // input register since we avoid dependencies. | 4663 // input register since we avoid dependencies. |
| 4664 __ mov(temp_reg, input_reg); | 4664 __ mov(temp_reg, input_reg); |
| 4665 __ SmiUntag(temp_reg); // Untag smi before converting to float. | 4665 __ SmiUntag(temp_reg); // Untag smi before converting to float. |
| 4666 __ push(temp_reg); | 4666 __ push(temp_reg); |
| 4667 __ fild_s(Operand(esp, 0)); | 4667 __ fild_s(Operand(esp, 0)); |
| 4668 __ add(esp, Immediate(kPointerSize)); | 4668 __ add(esp, Immediate(kPointerSize)); |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4727 : LDeferredCode(codegen, x87_stack), instr_(instr) { } | 4727 : LDeferredCode(codegen, x87_stack), instr_(instr) { } |
| 4728 virtual void Generate() V8_OVERRIDE { | 4728 virtual void Generate() V8_OVERRIDE { |
| 4729 codegen()->DoDeferredTaggedToI(instr_, done()); | 4729 codegen()->DoDeferredTaggedToI(instr_, done()); |
| 4730 } | 4730 } |
| 4731 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 4731 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
| 4732 private: | 4732 private: |
| 4733 LTaggedToI* instr_; | 4733 LTaggedToI* instr_; |
| 4734 }; | 4734 }; |
| 4735 | 4735 |
| 4736 LOperand* input = instr->value(); | 4736 LOperand* input = instr->value(); |
| 4737 ASSERT(input->IsRegister()); | 4737 DCHECK(input->IsRegister()); |
| 4738 Register input_reg = ToRegister(input); | 4738 Register input_reg = ToRegister(input); |
| 4739 ASSERT(input_reg.is(ToRegister(instr->result()))); | 4739 DCHECK(input_reg.is(ToRegister(instr->result()))); |
| 4740 | 4740 |
| 4741 if (instr->hydrogen()->value()->representation().IsSmi()) { | 4741 if (instr->hydrogen()->value()->representation().IsSmi()) { |
| 4742 __ SmiUntag(input_reg); | 4742 __ SmiUntag(input_reg); |
| 4743 } else { | 4743 } else { |
| 4744 DeferredTaggedToI* deferred = | 4744 DeferredTaggedToI* deferred = |
| 4745 new(zone()) DeferredTaggedToI(this, instr, x87_stack_); | 4745 new(zone()) DeferredTaggedToI(this, instr, x87_stack_); |
| 4746 // Optimistically untag the input. | 4746 // Optimistically untag the input. |
| 4747 // If the input is a HeapObject, SmiUntag will set the carry flag. | 4747 // If the input is a HeapObject, SmiUntag will set the carry flag. |
| 4748 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); | 4748 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); |
| 4749 __ SmiUntag(input_reg); | 4749 __ SmiUntag(input_reg); |
| 4750 // Branch to deferred code if the input was tagged. | 4750 // Branch to deferred code if the input was tagged. |
| 4751 // The deferred code will take care of restoring the tag. | 4751 // The deferred code will take care of restoring the tag. |
| 4752 __ j(carry, deferred->entry()); | 4752 __ j(carry, deferred->entry()); |
| 4753 __ bind(deferred->exit()); | 4753 __ bind(deferred->exit()); |
| 4754 } | 4754 } |
| 4755 } | 4755 } |
| 4756 | 4756 |
| 4757 | 4757 |
| 4758 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { | 4758 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { |
| 4759 LOperand* input = instr->value(); | 4759 LOperand* input = instr->value(); |
| 4760 ASSERT(input->IsRegister()); | 4760 DCHECK(input->IsRegister()); |
| 4761 LOperand* temp = instr->temp(); | 4761 LOperand* temp = instr->temp(); |
| 4762 ASSERT(temp->IsRegister()); | 4762 DCHECK(temp->IsRegister()); |
| 4763 LOperand* result = instr->result(); | 4763 LOperand* result = instr->result(); |
| 4764 ASSERT(result->IsDoubleRegister()); | 4764 DCHECK(result->IsDoubleRegister()); |
| 4765 | 4765 |
| 4766 Register input_reg = ToRegister(input); | 4766 Register input_reg = ToRegister(input); |
| 4767 bool deoptimize_on_minus_zero = | 4767 bool deoptimize_on_minus_zero = |
| 4768 instr->hydrogen()->deoptimize_on_minus_zero(); | 4768 instr->hydrogen()->deoptimize_on_minus_zero(); |
| 4769 Register temp_reg = ToRegister(temp); | 4769 Register temp_reg = ToRegister(temp); |
| 4770 | 4770 |
| 4771 HValue* value = instr->hydrogen()->value(); | 4771 HValue* value = instr->hydrogen()->value(); |
| 4772 NumberUntagDMode mode = value->representation().IsSmi() | 4772 NumberUntagDMode mode = value->representation().IsSmi() |
| 4773 ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED; | 4773 ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED; |
| 4774 | 4774 |
| 4775 EmitNumberUntagDNoSSE2(input_reg, | 4775 EmitNumberUntagDNoSSE2(input_reg, |
| 4776 temp_reg, | 4776 temp_reg, |
| 4777 ToX87Register(result), | 4777 ToX87Register(result), |
| 4778 instr->hydrogen()->can_convert_undefined_to_nan(), | 4778 instr->hydrogen()->can_convert_undefined_to_nan(), |
| 4779 deoptimize_on_minus_zero, | 4779 deoptimize_on_minus_zero, |
| 4780 instr->environment(), | 4780 instr->environment(), |
| 4781 mode); | 4781 mode); |
| 4782 } | 4782 } |
| 4783 | 4783 |
| 4784 | 4784 |
| 4785 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { | 4785 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { |
| 4786 LOperand* input = instr->value(); | 4786 LOperand* input = instr->value(); |
| 4787 ASSERT(input->IsDoubleRegister()); | 4787 DCHECK(input->IsDoubleRegister()); |
| 4788 LOperand* result = instr->result(); | 4788 LOperand* result = instr->result(); |
| 4789 ASSERT(result->IsRegister()); | 4789 DCHECK(result->IsRegister()); |
| 4790 Register result_reg = ToRegister(result); | 4790 Register result_reg = ToRegister(result); |
| 4791 | 4791 |
| 4792 if (instr->truncating()) { | 4792 if (instr->truncating()) { |
| 4793 X87Register input_reg = ToX87Register(input); | 4793 X87Register input_reg = ToX87Register(input); |
| 4794 X87Fxch(input_reg); | 4794 X87Fxch(input_reg); |
| 4795 __ TruncateX87TOSToI(result_reg); | 4795 __ TruncateX87TOSToI(result_reg); |
| 4796 } else { | 4796 } else { |
| 4797 Label bailout, done; | 4797 Label bailout, done; |
| 4798 X87Register input_reg = ToX87Register(input); | 4798 X87Register input_reg = ToX87Register(input); |
| 4799 X87Fxch(input_reg); | 4799 X87Fxch(input_reg); |
| 4800 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(), | 4800 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(), |
| 4801 &bailout, Label::kNear); | 4801 &bailout, Label::kNear); |
| 4802 __ jmp(&done, Label::kNear); | 4802 __ jmp(&done, Label::kNear); |
| 4803 __ bind(&bailout); | 4803 __ bind(&bailout); |
| 4804 DeoptimizeIf(no_condition, instr->environment()); | 4804 DeoptimizeIf(no_condition, instr->environment()); |
| 4805 __ bind(&done); | 4805 __ bind(&done); |
| 4806 } | 4806 } |
| 4807 } | 4807 } |
| 4808 | 4808 |
| 4809 | 4809 |
| 4810 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 4810 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
| 4811 LOperand* input = instr->value(); | 4811 LOperand* input = instr->value(); |
| 4812 ASSERT(input->IsDoubleRegister()); | 4812 DCHECK(input->IsDoubleRegister()); |
| 4813 LOperand* result = instr->result(); | 4813 LOperand* result = instr->result(); |
| 4814 ASSERT(result->IsRegister()); | 4814 DCHECK(result->IsRegister()); |
| 4815 Register result_reg = ToRegister(result); | 4815 Register result_reg = ToRegister(result); |
| 4816 | 4816 |
| 4817 Label bailout, done; | 4817 Label bailout, done; |
| 4818 X87Register input_reg = ToX87Register(input); | 4818 X87Register input_reg = ToX87Register(input); |
| 4819 X87Fxch(input_reg); | 4819 X87Fxch(input_reg); |
| 4820 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(), | 4820 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(), |
| 4821 &bailout, Label::kNear); | 4821 &bailout, Label::kNear); |
| 4822 __ jmp(&done, Label::kNear); | 4822 __ jmp(&done, Label::kNear); |
| 4823 __ bind(&bailout); | 4823 __ bind(&bailout); |
| 4824 DeoptimizeIf(no_condition, instr->environment()); | 4824 DeoptimizeIf(no_condition, instr->environment()); |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4870 static_cast<int8_t>(last)); | 4870 static_cast<int8_t>(last)); |
| 4871 DeoptimizeIf(above, instr->environment()); | 4871 DeoptimizeIf(above, instr->environment()); |
| 4872 } | 4872 } |
| 4873 } | 4873 } |
| 4874 } else { | 4874 } else { |
| 4875 uint8_t mask; | 4875 uint8_t mask; |
| 4876 uint8_t tag; | 4876 uint8_t tag; |
| 4877 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 4877 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); |
| 4878 | 4878 |
| 4879 if (IsPowerOf2(mask)) { | 4879 if (IsPowerOf2(mask)) { |
| 4880 ASSERT(tag == 0 || IsPowerOf2(tag)); | 4880 DCHECK(tag == 0 || IsPowerOf2(tag)); |
| 4881 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask); | 4881 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask); |
| 4882 DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment()); | 4882 DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment()); |
| 4883 } else { | 4883 } else { |
| 4884 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); | 4884 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); |
| 4885 __ and_(temp, mask); | 4885 __ and_(temp, mask); |
| 4886 __ cmp(temp, tag); | 4886 __ cmp(temp, tag); |
| 4887 DeoptimizeIf(not_equal, instr->environment()); | 4887 DeoptimizeIf(not_equal, instr->environment()); |
| 4888 } | 4888 } |
| 4889 } | 4889 } |
| 4890 } | 4890 } |
| (...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4942 | 4942 |
| 4943 if (instr->hydrogen()->IsStabilityCheck()) { | 4943 if (instr->hydrogen()->IsStabilityCheck()) { |
| 4944 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); | 4944 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); |
| 4945 for (int i = 0; i < maps->size(); ++i) { | 4945 for (int i = 0; i < maps->size(); ++i) { |
| 4946 AddStabilityDependency(maps->at(i).handle()); | 4946 AddStabilityDependency(maps->at(i).handle()); |
| 4947 } | 4947 } |
| 4948 return; | 4948 return; |
| 4949 } | 4949 } |
| 4950 | 4950 |
| 4951 LOperand* input = instr->value(); | 4951 LOperand* input = instr->value(); |
| 4952 ASSERT(input->IsRegister()); | 4952 DCHECK(input->IsRegister()); |
| 4953 Register reg = ToRegister(input); | 4953 Register reg = ToRegister(input); |
| 4954 | 4954 |
| 4955 DeferredCheckMaps* deferred = NULL; | 4955 DeferredCheckMaps* deferred = NULL; |
| 4956 if (instr->hydrogen()->HasMigrationTarget()) { | 4956 if (instr->hydrogen()->HasMigrationTarget()) { |
| 4957 deferred = new(zone()) DeferredCheckMaps(this, instr, reg, x87_stack_); | 4957 deferred = new(zone()) DeferredCheckMaps(this, instr, reg, x87_stack_); |
| 4958 __ bind(deferred->check_maps()); | 4958 __ bind(deferred->check_maps()); |
| 4959 } | 4959 } |
| 4960 | 4960 |
| 4961 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); | 4961 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); |
| 4962 Label success; | 4962 Label success; |
| (...skipping 14 matching lines...) Expand all Loading... |
| 4977 __ bind(&success); | 4977 __ bind(&success); |
| 4978 } | 4978 } |
| 4979 | 4979 |
| 4980 | 4980 |
| 4981 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 4981 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
| 4982 UNREACHABLE(); | 4982 UNREACHABLE(); |
| 4983 } | 4983 } |
| 4984 | 4984 |
| 4985 | 4985 |
| 4986 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { | 4986 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { |
| 4987 ASSERT(instr->unclamped()->Equals(instr->result())); | 4987 DCHECK(instr->unclamped()->Equals(instr->result())); |
| 4988 Register value_reg = ToRegister(instr->result()); | 4988 Register value_reg = ToRegister(instr->result()); |
| 4989 __ ClampUint8(value_reg); | 4989 __ ClampUint8(value_reg); |
| 4990 } | 4990 } |
| 4991 | 4991 |
| 4992 | 4992 |
| 4993 void LCodeGen::DoClampTToUint8NoSSE2(LClampTToUint8NoSSE2* instr) { | 4993 void LCodeGen::DoClampTToUint8NoSSE2(LClampTToUint8NoSSE2* instr) { |
| 4994 Register input_reg = ToRegister(instr->unclamped()); | 4994 Register input_reg = ToRegister(instr->unclamped()); |
| 4995 Register result_reg = ToRegister(instr->result()); | 4995 Register result_reg = ToRegister(instr->result()); |
| 4996 Register scratch = ToRegister(instr->scratch()); | 4996 Register scratch = ToRegister(instr->scratch()); |
| 4997 Register scratch2 = ToRegister(instr->scratch2()); | 4997 Register scratch2 = ToRegister(instr->scratch2()); |
| (...skipping 144 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5142 | 5142 |
| 5143 Register result = ToRegister(instr->result()); | 5143 Register result = ToRegister(instr->result()); |
| 5144 Register temp = ToRegister(instr->temp()); | 5144 Register temp = ToRegister(instr->temp()); |
| 5145 | 5145 |
| 5146 // Allocate memory for the object. | 5146 // Allocate memory for the object. |
| 5147 AllocationFlags flags = TAG_OBJECT; | 5147 AllocationFlags flags = TAG_OBJECT; |
| 5148 if (instr->hydrogen()->MustAllocateDoubleAligned()) { | 5148 if (instr->hydrogen()->MustAllocateDoubleAligned()) { |
| 5149 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); | 5149 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); |
| 5150 } | 5150 } |
| 5151 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { | 5151 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
| 5152 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); | 5152 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
| 5153 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5153 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5154 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); | 5154 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); |
| 5155 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5155 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
| 5156 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5156 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5157 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); | 5157 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); |
| 5158 } | 5158 } |
| 5159 | 5159 |
| 5160 if (instr->size()->IsConstantOperand()) { | 5160 if (instr->size()->IsConstantOperand()) { |
| 5161 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5161 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
| 5162 if (size <= Page::kMaxRegularHeapObjectSize) { | 5162 if (size <= Page::kMaxRegularHeapObjectSize) { |
| 5163 __ Allocate(size, result, temp, no_reg, deferred->entry(), flags); | 5163 __ Allocate(size, result, temp, no_reg, deferred->entry(), flags); |
| 5164 } else { | 5164 } else { |
| 5165 __ jmp(deferred->entry()); | 5165 __ jmp(deferred->entry()); |
| 5166 } | 5166 } |
| (...skipping 27 matching lines...) Expand all Loading... |
| 5194 Register result = ToRegister(instr->result()); | 5194 Register result = ToRegister(instr->result()); |
| 5195 | 5195 |
| 5196 // TODO(3095996): Get rid of this. For now, we need to make the | 5196 // TODO(3095996): Get rid of this. For now, we need to make the |
| 5197 // result register contain a valid pointer because it is already | 5197 // result register contain a valid pointer because it is already |
| 5198 // contained in the register pointer map. | 5198 // contained in the register pointer map. |
| 5199 __ Move(result, Immediate(Smi::FromInt(0))); | 5199 __ Move(result, Immediate(Smi::FromInt(0))); |
| 5200 | 5200 |
| 5201 PushSafepointRegistersScope scope(this); | 5201 PushSafepointRegistersScope scope(this); |
| 5202 if (instr->size()->IsRegister()) { | 5202 if (instr->size()->IsRegister()) { |
| 5203 Register size = ToRegister(instr->size()); | 5203 Register size = ToRegister(instr->size()); |
| 5204 ASSERT(!size.is(result)); | 5204 DCHECK(!size.is(result)); |
| 5205 __ SmiTag(ToRegister(instr->size())); | 5205 __ SmiTag(ToRegister(instr->size())); |
| 5206 __ push(size); | 5206 __ push(size); |
| 5207 } else { | 5207 } else { |
| 5208 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5208 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
| 5209 if (size >= 0 && size <= Smi::kMaxValue) { | 5209 if (size >= 0 && size <= Smi::kMaxValue) { |
| 5210 __ push(Immediate(Smi::FromInt(size))); | 5210 __ push(Immediate(Smi::FromInt(size))); |
| 5211 } else { | 5211 } else { |
| 5212 // We should never get here at runtime => abort | 5212 // We should never get here at runtime => abort |
| 5213 __ int3(); | 5213 __ int3(); |
| 5214 return; | 5214 return; |
| 5215 } | 5215 } |
| 5216 } | 5216 } |
| 5217 | 5217 |
| 5218 int flags = AllocateDoubleAlignFlag::encode( | 5218 int flags = AllocateDoubleAlignFlag::encode( |
| 5219 instr->hydrogen()->MustAllocateDoubleAligned()); | 5219 instr->hydrogen()->MustAllocateDoubleAligned()); |
| 5220 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { | 5220 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
| 5221 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); | 5221 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
| 5222 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5222 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5223 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); | 5223 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); |
| 5224 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5224 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
| 5225 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5225 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5226 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); | 5226 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); |
| 5227 } else { | 5227 } else { |
| 5228 flags = AllocateTargetSpace::update(flags, NEW_SPACE); | 5228 flags = AllocateTargetSpace::update(flags, NEW_SPACE); |
| 5229 } | 5229 } |
| 5230 __ push(Immediate(Smi::FromInt(flags))); | 5230 __ push(Immediate(Smi::FromInt(flags))); |
| 5231 | 5231 |
| 5232 CallRuntimeFromDeferred( | 5232 CallRuntimeFromDeferred( |
| 5233 Runtime::kAllocateInTargetSpace, 2, instr, instr->context()); | 5233 Runtime::kAllocateInTargetSpace, 2, instr, instr->context()); |
| 5234 __ StoreToSafepointRegisterSlot(result, eax); | 5234 __ StoreToSafepointRegisterSlot(result, eax); |
| 5235 } | 5235 } |
| 5236 | 5236 |
| 5237 | 5237 |
| 5238 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { | 5238 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { |
| 5239 ASSERT(ToRegister(instr->value()).is(eax)); | 5239 DCHECK(ToRegister(instr->value()).is(eax)); |
| 5240 __ push(eax); | 5240 __ push(eax); |
| 5241 CallRuntime(Runtime::kToFastProperties, 1, instr); | 5241 CallRuntime(Runtime::kToFastProperties, 1, instr); |
| 5242 } | 5242 } |
| 5243 | 5243 |
| 5244 | 5244 |
| 5245 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 5245 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 5246 ASSERT(ToRegister(instr->context()).is(esi)); | 5246 DCHECK(ToRegister(instr->context()).is(esi)); |
| 5247 Label materialized; | 5247 Label materialized; |
| 5248 // Registers will be used as follows: | 5248 // Registers will be used as follows: |
| 5249 // ecx = literals array. | 5249 // ecx = literals array. |
| 5250 // ebx = regexp literal. | 5250 // ebx = regexp literal. |
| 5251 // eax = regexp literal clone. | 5251 // eax = regexp literal clone. |
| 5252 // esi = context. | 5252 // esi = context. |
| 5253 int literal_offset = | 5253 int literal_offset = |
| 5254 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); | 5254 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); |
| 5255 __ LoadHeapObject(ecx, instr->hydrogen()->literals()); | 5255 __ LoadHeapObject(ecx, instr->hydrogen()->literals()); |
| 5256 __ mov(ebx, FieldOperand(ecx, literal_offset)); | 5256 __ mov(ebx, FieldOperand(ecx, literal_offset)); |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5288 __ mov(FieldOperand(eax, i + kPointerSize), ecx); | 5288 __ mov(FieldOperand(eax, i + kPointerSize), ecx); |
| 5289 } | 5289 } |
| 5290 if ((size % (2 * kPointerSize)) != 0) { | 5290 if ((size % (2 * kPointerSize)) != 0) { |
| 5291 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); | 5291 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); |
| 5292 __ mov(FieldOperand(eax, size - kPointerSize), edx); | 5292 __ mov(FieldOperand(eax, size - kPointerSize), edx); |
| 5293 } | 5293 } |
| 5294 } | 5294 } |
| 5295 | 5295 |
| 5296 | 5296 |
| 5297 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5297 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 5298 ASSERT(ToRegister(instr->context()).is(esi)); | 5298 DCHECK(ToRegister(instr->context()).is(esi)); |
| 5299 // Use the fast case closure allocation code that allocates in new | 5299 // Use the fast case closure allocation code that allocates in new |
| 5300 // space for nested functions that don't need literals cloning. | 5300 // space for nested functions that don't need literals cloning. |
| 5301 bool pretenure = instr->hydrogen()->pretenure(); | 5301 bool pretenure = instr->hydrogen()->pretenure(); |
| 5302 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5302 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
| 5303 FastNewClosureStub stub(isolate(), | 5303 FastNewClosureStub stub(isolate(), |
| 5304 instr->hydrogen()->strict_mode(), | 5304 instr->hydrogen()->strict_mode(), |
| 5305 instr->hydrogen()->is_generator()); | 5305 instr->hydrogen()->is_generator()); |
| 5306 __ mov(ebx, Immediate(instr->hydrogen()->shared_info())); | 5306 __ mov(ebx, Immediate(instr->hydrogen()->shared_info())); |
| 5307 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 5307 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 5308 } else { | 5308 } else { |
| 5309 __ push(esi); | 5309 __ push(esi); |
| 5310 __ push(Immediate(instr->hydrogen()->shared_info())); | 5310 __ push(Immediate(instr->hydrogen()->shared_info())); |
| 5311 __ push(Immediate(pretenure ? factory()->true_value() | 5311 __ push(Immediate(pretenure ? factory()->true_value() |
| 5312 : factory()->false_value())); | 5312 : factory()->false_value())); |
| 5313 CallRuntime(Runtime::kNewClosure, 3, instr); | 5313 CallRuntime(Runtime::kNewClosure, 3, instr); |
| 5314 } | 5314 } |
| 5315 } | 5315 } |
| 5316 | 5316 |
| 5317 | 5317 |
| 5318 void LCodeGen::DoTypeof(LTypeof* instr) { | 5318 void LCodeGen::DoTypeof(LTypeof* instr) { |
| 5319 ASSERT(ToRegister(instr->context()).is(esi)); | 5319 DCHECK(ToRegister(instr->context()).is(esi)); |
| 5320 LOperand* input = instr->value(); | 5320 LOperand* input = instr->value(); |
| 5321 EmitPushTaggedOperand(input); | 5321 EmitPushTaggedOperand(input); |
| 5322 CallRuntime(Runtime::kTypeof, 1, instr); | 5322 CallRuntime(Runtime::kTypeof, 1, instr); |
| 5323 } | 5323 } |
| 5324 | 5324 |
| 5325 | 5325 |
| 5326 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { | 5326 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { |
| 5327 Register input = ToRegister(instr->value()); | 5327 Register input = ToRegister(instr->value()); |
| 5328 Condition final_branch_condition = EmitTypeofIs(instr, input); | 5328 Condition final_branch_condition = EmitTypeofIs(instr, input); |
| 5329 if (final_branch_condition != no_condition) { | 5329 if (final_branch_condition != no_condition) { |
| (...skipping 113 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5443 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 5443 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 5444 __ Nop(padding_size); | 5444 __ Nop(padding_size); |
| 5445 } | 5445 } |
| 5446 } | 5446 } |
| 5447 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5447 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5448 } | 5448 } |
| 5449 | 5449 |
| 5450 | 5450 |
| 5451 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5451 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 5452 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5452 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5453 ASSERT(instr->HasEnvironment()); | 5453 DCHECK(instr->HasEnvironment()); |
| 5454 LEnvironment* env = instr->environment(); | 5454 LEnvironment* env = instr->environment(); |
| 5455 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5455 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5456 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5456 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5457 } | 5457 } |
| 5458 | 5458 |
| 5459 | 5459 |
| 5460 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5460 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 5461 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5461 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
| 5462 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5462 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
| 5463 // needed return address), even though the implementation of LAZY and EAGER is | 5463 // needed return address), even though the implementation of LAZY and EAGER is |
| (...skipping 16 matching lines...) Expand all Loading... |
| 5480 // Nothing to see here, move on! | 5480 // Nothing to see here, move on! |
| 5481 } | 5481 } |
| 5482 | 5482 |
| 5483 | 5483 |
| 5484 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5484 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
| 5485 PushSafepointRegistersScope scope(this); | 5485 PushSafepointRegistersScope scope(this); |
| 5486 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 5486 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 5487 __ CallRuntime(Runtime::kStackGuard); | 5487 __ CallRuntime(Runtime::kStackGuard); |
| 5488 RecordSafepointWithLazyDeopt( | 5488 RecordSafepointWithLazyDeopt( |
| 5489 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 5489 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 5490 ASSERT(instr->HasEnvironment()); | 5490 DCHECK(instr->HasEnvironment()); |
| 5491 LEnvironment* env = instr->environment(); | 5491 LEnvironment* env = instr->environment(); |
| 5492 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5492 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5493 } | 5493 } |
| 5494 | 5494 |
| 5495 | 5495 |
| 5496 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 5496 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
| 5497 class DeferredStackCheck V8_FINAL : public LDeferredCode { | 5497 class DeferredStackCheck V8_FINAL : public LDeferredCode { |
| 5498 public: | 5498 public: |
| 5499 DeferredStackCheck(LCodeGen* codegen, | 5499 DeferredStackCheck(LCodeGen* codegen, |
| 5500 LStackCheck* instr, | 5500 LStackCheck* instr, |
| 5501 const X87Stack& x87_stack) | 5501 const X87Stack& x87_stack) |
| 5502 : LDeferredCode(codegen, x87_stack), instr_(instr) { } | 5502 : LDeferredCode(codegen, x87_stack), instr_(instr) { } |
| 5503 virtual void Generate() V8_OVERRIDE { | 5503 virtual void Generate() V8_OVERRIDE { |
| 5504 codegen()->DoDeferredStackCheck(instr_); | 5504 codegen()->DoDeferredStackCheck(instr_); |
| 5505 } | 5505 } |
| 5506 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 5506 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
| 5507 private: | 5507 private: |
| 5508 LStackCheck* instr_; | 5508 LStackCheck* instr_; |
| 5509 }; | 5509 }; |
| 5510 | 5510 |
| 5511 ASSERT(instr->HasEnvironment()); | 5511 DCHECK(instr->HasEnvironment()); |
| 5512 LEnvironment* env = instr->environment(); | 5512 LEnvironment* env = instr->environment(); |
| 5513 // There is no LLazyBailout instruction for stack-checks. We have to | 5513 // There is no LLazyBailout instruction for stack-checks. We have to |
| 5514 // prepare for lazy deoptimization explicitly here. | 5514 // prepare for lazy deoptimization explicitly here. |
| 5515 if (instr->hydrogen()->is_function_entry()) { | 5515 if (instr->hydrogen()->is_function_entry()) { |
| 5516 // Perform stack overflow check. | 5516 // Perform stack overflow check. |
| 5517 Label done; | 5517 Label done; |
| 5518 ExternalReference stack_limit = | 5518 ExternalReference stack_limit = |
| 5519 ExternalReference::address_of_stack_limit(isolate()); | 5519 ExternalReference::address_of_stack_limit(isolate()); |
| 5520 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 5520 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
| 5521 __ j(above_equal, &done, Label::kNear); | 5521 __ j(above_equal, &done, Label::kNear); |
| 5522 | 5522 |
| 5523 ASSERT(instr->context()->IsRegister()); | 5523 DCHECK(instr->context()->IsRegister()); |
| 5524 ASSERT(ToRegister(instr->context()).is(esi)); | 5524 DCHECK(ToRegister(instr->context()).is(esi)); |
| 5525 CallCode(isolate()->builtins()->StackCheck(), | 5525 CallCode(isolate()->builtins()->StackCheck(), |
| 5526 RelocInfo::CODE_TARGET, | 5526 RelocInfo::CODE_TARGET, |
| 5527 instr); | 5527 instr); |
| 5528 __ bind(&done); | 5528 __ bind(&done); |
| 5529 } else { | 5529 } else { |
| 5530 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5530 DCHECK(instr->hydrogen()->is_backwards_branch()); |
| 5531 // Perform stack overflow check if this goto needs it before jumping. | 5531 // Perform stack overflow check if this goto needs it before jumping. |
| 5532 DeferredStackCheck* deferred_stack_check = | 5532 DeferredStackCheck* deferred_stack_check = |
| 5533 new(zone()) DeferredStackCheck(this, instr, x87_stack_); | 5533 new(zone()) DeferredStackCheck(this, instr, x87_stack_); |
| 5534 ExternalReference stack_limit = | 5534 ExternalReference stack_limit = |
| 5535 ExternalReference::address_of_stack_limit(isolate()); | 5535 ExternalReference::address_of_stack_limit(isolate()); |
| 5536 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 5536 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
| 5537 __ j(below, deferred_stack_check->entry()); | 5537 __ j(below, deferred_stack_check->entry()); |
| 5538 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5538 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5539 __ bind(instr->done_label()); | 5539 __ bind(instr->done_label()); |
| 5540 deferred_stack_check->SetExit(instr->done_label()); | 5540 deferred_stack_check->SetExit(instr->done_label()); |
| 5541 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5541 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5542 // Don't record a deoptimization index for the safepoint here. | 5542 // Don't record a deoptimization index for the safepoint here. |
| 5543 // This will be done explicitly when emitting call and the safepoint in | 5543 // This will be done explicitly when emitting call and the safepoint in |
| 5544 // the deferred code. | 5544 // the deferred code. |
| 5545 } | 5545 } |
| 5546 } | 5546 } |
| 5547 | 5547 |
| 5548 | 5548 |
| 5549 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 5549 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
| 5550 // This is a pseudo-instruction that ensures that the environment here is | 5550 // This is a pseudo-instruction that ensures that the environment here is |
| 5551 // properly registered for deoptimization and records the assembler's PC | 5551 // properly registered for deoptimization and records the assembler's PC |
| 5552 // offset. | 5552 // offset. |
| 5553 LEnvironment* environment = instr->environment(); | 5553 LEnvironment* environment = instr->environment(); |
| 5554 | 5554 |
| 5555 // If the environment were already registered, we would have no way of | 5555 // If the environment were already registered, we would have no way of |
| 5556 // backpatching it with the spill slot operands. | 5556 // backpatching it with the spill slot operands. |
| 5557 ASSERT(!environment->HasBeenRegistered()); | 5557 DCHECK(!environment->HasBeenRegistered()); |
| 5558 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 5558 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 5559 | 5559 |
| 5560 GenerateOsrPrologue(); | 5560 GenerateOsrPrologue(); |
| 5561 } | 5561 } |
| 5562 | 5562 |
| 5563 | 5563 |
| 5564 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 5564 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { |
| 5565 ASSERT(ToRegister(instr->context()).is(esi)); | 5565 DCHECK(ToRegister(instr->context()).is(esi)); |
| 5566 __ cmp(eax, isolate()->factory()->undefined_value()); | 5566 __ cmp(eax, isolate()->factory()->undefined_value()); |
| 5567 DeoptimizeIf(equal, instr->environment()); | 5567 DeoptimizeIf(equal, instr->environment()); |
| 5568 | 5568 |
| 5569 __ cmp(eax, isolate()->factory()->null_value()); | 5569 __ cmp(eax, isolate()->factory()->null_value()); |
| 5570 DeoptimizeIf(equal, instr->environment()); | 5570 DeoptimizeIf(equal, instr->environment()); |
| 5571 | 5571 |
| 5572 __ test(eax, Immediate(kSmiTagMask)); | 5572 __ test(eax, Immediate(kSmiTagMask)); |
| 5573 DeoptimizeIf(zero, instr->environment()); | 5573 DeoptimizeIf(zero, instr->environment()); |
| 5574 | 5574 |
| 5575 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 5575 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
| (...skipping 132 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5708 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 5708 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
| 5709 RecordSafepoint(Safepoint::kNoLazyDeopt); | 5709 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 5710 } | 5710 } |
| 5711 | 5711 |
| 5712 | 5712 |
| 5713 #undef __ | 5713 #undef __ |
| 5714 | 5714 |
| 5715 } } // namespace v8::internal | 5715 } } // namespace v8::internal |
| 5716 | 5716 |
| 5717 #endif // V8_TARGET_ARCH_X87 | 5717 #endif // V8_TARGET_ARCH_X87 |
| OLD | NEW |