OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
8 | 8 |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 28 matching lines...) Expand all Loading... |
39 LCodeGen* codegen_; | 39 LCodeGen* codegen_; |
40 LPointerMap* pointers_; | 40 LPointerMap* pointers_; |
41 Safepoint::DeoptMode deopt_mode_; | 41 Safepoint::DeoptMode deopt_mode_; |
42 }; | 42 }; |
43 | 43 |
44 | 44 |
45 #define __ masm()-> | 45 #define __ masm()-> |
46 | 46 |
47 bool LCodeGen::GenerateCode() { | 47 bool LCodeGen::GenerateCode() { |
48 LPhase phase("Z_Code generation", chunk()); | 48 LPhase phase("Z_Code generation", chunk()); |
49 ASSERT(is_unused()); | 49 DCHECK(is_unused()); |
50 status_ = GENERATING; | 50 status_ = GENERATING; |
51 | 51 |
52 // Open a frame scope to indicate that there is a frame on the stack. The | 52 // Open a frame scope to indicate that there is a frame on the stack. The |
53 // MANUAL indicates that the scope shouldn't actually generate code to set up | 53 // MANUAL indicates that the scope shouldn't actually generate code to set up |
54 // the frame (that is done in GeneratePrologue). | 54 // the frame (that is done in GeneratePrologue). |
55 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 55 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
56 | 56 |
57 support_aligned_spilled_doubles_ = info()->IsOptimizing(); | 57 support_aligned_spilled_doubles_ = info()->IsOptimizing(); |
58 | 58 |
59 dynamic_frame_alignment_ = info()->IsOptimizing() && | 59 dynamic_frame_alignment_ = info()->IsOptimizing() && |
60 ((chunk()->num_double_slots() > 2 && | 60 ((chunk()->num_double_slots() > 2 && |
61 !chunk()->graph()->is_recursive()) || | 61 !chunk()->graph()->is_recursive()) || |
62 !info()->osr_ast_id().IsNone()); | 62 !info()->osr_ast_id().IsNone()); |
63 | 63 |
64 return GeneratePrologue() && | 64 return GeneratePrologue() && |
65 GenerateBody() && | 65 GenerateBody() && |
66 GenerateDeferredCode() && | 66 GenerateDeferredCode() && |
67 GenerateJumpTable() && | 67 GenerateJumpTable() && |
68 GenerateSafepointTable(); | 68 GenerateSafepointTable(); |
69 } | 69 } |
70 | 70 |
71 | 71 |
72 void LCodeGen::FinishCode(Handle<Code> code) { | 72 void LCodeGen::FinishCode(Handle<Code> code) { |
73 ASSERT(is_done()); | 73 DCHECK(is_done()); |
74 code->set_stack_slots(GetStackSlotCount()); | 74 code->set_stack_slots(GetStackSlotCount()); |
75 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 75 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
76 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); | 76 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); |
77 PopulateDeoptimizationData(code); | 77 PopulateDeoptimizationData(code); |
78 if (!info()->IsStub()) { | 78 if (!info()->IsStub()) { |
79 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); | 79 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); |
80 } | 80 } |
81 } | 81 } |
82 | 82 |
83 | 83 |
84 #ifdef _MSC_VER | 84 #ifdef _MSC_VER |
85 void LCodeGen::MakeSureStackPagesMapped(int offset) { | 85 void LCodeGen::MakeSureStackPagesMapped(int offset) { |
86 const int kPageSize = 4 * KB; | 86 const int kPageSize = 4 * KB; |
87 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { | 87 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { |
88 __ mov(Operand(esp, offset), eax); | 88 __ mov(Operand(esp, offset), eax); |
89 } | 89 } |
90 } | 90 } |
91 #endif | 91 #endif |
92 | 92 |
93 | 93 |
94 void LCodeGen::SaveCallerDoubles() { | 94 void LCodeGen::SaveCallerDoubles() { |
95 ASSERT(info()->saves_caller_doubles()); | 95 DCHECK(info()->saves_caller_doubles()); |
96 ASSERT(NeedsEagerFrame()); | 96 DCHECK(NeedsEagerFrame()); |
97 Comment(";;; Save clobbered callee double registers"); | 97 Comment(";;; Save clobbered callee double registers"); |
98 int count = 0; | 98 int count = 0; |
99 BitVector* doubles = chunk()->allocated_double_registers(); | 99 BitVector* doubles = chunk()->allocated_double_registers(); |
100 BitVector::Iterator save_iterator(doubles); | 100 BitVector::Iterator save_iterator(doubles); |
101 while (!save_iterator.Done()) { | 101 while (!save_iterator.Done()) { |
102 __ movsd(MemOperand(esp, count * kDoubleSize), | 102 __ movsd(MemOperand(esp, count * kDoubleSize), |
103 XMMRegister::FromAllocationIndex(save_iterator.Current())); | 103 XMMRegister::FromAllocationIndex(save_iterator.Current())); |
104 save_iterator.Advance(); | 104 save_iterator.Advance(); |
105 count++; | 105 count++; |
106 } | 106 } |
107 } | 107 } |
108 | 108 |
109 | 109 |
110 void LCodeGen::RestoreCallerDoubles() { | 110 void LCodeGen::RestoreCallerDoubles() { |
111 ASSERT(info()->saves_caller_doubles()); | 111 DCHECK(info()->saves_caller_doubles()); |
112 ASSERT(NeedsEagerFrame()); | 112 DCHECK(NeedsEagerFrame()); |
113 Comment(";;; Restore clobbered callee double registers"); | 113 Comment(";;; Restore clobbered callee double registers"); |
114 BitVector* doubles = chunk()->allocated_double_registers(); | 114 BitVector* doubles = chunk()->allocated_double_registers(); |
115 BitVector::Iterator save_iterator(doubles); | 115 BitVector::Iterator save_iterator(doubles); |
116 int count = 0; | 116 int count = 0; |
117 while (!save_iterator.Done()) { | 117 while (!save_iterator.Done()) { |
118 __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()), | 118 __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()), |
119 MemOperand(esp, count * kDoubleSize)); | 119 MemOperand(esp, count * kDoubleSize)); |
120 save_iterator.Advance(); | 120 save_iterator.Advance(); |
121 count++; | 121 count++; |
122 } | 122 } |
123 } | 123 } |
124 | 124 |
125 | 125 |
126 bool LCodeGen::GeneratePrologue() { | 126 bool LCodeGen::GeneratePrologue() { |
127 ASSERT(is_generating()); | 127 DCHECK(is_generating()); |
128 | 128 |
129 if (info()->IsOptimizing()) { | 129 if (info()->IsOptimizing()) { |
130 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 130 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
131 | 131 |
132 #ifdef DEBUG | 132 #ifdef DEBUG |
133 if (strlen(FLAG_stop_at) > 0 && | 133 if (strlen(FLAG_stop_at) > 0 && |
134 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { | 134 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { |
135 __ int3(); | 135 __ int3(); |
136 } | 136 } |
137 #endif | 137 #endif |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
179 __ add(Operand(ebx), Immediate(kPointerSize)); | 179 __ add(Operand(ebx), Immediate(kPointerSize)); |
180 __ dec(ecx); | 180 __ dec(ecx); |
181 __ j(not_zero, &align_loop, Label::kNear); | 181 __ j(not_zero, &align_loop, Label::kNear); |
182 __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue)); | 182 __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue)); |
183 __ bind(&do_not_pad); | 183 __ bind(&do_not_pad); |
184 } | 184 } |
185 } | 185 } |
186 | 186 |
187 info()->set_prologue_offset(masm_->pc_offset()); | 187 info()->set_prologue_offset(masm_->pc_offset()); |
188 if (NeedsEagerFrame()) { | 188 if (NeedsEagerFrame()) { |
189 ASSERT(!frame_is_built_); | 189 DCHECK(!frame_is_built_); |
190 frame_is_built_ = true; | 190 frame_is_built_ = true; |
191 if (info()->IsStub()) { | 191 if (info()->IsStub()) { |
192 __ StubPrologue(); | 192 __ StubPrologue(); |
193 } else { | 193 } else { |
194 __ Prologue(info()->IsCodePreAgingActive()); | 194 __ Prologue(info()->IsCodePreAgingActive()); |
195 } | 195 } |
196 info()->AddNoFrameRange(0, masm_->pc_offset()); | 196 info()->AddNoFrameRange(0, masm_->pc_offset()); |
197 } | 197 } |
198 | 198 |
199 if (info()->IsOptimizing() && | 199 if (info()->IsOptimizing() && |
200 dynamic_frame_alignment_ && | 200 dynamic_frame_alignment_ && |
201 FLAG_debug_code) { | 201 FLAG_debug_code) { |
202 __ test(esp, Immediate(kPointerSize)); | 202 __ test(esp, Immediate(kPointerSize)); |
203 __ Assert(zero, kFrameIsExpectedToBeAligned); | 203 __ Assert(zero, kFrameIsExpectedToBeAligned); |
204 } | 204 } |
205 | 205 |
206 // Reserve space for the stack slots needed by the code. | 206 // Reserve space for the stack slots needed by the code. |
207 int slots = GetStackSlotCount(); | 207 int slots = GetStackSlotCount(); |
208 ASSERT(slots != 0 || !info()->IsOptimizing()); | 208 DCHECK(slots != 0 || !info()->IsOptimizing()); |
209 if (slots > 0) { | 209 if (slots > 0) { |
210 if (slots == 1) { | 210 if (slots == 1) { |
211 if (dynamic_frame_alignment_) { | 211 if (dynamic_frame_alignment_) { |
212 __ push(edx); | 212 __ push(edx); |
213 } else { | 213 } else { |
214 __ push(Immediate(kNoAlignmentPadding)); | 214 __ push(Immediate(kNoAlignmentPadding)); |
215 } | 215 } |
216 } else { | 216 } else { |
217 if (FLAG_debug_code) { | 217 if (FLAG_debug_code) { |
218 __ sub(Operand(esp), Immediate(slots * kPointerSize)); | 218 __ sub(Operand(esp), Immediate(slots * kPointerSize)); |
(...skipping 131 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
350 // Save the first local, which is overwritten by the alignment state. | 350 // Save the first local, which is overwritten by the alignment state. |
351 Operand alignment_loc = MemOperand(ebp, -3 * kPointerSize); | 351 Operand alignment_loc = MemOperand(ebp, -3 * kPointerSize); |
352 __ push(alignment_loc); | 352 __ push(alignment_loc); |
353 | 353 |
354 // Set the dynamic frame alignment state. | 354 // Set the dynamic frame alignment state. |
355 __ mov(alignment_loc, edx); | 355 __ mov(alignment_loc, edx); |
356 | 356 |
357 // Adjust the frame size, subsuming the unoptimized frame into the | 357 // Adjust the frame size, subsuming the unoptimized frame into the |
358 // optimized frame. | 358 // optimized frame. |
359 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 359 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); |
360 ASSERT(slots >= 1); | 360 DCHECK(slots >= 1); |
361 __ sub(esp, Immediate((slots - 1) * kPointerSize)); | 361 __ sub(esp, Immediate((slots - 1) * kPointerSize)); |
362 } | 362 } |
363 | 363 |
364 | 364 |
365 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 365 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { |
366 if (instr->IsCall()) { | 366 if (instr->IsCall()) { |
367 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 367 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
368 } | 368 } |
369 if (!instr->IsLazyBailout() && !instr->IsGap()) { | 369 if (!instr->IsLazyBailout() && !instr->IsGap()) { |
370 safepoints_.BumpLastLazySafepointIndex(); | 370 safepoints_.BumpLastLazySafepointIndex(); |
(...skipping 13 matching lines...) Expand all Loading... |
384 __ bind(&jump_table_[i].label); | 384 __ bind(&jump_table_[i].label); |
385 Address entry = jump_table_[i].address; | 385 Address entry = jump_table_[i].address; |
386 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; | 386 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; |
387 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 387 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
388 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 388 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
389 Comment(";;; jump table entry %d.", i); | 389 Comment(";;; jump table entry %d.", i); |
390 } else { | 390 } else { |
391 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 391 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
392 } | 392 } |
393 if (jump_table_[i].needs_frame) { | 393 if (jump_table_[i].needs_frame) { |
394 ASSERT(!info()->saves_caller_doubles()); | 394 DCHECK(!info()->saves_caller_doubles()); |
395 __ push(Immediate(ExternalReference::ForDeoptEntry(entry))); | 395 __ push(Immediate(ExternalReference::ForDeoptEntry(entry))); |
396 if (needs_frame.is_bound()) { | 396 if (needs_frame.is_bound()) { |
397 __ jmp(&needs_frame); | 397 __ jmp(&needs_frame); |
398 } else { | 398 } else { |
399 __ bind(&needs_frame); | 399 __ bind(&needs_frame); |
400 __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset)); | 400 __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset)); |
401 // This variant of deopt can only be used with stubs. Since we don't | 401 // This variant of deopt can only be used with stubs. Since we don't |
402 // have a function pointer to install in the stack frame that we're | 402 // have a function pointer to install in the stack frame that we're |
403 // building, install a special marker there instead. | 403 // building, install a special marker there instead. |
404 ASSERT(info()->IsStub()); | 404 DCHECK(info()->IsStub()); |
405 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); | 405 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); |
406 // Push a PC inside the function so that the deopt code can find where | 406 // Push a PC inside the function so that the deopt code can find where |
407 // the deopt comes from. It doesn't have to be the precise return | 407 // the deopt comes from. It doesn't have to be the precise return |
408 // address of a "calling" LAZY deopt, it only has to be somewhere | 408 // address of a "calling" LAZY deopt, it only has to be somewhere |
409 // inside the code body. | 409 // inside the code body. |
410 Label push_approx_pc; | 410 Label push_approx_pc; |
411 __ call(&push_approx_pc); | 411 __ call(&push_approx_pc); |
412 __ bind(&push_approx_pc); | 412 __ bind(&push_approx_pc); |
413 // Push the continuation which was stashed were the ebp should | 413 // Push the continuation which was stashed were the ebp should |
414 // be. Replace it with the saved ebp. | 414 // be. Replace it with the saved ebp. |
415 __ push(MemOperand(esp, 3 * kPointerSize)); | 415 __ push(MemOperand(esp, 3 * kPointerSize)); |
416 __ mov(MemOperand(esp, 4 * kPointerSize), ebp); | 416 __ mov(MemOperand(esp, 4 * kPointerSize), ebp); |
417 __ lea(ebp, MemOperand(esp, 4 * kPointerSize)); | 417 __ lea(ebp, MemOperand(esp, 4 * kPointerSize)); |
418 __ ret(0); // Call the continuation without clobbering registers. | 418 __ ret(0); // Call the continuation without clobbering registers. |
419 } | 419 } |
420 } else { | 420 } else { |
421 if (info()->saves_caller_doubles()) RestoreCallerDoubles(); | 421 if (info()->saves_caller_doubles()) RestoreCallerDoubles(); |
422 __ call(entry, RelocInfo::RUNTIME_ENTRY); | 422 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
423 } | 423 } |
424 } | 424 } |
425 return !is_aborted(); | 425 return !is_aborted(); |
426 } | 426 } |
427 | 427 |
428 | 428 |
429 bool LCodeGen::GenerateDeferredCode() { | 429 bool LCodeGen::GenerateDeferredCode() { |
430 ASSERT(is_generating()); | 430 DCHECK(is_generating()); |
431 if (deferred_.length() > 0) { | 431 if (deferred_.length() > 0) { |
432 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 432 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
433 LDeferredCode* code = deferred_[i]; | 433 LDeferredCode* code = deferred_[i]; |
434 | 434 |
435 HValue* value = | 435 HValue* value = |
436 instructions_->at(code->instruction_index())->hydrogen_value(); | 436 instructions_->at(code->instruction_index())->hydrogen_value(); |
437 RecordAndWritePosition( | 437 RecordAndWritePosition( |
438 chunk()->graph()->SourcePositionToScriptPosition(value->position())); | 438 chunk()->graph()->SourcePositionToScriptPosition(value->position())); |
439 | 439 |
440 Comment(";;; <@%d,#%d> " | 440 Comment(";;; <@%d,#%d> " |
441 "-------------------- Deferred %s --------------------", | 441 "-------------------- Deferred %s --------------------", |
442 code->instruction_index(), | 442 code->instruction_index(), |
443 code->instr()->hydrogen_value()->id(), | 443 code->instr()->hydrogen_value()->id(), |
444 code->instr()->Mnemonic()); | 444 code->instr()->Mnemonic()); |
445 __ bind(code->entry()); | 445 __ bind(code->entry()); |
446 if (NeedsDeferredFrame()) { | 446 if (NeedsDeferredFrame()) { |
447 Comment(";;; Build frame"); | 447 Comment(";;; Build frame"); |
448 ASSERT(!frame_is_built_); | 448 DCHECK(!frame_is_built_); |
449 ASSERT(info()->IsStub()); | 449 DCHECK(info()->IsStub()); |
450 frame_is_built_ = true; | 450 frame_is_built_ = true; |
451 // Build the frame in such a way that esi isn't trashed. | 451 // Build the frame in such a way that esi isn't trashed. |
452 __ push(ebp); // Caller's frame pointer. | 452 __ push(ebp); // Caller's frame pointer. |
453 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); | 453 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); |
454 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); | 454 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); |
455 __ lea(ebp, Operand(esp, 2 * kPointerSize)); | 455 __ lea(ebp, Operand(esp, 2 * kPointerSize)); |
456 Comment(";;; Deferred code"); | 456 Comment(";;; Deferred code"); |
457 } | 457 } |
458 code->Generate(); | 458 code->Generate(); |
459 if (NeedsDeferredFrame()) { | 459 if (NeedsDeferredFrame()) { |
460 __ bind(code->done()); | 460 __ bind(code->done()); |
461 Comment(";;; Destroy frame"); | 461 Comment(";;; Destroy frame"); |
462 ASSERT(frame_is_built_); | 462 DCHECK(frame_is_built_); |
463 frame_is_built_ = false; | 463 frame_is_built_ = false; |
464 __ mov(esp, ebp); | 464 __ mov(esp, ebp); |
465 __ pop(ebp); | 465 __ pop(ebp); |
466 } | 466 } |
467 __ jmp(code->exit()); | 467 __ jmp(code->exit()); |
468 } | 468 } |
469 } | 469 } |
470 | 470 |
471 // Deferred code is the last part of the instruction sequence. Mark | 471 // Deferred code is the last part of the instruction sequence. Mark |
472 // the generated code as done unless we bailed out. | 472 // the generated code as done unless we bailed out. |
473 if (!is_aborted()) status_ = DONE; | 473 if (!is_aborted()) status_ = DONE; |
474 return !is_aborted(); | 474 return !is_aborted(); |
475 } | 475 } |
476 | 476 |
477 | 477 |
478 bool LCodeGen::GenerateSafepointTable() { | 478 bool LCodeGen::GenerateSafepointTable() { |
479 ASSERT(is_done()); | 479 DCHECK(is_done()); |
480 if (!info()->IsStub()) { | 480 if (!info()->IsStub()) { |
481 // For lazy deoptimization we need space to patch a call after every call. | 481 // For lazy deoptimization we need space to patch a call after every call. |
482 // Ensure there is always space for such patching, even if the code ends | 482 // Ensure there is always space for such patching, even if the code ends |
483 // in a call. | 483 // in a call. |
484 int target_offset = masm()->pc_offset() + Deoptimizer::patch_size(); | 484 int target_offset = masm()->pc_offset() + Deoptimizer::patch_size(); |
485 while (masm()->pc_offset() < target_offset) { | 485 while (masm()->pc_offset() < target_offset) { |
486 masm()->nop(); | 486 masm()->nop(); |
487 } | 487 } |
488 } | 488 } |
489 safepoints_.Emit(masm(), GetStackSlotCount()); | 489 safepoints_.Emit(masm(), GetStackSlotCount()); |
490 return !is_aborted(); | 490 return !is_aborted(); |
491 } | 491 } |
492 | 492 |
493 | 493 |
494 Register LCodeGen::ToRegister(int index) const { | 494 Register LCodeGen::ToRegister(int index) const { |
495 return Register::FromAllocationIndex(index); | 495 return Register::FromAllocationIndex(index); |
496 } | 496 } |
497 | 497 |
498 | 498 |
499 XMMRegister LCodeGen::ToDoubleRegister(int index) const { | 499 XMMRegister LCodeGen::ToDoubleRegister(int index) const { |
500 return XMMRegister::FromAllocationIndex(index); | 500 return XMMRegister::FromAllocationIndex(index); |
501 } | 501 } |
502 | 502 |
503 | 503 |
504 Register LCodeGen::ToRegister(LOperand* op) const { | 504 Register LCodeGen::ToRegister(LOperand* op) const { |
505 ASSERT(op->IsRegister()); | 505 DCHECK(op->IsRegister()); |
506 return ToRegister(op->index()); | 506 return ToRegister(op->index()); |
507 } | 507 } |
508 | 508 |
509 | 509 |
510 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const { | 510 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const { |
511 ASSERT(op->IsDoubleRegister()); | 511 DCHECK(op->IsDoubleRegister()); |
512 return ToDoubleRegister(op->index()); | 512 return ToDoubleRegister(op->index()); |
513 } | 513 } |
514 | 514 |
515 | 515 |
516 int32_t LCodeGen::ToInteger32(LConstantOperand* op) const { | 516 int32_t LCodeGen::ToInteger32(LConstantOperand* op) const { |
517 return ToRepresentation(op, Representation::Integer32()); | 517 return ToRepresentation(op, Representation::Integer32()); |
518 } | 518 } |
519 | 519 |
520 | 520 |
521 int32_t LCodeGen::ToRepresentation(LConstantOperand* op, | 521 int32_t LCodeGen::ToRepresentation(LConstantOperand* op, |
522 const Representation& r) const { | 522 const Representation& r) const { |
523 HConstant* constant = chunk_->LookupConstant(op); | 523 HConstant* constant = chunk_->LookupConstant(op); |
524 int32_t value = constant->Integer32Value(); | 524 int32_t value = constant->Integer32Value(); |
525 if (r.IsInteger32()) return value; | 525 if (r.IsInteger32()) return value; |
526 ASSERT(r.IsSmiOrTagged()); | 526 DCHECK(r.IsSmiOrTagged()); |
527 return reinterpret_cast<int32_t>(Smi::FromInt(value)); | 527 return reinterpret_cast<int32_t>(Smi::FromInt(value)); |
528 } | 528 } |
529 | 529 |
530 | 530 |
531 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { | 531 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { |
532 HConstant* constant = chunk_->LookupConstant(op); | 532 HConstant* constant = chunk_->LookupConstant(op); |
533 ASSERT(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged()); | 533 DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged()); |
534 return constant->handle(isolate()); | 534 return constant->handle(isolate()); |
535 } | 535 } |
536 | 536 |
537 | 537 |
538 double LCodeGen::ToDouble(LConstantOperand* op) const { | 538 double LCodeGen::ToDouble(LConstantOperand* op) const { |
539 HConstant* constant = chunk_->LookupConstant(op); | 539 HConstant* constant = chunk_->LookupConstant(op); |
540 ASSERT(constant->HasDoubleValue()); | 540 DCHECK(constant->HasDoubleValue()); |
541 return constant->DoubleValue(); | 541 return constant->DoubleValue(); |
542 } | 542 } |
543 | 543 |
544 | 544 |
545 ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const { | 545 ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const { |
546 HConstant* constant = chunk_->LookupConstant(op); | 546 HConstant* constant = chunk_->LookupConstant(op); |
547 ASSERT(constant->HasExternalReferenceValue()); | 547 DCHECK(constant->HasExternalReferenceValue()); |
548 return constant->ExternalReferenceValue(); | 548 return constant->ExternalReferenceValue(); |
549 } | 549 } |
550 | 550 |
551 | 551 |
552 bool LCodeGen::IsInteger32(LConstantOperand* op) const { | 552 bool LCodeGen::IsInteger32(LConstantOperand* op) const { |
553 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); | 553 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); |
554 } | 554 } |
555 | 555 |
556 | 556 |
557 bool LCodeGen::IsSmi(LConstantOperand* op) const { | 557 bool LCodeGen::IsSmi(LConstantOperand* op) const { |
558 return chunk_->LookupLiteralRepresentation(op).IsSmi(); | 558 return chunk_->LookupLiteralRepresentation(op).IsSmi(); |
559 } | 559 } |
560 | 560 |
561 | 561 |
562 static int ArgumentsOffsetWithoutFrame(int index) { | 562 static int ArgumentsOffsetWithoutFrame(int index) { |
563 ASSERT(index < 0); | 563 DCHECK(index < 0); |
564 return -(index + 1) * kPointerSize + kPCOnStackSize; | 564 return -(index + 1) * kPointerSize + kPCOnStackSize; |
565 } | 565 } |
566 | 566 |
567 | 567 |
568 Operand LCodeGen::ToOperand(LOperand* op) const { | 568 Operand LCodeGen::ToOperand(LOperand* op) const { |
569 if (op->IsRegister()) return Operand(ToRegister(op)); | 569 if (op->IsRegister()) return Operand(ToRegister(op)); |
570 if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op)); | 570 if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op)); |
571 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); | 571 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
572 if (NeedsEagerFrame()) { | 572 if (NeedsEagerFrame()) { |
573 return Operand(ebp, StackSlotOffset(op->index())); | 573 return Operand(ebp, StackSlotOffset(op->index())); |
574 } else { | 574 } else { |
575 // Retrieve parameter without eager stack-frame relative to the | 575 // Retrieve parameter without eager stack-frame relative to the |
576 // stack-pointer. | 576 // stack-pointer. |
577 return Operand(esp, ArgumentsOffsetWithoutFrame(op->index())); | 577 return Operand(esp, ArgumentsOffsetWithoutFrame(op->index())); |
578 } | 578 } |
579 } | 579 } |
580 | 580 |
581 | 581 |
582 Operand LCodeGen::HighOperand(LOperand* op) { | 582 Operand LCodeGen::HighOperand(LOperand* op) { |
583 ASSERT(op->IsDoubleStackSlot()); | 583 DCHECK(op->IsDoubleStackSlot()); |
584 if (NeedsEagerFrame()) { | 584 if (NeedsEagerFrame()) { |
585 return Operand(ebp, StackSlotOffset(op->index()) + kPointerSize); | 585 return Operand(ebp, StackSlotOffset(op->index()) + kPointerSize); |
586 } else { | 586 } else { |
587 // Retrieve parameter without eager stack-frame relative to the | 587 // Retrieve parameter without eager stack-frame relative to the |
588 // stack-pointer. | 588 // stack-pointer. |
589 return Operand( | 589 return Operand( |
590 esp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize); | 590 esp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize); |
591 } | 591 } |
592 } | 592 } |
593 | 593 |
(...skipping 14 matching lines...) Expand all Loading... |
608 ? DefineDeoptimizationLiteral(environment->closure()) | 608 ? DefineDeoptimizationLiteral(environment->closure()) |
609 : Translation::kSelfLiteralId; | 609 : Translation::kSelfLiteralId; |
610 switch (environment->frame_type()) { | 610 switch (environment->frame_type()) { |
611 case JS_FUNCTION: | 611 case JS_FUNCTION: |
612 translation->BeginJSFrame(environment->ast_id(), closure_id, height); | 612 translation->BeginJSFrame(environment->ast_id(), closure_id, height); |
613 break; | 613 break; |
614 case JS_CONSTRUCT: | 614 case JS_CONSTRUCT: |
615 translation->BeginConstructStubFrame(closure_id, translation_size); | 615 translation->BeginConstructStubFrame(closure_id, translation_size); |
616 break; | 616 break; |
617 case JS_GETTER: | 617 case JS_GETTER: |
618 ASSERT(translation_size == 1); | 618 DCHECK(translation_size == 1); |
619 ASSERT(height == 0); | 619 DCHECK(height == 0); |
620 translation->BeginGetterStubFrame(closure_id); | 620 translation->BeginGetterStubFrame(closure_id); |
621 break; | 621 break; |
622 case JS_SETTER: | 622 case JS_SETTER: |
623 ASSERT(translation_size == 2); | 623 DCHECK(translation_size == 2); |
624 ASSERT(height == 0); | 624 DCHECK(height == 0); |
625 translation->BeginSetterStubFrame(closure_id); | 625 translation->BeginSetterStubFrame(closure_id); |
626 break; | 626 break; |
627 case ARGUMENTS_ADAPTOR: | 627 case ARGUMENTS_ADAPTOR: |
628 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size); | 628 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size); |
629 break; | 629 break; |
630 case STUB: | 630 case STUB: |
631 translation->BeginCompiledStubFrame(); | 631 translation->BeginCompiledStubFrame(); |
632 break; | 632 break; |
633 default: | 633 default: |
634 UNREACHABLE(); | 634 UNREACHABLE(); |
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
714 } else { | 714 } else { |
715 UNREACHABLE(); | 715 UNREACHABLE(); |
716 } | 716 } |
717 } | 717 } |
718 | 718 |
719 | 719 |
720 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 720 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
721 RelocInfo::Mode mode, | 721 RelocInfo::Mode mode, |
722 LInstruction* instr, | 722 LInstruction* instr, |
723 SafepointMode safepoint_mode) { | 723 SafepointMode safepoint_mode) { |
724 ASSERT(instr != NULL); | 724 DCHECK(instr != NULL); |
725 __ call(code, mode); | 725 __ call(code, mode); |
726 RecordSafepointWithLazyDeopt(instr, safepoint_mode); | 726 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
727 | 727 |
728 // Signal that we don't inline smi code before these stubs in the | 728 // Signal that we don't inline smi code before these stubs in the |
729 // optimizing code generator. | 729 // optimizing code generator. |
730 if (code->kind() == Code::BINARY_OP_IC || | 730 if (code->kind() == Code::BINARY_OP_IC || |
731 code->kind() == Code::COMPARE_IC) { | 731 code->kind() == Code::COMPARE_IC) { |
732 __ nop(); | 732 __ nop(); |
733 } | 733 } |
734 } | 734 } |
735 | 735 |
736 | 736 |
737 void LCodeGen::CallCode(Handle<Code> code, | 737 void LCodeGen::CallCode(Handle<Code> code, |
738 RelocInfo::Mode mode, | 738 RelocInfo::Mode mode, |
739 LInstruction* instr) { | 739 LInstruction* instr) { |
740 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); | 740 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); |
741 } | 741 } |
742 | 742 |
743 | 743 |
744 void LCodeGen::CallRuntime(const Runtime::Function* fun, | 744 void LCodeGen::CallRuntime(const Runtime::Function* fun, |
745 int argc, | 745 int argc, |
746 LInstruction* instr, | 746 LInstruction* instr, |
747 SaveFPRegsMode save_doubles) { | 747 SaveFPRegsMode save_doubles) { |
748 ASSERT(instr != NULL); | 748 DCHECK(instr != NULL); |
749 ASSERT(instr->HasPointerMap()); | 749 DCHECK(instr->HasPointerMap()); |
750 | 750 |
751 __ CallRuntime(fun, argc, save_doubles); | 751 __ CallRuntime(fun, argc, save_doubles); |
752 | 752 |
753 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 753 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
754 | 754 |
755 ASSERT(info()->is_calling()); | 755 DCHECK(info()->is_calling()); |
756 } | 756 } |
757 | 757 |
758 | 758 |
759 void LCodeGen::LoadContextFromDeferred(LOperand* context) { | 759 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
760 if (context->IsRegister()) { | 760 if (context->IsRegister()) { |
761 if (!ToRegister(context).is(esi)) { | 761 if (!ToRegister(context).is(esi)) { |
762 __ mov(esi, ToRegister(context)); | 762 __ mov(esi, ToRegister(context)); |
763 } | 763 } |
764 } else if (context->IsStackSlot()) { | 764 } else if (context->IsStackSlot()) { |
765 __ mov(esi, ToOperand(context)); | 765 __ mov(esi, ToOperand(context)); |
766 } else if (context->IsConstantOperand()) { | 766 } else if (context->IsConstantOperand()) { |
767 HConstant* constant = | 767 HConstant* constant = |
768 chunk_->LookupConstant(LConstantOperand::cast(context)); | 768 chunk_->LookupConstant(LConstantOperand::cast(context)); |
769 __ LoadObject(esi, Handle<Object>::cast(constant->handle(isolate()))); | 769 __ LoadObject(esi, Handle<Object>::cast(constant->handle(isolate()))); |
770 } else { | 770 } else { |
771 UNREACHABLE(); | 771 UNREACHABLE(); |
772 } | 772 } |
773 } | 773 } |
774 | 774 |
// Calls the runtime function |id| with |argc| arguments from within a
// deferred-code section: loads the context operand into esi first, then
// performs the double-preserving runtime call and records a safepoint
// with register information (no lazy deopt) for |instr|.
void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr,
                                       LOperand* context) {
  LoadContextFromDeferred(context);

  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);

  // Compilation info must already have been flagged as making calls.
  DCHECK(info()->is_calling());
}
787 | 787 |
788 | 788 |
789 void LCodeGen::RegisterEnvironmentForDeoptimization( | 789 void LCodeGen::RegisterEnvironmentForDeoptimization( |
790 LEnvironment* environment, Safepoint::DeoptMode mode) { | 790 LEnvironment* environment, Safepoint::DeoptMode mode) { |
791 environment->set_has_been_used(); | 791 environment->set_has_been_used(); |
792 if (!environment->HasBeenRegistered()) { | 792 if (!environment->HasBeenRegistered()) { |
793 // Physical stack frame layout: | 793 // Physical stack frame layout: |
794 // -x ............. -4 0 ..................................... y | 794 // -x ............. -4 0 ..................................... y |
795 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 795 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
(...skipping 24 matching lines...) Expand all Loading... |
820 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 820 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
821 deoptimizations_.Add(environment, zone()); | 821 deoptimizations_.Add(environment, zone()); |
822 } | 822 } |
823 } | 823 } |
824 | 824 |
825 | 825 |
826 void LCodeGen::DeoptimizeIf(Condition cc, | 826 void LCodeGen::DeoptimizeIf(Condition cc, |
827 LEnvironment* environment, | 827 LEnvironment* environment, |
828 Deoptimizer::BailoutType bailout_type) { | 828 Deoptimizer::BailoutType bailout_type) { |
829 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 829 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
830 ASSERT(environment->HasBeenRegistered()); | 830 DCHECK(environment->HasBeenRegistered()); |
831 int id = environment->deoptimization_index(); | 831 int id = environment->deoptimization_index(); |
832 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 832 DCHECK(info()->IsOptimizing() || info()->IsStub()); |
833 Address entry = | 833 Address entry = |
834 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 834 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
835 if (entry == NULL) { | 835 if (entry == NULL) { |
836 Abort(kBailoutWasNotPrepared); | 836 Abort(kBailoutWasNotPrepared); |
837 return; | 837 return; |
838 } | 838 } |
839 | 839 |
840 if (DeoptEveryNTimes()) { | 840 if (DeoptEveryNTimes()) { |
841 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); | 841 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); |
842 Label no_deopt; | 842 Label no_deopt; |
843 __ pushfd(); | 843 __ pushfd(); |
844 __ push(eax); | 844 __ push(eax); |
845 __ mov(eax, Operand::StaticVariable(count)); | 845 __ mov(eax, Operand::StaticVariable(count)); |
846 __ sub(eax, Immediate(1)); | 846 __ sub(eax, Immediate(1)); |
847 __ j(not_zero, &no_deopt, Label::kNear); | 847 __ j(not_zero, &no_deopt, Label::kNear); |
848 if (FLAG_trap_on_deopt) __ int3(); | 848 if (FLAG_trap_on_deopt) __ int3(); |
849 __ mov(eax, Immediate(FLAG_deopt_every_n_times)); | 849 __ mov(eax, Immediate(FLAG_deopt_every_n_times)); |
850 __ mov(Operand::StaticVariable(count), eax); | 850 __ mov(Operand::StaticVariable(count), eax); |
851 __ pop(eax); | 851 __ pop(eax); |
852 __ popfd(); | 852 __ popfd(); |
853 ASSERT(frame_is_built_); | 853 DCHECK(frame_is_built_); |
854 __ call(entry, RelocInfo::RUNTIME_ENTRY); | 854 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
855 __ bind(&no_deopt); | 855 __ bind(&no_deopt); |
856 __ mov(Operand::StaticVariable(count), eax); | 856 __ mov(Operand::StaticVariable(count), eax); |
857 __ pop(eax); | 857 __ pop(eax); |
858 __ popfd(); | 858 __ popfd(); |
859 } | 859 } |
860 | 860 |
861 if (info()->ShouldTrapOnDeopt()) { | 861 if (info()->ShouldTrapOnDeopt()) { |
862 Label done; | 862 Label done; |
863 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); | 863 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear); |
864 __ int3(); | 864 __ int3(); |
865 __ bind(&done); | 865 __ bind(&done); |
866 } | 866 } |
867 | 867 |
868 ASSERT(info()->IsStub() || frame_is_built_); | 868 DCHECK(info()->IsStub() || frame_is_built_); |
869 if (cc == no_condition && frame_is_built_) { | 869 if (cc == no_condition && frame_is_built_) { |
870 __ call(entry, RelocInfo::RUNTIME_ENTRY); | 870 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
871 } else { | 871 } else { |
872 // We often have several deopts to the same entry, reuse the last | 872 // We often have several deopts to the same entry, reuse the last |
873 // jump entry if this is the case. | 873 // jump entry if this is the case. |
874 if (jump_table_.is_empty() || | 874 if (jump_table_.is_empty() || |
875 jump_table_.last().address != entry || | 875 jump_table_.last().address != entry || |
876 jump_table_.last().needs_frame != !frame_is_built_ || | 876 jump_table_.last().needs_frame != !frame_is_built_ || |
877 jump_table_.last().bailout_type != bailout_type) { | 877 jump_table_.last().bailout_type != bailout_type) { |
878 Deoptimizer::JumpTableEntry table_entry(entry, | 878 Deoptimizer::JumpTableEntry table_entry(entry, |
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
946 int result = deoptimization_literals_.length(); | 946 int result = deoptimization_literals_.length(); |
947 for (int i = 0; i < deoptimization_literals_.length(); ++i) { | 947 for (int i = 0; i < deoptimization_literals_.length(); ++i) { |
948 if (deoptimization_literals_[i].is_identical_to(literal)) return i; | 948 if (deoptimization_literals_[i].is_identical_to(literal)) return i; |
949 } | 949 } |
950 deoptimization_literals_.Add(literal, zone()); | 950 deoptimization_literals_.Add(literal, zone()); |
951 return result; | 951 return result; |
952 } | 952 } |
953 | 953 |
954 | 954 |
955 void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() { | 955 void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() { |
956 ASSERT(deoptimization_literals_.length() == 0); | 956 DCHECK(deoptimization_literals_.length() == 0); |
957 | 957 |
958 const ZoneList<Handle<JSFunction> >* inlined_closures = | 958 const ZoneList<Handle<JSFunction> >* inlined_closures = |
959 chunk()->inlined_closures(); | 959 chunk()->inlined_closures(); |
960 | 960 |
961 for (int i = 0, length = inlined_closures->length(); | 961 for (int i = 0, length = inlined_closures->length(); |
962 i < length; | 962 i < length; |
963 i++) { | 963 i++) { |
964 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 964 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
965 } | 965 } |
966 | 966 |
967 inlined_function_count_ = deoptimization_literals_.length(); | 967 inlined_function_count_ = deoptimization_literals_.length(); |
968 } | 968 } |
969 | 969 |
970 | 970 |
971 void LCodeGen::RecordSafepointWithLazyDeopt( | 971 void LCodeGen::RecordSafepointWithLazyDeopt( |
972 LInstruction* instr, SafepointMode safepoint_mode) { | 972 LInstruction* instr, SafepointMode safepoint_mode) { |
973 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | 973 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { |
974 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt); | 974 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt); |
975 } else { | 975 } else { |
976 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 976 DCHECK(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
977 RecordSafepointWithRegisters( | 977 RecordSafepointWithRegisters( |
978 instr->pointer_map(), 0, Safepoint::kLazyDeopt); | 978 instr->pointer_map(), 0, Safepoint::kLazyDeopt); |
979 } | 979 } |
980 } | 980 } |
981 | 981 |
982 | 982 |
983 void LCodeGen::RecordSafepoint( | 983 void LCodeGen::RecordSafepoint( |
984 LPointerMap* pointers, | 984 LPointerMap* pointers, |
985 Safepoint::Kind kind, | 985 Safepoint::Kind kind, |
986 int arguments, | 986 int arguments, |
987 Safepoint::DeoptMode deopt_mode) { | 987 Safepoint::DeoptMode deopt_mode) { |
988 ASSERT(kind == expected_safepoint_kind_); | 988 DCHECK(kind == expected_safepoint_kind_); |
989 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); | 989 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); |
990 Safepoint safepoint = | 990 Safepoint safepoint = |
991 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode); | 991 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode); |
992 for (int i = 0; i < operands->length(); i++) { | 992 for (int i = 0; i < operands->length(); i++) { |
993 LOperand* pointer = operands->at(i); | 993 LOperand* pointer = operands->at(i); |
994 if (pointer->IsStackSlot()) { | 994 if (pointer->IsStackSlot()) { |
995 safepoint.DefinePointerSlot(pointer->index(), zone()); | 995 safepoint.DefinePointerSlot(pointer->index(), zone()); |
996 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 996 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
997 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); | 997 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); |
998 } | 998 } |
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1065 DoGap(instr); | 1065 DoGap(instr); |
1066 } | 1066 } |
1067 | 1067 |
1068 | 1068 |
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do: no code is emitted for parameters here (presumably
  // their values are already in place when this code runs — the setup
  // is not visible in this function).
}
1072 | 1072 |
1073 | 1073 |
1074 void LCodeGen::DoCallStub(LCallStub* instr) { | 1074 void LCodeGen::DoCallStub(LCallStub* instr) { |
1075 ASSERT(ToRegister(instr->context()).is(esi)); | 1075 DCHECK(ToRegister(instr->context()).is(esi)); |
1076 ASSERT(ToRegister(instr->result()).is(eax)); | 1076 DCHECK(ToRegister(instr->result()).is(eax)); |
1077 switch (instr->hydrogen()->major_key()) { | 1077 switch (instr->hydrogen()->major_key()) { |
1078 case CodeStub::RegExpExec: { | 1078 case CodeStub::RegExpExec: { |
1079 RegExpExecStub stub(isolate()); | 1079 RegExpExecStub stub(isolate()); |
1080 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1080 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
1081 break; | 1081 break; |
1082 } | 1082 } |
1083 case CodeStub::SubString: { | 1083 case CodeStub::SubString: { |
1084 SubStringStub stub(isolate()); | 1084 SubStringStub stub(isolate()); |
1085 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1085 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
1086 break; | 1086 break; |
(...skipping 10 matching lines...) Expand all Loading... |
1097 | 1097 |
1098 | 1098 |
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Delegates entirely to the OSR prologue; the handling of the unknown
  // OSR value itself lives in GenerateOsrPrologue (not visible here).
  GenerateOsrPrologue();
}
1102 | 1102 |
1103 | 1103 |
1104 void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) { | 1104 void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) { |
1105 Register dividend = ToRegister(instr->dividend()); | 1105 Register dividend = ToRegister(instr->dividend()); |
1106 int32_t divisor = instr->divisor(); | 1106 int32_t divisor = instr->divisor(); |
1107 ASSERT(dividend.is(ToRegister(instr->result()))); | 1107 DCHECK(dividend.is(ToRegister(instr->result()))); |
1108 | 1108 |
1109 // Theoretically, a variation of the branch-free code for integer division by | 1109 // Theoretically, a variation of the branch-free code for integer division by |
1110 // a power of 2 (calculating the remainder via an additional multiplication | 1110 // a power of 2 (calculating the remainder via an additional multiplication |
1111 // (which gets simplified to an 'and') and subtraction) should be faster, and | 1111 // (which gets simplified to an 'and') and subtraction) should be faster, and |
1112 // this is exactly what GCC and clang emit. Nevertheless, benchmarks seem to | 1112 // this is exactly what GCC and clang emit. Nevertheless, benchmarks seem to |
1113 // indicate that positive dividends are heavily favored, so the branching | 1113 // indicate that positive dividends are heavily favored, so the branching |
1114 // version performs better. | 1114 // version performs better. |
1115 HMod* hmod = instr->hydrogen(); | 1115 HMod* hmod = instr->hydrogen(); |
1116 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1116 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1117 Label dividend_is_not_negative, done; | 1117 Label dividend_is_not_negative, done; |
(...skipping 12 matching lines...) Expand all Loading... |
1130 | 1130 |
1131 __ bind(÷nd_is_not_negative); | 1131 __ bind(÷nd_is_not_negative); |
1132 __ and_(dividend, mask); | 1132 __ and_(dividend, mask); |
1133 __ bind(&done); | 1133 __ bind(&done); |
1134 } | 1134 } |
1135 | 1135 |
1136 | 1136 |
// Computes dividend % (constant divisor) without an idiv, using
// n % d == n - (n / d) * d with a reciprocal-multiplication division.
void LCodeGen::DoModByConstI(LModByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(ToRegister(instr->result()).is(eax));

  // x % 0 has no int32 result; unconditionally deoptimize.
  if (divisor == 0) {
    DeoptimizeIf(no_condition, instr->environment());
    return;
  }

  // TruncatingDiv leaves the truncated quotient in edx (the imul/sub
  // below rely on that to reconstruct the remainder in eax):
  //   eax = dividend - (dividend / |divisor|) * |divisor|
  __ TruncatingDiv(dividend, Abs(divisor));
  __ imul(edx, edx, Abs(divisor));
  __ mov(eax, dividend);
  __ sub(eax, edx);

  // Check for negative zero: a zero remainder from a negative dividend
  // would be -0, which is not representable as an int32, so deopt.
  // (The zero-flag tested here comes from the sub above.)
  HMod* hmod = instr->hydrogen();
  if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label remainder_not_zero;
    __ j(not_zero, &remainder_not_zero, Label::kNear);
    __ cmp(dividend, Immediate(0));
    DeoptimizeIf(less, instr->environment());
    __ bind(&remainder_not_zero);
  }
}
1162 | 1162 |
1163 | 1163 |
1164 void LCodeGen::DoModI(LModI* instr) { | 1164 void LCodeGen::DoModI(LModI* instr) { |
1165 HMod* hmod = instr->hydrogen(); | 1165 HMod* hmod = instr->hydrogen(); |
1166 | 1166 |
1167 Register left_reg = ToRegister(instr->left()); | 1167 Register left_reg = ToRegister(instr->left()); |
1168 ASSERT(left_reg.is(eax)); | 1168 DCHECK(left_reg.is(eax)); |
1169 Register right_reg = ToRegister(instr->right()); | 1169 Register right_reg = ToRegister(instr->right()); |
1170 ASSERT(!right_reg.is(eax)); | 1170 DCHECK(!right_reg.is(eax)); |
1171 ASSERT(!right_reg.is(edx)); | 1171 DCHECK(!right_reg.is(edx)); |
1172 Register result_reg = ToRegister(instr->result()); | 1172 Register result_reg = ToRegister(instr->result()); |
1173 ASSERT(result_reg.is(edx)); | 1173 DCHECK(result_reg.is(edx)); |
1174 | 1174 |
1175 Label done; | 1175 Label done; |
1176 // Check for x % 0, idiv would signal a divide error. We have to | 1176 // Check for x % 0, idiv would signal a divide error. We have to |
1177 // deopt in this case because we can't return a NaN. | 1177 // deopt in this case because we can't return a NaN. |
1178 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 1178 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { |
1179 __ test(right_reg, Operand(right_reg)); | 1179 __ test(right_reg, Operand(right_reg)); |
1180 DeoptimizeIf(zero, instr->environment()); | 1180 DeoptimizeIf(zero, instr->environment()); |
1181 } | 1181 } |
1182 | 1182 |
1183 // Check for kMinInt % -1, idiv would signal a divide error. We | 1183 // Check for kMinInt % -1, idiv would signal a divide error. We |
(...skipping 29 matching lines...) Expand all Loading... |
1213 } | 1213 } |
1214 __ idiv(right_reg); | 1214 __ idiv(right_reg); |
1215 __ bind(&done); | 1215 __ bind(&done); |
1216 } | 1216 } |
1217 | 1217 |
1218 | 1218 |
1219 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 1219 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { |
1220 Register dividend = ToRegister(instr->dividend()); | 1220 Register dividend = ToRegister(instr->dividend()); |
1221 int32_t divisor = instr->divisor(); | 1221 int32_t divisor = instr->divisor(); |
1222 Register result = ToRegister(instr->result()); | 1222 Register result = ToRegister(instr->result()); |
1223 ASSERT(divisor == kMinInt || IsPowerOf2(Abs(divisor))); | 1223 DCHECK(divisor == kMinInt || IsPowerOf2(Abs(divisor))); |
1224 ASSERT(!result.is(dividend)); | 1224 DCHECK(!result.is(dividend)); |
1225 | 1225 |
1226 // Check for (0 / -x) that will produce negative zero. | 1226 // Check for (0 / -x) that will produce negative zero. |
1227 HDiv* hdiv = instr->hydrogen(); | 1227 HDiv* hdiv = instr->hydrogen(); |
1228 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1228 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1229 __ test(dividend, dividend); | 1229 __ test(dividend, dividend); |
1230 DeoptimizeIf(zero, instr->environment()); | 1230 DeoptimizeIf(zero, instr->environment()); |
1231 } | 1231 } |
1232 // Check for (kMinInt / -1). | 1232 // Check for (kMinInt / -1). |
1233 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 1233 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { |
1234 __ cmp(dividend, kMinInt); | 1234 __ cmp(dividend, kMinInt); |
(...skipping 15 matching lines...) Expand all Loading... |
1250 __ add(result, dividend); | 1250 __ add(result, dividend); |
1251 __ sar(result, shift); | 1251 __ sar(result, shift); |
1252 } | 1252 } |
1253 if (divisor < 0) __ neg(result); | 1253 if (divisor < 0) __ neg(result); |
1254 } | 1254 } |
1255 | 1255 |
1256 | 1256 |
1257 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { | 1257 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { |
1258 Register dividend = ToRegister(instr->dividend()); | 1258 Register dividend = ToRegister(instr->dividend()); |
1259 int32_t divisor = instr->divisor(); | 1259 int32_t divisor = instr->divisor(); |
1260 ASSERT(ToRegister(instr->result()).is(edx)); | 1260 DCHECK(ToRegister(instr->result()).is(edx)); |
1261 | 1261 |
1262 if (divisor == 0) { | 1262 if (divisor == 0) { |
1263 DeoptimizeIf(no_condition, instr->environment()); | 1263 DeoptimizeIf(no_condition, instr->environment()); |
1264 return; | 1264 return; |
1265 } | 1265 } |
1266 | 1266 |
1267 // Check for (0 / -x) that will produce negative zero. | 1267 // Check for (0 / -x) that will produce negative zero. |
1268 HDiv* hdiv = instr->hydrogen(); | 1268 HDiv* hdiv = instr->hydrogen(); |
1269 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1269 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1270 __ test(dividend, dividend); | 1270 __ test(dividend, dividend); |
(...skipping 11 matching lines...) Expand all Loading... |
1282 } | 1282 } |
1283 } | 1283 } |
1284 | 1284 |
1285 | 1285 |
1286 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1286 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. |
1287 void LCodeGen::DoDivI(LDivI* instr) { | 1287 void LCodeGen::DoDivI(LDivI* instr) { |
1288 HBinaryOperation* hdiv = instr->hydrogen(); | 1288 HBinaryOperation* hdiv = instr->hydrogen(); |
1289 Register dividend = ToRegister(instr->dividend()); | 1289 Register dividend = ToRegister(instr->dividend()); |
1290 Register divisor = ToRegister(instr->divisor()); | 1290 Register divisor = ToRegister(instr->divisor()); |
1291 Register remainder = ToRegister(instr->temp()); | 1291 Register remainder = ToRegister(instr->temp()); |
1292 ASSERT(dividend.is(eax)); | 1292 DCHECK(dividend.is(eax)); |
1293 ASSERT(remainder.is(edx)); | 1293 DCHECK(remainder.is(edx)); |
1294 ASSERT(ToRegister(instr->result()).is(eax)); | 1294 DCHECK(ToRegister(instr->result()).is(eax)); |
1295 ASSERT(!divisor.is(eax)); | 1295 DCHECK(!divisor.is(eax)); |
1296 ASSERT(!divisor.is(edx)); | 1296 DCHECK(!divisor.is(edx)); |
1297 | 1297 |
1298 // Check for x / 0. | 1298 // Check for x / 0. |
1299 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1299 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1300 __ test(divisor, divisor); | 1300 __ test(divisor, divisor); |
1301 DeoptimizeIf(zero, instr->environment()); | 1301 DeoptimizeIf(zero, instr->environment()); |
1302 } | 1302 } |
1303 | 1303 |
1304 // Check for (0 / -x) that will produce negative zero. | 1304 // Check for (0 / -x) that will produce negative zero. |
1305 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1305 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1306 Label dividend_not_zero; | 1306 Label dividend_not_zero; |
(...skipping 22 matching lines...) Expand all Loading... |
1329 // Deoptimize if remainder is not 0. | 1329 // Deoptimize if remainder is not 0. |
1330 __ test(remainder, remainder); | 1330 __ test(remainder, remainder); |
1331 DeoptimizeIf(not_zero, instr->environment()); | 1331 DeoptimizeIf(not_zero, instr->environment()); |
1332 } | 1332 } |
1333 } | 1333 } |
1334 | 1334 |
1335 | 1335 |
1336 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { | 1336 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { |
1337 Register dividend = ToRegister(instr->dividend()); | 1337 Register dividend = ToRegister(instr->dividend()); |
1338 int32_t divisor = instr->divisor(); | 1338 int32_t divisor = instr->divisor(); |
1339 ASSERT(dividend.is(ToRegister(instr->result()))); | 1339 DCHECK(dividend.is(ToRegister(instr->result()))); |
1340 | 1340 |
1341 // If the divisor is positive, things are easy: There can be no deopts and we | 1341 // If the divisor is positive, things are easy: There can be no deopts and we |
1342 // can simply do an arithmetic right shift. | 1342 // can simply do an arithmetic right shift. |
1343 if (divisor == 1) return; | 1343 if (divisor == 1) return; |
1344 int32_t shift = WhichPowerOf2Abs(divisor); | 1344 int32_t shift = WhichPowerOf2Abs(divisor); |
1345 if (divisor > 1) { | 1345 if (divisor > 1) { |
1346 __ sar(dividend, shift); | 1346 __ sar(dividend, shift); |
1347 return; | 1347 return; |
1348 } | 1348 } |
1349 | 1349 |
(...skipping 23 matching lines...) Expand all Loading... |
1373 __ jmp(&done, Label::kNear); | 1373 __ jmp(&done, Label::kNear); |
1374 __ bind(¬_kmin_int); | 1374 __ bind(¬_kmin_int); |
1375 __ sar(dividend, shift); | 1375 __ sar(dividend, shift); |
1376 __ bind(&done); | 1376 __ bind(&done); |
1377 } | 1377 } |
1378 | 1378 |
1379 | 1379 |
// Computes a flooring (round-toward-negative-infinity) division of the
// dividend by a constant divisor; the result is produced in edx.
void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(ToRegister(instr->result()).is(edx));

  // Division by zero always deoptimizes.
  if (divisor == 0) {
    DeoptimizeIf(no_condition, instr->environment());
    return;
  }

  // Check for (0 / -x) that will produce negative zero.
  HMathFloorOfDiv* hdiv = instr->hydrogen();
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
    __ test(dividend, dividend);
    DeoptimizeIf(zero, instr->environment());
  }

  // Easy case: when the dividend's sign provably cannot oppose the
  // divisor's, the flooring division equals the truncating division.
  if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) ||
      (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) {
    __ TruncatingDiv(dividend, Abs(divisor));
    if (divisor < 0) __ neg(edx);
    return;
  }

  // In the general case we may need to adjust before and after the
  // truncating division to get a flooring division: when the signs
  // differ, divide (dividend +/- 1) and then decrement the quotient.
  // temp must not alias dividend or eax/edx, which TruncatingDiv uses.
  Register temp = ToRegister(instr->temp3());
  DCHECK(!temp.is(dividend) && !temp.is(eax) && !temp.is(edx));
  Label needs_adjustment, done;
  __ cmp(dividend, Immediate(0));
  __ j(divisor > 0 ? less : greater, &needs_adjustment, Label::kNear);
  // Same-sign path: truncation already rounds toward -infinity here.
  __ TruncatingDiv(dividend, Abs(divisor));
  if (divisor < 0) __ neg(edx);
  __ jmp(&done, Label::kNear);
  __ bind(&needs_adjustment);
  __ lea(temp, Operand(dividend, divisor > 0 ? 1 : -1));
  __ TruncatingDiv(temp, Abs(divisor));
  if (divisor < 0) __ neg(edx);
  __ dec(edx);
  __ bind(&done);
}
1423 | 1423 |
1424 | 1424 |
1425 // TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. | 1425 // TODO(svenpanne) Refactor this to avoid code duplication with DoDivI. |
1426 void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { | 1426 void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) { |
1427 HBinaryOperation* hdiv = instr->hydrogen(); | 1427 HBinaryOperation* hdiv = instr->hydrogen(); |
1428 Register dividend = ToRegister(instr->dividend()); | 1428 Register dividend = ToRegister(instr->dividend()); |
1429 Register divisor = ToRegister(instr->divisor()); | 1429 Register divisor = ToRegister(instr->divisor()); |
1430 Register remainder = ToRegister(instr->temp()); | 1430 Register remainder = ToRegister(instr->temp()); |
1431 Register result = ToRegister(instr->result()); | 1431 Register result = ToRegister(instr->result()); |
1432 ASSERT(dividend.is(eax)); | 1432 DCHECK(dividend.is(eax)); |
1433 ASSERT(remainder.is(edx)); | 1433 DCHECK(remainder.is(edx)); |
1434 ASSERT(result.is(eax)); | 1434 DCHECK(result.is(eax)); |
1435 ASSERT(!divisor.is(eax)); | 1435 DCHECK(!divisor.is(eax)); |
1436 ASSERT(!divisor.is(edx)); | 1436 DCHECK(!divisor.is(edx)); |
1437 | 1437 |
1438 // Check for x / 0. | 1438 // Check for x / 0. |
1439 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1439 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1440 __ test(divisor, divisor); | 1440 __ test(divisor, divisor); |
1441 DeoptimizeIf(zero, instr->environment()); | 1441 DeoptimizeIf(zero, instr->environment()); |
1442 } | 1442 } |
1443 | 1443 |
1444 // Check for (0 / -x) that will produce negative zero. | 1444 // Check for (0 / -x) that will produce negative zero. |
1445 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1445 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1446 Label dividend_not_zero; | 1446 Label dividend_not_zero; |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1556 DeoptimizeIf(sign, instr->environment()); | 1556 DeoptimizeIf(sign, instr->environment()); |
1557 } | 1557 } |
1558 __ bind(&done); | 1558 __ bind(&done); |
1559 } | 1559 } |
1560 } | 1560 } |
1561 | 1561 |
1562 | 1562 |
1563 void LCodeGen::DoBitI(LBitI* instr) { | 1563 void LCodeGen::DoBitI(LBitI* instr) { |
1564 LOperand* left = instr->left(); | 1564 LOperand* left = instr->left(); |
1565 LOperand* right = instr->right(); | 1565 LOperand* right = instr->right(); |
1566 ASSERT(left->Equals(instr->result())); | 1566 DCHECK(left->Equals(instr->result())); |
1567 ASSERT(left->IsRegister()); | 1567 DCHECK(left->IsRegister()); |
1568 | 1568 |
1569 if (right->IsConstantOperand()) { | 1569 if (right->IsConstantOperand()) { |
1570 int32_t right_operand = | 1570 int32_t right_operand = |
1571 ToRepresentation(LConstantOperand::cast(right), | 1571 ToRepresentation(LConstantOperand::cast(right), |
1572 instr->hydrogen()->representation()); | 1572 instr->hydrogen()->representation()); |
1573 switch (instr->op()) { | 1573 switch (instr->op()) { |
1574 case Token::BIT_AND: | 1574 case Token::BIT_AND: |
1575 __ and_(ToRegister(left), right_operand); | 1575 __ and_(ToRegister(left), right_operand); |
1576 break; | 1576 break; |
1577 case Token::BIT_OR: | 1577 case Token::BIT_OR: |
(...skipping 25 matching lines...) Expand all Loading... |
1603 UNREACHABLE(); | 1603 UNREACHABLE(); |
1604 break; | 1604 break; |
1605 } | 1605 } |
1606 } | 1606 } |
1607 } | 1607 } |
1608 | 1608 |
1609 | 1609 |
1610 void LCodeGen::DoShiftI(LShiftI* instr) { | 1610 void LCodeGen::DoShiftI(LShiftI* instr) { |
1611 LOperand* left = instr->left(); | 1611 LOperand* left = instr->left(); |
1612 LOperand* right = instr->right(); | 1612 LOperand* right = instr->right(); |
1613 ASSERT(left->Equals(instr->result())); | 1613 DCHECK(left->Equals(instr->result())); |
1614 ASSERT(left->IsRegister()); | 1614 DCHECK(left->IsRegister()); |
1615 if (right->IsRegister()) { | 1615 if (right->IsRegister()) { |
1616 ASSERT(ToRegister(right).is(ecx)); | 1616 DCHECK(ToRegister(right).is(ecx)); |
1617 | 1617 |
1618 switch (instr->op()) { | 1618 switch (instr->op()) { |
1619 case Token::ROR: | 1619 case Token::ROR: |
1620 __ ror_cl(ToRegister(left)); | 1620 __ ror_cl(ToRegister(left)); |
1621 if (instr->can_deopt()) { | 1621 if (instr->can_deopt()) { |
1622 __ test(ToRegister(left), ToRegister(left)); | 1622 __ test(ToRegister(left), ToRegister(left)); |
1623 DeoptimizeIf(sign, instr->environment()); | 1623 DeoptimizeIf(sign, instr->environment()); |
1624 } | 1624 } |
1625 break; | 1625 break; |
1626 case Token::SAR: | 1626 case Token::SAR: |
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1683 UNREACHABLE(); | 1683 UNREACHABLE(); |
1684 break; | 1684 break; |
1685 } | 1685 } |
1686 } | 1686 } |
1687 } | 1687 } |
1688 | 1688 |
1689 | 1689 |
// Integer subtraction in two-address form: the left operand is also the
// result. Deoptimizes on signed overflow when the hydrogen instruction
// reports that overflow is possible.
void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  DCHECK(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ sub(ToOperand(left),
           ToImmediate(right, instr->hydrogen()->representation()));
  } else {
    __ sub(ToRegister(left), ToOperand(right));
  }
  // The overflow flag consumed here is the one set by the sub above, so
  // no instruction may be emitted in between.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}
1705 | 1705 |
1706 | 1706 |
1707 void LCodeGen::DoConstantI(LConstantI* instr) { | 1707 void LCodeGen::DoConstantI(LConstantI* instr) { |
1708 __ Move(ToRegister(instr->result()), Immediate(instr->value())); | 1708 __ Move(ToRegister(instr->result()), Immediate(instr->value())); |
1709 } | 1709 } |
1710 | 1710 |
1711 | 1711 |
1712 void LCodeGen::DoConstantS(LConstantS* instr) { | 1712 void LCodeGen::DoConstantS(LConstantS* instr) { |
1713 __ Move(ToRegister(instr->result()), Immediate(instr->value())); | 1713 __ Move(ToRegister(instr->result()), Immediate(instr->value())); |
1714 } | 1714 } |
1715 | 1715 |
1716 | 1716 |
1717 void LCodeGen::DoConstantD(LConstantD* instr) { | 1717 void LCodeGen::DoConstantD(LConstantD* instr) { |
1718 double v = instr->value(); | 1718 double v = instr->value(); |
1719 uint64_t int_val = BitCast<uint64_t, double>(v); | 1719 uint64_t int_val = BitCast<uint64_t, double>(v); |
1720 int32_t lower = static_cast<int32_t>(int_val); | 1720 int32_t lower = static_cast<int32_t>(int_val); |
1721 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); | 1721 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); |
1722 ASSERT(instr->result()->IsDoubleRegister()); | 1722 DCHECK(instr->result()->IsDoubleRegister()); |
1723 | 1723 |
1724 XMMRegister res = ToDoubleRegister(instr->result()); | 1724 XMMRegister res = ToDoubleRegister(instr->result()); |
1725 if (int_val == 0) { | 1725 if (int_val == 0) { |
1726 __ xorps(res, res); | 1726 __ xorps(res, res); |
1727 } else { | 1727 } else { |
1728 Register temp = ToRegister(instr->temp()); | 1728 Register temp = ToRegister(instr->temp()); |
1729 if (CpuFeatures::IsSupported(SSE4_1)) { | 1729 if (CpuFeatures::IsSupported(SSE4_1)) { |
1730 CpuFeatureScope scope2(masm(), SSE4_1); | 1730 CpuFeatureScope scope2(masm(), SSE4_1); |
1731 if (lower != 0) { | 1731 if (lower != 0) { |
1732 __ Move(temp, Immediate(lower)); | 1732 __ Move(temp, Immediate(lower)); |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1772 __ EnumLength(result, map); | 1772 __ EnumLength(result, map); |
1773 } | 1773 } |
1774 | 1774 |
1775 | 1775 |
1776 void LCodeGen::DoDateField(LDateField* instr) { | 1776 void LCodeGen::DoDateField(LDateField* instr) { |
1777 Register object = ToRegister(instr->date()); | 1777 Register object = ToRegister(instr->date()); |
1778 Register result = ToRegister(instr->result()); | 1778 Register result = ToRegister(instr->result()); |
1779 Register scratch = ToRegister(instr->temp()); | 1779 Register scratch = ToRegister(instr->temp()); |
1780 Smi* index = instr->index(); | 1780 Smi* index = instr->index(); |
1781 Label runtime, done; | 1781 Label runtime, done; |
1782 ASSERT(object.is(result)); | 1782 DCHECK(object.is(result)); |
1783 ASSERT(object.is(eax)); | 1783 DCHECK(object.is(eax)); |
1784 | 1784 |
1785 __ test(object, Immediate(kSmiTagMask)); | 1785 __ test(object, Immediate(kSmiTagMask)); |
1786 DeoptimizeIf(zero, instr->environment()); | 1786 DeoptimizeIf(zero, instr->environment()); |
1787 __ CmpObjectType(object, JS_DATE_TYPE, scratch); | 1787 __ CmpObjectType(object, JS_DATE_TYPE, scratch); |
1788 DeoptimizeIf(not_equal, instr->environment()); | 1788 DeoptimizeIf(not_equal, instr->environment()); |
1789 | 1789 |
1790 if (index->value() == 0) { | 1790 if (index->value() == 0) { |
1791 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); | 1791 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); |
1792 } else { | 1792 } else { |
1793 if (index->value() < JSDate::kFirstUncachedField) { | 1793 if (index->value() < JSDate::kFirstUncachedField) { |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1868 int encoding_mask = | 1868 int encoding_mask = |
1869 instr->hydrogen()->encoding() == String::ONE_BYTE_ENCODING | 1869 instr->hydrogen()->encoding() == String::ONE_BYTE_ENCODING |
1870 ? one_byte_seq_type : two_byte_seq_type; | 1870 ? one_byte_seq_type : two_byte_seq_type; |
1871 __ EmitSeqStringSetCharCheck(string, index, value, encoding_mask); | 1871 __ EmitSeqStringSetCharCheck(string, index, value, encoding_mask); |
1872 } | 1872 } |
1873 | 1873 |
1874 Operand operand = BuildSeqStringOperand(string, instr->index(), encoding); | 1874 Operand operand = BuildSeqStringOperand(string, instr->index(), encoding); |
1875 if (instr->value()->IsConstantOperand()) { | 1875 if (instr->value()->IsConstantOperand()) { |
1876 int value = ToRepresentation(LConstantOperand::cast(instr->value()), | 1876 int value = ToRepresentation(LConstantOperand::cast(instr->value()), |
1877 Representation::Integer32()); | 1877 Representation::Integer32()); |
1878 ASSERT_LE(0, value); | 1878 DCHECK_LE(0, value); |
1879 if (encoding == String::ONE_BYTE_ENCODING) { | 1879 if (encoding == String::ONE_BYTE_ENCODING) { |
1880 ASSERT_LE(value, String::kMaxOneByteCharCode); | 1880 DCHECK_LE(value, String::kMaxOneByteCharCode); |
1881 __ mov_b(operand, static_cast<int8_t>(value)); | 1881 __ mov_b(operand, static_cast<int8_t>(value)); |
1882 } else { | 1882 } else { |
1883 ASSERT_LE(value, String::kMaxUtf16CodeUnit); | 1883 DCHECK_LE(value, String::kMaxUtf16CodeUnit); |
1884 __ mov_w(operand, static_cast<int16_t>(value)); | 1884 __ mov_w(operand, static_cast<int16_t>(value)); |
1885 } | 1885 } |
1886 } else { | 1886 } else { |
1887 Register value = ToRegister(instr->value()); | 1887 Register value = ToRegister(instr->value()); |
1888 if (encoding == String::ONE_BYTE_ENCODING) { | 1888 if (encoding == String::ONE_BYTE_ENCODING) { |
1889 __ mov_b(operand, value); | 1889 __ mov_b(operand, value); |
1890 } else { | 1890 } else { |
1891 __ mov_w(operand, value); | 1891 __ mov_w(operand, value); |
1892 } | 1892 } |
1893 } | 1893 } |
(...skipping 23 matching lines...) Expand all Loading... |
1917 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1917 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1918 DeoptimizeIf(overflow, instr->environment()); | 1918 DeoptimizeIf(overflow, instr->environment()); |
1919 } | 1919 } |
1920 } | 1920 } |
1921 } | 1921 } |
1922 | 1922 |
1923 | 1923 |
1924 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1924 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
1925 LOperand* left = instr->left(); | 1925 LOperand* left = instr->left(); |
1926 LOperand* right = instr->right(); | 1926 LOperand* right = instr->right(); |
1927 ASSERT(left->Equals(instr->result())); | 1927 DCHECK(left->Equals(instr->result())); |
1928 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1928 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
1929 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { | 1929 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { |
1930 Label return_left; | 1930 Label return_left; |
1931 Condition condition = (operation == HMathMinMax::kMathMin) | 1931 Condition condition = (operation == HMathMinMax::kMathMin) |
1932 ? less_equal | 1932 ? less_equal |
1933 : greater_equal; | 1933 : greater_equal; |
1934 if (right->IsConstantOperand()) { | 1934 if (right->IsConstantOperand()) { |
1935 Operand left_op = ToOperand(left); | 1935 Operand left_op = ToOperand(left); |
1936 Immediate immediate = ToImmediate(LConstantOperand::cast(instr->right()), | 1936 Immediate immediate = ToImmediate(LConstantOperand::cast(instr->right()), |
1937 instr->hydrogen()->representation()); | 1937 instr->hydrogen()->representation()); |
1938 __ cmp(left_op, immediate); | 1938 __ cmp(left_op, immediate); |
1939 __ j(condition, &return_left, Label::kNear); | 1939 __ j(condition, &return_left, Label::kNear); |
1940 __ mov(left_op, immediate); | 1940 __ mov(left_op, immediate); |
1941 } else { | 1941 } else { |
1942 Register left_reg = ToRegister(left); | 1942 Register left_reg = ToRegister(left); |
1943 Operand right_op = ToOperand(right); | 1943 Operand right_op = ToOperand(right); |
1944 __ cmp(left_reg, right_op); | 1944 __ cmp(left_reg, right_op); |
1945 __ j(condition, &return_left, Label::kNear); | 1945 __ j(condition, &return_left, Label::kNear); |
1946 __ mov(left_reg, right_op); | 1946 __ mov(left_reg, right_op); |
1947 } | 1947 } |
1948 __ bind(&return_left); | 1948 __ bind(&return_left); |
1949 } else { | 1949 } else { |
1950 ASSERT(instr->hydrogen()->representation().IsDouble()); | 1950 DCHECK(instr->hydrogen()->representation().IsDouble()); |
1951 Label check_nan_left, check_zero, return_left, return_right; | 1951 Label check_nan_left, check_zero, return_left, return_right; |
1952 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; | 1952 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; |
1953 XMMRegister left_reg = ToDoubleRegister(left); | 1953 XMMRegister left_reg = ToDoubleRegister(left); |
1954 XMMRegister right_reg = ToDoubleRegister(right); | 1954 XMMRegister right_reg = ToDoubleRegister(right); |
1955 __ ucomisd(left_reg, right_reg); | 1955 __ ucomisd(left_reg, right_reg); |
1956 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. | 1956 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. |
1957 __ j(equal, &check_zero, Label::kNear); // left == right. | 1957 __ j(equal, &check_zero, Label::kNear); // left == right. |
1958 __ j(condition, &return_left, Label::kNear); | 1958 __ j(condition, &return_left, Label::kNear); |
1959 __ jmp(&return_right, Label::kNear); | 1959 __ jmp(&return_right, Label::kNear); |
1960 | 1960 |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2021 break; | 2021 break; |
2022 } | 2022 } |
2023 default: | 2023 default: |
2024 UNREACHABLE(); | 2024 UNREACHABLE(); |
2025 break; | 2025 break; |
2026 } | 2026 } |
2027 } | 2027 } |
2028 | 2028 |
2029 | 2029 |
2030 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 2030 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
2031 ASSERT(ToRegister(instr->context()).is(esi)); | 2031 DCHECK(ToRegister(instr->context()).is(esi)); |
2032 ASSERT(ToRegister(instr->left()).is(edx)); | 2032 DCHECK(ToRegister(instr->left()).is(edx)); |
2033 ASSERT(ToRegister(instr->right()).is(eax)); | 2033 DCHECK(ToRegister(instr->right()).is(eax)); |
2034 ASSERT(ToRegister(instr->result()).is(eax)); | 2034 DCHECK(ToRegister(instr->result()).is(eax)); |
2035 | 2035 |
2036 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); | 2036 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); |
2037 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2037 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2038 } | 2038 } |
2039 | 2039 |
2040 | 2040 |
2041 template<class InstrType> | 2041 template<class InstrType> |
2042 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { | 2042 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { |
2043 int left_block = instr->TrueDestination(chunk_); | 2043 int left_block = instr->TrueDestination(chunk_); |
2044 int right_block = instr->FalseDestination(chunk_); | 2044 int right_block = instr->FalseDestination(chunk_); |
(...skipping 24 matching lines...) Expand all Loading... |
2069 } | 2069 } |
2070 | 2070 |
2071 | 2071 |
2072 void LCodeGen::DoBranch(LBranch* instr) { | 2072 void LCodeGen::DoBranch(LBranch* instr) { |
2073 Representation r = instr->hydrogen()->value()->representation(); | 2073 Representation r = instr->hydrogen()->value()->representation(); |
2074 if (r.IsSmiOrInteger32()) { | 2074 if (r.IsSmiOrInteger32()) { |
2075 Register reg = ToRegister(instr->value()); | 2075 Register reg = ToRegister(instr->value()); |
2076 __ test(reg, Operand(reg)); | 2076 __ test(reg, Operand(reg)); |
2077 EmitBranch(instr, not_zero); | 2077 EmitBranch(instr, not_zero); |
2078 } else if (r.IsDouble()) { | 2078 } else if (r.IsDouble()) { |
2079 ASSERT(!info()->IsStub()); | 2079 DCHECK(!info()->IsStub()); |
2080 XMMRegister reg = ToDoubleRegister(instr->value()); | 2080 XMMRegister reg = ToDoubleRegister(instr->value()); |
2081 XMMRegister xmm_scratch = double_scratch0(); | 2081 XMMRegister xmm_scratch = double_scratch0(); |
2082 __ xorps(xmm_scratch, xmm_scratch); | 2082 __ xorps(xmm_scratch, xmm_scratch); |
2083 __ ucomisd(reg, xmm_scratch); | 2083 __ ucomisd(reg, xmm_scratch); |
2084 EmitBranch(instr, not_equal); | 2084 EmitBranch(instr, not_equal); |
2085 } else { | 2085 } else { |
2086 ASSERT(r.IsTagged()); | 2086 DCHECK(r.IsTagged()); |
2087 Register reg = ToRegister(instr->value()); | 2087 Register reg = ToRegister(instr->value()); |
2088 HType type = instr->hydrogen()->value()->type(); | 2088 HType type = instr->hydrogen()->value()->type(); |
2089 if (type.IsBoolean()) { | 2089 if (type.IsBoolean()) { |
2090 ASSERT(!info()->IsStub()); | 2090 DCHECK(!info()->IsStub()); |
2091 __ cmp(reg, factory()->true_value()); | 2091 __ cmp(reg, factory()->true_value()); |
2092 EmitBranch(instr, equal); | 2092 EmitBranch(instr, equal); |
2093 } else if (type.IsSmi()) { | 2093 } else if (type.IsSmi()) { |
2094 ASSERT(!info()->IsStub()); | 2094 DCHECK(!info()->IsStub()); |
2095 __ test(reg, Operand(reg)); | 2095 __ test(reg, Operand(reg)); |
2096 EmitBranch(instr, not_equal); | 2096 EmitBranch(instr, not_equal); |
2097 } else if (type.IsJSArray()) { | 2097 } else if (type.IsJSArray()) { |
2098 ASSERT(!info()->IsStub()); | 2098 DCHECK(!info()->IsStub()); |
2099 EmitBranch(instr, no_condition); | 2099 EmitBranch(instr, no_condition); |
2100 } else if (type.IsHeapNumber()) { | 2100 } else if (type.IsHeapNumber()) { |
2101 ASSERT(!info()->IsStub()); | 2101 DCHECK(!info()->IsStub()); |
2102 XMMRegister xmm_scratch = double_scratch0(); | 2102 XMMRegister xmm_scratch = double_scratch0(); |
2103 __ xorps(xmm_scratch, xmm_scratch); | 2103 __ xorps(xmm_scratch, xmm_scratch); |
2104 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); | 2104 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); |
2105 EmitBranch(instr, not_equal); | 2105 EmitBranch(instr, not_equal); |
2106 } else if (type.IsString()) { | 2106 } else if (type.IsString()) { |
2107 ASSERT(!info()->IsStub()); | 2107 DCHECK(!info()->IsStub()); |
2108 __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0)); | 2108 __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0)); |
2109 EmitBranch(instr, not_equal); | 2109 EmitBranch(instr, not_equal); |
2110 } else { | 2110 } else { |
2111 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); | 2111 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
2112 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); | 2112 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); |
2113 | 2113 |
2114 if (expected.Contains(ToBooleanStub::UNDEFINED)) { | 2114 if (expected.Contains(ToBooleanStub::UNDEFINED)) { |
2115 // undefined -> false. | 2115 // undefined -> false. |
2116 __ cmp(reg, factory()->undefined_value()); | 2116 __ cmp(reg, factory()->undefined_value()); |
2117 __ j(equal, instr->FalseLabel(chunk_)); | 2117 __ j(equal, instr->FalseLabel(chunk_)); |
(...skipping 19 matching lines...) Expand all Loading... |
2137 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2137 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
2138 } else if (expected.NeedsMap()) { | 2138 } else if (expected.NeedsMap()) { |
2139 // If we need a map later and have a Smi -> deopt. | 2139 // If we need a map later and have a Smi -> deopt. |
2140 __ test(reg, Immediate(kSmiTagMask)); | 2140 __ test(reg, Immediate(kSmiTagMask)); |
2141 DeoptimizeIf(zero, instr->environment()); | 2141 DeoptimizeIf(zero, instr->environment()); |
2142 } | 2142 } |
2143 | 2143 |
2144 Register map = no_reg; // Keep the compiler happy. | 2144 Register map = no_reg; // Keep the compiler happy. |
2145 if (expected.NeedsMap()) { | 2145 if (expected.NeedsMap()) { |
2146 map = ToRegister(instr->temp()); | 2146 map = ToRegister(instr->temp()); |
2147 ASSERT(!map.is(reg)); | 2147 DCHECK(!map.is(reg)); |
2148 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); | 2148 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); |
2149 | 2149 |
2150 if (expected.CanBeUndetectable()) { | 2150 if (expected.CanBeUndetectable()) { |
2151 // Undetectable -> false. | 2151 // Undetectable -> false. |
2152 __ test_b(FieldOperand(map, Map::kBitFieldOffset), | 2152 __ test_b(FieldOperand(map, Map::kBitFieldOffset), |
2153 1 << Map::kIsUndetectable); | 2153 1 << Map::kIsUndetectable); |
2154 __ j(not_zero, instr->FalseLabel(chunk_)); | 2154 __ j(not_zero, instr->FalseLabel(chunk_)); |
2155 } | 2155 } |
2156 } | 2156 } |
2157 | 2157 |
(...skipping 159 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2317 | 2317 |
2318 __ add(esp, Immediate(kDoubleSize)); | 2318 __ add(esp, Immediate(kDoubleSize)); |
2319 int offset = sizeof(kHoleNanUpper32); | 2319 int offset = sizeof(kHoleNanUpper32); |
2320 __ cmp(MemOperand(esp, -offset), Immediate(kHoleNanUpper32)); | 2320 __ cmp(MemOperand(esp, -offset), Immediate(kHoleNanUpper32)); |
2321 EmitBranch(instr, equal); | 2321 EmitBranch(instr, equal); |
2322 } | 2322 } |
2323 | 2323 |
2324 | 2324 |
2325 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) { | 2325 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) { |
2326 Representation rep = instr->hydrogen()->value()->representation(); | 2326 Representation rep = instr->hydrogen()->value()->representation(); |
2327 ASSERT(!rep.IsInteger32()); | 2327 DCHECK(!rep.IsInteger32()); |
2328 Register scratch = ToRegister(instr->temp()); | 2328 Register scratch = ToRegister(instr->temp()); |
2329 | 2329 |
2330 if (rep.IsDouble()) { | 2330 if (rep.IsDouble()) { |
2331 XMMRegister value = ToDoubleRegister(instr->value()); | 2331 XMMRegister value = ToDoubleRegister(instr->value()); |
2332 XMMRegister xmm_scratch = double_scratch0(); | 2332 XMMRegister xmm_scratch = double_scratch0(); |
2333 __ xorps(xmm_scratch, xmm_scratch); | 2333 __ xorps(xmm_scratch, xmm_scratch); |
2334 __ ucomisd(xmm_scratch, value); | 2334 __ ucomisd(xmm_scratch, value); |
2335 EmitFalseBranch(instr, not_equal); | 2335 EmitFalseBranch(instr, not_equal); |
2336 __ movmskpd(scratch, value); | 2336 __ movmskpd(scratch, value); |
2337 __ test(scratch, Immediate(1)); | 2337 __ test(scratch, Immediate(1)); |
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2466 __ test(eax, Operand(eax)); | 2466 __ test(eax, Operand(eax)); |
2467 | 2467 |
2468 EmitBranch(instr, condition); | 2468 EmitBranch(instr, condition); |
2469 } | 2469 } |
2470 | 2470 |
2471 | 2471 |
2472 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) { | 2472 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) { |
2473 InstanceType from = instr->from(); | 2473 InstanceType from = instr->from(); |
2474 InstanceType to = instr->to(); | 2474 InstanceType to = instr->to(); |
2475 if (from == FIRST_TYPE) return to; | 2475 if (from == FIRST_TYPE) return to; |
2476 ASSERT(from == to || to == LAST_TYPE); | 2476 DCHECK(from == to || to == LAST_TYPE); |
2477 return from; | 2477 return from; |
2478 } | 2478 } |
2479 | 2479 |
2480 | 2480 |
2481 static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) { | 2481 static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) { |
2482 InstanceType from = instr->from(); | 2482 InstanceType from = instr->from(); |
2483 InstanceType to = instr->to(); | 2483 InstanceType to = instr->to(); |
2484 if (from == to) return equal; | 2484 if (from == to) return equal; |
2485 if (to == LAST_TYPE) return above_equal; | 2485 if (to == LAST_TYPE) return above_equal; |
2486 if (from == FIRST_TYPE) return below_equal; | 2486 if (from == FIRST_TYPE) return below_equal; |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2524 | 2524 |
2525 | 2525 |
2526 // Branches to a label or falls through with the answer in the z flag. Trashes | 2526 // Branches to a label or falls through with the answer in the z flag. Trashes |
2527 // the temp registers, but not the input. | 2527 // the temp registers, but not the input. |
2528 void LCodeGen::EmitClassOfTest(Label* is_true, | 2528 void LCodeGen::EmitClassOfTest(Label* is_true, |
2529 Label* is_false, | 2529 Label* is_false, |
2530 Handle<String>class_name, | 2530 Handle<String>class_name, |
2531 Register input, | 2531 Register input, |
2532 Register temp, | 2532 Register temp, |
2533 Register temp2) { | 2533 Register temp2) { |
2534 ASSERT(!input.is(temp)); | 2534 DCHECK(!input.is(temp)); |
2535 ASSERT(!input.is(temp2)); | 2535 DCHECK(!input.is(temp2)); |
2536 ASSERT(!temp.is(temp2)); | 2536 DCHECK(!temp.is(temp2)); |
2537 __ JumpIfSmi(input, is_false); | 2537 __ JumpIfSmi(input, is_false); |
2538 | 2538 |
2539 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Function"))) { | 2539 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Function"))) { |
2540 // Assuming the following assertions, we can use the same compares to test | 2540 // Assuming the following assertions, we can use the same compares to test |
2541 // for both being a function type and being in the object type range. | 2541 // for both being a function type and being in the object type range. |
2542 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | 2542 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); |
2543 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == | 2543 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == |
2544 FIRST_SPEC_OBJECT_TYPE + 1); | 2544 FIRST_SPEC_OBJECT_TYPE + 1); |
2545 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == | 2545 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == |
2546 LAST_SPEC_OBJECT_TYPE - 1); | 2546 LAST_SPEC_OBJECT_TYPE - 1); |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2604 | 2604 |
2605 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { | 2605 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
2606 Register reg = ToRegister(instr->value()); | 2606 Register reg = ToRegister(instr->value()); |
2607 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 2607 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
2608 EmitBranch(instr, equal); | 2608 EmitBranch(instr, equal); |
2609 } | 2609 } |
2610 | 2610 |
2611 | 2611 |
2612 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2612 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
2613 // Object and function are in fixed registers defined by the stub. | 2613 // Object and function are in fixed registers defined by the stub. |
2614 ASSERT(ToRegister(instr->context()).is(esi)); | 2614 DCHECK(ToRegister(instr->context()).is(esi)); |
2615 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); | 2615 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); |
2616 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2616 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2617 | 2617 |
2618 Label true_value, done; | 2618 Label true_value, done; |
2619 __ test(eax, Operand(eax)); | 2619 __ test(eax, Operand(eax)); |
2620 __ j(zero, &true_value, Label::kNear); | 2620 __ j(zero, &true_value, Label::kNear); |
2621 __ mov(ToRegister(instr->result()), factory()->false_value()); | 2621 __ mov(ToRegister(instr->result()), factory()->false_value()); |
2622 __ jmp(&done, Label::kNear); | 2622 __ jmp(&done, Label::kNear); |
2623 __ bind(&true_value); | 2623 __ bind(&true_value); |
2624 __ mov(ToRegister(instr->result()), factory()->true_value()); | 2624 __ mov(ToRegister(instr->result()), factory()->true_value()); |
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2700 flags | InstanceofStub::kCallSiteInlineCheck); | 2700 flags | InstanceofStub::kCallSiteInlineCheck); |
2701 flags = static_cast<InstanceofStub::Flags>( | 2701 flags = static_cast<InstanceofStub::Flags>( |
2702 flags | InstanceofStub::kReturnTrueFalseObject); | 2702 flags | InstanceofStub::kReturnTrueFalseObject); |
2703 InstanceofStub stub(isolate(), flags); | 2703 InstanceofStub stub(isolate(), flags); |
2704 | 2704 |
2705 // Get the temp register reserved by the instruction. This needs to be a | 2705 // Get the temp register reserved by the instruction. This needs to be a |
2706 // register which is pushed last by PushSafepointRegisters as top of the | 2706 // register which is pushed last by PushSafepointRegisters as top of the |
2707 // stack is used to pass the offset to the location of the map check to | 2707 // stack is used to pass the offset to the location of the map check to |
2708 // the stub. | 2708 // the stub. |
2709 Register temp = ToRegister(instr->temp()); | 2709 Register temp = ToRegister(instr->temp()); |
2710 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); | 2710 DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); |
2711 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | 2711 __ LoadHeapObject(InstanceofStub::right(), instr->function()); |
2712 static const int kAdditionalDelta = 13; | 2712 static const int kAdditionalDelta = 13; |
2713 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 2713 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
2714 __ mov(temp, Immediate(delta)); | 2714 __ mov(temp, Immediate(delta)); |
2715 __ StoreToSafepointRegisterSlot(temp, temp); | 2715 __ StoreToSafepointRegisterSlot(temp, temp); |
2716 CallCodeGeneric(stub.GetCode(), | 2716 CallCodeGeneric(stub.GetCode(), |
2717 RelocInfo::CODE_TARGET, | 2717 RelocInfo::CODE_TARGET, |
2718 instr, | 2718 instr, |
2719 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 2719 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
2720 // Get the deoptimization index of the LLazyBailout-environment that | 2720 // Get the deoptimization index of the LLazyBailout-environment that |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2756 Immediate(kAlignmentZapValue)); | 2756 Immediate(kAlignmentZapValue)); |
2757 __ Assert(equal, kExpectedAlignmentMarker); | 2757 __ Assert(equal, kExpectedAlignmentMarker); |
2758 } | 2758 } |
2759 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); | 2759 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); |
2760 } else { | 2760 } else { |
2761 Register reg = ToRegister(instr->parameter_count()); | 2761 Register reg = ToRegister(instr->parameter_count()); |
2762 // The argument count parameter is a smi | 2762 // The argument count parameter is a smi |
2763 __ SmiUntag(reg); | 2763 __ SmiUntag(reg); |
2764 Register return_addr_reg = reg.is(ecx) ? ebx : ecx; | 2764 Register return_addr_reg = reg.is(ecx) ? ebx : ecx; |
2765 if (dynamic_frame_alignment && FLAG_debug_code) { | 2765 if (dynamic_frame_alignment && FLAG_debug_code) { |
2766 ASSERT(extra_value_count == 2); | 2766 DCHECK(extra_value_count == 2); |
2767 __ cmp(Operand(esp, reg, times_pointer_size, | 2767 __ cmp(Operand(esp, reg, times_pointer_size, |
2768 extra_value_count * kPointerSize), | 2768 extra_value_count * kPointerSize), |
2769 Immediate(kAlignmentZapValue)); | 2769 Immediate(kAlignmentZapValue)); |
2770 __ Assert(equal, kExpectedAlignmentMarker); | 2770 __ Assert(equal, kExpectedAlignmentMarker); |
2771 } | 2771 } |
2772 | 2772 |
2773 // emit code to restore stack based on instr->parameter_count() | 2773 // emit code to restore stack based on instr->parameter_count() |
2774 __ pop(return_addr_reg); // save return address | 2774 __ pop(return_addr_reg); // save return address |
2775 if (dynamic_frame_alignment) { | 2775 if (dynamic_frame_alignment) { |
2776 __ inc(reg); // 1 more for alignment | 2776 __ inc(reg); // 1 more for alignment |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2824 Register result = ToRegister(instr->result()); | 2824 Register result = ToRegister(instr->result()); |
2825 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle())); | 2825 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle())); |
2826 if (instr->hydrogen()->RequiresHoleCheck()) { | 2826 if (instr->hydrogen()->RequiresHoleCheck()) { |
2827 __ cmp(result, factory()->the_hole_value()); | 2827 __ cmp(result, factory()->the_hole_value()); |
2828 DeoptimizeIf(equal, instr->environment()); | 2828 DeoptimizeIf(equal, instr->environment()); |
2829 } | 2829 } |
2830 } | 2830 } |
2831 | 2831 |
2832 | 2832 |
2833 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { | 2833 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { |
2834 ASSERT(ToRegister(instr->context()).is(esi)); | 2834 DCHECK(ToRegister(instr->context()).is(esi)); |
2835 ASSERT(ToRegister(instr->global_object()).is(LoadIC::ReceiverRegister())); | 2835 DCHECK(ToRegister(instr->global_object()).is(LoadIC::ReceiverRegister())); |
2836 ASSERT(ToRegister(instr->result()).is(eax)); | 2836 DCHECK(ToRegister(instr->result()).is(eax)); |
2837 | 2837 |
2838 __ mov(LoadIC::NameRegister(), instr->name()); | 2838 __ mov(LoadIC::NameRegister(), instr->name()); |
2839 if (FLAG_vector_ics) { | 2839 if (FLAG_vector_ics) { |
2840 Register vector = ToRegister(instr->temp_vector()); | 2840 Register vector = ToRegister(instr->temp_vector()); |
2841 ASSERT(vector.is(LoadIC::VectorRegister())); | 2841 DCHECK(vector.is(LoadIC::VectorRegister())); |
2842 __ mov(vector, instr->hydrogen()->feedback_vector()); | 2842 __ mov(vector, instr->hydrogen()->feedback_vector()); |
2843 // No need to allocate this register. | 2843 // No need to allocate this register. |
2844 ASSERT(LoadIC::SlotRegister().is(eax)); | 2844 DCHECK(LoadIC::SlotRegister().is(eax)); |
2845 __ mov(LoadIC::SlotRegister(), | 2845 __ mov(LoadIC::SlotRegister(), |
2846 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); | 2846 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); |
2847 } | 2847 } |
2848 ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL; | 2848 ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL; |
2849 Handle<Code> ic = LoadIC::initialize_stub(isolate(), mode); | 2849 Handle<Code> ic = LoadIC::initialize_stub(isolate(), mode); |
2850 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2850 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2851 } | 2851 } |
2852 | 2852 |
2853 | 2853 |
2854 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { | 2854 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { |
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2949 Register result = ToRegister(instr->result()); | 2949 Register result = ToRegister(instr->result()); |
2950 if (!access.IsInobject()) { | 2950 if (!access.IsInobject()) { |
2951 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 2951 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
2952 object = result; | 2952 object = result; |
2953 } | 2953 } |
2954 __ Load(result, FieldOperand(object, offset), access.representation()); | 2954 __ Load(result, FieldOperand(object, offset), access.representation()); |
2955 } | 2955 } |
2956 | 2956 |
2957 | 2957 |
2958 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { | 2958 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { |
2959 ASSERT(!operand->IsDoubleRegister()); | 2959 DCHECK(!operand->IsDoubleRegister()); |
2960 if (operand->IsConstantOperand()) { | 2960 if (operand->IsConstantOperand()) { |
2961 Handle<Object> object = ToHandle(LConstantOperand::cast(operand)); | 2961 Handle<Object> object = ToHandle(LConstantOperand::cast(operand)); |
2962 AllowDeferredHandleDereference smi_check; | 2962 AllowDeferredHandleDereference smi_check; |
2963 if (object->IsSmi()) { | 2963 if (object->IsSmi()) { |
2964 __ Push(Handle<Smi>::cast(object)); | 2964 __ Push(Handle<Smi>::cast(object)); |
2965 } else { | 2965 } else { |
2966 __ PushHeapObject(Handle<HeapObject>::cast(object)); | 2966 __ PushHeapObject(Handle<HeapObject>::cast(object)); |
2967 } | 2967 } |
2968 } else if (operand->IsRegister()) { | 2968 } else if (operand->IsRegister()) { |
2969 __ push(ToRegister(operand)); | 2969 __ push(ToRegister(operand)); |
2970 } else { | 2970 } else { |
2971 __ push(ToOperand(operand)); | 2971 __ push(ToOperand(operand)); |
2972 } | 2972 } |
2973 } | 2973 } |
2974 | 2974 |
2975 | 2975 |
2976 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2976 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
2977 ASSERT(ToRegister(instr->context()).is(esi)); | 2977 DCHECK(ToRegister(instr->context()).is(esi)); |
2978 ASSERT(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); | 2978 DCHECK(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); |
2979 ASSERT(ToRegister(instr->result()).is(eax)); | 2979 DCHECK(ToRegister(instr->result()).is(eax)); |
2980 | 2980 |
2981 __ mov(LoadIC::NameRegister(), instr->name()); | 2981 __ mov(LoadIC::NameRegister(), instr->name()); |
2982 if (FLAG_vector_ics) { | 2982 if (FLAG_vector_ics) { |
2983 Register vector = ToRegister(instr->temp_vector()); | 2983 Register vector = ToRegister(instr->temp_vector()); |
2984 ASSERT(vector.is(LoadIC::VectorRegister())); | 2984 DCHECK(vector.is(LoadIC::VectorRegister())); |
2985 __ mov(vector, instr->hydrogen()->feedback_vector()); | 2985 __ mov(vector, instr->hydrogen()->feedback_vector()); |
2986 // No need to allocate this register. | 2986 // No need to allocate this register. |
2987 ASSERT(LoadIC::SlotRegister().is(eax)); | 2987 DCHECK(LoadIC::SlotRegister().is(eax)); |
2988 __ mov(LoadIC::SlotRegister(), | 2988 __ mov(LoadIC::SlotRegister(), |
2989 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); | 2989 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); |
2990 } | 2990 } |
2991 Handle<Code> ic = LoadIC::initialize_stub(isolate(), NOT_CONTEXTUAL); | 2991 Handle<Code> ic = LoadIC::initialize_stub(isolate(), NOT_CONTEXTUAL); |
2992 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2992 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2993 } | 2993 } |
2994 | 2994 |
2995 | 2995 |
2996 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 2996 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
2997 Register function = ToRegister(instr->function()); | 2997 Register function = ToRegister(instr->function()); |
(...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3201 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); | 3201 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); |
3202 return Operand(elements_pointer_reg, | 3202 return Operand(elements_pointer_reg, |
3203 ToRegister(key), | 3203 ToRegister(key), |
3204 scale_factor, | 3204 scale_factor, |
3205 base_offset); | 3205 base_offset); |
3206 } | 3206 } |
3207 } | 3207 } |
3208 | 3208 |
3209 | 3209 |
3210 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 3210 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
3211 ASSERT(ToRegister(instr->context()).is(esi)); | 3211 DCHECK(ToRegister(instr->context()).is(esi)); |
3212 ASSERT(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); | 3212 DCHECK(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); |
3213 ASSERT(ToRegister(instr->key()).is(LoadIC::NameRegister())); | 3213 DCHECK(ToRegister(instr->key()).is(LoadIC::NameRegister())); |
3214 | 3214 |
3215 if (FLAG_vector_ics) { | 3215 if (FLAG_vector_ics) { |
3216 Register vector = ToRegister(instr->temp_vector()); | 3216 Register vector = ToRegister(instr->temp_vector()); |
3217 ASSERT(vector.is(LoadIC::VectorRegister())); | 3217 DCHECK(vector.is(LoadIC::VectorRegister())); |
3218 __ mov(vector, instr->hydrogen()->feedback_vector()); | 3218 __ mov(vector, instr->hydrogen()->feedback_vector()); |
3219 // No need to allocate this register. | 3219 // No need to allocate this register. |
3220 ASSERT(LoadIC::SlotRegister().is(eax)); | 3220 DCHECK(LoadIC::SlotRegister().is(eax)); |
3221 __ mov(LoadIC::SlotRegister(), | 3221 __ mov(LoadIC::SlotRegister(), |
3222 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); | 3222 Immediate(Smi::FromInt(instr->hydrogen()->slot()))); |
3223 } | 3223 } |
3224 | 3224 |
3225 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | 3225 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
3226 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3226 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
3227 } | 3227 } |
3228 | 3228 |
3229 | 3229 |
3230 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 3230 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3325 __ mov(receiver, FieldOperand(receiver, proxy_offset)); | 3325 __ mov(receiver, FieldOperand(receiver, proxy_offset)); |
3326 __ bind(&receiver_ok); | 3326 __ bind(&receiver_ok); |
3327 } | 3327 } |
3328 | 3328 |
3329 | 3329 |
3330 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 3330 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
3331 Register receiver = ToRegister(instr->receiver()); | 3331 Register receiver = ToRegister(instr->receiver()); |
3332 Register function = ToRegister(instr->function()); | 3332 Register function = ToRegister(instr->function()); |
3333 Register length = ToRegister(instr->length()); | 3333 Register length = ToRegister(instr->length()); |
3334 Register elements = ToRegister(instr->elements()); | 3334 Register elements = ToRegister(instr->elements()); |
3335 ASSERT(receiver.is(eax)); // Used for parameter count. | 3335 DCHECK(receiver.is(eax)); // Used for parameter count. |
3336 ASSERT(function.is(edi)); // Required by InvokeFunction. | 3336 DCHECK(function.is(edi)); // Required by InvokeFunction. |
3337 ASSERT(ToRegister(instr->result()).is(eax)); | 3337 DCHECK(ToRegister(instr->result()).is(eax)); |
3338 | 3338 |
3339 // Copy the arguments to this function possibly from the | 3339 // Copy the arguments to this function possibly from the |
3340 // adaptor frame below it. | 3340 // adaptor frame below it. |
3341 const uint32_t kArgumentsLimit = 1 * KB; | 3341 const uint32_t kArgumentsLimit = 1 * KB; |
3342 __ cmp(length, kArgumentsLimit); | 3342 __ cmp(length, kArgumentsLimit); |
3343 DeoptimizeIf(above, instr->environment()); | 3343 DeoptimizeIf(above, instr->environment()); |
3344 | 3344 |
3345 __ push(receiver); | 3345 __ push(receiver); |
3346 __ mov(receiver, length); | 3346 __ mov(receiver, length); |
3347 | 3347 |
3348 // Loop through the arguments pushing them onto the execution | 3348 // Loop through the arguments pushing them onto the execution |
3349 // stack. | 3349 // stack. |
3350 Label invoke, loop; | 3350 Label invoke, loop; |
3351 // length is a small non-negative integer, due to the test above. | 3351 // length is a small non-negative integer, due to the test above. |
3352 __ test(length, Operand(length)); | 3352 __ test(length, Operand(length)); |
3353 __ j(zero, &invoke, Label::kNear); | 3353 __ j(zero, &invoke, Label::kNear); |
3354 __ bind(&loop); | 3354 __ bind(&loop); |
3355 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 3355 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); |
3356 __ dec(length); | 3356 __ dec(length); |
3357 __ j(not_zero, &loop); | 3357 __ j(not_zero, &loop); |
3358 | 3358 |
3359 // Invoke the function. | 3359 // Invoke the function. |
3360 __ bind(&invoke); | 3360 __ bind(&invoke); |
3361 ASSERT(instr->HasPointerMap()); | 3361 DCHECK(instr->HasPointerMap()); |
3362 LPointerMap* pointers = instr->pointer_map(); | 3362 LPointerMap* pointers = instr->pointer_map(); |
3363 SafepointGenerator safepoint_generator( | 3363 SafepointGenerator safepoint_generator( |
3364 this, pointers, Safepoint::kLazyDeopt); | 3364 this, pointers, Safepoint::kLazyDeopt); |
3365 ParameterCount actual(eax); | 3365 ParameterCount actual(eax); |
3366 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator); | 3366 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator); |
3367 } | 3367 } |
3368 | 3368 |
3369 | 3369 |
3370 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { | 3370 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { |
3371 __ int3(); | 3371 __ int3(); |
(...skipping 16 matching lines...) Expand all Loading... |
3388 __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3388 __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
3389 } | 3389 } |
3390 | 3390 |
3391 | 3391 |
3392 void LCodeGen::DoContext(LContext* instr) { | 3392 void LCodeGen::DoContext(LContext* instr) { |
3393 Register result = ToRegister(instr->result()); | 3393 Register result = ToRegister(instr->result()); |
3394 if (info()->IsOptimizing()) { | 3394 if (info()->IsOptimizing()) { |
3395 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3395 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset)); |
3396 } else { | 3396 } else { |
3397 // If there is no frame, the context must be in esi. | 3397 // If there is no frame, the context must be in esi. |
3398 ASSERT(result.is(esi)); | 3398 DCHECK(result.is(esi)); |
3399 } | 3399 } |
3400 } | 3400 } |
3401 | 3401 |
3402 | 3402 |
3403 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3403 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
3404 ASSERT(ToRegister(instr->context()).is(esi)); | 3404 DCHECK(ToRegister(instr->context()).is(esi)); |
3405 __ push(esi); // The context is the first argument. | 3405 __ push(esi); // The context is the first argument. |
3406 __ push(Immediate(instr->hydrogen()->pairs())); | 3406 __ push(Immediate(instr->hydrogen()->pairs())); |
3407 __ push(Immediate(Smi::FromInt(instr->hydrogen()->flags()))); | 3407 __ push(Immediate(Smi::FromInt(instr->hydrogen()->flags()))); |
3408 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3408 CallRuntime(Runtime::kDeclareGlobals, 3, instr); |
3409 } | 3409 } |
3410 | 3410 |
3411 | 3411 |
3412 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 3412 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
3413 int formal_parameter_count, | 3413 int formal_parameter_count, |
3414 int arity, | 3414 int arity, |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3446 SafepointGenerator generator( | 3446 SafepointGenerator generator( |
3447 this, pointers, Safepoint::kLazyDeopt); | 3447 this, pointers, Safepoint::kLazyDeopt); |
3448 ParameterCount count(arity); | 3448 ParameterCount count(arity); |
3449 ParameterCount expected(formal_parameter_count); | 3449 ParameterCount expected(formal_parameter_count); |
3450 __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator); | 3450 __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator); |
3451 } | 3451 } |
3452 } | 3452 } |
3453 | 3453 |
3454 | 3454 |
3455 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { | 3455 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { |
3456 ASSERT(ToRegister(instr->result()).is(eax)); | 3456 DCHECK(ToRegister(instr->result()).is(eax)); |
3457 | 3457 |
3458 LPointerMap* pointers = instr->pointer_map(); | 3458 LPointerMap* pointers = instr->pointer_map(); |
3459 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3459 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
3460 | 3460 |
3461 if (instr->target()->IsConstantOperand()) { | 3461 if (instr->target()->IsConstantOperand()) { |
3462 LConstantOperand* target = LConstantOperand::cast(instr->target()); | 3462 LConstantOperand* target = LConstantOperand::cast(instr->target()); |
3463 Handle<Code> code = Handle<Code>::cast(ToHandle(target)); | 3463 Handle<Code> code = Handle<Code>::cast(ToHandle(target)); |
3464 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET)); | 3464 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET)); |
3465 __ call(code, RelocInfo::CODE_TARGET); | 3465 __ call(code, RelocInfo::CODE_TARGET); |
3466 } else { | 3466 } else { |
3467 ASSERT(instr->target()->IsRegister()); | 3467 DCHECK(instr->target()->IsRegister()); |
3468 Register target = ToRegister(instr->target()); | 3468 Register target = ToRegister(instr->target()); |
3469 generator.BeforeCall(__ CallSize(Operand(target))); | 3469 generator.BeforeCall(__ CallSize(Operand(target))); |
3470 __ add(target, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 3470 __ add(target, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
3471 __ call(target); | 3471 __ call(target); |
3472 } | 3472 } |
3473 generator.AfterCall(); | 3473 generator.AfterCall(); |
3474 } | 3474 } |
3475 | 3475 |
3476 | 3476 |
3477 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { | 3477 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { |
3478 ASSERT(ToRegister(instr->function()).is(edi)); | 3478 DCHECK(ToRegister(instr->function()).is(edi)); |
3479 ASSERT(ToRegister(instr->result()).is(eax)); | 3479 DCHECK(ToRegister(instr->result()).is(eax)); |
3480 | 3480 |
3481 if (instr->hydrogen()->pass_argument_count()) { | 3481 if (instr->hydrogen()->pass_argument_count()) { |
3482 __ mov(eax, instr->arity()); | 3482 __ mov(eax, instr->arity()); |
3483 } | 3483 } |
3484 | 3484 |
3485 // Change context. | 3485 // Change context. |
3486 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | 3486 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); |
3487 | 3487 |
3488 bool is_self_call = false; | 3488 bool is_self_call = false; |
3489 if (instr->hydrogen()->function()->IsConstant()) { | 3489 if (instr->hydrogen()->function()->IsConstant()) { |
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3567 LMathAbs* instr) | 3567 LMathAbs* instr) |
3568 : LDeferredCode(codegen), instr_(instr) { } | 3568 : LDeferredCode(codegen), instr_(instr) { } |
3569 virtual void Generate() V8_OVERRIDE { | 3569 virtual void Generate() V8_OVERRIDE { |
3570 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); | 3570 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); |
3571 } | 3571 } |
3572 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 3572 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
3573 private: | 3573 private: |
3574 LMathAbs* instr_; | 3574 LMathAbs* instr_; |
3575 }; | 3575 }; |
3576 | 3576 |
3577 ASSERT(instr->value()->Equals(instr->result())); | 3577 DCHECK(instr->value()->Equals(instr->result())); |
3578 Representation r = instr->hydrogen()->value()->representation(); | 3578 Representation r = instr->hydrogen()->value()->representation(); |
3579 | 3579 |
3580 if (r.IsDouble()) { | 3580 if (r.IsDouble()) { |
3581 XMMRegister scratch = double_scratch0(); | 3581 XMMRegister scratch = double_scratch0(); |
3582 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3582 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3583 __ xorps(scratch, scratch); | 3583 __ xorps(scratch, scratch); |
3584 __ subsd(scratch, input_reg); | 3584 __ subsd(scratch, input_reg); |
3585 __ andps(input_reg, scratch); | 3585 __ andps(input_reg, scratch); |
3586 } else if (r.IsSmiOrInteger32()) { | 3586 } else if (r.IsSmiOrInteger32()) { |
3587 EmitIntegerMathAbs(instr); | 3587 EmitIntegerMathAbs(instr); |
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3736 Operand input = ToOperand(instr->value()); | 3736 Operand input = ToOperand(instr->value()); |
3737 XMMRegister output = ToDoubleRegister(instr->result()); | 3737 XMMRegister output = ToDoubleRegister(instr->result()); |
3738 __ sqrtsd(output, input); | 3738 __ sqrtsd(output, input); |
3739 } | 3739 } |
3740 | 3740 |
3741 | 3741 |
3742 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) { | 3742 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) { |
3743 XMMRegister xmm_scratch = double_scratch0(); | 3743 XMMRegister xmm_scratch = double_scratch0(); |
3744 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3744 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3745 Register scratch = ToRegister(instr->temp()); | 3745 Register scratch = ToRegister(instr->temp()); |
3746 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 3746 DCHECK(ToDoubleRegister(instr->result()).is(input_reg)); |
3747 | 3747 |
3748 // Note that according to ECMA-262 15.8.2.13: | 3748 // Note that according to ECMA-262 15.8.2.13: |
3749 // Math.pow(-Infinity, 0.5) == Infinity | 3749 // Math.pow(-Infinity, 0.5) == Infinity |
3750 // Math.sqrt(-Infinity) == NaN | 3750 // Math.sqrt(-Infinity) == NaN |
3751 Label done, sqrt; | 3751 Label done, sqrt; |
3752 // Check base for -Infinity. According to IEEE-754, single-precision | 3752 // Check base for -Infinity. According to IEEE-754, single-precision |
3753 // -Infinity has the highest 9 bits set and the lowest 23 bits cleared. | 3753 // -Infinity has the highest 9 bits set and the lowest 23 bits cleared. |
3754 __ mov(scratch, 0xFF800000); | 3754 __ mov(scratch, 0xFF800000); |
3755 __ movd(xmm_scratch, scratch); | 3755 __ movd(xmm_scratch, scratch); |
3756 __ cvtss2sd(xmm_scratch, xmm_scratch); | 3756 __ cvtss2sd(xmm_scratch, xmm_scratch); |
(...skipping 13 matching lines...) Expand all Loading... |
3770 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. | 3770 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. |
3771 __ sqrtsd(input_reg, input_reg); | 3771 __ sqrtsd(input_reg, input_reg); |
3772 __ bind(&done); | 3772 __ bind(&done); |
3773 } | 3773 } |
3774 | 3774 |
3775 | 3775 |
3776 void LCodeGen::DoPower(LPower* instr) { | 3776 void LCodeGen::DoPower(LPower* instr) { |
3777 Representation exponent_type = instr->hydrogen()->right()->representation(); | 3777 Representation exponent_type = instr->hydrogen()->right()->representation(); |
3778 // Having marked this as a call, we can use any registers. | 3778 // Having marked this as a call, we can use any registers. |
3779 // Just make sure that the input/output registers are the expected ones. | 3779 // Just make sure that the input/output registers are the expected ones. |
3780 ASSERT(!instr->right()->IsDoubleRegister() || | 3780 DCHECK(!instr->right()->IsDoubleRegister() || |
3781 ToDoubleRegister(instr->right()).is(xmm1)); | 3781 ToDoubleRegister(instr->right()).is(xmm1)); |
3782 ASSERT(!instr->right()->IsRegister() || | 3782 DCHECK(!instr->right()->IsRegister() || |
3783 ToRegister(instr->right()).is(eax)); | 3783 ToRegister(instr->right()).is(eax)); |
3784 ASSERT(ToDoubleRegister(instr->left()).is(xmm2)); | 3784 DCHECK(ToDoubleRegister(instr->left()).is(xmm2)); |
3785 ASSERT(ToDoubleRegister(instr->result()).is(xmm3)); | 3785 DCHECK(ToDoubleRegister(instr->result()).is(xmm3)); |
3786 | 3786 |
3787 if (exponent_type.IsSmi()) { | 3787 if (exponent_type.IsSmi()) { |
3788 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3788 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
3789 __ CallStub(&stub); | 3789 __ CallStub(&stub); |
3790 } else if (exponent_type.IsTagged()) { | 3790 } else if (exponent_type.IsTagged()) { |
3791 Label no_deopt; | 3791 Label no_deopt; |
3792 __ JumpIfSmi(eax, &no_deopt); | 3792 __ JumpIfSmi(eax, &no_deopt); |
3793 __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx); | 3793 __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx); |
3794 DeoptimizeIf(not_equal, instr->environment()); | 3794 DeoptimizeIf(not_equal, instr->environment()); |
3795 __ bind(&no_deopt); | 3795 __ bind(&no_deopt); |
3796 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3796 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
3797 __ CallStub(&stub); | 3797 __ CallStub(&stub); |
3798 } else if (exponent_type.IsInteger32()) { | 3798 } else if (exponent_type.IsInteger32()) { |
3799 MathPowStub stub(isolate(), MathPowStub::INTEGER); | 3799 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
3800 __ CallStub(&stub); | 3800 __ CallStub(&stub); |
3801 } else { | 3801 } else { |
3802 ASSERT(exponent_type.IsDouble()); | 3802 DCHECK(exponent_type.IsDouble()); |
3803 MathPowStub stub(isolate(), MathPowStub::DOUBLE); | 3803 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
3804 __ CallStub(&stub); | 3804 __ CallStub(&stub); |
3805 } | 3805 } |
3806 } | 3806 } |
3807 | 3807 |
3808 | 3808 |
3809 void LCodeGen::DoMathLog(LMathLog* instr) { | 3809 void LCodeGen::DoMathLog(LMathLog* instr) { |
3810 ASSERT(instr->value()->Equals(instr->result())); | 3810 DCHECK(instr->value()->Equals(instr->result())); |
3811 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3811 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3812 XMMRegister xmm_scratch = double_scratch0(); | 3812 XMMRegister xmm_scratch = double_scratch0(); |
3813 Label positive, done, zero; | 3813 Label positive, done, zero; |
3814 __ xorps(xmm_scratch, xmm_scratch); | 3814 __ xorps(xmm_scratch, xmm_scratch); |
3815 __ ucomisd(input_reg, xmm_scratch); | 3815 __ ucomisd(input_reg, xmm_scratch); |
3816 __ j(above, &positive, Label::kNear); | 3816 __ j(above, &positive, Label::kNear); |
3817 __ j(not_carry, &zero, Label::kNear); | 3817 __ j(not_carry, &zero, Label::kNear); |
3818 ExternalReference nan = | 3818 ExternalReference nan = |
3819 ExternalReference::address_of_canonical_non_hole_nan(); | 3819 ExternalReference::address_of_canonical_non_hole_nan(); |
3820 __ movsd(input_reg, Operand::StaticVariable(nan)); | 3820 __ movsd(input_reg, Operand::StaticVariable(nan)); |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3856 XMMRegister result = ToDoubleRegister(instr->result()); | 3856 XMMRegister result = ToDoubleRegister(instr->result()); |
3857 XMMRegister temp0 = double_scratch0(); | 3857 XMMRegister temp0 = double_scratch0(); |
3858 Register temp1 = ToRegister(instr->temp1()); | 3858 Register temp1 = ToRegister(instr->temp1()); |
3859 Register temp2 = ToRegister(instr->temp2()); | 3859 Register temp2 = ToRegister(instr->temp2()); |
3860 | 3860 |
3861 MathExpGenerator::EmitMathExp(masm(), input, result, temp0, temp1, temp2); | 3861 MathExpGenerator::EmitMathExp(masm(), input, result, temp0, temp1, temp2); |
3862 } | 3862 } |
3863 | 3863 |
3864 | 3864 |
3865 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3865 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
3866 ASSERT(ToRegister(instr->context()).is(esi)); | 3866 DCHECK(ToRegister(instr->context()).is(esi)); |
3867 ASSERT(ToRegister(instr->function()).is(edi)); | 3867 DCHECK(ToRegister(instr->function()).is(edi)); |
3868 ASSERT(instr->HasPointerMap()); | 3868 DCHECK(instr->HasPointerMap()); |
3869 | 3869 |
3870 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); | 3870 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); |
3871 if (known_function.is_null()) { | 3871 if (known_function.is_null()) { |
3872 LPointerMap* pointers = instr->pointer_map(); | 3872 LPointerMap* pointers = instr->pointer_map(); |
3873 SafepointGenerator generator( | 3873 SafepointGenerator generator( |
3874 this, pointers, Safepoint::kLazyDeopt); | 3874 this, pointers, Safepoint::kLazyDeopt); |
3875 ParameterCount count(instr->arity()); | 3875 ParameterCount count(instr->arity()); |
3876 __ InvokeFunction(edi, count, CALL_FUNCTION, generator); | 3876 __ InvokeFunction(edi, count, CALL_FUNCTION, generator); |
3877 } else { | 3877 } else { |
3878 CallKnownFunction(known_function, | 3878 CallKnownFunction(known_function, |
3879 instr->hydrogen()->formal_parameter_count(), | 3879 instr->hydrogen()->formal_parameter_count(), |
3880 instr->arity(), | 3880 instr->arity(), |
3881 instr, | 3881 instr, |
3882 EDI_CONTAINS_TARGET); | 3882 EDI_CONTAINS_TARGET); |
3883 } | 3883 } |
3884 } | 3884 } |
3885 | 3885 |
3886 | 3886 |
3887 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 3887 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
3888 ASSERT(ToRegister(instr->context()).is(esi)); | 3888 DCHECK(ToRegister(instr->context()).is(esi)); |
3889 ASSERT(ToRegister(instr->function()).is(edi)); | 3889 DCHECK(ToRegister(instr->function()).is(edi)); |
3890 ASSERT(ToRegister(instr->result()).is(eax)); | 3890 DCHECK(ToRegister(instr->result()).is(eax)); |
3891 | 3891 |
3892 int arity = instr->arity(); | 3892 int arity = instr->arity(); |
3893 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); | 3893 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); |
3894 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3894 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
3895 } | 3895 } |
3896 | 3896 |
3897 | 3897 |
3898 void LCodeGen::DoCallNew(LCallNew* instr) { | 3898 void LCodeGen::DoCallNew(LCallNew* instr) { |
3899 ASSERT(ToRegister(instr->context()).is(esi)); | 3899 DCHECK(ToRegister(instr->context()).is(esi)); |
3900 ASSERT(ToRegister(instr->constructor()).is(edi)); | 3900 DCHECK(ToRegister(instr->constructor()).is(edi)); |
3901 ASSERT(ToRegister(instr->result()).is(eax)); | 3901 DCHECK(ToRegister(instr->result()).is(eax)); |
3902 | 3902 |
3903 // No cell in ebx for construct type feedback in optimized code | 3903 // No cell in ebx for construct type feedback in optimized code |
3904 __ mov(ebx, isolate()->factory()->undefined_value()); | 3904 __ mov(ebx, isolate()->factory()->undefined_value()); |
3905 CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); | 3905 CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); |
3906 __ Move(eax, Immediate(instr->arity())); | 3906 __ Move(eax, Immediate(instr->arity())); |
3907 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 3907 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
3908 } | 3908 } |
3909 | 3909 |
3910 | 3910 |
3911 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 3911 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
3912 ASSERT(ToRegister(instr->context()).is(esi)); | 3912 DCHECK(ToRegister(instr->context()).is(esi)); |
3913 ASSERT(ToRegister(instr->constructor()).is(edi)); | 3913 DCHECK(ToRegister(instr->constructor()).is(edi)); |
3914 ASSERT(ToRegister(instr->result()).is(eax)); | 3914 DCHECK(ToRegister(instr->result()).is(eax)); |
3915 | 3915 |
3916 __ Move(eax, Immediate(instr->arity())); | 3916 __ Move(eax, Immediate(instr->arity())); |
3917 __ mov(ebx, isolate()->factory()->undefined_value()); | 3917 __ mov(ebx, isolate()->factory()->undefined_value()); |
3918 ElementsKind kind = instr->hydrogen()->elements_kind(); | 3918 ElementsKind kind = instr->hydrogen()->elements_kind(); |
3919 AllocationSiteOverrideMode override_mode = | 3919 AllocationSiteOverrideMode override_mode = |
3920 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) | 3920 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) |
3921 ? DISABLE_ALLOCATION_SITES | 3921 ? DISABLE_ALLOCATION_SITES |
3922 : DONT_OVERRIDE; | 3922 : DONT_OVERRIDE; |
3923 | 3923 |
3924 if (instr->arity() == 0) { | 3924 if (instr->arity() == 0) { |
(...skipping 22 matching lines...) Expand all Loading... |
3947 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 3947 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
3948 __ bind(&done); | 3948 __ bind(&done); |
3949 } else { | 3949 } else { |
3950 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); | 3950 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); |
3951 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 3951 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
3952 } | 3952 } |
3953 } | 3953 } |
3954 | 3954 |
3955 | 3955 |
3956 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 3956 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
3957 ASSERT(ToRegister(instr->context()).is(esi)); | 3957 DCHECK(ToRegister(instr->context()).is(esi)); |
3958 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); | 3958 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); |
3959 } | 3959 } |
3960 | 3960 |
3961 | 3961 |
3962 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { | 3962 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { |
3963 Register function = ToRegister(instr->function()); | 3963 Register function = ToRegister(instr->function()); |
3964 Register code_object = ToRegister(instr->code_object()); | 3964 Register code_object = ToRegister(instr->code_object()); |
3965 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); | 3965 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); |
3966 __ mov(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); | 3966 __ mov(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); |
3967 } | 3967 } |
(...skipping 12 matching lines...) Expand all Loading... |
3980 } | 3980 } |
3981 | 3981 |
3982 | 3982 |
3983 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { | 3983 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
3984 Representation representation = instr->hydrogen()->field_representation(); | 3984 Representation representation = instr->hydrogen()->field_representation(); |
3985 | 3985 |
3986 HObjectAccess access = instr->hydrogen()->access(); | 3986 HObjectAccess access = instr->hydrogen()->access(); |
3987 int offset = access.offset(); | 3987 int offset = access.offset(); |
3988 | 3988 |
3989 if (access.IsExternalMemory()) { | 3989 if (access.IsExternalMemory()) { |
3990 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 3990 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); |
3991 MemOperand operand = instr->object()->IsConstantOperand() | 3991 MemOperand operand = instr->object()->IsConstantOperand() |
3992 ? MemOperand::StaticVariable( | 3992 ? MemOperand::StaticVariable( |
3993 ToExternalReference(LConstantOperand::cast(instr->object()))) | 3993 ToExternalReference(LConstantOperand::cast(instr->object()))) |
3994 : MemOperand(ToRegister(instr->object()), offset); | 3994 : MemOperand(ToRegister(instr->object()), offset); |
3995 if (instr->value()->IsConstantOperand()) { | 3995 if (instr->value()->IsConstantOperand()) { |
3996 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 3996 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
3997 __ mov(operand, Immediate(ToInteger32(operand_value))); | 3997 __ mov(operand, Immediate(ToInteger32(operand_value))); |
3998 } else { | 3998 } else { |
3999 Register value = ToRegister(instr->value()); | 3999 Register value = ToRegister(instr->value()); |
4000 __ Store(value, operand, representation); | 4000 __ Store(value, operand, representation); |
4001 } | 4001 } |
4002 return; | 4002 return; |
4003 } | 4003 } |
4004 | 4004 |
4005 Register object = ToRegister(instr->object()); | 4005 Register object = ToRegister(instr->object()); |
4006 __ AssertNotSmi(object); | 4006 __ AssertNotSmi(object); |
4007 | 4007 |
4008 ASSERT(!representation.IsSmi() || | 4008 DCHECK(!representation.IsSmi() || |
4009 !instr->value()->IsConstantOperand() || | 4009 !instr->value()->IsConstantOperand() || |
4010 IsSmi(LConstantOperand::cast(instr->value()))); | 4010 IsSmi(LConstantOperand::cast(instr->value()))); |
4011 if (representation.IsDouble()) { | 4011 if (representation.IsDouble()) { |
4012 ASSERT(access.IsInobject()); | 4012 DCHECK(access.IsInobject()); |
4013 ASSERT(!instr->hydrogen()->has_transition()); | 4013 DCHECK(!instr->hydrogen()->has_transition()); |
4014 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 4014 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); |
4015 XMMRegister value = ToDoubleRegister(instr->value()); | 4015 XMMRegister value = ToDoubleRegister(instr->value()); |
4016 __ movsd(FieldOperand(object, offset), value); | 4016 __ movsd(FieldOperand(object, offset), value); |
4017 return; | 4017 return; |
4018 } | 4018 } |
4019 | 4019 |
4020 if (instr->hydrogen()->has_transition()) { | 4020 if (instr->hydrogen()->has_transition()) { |
4021 Handle<Map> transition = instr->hydrogen()->transition_map(); | 4021 Handle<Map> transition = instr->hydrogen()->transition_map(); |
4022 AddDeprecationDependency(transition); | 4022 AddDeprecationDependency(transition); |
4023 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition); | 4023 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition); |
4024 if (instr->hydrogen()->NeedsWriteBarrierForMap()) { | 4024 if (instr->hydrogen()->NeedsWriteBarrierForMap()) { |
(...skipping 12 matching lines...) Expand all Loading... |
4037 } | 4037 } |
4038 | 4038 |
4039 MemOperand operand = FieldOperand(write_register, offset); | 4039 MemOperand operand = FieldOperand(write_register, offset); |
4040 if (instr->value()->IsConstantOperand()) { | 4040 if (instr->value()->IsConstantOperand()) { |
4041 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 4041 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
4042 if (operand_value->IsRegister()) { | 4042 if (operand_value->IsRegister()) { |
4043 Register value = ToRegister(operand_value); | 4043 Register value = ToRegister(operand_value); |
4044 __ Store(value, operand, representation); | 4044 __ Store(value, operand, representation); |
4045 } else if (representation.IsInteger32()) { | 4045 } else if (representation.IsInteger32()) { |
4046 Immediate immediate = ToImmediate(operand_value, representation); | 4046 Immediate immediate = ToImmediate(operand_value, representation); |
4047 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 4047 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); |
4048 __ mov(operand, immediate); | 4048 __ mov(operand, immediate); |
4049 } else { | 4049 } else { |
4050 Handle<Object> handle_value = ToHandle(operand_value); | 4050 Handle<Object> handle_value = ToHandle(operand_value); |
4051 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 4051 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); |
4052 __ mov(operand, handle_value); | 4052 __ mov(operand, handle_value); |
4053 } | 4053 } |
4054 } else { | 4054 } else { |
4055 Register value = ToRegister(instr->value()); | 4055 Register value = ToRegister(instr->value()); |
4056 __ Store(value, operand, representation); | 4056 __ Store(value, operand, representation); |
4057 } | 4057 } |
4058 | 4058 |
4059 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4059 if (instr->hydrogen()->NeedsWriteBarrier()) { |
4060 Register value = ToRegister(instr->value()); | 4060 Register value = ToRegister(instr->value()); |
4061 Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object; | 4061 Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object; |
4062 // Update the write barrier for the object for in-object properties. | 4062 // Update the write barrier for the object for in-object properties. |
4063 __ RecordWriteField(write_register, | 4063 __ RecordWriteField(write_register, |
4064 offset, | 4064 offset, |
4065 value, | 4065 value, |
4066 temp, | 4066 temp, |
4067 kSaveFPRegs, | 4067 kSaveFPRegs, |
4068 EMIT_REMEMBERED_SET, | 4068 EMIT_REMEMBERED_SET, |
4069 instr->hydrogen()->SmiCheckForWriteBarrier(), | 4069 instr->hydrogen()->SmiCheckForWriteBarrier(), |
4070 instr->hydrogen()->PointersToHereCheckForValue()); | 4070 instr->hydrogen()->PointersToHereCheckForValue()); |
4071 } | 4071 } |
4072 } | 4072 } |
4073 | 4073 |
4074 | 4074 |
4075 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 4075 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
4076 ASSERT(ToRegister(instr->context()).is(esi)); | 4076 DCHECK(ToRegister(instr->context()).is(esi)); |
4077 ASSERT(ToRegister(instr->object()).is(StoreIC::ReceiverRegister())); | 4077 DCHECK(ToRegister(instr->object()).is(StoreIC::ReceiverRegister())); |
4078 ASSERT(ToRegister(instr->value()).is(StoreIC::ValueRegister())); | 4078 DCHECK(ToRegister(instr->value()).is(StoreIC::ValueRegister())); |
4079 | 4079 |
4080 __ mov(StoreIC::NameRegister(), instr->name()); | 4080 __ mov(StoreIC::NameRegister(), instr->name()); |
4081 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode()); | 4081 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode()); |
4082 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4082 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
4083 } | 4083 } |
4084 | 4084 |
4085 | 4085 |
4086 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 4086 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
4087 Condition cc = instr->hydrogen()->allow_equality() ? above : above_equal; | 4087 Condition cc = instr->hydrogen()->allow_equality() ? above : above_equal; |
4088 if (instr->index()->IsConstantOperand()) { | 4088 if (instr->index()->IsConstantOperand()) { |
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4209 FAST_ELEMENTS, | 4209 FAST_ELEMENTS, |
4210 instr->base_offset()); | 4210 instr->base_offset()); |
4211 if (instr->value()->IsRegister()) { | 4211 if (instr->value()->IsRegister()) { |
4212 __ mov(operand, ToRegister(instr->value())); | 4212 __ mov(operand, ToRegister(instr->value())); |
4213 } else { | 4213 } else { |
4214 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 4214 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
4215 if (IsSmi(operand_value)) { | 4215 if (IsSmi(operand_value)) { |
4216 Immediate immediate = ToImmediate(operand_value, Representation::Smi()); | 4216 Immediate immediate = ToImmediate(operand_value, Representation::Smi()); |
4217 __ mov(operand, immediate); | 4217 __ mov(operand, immediate); |
4218 } else { | 4218 } else { |
4219 ASSERT(!IsInteger32(operand_value)); | 4219 DCHECK(!IsInteger32(operand_value)); |
4220 Handle<Object> handle_value = ToHandle(operand_value); | 4220 Handle<Object> handle_value = ToHandle(operand_value); |
4221 __ mov(operand, handle_value); | 4221 __ mov(operand, handle_value); |
4222 } | 4222 } |
4223 } | 4223 } |
4224 | 4224 |
4225 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4225 if (instr->hydrogen()->NeedsWriteBarrier()) { |
4226 ASSERT(instr->value()->IsRegister()); | 4226 DCHECK(instr->value()->IsRegister()); |
4227 Register value = ToRegister(instr->value()); | 4227 Register value = ToRegister(instr->value()); |
4228 ASSERT(!instr->key()->IsConstantOperand()); | 4228 DCHECK(!instr->key()->IsConstantOperand()); |
4229 SmiCheck check_needed = | 4229 SmiCheck check_needed = |
4230 instr->hydrogen()->value()->type().IsHeapObject() | 4230 instr->hydrogen()->value()->type().IsHeapObject() |
4231 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 4231 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
4232 // Compute address of modified element and store it into key register. | 4232 // Compute address of modified element and store it into key register. |
4233 __ lea(key, operand); | 4233 __ lea(key, operand); |
4234 __ RecordWrite(elements, | 4234 __ RecordWrite(elements, |
4235 key, | 4235 key, |
4236 value, | 4236 value, |
4237 kSaveFPRegs, | 4237 kSaveFPRegs, |
4238 EMIT_REMEMBERED_SET, | 4238 EMIT_REMEMBERED_SET, |
4239 check_needed, | 4239 check_needed, |
4240 instr->hydrogen()->PointersToHereCheckForValue()); | 4240 instr->hydrogen()->PointersToHereCheckForValue()); |
4241 } | 4241 } |
4242 } | 4242 } |
4243 | 4243 |
4244 | 4244 |
4245 void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) { | 4245 void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) { |
4246 // By cases...external, fast-double, fast | 4246 // By cases...external, fast-double, fast |
4247 if (instr->is_typed_elements()) { | 4247 if (instr->is_typed_elements()) { |
4248 DoStoreKeyedExternalArray(instr); | 4248 DoStoreKeyedExternalArray(instr); |
4249 } else if (instr->hydrogen()->value()->representation().IsDouble()) { | 4249 } else if (instr->hydrogen()->value()->representation().IsDouble()) { |
4250 DoStoreKeyedFixedDoubleArray(instr); | 4250 DoStoreKeyedFixedDoubleArray(instr); |
4251 } else { | 4251 } else { |
4252 DoStoreKeyedFixedArray(instr); | 4252 DoStoreKeyedFixedArray(instr); |
4253 } | 4253 } |
4254 } | 4254 } |
4255 | 4255 |
4256 | 4256 |
4257 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 4257 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
4258 ASSERT(ToRegister(instr->context()).is(esi)); | 4258 DCHECK(ToRegister(instr->context()).is(esi)); |
4259 ASSERT(ToRegister(instr->object()).is(KeyedStoreIC::ReceiverRegister())); | 4259 DCHECK(ToRegister(instr->object()).is(KeyedStoreIC::ReceiverRegister())); |
4260 ASSERT(ToRegister(instr->key()).is(KeyedStoreIC::NameRegister())); | 4260 DCHECK(ToRegister(instr->key()).is(KeyedStoreIC::NameRegister())); |
4261 ASSERT(ToRegister(instr->value()).is(KeyedStoreIC::ValueRegister())); | 4261 DCHECK(ToRegister(instr->value()).is(KeyedStoreIC::ValueRegister())); |
4262 | 4262 |
4263 Handle<Code> ic = instr->strict_mode() == STRICT | 4263 Handle<Code> ic = instr->strict_mode() == STRICT |
4264 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() | 4264 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
4265 : isolate()->builtins()->KeyedStoreIC_Initialize(); | 4265 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
4266 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4266 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
4267 } | 4267 } |
4268 | 4268 |
4269 | 4269 |
4270 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4270 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
4271 Register object = ToRegister(instr->object()); | 4271 Register object = ToRegister(instr->object()); |
(...skipping 18 matching lines...) Expand all Loading... |
4290 IsSimpleMapChangeTransition(from_kind, to_kind); | 4290 IsSimpleMapChangeTransition(from_kind, to_kind); |
4291 Label::Distance branch_distance = | 4291 Label::Distance branch_distance = |
4292 is_simple_map_transition ? Label::kNear : Label::kFar; | 4292 is_simple_map_transition ? Label::kNear : Label::kFar; |
4293 __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); | 4293 __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); |
4294 __ j(not_equal, ¬_applicable, branch_distance); | 4294 __ j(not_equal, ¬_applicable, branch_distance); |
4295 if (is_simple_map_transition) { | 4295 if (is_simple_map_transition) { |
4296 Register new_map_reg = ToRegister(instr->new_map_temp()); | 4296 Register new_map_reg = ToRegister(instr->new_map_temp()); |
4297 __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), | 4297 __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), |
4298 Immediate(to_map)); | 4298 Immediate(to_map)); |
4299 // Write barrier. | 4299 // Write barrier. |
4300 ASSERT_NE(instr->temp(), NULL); | 4300 DCHECK_NE(instr->temp(), NULL); |
4301 __ RecordWriteForMap(object_reg, to_map, new_map_reg, | 4301 __ RecordWriteForMap(object_reg, to_map, new_map_reg, |
4302 ToRegister(instr->temp()), | 4302 ToRegister(instr->temp()), |
4303 kDontSaveFPRegs); | 4303 kDontSaveFPRegs); |
4304 } else { | 4304 } else { |
4305 ASSERT(ToRegister(instr->context()).is(esi)); | 4305 DCHECK(ToRegister(instr->context()).is(esi)); |
4306 ASSERT(object_reg.is(eax)); | 4306 DCHECK(object_reg.is(eax)); |
4307 PushSafepointRegistersScope scope(this); | 4307 PushSafepointRegistersScope scope(this); |
4308 __ mov(ebx, to_map); | 4308 __ mov(ebx, to_map); |
4309 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; | 4309 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; |
4310 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); | 4310 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); |
4311 __ CallStub(&stub); | 4311 __ CallStub(&stub); |
4312 RecordSafepointWithLazyDeopt(instr, | 4312 RecordSafepointWithLazyDeopt(instr, |
4313 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 4313 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
4314 } | 4314 } |
4315 __ bind(¬_applicable); | 4315 __ bind(¬_applicable); |
4316 } | 4316 } |
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4384 codegen()->DoDeferredStringCharFromCode(instr_); | 4384 codegen()->DoDeferredStringCharFromCode(instr_); |
4385 } | 4385 } |
4386 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 4386 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
4387 private: | 4387 private: |
4388 LStringCharFromCode* instr_; | 4388 LStringCharFromCode* instr_; |
4389 }; | 4389 }; |
4390 | 4390 |
4391 DeferredStringCharFromCode* deferred = | 4391 DeferredStringCharFromCode* deferred = |
4392 new(zone()) DeferredStringCharFromCode(this, instr); | 4392 new(zone()) DeferredStringCharFromCode(this, instr); |
4393 | 4393 |
4394 ASSERT(instr->hydrogen()->value()->representation().IsInteger32()); | 4394 DCHECK(instr->hydrogen()->value()->representation().IsInteger32()); |
4395 Register char_code = ToRegister(instr->char_code()); | 4395 Register char_code = ToRegister(instr->char_code()); |
4396 Register result = ToRegister(instr->result()); | 4396 Register result = ToRegister(instr->result()); |
4397 ASSERT(!char_code.is(result)); | 4397 DCHECK(!char_code.is(result)); |
4398 | 4398 |
4399 __ cmp(char_code, String::kMaxOneByteCharCode); | 4399 __ cmp(char_code, String::kMaxOneByteCharCode); |
4400 __ j(above, deferred->entry()); | 4400 __ j(above, deferred->entry()); |
4401 __ Move(result, Immediate(factory()->single_character_string_cache())); | 4401 __ Move(result, Immediate(factory()->single_character_string_cache())); |
4402 __ mov(result, FieldOperand(result, | 4402 __ mov(result, FieldOperand(result, |
4403 char_code, times_pointer_size, | 4403 char_code, times_pointer_size, |
4404 FixedArray::kHeaderSize)); | 4404 FixedArray::kHeaderSize)); |
4405 __ cmp(result, factory()->undefined_value()); | 4405 __ cmp(result, factory()->undefined_value()); |
4406 __ j(equal, deferred->entry()); | 4406 __ j(equal, deferred->entry()); |
4407 __ bind(deferred->exit()); | 4407 __ bind(deferred->exit()); |
(...skipping 11 matching lines...) Expand all Loading... |
4419 | 4419 |
4420 PushSafepointRegistersScope scope(this); | 4420 PushSafepointRegistersScope scope(this); |
4421 __ SmiTag(char_code); | 4421 __ SmiTag(char_code); |
4422 __ push(char_code); | 4422 __ push(char_code); |
4423 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); | 4423 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); |
4424 __ StoreToSafepointRegisterSlot(result, eax); | 4424 __ StoreToSafepointRegisterSlot(result, eax); |
4425 } | 4425 } |
4426 | 4426 |
4427 | 4427 |
4428 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4428 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
4429 ASSERT(ToRegister(instr->context()).is(esi)); | 4429 DCHECK(ToRegister(instr->context()).is(esi)); |
4430 ASSERT(ToRegister(instr->left()).is(edx)); | 4430 DCHECK(ToRegister(instr->left()).is(edx)); |
4431 ASSERT(ToRegister(instr->right()).is(eax)); | 4431 DCHECK(ToRegister(instr->right()).is(eax)); |
4432 StringAddStub stub(isolate(), | 4432 StringAddStub stub(isolate(), |
4433 instr->hydrogen()->flags(), | 4433 instr->hydrogen()->flags(), |
4434 instr->hydrogen()->pretenure_flag()); | 4434 instr->hydrogen()->pretenure_flag()); |
4435 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4435 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
4436 } | 4436 } |
4437 | 4437 |
4438 | 4438 |
4439 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 4439 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
4440 LOperand* input = instr->value(); | 4440 LOperand* input = instr->value(); |
4441 LOperand* output = instr->result(); | 4441 LOperand* output = instr->result(); |
4442 ASSERT(input->IsRegister() || input->IsStackSlot()); | 4442 DCHECK(input->IsRegister() || input->IsStackSlot()); |
4443 ASSERT(output->IsDoubleRegister()); | 4443 DCHECK(output->IsDoubleRegister()); |
4444 __ Cvtsi2sd(ToDoubleRegister(output), ToOperand(input)); | 4444 __ Cvtsi2sd(ToDoubleRegister(output), ToOperand(input)); |
4445 } | 4445 } |
4446 | 4446 |
4447 | 4447 |
4448 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { | 4448 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { |
4449 LOperand* input = instr->value(); | 4449 LOperand* input = instr->value(); |
4450 LOperand* output = instr->result(); | 4450 LOperand* output = instr->result(); |
4451 __ LoadUint32(ToDoubleRegister(output), ToRegister(input)); | 4451 __ LoadUint32(ToDoubleRegister(output), ToRegister(input)); |
4452 } | 4452 } |
4453 | 4453 |
4454 | 4454 |
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  // Deferred code taken when the in-place Smi tagging below overflows;
  // it hands the signed 32-bit value to DoDeferredNumberTagIU instead.
  class DeferredNumberTagI V8_FINAL : public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen,
                       LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredNumberTagIU(
          instr_, instr_->value(), instr_->temp(), SIGNED_INT32);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
   private:
    LNumberTagI* instr_;
  };

  // The instruction tags in place: input register and result must alias.
  LOperand* input = instr->value();
  DCHECK(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred =
      new(zone()) DeferredNumberTagI(this, instr);
  // Optimistically tag; jump to the deferred path if the shift overflowed.
  __ SmiTag(reg);
  __ j(overflow, deferred->entry());
  __ bind(deferred->exit());
}
4480 | 4480 |
4481 | 4481 |
void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
  // Deferred code for unsigned values above Smi::kMaxValue, which cannot
  // be represented as a Smi; DoDeferredNumberTagIU handles them.
  class DeferredNumberTagU V8_FINAL : public LDeferredCode {
   public:
    DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredNumberTagIU(
          instr_, instr_->value(), instr_->temp(), UNSIGNED_INT32);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
   private:
    LNumberTagU* instr_;
  };

  // The instruction tags in place: input register and result must alias.
  LOperand* input = instr->value();
  DCHECK(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagU* deferred =
      new(zone()) DeferredNumberTagU(this, instr);
  // Values that fit the Smi range are tagged inline; larger (unsigned)
  // values take the deferred path.
  __ cmp(reg, Immediate(Smi::kMaxValue));
  __ j(above, deferred->entry());
  __ SmiTag(reg);
  __ bind(deferred->exit());
}
4507 | 4507 |
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4623 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4623 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4624 !hchange->value()->CheckFlag(HValue::kUint32)) { | 4624 !hchange->value()->CheckFlag(HValue::kUint32)) { |
4625 DeoptimizeIf(overflow, instr->environment()); | 4625 DeoptimizeIf(overflow, instr->environment()); |
4626 } | 4626 } |
4627 } | 4627 } |
4628 | 4628 |
4629 | 4629 |
4630 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4630 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
4631 LOperand* input = instr->value(); | 4631 LOperand* input = instr->value(); |
4632 Register result = ToRegister(input); | 4632 Register result = ToRegister(input); |
4633 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 4633 DCHECK(input->IsRegister() && input->Equals(instr->result())); |
4634 if (instr->needs_check()) { | 4634 if (instr->needs_check()) { |
4635 __ test(result, Immediate(kSmiTagMask)); | 4635 __ test(result, Immediate(kSmiTagMask)); |
4636 DeoptimizeIf(not_zero, instr->environment()); | 4636 DeoptimizeIf(not_zero, instr->environment()); |
4637 } else { | 4637 } else { |
4638 __ AssertSmi(result); | 4638 __ AssertSmi(result); |
4639 } | 4639 } |
4640 __ SmiUntag(result); | 4640 __ SmiUntag(result); |
4641 } | 4641 } |
4642 | 4642 |
4643 | 4643 |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4683 // Convert undefined (and hole) to NaN. | 4683 // Convert undefined (and hole) to NaN. |
4684 __ cmp(input_reg, factory()->undefined_value()); | 4684 __ cmp(input_reg, factory()->undefined_value()); |
4685 DeoptimizeIf(not_equal, env); | 4685 DeoptimizeIf(not_equal, env); |
4686 | 4686 |
4687 ExternalReference nan = | 4687 ExternalReference nan = |
4688 ExternalReference::address_of_canonical_non_hole_nan(); | 4688 ExternalReference::address_of_canonical_non_hole_nan(); |
4689 __ movsd(result_reg, Operand::StaticVariable(nan)); | 4689 __ movsd(result_reg, Operand::StaticVariable(nan)); |
4690 __ jmp(&done, Label::kNear); | 4690 __ jmp(&done, Label::kNear); |
4691 } | 4691 } |
4692 } else { | 4692 } else { |
4693 ASSERT(mode == NUMBER_CANDIDATE_IS_SMI); | 4693 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
4694 } | 4694 } |
4695 | 4695 |
4696 __ bind(&load_smi); | 4696 __ bind(&load_smi); |
4697 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the | 4697 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the |
4698 // input register since we avoid dependencies. | 4698 // input register since we avoid dependencies. |
4699 __ mov(temp_reg, input_reg); | 4699 __ mov(temp_reg, input_reg); |
4700 __ SmiUntag(temp_reg); // Untag smi before converting to float. | 4700 __ SmiUntag(temp_reg); // Untag smi before converting to float. |
4701 __ Cvtsi2sd(result_reg, Operand(temp_reg)); | 4701 __ Cvtsi2sd(result_reg, Operand(temp_reg)); |
4702 __ bind(&done); | 4702 __ bind(&done); |
4703 } | 4703 } |
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4760 : LDeferredCode(codegen), instr_(instr) { } | 4760 : LDeferredCode(codegen), instr_(instr) { } |
4761 virtual void Generate() V8_OVERRIDE { | 4761 virtual void Generate() V8_OVERRIDE { |
4762 codegen()->DoDeferredTaggedToI(instr_, done()); | 4762 codegen()->DoDeferredTaggedToI(instr_, done()); |
4763 } | 4763 } |
4764 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 4764 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
4765 private: | 4765 private: |
4766 LTaggedToI* instr_; | 4766 LTaggedToI* instr_; |
4767 }; | 4767 }; |
4768 | 4768 |
4769 LOperand* input = instr->value(); | 4769 LOperand* input = instr->value(); |
4770 ASSERT(input->IsRegister()); | 4770 DCHECK(input->IsRegister()); |
4771 Register input_reg = ToRegister(input); | 4771 Register input_reg = ToRegister(input); |
4772 ASSERT(input_reg.is(ToRegister(instr->result()))); | 4772 DCHECK(input_reg.is(ToRegister(instr->result()))); |
4773 | 4773 |
4774 if (instr->hydrogen()->value()->representation().IsSmi()) { | 4774 if (instr->hydrogen()->value()->representation().IsSmi()) { |
4775 __ SmiUntag(input_reg); | 4775 __ SmiUntag(input_reg); |
4776 } else { | 4776 } else { |
4777 DeferredTaggedToI* deferred = | 4777 DeferredTaggedToI* deferred = |
4778 new(zone()) DeferredTaggedToI(this, instr); | 4778 new(zone()) DeferredTaggedToI(this, instr); |
4779 // Optimistically untag the input. | 4779 // Optimistically untag the input. |
4780 // If the input is a HeapObject, SmiUntag will set the carry flag. | 4780 // If the input is a HeapObject, SmiUntag will set the carry flag. |
4781 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); | 4781 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); |
4782 __ SmiUntag(input_reg); | 4782 __ SmiUntag(input_reg); |
4783 // Branch to deferred code if the input was tagged. | 4783 // Branch to deferred code if the input was tagged. |
4784 // The deferred code will take care of restoring the tag. | 4784 // The deferred code will take care of restoring the tag. |
4785 __ j(carry, deferred->entry()); | 4785 __ j(carry, deferred->entry()); |
4786 __ bind(deferred->exit()); | 4786 __ bind(deferred->exit()); |
4787 } | 4787 } |
4788 } | 4788 } |
4789 | 4789 |
4790 | 4790 |
4791 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { | 4791 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { |
4792 LOperand* input = instr->value(); | 4792 LOperand* input = instr->value(); |
4793 ASSERT(input->IsRegister()); | 4793 DCHECK(input->IsRegister()); |
4794 LOperand* temp = instr->temp(); | 4794 LOperand* temp = instr->temp(); |
4795 ASSERT(temp->IsRegister()); | 4795 DCHECK(temp->IsRegister()); |
4796 LOperand* result = instr->result(); | 4796 LOperand* result = instr->result(); |
4797 ASSERT(result->IsDoubleRegister()); | 4797 DCHECK(result->IsDoubleRegister()); |
4798 | 4798 |
4799 Register input_reg = ToRegister(input); | 4799 Register input_reg = ToRegister(input); |
4800 bool deoptimize_on_minus_zero = | 4800 bool deoptimize_on_minus_zero = |
4801 instr->hydrogen()->deoptimize_on_minus_zero(); | 4801 instr->hydrogen()->deoptimize_on_minus_zero(); |
4802 Register temp_reg = ToRegister(temp); | 4802 Register temp_reg = ToRegister(temp); |
4803 | 4803 |
4804 HValue* value = instr->hydrogen()->value(); | 4804 HValue* value = instr->hydrogen()->value(); |
4805 NumberUntagDMode mode = value->representation().IsSmi() | 4805 NumberUntagDMode mode = value->representation().IsSmi() |
4806 ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED; | 4806 ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED; |
4807 | 4807 |
4808 XMMRegister result_reg = ToDoubleRegister(result); | 4808 XMMRegister result_reg = ToDoubleRegister(result); |
4809 EmitNumberUntagD(input_reg, | 4809 EmitNumberUntagD(input_reg, |
4810 temp_reg, | 4810 temp_reg, |
4811 result_reg, | 4811 result_reg, |
4812 instr->hydrogen()->can_convert_undefined_to_nan(), | 4812 instr->hydrogen()->can_convert_undefined_to_nan(), |
4813 deoptimize_on_minus_zero, | 4813 deoptimize_on_minus_zero, |
4814 instr->environment(), | 4814 instr->environment(), |
4815 mode); | 4815 mode); |
4816 } | 4816 } |
4817 | 4817 |
4818 | 4818 |
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  // Converts a double in an XMM register to a 32-bit integer register.
  LOperand* input = instr->value();
  DCHECK(input->IsDoubleRegister());
  LOperand* result = instr->result();
  DCHECK(result->IsRegister());
  Register result_reg = ToRegister(result);

  if (instr->truncating()) {
    // Truncating conversion: no deopt is emitted on this path.
    XMMRegister input_reg = ToDoubleRegister(input);
    __ TruncateDoubleToI(result_reg, input_reg);
  } else {
    // Exact conversion: DoubleToI branches to |bailout| when the value is
    // not exactly representable (minus-zero handling per the hydrogen
    // instruction's mode), and the bailout deoptimizes unconditionally.
    Label bailout, done;
    XMMRegister input_reg = ToDoubleRegister(input);
    XMMRegister xmm_scratch = double_scratch0();
    __ DoubleToI(result_reg, input_reg, xmm_scratch,
                 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear);
    __ jmp(&done, Label::kNear);
    __ bind(&bailout);
    DeoptimizeIf(no_condition, instr->environment());
    __ bind(&done);
  }
}
4841 | 4841 |
4842 | 4842 |
4843 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 4843 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
4844 LOperand* input = instr->value(); | 4844 LOperand* input = instr->value(); |
4845 ASSERT(input->IsDoubleRegister()); | 4845 DCHECK(input->IsDoubleRegister()); |
4846 LOperand* result = instr->result(); | 4846 LOperand* result = instr->result(); |
4847 ASSERT(result->IsRegister()); | 4847 DCHECK(result->IsRegister()); |
4848 Register result_reg = ToRegister(result); | 4848 Register result_reg = ToRegister(result); |
4849 | 4849 |
4850 Label bailout, done; | 4850 Label bailout, done; |
4851 XMMRegister input_reg = ToDoubleRegister(input); | 4851 XMMRegister input_reg = ToDoubleRegister(input); |
4852 XMMRegister xmm_scratch = double_scratch0(); | 4852 XMMRegister xmm_scratch = double_scratch0(); |
4853 __ DoubleToI(result_reg, input_reg, xmm_scratch, | 4853 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
4854 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); | 4854 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); |
4855 __ jmp(&done, Label::kNear); | 4855 __ jmp(&done, Label::kNear); |
4856 __ bind(&bailout); | 4856 __ bind(&bailout); |
4857 DeoptimizeIf(no_condition, instr->environment()); | 4857 DeoptimizeIf(no_condition, instr->environment()); |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4903 static_cast<int8_t>(last)); | 4903 static_cast<int8_t>(last)); |
4904 DeoptimizeIf(above, instr->environment()); | 4904 DeoptimizeIf(above, instr->environment()); |
4905 } | 4905 } |
4906 } | 4906 } |
4907 } else { | 4907 } else { |
4908 uint8_t mask; | 4908 uint8_t mask; |
4909 uint8_t tag; | 4909 uint8_t tag; |
4910 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 4910 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); |
4911 | 4911 |
4912 if (IsPowerOf2(mask)) { | 4912 if (IsPowerOf2(mask)) { |
4913 ASSERT(tag == 0 || IsPowerOf2(tag)); | 4913 DCHECK(tag == 0 || IsPowerOf2(tag)); |
4914 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask); | 4914 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask); |
4915 DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment()); | 4915 DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment()); |
4916 } else { | 4916 } else { |
4917 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); | 4917 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); |
4918 __ and_(temp, mask); | 4918 __ and_(temp, mask); |
4919 __ cmp(temp, tag); | 4919 __ cmp(temp, tag); |
4920 DeoptimizeIf(not_equal, instr->environment()); | 4920 DeoptimizeIf(not_equal, instr->environment()); |
4921 } | 4921 } |
4922 } | 4922 } |
4923 } | 4923 } |
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4972 | 4972 |
4973 if (instr->hydrogen()->IsStabilityCheck()) { | 4973 if (instr->hydrogen()->IsStabilityCheck()) { |
4974 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); | 4974 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); |
4975 for (int i = 0; i < maps->size(); ++i) { | 4975 for (int i = 0; i < maps->size(); ++i) { |
4976 AddStabilityDependency(maps->at(i).handle()); | 4976 AddStabilityDependency(maps->at(i).handle()); |
4977 } | 4977 } |
4978 return; | 4978 return; |
4979 } | 4979 } |
4980 | 4980 |
4981 LOperand* input = instr->value(); | 4981 LOperand* input = instr->value(); |
4982 ASSERT(input->IsRegister()); | 4982 DCHECK(input->IsRegister()); |
4983 Register reg = ToRegister(input); | 4983 Register reg = ToRegister(input); |
4984 | 4984 |
4985 DeferredCheckMaps* deferred = NULL; | 4985 DeferredCheckMaps* deferred = NULL; |
4986 if (instr->hydrogen()->HasMigrationTarget()) { | 4986 if (instr->hydrogen()->HasMigrationTarget()) { |
4987 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); | 4987 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); |
4988 __ bind(deferred->check_maps()); | 4988 __ bind(deferred->check_maps()); |
4989 } | 4989 } |
4990 | 4990 |
4991 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); | 4991 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); |
4992 Label success; | 4992 Label success; |
(...skipping 17 matching lines...) Expand all Loading... |
5010 | 5010 |
5011 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5011 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
5012 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); | 5012 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); |
5013 XMMRegister xmm_scratch = double_scratch0(); | 5013 XMMRegister xmm_scratch = double_scratch0(); |
5014 Register result_reg = ToRegister(instr->result()); | 5014 Register result_reg = ToRegister(instr->result()); |
5015 __ ClampDoubleToUint8(value_reg, xmm_scratch, result_reg); | 5015 __ ClampDoubleToUint8(value_reg, xmm_scratch, result_reg); |
5016 } | 5016 } |
5017 | 5017 |
5018 | 5018 |
5019 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { | 5019 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { |
5020 ASSERT(instr->unclamped()->Equals(instr->result())); | 5020 DCHECK(instr->unclamped()->Equals(instr->result())); |
5021 Register value_reg = ToRegister(instr->result()); | 5021 Register value_reg = ToRegister(instr->result()); |
5022 __ ClampUint8(value_reg); | 5022 __ ClampUint8(value_reg); |
5023 } | 5023 } |
5024 | 5024 |
5025 | 5025 |
5026 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { | 5026 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { |
5027 ASSERT(instr->unclamped()->Equals(instr->result())); | 5027 DCHECK(instr->unclamped()->Equals(instr->result())); |
5028 Register input_reg = ToRegister(instr->unclamped()); | 5028 Register input_reg = ToRegister(instr->unclamped()); |
5029 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); | 5029 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); |
5030 XMMRegister xmm_scratch = double_scratch0(); | 5030 XMMRegister xmm_scratch = double_scratch0(); |
5031 Label is_smi, done, heap_number; | 5031 Label is_smi, done, heap_number; |
5032 | 5032 |
5033 __ JumpIfSmi(input_reg, &is_smi); | 5033 __ JumpIfSmi(input_reg, &is_smi); |
5034 | 5034 |
5035 // Check for heap number | 5035 // Check for heap number |
5036 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 5036 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
5037 factory()->heap_number_map()); | 5037 factory()->heap_number_map()); |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5112 | 5112 |
5113 Register result = ToRegister(instr->result()); | 5113 Register result = ToRegister(instr->result()); |
5114 Register temp = ToRegister(instr->temp()); | 5114 Register temp = ToRegister(instr->temp()); |
5115 | 5115 |
5116 // Allocate memory for the object. | 5116 // Allocate memory for the object. |
5117 AllocationFlags flags = TAG_OBJECT; | 5117 AllocationFlags flags = TAG_OBJECT; |
5118 if (instr->hydrogen()->MustAllocateDoubleAligned()) { | 5118 if (instr->hydrogen()->MustAllocateDoubleAligned()) { |
5119 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); | 5119 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); |
5120 } | 5120 } |
5121 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { | 5121 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
5122 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); | 5122 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
5123 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5123 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
5124 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); | 5124 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); |
5125 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5125 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
5126 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5126 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
5127 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); | 5127 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); |
5128 } | 5128 } |
5129 | 5129 |
5130 if (instr->size()->IsConstantOperand()) { | 5130 if (instr->size()->IsConstantOperand()) { |
5131 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5131 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
5132 if (size <= Page::kMaxRegularHeapObjectSize) { | 5132 if (size <= Page::kMaxRegularHeapObjectSize) { |
5133 __ Allocate(size, result, temp, no_reg, deferred->entry(), flags); | 5133 __ Allocate(size, result, temp, no_reg, deferred->entry(), flags); |
5134 } else { | 5134 } else { |
5135 __ jmp(deferred->entry()); | 5135 __ jmp(deferred->entry()); |
5136 } | 5136 } |
(...skipping 27 matching lines...) Expand all Loading... |
5164 Register result = ToRegister(instr->result()); | 5164 Register result = ToRegister(instr->result()); |
5165 | 5165 |
5166 // TODO(3095996): Get rid of this. For now, we need to make the | 5166 // TODO(3095996): Get rid of this. For now, we need to make the |
5167 // result register contain a valid pointer because it is already | 5167 // result register contain a valid pointer because it is already |
5168 // contained in the register pointer map. | 5168 // contained in the register pointer map. |
5169 __ Move(result, Immediate(Smi::FromInt(0))); | 5169 __ Move(result, Immediate(Smi::FromInt(0))); |
5170 | 5170 |
5171 PushSafepointRegistersScope scope(this); | 5171 PushSafepointRegistersScope scope(this); |
5172 if (instr->size()->IsRegister()) { | 5172 if (instr->size()->IsRegister()) { |
5173 Register size = ToRegister(instr->size()); | 5173 Register size = ToRegister(instr->size()); |
5174 ASSERT(!size.is(result)); | 5174 DCHECK(!size.is(result)); |
5175 __ SmiTag(ToRegister(instr->size())); | 5175 __ SmiTag(ToRegister(instr->size())); |
5176 __ push(size); | 5176 __ push(size); |
5177 } else { | 5177 } else { |
5178 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5178 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
5179 if (size >= 0 && size <= Smi::kMaxValue) { | 5179 if (size >= 0 && size <= Smi::kMaxValue) { |
5180 __ push(Immediate(Smi::FromInt(size))); | 5180 __ push(Immediate(Smi::FromInt(size))); |
5181 } else { | 5181 } else { |
5182 // We should never get here at runtime => abort | 5182 // We should never get here at runtime => abort |
5183 __ int3(); | 5183 __ int3(); |
5184 return; | 5184 return; |
5185 } | 5185 } |
5186 } | 5186 } |
5187 | 5187 |
5188 int flags = AllocateDoubleAlignFlag::encode( | 5188 int flags = AllocateDoubleAlignFlag::encode( |
5189 instr->hydrogen()->MustAllocateDoubleAligned()); | 5189 instr->hydrogen()->MustAllocateDoubleAligned()); |
5190 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { | 5190 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
5191 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); | 5191 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
5192 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5192 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
5193 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); | 5193 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); |
5194 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5194 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
5195 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5195 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
5196 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); | 5196 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); |
5197 } else { | 5197 } else { |
5198 flags = AllocateTargetSpace::update(flags, NEW_SPACE); | 5198 flags = AllocateTargetSpace::update(flags, NEW_SPACE); |
5199 } | 5199 } |
5200 __ push(Immediate(Smi::FromInt(flags))); | 5200 __ push(Immediate(Smi::FromInt(flags))); |
5201 | 5201 |
5202 CallRuntimeFromDeferred( | 5202 CallRuntimeFromDeferred( |
5203 Runtime::kAllocateInTargetSpace, 2, instr, instr->context()); | 5203 Runtime::kAllocateInTargetSpace, 2, instr, instr->context()); |
5204 __ StoreToSafepointRegisterSlot(result, eax); | 5204 __ StoreToSafepointRegisterSlot(result, eax); |
5205 } | 5205 } |
5206 | 5206 |
5207 | 5207 |
// Calls Runtime::kToFastProperties on the instruction's value.  The value
// is register-constrained to eax; it is pushed as the single runtime
// argument.
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  DCHECK(ToRegister(instr->value()).is(eax));
  __ push(eax);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
5213 | 5213 |
5214 | 5214 |
5215 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 5215 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
5216 ASSERT(ToRegister(instr->context()).is(esi)); | 5216 DCHECK(ToRegister(instr->context()).is(esi)); |
5217 Label materialized; | 5217 Label materialized; |
5218 // Registers will be used as follows: | 5218 // Registers will be used as follows: |
5219 // ecx = literals array. | 5219 // ecx = literals array. |
5220 // ebx = regexp literal. | 5220 // ebx = regexp literal. |
5221 // eax = regexp literal clone. | 5221 // eax = regexp literal clone. |
5222 // esi = context. | 5222 // esi = context. |
5223 int literal_offset = | 5223 int literal_offset = |
5224 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); | 5224 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); |
5225 __ LoadHeapObject(ecx, instr->hydrogen()->literals()); | 5225 __ LoadHeapObject(ecx, instr->hydrogen()->literals()); |
5226 __ mov(ebx, FieldOperand(ecx, literal_offset)); | 5226 __ mov(ebx, FieldOperand(ecx, literal_offset)); |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5258 __ mov(FieldOperand(eax, i + kPointerSize), ecx); | 5258 __ mov(FieldOperand(eax, i + kPointerSize), ecx); |
5259 } | 5259 } |
5260 if ((size % (2 * kPointerSize)) != 0) { | 5260 if ((size % (2 * kPointerSize)) != 0) { |
5261 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); | 5261 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); |
5262 __ mov(FieldOperand(eax, size - kPointerSize), edx); | 5262 __ mov(FieldOperand(eax, size - kPointerSize), edx); |
5263 } | 5263 } |
5264 } | 5264 } |
5265 | 5265 |
5266 | 5266 |
// Materializes a closure for a function literal.  The context is
// register-constrained to esi.
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  DCHECK(ToRegister(instr->context()).is(esi));
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  if (!pretenure && instr->hydrogen()->has_no_literals()) {
    // Fast path: FastNewClosureStub takes the shared function info in ebx.
    FastNewClosureStub stub(isolate(),
                            instr->hydrogen()->strict_mode(),
                            instr->hydrogen()->is_generator());
    __ mov(ebx, Immediate(instr->hydrogen()->shared_info()));
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    // Slow path: Runtime::kNewClosure(context, shared_info, pretenure_flag).
    __ push(esi);
    __ push(Immediate(instr->hydrogen()->shared_info()));
    __ push(Immediate(pretenure ? factory()->true_value()
                                : factory()->false_value()));
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
5286 | 5286 |
5287 | 5287 |
// Implements the typeof operator by pushing the tagged operand and calling
// Runtime::kTypeof.  The context is register-constrained to esi.
void LCodeGen::DoTypeof(LTypeof* instr) {
  DCHECK(ToRegister(instr->context()).is(esi));
  LOperand* input = instr->value();
  // The operand may be a register, stack slot, or constant; the helper
  // pushes its tagged form either way.
  EmitPushTaggedOperand(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}
5294 | 5294 |
5295 | 5295 |
5296 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { | 5296 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { |
5297 Register input = ToRegister(instr->value()); | 5297 Register input = ToRegister(instr->value()); |
5298 Condition final_branch_condition = EmitTypeofIs(instr, input); | 5298 Condition final_branch_condition = EmitTypeofIs(instr, input); |
5299 if (final_branch_condition != no_condition) { | 5299 if (final_branch_condition != no_condition) { |
(...skipping 113 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5413 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 5413 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
5414 __ Nop(padding_size); | 5414 __ Nop(padding_size); |
5415 } | 5415 } |
5416 } | 5416 } |
5417 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5417 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5418 } | 5418 } |
5419 | 5419 |
5420 | 5420 |
// Registers the instruction's environment for lazy deoptimization and
// attaches its deoptimization index to the preceding safepoint.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // Record the pc offset first: this is the point a lazy deopt will patch.
  last_lazy_deopt_pc_ = masm()->pc_offset();
  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
5428 | 5428 |
5429 | 5429 |
5430 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5430 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
5431 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5431 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
5432 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5432 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
5433 // needed return address), even though the implementation of LAZY and EAGER is | 5433 // needed return address), even though the implementation of LAZY and EAGER is |
(...skipping 16 matching lines...) Expand all Loading... |
5450 // Nothing to see here, move on! | 5450 // Nothing to see here, move on! |
5451 } | 5451 } |
5452 | 5452 |
5453 | 5453 |
// Out-of-line slow path for DoStackCheck: calls the runtime stack guard
// with all registers saved and records a safepoint that supports lazy
// deoptimization on return.
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  PushSafepointRegistersScope scope(this);
  // Reload the context from the frame before calling into the runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
5464 | 5464 |
5465 | 5465 |
// Emits a stack-limit check, either at function entry (calling the
// StackCheck builtin inline) or at a backwards branch (jumping to deferred
// code that calls the runtime stack guard).
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Deferred code object that routes the slow path to DoDeferredStackCheck.
  class DeferredStackCheck V8_FINAL : public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredStackCheck(instr_);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
   private:
    LStackCheck* instr_;
  };

  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack-checks. We have to
  // prepare for lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &done, Label::kNear);

    // The StackCheck builtin expects the context in esi.
    DCHECK(instr->context()->IsRegister());
    DCHECK(ToRegister(instr->context()).is(esi));
    CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
             instr);
    __ bind(&done);
  } else {
    DCHECK(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new(zone()) DeferredStackCheck(this, instr);
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(below, deferred_stack_check->entry());
    // Reserve room so a lazy-deopt patch cannot overwrite the code at the
    // done label.
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This will be done explicitly when emitting call and the safepoint in
    // the deferred code.
  }
}
5515 | 5515 |
5516 | 5516 |
// Marks an on-stack-replacement entry point.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  DCHECK(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);

  GenerateOsrPrologue();
}
5530 | 5530 |
5531 | 5531 |
5532 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 5532 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { |
5533 ASSERT(ToRegister(instr->context()).is(esi)); | 5533 DCHECK(ToRegister(instr->context()).is(esi)); |
5534 __ cmp(eax, isolate()->factory()->undefined_value()); | 5534 __ cmp(eax, isolate()->factory()->undefined_value()); |
5535 DeoptimizeIf(equal, instr->environment()); | 5535 DeoptimizeIf(equal, instr->environment()); |
5536 | 5536 |
5537 __ cmp(eax, isolate()->factory()->null_value()); | 5537 __ cmp(eax, isolate()->factory()->null_value()); |
5538 DeoptimizeIf(equal, instr->environment()); | 5538 DeoptimizeIf(equal, instr->environment()); |
5539 | 5539 |
5540 __ test(eax, Immediate(kSmiTagMask)); | 5540 __ test(eax, Immediate(kSmiTagMask)); |
5541 DeoptimizeIf(zero, instr->environment()); | 5541 DeoptimizeIf(zero, instr->environment()); |
5542 | 5542 |
5543 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 5543 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
(...skipping 131 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5675 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 5675 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
5676 RecordSafepoint(Safepoint::kNoLazyDeopt); | 5676 RecordSafepoint(Safepoint::kNoLazyDeopt); |
5677 } | 5677 } |
5678 | 5678 |
5679 | 5679 |
5680 #undef __ | 5680 #undef __ |
5681 | 5681 |
5682 } } // namespace v8::internal | 5682 } } // namespace v8::internal |
5683 | 5683 |
5684 #endif // V8_TARGET_ARCH_IA32 | 5684 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |