Chromium Code Reviews

Unified Diff: src/x64/lithium-codegen-x64.cc

Issue 199903002: Introduce Push and Pop macro instructions for x64 (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased with bleeding_edge (created 6 years, 9 months ago)
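Note on the change: throughout the lithium code generator, raw pointer-sized stack operations (push, pop, push_imm32) become the new Push, Pop, and PushImm32 macro instructions, while frame-pointer manipulation moves to the explicit 64-bit forms pushq/popq. The point is to concentrate knowledge of how large a pointer-sized stack slot is in one place, so a port where kPointerSize is smaller than the 8 bytes a hardware push always moves (such as an x32-style ABI) only has to change the macros. A minimal sketch of the idea follows; this is not the actual macro-assembler-x64 implementation, and the narrow-pointer branch is illustrative only:

    // Sketch only: a pointer-size-aware Push/Pop pair. kPointerSize,
    // kInt64Size, movp, and leal are V8 macro-assembler names; the
    // else-branch is a hypothetical narrow-pointer (x32-style) path.
    void MacroAssembler::Push(Register src) {
      if (kPointerSize == kInt64Size) {
        pushq(src);  // Ordinary x64: a stack slot is a full 8-byte push.
      } else {
        // Pointers are narrower than the 8 bytes pushq moves, so adjust
        // rsp by kPointerSize and store the register manually.
        leal(rsp, Operand(rsp, -kPointerSize));
        movp(Operand(rsp, 0), src);
      }
    }

    void MacroAssembler::Pop(Register dst) {
      if (kPointerSize == kInt64Size) {
        popq(dst);
      } else {
        movp(dst, Operand(rsp, 0));
        leal(rsp, Operand(rsp, kPointerSize));
      }
    }

Frame-pointer saves stay on the raw instructions (pushq(rbp)/popq(rbp)) because the saved rbp slot in a frame is always a full 64-bit machine word, whatever the pointer size.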
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -1,10 +1,10 @@
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 173 matching lines...)
@@ -184,52 +184,52 @@
   }
 
   // Reserve space for the stack slots needed by the code.
   int slots = GetStackSlotCount();
   if (slots > 0) {
     if (FLAG_debug_code) {
       __ subq(rsp, Immediate(slots * kPointerSize));
 #ifdef _MSC_VER
       MakeSureStackPagesMapped(slots * kPointerSize);
 #endif
-      __ push(rax);
+      __ Push(rax);
       __ Set(rax, slots);
       __ movq(kScratchRegister, kSlotsZapValue);
       Label loop;
       __ bind(&loop);
       __ movp(MemOperand(rsp, rax, times_pointer_size, 0),
               kScratchRegister);
       __ decl(rax);
       __ j(not_zero, &loop);
-      __ pop(rax);
+      __ Pop(rax);
     } else {
       __ subq(rsp, Immediate(slots * kPointerSize));
 #ifdef _MSC_VER
       MakeSureStackPagesMapped(slots * kPointerSize);
 #endif
     }
 
     if (info()->saves_caller_doubles()) {
       SaveCallerDoubles();
     }
   }
 
   // Possibly allocate a local context.
   int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
   if (heap_slots > 0) {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is still in rdi.
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
       FastNewContextStub stub(heap_slots);
       __ CallStub(&stub);
     } else {
-      __ push(rdi);
+      __ Push(rdi);
       __ CallRuntime(Runtime::kNewFunctionContext, 1);
     }
     RecordSafepoint(Safepoint::kNoLazyDeopt);
     // Context is returned in rax. It replaces the context passed to us.
     // It's saved in the stack and kept live in rsi.
     __ movp(rsi, rax);
     __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
 
     // Copy any necessary parameters into the context.
     int num_parameters = scope()->num_parameters();
(...skipping 60 matching lines...)
296 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); 296 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
297 } 297 }
298 if (jump_table_[i].needs_frame) { 298 if (jump_table_[i].needs_frame) {
299 ASSERT(!info()->saves_caller_doubles()); 299 ASSERT(!info()->saves_caller_doubles());
300 __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); 300 __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
301 if (needs_frame.is_bound()) { 301 if (needs_frame.is_bound()) {
302 __ jmp(&needs_frame); 302 __ jmp(&needs_frame);
303 } else { 303 } else {
304 __ bind(&needs_frame); 304 __ bind(&needs_frame);
305 __ movp(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset)); 305 __ movp(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset));
306 __ push(rbp); 306 __ pushq(rbp);
307 __ movp(rbp, rsp); 307 __ movp(rbp, rsp);
308 __ push(rsi); 308 __ Push(rsi);
309 // This variant of deopt can only be used with stubs. Since we don't 309 // This variant of deopt can only be used with stubs. Since we don't
310 // have a function pointer to install in the stack frame that we're 310 // have a function pointer to install in the stack frame that we're
311 // building, install a special marker there instead. 311 // building, install a special marker there instead.
312 ASSERT(info()->IsStub()); 312 ASSERT(info()->IsStub());
313 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); 313 __ Move(rsi, Smi::FromInt(StackFrame::STUB));
314 __ push(rsi); 314 __ Push(rsi);
315 __ movp(rsi, MemOperand(rsp, kPointerSize)); 315 __ movp(rsi, MemOperand(rsp, kPointerSize));
316 __ call(kScratchRegister); 316 __ call(kScratchRegister);
317 } 317 }
318 } else { 318 } else {
319 if (info()->saves_caller_doubles()) { 319 if (info()->saves_caller_doubles()) {
320 ASSERT(info()->IsStub()); 320 ASSERT(info()->IsStub());
321 RestoreCallerDoubles(); 321 RestoreCallerDoubles();
322 } 322 }
323 __ call(entry, RelocInfo::RUNTIME_ENTRY); 323 __ call(entry, RelocInfo::RUNTIME_ENTRY);
324 } 324 }
(...skipping 18 matching lines...)
@@ -343,34 +343,34 @@
               code->instruction_index(),
               code->instr()->hydrogen_value()->id(),
               code->instr()->Mnemonic());
       __ bind(code->entry());
       if (NeedsDeferredFrame()) {
         Comment(";;; Build frame");
         ASSERT(!frame_is_built_);
         ASSERT(info()->IsStub());
         frame_is_built_ = true;
         // Build the frame in such a way that esi isn't trashed.
-        __ push(rbp);  // Caller's frame pointer.
-        __ push(Operand(rbp, StandardFrameConstants::kContextOffset));
+        __ pushq(rbp);  // Caller's frame pointer.
+        __ Push(Operand(rbp, StandardFrameConstants::kContextOffset));
         __ Push(Smi::FromInt(StackFrame::STUB));
         __ lea(rbp, Operand(rsp, 2 * kPointerSize));
         Comment(";;; Deferred code");
       }
       code->Generate();
       if (NeedsDeferredFrame()) {
         __ bind(code->done());
         Comment(";;; Destroy frame");
         ASSERT(frame_is_built_);
         frame_is_built_ = false;
         __ movp(rsp, rbp);
-        __ pop(rbp);
+        __ popq(rbp);
       }
       __ jmp(code->exit());
     }
   }
 
   // Deferred code is the last part of the instruction sequence. Mark
   // the generated code as done unless we bailed out.
   if (!is_aborted()) status_ = DONE;
   return !is_aborted();
 }
(...skipping 338 matching lines...)
@@ -715,35 +715,35 @@
       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
   if (entry == NULL) {
     Abort(kBailoutWasNotPrepared);
     return;
   }
 
   if (DeoptEveryNTimes()) {
     ExternalReference count = ExternalReference::stress_deopt_count(isolate());
     Label no_deopt;
     __ pushfq();
-    __ push(rax);
+    __ Push(rax);
     Operand count_operand = masm()->ExternalOperand(count, kScratchRegister);
     __ movl(rax, count_operand);
     __ subl(rax, Immediate(1));
     __ j(not_zero, &no_deopt, Label::kNear);
     if (FLAG_trap_on_deopt) __ int3();
     __ movl(rax, Immediate(FLAG_deopt_every_n_times));
     __ movl(count_operand, rax);
-    __ pop(rax);
+    __ Pop(rax);
     __ popfq();
     ASSERT(frame_is_built_);
     __ call(entry, RelocInfo::RUNTIME_ENTRY);
     __ bind(&no_deopt);
     __ movl(count_operand, rax);
-    __ pop(rax);
+    __ Pop(rax);
     __ popfq();
   }
 
   if (info()->ShouldTrapOnDeopt()) {
     Label done;
     if (cc != no_condition) {
       __ j(NegateCondition(cc), &done, Label::kNear);
     }
     __ int3();
     __ bind(&done);
(...skipping 915 matching lines...)
@@ -1665,31 +1665,31 @@
                            SeqString::kHeaderSize);
 }
 
 
 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
   String::Encoding encoding = instr->hydrogen()->encoding();
   Register result = ToRegister(instr->result());
   Register string = ToRegister(instr->string());
 
   if (FLAG_debug_code) {
-    __ push(string);
+    __ Push(string);
     __ movp(string, FieldOperand(string, HeapObject::kMapOffset));
     __ movzxbq(string, FieldOperand(string, Map::kInstanceTypeOffset));
 
     __ andb(string, Immediate(kStringRepresentationMask | kStringEncodingMask));
     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
     __ cmpq(string, Immediate(encoding == String::ONE_BYTE_ENCODING
                               ? one_byte_seq_type : two_byte_seq_type));
     __ Check(equal, kUnexpectedStringType);
-    __ pop(string);
+    __ Pop(string);
   }
 
   Operand operand = BuildSeqStringOperand(string, instr->index(), encoding);
   if (encoding == String::ONE_BYTE_ENCODING) {
     __ movzxbl(result, operand);
   } else {
     __ movzxwl(result, operand);
   }
 }
 
(...skipping 797 matching lines...)
@@ -2493,22 +2493,22 @@
   Register reg = ToRegister(instr->value());
 
   __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
   EmitBranch(instr, equal);
 }
 
 
 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   ASSERT(ToRegister(instr->context()).is(rsi));
   InstanceofStub stub(InstanceofStub::kNoFlags);
-  __ push(ToRegister(instr->left()));
-  __ push(ToRegister(instr->right()));
+  __ Push(ToRegister(instr->left()));
+  __ Push(ToRegister(instr->right()));
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   Label true_value, done;
   __ testq(rax, rax);
   __ j(zero, &true_value, Label::kNear);
   __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
   __ jmp(&done, Label::kNear);
   __ bind(&true_value);
   __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
   __ bind(&done);
 }
(...skipping 65 matching lines...)
@@ -2580,28 +2580,28 @@
 
 
 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
   {
     PushSafepointRegistersScope scope(this);
     InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
         InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
     InstanceofStub stub(flags);
 
-    __ push(ToRegister(instr->value()));
+    __ Push(ToRegister(instr->value()));
     __ Push(instr->function());
 
     static const int kAdditionalDelta = 10;
     int delta =
         masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
     ASSERT(delta >= 0);
-    __ push_imm32(delta);
+    __ PushImm32(delta);
 
     // We are pushing three values on the stack but recording a
     // safepoint with two arguments because stub is going to
     // remove the third argument from the stack before jumping
     // to instanceof builtin on the slow path.
     CallCodeGeneric(stub.GetCode(isolate()),
                     RelocInfo::CODE_TARGET,
                     instr,
                     RECORD_SAFEPOINT_WITH_REGISTERS,
                     2);
(...skipping 34 matching lines...)
@@ -2642,31 +2642,31 @@
   __ bind(&done);
 }
 
 
 void LCodeGen::DoReturn(LReturn* instr) {
   if (FLAG_trace && info()->IsOptimizing()) {
     // Preserve the return value on the stack and rely on the runtime call
     // to return the value in the same register. We're leaving the code
     // managed by the register allocator and tearing down the frame, it's
     // safe to write to the context register.
-    __ push(rax);
+    __ Push(rax);
     __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
     __ CallRuntime(Runtime::kTraceExit, 1);
   }
   if (info()->saves_caller_doubles()) {
     RestoreCallerDoubles();
   }
   int no_frame_start = -1;
   if (NeedsEagerFrame()) {
     __ movp(rsp, rbp);
-    __ pop(rbp);
+    __ popq(rbp);
     no_frame_start = masm_->pc_offset();
   }
   if (instr->has_constant_parameter_count()) {
     __ Ret((ToInteger32(instr->constant_parameter_count()) + 1) * kPointerSize,
            rcx);
   } else {
     Register reg = ToRegister(instr->parameter_count());
     // The argument count parameter is a smi
     __ SmiToInteger32(reg, reg);
     Register return_addr_reg = reg.is(rcx) ? rbx : rcx;
(...skipping 591 matching lines...)
@@ -3264,33 +3264,33 @@
   ASSERT(receiver.is(rax));  // Used for parameter count.
   ASSERT(function.is(rdi));  // Required by InvokeFunction.
   ASSERT(ToRegister(instr->result()).is(rax));
 
   // Copy the arguments to this function possibly from the
   // adaptor frame below it.
   const uint32_t kArgumentsLimit = 1 * KB;
   __ cmpq(length, Immediate(kArgumentsLimit));
   DeoptimizeIf(above, instr->environment());
 
-  __ push(receiver);
+  __ Push(receiver);
   __ movp(receiver, length);
 
   // Loop through the arguments pushing them onto the execution
   // stack.
   Label invoke, loop;
   // length is a small non-negative integer, due to the test above.
   __ testl(length, length);
   __ j(zero, &invoke, Label::kNear);
   __ bind(&loop);
   StackArgumentsAccessor args(elements, length,
                               ARGUMENTS_DONT_CONTAIN_RECEIVER);
-  __ push(args.GetArgumentOperand(0));
+  __ Push(args.GetArgumentOperand(0));
   __ decl(length);
   __ j(not_zero, &loop);
 
   // Invoke the function.
   __ bind(&invoke);
   ASSERT(instr->HasPointerMap());
   LPointerMap* pointers = instr->pointer_map();
   SafepointGenerator safepoint_generator(
       this, pointers, Safepoint::kLazyDeopt);
   ParameterCount actual(rax);
(...skipping 24 matching lines...)
@@ -3321,21 +3321,21 @@
     __ movp(result, Operand(rbp, StandardFrameConstants::kContextOffset));
   } else {
     // If there is no frame, the context must be in rsi.
     ASSERT(result.is(rsi));
   }
 }
 
 
 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
   ASSERT(ToRegister(instr->context()).is(rsi));
-  __ push(rsi);  // The context is the first argument.
+  __ Push(rsi);  // The context is the first argument.
   __ Push(instr->hydrogen()->pairs());
   __ Push(Smi::FromInt(instr->hydrogen()->flags()));
   CallRuntime(Runtime::kDeclareGlobals, 3, instr);
 }
 
 
 void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                  int formal_parameter_count,
                                  int arity,
                                  LInstruction* instr,
(...skipping 1089 matching lines...)
@@ -4431,31 +4431,31 @@
 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
   Register string = ToRegister(instr->string());
   Register result = ToRegister(instr->result());
 
   // TODO(3095996): Get rid of this. For now, we need to make the
   // result register contain a valid pointer because it is already
   // contained in the register pointer map.
   __ Set(result, 0);
 
   PushSafepointRegistersScope scope(this);
-  __ push(string);
+  __ Push(string);
   // Push the index as a smi. This is safe because of the checks in
   // DoStringCharCodeAt above.
   STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
   if (instr->index()->IsConstantOperand()) {
     int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
     __ Push(Smi::FromInt(const_index));
   } else {
     Register index = ToRegister(instr->index());
     __ Integer32ToSmi(index, index);
-    __ push(index);
+    __ Push(index);
   }
   CallRuntimeFromDeferred(
       Runtime::kStringCharCodeAt, 2, instr, instr->context());
   __ AssertSmi(rax);
   __ SmiToInteger32(rax, rax);
   __ StoreToSafepointRegisterSlot(result, rax);
 }
 
 
 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
(...skipping 34 matching lines...)
@@ -4496,21 +4496,21 @@
   Register char_code = ToRegister(instr->char_code());
   Register result = ToRegister(instr->result());
 
   // TODO(3095996): Get rid of this. For now, we need to make the
   // result register contain a valid pointer because it is already
   // contained in the register pointer map.
   __ Set(result, 0);
 
   PushSafepointRegistersScope scope(this);
   __ Integer32ToSmi(char_code, char_code);
-  __ push(char_code);
+  __ Push(char_code);
   CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
   __ StoreToSafepointRegisterSlot(result, rax);
 }
 
 
 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
   LOperand* input = instr->value();
   ASSERT(input->IsRegister() || input->IsStackSlot());
   LOperand* output = instr->result();
   ASSERT(output->IsDoubleRegister());
(...skipping 449 matching lines...)
@@ -4966,21 +4966,21 @@
 void LCodeGen::DoCheckValue(LCheckValue* instr) {
   Register reg = ToRegister(instr->value());
   __ Cmp(reg, instr->hydrogen()->object().handle());
   DeoptimizeIf(not_equal, instr->environment());
 }
 
 
 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
   {
     PushSafepointRegistersScope scope(this);
-    __ push(object);
+    __ Push(object);
     __ Set(rsi, 0);
     __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
     RecordSafepointWithRegisters(
         instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
 
     __ testq(rax, Immediate(kSmiTagMask));
   }
   DeoptimizeIf(zero, instr->environment());
 }
 
(...skipping 195 matching lines...)
@@ -5182,139 +5182,139 @@
   // TODO(3095996): Get rid of this. For now, we need to make the
   // result register contain a valid pointer because it is already
   // contained in the register pointer map.
   __ Move(result, Smi::FromInt(0));
 
   PushSafepointRegistersScope scope(this);
   if (instr->size()->IsRegister()) {
     Register size = ToRegister(instr->size());
     ASSERT(!size.is(result));
     __ Integer32ToSmi(size, size);
-    __ push(size);
+    __ Push(size);
   } else {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
     __ Push(Smi::FromInt(size));
   }
 
   int flags = 0;
   if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
     ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
     ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
   } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
   __ Push(Smi::FromInt(flags));
 
   CallRuntimeFromDeferred(
       Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
   __ StoreToSafepointRegisterSlot(result, rax);
 }
 
 
 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
   ASSERT(ToRegister(instr->value()).is(rax));
-  __ push(rax);
+  __ Push(rax);
   CallRuntime(Runtime::kToFastProperties, 1, instr);
 }
 
 
 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   ASSERT(ToRegister(instr->context()).is(rsi));
   Label materialized;
   // Registers will be used as follows:
   // rcx = literals array.
   // rbx = regexp literal.
   // rax = regexp literal clone.
   int literal_offset =
       FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
   __ Move(rcx, instr->hydrogen()->literals());
   __ movp(rbx, FieldOperand(rcx, literal_offset));
   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
   __ j(not_equal, &materialized, Label::kNear);
 
   // Create regexp literal using runtime function
   // Result will be in rax.
-  __ push(rcx);
+  __ Push(rcx);
   __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
   __ Push(instr->hydrogen()->pattern());
   __ Push(instr->hydrogen()->flags());
   CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
   __ movp(rbx, rax);
 
   __ bind(&materialized);
   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
   Label allocated, runtime_allocate;
   __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
   __ jmp(&allocated, Label::kNear);
 
   __ bind(&runtime_allocate);
-  __ push(rbx);
+  __ Push(rbx);
   __ Push(Smi::FromInt(size));
   CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
-  __ pop(rbx);
+  __ Pop(rbx);
 
   __ bind(&allocated);
   // Copy the content into the newly allocated memory.
   // (Unroll copy loop once for better throughput).
   for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
     __ movp(rdx, FieldOperand(rbx, i));
     __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
     __ movp(FieldOperand(rax, i), rdx);
     __ movp(FieldOperand(rax, i + kPointerSize), rcx);
   }
   if ((size % (2 * kPointerSize)) != 0) {
     __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
     __ movp(FieldOperand(rax, size - kPointerSize), rdx);
   }
 }
 
 
 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   ASSERT(ToRegister(instr->context()).is(rsi));
   // Use the fast case closure allocation code that allocates in new
   // space for nested functions that don't need literals cloning.
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && instr->hydrogen()->has_no_literals()) {
     FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
                             instr->hydrogen()->is_generator());
     __ Move(rbx, instr->hydrogen()->shared_info());
     CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   } else {
-    __ push(rsi);
+    __ Push(rsi);
     __ Push(instr->hydrogen()->shared_info());
     __ PushRoot(pretenure ? Heap::kTrueValueRootIndex :
                             Heap::kFalseValueRootIndex);
     CallRuntime(Runtime::kNewClosure, 3, instr);
   }
 }
 
 
 void LCodeGen::DoTypeof(LTypeof* instr) {
   ASSERT(ToRegister(instr->context()).is(rsi));
   LOperand* input = instr->value();
   EmitPushTaggedOperand(input);
   CallRuntime(Runtime::kTypeof, 1, instr);
 }
 
 
 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
   ASSERT(!operand->IsDoubleRegister());
   if (operand->IsConstantOperand()) {
     __ Push(ToHandle(LConstantOperand::cast(operand)));
   } else if (operand->IsRegister()) {
-    __ push(ToRegister(operand));
+    __ Push(ToRegister(operand));
   } else {
-    __ push(ToOperand(operand));
+    __ Push(ToOperand(operand));
   }
 }
 
 
 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
   Register input = ToRegister(instr->value());
   Condition final_branch_condition = EmitTypeofIs(instr, input);
   if (final_branch_condition != no_condition) {
     EmitBranch(instr, final_branch_condition);
   }
(...skipping 252 matching lines...)
@@ -5573,21 +5573,21 @@
   DeoptimizeIf(below_equal, instr->environment());
 
   Label use_cache, call_runtime;
   __ CheckEnumCache(null_value, &call_runtime);
 
   __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
   __ jmp(&use_cache, Label::kNear);
 
   // Get the set of properties to enumerate.
   __ bind(&call_runtime);
-  __ push(rax);
+  __ Push(rax);
   CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
 
   __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                  Heap::kMetaMapRootIndex);
   DeoptimizeIf(not_equal, instr->environment());
   __ bind(&use_cache);
 }
 
 
 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
(...skipping 49 matching lines...)
@@ -5643,10 +5643,10 @@
                                 FixedArray::kHeaderSize - kPointerSize));
   __ bind(&done);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64
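For illustration, the substitution pattern applied across the hunks above, condensed into a before/after table (drawn directly from the diff):

    // Before                      // After
    __ push(rax);                  __ Push(rax);        // pointer-sized value slot
    __ pop(rax);                   __ Pop(rax);
    __ push_imm32(delta);          __ PushImm32(delta);
    __ push(rbp);                  __ pushq(rbp);       // frame pointer: always 64-bit
    __ pop(rbp);                   __ popq(rbp);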
