Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(88)

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 6614010: [Isolates] Merge 6700:7030 from bleeding_edge to isolates. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/isolates/
Patch Set: '' Created 9 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/lithium-codegen-x64.h ('k') | src/x64/lithium-x64.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 19 matching lines...) Expand all
30 #if defined(V8_TARGET_ARCH_X64) 30 #if defined(V8_TARGET_ARCH_X64)
31 31
32 #include "x64/lithium-codegen-x64.h" 32 #include "x64/lithium-codegen-x64.h"
33 #include "code-stubs.h" 33 #include "code-stubs.h"
34 #include "stub-cache.h" 34 #include "stub-cache.h"
35 35
36 namespace v8 { 36 namespace v8 {
37 namespace internal { 37 namespace internal {
38 38
39 39
40 // When invoking builtins, we need to record the safepoint in the middle of
41 // the invoke instruction sequence generated by the macro assembler.
42 class SafepointGenerator : public PostCallGenerator {
43 public:
44 SafepointGenerator(LCodeGen* codegen,
45 LPointerMap* pointers,
46 int deoptimization_index,
47 bool ensure_reloc_space = false)
48 : codegen_(codegen),
49 pointers_(pointers),
50 deoptimization_index_(deoptimization_index),
51 ensure_reloc_space_(ensure_reloc_space) { }
52 virtual ~SafepointGenerator() { }
53
54 virtual void Generate() {
55 // Ensure that we have enough space in the reloc info to patch
56 // this with calls when doing deoptimization.
57 if (ensure_reloc_space_) {
58 codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
59 }
60 codegen_->RecordSafepoint(pointers_, deoptimization_index_);
61 }
62
63 private:
64 LCodeGen* codegen_;
65 LPointerMap* pointers_;
66 int deoptimization_index_;
67 bool ensure_reloc_space_;
68 };
69
70
40 #define __ masm()-> 71 #define __ masm()->
41 72
42 bool LCodeGen::GenerateCode() { 73 bool LCodeGen::GenerateCode() {
43 HPhase phase("Code generation", chunk()); 74 HPhase phase("Code generation", chunk());
44 ASSERT(is_unused()); 75 ASSERT(is_unused());
45 status_ = GENERATING; 76 status_ = GENERATING;
46 return GeneratePrologue() && 77 return GeneratePrologue() &&
47 GenerateBody() && 78 GenerateBody() &&
48 GenerateDeferredCode() && 79 GenerateDeferredCode() &&
80 GenerateJumpTable() &&
49 GenerateSafepointTable(); 81 GenerateSafepointTable();
50 } 82 }
51 83
52 84
53 void LCodeGen::FinishCode(Handle<Code> code) { 85 void LCodeGen::FinishCode(Handle<Code> code) {
54 ASSERT(is_done()); 86 ASSERT(is_done());
55 code->set_stack_slots(StackSlotCount()); 87 code->set_stack_slots(StackSlotCount());
56 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); 88 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
57 PopulateDeoptimizationData(code); 89 PopulateDeoptimizationData(code);
58 } 90 }
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after
125 const int kPageSize = 4 * KB; 157 const int kPageSize = 4 * KB;
126 for (int offset = slots * kPointerSize - kPageSize; 158 for (int offset = slots * kPointerSize - kPageSize;
127 offset > 0; 159 offset > 0;
128 offset -= kPageSize) { 160 offset -= kPageSize) {
129 __ movq(Operand(rsp, offset), rax); 161 __ movq(Operand(rsp, offset), rax);
130 } 162 }
131 #endif 163 #endif
132 } 164 }
133 } 165 }
134 166
167 // Possibly allocate a local context.
168 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
169 if (heap_slots > 0) {
170 Comment(";;; Allocate local context");
171 // Argument to NewContext is the function, which is still in rdi.
172 __ push(rdi);
173 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
174 FastNewContextStub stub(heap_slots);
175 __ CallStub(&stub);
176 } else {
177 __ CallRuntime(Runtime::kNewContext, 1);
178 }
179 RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
180 // Context is returned in both rax and rsi. It replaces the context
181 // passed to us. It's saved in the stack and kept live in rsi.
182 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
183
184 // Copy any necessary parameters into the context.
185 int num_parameters = scope()->num_parameters();
186 for (int i = 0; i < num_parameters; i++) {
187 Slot* slot = scope()->parameter(i)->AsSlot();
188 if (slot != NULL && slot->type() == Slot::CONTEXT) {
189 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
190 (num_parameters - 1 - i) * kPointerSize;
191 // Load parameter from stack.
192 __ movq(rax, Operand(rbp, parameter_offset));
193 // Store it in the context.
194 int context_offset = Context::SlotOffset(slot->index());
195 __ movq(Operand(rsi, context_offset), rax);
196 // Update the write barrier. This clobbers all involved
197 // registers, so we have use a third register to avoid
198 // clobbering rsi.
199 __ movq(rcx, rsi);
200 __ RecordWrite(rcx, context_offset, rax, rbx);
201 }
202 }
203 Comment(";;; End allocate local context");
204 }
205
135 // Trace the call. 206 // Trace the call.
136 if (FLAG_trace) { 207 if (FLAG_trace) {
137 __ CallRuntime(Runtime::kTraceEnter, 0); 208 __ CallRuntime(Runtime::kTraceEnter, 0);
138 } 209 }
139 return !is_aborted(); 210 return !is_aborted();
140 } 211 }
141 212
142 213
143 bool LCodeGen::GenerateBody() { 214 bool LCodeGen::GenerateBody() {
144 ASSERT(is_generating()); 215 ASSERT(is_generating());
(...skipping 18 matching lines...) Expand all
163 234
164 LInstruction* LCodeGen::GetNextInstruction() { 235 LInstruction* LCodeGen::GetNextInstruction() {
165 if (current_instruction_ < instructions_->length() - 1) { 236 if (current_instruction_ < instructions_->length() - 1) {
166 return instructions_->at(current_instruction_ + 1); 237 return instructions_->at(current_instruction_ + 1);
167 } else { 238 } else {
168 return NULL; 239 return NULL;
169 } 240 }
170 } 241 }
171 242
172 243
244 bool LCodeGen::GenerateJumpTable() {
245 for (int i = 0; i < jump_table_.length(); i++) {
246 JumpTableEntry* info = jump_table_[i];
247 __ bind(&(info->label_));
248 __ Jump(info->address_, RelocInfo::RUNTIME_ENTRY);
249 }
250 return !is_aborted();
251 }
252
253
173 bool LCodeGen::GenerateDeferredCode() { 254 bool LCodeGen::GenerateDeferredCode() {
174 ASSERT(is_generating()); 255 ASSERT(is_generating());
175 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 256 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
176 LDeferredCode* code = deferred_[i]; 257 LDeferredCode* code = deferred_[i];
177 __ bind(code->entry()); 258 __ bind(code->entry());
178 code->Generate(); 259 code->Generate();
179 __ jmp(code->exit()); 260 __ jmp(code->exit());
180 } 261 }
181 262
182 // Deferred code is the last part of the instruction sequence. Mark 263 // Deferred code is the last part of the instruction sequence. Mark
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
245 Handle<Object> value = chunk_->LookupLiteral(op); 326 Handle<Object> value = chunk_->LookupLiteral(op);
246 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); 327 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
247 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) == 328 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
248 value->Number()); 329 value->Number());
249 return static_cast<int32_t>(value->Number()); 330 return static_cast<int32_t>(value->Number());
250 } 331 }
251 332
252 333
253 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { 334 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
254 Handle<Object> literal = chunk_->LookupLiteral(op); 335 Handle<Object> literal = chunk_->LookupLiteral(op);
255 Representation r = chunk_->LookupLiteralRepresentation(op); 336 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
256 ASSERT(r.IsTagged());
257 return literal; 337 return literal;
258 } 338 }
259 339
260 340
261 Operand LCodeGen::ToOperand(LOperand* op) const { 341 Operand LCodeGen::ToOperand(LOperand* op) const {
262 // Does not handle registers. In X64 assembler, plain registers are not 342 // Does not handle registers. In X64 assembler, plain registers are not
263 // representable as an Operand. 343 // representable as an Operand.
264 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); 344 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
265 int index = op->index(); 345 int index = op->index();
266 if (index >= 0) { 346 if (index >= 0) {
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after
436 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); 516 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
437 ASSERT(entry != NULL); 517 ASSERT(entry != NULL);
438 if (entry == NULL) { 518 if (entry == NULL) {
439 Abort("bailout was not prepared"); 519 Abort("bailout was not prepared");
440 return; 520 return;
441 } 521 }
442 522
443 if (cc == no_condition) { 523 if (cc == no_condition) {
444 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); 524 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
445 } else { 525 } else {
446 NearLabel done; 526 JumpTableEntry* jump_info = NULL;
447 __ j(NegateCondition(cc), &done); 527 // We often have several deopts to the same entry, reuse the last
448 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); 528 // jump entry if this is the case.
449 __ bind(&done); 529 if (jump_table_.length() > 0 &&
530 jump_table_[jump_table_.length() - 1]->address_ == entry) {
531 jump_info = jump_table_[jump_table_.length() - 1];
532 } else {
533 jump_info = new JumpTableEntry(entry);
534 jump_table_.Add(jump_info);
535 }
536 __ j(cc, &jump_info->label_);
450 } 537 }
451 } 538 }
452 539
453 540
454 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { 541 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
455 int length = deoptimizations_.length(); 542 int length = deoptimizations_.length();
456 if (length == 0) return; 543 if (length == 0) return;
457 ASSERT(FLAG_deopt); 544 ASSERT(FLAG_deopt);
458 Handle<DeoptimizationInputData> data = 545 Handle<DeoptimizationInputData> data =
459 FACTORY->NewDeoptimizationInputData(length, TENURED); 546 FACTORY->NewDeoptimizationInputData(length, TENURED);
460 547
461 data->SetTranslationByteArray(*translations_.CreateByteArray()); 548 Handle<ByteArray> translations = translations_.CreateByteArray();
549 data->SetTranslationByteArray(*translations);
462 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); 550 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
463 551
464 Handle<FixedArray> literals = 552 Handle<FixedArray> literals =
465 FACTORY->NewFixedArray(deoptimization_literals_.length(), TENURED); 553 FACTORY->NewFixedArray(deoptimization_literals_.length(), TENURED);
466 for (int i = 0; i < deoptimization_literals_.length(); i++) { 554 for (int i = 0; i < deoptimization_literals_.length(); i++) {
467 literals->set(i, *deoptimization_literals_[i]); 555 literals->set(i, *deoptimization_literals_[i]);
468 } 556 }
469 data->SetLiteralArray(*literals); 557 data->SetLiteralArray(*literals);
470 558
471 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); 559 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after
532 } 620 }
533 } 621 }
534 622
535 623
536 void LCodeGen::RecordSafepoint(LPointerMap* pointers, 624 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
537 int deoptimization_index) { 625 int deoptimization_index) {
538 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); 626 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
539 } 627 }
540 628
541 629
630 void LCodeGen::RecordSafepoint(int deoptimization_index) {
631 LPointerMap empty_pointers(RelocInfo::kNoPosition);
632 RecordSafepoint(&empty_pointers, deoptimization_index);
633 }
634
635
542 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, 636 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
543 int arguments, 637 int arguments,
544 int deoptimization_index) { 638 int deoptimization_index) {
545 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, 639 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
546 deoptimization_index); 640 deoptimization_index);
547 } 641 }
548 642
549 643
550 void LCodeGen::RecordPosition(int position) { 644 void LCodeGen::RecordPosition(int position) {
551 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; 645 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
604 RegExpExecStub stub; 698 RegExpExecStub stub;
605 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 699 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
606 break; 700 break;
607 } 701 }
608 case CodeStub::SubString: { 702 case CodeStub::SubString: {
609 SubStringStub stub; 703 SubStringStub stub;
610 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 704 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
611 break; 705 break;
612 } 706 }
613 case CodeStub::StringCharAt: { 707 case CodeStub::StringCharAt: {
614 // TODO(1116): Add StringCharAt stub to x64. 708 StringCharAtStub stub;
615 Abort("Unimplemented: %s", "StringCharAt Stub"); 709 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
616 break; 710 break;
617 } 711 }
618 case CodeStub::MathPow: { 712 case CodeStub::MathPow: {
619 // TODO(1115): Add MathPow stub to x64. 713 MathPowStub stub;
620 Abort("Unimplemented: %s", "MathPow Stub"); 714 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
621 break; 715 break;
622 } 716 }
623 case CodeStub::NumberToString: { 717 case CodeStub::NumberToString: {
624 NumberToStringStub stub; 718 NumberToStringStub stub;
625 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 719 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
626 break; 720 break;
627 } 721 }
628 case CodeStub::StringAdd: { 722 case CodeStub::StringAdd: {
629 StringAddStub stub(NO_STRING_ADD_FLAGS); 723 StringAddStub stub(NO_STRING_ADD_FLAGS);
630 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 724 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
631 break; 725 break;
632 } 726 }
633 case CodeStub::StringCompare: { 727 case CodeStub::StringCompare: {
634 StringCompareStub stub; 728 StringCompareStub stub;
635 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 729 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
636 break; 730 break;
637 } 731 }
638 case CodeStub::TranscendentalCache: { 732 case CodeStub::TranscendentalCache: {
639 TranscendentalCacheStub stub(instr->transcendental_type()); 733 TranscendentalCacheStub stub(instr->transcendental_type(),
734 TranscendentalCacheStub::TAGGED);
640 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 735 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
641 break; 736 break;
642 } 737 }
643 default: 738 default:
644 UNREACHABLE(); 739 UNREACHABLE();
645 } 740 }
646 } 741 }
647 742
648 743
649 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { 744 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
650 // Nothing to do. 745 // Nothing to do.
651 } 746 }
652 747
653 748
654 void LCodeGen::DoModI(LModI* instr) { 749 void LCodeGen::DoModI(LModI* instr) {
655 Abort("Unimplemented: %s", "DoModI"); 750 LOperand* right = instr->InputAt(1);
751 ASSERT(ToRegister(instr->result()).is(rdx));
752 ASSERT(ToRegister(instr->InputAt(0)).is(rax));
753 ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
754 ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));
755
756 Register right_reg = ToRegister(right);
757
758 // Check for x % 0.
759 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
760 __ testl(right_reg, right_reg);
761 DeoptimizeIf(zero, instr->environment());
762 }
763
764 // Sign extend eax to edx. (We are using only the low 32 bits of the values.)
765 __ cdq();
766
767 // Check for (0 % -x) that will produce negative zero.
768 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
769 NearLabel positive_left;
770 NearLabel done;
771 __ testl(rax, rax);
772 __ j(not_sign, &positive_left);
773 __ idivl(right_reg);
774
775 // Test the remainder for 0, because then the result would be -0.
776 __ testl(rdx, rdx);
777 __ j(not_zero, &done);
778
779 DeoptimizeIf(no_condition, instr->environment());
780 __ bind(&positive_left);
781 __ idivl(right_reg);
782 __ bind(&done);
783 } else {
784 __ idivl(right_reg);
785 }
656 } 786 }
657 787
658 788
659 void LCodeGen::DoDivI(LDivI* instr) { 789 void LCodeGen::DoDivI(LDivI* instr) {
660 LOperand* right = instr->InputAt(1); 790 LOperand* right = instr->InputAt(1);
661 ASSERT(ToRegister(instr->result()).is(rax)); 791 ASSERT(ToRegister(instr->result()).is(rax));
662 ASSERT(ToRegister(instr->InputAt(0)).is(rax)); 792 ASSERT(ToRegister(instr->InputAt(0)).is(rax));
663 ASSERT(!ToRegister(instr->InputAt(1)).is(rax)); 793 ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
664 ASSERT(!ToRegister(instr->InputAt(1)).is(rdx)); 794 ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));
665 795
(...skipping 215 matching lines...) Expand 10 before | Expand all | Expand 10 after
881 void LCodeGen::DoConstantI(LConstantI* instr) { 1011 void LCodeGen::DoConstantI(LConstantI* instr) {
882 ASSERT(instr->result()->IsRegister()); 1012 ASSERT(instr->result()->IsRegister());
883 __ movl(ToRegister(instr->result()), Immediate(instr->value())); 1013 __ movl(ToRegister(instr->result()), Immediate(instr->value()));
884 } 1014 }
885 1015
886 1016
887 void LCodeGen::DoConstantD(LConstantD* instr) { 1017 void LCodeGen::DoConstantD(LConstantD* instr) {
888 ASSERT(instr->result()->IsDoubleRegister()); 1018 ASSERT(instr->result()->IsDoubleRegister());
889 XMMRegister res = ToDoubleRegister(instr->result()); 1019 XMMRegister res = ToDoubleRegister(instr->result());
890 double v = instr->value(); 1020 double v = instr->value();
1021 uint64_t int_val = BitCast<uint64_t, double>(v);
891 // Use xor to produce +0.0 in a fast and compact way, but avoid to 1022 // Use xor to produce +0.0 in a fast and compact way, but avoid to
892 // do so if the constant is -0.0. 1023 // do so if the constant is -0.0.
893 if (BitCast<uint64_t, double>(v) == 0) { 1024 if (int_val == 0) {
894 __ xorpd(res, res); 1025 __ xorpd(res, res);
895 } else { 1026 } else {
896 Register tmp = ToRegister(instr->TempAt(0)); 1027 Register tmp = ToRegister(instr->TempAt(0));
897 int32_t v_int32 = static_cast<int32_t>(v); 1028 __ Set(tmp, int_val);
898 if (static_cast<double>(v_int32) == v) { 1029 __ movq(res, tmp);
899 __ movl(tmp, Immediate(v_int32));
900 __ cvtlsi2sd(res, tmp);
901 } else {
902 uint64_t int_val = BitCast<uint64_t, double>(v);
903 __ Set(tmp, int_val);
904 __ movd(res, tmp);
905 }
906 } 1030 }
907 } 1031 }
908 1032
909 1033
910 void LCodeGen::DoConstantT(LConstantT* instr) { 1034 void LCodeGen::DoConstantT(LConstantT* instr) {
911 ASSERT(instr->result()->IsRegister()); 1035 ASSERT(instr->result()->IsRegister());
912 __ Move(ToRegister(instr->result()), instr->value()); 1036 __ Move(ToRegister(instr->result()), instr->value());
913 } 1037 }
914 1038
915 1039
916 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { 1040 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
917 Register result = ToRegister(instr->result()); 1041 Register result = ToRegister(instr->result());
918 Register array = ToRegister(instr->InputAt(0)); 1042 Register array = ToRegister(instr->InputAt(0));
919 __ movq(result, FieldOperand(array, JSArray::kLengthOffset)); 1043 __ movq(result, FieldOperand(array, JSArray::kLengthOffset));
920 } 1044 }
921 1045
922 1046
923 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { 1047 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
924 Register result = ToRegister(instr->result()); 1048 Register result = ToRegister(instr->result());
925 Register array = ToRegister(instr->InputAt(0)); 1049 Register array = ToRegister(instr->InputAt(0));
926 __ movq(result, FieldOperand(array, FixedArray::kLengthOffset)); 1050 __ movq(result, FieldOperand(array, FixedArray::kLengthOffset));
927 } 1051 }
928 1052
929 1053
1054 void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) {
1055 Register result = ToRegister(instr->result());
1056 Register array = ToRegister(instr->InputAt(0));
1057 __ movq(result, FieldOperand(array, PixelArray::kLengthOffset));
1058 }
1059
1060
930 void LCodeGen::DoValueOf(LValueOf* instr) { 1061 void LCodeGen::DoValueOf(LValueOf* instr) {
931 Abort("Unimplemented: %s", "DoValueOf"); 1062 Register input = ToRegister(instr->InputAt(0));
1063 Register result = ToRegister(instr->result());
1064 ASSERT(input.is(result));
1065 NearLabel done;
1066 // If the object is a smi return the object.
1067 __ JumpIfSmi(input, &done);
1068
1069 // If the object is not a value type, return the object.
1070 __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister);
1071 __ j(not_equal, &done);
1072 __ movq(result, FieldOperand(input, JSValue::kValueOffset));
1073
1074 __ bind(&done);
932 } 1075 }
933 1076
934 1077
935 void LCodeGen::DoBitNotI(LBitNotI* instr) { 1078 void LCodeGen::DoBitNotI(LBitNotI* instr) {
936 LOperand* input = instr->InputAt(0); 1079 LOperand* input = instr->InputAt(0);
937 ASSERT(input->Equals(instr->result())); 1080 ASSERT(input->Equals(instr->result()));
938 __ not_(ToRegister(input)); 1081 __ not_(ToRegister(input));
939 } 1082 }
940 1083
941 1084
(...skipping 22 matching lines...) Expand all
964 __ addl(ToRegister(left), ToOperand(right)); 1107 __ addl(ToRegister(left), ToOperand(right));
965 } 1108 }
966 1109
967 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { 1110 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
968 DeoptimizeIf(overflow, instr->environment()); 1111 DeoptimizeIf(overflow, instr->environment());
969 } 1112 }
970 } 1113 }
971 1114
972 1115
973 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { 1116 void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
974 Abort("Unimplemented: %s", "DoArithmeticD"); 1117 XMMRegister left = ToDoubleRegister(instr->InputAt(0));
1118 XMMRegister right = ToDoubleRegister(instr->InputAt(1));
1119 XMMRegister result = ToDoubleRegister(instr->result());
1120 // All operations except MOD are computed in-place.
1121 ASSERT(instr->op() == Token::MOD || left.is(result));
1122 switch (instr->op()) {
1123 case Token::ADD:
1124 __ addsd(left, right);
1125 break;
1126 case Token::SUB:
1127 __ subsd(left, right);
1128 break;
1129 case Token::MUL:
1130 __ mulsd(left, right);
1131 break;
1132 case Token::DIV:
1133 __ divsd(left, right);
1134 break;
1135 case Token::MOD:
1136 __ PrepareCallCFunction(2);
1137 __ movsd(xmm0, left);
1138 ASSERT(right.is(xmm1));
1139 __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 2);
1140 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1141 __ movsd(result, xmm0);
1142 break;
1143 default:
1144 UNREACHABLE();
1145 break;
1146 }
975 } 1147 }
976 1148
977 1149
978 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 1150 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
979 ASSERT(ToRegister(instr->InputAt(0)).is(rdx)); 1151 ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
980 ASSERT(ToRegister(instr->InputAt(1)).is(rax)); 1152 ASSERT(ToRegister(instr->InputAt(1)).is(rax));
981 ASSERT(ToRegister(instr->result()).is(rax)); 1153 ASSERT(ToRegister(instr->result()).is(rax));
982 1154
983 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); 1155 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
984 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1156 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
(...skipping 441 matching lines...) Expand 10 before | Expand all | Expand 10 after
1426 InstanceType to = instr->to(); 1598 InstanceType to = instr->to();
1427 if (from == to) return equal; 1599 if (from == to) return equal;
1428 if (to == LAST_TYPE) return above_equal; 1600 if (to == LAST_TYPE) return above_equal;
1429 if (from == FIRST_TYPE) return below_equal; 1601 if (from == FIRST_TYPE) return below_equal;
1430 UNREACHABLE(); 1602 UNREACHABLE();
1431 return equal; 1603 return equal;
1432 } 1604 }
1433 1605
1434 1606
1435 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) { 1607 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
1436 Abort("Unimplemented: %s", "DoHasInstanceType"); 1608 Register input = ToRegister(instr->InputAt(0));
1609 Register result = ToRegister(instr->result());
1610
1611 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1612 __ testl(input, Immediate(kSmiTagMask));
1613 NearLabel done, is_false;
1614 __ j(zero, &is_false);
1615 __ CmpObjectType(input, TestType(instr->hydrogen()), result);
1616 __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
1617 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1618 __ jmp(&done);
1619 __ bind(&is_false);
1620 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1621 __ bind(&done);
1437 } 1622 }
1438 1623
1439 1624
1440 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) { 1625 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1441 Register input = ToRegister(instr->InputAt(0)); 1626 Register input = ToRegister(instr->InputAt(0));
1442 1627
1443 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1628 int true_block = chunk_->LookupDestination(instr->true_block_id());
1444 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1629 int false_block = chunk_->LookupDestination(instr->false_block_id());
1445 1630
1446 Label* false_label = chunk_->GetAssemblyLabel(false_block); 1631 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1447 1632
1448 __ JumpIfSmi(input, false_label); 1633 __ JumpIfSmi(input, false_label);
1449 1634
1450 __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister); 1635 __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
1451 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen())); 1636 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
1452 } 1637 }
1453 1638
1454 1639
1455 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) { 1640 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
1456 Abort("Unimplemented: %s", "DoHasCachedArrayIndex"); 1641 Register input = ToRegister(instr->InputAt(0));
1642 Register result = ToRegister(instr->result());
1643
1644 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1645 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1646 __ testl(FieldOperand(input, String::kHashFieldOffset),
1647 Immediate(String::kContainsCachedArrayIndexMask));
1648 NearLabel done;
1649 __ j(not_zero, &done);
1650 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1651 __ bind(&done);
1457 } 1652 }
1458 1653
1459 1654
1460 void LCodeGen::DoHasCachedArrayIndexAndBranch( 1655 void LCodeGen::DoHasCachedArrayIndexAndBranch(
1461 LHasCachedArrayIndexAndBranch* instr) { 1656 LHasCachedArrayIndexAndBranch* instr) {
1462 Register input = ToRegister(instr->InputAt(0)); 1657 Register input = ToRegister(instr->InputAt(0));
1463 1658
1464 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1659 int true_block = chunk_->LookupDestination(instr->true_block_id());
1465 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1660 int false_block = chunk_->LookupDestination(instr->false_block_id());
1466 1661
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after
1568 Register reg = ToRegister(instr->InputAt(0)); 1763 Register reg = ToRegister(instr->InputAt(0));
1569 int true_block = instr->true_block_id(); 1764 int true_block = instr->true_block_id();
1570 int false_block = instr->false_block_id(); 1765 int false_block = instr->false_block_id();
1571 1766
1572 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); 1767 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1573 EmitBranch(true_block, false_block, equal); 1768 EmitBranch(true_block, false_block, equal);
1574 } 1769 }
1575 1770
1576 1771
1577 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 1772 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1578 Abort("Unimplemented: %s", "DoInstanceOf"); 1773 InstanceofStub stub(InstanceofStub::kNoFlags);
1774 __ push(ToRegister(instr->InputAt(0)));
1775 __ push(ToRegister(instr->InputAt(1)));
1776 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1777 NearLabel true_value, done;
1778 __ testq(rax, rax);
1779 __ j(zero, &true_value);
1780 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
1781 __ jmp(&done);
1782 __ bind(&true_value);
1783 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
1784 __ bind(&done);
1579 } 1785 }
1580 1786
1581 1787
1582 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { 1788 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1583 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1789 int true_block = chunk_->LookupDestination(instr->true_block_id());
1584 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1790 int false_block = chunk_->LookupDestination(instr->false_block_id());
1585 1791
1586 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1792 InstanceofStub stub(InstanceofStub::kNoFlags);
1793 __ push(ToRegister(instr->InputAt(0)));
1794 __ push(ToRegister(instr->InputAt(1)));
1587 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1795 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1588 __ testq(rax, rax); 1796 __ testq(rax, rax);
1589 EmitBranch(true_block, false_block, zero); 1797 EmitBranch(true_block, false_block, zero);
1590 } 1798 }
1591 1799
1592 1800
// Emits `x instanceof F` where F is a known global function. The fast paths
// (Smi, null, string) produce `false` inline; everything else falls through
// to deferred code that calls the InstanceofStub.
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  // Deferred-code thunk that forwards to DoDeferredLInstanceOfKnownGlobal.
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_);
    }

   private:
    LInstanceOfKnownGlobal* instr_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label false_result;
  Register object = ToRegister(instr->InputAt(0));

  // A Smi is not an instance of anything.
  __ JumpIfSmi(object, &false_result);

  // Null is not an instance of anything.
  __ CompareRoot(object, Heap::kNullValueRootIndex);
  __ j(equal, &false_result);

  // String values are not instances of anything; any non-string object
  // goes to the deferred (stub-calling) path.
  __ JumpIfNotString(object, kScratchRegister, deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);

  // The deferred path leaves its boolean result in rax (see the deferred
  // helper); control rejoins here.
  __ bind(deferred->exit());
}
1596 1837
1597 1838
// Deferred slow path for DoInstanceOfKnownGlobal: calls the InstanceofStub
// with the object and the known function, then materializes true/false in
// rax. The stub's integer result is stashed in kScratchRegister across
// PopSafepointRegisters so the register restore cannot clobber it; zero
// means "is an instance" (matching the zero -> true convention used by the
// other instanceof emitters in this file).
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  __ PushSafepointRegisters();

  InstanceofStub stub(InstanceofStub::kNoFlags);

  __ push(ToRegister(instr->InputAt(0)));
  __ Push(instr->function());
  // Reload the context; the stub call expects rsi to hold it.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ movq(kScratchRegister, rax);
  __ PopSafepointRegisters();
  __ testq(kScratchRegister, kScratchRegister);
  Label load_false;
  Label done;
  __ j(not_zero, &load_false);
  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(&load_false);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
  __ bind(&done);
}
1602 1860
1603 1861
1604 void LCodeGen::DoCmpT(LCmpT* instr) { 1862 void LCodeGen::DoCmpT(LCmpT* instr) {
1605 Token::Value op = instr->op(); 1863 Token::Value op = instr->op();
1606 1864
1607 Handle<Code> ic = CompareIC::GetUninitialized(op); 1865 Handle<Code> ic = CompareIC::GetUninitialized(op);
1608 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1866 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1609 1867
1610 Condition condition = TokenToCondition(op, false); 1868 Condition condition = TokenToCondition(op, false);
(...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after
1687 __ movq(temp, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL); 1945 __ movq(temp, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
1688 if (check_hole) { 1946 if (check_hole) {
1689 __ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex); 1947 __ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex);
1690 DeoptimizeIf(equal, instr->environment()); 1948 DeoptimizeIf(equal, instr->environment());
1691 } 1949 }
1692 __ movq(Operand(temp, 0), value); 1950 __ movq(Operand(temp, 0), value);
1693 } 1951 }
1694 1952
1695 1953
// Loads a slot from a (register-allocated) context object.
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ movq(result, ContextOperand(context, instr->slot_index()));
}


// Stores a value into a context slot, emitting a write barrier when the
// hydrogen instruction says the stored value may be a heap pointer.
void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  __ movq(ContextOperand(context, instr->slot_index()), value);
  if (instr->needs_write_barrier()) {
    int offset = Context::SlotOffset(instr->slot_index());
    __ RecordWrite(context, offset, value, kScratchRegister);
  }
}
1699 1970
1700 1971
// Loads a named field at a known offset: either directly in-object, or via
// one extra indirection through the out-of-object properties backing store.
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
  } else {
    // Out-of-object property: go through the properties array first.
    __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
    __ movq(result, FieldOperand(result, instr->hydrogen()->offset()));
  }
}
1711 1982
1712 1983
// Generic named load via the LoadIC. The IC calling convention fixes the
// receiver in rax and the property name in rcx; the result comes back in rax.
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rax));
  ASSERT(ToRegister(instr->result()).is(rax));

  __ Move(rcx, instr->name());
  Handle<Code> ic(isolate()->builtins()->builtin(Builtins::LoadIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
1721 1992
1722 1993
// Loads the prototype of a JSFunction, deoptimizing if the input is not a
// function or its prototype is the hole. Handles both the initial-map case
// and the non-instance-prototype case.
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function.
  // (CmpObjectType leaves the map in `result`, which the non-instance
  // path below relies on.)
  __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
  DeoptimizeIf(not_equal, instr->environment());

  // Check whether the function has an instance prototype.
  NearLabel non_instance;
  __ testb(FieldOperand(result, Map::kBitFieldOffset),
           Immediate(1 << Map::kHasNonInstancePrototype));
  __ j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  __ movq(result,
          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(equal, instr->environment());

  // If the function does not have an initial map, we're done.
  NearLabel done;
  __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
  __ j(not_equal, &done);

  // Get the prototype from the initial map.
  __ movq(result, FieldOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in the function's map.
  __ bind(&non_instance);
  __ movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
1726 2033
1727 2034
// Loads the elements backing store of a JSObject. In debug builds, verifies
// that the elements are one of the expected fast kinds (fixed array, pixel
// array, or copy-on-write fixed array).
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    NearLabel done;
    __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
           FACTORY->fixed_array_map());
    __ j(equal, &done);
    __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
           FACTORY->pixel_array_map());
    __ j(equal, &done);
    __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
           FACTORY->fixed_cow_array_map());
    __ Check(equal, "Check for fast elements failed.");
    __ bind(&done);
  }
}
1743 2053
1744 2054
// Loads the raw external data pointer out of a PixelArray object.
void LCodeGen::DoLoadPixelArrayExternalPointer(
    LLoadPixelArrayExternalPointer* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(input, PixelArray::kExternalPointerOffset));
}
2061
2062
// Loads argument number `index` from an arguments frame: computes
// (length - index) and indexes off the frame pointer. Deoptimizes when the
// index is out of range (length - index <= 0).
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register result = ToRegister(instr->result());

  // length is clobbered below; it now holds (length - index).
  if (instr->index()->IsRegister()) {
    __ subl(length, ToRegister(instr->index()));
  } else {
    __ subl(length, ToOperand(instr->index()));
  }
  DeoptimizeIf(below_equal, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting the index from length accounts for one of them; the extra
  // kPointerSize displacement accounts for the other.
  __ movq(result, Operand(arguments, length, times_pointer_size, kPointerSize));
}
1748 2079
1749 2080
// Loads an element from a fast-elements FixedArray, deoptimizing when the
// loaded value is the hole (so callers always see a real value).
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = ToRegister(instr->key());
  Register result = ToRegister(instr->result());
  // The register allocator reuses the elements register for the result.
  ASSERT(result.is(elements));

  // Load the result.
  __ movq(result, FieldOperand(elements,
                               key,
                               times_pointer_size,
                               FixedArray::kHeaderSize));

  // Check for the hole value.
  __ Cmp(result, FACTORY->the_hole_value());
  DeoptimizeIf(equal, instr->environment());
}
1766 2097
1767 2098
// Loads one byte from a pixel array's external buffer and zero-extends it
// (pixel elements are unsigned 8-bit values).
void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) {
  Register external_elements = ToRegister(instr->external_pointer());
  Register key = ToRegister(instr->key());
  Register result = ToRegister(instr->result());
  // The register allocator reuses the pointer register for the result.
  ASSERT(result.is(external_elements));

  // Load the result.
  __ movzxbq(result, Operand(external_elements, key, times_1, 0));
}
2108
2109
// Generic keyed load via the KeyedLoadIC. IC calling convention: receiver
// in rdx, key in rax; result in rax.
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rdx));
  ASSERT(ToRegister(instr->key()).is(rax));

  Handle<Code> ic(isolate()->builtins()->builtin(
      Builtins::KeyedLoadIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
1771 2118
1772 2119
// Computes the frame pointer of the frame that holds the actual arguments:
// the current frame, or the arguments-adaptor frame below it when the call
// went through an adaptor.
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register result = ToRegister(instr->result());

  // Check for arguments adapter frame.
  NearLabel done, adapted;
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(result, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adapted);

  // No arguments adaptor frame.
  __ movq(result, rbp);
  __ jmp(&done);

  // Arguments adaptor frame present.
  __ bind(&adapted);
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ bind(&done);
}
1776 2142
1777 2143
// Computes the number of actual arguments. If the frame computed by
// DoArgumentsElements is our own frame (input == rbp) the count is the
// static parameter count; otherwise it is read from the adaptor frame.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register result = ToRegister(instr->result());

  NearLabel done;

  // If no arguments adaptor frame the number of arguments is fixed.
  // InputAt(0) is the frame pointer produced by LArgumentsElements.
  if (instr->InputAt(0)->IsRegister()) {
    __ cmpq(rbp, ToRegister(instr->InputAt(0)));
  } else {
    __ cmpq(rbp, ToOperand(instr->InputAt(0)));
  }
  __ movq(result, Immediate(scope()->num_parameters()));
  __ j(equal, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movq(result, Operand(result,
                          ArgumentsAdaptorFrameConstants::kLengthOffset));
  // The adaptor frame stores the length as a Smi.
  __ SmiToInteger32(result, result);

  // Argument length is in result register.
  __ bind(&done);
}
1781 2167
1782 2168
// Emits Function.prototype.apply with an arguments object: normalizes the
// receiver (null/undefined -> global object, non-object -> deopt), copies up
// to kArgumentsLimit arguments from the elements array onto the stack, and
// invokes the function with a lazy-deopt-aware safepoint.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  ASSERT(receiver.is(rax));  // Used for parameter count.
  ASSERT(function.is(rdi));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(rax));

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver.
  NearLabel global_object, receiver_ok;
  __ CompareRoot(receiver, Heap::kNullValueRootIndex);
  __ j(equal, &global_object);
  __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
  __ j(equal, &global_object);

  // The receiver should be a JS object.
  Condition is_smi = __ CheckSmi(receiver);
  DeoptimizeIf(is_smi, instr->environment());
  __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, kScratchRegister);
  DeoptimizeIf(below, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  // TODO(kmillikin): We have a hydrogen value for the global object. See
  // if it's better to use it than to explicitly fetch it from the context
  // here.
  __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmpq(length, Immediate(kArgumentsLimit));
  DeoptimizeIf(above, instr->environment());

  __ push(receiver);
  // rax (aliased by `receiver`) now carries the argument count for the
  // InvokeFunction below.
  __ movq(receiver, length);

  // Loop through the arguments pushing them onto the execution
  // stack.
  NearLabel invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ testl(length, length);
  __ j(zero, &invoke);
  __ bind(&loop);
  __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
  __ decl(length);
  __ j(not_zero, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index(),
                                         true);
  v8::internal::ParameterCount actual(rax);
  __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
}
1786 2235
1787 2236
// Pushes a single call argument, dispatching on the operand kind: constants
// go through the shared EmitPushConstantOperand helper, registers and stack
// slots are pushed directly.
void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  if (argument->IsConstantOperand()) {
    EmitPushConstantOperand(argument);
  } else if (argument->IsRegister()) {
    __ push(ToRegister(argument));
  } else {
    // Double registers cannot be pushed as call arguments here.
    ASSERT(!argument->IsDoubleRegister());
    __ push(ToOperand(argument));
  }
}
1810 2248
1811 2249
// Materializes the current context (read from the frame) into a register.
void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset));
}
2254
2255
// Loads the context enclosing the given one, by going through the context's
// closure and taking that function's context.
void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ movq(result,
          Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
  __ movq(result, FieldOperand(result, JSFunction::kContextOffset));
}
2263
2264
// Materializes the global object (via the current context in rsi).
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register result = ToRegister(instr->result());
  __ movq(result, GlobalObjectOperand());
}
1816 2269
1817 2270
1818 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { 2271 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
1819 Register result = ToRegister(instr->result()); 2272 Register result = ToRegister(instr->result());
1820 __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); 2273 __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1821 __ movq(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset)); 2274 __ movq(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset));
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
1859 2312
1860 2313
// Direct call to a statically known function; rdi holds the callee per the
// x64 calling convention, result comes back in rax.
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  __ Move(rdi, instr->function());
  CallKnownFunction(instr->function(), instr->arity(), instr);
}
1866 2319
1867 2320
// Deferred path of Math.abs for a tagged, non-Smi input. Deoptimizes unless
// the input is a heap number; for a negative number, allocates a fresh heap
// number holding the value with the sign bit cleared. The result replaces
// the input in its safepoint slot (input and result share a register).
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  DeoptimizeIf(not_equal, instr->environment());

  Label done;
  // Pick two scratch registers distinct from input_reg (and each other).
  Register tmp = input_reg.is(rax) ? rcx : rax;
  Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx;

  // Preserve the value of all registers.
  __ PushSafepointRegisters();

  Label negative;
  __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| will be restored
  // unchanged by popping safepoint registers.
  __ testl(tmp, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative);
  __ jmp(&done);

  __ bind(&negative);

  Label allocated, slow;
  __ AllocateHeapNumber(tmp, tmp2, &slow);
  __ jmp(&allocated);

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  // Set the pointer to the new heap number in tmp.
  if (!tmp.is(rax)) {
    __ movq(tmp, rax);
  }

  // Restore input_reg after call to runtime.
  __ LoadFromSafepointRegisterSlot(input_reg, input_reg);

  __ bind(&allocated);
  // Clear the sign bit: shift the 64-bit payload left then (logically)
  // right by one, then store it into the freshly allocated heap number.
  __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ shl(tmp2, Immediate(1));
  __ shr(tmp2, Immediate(1));
  __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
  __ StoreToSafepointRegisterSlot(input_reg, tmp);

  __ bind(&done);
  __ PopSafepointRegisters();
}
2374
2375
// Math.abs for an untagged int32 input, in place. Negating kMinInt leaves
// the sign flag set, so that case deoptimizes (the result would not fit).
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  __ testl(input_reg, input_reg);
  Label is_positive;
  __ j(not_sign, &is_positive);
  __ negl(input_reg);  // Sets flags.
  DeoptimizeIf(negative, instr->environment());
  __ bind(&is_positive);
}
1871 2385
1872 2386
// Math.abs dispatcher: bit-mask trick for doubles, in-place negate for
// int32, and for tagged values a Smi fast path with a deferred heap-number
// slow path. Input and result share a register.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();

  if (r.IsDouble()) {
    // |x| = x & ~signbit, computed here as x & -(0.0 - x)'s bit pattern:
    // scratch = 0 - x flips the sign bit, andpd keeps the magnitude bits.
    XMMRegister scratch = xmm0;
    XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
    __ xorpd(scratch, scratch);
    __ subsd(scratch, input_reg);
    __ andpd(input_reg, scratch);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {  // Tagged case.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input_reg = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input_reg, deferred->entry());
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
1876 2422
1877 2423
// Math.floor via truncating conversion. Negative inputs (and -0 when the
// hydrogen value requires distinguishing it) deoptimize, so truncation
// toward zero equals floor on the remaining range.
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
  __ ucomisd(input_reg, xmm_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Also bail out on exactly zero: it could be -0, which truncation
    // cannot represent as an integer.
    DeoptimizeIf(below_equal, instr->environment());
  } else {
    DeoptimizeIf(below, instr->environment());
  }

  // Use truncating instruction (OK because input is positive).
  __ cvttsd2si(output_reg, input_reg);

  // Overflow is signalled with minint.
  __ cmpl(output_reg, Immediate(0x80000000));
  DeoptimizeIf(equal, instr->environment());
}
1881 2444
1882 2445
// Math.round as floor(x + 0.5), using truncating conversion after the
// add. Inputs that would round to a negative value (or to -0, when that
// must be preserved) deoptimize instead.
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  const XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));

  // xmm_scratch = 0.5 (IEEE-754 bit pattern loaded via kScratchRegister).
  __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
  __ movq(xmm_scratch, kScratchRegister);

  // input = input + 0.5
  __ addsd(input_reg, xmm_scratch);

  // We need to return -0 for the input range [-0.5, 0[, otherwise
  // compute Math.floor(value + 0.5).
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below_equal, instr->environment());
  } else {
    // If we don't need to bailout on -0, we check only bailout
    // on negative inputs.
    __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below, instr->environment());
  }

  // Compute Math.floor(value + 0.5).
  // Use truncating instruction (OK because input is positive).
  __ cvttsd2si(output_reg, input_reg);

  // Overflow is signalled with minint.
  __ cmpl(output_reg, Immediate(0x80000000));
  DeoptimizeIf(equal, instr->environment());
}
1886 2479
1887 2480
// Math.sqrt via the hardware sqrtsd instruction, computed in place.
void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
  __ sqrtsd(input_reg, input_reg);
}
1891 2486
1892 2487
// Math.pow(x, 0.5) as sqrt(x). Adding +0 first normalizes -0 to +0 so that
// pow(-0, 0.5) yields +0 as the spec requires.
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
  __ xorpd(xmm_scratch, xmm_scratch);
  __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
  __ sqrtsd(input_reg, input_reg);
}
1896 2496
1897 2497
// Math.pow via C runtime helpers, dispatching on the exponent's
// representation: double exponent -> power_double_double, int32 exponent ->
// power_double_int, tagged exponent -> unbox (Smi or heap number, else
// deopt) then power_double_double. Result is moved from xmm0.
void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  XMMRegister left_reg = ToDoubleRegister(left);
  // xmm1 is reserved for the (possibly unboxed) right operand below.
  ASSERT(!left_reg.is(xmm1));
  LOperand* right = instr->InputAt(1);
  XMMRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();
  if (exponent_type.IsDouble()) {
    __ PrepareCallCFunction(2);
    // Move arguments to correct registers
    __ movsd(xmm0, left_reg);
    ASSERT(ToDoubleRegister(right).is(xmm1));
    __ CallCFunction(ExternalReference::power_double_double_function(), 2);
  } else if (exponent_type.IsInteger32()) {
    __ PrepareCallCFunction(2);
    // Move arguments to correct registers: xmm0 and edi (not rdi).
    // On Windows, the registers are xmm0 and edx.
    __ movsd(xmm0, left_reg);
#ifdef _WIN64
    ASSERT(ToRegister(right).is(rdx));
#else
    ASSERT(ToRegister(right).is(rdi));
#endif
    __ CallCFunction(ExternalReference::power_double_int_function(), 2);
  } else {
    ASSERT(exponent_type.IsTagged());
    CpuFeatures::Scope scope(SSE2);
    Register right_reg = ToRegister(right);

    Label non_smi, call;
    __ JumpIfNotSmi(right_reg, &non_smi);
    __ SmiToInteger32(right_reg, right_reg);
    __ cvtlsi2sd(xmm1, right_reg);
    __ jmp(&call);

    // Not a Smi: must be a heap number or we deoptimize.
    __ bind(&non_smi);
    __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , kScratchRegister);
    DeoptimizeIf(not_equal, instr->environment());
    __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset));

    __ bind(&call);
    __ PrepareCallCFunction(2);
    // Move arguments to correct registers xmm0 and xmm1.
    __ movsd(xmm0, left_reg);
    // Right argument is already in xmm1.
    __ CallCFunction(ExternalReference::power_double_double_function(), 2);
  }
  // Return value is in xmm0.
  __ movsd(result_reg, xmm0);
}
1901 2548
1902 2549
// Math.log via the transcendental-cache stub operating on an untagged
// double; the stub's convention puts the result in xmm1.
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
1906 2556
1907 2557
1908 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { 2558 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
1909 Abort("Unimplemented: %s", "DoMathCos"); 2559 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2560 TranscendentalCacheStub stub(TranscendentalCache::LOG,
2561 TranscendentalCacheStub::UNTAGGED);
2562 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1910 } 2563 }
1911 2564
1912 2565
1913 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { 2566 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
1914 Abort("Unimplemented: %s", "DoMathSin"); 2567 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2568 TranscendentalCacheStub stub(TranscendentalCache::LOG,
2569 TranscendentalCacheStub::UNTAGGED);
2570 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1915 } 2571 }
1916 2572
1917 2573
// Dispatches a unary math Lithium instruction to its dedicated emitter
// based on the built-in math function id.
void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;

    default:
      UNREACHABLE();
  }
}
1921 2605
1922 2606
// Call through a keyed-call IC. Convention: key in rcx, result in rax.
// The context in rsi is restored from the frame after the call.
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->key()).is(rcx));
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
      arity, NOT_IN_LOOP);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
1926 2617
1927 2618
// Call through a named-call IC. Convention: name in rcx, result in rax.
// The context in rsi is restored from the frame after the call.
void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
      arity, NOT_IN_LOOP);
  __ Move(rcx, instr->name());
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
1931 2629
1932 2630
// Call of a function expression via CallFunctionStub; the callee was pushed
// as an argument, so one stack slot is dropped after the call.
void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ Drop(1);
}
1936 2640
1937 2641
1938 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 2642 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
1939 Abort("Unimplemented: %s", "DoCallGlobal"); 2643 ASSERT(ToRegister(instr->result()).is(rax));
1940 } 2644 int arity = instr->arity();
1941 2645 Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
1942 2646 arity, NOT_IN_LOOP);
2647 __ Move(rcx, instr->name());
2648 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2649 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2650 }
2651
2652
1943 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 2653 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
1944 ASSERT(ToRegister(instr->result()).is(rax)); 2654 ASSERT(ToRegister(instr->result()).is(rax));
1945 __ Move(rdi, instr->target()); 2655 __ Move(rdi, instr->target());
1946 CallKnownFunction(instr->target(), instr->arity(), instr); 2656 CallKnownFunction(instr->target(), instr->arity(), instr);
1947 } 2657 }
1948 2658
1949 2659
1950 void LCodeGen::DoCallNew(LCallNew* instr) { 2660 void LCodeGen::DoCallNew(LCallNew* instr) {
1951 ASSERT(ToRegister(instr->InputAt(0)).is(rdi)); 2661 ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
1952 ASSERT(ToRegister(instr->result()).is(rax)); 2662 ASSERT(ToRegister(instr->result()).is(rax));
1953 2663
1954 Handle<Code> builtin(isolate()->builtins()->builtin( 2664 Handle<Code> builtin(isolate()->builtins()->builtin(
1955 Builtins::JSConstructCall)); 2665 Builtins::JSConstructCall));
1956 __ Set(rax, instr->arity()); 2666 __ Set(rax, instr->arity());
1957 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); 2667 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
1958 } 2668 }
1959 2669
1960 2670
1961 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 2671 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
1962 Abort("Unimplemented: %s", "DoCallRuntime"); 2672 CallRuntime(instr->function(), instr->arity(), instr);
1963 } 2673 }
1964 2674
1965 2675
1966 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 2676 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
1967 Register object = ToRegister(instr->object()); 2677 Register object = ToRegister(instr->object());
1968 Register value = ToRegister(instr->value()); 2678 Register value = ToRegister(instr->value());
1969 int offset = instr->offset(); 2679 int offset = instr->offset();
1970 2680
1971 if (!instr->transition().is_null()) { 2681 if (!instr->transition().is_null()) {
1972 __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); 2682 __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
(...skipping 14 matching lines...) Expand all
1987 if (instr->needs_write_barrier()) { 2697 if (instr->needs_write_barrier()) {
1988 // Update the write barrier for the properties array. 2698 // Update the write barrier for the properties array.
1989 // object is used as a scratch register. 2699 // object is used as a scratch register.
1990 __ RecordWrite(temp, offset, value, object); 2700 __ RecordWrite(temp, offset, value, object);
1991 } 2701 }
1992 } 2702 }
1993 } 2703 }
1994 2704
1995 2705
1996 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 2706 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
1997 Abort("Unimplemented: %s", "DoStoreNamedGeneric"); 2707 ASSERT(ToRegister(instr->object()).is(rdx));
2708 ASSERT(ToRegister(instr->value()).is(rax));
2709
2710 __ Move(rcx, instr->hydrogen()->name());
2711 Handle<Code> ic(isolate()->builtins()->builtin(
2712 info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
2713 : Builtins::StoreIC_Initialize));
2714 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2715 }
2716
2717
2718 void LCodeGen::DoStorePixelArrayElement(LStorePixelArrayElement* instr) {
2719 Register external_pointer = ToRegister(instr->external_pointer());
2720 Register key = ToRegister(instr->key());
2721 Register value = ToRegister(instr->value());
2722
2723 { // Clamp the value to [0..255].
2724 NearLabel done;
2725 __ testl(value, Immediate(0xFFFFFF00));
2726 __ j(zero, &done);
2727 __ setcc(negative, value); // 1 if negative, 0 if positive.
2728 __ decb(value); // 0 if negative, 255 if positive.
2729 __ bind(&done);
2730 }
2731
2732 __ movb(Operand(external_pointer, key, times_1, 0), value);
1998 } 2733 }
1999 2734
2000 2735
2001 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 2736 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2002 if (instr->length()->IsRegister()) { 2737 if (instr->length()->IsRegister()) {
2003 __ cmpq(ToRegister(instr->index()), ToRegister(instr->length())); 2738 __ cmpq(ToRegister(instr->index()), ToRegister(instr->length()));
2004 } else { 2739 } else {
2005 __ cmpq(ToRegister(instr->index()), ToOperand(instr->length())); 2740 __ cmpq(ToRegister(instr->index()), ToOperand(instr->length()));
2006 } 2741 }
2007 DeoptimizeIf(above_equal, instr->environment()); 2742 DeoptimizeIf(above_equal, instr->environment());
(...skipping 25 matching lines...) Expand all
2033 __ lea(key, FieldOperand(elements, 2768 __ lea(key, FieldOperand(elements,
2034 key, 2769 key,
2035 times_pointer_size, 2770 times_pointer_size,
2036 FixedArray::kHeaderSize)); 2771 FixedArray::kHeaderSize));
2037 __ RecordWrite(elements, key, value); 2772 __ RecordWrite(elements, key, value);
2038 } 2773 }
2039 } 2774 }
2040 2775
2041 2776
2042 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 2777 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2043 Abort("Unimplemented: %s", "DoStoreKeyedGeneric"); 2778 ASSERT(ToRegister(instr->object()).is(rdx));
2779 ASSERT(ToRegister(instr->key()).is(rcx));
2780 ASSERT(ToRegister(instr->value()).is(rax));
2781
2782 Handle<Code> ic(isolate()->builtins()->builtin(
2783 info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
2784 : Builtins::KeyedStoreIC_Initialize));
2785 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2786 }
2787
2788
2789 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
2790 class DeferredStringCharCodeAt: public LDeferredCode {
2791 public:
2792 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
2793 : LDeferredCode(codegen), instr_(instr) { }
2794 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
2795 private:
2796 LStringCharCodeAt* instr_;
2797 };
2798
2799 Register string = ToRegister(instr->string());
2800 Register index = no_reg;
2801 int const_index = -1;
2802 if (instr->index()->IsConstantOperand()) {
2803 const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2804 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2805 if (!Smi::IsValid(const_index)) {
2806 // Guaranteed to be out of bounds because of the assert above.
2807 // So the bounds check that must dominate this instruction must
2808 // have deoptimized already.
2809 if (FLAG_debug_code) {
2810 __ Abort("StringCharCodeAt: out of bounds index.");
2811 }
2812 // No code needs to be generated.
2813 return;
2814 }
2815 } else {
2816 index = ToRegister(instr->index());
2817 }
2818 Register result = ToRegister(instr->result());
2819
2820 DeferredStringCharCodeAt* deferred =
2821 new DeferredStringCharCodeAt(this, instr);
2822
2823 NearLabel flat_string, ascii_string, done;
2824
2825 // Fetch the instance type of the receiver into result register.
2826 __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
2827 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
2828
2829 // We need special handling for non-sequential strings.
2830 STATIC_ASSERT(kSeqStringTag == 0);
2831 __ testb(result, Immediate(kStringRepresentationMask));
2832 __ j(zero, &flat_string);
2833
2834 // Handle cons strings and go to deferred code for the rest.
2835 __ testb(result, Immediate(kIsConsStringMask));
2836 __ j(zero, deferred->entry());
2837
2838 // ConsString.
2839 // Check whether the right hand side is the empty string (i.e. if
2840 // this is really a flat string in a cons string). If that is not
2841 // the case we would rather go to the runtime system now to flatten
2842 // the string.
2843 __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
2844 Heap::kEmptyStringRootIndex);
2845 __ j(not_equal, deferred->entry());
2846 // Get the first of the two strings and load its instance type.
2847 __ movq(string, FieldOperand(string, ConsString::kFirstOffset));
2848 __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
2849 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
2850 // If the first cons component is also non-flat, then go to runtime.
2851 STATIC_ASSERT(kSeqStringTag == 0);
2852 __ testb(result, Immediate(kStringRepresentationMask));
2853 __ j(not_zero, deferred->entry());
2854
2855 // Check for ASCII or two-byte string.
2856 __ bind(&flat_string);
2857 STATIC_ASSERT(kAsciiStringTag != 0);
2858 __ testb(result, Immediate(kStringEncodingMask));
2859 __ j(not_zero, &ascii_string);
2860
2861 // Two-byte string.
2862 // Load the two-byte character code into the result register.
2863 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
2864 if (instr->index()->IsConstantOperand()) {
2865 __ movzxwl(result,
2866 FieldOperand(string,
2867 SeqTwoByteString::kHeaderSize +
2868 (kUC16Size * const_index)));
2869 } else {
2870 __ movzxwl(result, FieldOperand(string,
2871 index,
2872 times_2,
2873 SeqTwoByteString::kHeaderSize));
2874 }
2875 __ jmp(&done);
2876
2877 // ASCII string.
2878 // Load the byte into the result register.
2879 __ bind(&ascii_string);
2880 if (instr->index()->IsConstantOperand()) {
2881 __ movzxbl(result, FieldOperand(string,
2882 SeqAsciiString::kHeaderSize + const_index));
2883 } else {
2884 __ movzxbl(result, FieldOperand(string,
2885 index,
2886 times_1,
2887 SeqAsciiString::kHeaderSize));
2888 }
2889 __ bind(&done);
2890 __ bind(deferred->exit());
2891 }
2892
2893
2894 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
2895 Register string = ToRegister(instr->string());
2896 Register result = ToRegister(instr->result());
2897
2898 // TODO(3095996): Get rid of this. For now, we need to make the
2899 // result register contain a valid pointer because it is already
2900 // contained in the register pointer map.
2901 __ Set(result, 0);
2902
2903 __ PushSafepointRegisters();
2904 __ push(string);
2905 // Push the index as a smi. This is safe because of the checks in
2906 // DoStringCharCodeAt above.
2907 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2908 if (instr->index()->IsConstantOperand()) {
2909 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2910 __ Push(Smi::FromInt(const_index));
2911 } else {
2912 Register index = ToRegister(instr->index());
2913 __ Integer32ToSmi(index, index);
2914 __ push(index);
2915 }
2916 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2917 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
2918 RecordSafepointWithRegisters(
2919 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
2920 if (FLAG_debug_code) {
2921 __ AbortIfNotSmi(rax);
2922 }
2923 __ SmiToInteger32(rax, rax);
2924 __ StoreToSafepointRegisterSlot(result, rax);
2925 __ PopSafepointRegisters();
2926 }
2927
2928
2929 void LCodeGen::DoStringLength(LStringLength* instr) {
2930 Register string = ToRegister(instr->string());
2931 Register result = ToRegister(instr->result());
2932 __ movq(result, FieldOperand(string, String::kLengthOffset));
2044 } 2933 }
2045 2934
2046 2935
2047 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 2936 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
2048 LOperand* input = instr->InputAt(0); 2937 LOperand* input = instr->InputAt(0);
2049 ASSERT(input->IsRegister() || input->IsStackSlot()); 2938 ASSERT(input->IsRegister() || input->IsStackSlot());
2050 LOperand* output = instr->result(); 2939 LOperand* output = instr->result();
2051 ASSERT(output->IsDoubleRegister()); 2940 ASSERT(output->IsDoubleRegister());
2052 __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input)); 2941 if (input->IsRegister()) {
2942 __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
2943 } else {
2944 __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
2945 }
2053 } 2946 }
2054 2947
2055 2948
2056 void LCodeGen::DoNumberTagI(LNumberTagI* instr) { 2949 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
2057 LOperand* input = instr->InputAt(0); 2950 LOperand* input = instr->InputAt(0);
2058 ASSERT(input->IsRegister() && input->Equals(instr->result())); 2951 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2059 Register reg = ToRegister(input); 2952 Register reg = ToRegister(input);
2060 2953
2061 __ Integer32ToSmi(reg, reg); 2954 __ Integer32ToSmi(reg, reg);
2062 } 2955 }
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after
2145 __ divsd(result_reg, result_reg); 3038 __ divsd(result_reg, result_reg);
2146 __ jmp(&done); 3039 __ jmp(&done);
2147 3040
2148 // Heap number to XMM conversion. 3041 // Heap number to XMM conversion.
2149 __ bind(&heap_number); 3042 __ bind(&heap_number);
2150 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); 3043 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
2151 __ jmp(&done); 3044 __ jmp(&done);
2152 3045
2153 // Smi to XMM conversion 3046 // Smi to XMM conversion
2154 __ bind(&load_smi); 3047 __ bind(&load_smi);
2155 __ SmiToInteger32(kScratchRegister, input_reg); // Untag smi first. 3048 __ SmiToInteger32(kScratchRegister, input_reg);
2156 __ cvtlsi2sd(result_reg, kScratchRegister); 3049 __ cvtlsi2sd(result_reg, kScratchRegister);
2157 __ bind(&done); 3050 __ bind(&done);
2158 } 3051 }
2159 3052
2160 3053
2161 class DeferredTaggedToI: public LDeferredCode { 3054 class DeferredTaggedToI: public LDeferredCode {
2162 public: 3055 public:
2163 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) 3056 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
2164 : LDeferredCode(codegen), instr_(instr) { } 3057 : LDeferredCode(codegen), instr_(instr) { }
2165 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); } 3058 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
2222 3115
2223 Register input_reg = ToRegister(input); 3116 Register input_reg = ToRegister(input);
2224 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr); 3117 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
2225 __ JumpIfNotSmi(input_reg, deferred->entry()); 3118 __ JumpIfNotSmi(input_reg, deferred->entry());
2226 __ SmiToInteger32(input_reg, input_reg); 3119 __ SmiToInteger32(input_reg, input_reg);
2227 __ bind(deferred->exit()); 3120 __ bind(deferred->exit());
2228 } 3121 }
2229 3122
2230 3123
2231 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { 3124 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
2232 Abort("Unimplemented: %s", "DoNumberUntagD"); 3125 LOperand* input = instr->InputAt(0);
3126 ASSERT(input->IsRegister());
3127 LOperand* result = instr->result();
3128 ASSERT(result->IsDoubleRegister());
3129
3130 Register input_reg = ToRegister(input);
3131 XMMRegister result_reg = ToDoubleRegister(result);
3132
3133 EmitNumberUntagD(input_reg, result_reg, instr->environment());
2233 } 3134 }
2234 3135
2235 3136
2236 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { 3137 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
2237 Abort("Unimplemented: %s", "DoDoubleToI"); 3138 LOperand* input = instr->InputAt(0);
3139 ASSERT(input->IsDoubleRegister());
3140 LOperand* result = instr->result();
3141 ASSERT(result->IsRegister());
3142
3143 XMMRegister input_reg = ToDoubleRegister(input);
3144 Register result_reg = ToRegister(result);
3145
3146 if (instr->truncating()) {
3147 // Performs a truncating conversion of a floating point number as used by
3148 // the JS bitwise operations.
3149 __ cvttsd2siq(result_reg, input_reg);
3150 __ movq(kScratchRegister, V8_INT64_C(0x8000000000000000), RelocInfo::NONE);
3151 __ cmpl(result_reg, kScratchRegister);
3152 DeoptimizeIf(equal, instr->environment());
3153 } else {
3154 __ cvttsd2si(result_reg, input_reg);
3155 __ cvtlsi2sd(xmm0, result_reg);
3156 __ ucomisd(xmm0, input_reg);
3157 DeoptimizeIf(not_equal, instr->environment());
3158 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3159 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3160 NearLabel done;
3161 // The integer converted back is equal to the original. We
3162 // only have to test if we got -0 as an input.
3163 __ testl(result_reg, result_reg);
3164 __ j(not_zero, &done);
3165 __ movmskpd(result_reg, input_reg);
3166 // Bit 0 contains the sign of the double in input_reg.
3167 // If input was positive, we are ok and return 0, otherwise
3168 // deoptimize.
3169 __ andl(result_reg, Immediate(1));
3170 DeoptimizeIf(not_zero, instr->environment());
3171 __ bind(&done);
3172 }
3173 }
2238 } 3174 }
2239 3175
2240 3176
2241 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { 3177 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
2242 LOperand* input = instr->InputAt(0); 3178 LOperand* input = instr->InputAt(0);
2243 ASSERT(input->IsRegister()); 3179 ASSERT(input->IsRegister());
2244 Condition cc = masm()->CheckSmi(ToRegister(input)); 3180 Condition cc = masm()->CheckSmi(ToRegister(input));
2245 if (instr->condition() != equal) { 3181 if (instr->condition() != equal) {
2246 cc = NegateCondition(cc); 3182 cc = NegateCondition(cc);
2247 } 3183 }
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after
2376 // Pick the right runtime function to call. 3312 // Pick the right runtime function to call.
2377 if (instr->hydrogen()->depth() > 1) { 3313 if (instr->hydrogen()->depth() > 1) {
2378 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); 3314 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
2379 } else { 3315 } else {
2380 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); 3316 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
2381 } 3317 }
2382 } 3318 }
2383 3319
2384 3320
2385 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 3321 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
2386 Abort("Unimplemented: %s", "DoRegExpLiteral"); 3322 NearLabel materialized;
3323 // Registers will be used as follows:
3324 // rdi = JS function.
3325 // rcx = literals array.
3326 // rbx = regexp literal.
3327 // rax = regexp literal clone.
3328 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
3329 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
3330 int literal_offset = FixedArray::kHeaderSize +
3331 instr->hydrogen()->literal_index() * kPointerSize;
3332 __ movq(rbx, FieldOperand(rcx, literal_offset));
3333 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
3334 __ j(not_equal, &materialized);
3335
3336 // Create regexp literal using runtime function
3337 // Result will be in rax.
3338 __ push(rcx);
3339 __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
3340 __ Push(instr->hydrogen()->pattern());
3341 __ Push(instr->hydrogen()->flags());
3342 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
3343 __ movq(rbx, rax);
3344
3345 __ bind(&materialized);
3346 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3347 Label allocated, runtime_allocate;
3348 __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
3349 __ jmp(&allocated);
3350
3351 __ bind(&runtime_allocate);
3352 __ push(rbx);
3353 __ Push(Smi::FromInt(size));
3354 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
3355 __ pop(rbx);
3356
3357 __ bind(&allocated);
3358 // Copy the content into the newly allocated memory.
3359 // (Unroll copy loop once for better throughput).
3360 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3361 __ movq(rdx, FieldOperand(rbx, i));
3362 __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
3363 __ movq(FieldOperand(rax, i), rdx);
3364 __ movq(FieldOperand(rax, i + kPointerSize), rcx);
3365 }
3366 if ((size % (2 * kPointerSize)) != 0) {
3367 __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
3368 __ movq(FieldOperand(rax, size - kPointerSize), rdx);
3369 }
2387 } 3370 }
2388 3371
2389 3372
2390 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 3373 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
2391 // Use the fast case closure allocation code that allocates in new 3374 // Use the fast case closure allocation code that allocates in new
2392 // space for nested functions that don't need literals cloning. 3375 // space for nested functions that don't need literals cloning.
2393 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); 3376 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
2394 bool pretenure = instr->hydrogen()->pretenure(); 3377 bool pretenure = instr->hydrogen()->pretenure();
2395 if (shared_info->num_literals() == 0 && !pretenure) { 3378 if (shared_info->num_literals() == 0 && !pretenure) {
2396 FastNewClosureStub stub; 3379 FastNewClosureStub stub;
2397 __ Push(shared_info); 3380 __ Push(shared_info);
2398 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3381 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2399 } else { 3382 } else {
2400 __ push(rsi); 3383 __ push(rsi);
2401 __ Push(shared_info); 3384 __ Push(shared_info);
2402 __ Push(pretenure ? FACTORY->true_value() : FACTORY->false_value()); 3385 __ Push(pretenure ? FACTORY->true_value() : FACTORY->false_value());
2403 CallRuntime(Runtime::kNewClosure, 3, instr); 3386 CallRuntime(Runtime::kNewClosure, 3, instr);
2404 } 3387 }
2405 } 3388 }
2406 3389
2407 3390
2408 void LCodeGen::DoTypeof(LTypeof* instr) { 3391 void LCodeGen::DoTypeof(LTypeof* instr) {
2409 Abort("Unimplemented: %s", "DoTypeof"); 3392 LOperand* input = instr->InputAt(0);
3393 if (input->IsConstantOperand()) {
3394 __ Push(ToHandle(LConstantOperand::cast(input)));
3395 } else if (input->IsRegister()) {
3396 __ push(ToRegister(input));
3397 } else {
3398 ASSERT(input->IsStackSlot());
3399 __ push(ToOperand(input));
3400 }
3401 CallRuntime(Runtime::kTypeof, 1, instr);
2410 } 3402 }
2411 3403
2412 3404
2413 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { 3405 void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
2414 Abort("Unimplemented: %s", "DoTypeofIs"); 3406 Register input = ToRegister(instr->InputAt(0));
2415 }
2416
2417
2418 void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
2419 Register result = ToRegister(instr->result()); 3407 Register result = ToRegister(instr->result());
2420 NearLabel true_label; 3408 Label true_label;
2421 NearLabel false_label; 3409 Label false_label;
2422 NearLabel done; 3410 NearLabel done;
2423 3411
2424 EmitIsConstructCall(result); 3412 Condition final_branch_condition = EmitTypeofIs(&true_label,
2425 __ j(equal, &true_label); 3413 &false_label,
2426 3414 input,
3415 instr->type_literal());
3416 __ j(final_branch_condition, &true_label);
3417 __ bind(&false_label);
2427 __ LoadRoot(result, Heap::kFalseValueRootIndex); 3418 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2428 __ jmp(&done); 3419 __ jmp(&done);
2429 3420
2430 __ bind(&true_label); 3421 __ bind(&true_label);
2431 __ LoadRoot(result, Heap::kTrueValueRootIndex); 3422 __ LoadRoot(result, Heap::kTrueValueRootIndex);
2432 3423
2433
2434 __ bind(&done); 3424 __ bind(&done);
2435 } 3425 }
2436 3426
2437 3427
2438 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { 3428 void LCodeGen::EmitPushConstantOperand(LOperand* operand) {
2439 Register temp = ToRegister(instr->TempAt(0)); 3429 ASSERT(operand->IsConstantOperand());
2440 int true_block = chunk_->LookupDestination(instr->true_block_id()); 3430 LConstantOperand* const_op = LConstantOperand::cast(operand);
2441 int false_block = chunk_->LookupDestination(instr->false_block_id()); 3431 Handle<Object> literal = chunk_->LookupLiteral(const_op);
2442 3432 Representation r = chunk_->LookupLiteralRepresentation(const_op);
2443 EmitIsConstructCall(temp); 3433 if (r.IsInteger32()) {
2444 EmitBranch(true_block, false_block, equal); 3434 ASSERT(literal->IsNumber());
2445 } 3435 __ push(Immediate(static_cast<int32_t>(literal->Number())));
2446 3436 } else if (r.IsDouble()) {
2447 3437 Abort("unsupported double immediate");
2448 void LCodeGen::EmitIsConstructCall(Register temp) { 3438 } else {
2449 // Get the frame pointer for the calling frame. 3439 ASSERT(r.IsTagged());
2450 __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 3440 __ Push(literal);
2451 3441 }
2452 // Skip the arguments adaptor frame if it exists.
2453 NearLabel check_frame_marker;
2454 __ SmiCompare(Operand(temp, StandardFrameConstants::kContextOffset),
2455 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2456 __ j(not_equal, &check_frame_marker);
2457 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
2458
2459 // Check the marker in the calling frame.
2460 __ bind(&check_frame_marker);
2461 __ SmiCompare(Operand(temp, StandardFrameConstants::kMarkerOffset),
2462 Smi::FromInt(StackFrame::CONSTRUCT));
2463 } 3442 }
2464 3443
2465 3444
2466 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { 3445 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
2467 Register input = ToRegister(instr->InputAt(0)); 3446 Register input = ToRegister(instr->InputAt(0));
2468 int true_block = chunk_->LookupDestination(instr->true_block_id()); 3447 int true_block = chunk_->LookupDestination(instr->true_block_id());
2469 int false_block = chunk_->LookupDestination(instr->false_block_id()); 3448 int false_block = chunk_->LookupDestination(instr->false_block_id());
2470 Label* true_label = chunk_->GetAssemblyLabel(true_block); 3449 Label* true_label = chunk_->GetAssemblyLabel(true_block);
2471 Label* false_label = chunk_->GetAssemblyLabel(false_block); 3450 Label* false_label = chunk_->GetAssemblyLabel(false_block);
2472 3451
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after
2536 3515
2537 } else { 3516 } else {
2538 final_branch_condition = never; 3517 final_branch_condition = never;
2539 __ jmp(false_label); 3518 __ jmp(false_label);
2540 } 3519 }
2541 3520
2542 return final_branch_condition; 3521 return final_branch_condition;
2543 } 3522 }
2544 3523
2545 3524
3525 void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
3526 Register result = ToRegister(instr->result());
3527 NearLabel true_label;
3528 NearLabel false_label;
3529 NearLabel done;
3530
3531 EmitIsConstructCall(result);
3532 __ j(equal, &true_label);
3533
3534 __ LoadRoot(result, Heap::kFalseValueRootIndex);
3535 __ jmp(&done);
3536
3537 __ bind(&true_label);
3538 __ LoadRoot(result, Heap::kTrueValueRootIndex);
3539
3540
3541 __ bind(&done);
3542 }
3543
3544
3545 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
3546 Register temp = ToRegister(instr->TempAt(0));
3547 int true_block = chunk_->LookupDestination(instr->true_block_id());
3548 int false_block = chunk_->LookupDestination(instr->false_block_id());
3549
3550 EmitIsConstructCall(temp);
3551 EmitBranch(true_block, false_block, equal);
3552 }
3553
3554
3555 void LCodeGen::EmitIsConstructCall(Register temp) {
3556 // Get the frame pointer for the calling frame.
3557 __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3558
3559 // Skip the arguments adaptor frame if it exists.
3560 NearLabel check_frame_marker;
3561 __ SmiCompare(Operand(temp, StandardFrameConstants::kContextOffset),
3562 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3563 __ j(not_equal, &check_frame_marker);
3564 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3565
3566 // Check the marker in the calling frame.
3567 __ bind(&check_frame_marker);
3568 __ SmiCompare(Operand(temp, StandardFrameConstants::kMarkerOffset),
3569 Smi::FromInt(StackFrame::CONSTRUCT));
3570 }
3571
3572
2546 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 3573 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
2547 // No code for lazy bailout instruction. Used to capture environment after a 3574 // No code for lazy bailout instruction. Used to capture environment after a
2548 // call for populating the safepoint data with deoptimization data. 3575 // call for populating the safepoint data with deoptimization data.
2549 } 3576 }
2550 3577
2551 3578
2552 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 3579 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
2553 DeoptimizeIf(no_condition, instr->environment()); 3580 DeoptimizeIf(no_condition, instr->environment());
2554 } 3581 }
2555 3582
2556 3583
2557 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { 3584 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
2558 Abort("Unimplemented: %s", "DoDeleteProperty"); 3585 LOperand* obj = instr->object();
3586 LOperand* key = instr->key();
3587 // Push object.
3588 if (obj->IsRegister()) {
3589 __ push(ToRegister(obj));
3590 } else {
3591 __ push(ToOperand(obj));
3592 }
3593 // Push key.
3594 if (key->IsConstantOperand()) {
3595 EmitPushConstantOperand(key);
3596 } else if (key->IsRegister()) {
3597 __ push(ToRegister(key));
3598 } else {
3599 __ push(ToOperand(key));
3600 }
3601 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
3602 LPointerMap* pointers = instr->pointer_map();
3603 LEnvironment* env = instr->deoptimization_environment();
3604 RecordPosition(pointers->position());
3605 RegisterEnvironmentForDeoptimization(env);
3606 // Create safepoint generator that will also ensure enough space in the
3607 // reloc info for patching in deoptimization (since this is invoking a
3608 // builtin)
3609 SafepointGenerator safepoint_generator(this,
3610 pointers,
3611 env->deoptimization_index(),
3612 true);
3613 __ Push(Smi::FromInt(strict_mode_flag()));
3614 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
2559 } 3615 }
2560 3616
2561 3617
2562 void LCodeGen::DoStackCheck(LStackCheck* instr) { 3618 void LCodeGen::DoStackCheck(LStackCheck* instr) {
2563 // Perform stack overflow check. 3619 // Perform stack overflow check.
2564 NearLabel done; 3620 NearLabel done;
2565 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 3621 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
2566 __ j(above_equal, &done); 3622 __ j(above_equal, &done);
2567 3623
2568 StackCheckStub stub; 3624 StackCheckStub stub;
2569 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3625 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2570 __ bind(&done); 3626 __ bind(&done);
2571 } 3627 }
2572 3628
2573 3629
2574 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 3630 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
2575 Abort("Unimplemented: %s", "DoOsrEntry"); 3631 // This is a pseudo-instruction that ensures that the environment here is
3632 // properly registered for deoptimization and records the assembler's PC
3633 // offset.
3634 LEnvironment* environment = instr->environment();
3635 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
3636 instr->SpilledDoubleRegisterArray());
3637
3638 // If the environment were already registered, we would have no way of
3639 // backpatching it with the spill slot operands.
3640 ASSERT(!environment->HasBeenRegistered());
3641 RegisterEnvironmentForDeoptimization(environment);
3642 ASSERT(osr_pc_offset_ == -1);
3643 osr_pc_offset_ = masm()->pc_offset();
2576 } 3644 }
2577 3645
2578 #undef __ 3646 #undef __
2579 3647
2580 } } // namespace v8::internal 3648 } } // namespace v8::internal
2581 3649
2582 #endif // V8_TARGET_ARCH_X64 3650 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/lithium-codegen-x64.h ('k') | src/x64/lithium-x64.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698