Chromium Code Reviews

Diff: src/x64/lithium-codegen-x64.cc

Issue 149413010: A64: Synchronize with r16024. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 78 matching lines...)
   code->set_stack_slots(GetStackSlotCount());
   code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
   if (FLAG_weak_embedded_maps_in_optimized_code) {
     RegisterDependentCodeForEmbeddedMaps(code);
   }
   PopulateDeoptimizationData(code);
   info()->CommitDependencies(code);
 }


-void LChunkBuilder::Abort(const char* reason) {
+void LChunkBuilder::Abort(BailoutReason reason) {
   info()->set_bailout_reason(reason);
   status_ = ABORTED;
 }

 void LCodeGen::Comment(const char* format, ...) {
   if (!FLAG_code_comments) return;
   char buffer[4 * KB];
   StringBuilder builder(buffer, ARRAY_SIZE(buffer));
   va_list arguments;
   va_start(arguments, format);
   builder.AddFormattedList(format, arguments);
   va_end(arguments);

   // Copy the string before recording it in the assembler to avoid
   // issues when the stack allocated buffer goes out of scope.
   int length = builder.position();
   Vector<char> copy = Vector<char>::New(length + 1);
   OS::MemCopy(copy.start(), builder.Finalize(), copy.length());
   masm()->RecordComment(copy.start());
 }

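The comment inside Comment() is the whole point of the function's second half: the formatted text lives in a stack buffer, but RecordComment keeps the pointer after Comment() returns, so a heap copy is taken first. A minimal standalone sketch of the same pattern using only the C library (FormatComment and the buffer size are illustrative, not V8 API):

#include <cstdarg>
#include <cstdio>
#include <cstdlib>
#include <cstring>

// Format a printf-style message into stack scratch space, then return a
// heap copy that outlives this frame's buffer.
char* FormatComment(const char* format, ...) {
  char buffer[4 * 1024];
  va_list arguments;
  va_start(arguments, format);
  vsnprintf(buffer, sizeof buffer, format, arguments);
  va_end(arguments);
  size_t length = strlen(buffer) + 1;  // Include the terminating NUL.
  char* copy = static_cast<char*>(malloc(length));
  memcpy(copy, buffer, length);
  return copy;  // The caller owns the allocation.
}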
+#ifdef _MSC_VER
+void LCodeGen::MakeSureStackPagesMapped(int offset) {
+  const int kPageSize = 4 * KB;
+  for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
+    __ movq(Operand(rsp, offset), rax);
+  }
+}
+#endif

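This new helper centralizes the Windows-only stack probing that previously sat inline in GeneratePrologue (see the hunk below): Windows maps stack pages lazily via a single guard page, so after a large rsp adjustment each 4 KB page must be written once before the area can be accessed in arbitrary order. A plain C++ sketch of the same loop shape, with an illustrative name:

// Touch one byte in every 4 KB page of a newly extended stack area,
// starting nearest the already-mapped pages, so the guard-page
// mechanism commits the pages one at a time. The value is irrelevant.
void TouchStackPages(volatile char* new_top, int size_in_bytes) {
  const int kPageSize = 4 * 1024;
  for (int offset = size_in_bytes - kPageSize; offset > 0;
       offset -= kPageSize) {
    new_top[offset] = 0;
  }
}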
 bool LCodeGen::GeneratePrologue() {
   ASSERT(is_generating());

   if (info()->IsOptimizing()) {
     ProfileEntryHookStub::MaybeCallEntryHook(masm_);

 #ifdef DEBUG
     if (strlen(FLAG_stop_at) > 0 &&
         info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
       __ int3();
(...skipping 29 matching lines...)
       __ push(rdi);  // Callee's JS function.
     }
     info()->AddNoFrameRange(0, masm_->pc_offset());
   }

   // Reserve space for the stack slots needed by the code.
   int slots = GetStackSlotCount();
   if (slots > 0) {
     if (FLAG_debug_code) {
       __ subq(rsp, Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+      MakeSureStackPagesMapped(slots * kPointerSize);
+#endif
       __ push(rax);
       __ Set(rax, slots);
       __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE64);
       Label loop;
       __ bind(&loop);
       __ movq(MemOperand(rsp, rax, times_pointer_size, 0),
               kScratchRegister);
       __ decl(rax);
       __ j(not_zero, &loop);
       __ pop(rax);
     } else {
       __ subq(rsp, Immediate(slots * kPointerSize));
 #ifdef _MSC_VER
-      // On windows, you may not access the stack more than one page below
-      // the most recently mapped page. To make the allocated area randomly
-      // accessible, we write to each page in turn (the value is irrelevant).
-      const int kPageSize = 4 * KB;
-      for (int offset = slots * kPointerSize - kPageSize;
-           offset > 0;
-           offset -= kPageSize) {
-        __ movq(Operand(rsp, offset), rax);
-      }
+      MakeSureStackPagesMapped(slots * kPointerSize);
 #endif
     }

   if (info()->saves_caller_doubles()) {
     Comment(";;; Save clobbered callee double registers");
     int count = 0;
     BitVector* doubles = chunk()->allocated_double_registers();
     BitVector::Iterator save_iterator(doubles);
     while (!save_iterator.Done()) {
       __ movsd(MemOperand(rsp, count * kDoubleSize),
(...skipping 218 matching lines...)
 }


 double LCodeGen::ToDouble(LConstantOperand* op) const {
   HConstant* constant = chunk_->LookupConstant(op);
   ASSERT(constant->HasDoubleValue());
   return constant->DoubleValue();
 }


+ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const {
+  HConstant* constant = chunk_->LookupConstant(op);
+  ASSERT(constant->HasExternalReferenceValue());
+  return constant->ExternalReferenceValue();
+}
+
+
 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
   HConstant* constant = chunk_->LookupConstant(op);
   ASSERT(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
   return constant->handle();
 }


 Operand LCodeGen::ToOperand(LOperand* op) const {
   // Does not handle registers. In X64 assembler, plain registers are not
   // representable as an Operand.
(...skipping 200 matching lines...)
 void LCodeGen::DeoptimizeIf(Condition cc,
                             LEnvironment* environment,
                             Deoptimizer::BailoutType bailout_type) {
   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
   ASSERT(environment->HasBeenRegistered());
   int id = environment->deoptimization_index();
   ASSERT(info()->IsOptimizing() || info()->IsStub());
   Address entry =
       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
   if (entry == NULL) {
-    Abort("bailout was not prepared");
+    Abort(kBailoutWasNotPrepared);
     return;
   }

   ASSERT(FLAG_deopt_every_n_times == 0);  // Not yet implemented on x64.

   if (FLAG_trap_on_deopt && info()->IsOptimizing()) {
     Label done;
     if (cc != no_condition) {
       __ j(NegateCondition(cc), &done, Label::kNear);
     }
(...skipping 853 matching lines...)
   if (int_val == 0) {
     __ xorps(res, res);
   } else {
     Register tmp = ToRegister(instr->temp());
     __ Set(tmp, int_val);
     __ movq(res, tmp);
   }
 }


+void LCodeGen::DoConstantE(LConstantE* instr) {
+  __ LoadAddress(ToRegister(instr->result()), instr->value());
+}
+
+
 void LCodeGen::DoConstantT(LConstantT* instr) {
   Handle<Object> value = instr->value();
   AllowDeferredHandleDereference smi_check;
   __ LoadObject(ToRegister(instr->result()), value);
 }


 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
   Register result = ToRegister(instr->result());
   Register map = ToRegister(instr->value());
(...skipping 82 matching lines...)
   if (FLAG_debug_code) {
     __ push(value);
     __ movq(value, FieldOperand(string, HeapObject::kMapOffset));
     __ movzxbq(value, FieldOperand(value, Map::kInstanceTypeOffset));

     __ andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
     __ cmpq(value, Immediate(encoding == String::ONE_BYTE_ENCODING
                              ? one_byte_seq_type : two_byte_seq_type));
-    __ Check(equal, "Unexpected string type");
+    __ Check(equal, kUnexpectedStringType);
     __ pop(value);
   }

   if (encoding == String::ONE_BYTE_ENCODING) {
     __ movb(FieldOperand(string, index, times_1, SeqString::kHeaderSize),
             value);
   } else {
     __ movw(FieldOperand(string, index, times_2, SeqString::kHeaderSize),
             value);
   }
(...skipping 976 matching lines...)
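The movb/movw pair in the hunk above encodes the addressing rule for sequential strings: character data starts at a fixed header offset and each element occupies one byte (times_1) or two bytes (times_2) depending on the encoding. A sketch of the same address computation, with illustrative names:

#include <cstddef>
#include <cstdint>

// Address of character `index` in a sequential string whose character
// data begins header_size bytes into the object.
uintptr_t SeqStringCharAddress(uintptr_t string, size_t index,
                               bool one_byte, size_t header_size) {
  size_t char_size = one_byte ? 1 : 2;  // times_1 vs. times_2 scaling.
  return string + header_size + index * char_size;
}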
   ASSERT(ToRegister(instr->value()).is(rax));

   __ Move(rcx, instr->name());
   Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
       ? isolate()->builtins()->StoreIC_Initialize_Strict()
       : isolate()->builtins()->StoreIC_Initialize();
   CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
 }


-void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
-  Register object = ToRegister(instr->object());
-  ExternalReference sites_list_address = instr->GetReference(isolate());
-  __ Load(kScratchRegister, sites_list_address);
-  __ movq(FieldOperand(object, instr->hydrogen()->store_field().offset()),
-          kScratchRegister);
-  __ Store(sites_list_address, object);
-}
-
-
 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
   Register context = ToRegister(instr->context());
   Register result = ToRegister(instr->result());
   __ movq(result, ContextOperand(context, instr->slot_index()));
   if (instr->hydrogen()->RequiresHoleCheck()) {
     __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
     if (instr->hydrogen()->DeoptimizesOnHole()) {
       DeoptimizeIf(equal, instr->environment());
     } else {
       Label is_not_hole;
(...skipping 37 matching lines...)
                               check_needed);
   }

   __ bind(&skip_assignment);
 }


 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
   HObjectAccess access = instr->hydrogen()->access();
   int offset = access.offset();
+
+  if (access.IsExternalMemory()) {
+    Register result = ToRegister(instr->result());
+    if (instr->object()->IsConstantOperand()) {
+      ASSERT(result.is(rax));
+      __ load_rax(ToExternalReference(LConstantOperand::cast(instr->object())));
+    } else {
+      Register object = ToRegister(instr->object());
+      __ movq(result, MemOperand(object, offset));
+    }
+    return;
+  }
+
   Register object = ToRegister(instr->object());
   if (FLAG_track_double_fields &&
       instr->hydrogen()->representation().IsDouble()) {
     XMMRegister result = ToDoubleRegister(instr->result());
     __ movsd(result, FieldOperand(object, offset));
     return;
   }

   Register result = ToRegister(instr->result());
   if (access.IsInobject()) {
(...skipping 46 matching lines...)
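In the external-memory path above, the ASSERT(result.is(rax)) is not arbitrary: load_rax presumably emits the moffs form of MOV, the x64 encoding for loading from a 64-bit absolute address, and that form only exists for rax. In C++ terms the operation is just a word read at a raw address outside the JS heap (LoadExternalWord is a hypothetical name):

#include <cstdint>

// Read one 64-bit word at an absolute address, the rough equivalent of
// load_rax(external_reference).
int64_t LoadExternalWord(uintptr_t address) {
  return *reinterpret_cast<volatile int64_t*>(address);
}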
 }


 // Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
 // prototype chain, which causes unbounded code generation.
 static bool CompactEmit(SmallMapList* list,
                         Handle<String> name,
                         int i,
                         Isolate* isolate) {
   Handle<Map> map = list->at(i);
-  // If the map has ElementsKind transitions, we will generate map checks
-  // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
-  if (map->HasElementsTransition()) return false;
   LookupResult lookup(isolate);
   map->LookupDescriptor(NULL, *name, &lookup);
   return lookup.IsField() || lookup.IsConstant();
 }


 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
   Register object = ToRegister(instr->object());
   Register result = ToRegister(instr->result());

(...skipping 293 matching lines...)
     LOperand* elements_pointer,
     LOperand* key,
     ElementsKind elements_kind,
     uint32_t offset,
     uint32_t additional_index) {
   Register elements_pointer_reg = ToRegister(elements_pointer);
   int shift_size = ElementsKindToShiftSize(elements_kind);
   if (key->IsConstantOperand()) {
     int constant_value = ToInteger32(LConstantOperand::cast(key));
     if (constant_value & 0xF0000000) {
-      Abort("array index constant value too big");
+      Abort(kArrayIndexConstantValueTooBig);
     }
     return Operand(elements_pointer_reg,
                    ((constant_value + additional_index) << shift_size)
                    + offset);
   } else {
     ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
     return Operand(elements_pointer_reg,
                    ToRegister(key),
                    scale_factor,
                    offset + (additional_index << shift_size));
(...skipping 317 matching lines...)
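Both branches of BuildFastArrayOperand above compute the same effective address: a constant key is folded into the displacement at code-generation time, while a register key uses the hardware scale factor. A sketch of the arithmetic, assuming shift_size is log2 of the element width:

#include <cstdint>

// Effective address of element (key + additional_index) in a fast
// elements backing store; mirrors both branches above.
uintptr_t FastArrayElementAddress(uintptr_t elements_pointer, intptr_t key,
                                  int shift_size, uint32_t offset,
                                  uint32_t additional_index) {
  return elements_pointer +
         ((key + additional_index) << shift_size) + offset;
}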
   Register input_reg = ToRegister(instr->value());
   __ testl(input_reg, input_reg);
   Label is_positive;
   __ j(not_sign, &is_positive, Label::kNear);
   __ negl(input_reg);  // Sets flags.
   DeoptimizeIf(negative, instr->environment());
   __ bind(&is_positive);
 }

+void LCodeGen::EmitInteger64MathAbs(LMathAbs* instr) {
+  Register input_reg = ToRegister(instr->value());
+  __ testq(input_reg, input_reg);
+  Label is_positive;
+  __ j(not_sign, &is_positive, Label::kNear);
+  __ neg(input_reg);  // Sets flags.
+  DeoptimizeIf(negative, instr->environment());
+  __ bind(&is_positive);
+}
+
+
 void LCodeGen::DoMathAbs(LMathAbs* instr) {
   // Class for deferred case.
   class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
    public:
     DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr)
         : LDeferredCode(codegen), instr_(instr) { }
     virtual void Generate() {
       codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
     }
     virtual LInstruction* instr() { return instr_; }
    private:
     LMathAbs* instr_;
   };

   ASSERT(instr->value()->Equals(instr->result()));
   Representation r = instr->hydrogen()->value()->representation();

   if (r.IsDouble()) {
     XMMRegister scratch = xmm0;
     XMMRegister input_reg = ToDoubleRegister(instr->value());
     __ xorps(scratch, scratch);
     __ subsd(scratch, input_reg);
     __ andpd(input_reg, scratch);
   } else if (r.IsInteger32()) {
     EmitIntegerMathAbs(instr);
+  } else if (r.IsSmi()) {
+    EmitInteger64MathAbs(instr);
   } else {  // Tagged case.
     DeferredMathAbsTaggedHeapNumber* deferred =
         new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
     Register input_reg = ToRegister(instr->value());
     // Smi check.
     __ JumpIfNotSmi(input_reg, deferred->entry());
     __ SmiToInteger32(input_reg, input_reg);
     EmitIntegerMathAbs(instr);
     __ Integer32ToSmi(input_reg, input_reg);
     __ bind(deferred->exit());
(...skipping 472 matching lines...)
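The double case in DoMathAbs above is branch-free: xorps zeroes a scratch register, subsd leaves 0.0 - x in it, and andpd keeps only the bits the two values share. For finite x the bit patterns of x and 0.0 - x agree everywhere except the sign bit, and at least one of the two sign bits is clear, so the AND yields |x|. A standalone sketch of the same bit trick:

#include <cstdint>
#include <cstring>

// abs(x) via the xorps/subsd/andpd idea: AND the bit patterns of x and
// 0.0 - x; everything but the sign bit survives.
double BitwiseFabs(double x) {
  double neg = 0.0 - x;
  uint64_t x_bits, neg_bits;
  std::memcpy(&x_bits, &x, sizeof x_bits);
  std::memcpy(&neg_bits, &neg, sizeof neg_bits);
  uint64_t abs_bits = x_bits & neg_bits;
  double result;
  std::memcpy(&result, &abs_bits, sizeof result);
  return result;
}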
 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
   Register result = ToRegister(instr->result());
   Register base = ToRegister(instr->base_object());
   __ lea(result, Operand(base, instr->offset()));
 }


 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
   Representation representation = instr->representation();

-  Register object = ToRegister(instr->object());
-
   HObjectAccess access = instr->hydrogen()->access();
   int offset = access.offset();

+  if (access.IsExternalMemory()) {
+    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+    Register value = ToRegister(instr->value());
+    if (instr->object()->IsConstantOperand()) {
+      ASSERT(value.is(rax));
+      LConstantOperand* object = LConstantOperand::cast(instr->object());
+      __ store_rax(ToExternalReference(object));
+    } else {
+      Register object = ToRegister(instr->object());
+      __ movq(MemOperand(object, offset), value);
+    }
+    return;
+  }
+
+  Register object = ToRegister(instr->object());
   Handle<Map> transition = instr->transition();

   if (FLAG_track_fields && representation.IsSmi()) {
     if (instr->value()->IsConstantOperand()) {
       LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
       if (!IsSmiConstant(operand_value)) {
         DeoptimizeIf(no_condition, instr->environment());
       }
     }
   } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
(...skipping 469 matching lines...)
   __ Set(result, 0);

   PushSafepointRegistersScope scope(this);
   __ Integer32ToSmi(char_code, char_code);
   __ push(char_code);
   CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
   __ StoreToSafepointRegisterSlot(result, rax);
 }


-void LCodeGen::DoStringLength(LStringLength* instr) {
-  Register string = ToRegister(instr->string());
-  Register result = ToRegister(instr->result());
-  __ movq(result, FieldOperand(string, String::kLengthOffset));
-}
-
-
 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
   LOperand* input = instr->value();
   ASSERT(input->IsRegister() || input->IsStackSlot());
   LOperand* output = instr->result();
   ASSERT(output->IsDoubleRegister());
   if (input->IsRegister()) {
     __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
   } else {
     __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
   }
(...skipping 607 matching lines...)
       new(zone()) DeferredAllocate(this, instr);

   Register result = ToRegister(instr->result());
   Register temp = ToRegister(instr->temp());

   // Allocate memory for the object.
   AllocationFlags flags = TAG_OBJECT;
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }

   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
     __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
     __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
   }
(...skipping 31 matching lines...)
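The flag plumbing above relies on a common C++ idiom: AllocationFlags is a plain enum, so bitwise OR promotes to int and every combination has to be cast back to the enum type. A minimal sketch of the idiom (the enumerator values here are illustrative, not the real V8 definitions):

// Plain enums promote to int under operator|, hence the static_cast.
enum AllocationFlags {
  TAG_OBJECT = 1 << 0,
  DOUBLE_ALIGNMENT = 1 << 1,
  PRETENURE_OLD_POINTER_SPACE = 1 << 2,
  PRETENURE_OLD_DATA_SPACE = 1 << 3
};

AllocationFlags CombineFlags(bool double_aligned) {
  AllocationFlags flags = TAG_OBJECT;
  if (double_aligned) {
    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
  }
  return flags;
}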
   if (instr->size()->IsRegister()) {
     Register size = ToRegister(instr->size());
     ASSERT(!size.is(result));
     __ Integer32ToSmi(size, size);
     __ push(size);
   } else {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
     __ Push(Smi::FromInt(size));
   }

-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
   } else {
     CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
   }
   __ StoreToSafepointRegisterSlot(result, rax);
 }


 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
   ASSERT(ToRegister(instr->value()).is(rax));
(...skipping 411 matching lines...)
                           FixedArray::kHeaderSize - kPointerSize));
   __ bind(&done);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
