Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 149413010: A64: Synchronize with r16024. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 95 matching lines...)
106 RegisterDependentCodeForEmbeddedMaps(code); 106 RegisterDependentCodeForEmbeddedMaps(code);
107 } 107 }
108 PopulateDeoptimizationData(code); 108 PopulateDeoptimizationData(code);
109 if (!info()->IsStub()) { 109 if (!info()->IsStub()) {
110 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); 110 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
111 } 111 }
112 info()->CommitDependencies(code); 112 info()->CommitDependencies(code);
113 } 113 }
114 114
115 115
116 void LCodeGen::Abort(const char* reason) { 116 void LCodeGen::Abort(BailoutReason reason) {
117 info()->set_bailout_reason(reason); 117 info()->set_bailout_reason(reason);
118 status_ = ABORTED; 118 status_ = ABORTED;
119 } 119 }
120 120
121 121
122 void LCodeGen::Comment(const char* format, ...) { 122 void LCodeGen::Comment(const char* format, ...) {
123 if (!FLAG_code_comments) return; 123 if (!FLAG_code_comments) return;
124 char buffer[4 * KB]; 124 char buffer[4 * KB];
125 StringBuilder builder(buffer, ARRAY_SIZE(buffer)); 125 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
126 va_list arguments; 126 va_list arguments;
127 va_start(arguments, format); 127 va_start(arguments, format);
128 builder.AddFormattedList(format, arguments); 128 builder.AddFormattedList(format, arguments);
129 va_end(arguments); 129 va_end(arguments);
130 130
131 // Copy the string before recording it in the assembler to avoid 131 // Copy the string before recording it in the assembler to avoid
132 // issues when the stack allocated buffer goes out of scope. 132 // issues when the stack allocated buffer goes out of scope.
133 size_t length = builder.position(); 133 size_t length = builder.position();
134 Vector<char> copy = Vector<char>::New(length + 1); 134 Vector<char> copy = Vector<char>::New(length + 1);
135 OS::MemCopy(copy.start(), builder.Finalize(), copy.length()); 135 OS::MemCopy(copy.start(), builder.Finalize(), copy.length());
136 masm()->RecordComment(copy.start()); 136 masm()->RecordComment(copy.start());
137 } 137 }
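Note on the copy above: masm()->RecordComment() retains the char pointer beyond this frame, so the stack-allocated builder buffer must be copied to heap-backed storage first. A minimal standalone sketch of the hazard and the fix (hypothetical Recorder type, not a V8 API):

    #include <cstring>
    #include <vector>

    struct Recorder { std::vector<const char*> comments; };  // keeps raw pointers

    void AddComment(Recorder* recorder, const char* text) {
      char local[64];  // stack buffer: invalid once this function returns
      std::strncpy(local, text, sizeof(local) - 1);
      local[sizeof(local) - 1] = '\0';
      char* copy = new char[std::strlen(local) + 1];  // heap copy outlives us
      std::strcpy(copy, local);
      recorder->comments.push_back(copy);  // safe: never points at 'local'
    }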
138 138
139 139
140 #ifdef _MSC_VER
141 void LCodeGen::MakeSureStackPagesMapped(int offset) {
142 const int kPageSize = 4 * KB;
143 for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
144 __ mov(Operand(esp, offset), eax);
145 }
146 }
147 #endif
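This new helper factors out the MSVC-only stack probing that was previously inlined in GeneratePrologue (visible in the old column further down): Windows commits stack pages lazily through a guard page, so after a large esp adjustment each 4 KB page must be touched once before stores at arbitrary offsets are safe. A scalar sketch of exactly which offsets the loop writes (ia32 constants assumed):

    #include <cstdio>

    // Mirrors: for (offset -= kPageSize; offset > 0; offset -= kPageSize).
    void PrintProbedOffsets(int slots) {
      const int kPointerSize = 4;      // ia32 pointer width
      const int kPageSize = 4 * 1024;  // 4 KB, as in the helper
      for (int offset = slots * kPointerSize - kPageSize; offset > 0;
           offset -= kPageSize) {
        std::printf("touch [esp + %d]\n", offset);  // one write per page
      }
    }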
148
149
140 bool LCodeGen::GeneratePrologue() { 150 bool LCodeGen::GeneratePrologue() {
141 ASSERT(is_generating()); 151 ASSERT(is_generating());
142 152
143 if (info()->IsOptimizing()) { 153 if (info()->IsOptimizing()) {
144 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 154 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
145 155
146 #ifdef DEBUG 156 #ifdef DEBUG
147 if (strlen(FLAG_stop_at) > 0 && 157 if (strlen(FLAG_stop_at) > 0 &&
148 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 158 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
149 __ int3(); 159 __ int3();
(...skipping 53 matching lines...)
203 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); 213 __ push(Immediate(Smi::FromInt(StackFrame::STUB)));
204 } else { 214 } else {
205 __ push(edi); // Callee's JS function. 215 __ push(edi); // Callee's JS function.
206 } 216 }
207 } 217 }
208 218
209 if (info()->IsOptimizing() && 219 if (info()->IsOptimizing() &&
210 dynamic_frame_alignment_ && 220 dynamic_frame_alignment_ &&
211 FLAG_debug_code) { 221 FLAG_debug_code) {
212 __ test(esp, Immediate(kPointerSize)); 222 __ test(esp, Immediate(kPointerSize));
213 __ Assert(zero, "frame is expected to be aligned"); 223 __ Assert(zero, kFrameIsExpectedToBeAligned);
214 } 224 }
215 225
216 // Reserve space for the stack slots needed by the code. 226 // Reserve space for the stack slots needed by the code.
217 int slots = GetStackSlotCount(); 227 int slots = GetStackSlotCount();
218 ASSERT(slots != 0 || !info()->IsOptimizing()); 228 ASSERT(slots != 0 || !info()->IsOptimizing());
219 if (slots > 0) { 229 if (slots > 0) {
220 if (slots == 1) { 230 if (slots == 1) {
221 if (dynamic_frame_alignment_) { 231 if (dynamic_frame_alignment_) {
222 __ push(edx); 232 __ push(edx);
223 } else { 233 } else {
224 __ push(Immediate(kNoAlignmentPadding)); 234 __ push(Immediate(kNoAlignmentPadding));
225 } 235 }
226 } else { 236 } else {
227 if (FLAG_debug_code) { 237 if (FLAG_debug_code) {
228 __ sub(Operand(esp), Immediate(slots * kPointerSize)); 238 __ sub(Operand(esp), Immediate(slots * kPointerSize));
239 #ifdef _MSC_VER
240 MakeSureStackPagesMapped(slots * kPointerSize);
241 #endif
229 __ push(eax); 242 __ push(eax);
230 __ mov(Operand(eax), Immediate(slots)); 243 __ mov(Operand(eax), Immediate(slots));
231 Label loop; 244 Label loop;
232 __ bind(&loop); 245 __ bind(&loop);
233 __ mov(MemOperand(esp, eax, times_4, 0), 246 __ mov(MemOperand(esp, eax, times_4, 0),
234 Immediate(kSlotsZapValue)); 247 Immediate(kSlotsZapValue));
235 __ dec(eax); 248 __ dec(eax);
236 __ j(not_zero, &loop); 249 __ j(not_zero, &loop);
237 __ pop(eax); 250 __ pop(eax);
238 } else { 251 } else {
239 __ sub(Operand(esp), Immediate(slots * kPointerSize)); 252 __ sub(Operand(esp), Immediate(slots * kPointerSize));
240 #ifdef _MSC_VER 253 #ifdef _MSC_VER
241 // On windows, you may not access the stack more than one page below 254 MakeSureStackPagesMapped(slots * kPointerSize);
242 // the most recently mapped page. To make the allocated area randomly
243 // accessible, we write to each page in turn (the value is irrelevant).
244 const int kPageSize = 4 * KB;
245 for (int offset = slots * kPointerSize - kPageSize;
246 offset > 0;
247 offset -= kPageSize) {
248 __ mov(Operand(esp, offset), eax);
249 }
250 #endif 255 #endif
251 } 256 }
252 257
253 if (support_aligned_spilled_doubles_) { 258 if (support_aligned_spilled_doubles_) {
254 Comment(";;; Store dynamic frame alignment tag for spilled doubles"); 259 Comment(";;; Store dynamic frame alignment tag for spilled doubles");
255 // Store dynamic frame alignment state in the first local. 260 // Store dynamic frame alignment state in the first local.
256 int offset = JavaScriptFrameConstants::kDynamicAlignmentStateOffset; 261 int offset = JavaScriptFrameConstants::kDynamicAlignmentStateOffset;
257 if (dynamic_frame_alignment_) { 262 if (dynamic_frame_alignment_) {
258 __ mov(Operand(ebp, offset), edx); 263 __ mov(Operand(ebp, offset), edx);
259 } else { 264 } else {
(...skipping 418 matching lines...)
678 } 683 }
679 684
680 685
681 double LCodeGen::ToDouble(LConstantOperand* op) const { 686 double LCodeGen::ToDouble(LConstantOperand* op) const {
682 HConstant* constant = chunk_->LookupConstant(op); 687 HConstant* constant = chunk_->LookupConstant(op);
683 ASSERT(constant->HasDoubleValue()); 688 ASSERT(constant->HasDoubleValue());
684 return constant->DoubleValue(); 689 return constant->DoubleValue();
685 } 690 }
686 691
687 692
693 ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const {
694 HConstant* constant = chunk_->LookupConstant(op);
695 ASSERT(constant->HasExternalReferenceValue());
696 return constant->ExternalReferenceValue();
697 }
698
699
688 bool LCodeGen::IsInteger32(LConstantOperand* op) const { 700 bool LCodeGen::IsInteger32(LConstantOperand* op) const {
689 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); 701 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
690 } 702 }
691 703
692 704
693 bool LCodeGen::IsSmi(LConstantOperand* op) const { 705 bool LCodeGen::IsSmi(LConstantOperand* op) const {
694 return chunk_->LookupLiteralRepresentation(op).IsSmi(); 706 return chunk_->LookupLiteralRepresentation(op).IsSmi();
695 } 707 }
696 708
697 709
(...skipping 231 matching lines...)
929 void LCodeGen::DeoptimizeIf(Condition cc, 941 void LCodeGen::DeoptimizeIf(Condition cc,
930 LEnvironment* environment, 942 LEnvironment* environment,
931 Deoptimizer::BailoutType bailout_type) { 943 Deoptimizer::BailoutType bailout_type) {
932 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); 944 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
933 ASSERT(environment->HasBeenRegistered()); 945 ASSERT(environment->HasBeenRegistered());
934 int id = environment->deoptimization_index(); 946 int id = environment->deoptimization_index();
935 ASSERT(info()->IsOptimizing() || info()->IsStub()); 947 ASSERT(info()->IsOptimizing() || info()->IsStub());
936 Address entry = 948 Address entry =
937 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); 949 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
938 if (entry == NULL) { 950 if (entry == NULL) {
939 Abort("bailout was not prepared"); 951 Abort(kBailoutWasNotPrepared);
940 return; 952 return;
941 } 953 }
942 954
943 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) { 955 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
944 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); 956 ExternalReference count = ExternalReference::stress_deopt_count(isolate());
945 Label no_deopt; 957 Label no_deopt;
946 __ pushfd(); 958 __ pushfd();
947 __ push(eax); 959 __ push(eax);
948 __ mov(eax, Operand::StaticVariable(count)); 960 __ mov(eax, Operand::StaticVariable(count));
949 __ sub(eax, Immediate(1)); 961 __ sub(eax, Immediate(1));
(...skipping 807 matching lines...)
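The chunk above loads the per-isolate stress_deopt_count and decrements it; in the elided lines the deopt is presumably taken when the counter reaches zero and the counter is reloaded, so a deoptimization is forced once every FLAG_deopt_every_n_times checks. A plain sketch of that assumed counter protocol (stand-in globals, not V8 code):

    static int stress_deopt_count = 0;  // stands in for the external reference

    bool ShouldForceDeopt(int deopt_every_n_times) {
      if (--stress_deopt_count > 0) return false;  // common case: keep going
      stress_deopt_count = deopt_every_n_times;    // assumed reset on trigger
      return true;                                 // take the forced deopt
    }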
1757 __ test(ToRegister(left), Immediate(0x80000000)); 1769 __ test(ToRegister(left), Immediate(0x80000000));
1758 DeoptimizeIf(not_zero, instr->environment()); 1770 DeoptimizeIf(not_zero, instr->environment());
1759 } else { 1771 } else {
1760 __ shr(ToRegister(left), shift_count); 1772 __ shr(ToRegister(left), shift_count);
1761 } 1773 }
1762 break; 1774 break;
1763 case Token::SHL: 1775 case Token::SHL:
1764 if (shift_count != 0) { 1776 if (shift_count != 0) {
1765 if (instr->hydrogen_value()->representation().IsSmi() && 1777 if (instr->hydrogen_value()->representation().IsSmi() &&
1766 instr->can_deopt()) { 1778 instr->can_deopt()) {
1767 __ shl(ToRegister(left), shift_count - 1); 1779 if (shift_count != 1) {
1780 __ shl(ToRegister(left), shift_count - 1);
1781 }
1768 __ SmiTag(ToRegister(left)); 1782 __ SmiTag(ToRegister(left));
1769 DeoptimizeIf(overflow, instr->environment()); 1783 DeoptimizeIf(overflow, instr->environment());
1770 } else { 1784 } else {
1771 __ shl(ToRegister(left), shift_count); 1785 __ shl(ToRegister(left), shift_count);
1772 } 1786 }
1773 } 1787 }
1774 break; 1788 break;
1775 default: 1789 default:
1776 UNREACHABLE(); 1790 UNREACHABLE();
1777 break; 1791 break;
(...skipping 68 matching lines...)
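For context on the Token::SHL fix above: when the result is a smi and the instruction can deopt, the code shifts by shift_count - 1 and lets SmiTag supply the final left shift, so the overflow flag from that tagging shift feeds DeoptimizeIf(overflow, ...). The new guard simply avoids emitting a useless shift-by-zero when shift_count == 1. A scalar check of the condition the emitted code enforces (a sketch; ia32 smis carry 31 payload bits):

    #include <cstdint>

    // True when (value << shift_count), once smi-tagged (one more left
    // shift), still fits in 32 bits -- i.e. the emitted code would not deopt.
    bool ShlResultFitsInSmi(int32_t value, int shift_count) {
      int64_t tagged = (static_cast<int64_t>(value) << shift_count) << 1;
      return tagged == static_cast<int32_t>(tagged);  // round-trips intact
    }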
1846 __ Set(temp, Immediate(lower)); 1860 __ Set(temp, Immediate(lower));
1847 __ movd(xmm0, Operand(temp)); 1861 __ movd(xmm0, Operand(temp));
1848 __ por(res, xmm0); 1862 __ por(res, xmm0);
1849 } 1863 }
1850 } 1864 }
1851 } 1865 }
1852 } 1866 }
1853 } 1867 }
1854 1868
1855 1869
1870 void LCodeGen::DoConstantE(LConstantE* instr) {
1871 __ lea(ToRegister(instr->result()), Operand::StaticVariable(instr->value()));
1872 }
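The new DoConstantE materializes an external reference as a value: lea with Operand::StaticVariable puts the address itself into the result register, where a mov load would instead fetch the word stored at that address. The distinction in plain C++ terms:

    static int external_cell = 0;          // a variable at a fixed address
    int* address_value = &external_cell;   // what the lea produces
    int loaded_value = *address_value;     // what a mov load would produce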
1873
1874
1856 void LCodeGen::DoConstantT(LConstantT* instr) { 1875 void LCodeGen::DoConstantT(LConstantT* instr) {
1857 Register reg = ToRegister(instr->result()); 1876 Register reg = ToRegister(instr->result());
1858 Handle<Object> handle = instr->value(); 1877 Handle<Object> handle = instr->value();
1859 AllowDeferredHandleDereference smi_check; 1878 AllowDeferredHandleDereference smi_check;
1860 __ LoadObject(reg, handle); 1879 __ LoadObject(reg, handle);
1861 } 1880 }
1862 1881
1863 1882
1864 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { 1883 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
1865 Register result = ToRegister(instr->result()); 1884 Register result = ToRegister(instr->result());
(...skipping 84 matching lines...)
1950 if (FLAG_debug_code) { 1969 if (FLAG_debug_code) {
1951 __ push(value); 1970 __ push(value);
1952 __ mov(value, FieldOperand(string, HeapObject::kMapOffset)); 1971 __ mov(value, FieldOperand(string, HeapObject::kMapOffset));
1953 __ movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset)); 1972 __ movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));
1954 1973
1955 __ and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask)); 1974 __ and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
1956 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 1975 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
1957 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 1976 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
1958 __ cmp(value, Immediate(encoding == String::ONE_BYTE_ENCODING 1977 __ cmp(value, Immediate(encoding == String::ONE_BYTE_ENCODING
1959 ? one_byte_seq_type : two_byte_seq_type)); 1978 ? one_byte_seq_type : two_byte_seq_type));
1960 __ Check(equal, "Unexpected string type"); 1979 __ Check(equal, kUnexpectedStringType);
1961 __ pop(value); 1980 __ pop(value);
1962 } 1981 }
1963 1982
1964 if (encoding == String::ONE_BYTE_ENCODING) { 1983 if (encoding == String::ONE_BYTE_ENCODING) {
1965 __ mov_b(FieldOperand(string, index, times_1, SeqString::kHeaderSize), 1984 __ mov_b(FieldOperand(string, index, times_1, SeqString::kHeaderSize),
1966 value); 1985 value);
1967 } else { 1986 } else {
1968 __ mov_w(FieldOperand(string, index, times_2, SeqString::kHeaderSize), 1987 __ mov_w(FieldOperand(string, index, times_2, SeqString::kHeaderSize),
1969 value); 1988 value);
1970 } 1989 }
(...skipping 866 matching lines...)
2837 2856
2838 void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) { 2857 void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) {
2839 int extra_value_count = dynamic_frame_alignment ? 2 : 1; 2858 int extra_value_count = dynamic_frame_alignment ? 2 : 1;
2840 2859
2841 if (instr->has_constant_parameter_count()) { 2860 if (instr->has_constant_parameter_count()) {
2842 int parameter_count = ToInteger32(instr->constant_parameter_count()); 2861 int parameter_count = ToInteger32(instr->constant_parameter_count());
2843 if (dynamic_frame_alignment && FLAG_debug_code) { 2862 if (dynamic_frame_alignment && FLAG_debug_code) {
2844 __ cmp(Operand(esp, 2863 __ cmp(Operand(esp,
2845 (parameter_count + extra_value_count) * kPointerSize), 2864 (parameter_count + extra_value_count) * kPointerSize),
2846 Immediate(kAlignmentZapValue)); 2865 Immediate(kAlignmentZapValue));
2847 __ Assert(equal, "expected alignment marker"); 2866 __ Assert(equal, kExpectedAlignmentMarker);
2848 } 2867 }
2849 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); 2868 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx);
2850 } else { 2869 } else {
2851 Register reg = ToRegister(instr->parameter_count()); 2870 Register reg = ToRegister(instr->parameter_count());
2852 // The argument count parameter is a smi 2871 // The argument count parameter is a smi
2853 __ SmiUntag(reg); 2872 __ SmiUntag(reg);
2854 Register return_addr_reg = reg.is(ecx) ? ebx : ecx; 2873 Register return_addr_reg = reg.is(ecx) ? ebx : ecx;
2855 if (dynamic_frame_alignment && FLAG_debug_code) { 2874 if (dynamic_frame_alignment && FLAG_debug_code) {
2856 ASSERT(extra_value_count == 2); 2875 ASSERT(extra_value_count == 2);
2857 __ cmp(Operand(esp, reg, times_pointer_size, 2876 __ cmp(Operand(esp, reg, times_pointer_size,
2858 extra_value_count * kPointerSize), 2877 extra_value_count * kPointerSize),
2859 Immediate(kAlignmentZapValue)); 2878 Immediate(kAlignmentZapValue));
2860 __ Assert(equal, "expected alignment marker"); 2879 __ Assert(equal, kExpectedAlignmentMarker);
2861 } 2880 }
2862 2881
2863 // emit code to restore stack based on instr->parameter_count() 2882 // emit code to restore stack based on instr->parameter_count()
2864 __ pop(return_addr_reg); // save return address 2883 __ pop(return_addr_reg); // save return address
2865 if (dynamic_frame_alignment) { 2884 if (dynamic_frame_alignment) {
2866 __ inc(reg); // 1 more for alignment 2885 __ inc(reg); // 1 more for alignment
2867 } 2886 }
2868 __ shl(reg, kPointerSizeLog2); 2887 __ shl(reg, kPointerSizeLog2);
2869 __ add(esp, reg); 2888 __ add(esp, reg);
2870 __ jmp(return_addr_reg); 2889 __ jmp(return_addr_reg);
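The variable-count epilogue above cannot use a plain ret imm16, so it pops the return address into a scratch register, bumps esp by the computed byte count, and jumps. The byte arithmetic it performs, as a sketch (ia32 constants assumed):

    // Bytes added to esp after the return address has been popped; one extra
    // slot is consumed when dynamic frame alignment pushed a padding word.
    int EpilogueStackAdjustment(int argument_count, bool dynamic_alignment) {
      if (dynamic_alignment) ++argument_count;  // matches __ inc(reg)
      return argument_count << 2;  // kPointerSizeLog2 == 2 on ia32 (shl)
    }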
(...skipping 99 matching lines...)
2970 ASSERT(ToRegister(instr->value()).is(eax)); 2989 ASSERT(ToRegister(instr->value()).is(eax));
2971 2990
2972 __ mov(ecx, instr->name()); 2991 __ mov(ecx, instr->name());
2973 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 2992 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
2974 ? isolate()->builtins()->StoreIC_Initialize_Strict() 2993 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2975 : isolate()->builtins()->StoreIC_Initialize(); 2994 : isolate()->builtins()->StoreIC_Initialize();
2976 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 2995 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2977 } 2996 }
2978 2997
2979 2998
2980 void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
2981 Register object = ToRegister(instr->object());
2982 Register temp = ToRegister(instr->temp());
2983 ExternalReference sites_list_address = instr->GetReference(isolate());
2984
2985 __ mov(temp, Immediate(sites_list_address));
2986 __ mov(temp, Operand(temp, 0));
2987 __ mov(FieldOperand(object, instr->hydrogen()->store_field().offset()),
2988 temp);
2989 __ mov(temp, Immediate(sites_list_address));
2990 __ mov(Operand(temp, 0), object);
2991 }
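The new DoLinkObjectInList prepends the object to a global singly linked list whose head cell lives at an external address: the old head is stored into the object's designated field, then the head cell is rewritten to point at the object. The same operation with stand-in types (hypothetical, not V8's):

    struct Site { Site* next_link; };        // the field named by store_field()
    static Site* sites_list_head = nullptr;  // stands in for sites_list_address

    void LinkObjectInList(Site* object) {
      object->next_link = sites_list_head;  // first mov pair: field <- old head
      sites_list_head = object;             // second mov pair: head <- object
    }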
2992
2993
2994 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { 2999 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2995 Register context = ToRegister(instr->context()); 3000 Register context = ToRegister(instr->context());
2996 Register result = ToRegister(instr->result()); 3001 Register result = ToRegister(instr->result());
2997 __ mov(result, ContextOperand(context, instr->slot_index())); 3002 __ mov(result, ContextOperand(context, instr->slot_index()));
2998 3003
2999 if (instr->hydrogen()->RequiresHoleCheck()) { 3004 if (instr->hydrogen()->RequiresHoleCheck()) {
3000 __ cmp(result, factory()->the_hole_value()); 3005 __ cmp(result, factory()->the_hole_value());
3001 if (instr->hydrogen()->DeoptimizesOnHole()) { 3006 if (instr->hydrogen()->DeoptimizesOnHole()) {
3002 DeoptimizeIf(equal, instr->environment()); 3007 DeoptimizeIf(equal, instr->environment());
3003 } else { 3008 } else {
(...skipping 38 matching lines...)
3042 check_needed); 3047 check_needed);
3043 } 3048 }
3044 3049
3045 __ bind(&skip_assignment); 3050 __ bind(&skip_assignment);
3046 } 3051 }
3047 3052
3048 3053
3049 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { 3054 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
3050 HObjectAccess access = instr->hydrogen()->access(); 3055 HObjectAccess access = instr->hydrogen()->access();
3051 int offset = access.offset(); 3056 int offset = access.offset();
3057
3058 if (access.IsExternalMemory()) {
3059 Register result = ToRegister(instr->result());
3060 if (instr->object()->IsConstantOperand()) {
3061 ExternalReference external_reference = ToExternalReference(
3062 LConstantOperand::cast(instr->object()));
3063 __ mov(result, MemOperand::StaticVariable(external_reference));
3064 } else {
3065 __ mov(result, MemOperand(ToRegister(instr->object()), offset));
3066 }
3067 return;
3068 }
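The new IsExternalMemory branch covers loads that bypass the JS heap, with two addressing modes: a constant operand becomes an absolute address (MemOperand::StaticVariable), anything else is base register plus offset. Roughly, in portable terms (a sketch only):

    #include <cstdint>

    int32_t LoadAbsolute(const int32_t* known_address) {  // constant operand
      return *known_address;
    }

    int32_t LoadBasePlusOffset(const uint8_t* base, int offset) {  // register
      return *reinterpret_cast<const int32_t*>(base + offset);
    }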
3069
3052 Register object = ToRegister(instr->object()); 3070 Register object = ToRegister(instr->object());
3053 if (FLAG_track_double_fields && 3071 if (FLAG_track_double_fields &&
3054 instr->hydrogen()->representation().IsDouble()) { 3072 instr->hydrogen()->representation().IsDouble()) {
3055 if (CpuFeatures::IsSupported(SSE2)) { 3073 if (CpuFeatures::IsSupported(SSE2)) {
3056 CpuFeatureScope scope(masm(), SSE2); 3074 CpuFeatureScope scope(masm(), SSE2);
3057 XMMRegister result = ToDoubleRegister(instr->result()); 3075 XMMRegister result = ToDoubleRegister(instr->result());
3058 __ movdbl(result, FieldOperand(object, offset)); 3076 __ movdbl(result, FieldOperand(object, offset));
3059 } else { 3077 } else {
3060 X87Mov(ToX87Register(instr->result()), FieldOperand(object, offset)); 3078 X87Mov(ToX87Register(instr->result()), FieldOperand(object, offset));
3061 } 3079 }
(...skipping 69 matching lines...)
3131 } 3149 }
3132 3150
3133 3151
3134 // Check for cases where EmitLoadFieldOrConstantFunction needs to walk the 3152 // Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
3135 // prototype chain, which causes unbounded code generation. 3153 // prototype chain, which causes unbounded code generation.
3136 static bool CompactEmit(SmallMapList* list, 3154 static bool CompactEmit(SmallMapList* list,
3137 Handle<String> name, 3155 Handle<String> name,
3138 int i, 3156 int i,
3139 Isolate* isolate) { 3157 Isolate* isolate) {
3140 Handle<Map> map = list->at(i); 3158 Handle<Map> map = list->at(i);
3141 // If the map has ElementsKind transitions, we will generate map checks
3142 // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
3143 if (map->HasElementsTransition()) return false;
3144 LookupResult lookup(isolate); 3159 LookupResult lookup(isolate);
3145 map->LookupDescriptor(NULL, *name, &lookup); 3160 map->LookupDescriptor(NULL, *name, &lookup);
3146 return lookup.IsField() || lookup.IsConstant(); 3161 return lookup.IsField() || lookup.IsConstant();
3147 } 3162 }
3148 3163
3149 3164
3150 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { 3165 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
3151 Register object = ToRegister(instr->object()); 3166 Register object = ToRegister(instr->object());
3152 Register result = ToRegister(instr->result()); 3167 Register result = ToRegister(instr->result());
3153 3168
(...skipping 271 matching lines...)
3425 Representation key_representation, 3440 Representation key_representation,
3426 ElementsKind elements_kind, 3441 ElementsKind elements_kind,
3427 uint32_t offset, 3442 uint32_t offset,
3428 uint32_t additional_index) { 3443 uint32_t additional_index) {
3429 Register elements_pointer_reg = ToRegister(elements_pointer); 3444 Register elements_pointer_reg = ToRegister(elements_pointer);
3430 int element_shift_size = ElementsKindToShiftSize(elements_kind); 3445 int element_shift_size = ElementsKindToShiftSize(elements_kind);
3431 int shift_size = element_shift_size; 3446 int shift_size = element_shift_size;
3432 if (key->IsConstantOperand()) { 3447 if (key->IsConstantOperand()) {
3433 int constant_value = ToInteger32(LConstantOperand::cast(key)); 3448 int constant_value = ToInteger32(LConstantOperand::cast(key));
3434 if (constant_value & 0xF0000000) { 3449 if (constant_value & 0xF0000000) {
3435 Abort("array index constant value too big"); 3450 Abort(kArrayIndexConstantValueTooBig);
3436 } 3451 }
3437 return Operand(elements_pointer_reg, 3452 return Operand(elements_pointer_reg,
3438 ((constant_value + additional_index) << shift_size) 3453 ((constant_value + additional_index) << shift_size)
3439 + offset); 3454 + offset);
3440 } else { 3455 } else {
3441 // Take the tag bit into account while computing the shift size. 3456 // Take the tag bit into account while computing the shift size.
3442 if (key_representation.IsSmi() && (shift_size >= 1)) { 3457 if (key_representation.IsSmi() && (shift_size >= 1)) {
3443 shift_size -= kSmiTagSize; 3458 shift_size -= kSmiTagSize;
3444 } 3459 }
3445 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); 3460 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
(...skipping 353 matching lines...)
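In BuildFastArrayOperand above, a constant key folds entirely into the displacement as ((constant_value + additional_index) << shift_size) + offset, with the 0xF0000000 mask aborting on indices that could overflow once shifted. A worked instance with assumed values:

    // For 4-byte elements shift_size is 2, so key 3 with additional_index 1
    // and header offset 8 lands ((3 + 1) << 2) + 8 = 24 bytes past the base.
    int ArrayElementDisplacement(int constant_key, int additional_index,
                                 int shift_size, int header_offset) {
      return ((constant_key + additional_index) << shift_size) + header_offset;
    }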
3799 ASSERT(instr->value()->Equals(instr->result())); 3814 ASSERT(instr->value()->Equals(instr->result()));
3800 Representation r = instr->hydrogen()->value()->representation(); 3815 Representation r = instr->hydrogen()->value()->representation();
3801 3816
3802 CpuFeatureScope scope(masm(), SSE2); 3817 CpuFeatureScope scope(masm(), SSE2);
3803 if (r.IsDouble()) { 3818 if (r.IsDouble()) {
3804 XMMRegister scratch = xmm0; 3819 XMMRegister scratch = xmm0;
3805 XMMRegister input_reg = ToDoubleRegister(instr->value()); 3820 XMMRegister input_reg = ToDoubleRegister(instr->value());
3806 __ xorps(scratch, scratch); 3821 __ xorps(scratch, scratch);
3807 __ subsd(scratch, input_reg); 3822 __ subsd(scratch, input_reg);
3808 __ pand(input_reg, scratch); 3823 __ pand(input_reg, scratch);
3809 } else if (r.IsInteger32()) { 3824 } else if (r.IsSmiOrInteger32()) {
3810 EmitIntegerMathAbs(instr); 3825 EmitIntegerMathAbs(instr);
3811 } else { // Tagged case. 3826 } else { // Tagged case.
3812 DeferredMathAbsTaggedHeapNumber* deferred = 3827 DeferredMathAbsTaggedHeapNumber* deferred =
3813 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); 3828 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
3814 Register input_reg = ToRegister(instr->value()); 3829 Register input_reg = ToRegister(instr->value());
3815 // Smi check. 3830 // Smi check.
3816 __ JumpIfNotSmi(input_reg, deferred->entry()); 3831 __ JumpIfNotSmi(input_reg, deferred->entry());
3817 EmitIntegerMathAbs(instr); 3832 EmitIntegerMathAbs(instr);
3818 __ bind(deferred->exit()); 3833 __ bind(deferred->exit());
3819 } 3834 }
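The double path of DoMathAbs above is branch-free: subsd forms -x in the zeroed scratch register and pand keeps only the bits common to x and -x. The two encodings differ in exactly the sign bit, so the AND clears it. The same bit trick in scalar C++ (a sketch of the idea, not the emitted code):

    #include <cstdint>
    #include <cstring>

    double BitwiseAbs(double x) {
      double neg = 0.0 - x;  // mirrors xorps (zero) + subsd (negate)
      uint64_t xbits, nbits;
      std::memcpy(&xbits, &x, sizeof xbits);
      std::memcpy(&nbits, &neg, sizeof nbits);
      uint64_t rbits = xbits & nbits;  // mirrors pand: sign bit cleared
      double result;
      std::memcpy(&result, &rbits, sizeof result);
      return result;
    }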
(...skipping 501 matching lines...)
4321 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { 4336 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
4322 Register result = ToRegister(instr->result()); 4337 Register result = ToRegister(instr->result());
4323 Register base = ToRegister(instr->base_object()); 4338 Register base = ToRegister(instr->base_object());
4324 __ lea(result, Operand(base, instr->offset())); 4339 __ lea(result, Operand(base, instr->offset()));
4325 } 4340 }
4326 4341
4327 4342
4328 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 4343 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
4329 Representation representation = instr->representation(); 4344 Representation representation = instr->representation();
4330 4345
4331 Register object = ToRegister(instr->object());
4332 HObjectAccess access = instr->hydrogen()->access(); 4346 HObjectAccess access = instr->hydrogen()->access();
4333 int offset = access.offset(); 4347 int offset = access.offset();
4334 4348
4349 if (access.IsExternalMemory()) {
4350 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
4351 MemOperand operand = instr->object()->IsConstantOperand()
4352 ? MemOperand::StaticVariable(
4353 ToExternalReference(LConstantOperand::cast(instr->object())))
4354 : MemOperand(ToRegister(instr->object()), offset);
4355 if (instr->value()->IsConstantOperand()) {
4356 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4357 __ mov(operand, Immediate(ToInteger32(operand_value)));
4358 } else {
4359 Register value = ToRegister(instr->value());
4360 __ mov(operand, value);
4361 }
4362 return;
4363 }
4364
4365 Register object = ToRegister(instr->object());
4335 Handle<Map> transition = instr->transition(); 4366 Handle<Map> transition = instr->transition();
4336 4367
4337 if (FLAG_track_fields && representation.IsSmi()) { 4368 if (FLAG_track_fields && representation.IsSmi()) {
4338 if (instr->value()->IsConstantOperand()) { 4369 if (instr->value()->IsConstantOperand()) {
4339 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); 4370 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4340 if (!IsSmi(operand_value)) { 4371 if (!IsSmi(operand_value)) {
4341 DeoptimizeIf(no_condition, instr->environment()); 4372 DeoptimizeIf(no_condition, instr->environment());
4342 } 4373 }
4343 } 4374 }
4344 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { 4375 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
(...skipping 44 matching lines...)
4389 } 4420 }
4390 4421
4391 // Do the store. 4422 // Do the store.
4392 SmiCheck check_needed = 4423 SmiCheck check_needed =
4393 instr->hydrogen()->value()->IsHeapObject() 4424 instr->hydrogen()->value()->IsHeapObject()
4394 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; 4425 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
4395 4426
4396 Register write_register = object; 4427 Register write_register = object;
4397 if (!access.IsInobject()) { 4428 if (!access.IsInobject()) {
4398 write_register = ToRegister(instr->temp()); 4429 write_register = ToRegister(instr->temp());
4399 __ mov(write_register, 4430 __ mov(write_register, FieldOperand(object, JSObject::kPropertiesOffset));
4400 FieldOperand(object, JSObject::kPropertiesOffset));
4401 } 4431 }
4402 4432
4403 if (instr->value()->IsConstantOperand()) { 4433 if (instr->value()->IsConstantOperand()) {
4404 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); 4434 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4405 if (operand_value->IsRegister()) { 4435 if (operand_value->IsRegister()) {
4406 __ mov(FieldOperand(write_register, offset), ToRegister(operand_value)); 4436 __ mov(FieldOperand(write_register, offset), ToRegister(operand_value));
4407 } else { 4437 } else {
4408 Handle<Object> handle_value = ToHandle(operand_value); 4438 Handle<Object> handle_value = ToHandle(operand_value);
4409 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); 4439 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
4410 __ mov(FieldOperand(write_register, offset), handle_value); 4440 __ mov(FieldOperand(write_register, offset), handle_value);
(...skipping 419 matching lines...)
4830 __ Set(result, Immediate(0)); 4860 __ Set(result, Immediate(0));
4831 4861
4832 PushSafepointRegistersScope scope(this); 4862 PushSafepointRegistersScope scope(this);
4833 __ SmiTag(char_code); 4863 __ SmiTag(char_code);
4834 __ push(char_code); 4864 __ push(char_code);
4835 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); 4865 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
4836 __ StoreToSafepointRegisterSlot(result, eax); 4866 __ StoreToSafepointRegisterSlot(result, eax);
4837 } 4867 }
4838 4868
4839 4869
4840 void LCodeGen::DoStringLength(LStringLength* instr) {
4841 Register string = ToRegister(instr->string());
4842 Register result = ToRegister(instr->result());
4843 __ mov(result, FieldOperand(string, String::kLengthOffset));
4844 }
4845
4846
4847 void LCodeGen::DoStringAdd(LStringAdd* instr) { 4870 void LCodeGen::DoStringAdd(LStringAdd* instr) {
4848 EmitPushTaggedOperand(instr->left()); 4871 EmitPushTaggedOperand(instr->left());
4849 EmitPushTaggedOperand(instr->right()); 4872 EmitPushTaggedOperand(instr->right());
4850 StringAddStub stub(instr->hydrogen()->flags()); 4873 StringAddStub stub(instr->hydrogen()->flags());
4851 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4874 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
4852 } 4875 }
4853 4876
4854 4877
4855 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 4878 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4856 if (CpuFeatures::IsSupported(SSE2)) { 4879 if (CpuFeatures::IsSupported(SSE2)) {
(...skipping 1145 matching lines...)
6002 new(zone()) DeferredAllocate(this, instr); 6025 new(zone()) DeferredAllocate(this, instr);
6003 6026
6004 Register result = ToRegister(instr->result()); 6027 Register result = ToRegister(instr->result());
6005 Register temp = ToRegister(instr->temp()); 6028 Register temp = ToRegister(instr->temp());
6006 6029
6007 // Allocate memory for the object. 6030 // Allocate memory for the object.
6008 AllocationFlags flags = TAG_OBJECT; 6031 AllocationFlags flags = TAG_OBJECT;
6009 if (instr->hydrogen()->MustAllocateDoubleAligned()) { 6032 if (instr->hydrogen()->MustAllocateDoubleAligned()) {
6010 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); 6033 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
6011 } 6034 }
6012 if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { 6035 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
6013 ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace()); 6036 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
6037 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
6014 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); 6038 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
6015 } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) { 6039 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
6040 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
6016 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); 6041 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
6017 } 6042 }
6018 6043
6019 if (instr->size()->IsConstantOperand()) { 6044 if (instr->size()->IsConstantOperand()) {
6020 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 6045 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
6021 __ Allocate(size, result, temp, no_reg, deferred->entry(), flags); 6046 __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
6022 } else { 6047 } else {
6023 Register size = ToRegister(instr->size()); 6048 Register size = ToRegister(instr->size());
6024 __ Allocate(size, result, temp, no_reg, deferred->entry(), flags); 6049 __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
6025 } 6050 }
(...skipping 31 matching lines...)
6057 if (instr->size()->IsRegister()) { 6082 if (instr->size()->IsRegister()) {
6058 Register size = ToRegister(instr->size()); 6083 Register size = ToRegister(instr->size());
6059 ASSERT(!size.is(result)); 6084 ASSERT(!size.is(result));
6060 __ SmiTag(ToRegister(instr->size())); 6085 __ SmiTag(ToRegister(instr->size()));
6061 __ push(size); 6086 __ push(size);
6062 } else { 6087 } else {
6063 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 6088 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
6064 __ push(Immediate(Smi::FromInt(size))); 6089 __ push(Immediate(Smi::FromInt(size)));
6065 } 6090 }
6066 6091
6067 if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { 6092 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
6068 ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace()); 6093 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
6094 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
6069 CallRuntimeFromDeferred( 6095 CallRuntimeFromDeferred(
6070 Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context()); 6096 Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context());
6071 } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) { 6097 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
6098 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
6072 CallRuntimeFromDeferred( 6099 CallRuntimeFromDeferred(
6073 Runtime::kAllocateInOldDataSpace, 1, instr, instr->context()); 6100 Runtime::kAllocateInOldDataSpace, 1, instr, instr->context());
6074 } else { 6101 } else {
6075 CallRuntimeFromDeferred( 6102 CallRuntimeFromDeferred(
6076 Runtime::kAllocateInNewSpace, 1, instr, instr->context()); 6103 Runtime::kAllocateInNewSpace, 1, instr, instr->context());
6077 } 6104 }
6078 __ StoreToSafepointRegisterSlot(result, eax); 6105 __ StoreToSafepointRegisterSlot(result, eax);
6079 } 6106 }
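DoDeferredAllocate smi-tags the size (the runtime presumably expects a tagged argument) and picks the runtime entry from the pretenuring predicates, matching the renamed IsOldPointerSpaceAllocation / IsOldDataSpaceAllocation accessors. The dispatch, sketched with assumed enum names:

    enum AllocationSpace { NEW_SPACE, OLD_POINTER_SPACE, OLD_DATA_SPACE };

    // Mirrors the if/else chain above; each arm calls the matching
    // Runtime::kAllocateIn* entry.
    AllocationSpace SpaceFor(bool old_pointer_space, bool old_data_space) {
      if (old_pointer_space) return OLD_POINTER_SPACE;
      if (old_data_space) return OLD_DATA_SPACE;
      return NEW_SPACE;
    }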
6080 6107
6081 6108
(...skipping 402 matching lines...)
6484 FixedArray::kHeaderSize - kPointerSize)); 6511 FixedArray::kHeaderSize - kPointerSize));
6485 __ bind(&done); 6512 __ bind(&done);
6486 } 6513 }
6487 6514
6488 6515
6489 #undef __ 6516 #undef __
6490 6517
6491 } } // namespace v8::internal 6518 } } // namespace v8::internal
6492 6519
6493 #endif // V8_TARGET_ARCH_IA32 6520 #endif // V8_TARGET_ARCH_IA32