Chromium Code Reviews

Unified diff: src/x64/macro-assembler-x64.cc

Issue 6677044: Use a class field instead of global FLAG_debug_code in assembler and … (Closed)
Patch Set: Created 9 years, 9 months ago
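
Note before the diff: this patch reads a per-assembler field instead of the process-global flag. A rough sketch of the mechanism being assumed here (illustrative names only; the real declarations live in the assembler sources changed alongside this file, e.g. src/x64/assembler-x64.cc and its header):

    // Hypothetical sketch, not the actual V8 declarations.
    class Assembler {
     public:
      // Capture --debug-code once, when the assembler is constructed, so
      // emitting debug checks becomes a per-instance decision.
      Assembler() : emit_debug_code_(FLAG_debug_code) {}

      bool emit_debug_code() const { return emit_debug_code_; }
      void set_emit_debug_code(bool value) { emit_debug_code_ = value; }

     private:
      bool emit_debug_code_;
    };

Every if (FLAG_debug_code) guard below accordingly becomes if (emit_debug_code()): the same semantics by default, but now overridable per assembler instance.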
--- src/x64/macro-assembler-x64.cc (old)
+++ src/x64/macro-assembler-x64.cc (new)
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 74 matching lines...)
                                  Heap::RootListIndex index) {
   ASSERT(!with.AddressUsesRegister(kScratchRegister));
   LoadRoot(kScratchRegister, index);
   cmpq(with, kScratchRegister);
 }


 void MacroAssembler::RecordWriteHelper(Register object,
                                        Register addr,
                                        Register scratch) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     // Check that the object is not in new space.
     NearLabel not_in_new_space;
     InNewSpace(object, scratch, not_equal, &not_in_new_space);
     Abort("new-space object passed to RecordWriteHelper");
     bind(&not_in_new_space);
   }

   // Compute the page start address from the heap object pointer, and reuse
   // the 'object' register for it.
   and_(object, Immediate(~Page::kPageAlignmentMask));
(...skipping 23 matching lines...)
   JumpIfSmi(value, &done);

   RecordWriteNonSmi(object, offset, value, index);
   bind(&done);

   // Clobber all input registers when running with the debug-code flag
   // turned on to provoke errors. This clobbering repeats the
   // clobbering done inside RecordWriteNonSmi but it's necessary to
   // avoid having the fast case for smis leave the registers
   // unchanged.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
   }
 }


 void MacroAssembler::RecordWrite(Register object,
                                  Register address,
                                  Register value) {
   // The compiled code assumes that record write doesn't change the
   // context register, so we check that none of the clobbered
   // registers are rsi.
   ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

   // First, check if a write barrier is even needed. The tests below
   // catch stores of smis and stores into the young generation.
   Label done;
   JumpIfSmi(value, &done);

   InNewSpace(object, value, equal, &done);

   RecordWriteHelper(object, address, value);

   bind(&done);

   // Clobber all input registers when running with the debug-code flag
   // turned on to provoke errors.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
   }
 }


 void MacroAssembler::RecordWriteNonSmi(Register object,
                                        int offset,
                                        Register scratch,
                                        Register index) {
   Label done;

-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     NearLabel okay;
     JumpIfNotSmi(object, &okay);
     Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
     bind(&okay);

     if (offset == 0) {
       // index must be int32.
       Register tmp = index.is(rax) ? rbx : rax;
       push(tmp);
       movl(tmp, index);
(...skipping 23 matching lines...)
                           index,
                           times_pointer_size,
                           FixedArray::kHeaderSize));
   }
   RecordWriteHelper(object, dst, scratch);

   bind(&done);

   // Clobber all input registers when running with the debug-code flag
   // turned on to provoke errors.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
   }
 }

 void MacroAssembler::Assert(Condition cc, const char* msg) {
-  if (FLAG_debug_code) Check(cc, msg);
+  if (emit_debug_code()) Check(cc, msg);
 }


 void MacroAssembler::AssertFastElements(Register elements) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     NearLabel ok;
     CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
     j(equal, &ok);
     CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                 Heap::kFixedCOWArrayMapRootIndex);
     j(equal, &ok);
     Abort("JSObject with fast elements map has slow elements");
     bind(&ok);
   }
(...skipping 450 matching lines...)
     return kScratchRegister;
   }
   if (value == 1) {
     return kSmiConstantRegister;
   }
   LoadSmiConstant(kScratchRegister, source);
   return kScratchRegister;
 }

 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(dst,
          reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
          RelocInfo::NONE);
     cmpq(dst, kSmiConstantRegister);
     if (allow_stub_calls()) {
       Assert(equal, "Uninitialized kSmiConstantRegister");
     } else {
       NearLabel ok;
       j(equal, &ok);
       int3();
(...skipping 48 matching lines...)
 void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
   ASSERT_EQ(0, kSmiTag);
   if (!dst.is(src)) {
     movl(dst, src);
   }
   shl(dst, Immediate(kSmiShift));
 }


 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     testb(dst, Immediate(0x01));
     NearLabel ok;
     j(zero, &ok);
     if (allow_stub_calls()) {
       Abort("Integer32ToSmiField writing to non-smi location");
     } else {
       int3();
     }
     bind(&ok);
   }
(...skipping 41 matching lines...)
   movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
 }


 void MacroAssembler::SmiTest(Register src) {
   testq(src, src);
 }


 void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     AbortIfNotSmi(smi1);
     AbortIfNotSmi(smi2);
   }
   cmpq(smi1, smi2);
 }


 void MacroAssembler::SmiCompare(Register dst, Smi* src) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     AbortIfNotSmi(dst);
   }
   Cmp(dst, src);
 }


 void MacroAssembler::Cmp(Register dst, Smi* src) {
   ASSERT(!dst.is(kScratchRegister));
   if (src->value() == 0) {
     testq(dst, dst);
   } else {
     Register constant_reg = GetSmiConstant(src);
     cmpq(dst, constant_reg);
   }
 }


 void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     AbortIfNotSmi(dst);
     AbortIfNotSmi(src);
   }
   cmpq(dst, src);
 }


 void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     AbortIfNotSmi(dst);
     AbortIfNotSmi(src);
   }
   cmpq(dst, src);
 }


 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     AbortIfNotSmi(dst);
   }
   cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
 }


 void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
   // The Operand cannot use the smi register.
   Register smi_reg = GetSmiConstant(src);
   ASSERT(!dst.AddressUsesRegister(smi_reg));
(...skipping 1122 matching lines...)
 }


 void MacroAssembler::EnterFrame(StackFrame::Type type) {
   push(rbp);
   movq(rbp, rsp);
   push(rsi);  // Context.
   Push(Smi::FromInt(type));
   movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
   push(kScratchRegister);
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(kScratchRegister,
          Factory::undefined_value(),
          RelocInfo::EMBEDDED_OBJECT);
     cmpq(Operand(rsp, 0), kScratchRegister);
     Check(not_equal, "code object not properly patched");
   }
 }


 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Move(kScratchRegister, Smi::FromInt(type));
     cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
     Check(equal, "stack frame types must match");
   }
   movq(rsp, rbp);
   pop(rbp);
 }


 void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
(...skipping 129 matching lines...)
                                             Register scratch,
                                             Label* miss) {
   Label same_contexts;

   ASSERT(!holder_reg.is(scratch));
   ASSERT(!scratch.is(kScratchRegister));
   // Load current lexical context from the stack frame.
   movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

   // When generating debug code, make sure the lexical context is set.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     cmpq(scratch, Immediate(0));
     Check(not_equal, "we should not have an empty lexical context");
   }
   // Load the global context of the current context.
   int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
   movq(scratch, FieldOperand(scratch, offset));
   movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

   // Check the context is a global context.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
         Factory::global_context_map());
     Check(equal, "JSGlobalObject::global_context should be a global context.");
   }

   // Check if both contexts are the same.
   cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
   j(equal, &same_contexts);

   // Compare security tokens.
   // Check that the security token in the calling global object is
   // compatible with the security token in the receiving global
   // object.

   // Check the context is a global context.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     // Preserve original value of holder_reg.
     push(holder_reg);
     movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
     CompareRoot(holder_reg, Heap::kNullValueRootIndex);
     Check(not_equal, "JSGlobalProxy::context() should not be null.");

     // Read the first word and compare to global_context_map(),
     movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
     CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
     Check(equal, "JSGlobalObject::global_context should be a global context.");
(...skipping 40 matching lines...)
     load_rax(new_space_allocation_top);
   } else {
     movq(kScratchRegister, new_space_allocation_top);
     movq(result, Operand(kScratchRegister, 0));
   }
 }


 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                                Register scratch) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     testq(result_end, Immediate(kObjectAlignmentMask));
     Check(zero, "Unaligned allocation in new space");
   }

   ExternalReference new_space_allocation_top =
       ExternalReference::new_space_allocation_top_address();

   // Update new top.
   if (result_end.is(rax)) {
     // rax can be stored directly to a memory location.
(...skipping 10 matching lines...)
 }


 void MacroAssembler::AllocateInNewSpace(int object_size,
                                         Register result,
                                         Register result_end,
                                         Register scratch,
                                         Label* gc_required,
                                         AllocationFlags flags) {
   if (!FLAG_inline_new) {
-    if (FLAG_debug_code) {
+    if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
       if (result_end.is_valid()) {
         movl(result_end, Immediate(0x7191));
       }
       if (scratch.is_valid()) {
         movl(scratch, Immediate(0x7291));
       }
     }
     jmp(gc_required);
(...skipping 37 matching lines...)

 void MacroAssembler::AllocateInNewSpace(int header_size,
                                         ScaleFactor element_size,
                                         Register element_count,
                                         Register result,
                                         Register result_end,
                                         Register scratch,
                                         Label* gc_required,
                                         AllocationFlags flags) {
   if (!FLAG_inline_new) {
-    if (FLAG_debug_code) {
+    if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
       movl(result_end, Immediate(0x7191));
       if (scratch.is_valid()) {
         movl(scratch, Immediate(0x7291));
       }
       // Register element_count is not modified by the function.
     }
     jmp(gc_required);
     return;
(...skipping 26 matching lines...)
 }


 void MacroAssembler::AllocateInNewSpace(Register object_size,
                                         Register result,
                                         Register result_end,
                                         Register scratch,
                                         Label* gc_required,
                                         AllocationFlags flags) {
   if (!FLAG_inline_new) {
-    if (FLAG_debug_code) {
+    if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
       movl(result_end, Immediate(0x7191));
       if (scratch.is_valid()) {
         movl(scratch, Immediate(0x7291));
       }
       // object_size is left unchanged by this function.
     }
     jmp(gc_required);
     return;
(...skipping 186 matching lines...)
     // Slot is in the current function context. Move it into the
     // destination register in case we store into it (the write barrier
     // cannot be allowed to destroy the context in rsi).
     movq(dst, rsi);
   }

   // We should not have found a 'with' context by walking the context chain
   // (i.e., the static scope chain and runtime context chain do not agree).
   // A variable occurring in such a scope should have slot type LOOKUP and
   // not CONTEXT.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
     Check(equal, "Yo dawg, I heard you liked function contexts "
                  "so I put function contexts in all your contexts");
   }
 }


 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
   // Load the global or builtins object from the current context.
   movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   // Load the global context from the global or builtins object.
   movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
   // Load the function from the global context.
   movq(function, Operand(function, Context::SlotOffset(index)));
 }


 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                   Register map) {
   // Load the initial map. The global functions all have initial maps.
   movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Label ok, fail;
     CheckMap(map, Factory::meta_map(), &fail, false);
     jmp(&ok);
     bind(&fail);
     Abort("Global functions must have initial map");
     bind(&ok);
   }
 }


(...skipping 34 matching lines...)

 void MacroAssembler::CallCFunction(ExternalReference function,
                                    int num_arguments) {
   movq(rax, function);
   CallCFunction(rax, num_arguments);
 }


 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
   // Check stack alignment.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     CheckStackAlignment();
   }

   call(function);
   ASSERT(OS::ActivationFrameAlignment() != 0);
   ASSERT(num_arguments >= 0);
   int argument_slots_on_stack =
       ArgumentStackSlotsForCFunctionCall(num_arguments);
   movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
 }
(...skipping 13 matching lines...)
   CPU::FlushICache(address_, size_);

   // Check that the code was patched as expected.
   ASSERT(masm_.pc_ == address_ + size_);
   ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
 }

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
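
With the flag captured per instance, debug checks can in principle be toggled for one code generator without affecting others. A hypothetical usage sketch (set_emit_debug_code and the two-argument MacroAssembler constructor are assumptions, not shown in this CL; kBufferSize is a placeholder for whatever buffer size the caller uses):

    // Hypothetical: suppress debug checks for this one assembler,
    // regardless of the global --debug-code setting.
    MacroAssembler masm(NULL, kBufferSize);
    masm.set_emit_debug_code(false);
    masm.RecordWrite(rbx, rcx, rdx);  // emits no zap/Abort sequences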
