OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 131 matching lines...)
142 void MacroAssembler::RecordWrite(Register object, | 142 void MacroAssembler::RecordWrite(Register object, |
143 int offset, | 143 int offset, |
144 Register value, | 144 Register value, |
145 Register scratch) { | 145 Register scratch) { |
146 // First, check if a write barrier is even needed. The tests below | 146 // First, check if a write barrier is even needed. The tests below |
147 // catch stores of Smis and stores into young gen. | 147 // catch stores of Smis and stores into young gen. |
148 Label done; | 148 Label done; |
149 | 149 |
150 // Skip barrier if writing a smi. | 150 // Skip barrier if writing a smi. |
151 ASSERT_EQ(0, kSmiTag); | 151 ASSERT_EQ(0, kSmiTag); |
152 test(value, Immediate(kSmiTagMask)); | 152 JumpIfSmi(value, &done, Label::kNear); |
153 j(zero, &done, Label::kNear); | |
154 | 153 |
155 InNewSpace(object, value, equal, &done, Label::kNear); | 154 InNewSpace(object, value, equal, &done, Label::kNear); |
156 | 155 |
157 // The offset is relative to a tagged or untagged HeapObject pointer, | 156 // The offset is relative to a tagged or untagged HeapObject pointer, |
158 // so either offset or offset + kHeapObjectTag must be a | 157 // so either offset or offset + kHeapObjectTag must be a |
159 // multiple of kPointerSize. | 158 // multiple of kPointerSize. |
160 ASSERT(IsAligned(offset, kPointerSize) || | 159 ASSERT(IsAligned(offset, kPointerSize) || |
161 IsAligned(offset + kHeapObjectTag, kPointerSize)); | 160 IsAligned(offset + kHeapObjectTag, kPointerSize)); |
162 | 161 |
163 Register dst = scratch; | 162 Register dst = scratch; |
(...skipping 24 matching lines...)
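The new JumpIfSmi call folds the removed test/j pair into a single macro. The helper's body is not part of this diff, but judging from the instructions it replaces, it presumably reads roughly like the following sketch (declaration assumed to live in macro-assembler-ia32.h; the Label::Distance default is an assumption):

// Sketch of the assumed helper, not part of this change: jump to smi_label
// when the value's low tag bit is clear, i.e. the value is a smi.
inline void JumpIfSmi(Register value, Label* smi_label,
                      Label::Distance distance = Label::kFar) {
  test(value, Immediate(kSmiTagMask));  // sets ZF when the smi tag bit is 0
  j(zero, smi_label, distance);         // taken for smis, falls through otherwise
}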
188 | 187 |
189 void MacroAssembler::RecordWrite(Register object, | 188 void MacroAssembler::RecordWrite(Register object, |
190 Register address, | 189 Register address, |
191 Register value) { | 190 Register value) { |
192 // First, check if a write barrier is even needed. The tests below | 191 // First, check if a write barrier is even needed. The tests below |
193 // catch stores of Smis and stores into young gen. | 192 // catch stores of Smis and stores into young gen. |
194 Label done; | 193 Label done; |
195 | 194 |
196 // Skip barrier if writing a smi. | 195 // Skip barrier if writing a smi. |
197 ASSERT_EQ(0, kSmiTag); | 196 ASSERT_EQ(0, kSmiTag); |
198 test(value, Immediate(kSmiTagMask)); | 197 JumpIfSmi(value, &done, Label::kNear); |
199 j(zero, &done); | |
200 | 198 |
201 InNewSpace(object, value, equal, &done); | 199 InNewSpace(object, value, equal, &done); |
202 | 200 |
203 RecordWriteHelper(object, address, value); | 201 RecordWriteHelper(object, address, value); |
204 | 202 |
205 bind(&done); | 203 bind(&done); |
206 | 204 |
207 // Clobber all input registers when running with the debug-code flag | 205 // Clobber all input registers when running with the debug-code flag |
208 // turned on to provoke errors. | 206 // turned on to provoke errors. |
209 if (emit_debug_code()) { | 207 if (emit_debug_code()) { |
(...skipping 147 matching lines...)
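For reference, the smi check both RecordWrite variants rely on comes down to the low tag bit. A minimal standalone sketch of the 2011-era 32-bit encoding (constant names mirror V8's, but the snippet itself is illustrative, not V8 code):

#include <cassert>
#include <cstdint>

int main() {
  const intptr_t kSmiTagSize = 1;
  const intptr_t kSmiTag = 0;
  const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;  // 0x1
  intptr_t smi = 5 << kSmiTagSize;         // 5 stored as 0b1010, low bit == kSmiTag
  intptr_t tagged_ptr = 0x08234560 | 1;    // HeapObject pointers carry a 1 tag
  assert((smi & kSmiTagMask) == kSmiTag);         // test(value, kSmiTagMask) sets ZF: JumpIfSmi taken
  assert((tagged_ptr & kSmiTagMask) != kSmiTag);  // heap object: fall through to the InNewSpace check
  return 0;
}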
357 push(eax); | 355 push(eax); |
358 fnstsw_ax(); | 356 fnstsw_ax(); |
359 sahf(); | 357 sahf(); |
360 pop(eax); | 358 pop(eax); |
361 } | 359 } |
362 } | 360 } |
363 | 361 |
364 | 362 |
365 void MacroAssembler::AbortIfNotNumber(Register object) { | 363 void MacroAssembler::AbortIfNotNumber(Register object) { |
366 Label ok; | 364 Label ok; |
367 test(object, Immediate(kSmiTagMask)); | 365 JumpIfSmi(object, &ok); |
368 j(zero, &ok); | |
369 cmp(FieldOperand(object, HeapObject::kMapOffset), | 366 cmp(FieldOperand(object, HeapObject::kMapOffset), |
370 isolate()->factory()->heap_number_map()); | 367 isolate()->factory()->heap_number_map()); |
371 Assert(equal, "Operand not a number"); | 368 Assert(equal, "Operand not a number"); |
372 bind(&ok); | 369 bind(&ok); |
373 } | 370 } |
374 | 371 |
375 | 372 |
376 void MacroAssembler::AbortIfNotSmi(Register object) { | 373 void MacroAssembler::AbortIfNotSmi(Register object) { |
377 test(object, Immediate(kSmiTagMask)); | 374 test(object, Immediate(kSmiTagMask)); |
378 Assert(equal, "Operand is not a smi"); | 375 Assert(equal, "Operand is not a smi"); |
(...skipping 790 matching lines...)
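Restating AbortIfNotNumber's intent: a JS number is either an immediate smi or a HeapObject whose map word is the heap-number map, which is why the smi case can bail out before the map compare. A rough, illustrative C++ restatement (the names and tag/offset arithmetic are assumptions about the 32-bit layout, not V8 API):

#include <cstdint>

// Sketch only: 'value' is a tagged word. Smis have a clear low bit; for heap
// objects the map is the first field, so FieldOperand(obj, HeapObject::kMapOffset)
// amounts to reading at value - kHeapObjectTag.
bool LooksLikeNumber(uintptr_t value, uintptr_t heap_number_map) {
  const uintptr_t kSmiTagMask = 1, kHeapObjectTag = 1;
  if ((value & kSmiTagMask) == 0) return true;  // JumpIfSmi(object, &ok) path
  uintptr_t map = *reinterpret_cast<const uintptr_t*>(value - kHeapObjectTag);
  return map == heap_number_map;                // cmp against heap_number_map
}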
1169 j(sign, then_label); | 1166 j(sign, then_label); |
1170 bind(&ok); | 1167 bind(&ok); |
1171 } | 1168 } |
1172 | 1169 |
1173 | 1170 |
1174 void MacroAssembler::TryGetFunctionPrototype(Register function, | 1171 void MacroAssembler::TryGetFunctionPrototype(Register function, |
1175 Register result, | 1172 Register result, |
1176 Register scratch, | 1173 Register scratch, |
1177 Label* miss) { | 1174 Label* miss) { |
1178 // Check that the receiver isn't a smi. | 1175 // Check that the receiver isn't a smi. |
1179 test(function, Immediate(kSmiTagMask)); | 1176 JumpIfSmi(function, miss); |
1180 j(zero, miss); | |
1181 | 1177 |
1182 // Check that the function really is a function. | 1178 // Check that the function really is a function. |
1183 CmpObjectType(function, JS_FUNCTION_TYPE, result); | 1179 CmpObjectType(function, JS_FUNCTION_TYPE, result); |
1184 j(not_equal, miss); | 1180 j(not_equal, miss); |
1185 | 1181 |
1186 // Make sure that the function has an instance prototype. | 1182 // Make sure that the function has an instance prototype. |
1187 Label non_instance; | 1183 Label non_instance; |
1188 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset)); | 1184 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset)); |
1189 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype)); | 1185 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype)); |
1190 j(not_zero, &non_instance); | 1186 j(not_zero, &non_instance); |
(...skipping 858 matching lines...)
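TryGetFunctionPrototype now leans on JumpIfSmi for the receiver check before CmpObjectType. That helper's body is not shown in this excerpt either; it presumably performs the usual two-step map load and instance-type compare, roughly as sketched below (the exact instruction and any CmpInstanceType delegation are assumptions):

// Sketch of the assumed helper: load the object's map into 'map', then compare
// the map's instance-type byte against 'type'; the caller's j(not_equal, miss)
// then rejects anything that is not a JS_FUNCTION_TYPE.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}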
2049 | 2045 |
2050 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1, | 2046 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1, |
2051 Register object2, | 2047 Register object2, |
2052 Register scratch1, | 2048 Register scratch1, |
2053 Register scratch2, | 2049 Register scratch2, |
2054 Label* failure) { | 2050 Label* failure) { |
2055 // Check that both objects are not smis. | 2051 // Check that both objects are not smis. |
2056 ASSERT_EQ(0, kSmiTag); | 2052 ASSERT_EQ(0, kSmiTag); |
2057 mov(scratch1, Operand(object1)); | 2053 mov(scratch1, Operand(object1)); |
2058 and_(scratch1, Operand(object2)); | 2054 and_(scratch1, Operand(object2)); |
2059 test(scratch1, Immediate(kSmiTagMask)); | 2055 JumpIfSmi(scratch1, failure); |
2060 j(zero, failure); | |
2061 | 2056 |
2062 // Load instance type for both strings. | 2057 // Load instance type for both strings. |
2063 mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset)); | 2058 mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset)); |
2064 mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset)); | 2059 mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset)); |
2065 movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset)); | 2060 movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset)); |
2066 movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset)); | 2061 movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset)); |
2067 | 2062 |
2068 // Check that both are flat ascii strings. | 2063 // Check that both are flat ascii strings. |
2069 const int kFlatAsciiStringMask = | 2064 const int kFlatAsciiStringMask = |
2070 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; | 2065 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; |
(...skipping 65 matching lines...)
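The both-not-smi check above works because it ANDs the two tagged words before the single JumpIfSmi: the AND's low bit stays set only if both operands are heap objects. A standalone sketch of the bit arithmetic (illustrative constants, not V8 code):

#include <cassert>
#include <cstdint>

int main() {
  const intptr_t kSmiTagMask = 1;          // low tag bit: 0 for smis, 1 for heap objects
  intptr_t heap_a = 0x08234560 | 1;
  intptr_t heap_b = 0x08234870 | 1;
  intptr_t smi    = 42 << 1;
  assert(((heap_a & heap_b) & kSmiTagMask) != 0);  // both heap objects: no jump, go on to the map loads
  assert(((heap_a & smi)    & kSmiTagMask) == 0);  // either operand a smi: JumpIfSmi(scratch1, failure) taken
  return 0;
}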
2136 | 2131 |
2137 // Check that the code was patched as expected. | 2132 // Check that the code was patched as expected. |
2138 ASSERT(masm_.pc_ == address_ + size_); | 2133 ASSERT(masm_.pc_ == address_ + size_); |
2139 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2134 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2140 } | 2135 } |
2141 | 2136 |
2142 | 2137 |
2143 } } // namespace v8::internal | 2138 } } // namespace v8::internal |
2144 | 2139 |
2145 #endif // V8_TARGET_ARCH_IA32 | 2140 #endif // V8_TARGET_ARCH_IA32 |