OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 12 matching lines...) |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #if defined(V8_TARGET_ARCH_X64) | 30 #if defined(V8_TARGET_ARCH_X64) |
31 | 31 |
32 #include "codegen.h" | 32 #include "codegen.h" |
| 33 #include "macro-assembler.h" |
33 | 34 |
34 namespace v8 { | 35 namespace v8 { |
35 namespace internal { | 36 namespace internal { |
36 | 37 |
37 // ------------------------------------------------------------------------- | 38 // ------------------------------------------------------------------------- |
38 // Platform-specific RuntimeCallHelper functions. | 39 // Platform-specific RuntimeCallHelper functions. |
39 | 40 |
40 void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const { | 41 void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const { |
41 masm->EnterFrame(StackFrame::INTERNAL); | 42 masm->EnterFrame(StackFrame::INTERNAL); |
42 ASSERT(!masm->has_frame()); | 43 ASSERT(!masm->has_frame()); |
(...skipping 93 matching lines...) |
136 | 137 |
137 CodeDesc desc; | 138 CodeDesc desc; |
138 masm.GetCode(&desc); | 139 masm.GetCode(&desc); |
139 OS::ProtectCode(buffer, actual_size); | 140 OS::ProtectCode(buffer, actual_size); |
140 // Call the function from C++ through this pointer. | 141 // Call the function from C++ through this pointer. |
141 return FUNCTION_CAST<ModuloFunction>(buffer); | 142 return FUNCTION_CAST<ModuloFunction>(buffer); |
142 } | 143 } |
143 | 144 |
144 #endif | 145 #endif |
145 | 146 |
| 147 #undef __ |
| 148 |
| 149 // ------------------------------------------------------------------------- |
| 150 // Code generators |
| 151 |
| 152 #define __ ACCESS_MASM(masm) |
| 153 |
| 154 void ElementsTransitionGenerator::GenerateSmiOnlyToObject( |
| 155 MacroAssembler* masm) { |
| 156 // ----------- S t a t e ------------- |
| 157 // -- rax : value |
| 158 // -- rbx : target map |
| 159 // -- rcx : key |
| 160 // -- rdx : receiver |
| 161 // -- rsp[0] : return address |
| 162 // ----------------------------------- |
| 163 // Set transitioned map. |
| 164 __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx); |
| 165 __ RecordWriteField(rdx, |
| 166 HeapObject::kMapOffset, |
| 167 rbx, |
| 168 rdi, |
| 169 kDontSaveFPRegs, |
| 170 EMIT_REMEMBERED_SET, |
| 171 OMIT_SMI_CHECK); |
| 172 } |
| 173 |
| 174 |
| 175 void ElementsTransitionGenerator::GenerateSmiOnlyToDouble( |
| 176 MacroAssembler* masm, Label* fail) { |
| 177 // ----------- S t a t e ------------- |
| 178 // -- rax : value |
| 179 // -- rbx : target map |
| 180 // -- rcx : key |
| 181 // -- rdx : receiver |
| 182 // -- rsp[0] : return address |
| 183 // ----------------------------------- |
| 184 // The fail label is not actually used since we do not allocate. |
| 185 |
| 186 // Set transitioned map. |
| 187 __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx); |
| 188 __ RecordWriteField(rdx, |
| 189 HeapObject::kMapOffset, |
| 190 rbx, |
| 191 rdi, |
| 192 kDontSaveFPRegs, |
| 193 EMIT_REMEMBERED_SET, |
| 194 OMIT_SMI_CHECK); |
| 195 // Set backing store's map. |
| 196 __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset)); |
| 197 __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); |
| 198 __ movq(FieldOperand(r8, HeapObject::kMapOffset), rdi); |
| 199 |
| 200 // Convert smis to doubles and holes to hole NaNs. Since FixedArray and |
| 201 // FixedDoubleArray do not differ in size, we do not allocate a new array. |
| 202 STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset); |
| 203 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); |
| 204 __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset)); |
| 205 // r8 : elements array |
| 206 // r9 : elements array length |
| 207 Label loop, entry, convert_hole; |
| 208 __ movq(r15, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE); |
| 209 // r15: the-hole NaN |
| 210 __ jmp(&entry); |
| 211 __ bind(&loop); |
| 212 __ decq(r9); |
| 213 __ movq(rbx, |
| 214 FieldOperand(r8, r9, times_8, FixedArray::kHeaderSize)); |
| 215 // r9 : current element's index |
| 216 // rbx: current element (smi-tagged) |
| 217 __ JumpIfNotSmi(rbx, &convert_hole); |
| 218 __ SmiToInteger32(rbx, rbx); |
| 219 __ cvtlsi2sd(xmm0, rbx); |
| 220 __ movsd(FieldOperand(r8, r9, times_8, FixedDoubleArray::kHeaderSize), |
| 221 xmm0); |
| 222 __ jmp(&entry); |
| 223 __ bind(&convert_hole); |
| 224 __ movq(FieldOperand(r8, r9, times_8, FixedDoubleArray::kHeaderSize), r15); |
| 225 __ bind(&entry); |
| 226 __ testq(r9, r9); |
| 227 __ j(not_zero, &loop); |
| 228 } |
| 229 |
| 230 |
| 231 void ElementsTransitionGenerator::GenerateDoubleToObject( |
| 232 MacroAssembler* masm, Label* fail) { |
| 233 // ----------- S t a t e ------------- |
| 234 // -- rax : value |
| 235 // -- rbx : target map |
| 236 // -- rcx : key |
| 237 // -- rdx : receiver |
| 238 // -- rsp[0] : return address |
| 239 // ----------------------------------- |
| 240 Label loop, entry, convert_hole, gc_required; |
| 241 __ push(rax); |
| 242 |
| 243 __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset)); |
| 244 __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset)); |
| 245 // r8 : source FixedDoubleArray |
| 246 // r9 : number of elements |
| 247 __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize)); |
| 248 __ AllocateInNewSpace(rdi, rax, r14, r15, &gc_required, TAG_OBJECT); |
| 249 // rax: destination FixedArray |
| 250 __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex); |
| 251 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdi); |
| 252 __ Integer32ToSmi(r14, r9); |
| 253 __ movq(FieldOperand(rax, FixedArray::kLengthOffset), r14); |
| 254 |
| 255 // Prepare for conversion loop. |
| 256 __ movq(rsi, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE); |
| 257 __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex); |
| 258 // rsi: the-hole NaN |
| 259 // rdi: pointer to the-hole |
| 260 __ jmp(&entry); |
| 261 |
| 262 // Call into runtime if GC is required. |
| 263 __ bind(&gc_required); |
| 264 __ pop(rax); |
| 265 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 266 __ jmp(fail); |
| 267 |
| 268 // Box doubles into heap numbers. |
| 269 __ bind(&loop); |
| 270 __ decq(r9); |
| 271 __ movq(r14, FieldOperand(r8, |
| 272 r9, |
| 273 times_pointer_size, |
| 274 FixedDoubleArray::kHeaderSize)); |
| 275 // r9 : current element's index |
| 276 // r14: current element |
| 277 __ cmpq(r14, rsi); |
| 278 __ j(equal, &convert_hole); |
| 279 |
| 280 // Non-hole double, copy value into a heap number. |
| 281 __ AllocateHeapNumber(r11, r15, &gc_required); |
| 282 // r11: new heap number |
| 283 __ movq(FieldOperand(r11, HeapNumber::kValueOffset), r14); |
| 284 __ movq(FieldOperand(rax, |
| 285 r9, |
| 286 times_pointer_size, |
| 287 FixedArray::kHeaderSize), |
| 288 r11); |
| 289 __ movq(r15, r9); |
| 290 __ RecordWriteArray(rax, |
| 291 r11, |
| 292 r15, |
| 293 kDontSaveFPRegs, |
| 294 EMIT_REMEMBERED_SET, |
| 295 OMIT_SMI_CHECK); |
| 296 __ jmp(&entry, Label::kNear); |
| 297 |
| 298 // Replace the-hole NaN with the-hole pointer. |
| 299 __ bind(&convert_hole); |
| 300 __ movq(FieldOperand(rax, |
| 301 r9, |
| 302 times_pointer_size, |
| 303 FixedArray::kHeaderSize), |
| 304 rdi); |
| 305 |
| 306 __ bind(&entry); |
| 307 __ testq(r9, r9); |
| 308 __ j(not_zero, &loop); |
| 309 |
| 310 // Set transitioned map. |
| 311 __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx); |
| 312 __ RecordWriteField(rdx, |
| 313 HeapObject::kMapOffset, |
| 314 rbx, |
| 315 rdi, |
| 316 kDontSaveFPRegs, |
| 317 EMIT_REMEMBERED_SET, |
| 318 OMIT_SMI_CHECK); |
| 319 // Replace receiver's backing store with newly created and filled FixedArray. |
| 320 __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rax); |
| 321 __ RecordWriteField(rdx, |
| 322 JSObject::kElementsOffset, |
| 323 rax, |
| 324 rdi, |
| 325 kDontSaveFPRegs, |
| 326 EMIT_REMEMBERED_SET, |
| 327 OMIT_SMI_CHECK); |
| 328 __ pop(rax); |
| 329 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 330 } |
146 | 331 |
147 #undef __ | 332 #undef __ |
148 | 333 |
149 } } // namespace v8::internal | 334 } } // namespace v8::internal |
150 | 335 |
151 #endif // V8_TARGET_ARCH_X64 | 336 #endif // V8_TARGET_ARCH_X64 |
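
Note on the hole handling in the new GenerateSmiOnlyToDouble and GenerateDoubleToObject code above: the-hole is represented as a specific NaN bit pattern (kHoleNanInt64), and the generated code compares raw 64-bit values (cmpq against r15/rsi) rather than doing a floating-point compare, because NaN != NaN under IEEE comparison. The standalone C++ sketch below only illustrates that idea; the bit pattern used here is a placeholder, not V8's actual constant.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <limits>

    // Placeholder standing in for V8's kHoleNanInt64 (a reserved NaN payload
    // marking the-hole); the real constant is defined in the V8 sources.
    static const uint64_t kIllustrativeHoleNan = 0x7FF7FFFFDEADBEEFULL;

    // Detect the-hole by comparing raw bits, mirroring the cmpq in the
    // generated code. A double compare would not work, since NaN != NaN.
    static bool IsHole(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      return bits == kIllustrativeHoleNan;
    }

    int main() {
      double hole;
      std::memcpy(&hole, &kIllustrativeHoleNan, sizeof hole);
      std::printf("hole      -> %d\n", IsHole(hole));  // 1
      std::printf("1.5       -> %d\n", IsHole(1.5));   // 0
      // An ordinary quiet NaN has a different payload, so it is not a hole.
      std::printf("quiet NaN -> %d\n",
                  IsHole(std::numeric_limits<double>::quiet_NaN()));  // 0
      return 0;
    }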