OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1279 matching lines...)
1290 // Allocate heap number in new space. | 1290 // Allocate heap number in new space. |
1291 // Not using AllocateHeapNumber macro in order to reuse | 1291 // Not using AllocateHeapNumber macro in order to reuse |
1292 // already loaded heap_number_map. | 1292 // already loaded heap_number_map. |
1293 __ AllocateInNewSpace(HeapNumber::kSize, | 1293 __ AllocateInNewSpace(HeapNumber::kSize, |
1294 rax, | 1294 rax, |
1295 rdx, | 1295 rdx, |
1296 no_reg, | 1296 no_reg, |
1297 &allocation_failed, | 1297 &allocation_failed, |
1298 TAG_OBJECT); | 1298 TAG_OBJECT); |
1299 // Set the map. | 1299 // Set the map. |
1300 if (FLAG_debug_code) { | 1300 __ AssertRootValue(heap_number_map, |
1301 __ AbortIfNotRootValue(heap_number_map, | 1301 Heap::kHeapNumberMapRootIndex, |
1302 Heap::kHeapNumberMapRootIndex, | 1302 "HeapNumberMap register clobbered."); |
1303 "HeapNumberMap register clobbered."); | |
1304 } | |
1305 __ movq(FieldOperand(rax, HeapObject::kMapOffset), | 1303 __ movq(FieldOperand(rax, HeapObject::kMapOffset), |
1306 heap_number_map); | 1304 heap_number_map); |
1307 __ cvtqsi2sd(xmm0, rbx); | 1305 __ cvtqsi2sd(xmm0, rbx); |
1308 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); | 1306 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); |
1309 __ Ret(); | 1307 __ Ret(); |
1310 | 1308 |
1311 __ bind(&allocation_failed); | 1309 __ bind(&allocation_failed); |
1312 // We need tagged values in rdx and rax for the following code, | 1310 // We need tagged values in rdx and rax for the following code, |
1313 // not int32 in rax and rcx. | 1311 // not int32 in rax and rcx. |
1314 __ Integer32ToSmi(rax, rcx); | 1312 __ Integer32ToSmi(rax, rcx); |
(...skipping 727 matching lines...)
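Note on the Assert* helpers: this patch replaces the open-coded
"if (FLAG_debug_code) { __ AbortIf...(...); }" blocks with single
AssertRootValue / AssertNotSmi / AssertString calls, which presumably fold
the debug-code guard into the MacroAssembler itself. A minimal sketch of
that shape, assuming the existing emit_debug_code(), CompareRoot and Check
primitives (the actual helper in macro-assembler-x64.cc may differ):

    void MacroAssembler::AssertRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
      // Only emit the comparison in debug-code builds; release builds emit
      // nothing, which is what lets callers drop their own FLAG_debug_code
      // guards.
      if (emit_debug_code()) {
        // Compare |src| against the root-list entry and abort with |message|
        // on mismatch.
        CompareRoot(src, root_value_index);
        Check(equal, message);
      }
    }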
2042 // Check if conversion was successful by converting back and | 2040 // Check if conversion was successful by converting back and |
2043 // comparing to the original double's bits. | 2041 // comparing to the original double's bits. |
2044 __ cvtlsi2sd(xmm1, smi_result); | 2042 __ cvtlsi2sd(xmm1, smi_result); |
2045 __ movq(kScratchRegister, xmm1); | 2043 __ movq(kScratchRegister, xmm1); |
2046 __ cmpq(scratch2, kScratchRegister); | 2044 __ cmpq(scratch2, kScratchRegister); |
2047 __ j(not_equal, on_not_smis); | 2045 __ j(not_equal, on_not_smis); |
2048 __ Integer32ToSmi(first, smi_result); | 2046 __ Integer32ToSmi(first, smi_result); |
2049 | 2047 |
2050 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); | 2048 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); |
2051 __ bind(&first_smi); | 2049 __ bind(&first_smi); |
2052 if (FLAG_debug_code) { | 2050 __ AssertNotSmi(second); |
2053 // Second should be non-smi if we get here. | |
2054 __ AbortIfSmi(second); | |
2055 } | |
2056 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); | 2051 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); |
2057 __ j(not_equal, on_not_smis); | 2052 __ j(not_equal, on_not_smis); |
2058 // Convert second to smi, if possible. | 2053 // Convert second to smi, if possible. |
2059 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); | 2054 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); |
2060 __ movq(scratch2, xmm0); | 2055 __ movq(scratch2, xmm0); |
2061 __ cvttsd2siq(smi_result, xmm0); | 2056 __ cvttsd2siq(smi_result, xmm0); |
2062 __ cvtlsi2sd(xmm1, smi_result); | 2057 __ cvtlsi2sd(xmm1, smi_result); |
2063 __ movq(kScratchRegister, xmm1); | 2058 __ movq(kScratchRegister, xmm1); |
2064 __ cmpq(scratch2, kScratchRegister); | 2059 __ cmpq(scratch2, kScratchRegister); |
2065 __ j(not_equal, on_not_smis); | 2060 __ j(not_equal, on_not_smis); |
(...skipping 3856 matching lines...)
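Note on the conversion check in the hunk above: cvttsd2siq truncates the
double to an integer, cvtlsi2sd converts the (32-bit) result back to a
double, and the cmpq compares the raw bits moved out with movq, so only
values that survive the round trip bit-for-bit are re-tagged as smis. If I
read the mnemonics right, cvtlsi2sd only takes the low 32 bits, so the check
also rules out values outside int32 range. A standalone C++ illustration of
the idea (not V8 code; IsInt32Double is a hypothetical name, and the explicit
NaN and range guards stand in for the "indefinite" sentinel cvttsd2siq
produces for unconvertible inputs):

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    // True if |value| is exactly representable as a 32-bit integer, i.e. a
    // smi candidate: truncate to a 64-bit integer, convert the low 32 bits
    // back to double, and compare raw bit patterns with the original.
    bool IsInt32Double(double value) {
      if (std::isnan(value) ||
          value < static_cast<double>(INT64_MIN) ||
          value >= static_cast<double>(INT64_MAX)) {
        return false;  // cvttsd2siq would yield its sentinel value here.
      }
      int64_t truncated = static_cast<int64_t>(value);           // cvttsd2siq
      double round_trip =
          static_cast<double>(static_cast<int32_t>(truncated));  // cvtlsi2sd
      uint64_t value_bits, round_trip_bits;
      std::memcpy(&value_bits, &value, sizeof value);
      std::memcpy(&round_trip_bits, &round_trip, sizeof round_trip);
      // Comparing bits (the stub's movq + cmpq) also rejects -0.0, whose
      // round trip comes back as +0.0 with a different bit pattern.
      return value_bits == round_trip_bits;
    }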
5922 Label* done, | 5917 Label* done, |
5923 Register elements, | 5918 Register elements, |
5924 Register name, | 5919 Register name, |
5925 Register r0, | 5920 Register r0, |
5926 Register r1) { | 5921 Register r1) { |
5927 ASSERT(!elements.is(r0)); | 5922 ASSERT(!elements.is(r0)); |
5928 ASSERT(!elements.is(r1)); | 5923 ASSERT(!elements.is(r1)); |
5929 ASSERT(!name.is(r0)); | 5924 ASSERT(!name.is(r0)); |
5930 ASSERT(!name.is(r1)); | 5925 ASSERT(!name.is(r1)); |
5931 | 5926 |
5932 // Assert that name contains a string. | 5927 __ AssertString(name); |
5933 if (FLAG_debug_code) __ AbortIfNotString(name); | |
5934 | 5928 |
5935 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset)); | 5929 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset)); |
5936 __ decl(r0); | 5930 __ decl(r0); |
5937 | 5931 |
5938 for (int i = 0; i < kInlinedProbes; i++) { | 5932 for (int i = 0; i < kInlinedProbes; i++) { |
5939 // Compute the masked index: (hash + i + i * i) & mask. | 5933 // Compute the masked index: (hash + i + i * i) & mask. |
5940 __ movl(r1, FieldOperand(name, String::kHashFieldOffset)); | 5934 __ movl(r1, FieldOperand(name, String::kHashFieldOffset)); |
5941 __ shrl(r1, Immediate(String::kHashShift)); | 5935 __ shrl(r1, Immediate(String::kHashShift)); |
5942 if (i > 0) { | 5936 if (i > 0) { |
5943 __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i))); | 5937 __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i))); |
(...skipping 561 matching lines...)
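Note on the inlined probe loop above: the comment gives the index formula,
probe i inspects slot (hash + i + i*i) & mask, where the mask is the capacity
loaded from kCapacityOffset and decremented into r0 (capacities are powers of
two) and the per-attempt offset comes from StringDictionary::GetProbeOffset(i).
A plain C++ restatement of that index computation, with the i + i*i term
written out inline rather than going through GetProbeOffset (ProbeIndex is an
illustrative name, not a V8 function):

    #include <cstdint>

    // Slot examined on probe attempt |i| of the negative lookup:
    // (hash + i + i*i) & mask, per the comment in the generated code.
    uint32_t ProbeIndex(uint32_t hash, uint32_t i, uint32_t capacity) {
      uint32_t mask = capacity - 1;  // the decremented capacity held in r0
      return (hash + i + i * i) & mask;
    }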
6505 #endif | 6499 #endif |
6506 | 6500 |
6507 __ Ret(); | 6501 __ Ret(); |
6508 } | 6502 } |
6509 | 6503 |
6510 #undef __ | 6504 #undef __ |
6511 | 6505 |
6512 } } // namespace v8::internal | 6506 } } // namespace v8::internal |
6513 | 6507 |
6514 #endif // V8_TARGET_ARCH_X64 | 6508 #endif // V8_TARGET_ARCH_X64 |