Chromium Code Reviews

Unified diff: src/x64/code-stubs-x64.cc

Issue 205343013: Introduce andp, notp, orp and xorp for x64 port (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased with bleeding_edge (created 6 years, 9 months ago)
   // Copyright 2013 the V8 project authors. All rights reserved.
   // Redistribution and use in source and binary forms, with or without
   // modification, are permitted provided that the following conditions are
   // met:
   //
   //     * Redistributions of source code must retain the above copyright
   //       notice, this list of conditions and the following disclaimer.
   //     * Redistributions in binary form must reproduce the above
   //       copyright notice, this list of conditions and the following
   //       disclaimer in the documentation and/or other materials provided
(...skipping 1022 matching lines...)
   __ j(less_equal, &try_allocate, Label::kNear);
   __ movp(rbx, rcx);

   __ bind(&try_allocate);

   // Compute the sizes of backing store, parameter map, and arguments object.
   // 1. Parameter map, has 2 extra words containing context and backing store.
   const int kParameterMapHeaderSize =
       FixedArray::kHeaderSize + 2 * kPointerSize;
   Label no_parameter_map;
-  __ xor_(r8, r8);
+  __ xorp(r8, r8);
   __ testp(rbx, rbx);
   __ j(zero, &no_parameter_map, Label::kNear);
   __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
   __ bind(&no_parameter_map);

   // 2. Backing store.
   __ leap(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));

   // 3. Arguments object.
   __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize));
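
For reference, the three-step size computation above is equivalent to the following C++ sketch. The concrete constant values are assumptions for illustration (kParameterMapHeaderSize matches the definition in this chunk; the others stand in for the real V8 constants):

    #include <cstddef>

    // Sketch of the allocation size: rbx holds the mapped parameter count,
    // rcx the argument count, and r8 accumulates the byte size.
    size_t SloppyArgumentsAllocationSize(size_t mapped_count, size_t arg_count) {
      const size_t kPointerSize = 8;                               // x64
      const size_t kFixedArrayHeaderSize = 2 * kPointerSize;       // assumed
      const size_t kParameterMapHeaderSize =
          kFixedArrayHeaderSize + 2 * kPointerSize;                // as above
      const size_t kSloppyArgumentsObjectSize = 5 * kPointerSize;  // assumed
      size_t size = 0;
      // 1. Parameter map, only when there are mapped parameters
      //    (the xorp/testp/leap sequence).
      if (mapped_count > 0)
        size = mapped_count * kPointerSize + kParameterMapHeaderSize;
      // 2. Backing store (the second leap folds add and scale into one lea).
      size += arg_count * kPointerSize + kFixedArrayHeaderSize;
      // 3. The arguments object itself (the addp).
      size += kSloppyArgumentsObjectSize;
      return size;
    }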
(...skipping 778 matching lines...)

   Label miss;
   CheckInputType(masm, rdx, left_, &miss);
   CheckInputType(masm, rax, right_, &miss);

   // Compare two smis.
   Label non_smi, smi_done;
   __ JumpIfNotBothSmi(rax, rdx, &non_smi);
   __ subp(rdx, rax);
   __ j(no_overflow, &smi_done);
-  __ not_(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
+  __ notp(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
   __ bind(&smi_done);
   __ movp(rax, rdx);
   __ ret(0);
   __ bind(&non_smi);

   // The compare stub returns a positive, negative, or zero 64-bit integer
   // value in rax, corresponding to result of comparing the two inputs.
   // NOTICE! This code is only reached after a smi-fast-case check, so
   // it is certain that at least one operand isn't a smi.

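The notp line is the interesting one: only the sign of rdx matters, and when the subtraction overflows, the wrapped result carries the opposite sign, which bitwise NOT restores. NOT yields zero only for an input of -1, and the difference of two tagged smis has its low tag bits clear (it is even), so it can never be -1; that is why "rdx cannot be 0 here". A C++ sketch of the same fast path, using a GCC/Clang builtin for the overflow check:

    #include <cstdint>

    // Sketch of the smi fast-path compare: the sign of the return value
    // reflects left <=> right, and the value is zero only for equality.
    int64_t SmiCompare(int64_t left, int64_t right) {
      int64_t diff;
      if (__builtin_sub_overflow(left, right, &diff)) {  // subp; j(no_overflow)
        diff = ~diff;  // notp: flip the wrong sign bit, result stays nonzero
      }
      return diff;
    }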
(...skipping 1259 matching lines...)
   __ testl(count, Immediate(~(kPointerSize - 1)));
   __ j(zero, &last_bytes, Label::kNear);

   // Copy from edi to esi using rep movs instruction.
   __ movl(kScratchRegister, count);
   __ shr(count, Immediate(kPointerSizeLog2));  // Number of doublewords to copy.
   __ repmovsp();

   // Find number of bytes left.
   __ movl(count, kScratchRegister);
-  __ and_(count, Immediate(kPointerSize - 1));
+  __ andp(count, Immediate(kPointerSize - 1));

   // Check if there are more bytes to copy.
   __ bind(&last_bytes);
   __ testl(count, count);
   __ j(zero, &done, Label::kNear);

   // Copy remaining characters.
   Label loop;
   __ bind(&loop);
   __ movb(kScratchRegister, Operand(src, 0));
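The two masks split the copy: count & ~(kPointerSize - 1) is the word-aligned portion handled by rep movs, and count & (kPointerSize - 1) is the byte tail finished in the loop. A C++ sketch of the same split, with memcpy standing in for the string instruction (non-overlapping buffers assumed):

    #include <cstdint>
    #include <cstring>

    void CopyCharacters(uint8_t* dst, const uint8_t* src, uint32_t count) {
      const uint32_t kPointerSize = 8, kPointerSizeLog2 = 3;  // x64
      uint32_t words = count >> kPointerSizeLog2;   // the shr
      std::memcpy(dst, src, words * kPointerSize);  // the repmovsp
      for (uint32_t i = words * kPointerSize; i < count; i++) {
        dst[i] = src[i];  // the movb tail loop: count & 7 iterations
      }
    }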
(...skipping 708 matching lines...)
   __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

   if (GetCondition() == equal) {
     // For equality we do not care about the sign of the result.
     __ subp(rax, rdx);
   } else {
     Label done;
     __ subp(rdx, rax);
     __ j(no_overflow, &done, Label::kNear);
     // Correct sign of result in case of overflow.
-    __ not_(rdx);
+    __ notp(rdx);
     __ bind(&done);
     __ movp(rax, rdx);
   }
   __ ret(0);

   __ bind(&miss);
   GenerateMiss(masm);
 }

(...skipping 88 matching lines...)
   Label miss;
   Condition cond = masm->CheckEitherSmi(left, right, tmp1);
   __ j(cond, &miss, Label::kNear);

   // Check that both operands are internalized strings.
   __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
-  __ or_(tmp1, tmp2);
+  __ orp(tmp1, tmp2);
   __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
   __ j(not_zero, &miss, Label::kNear);

   // Internalized strings are compared by identity.
   Label done;
   __ cmpp(left, right);
   // Make sure rax is non-zero. At this point input operands are
   // guaranteed to be non-zero.
   ASSERT(right.is(rax));
   __ j(not_equal, &done, Label::kNear);
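Since kInternalizedTag and kStringTag are both zero (the STATIC_ASSERT), "internalized string" means "no forbidden type bits set", and OR-ing the two instance types lets a single testb validate both operands with one branch: the OR has a forbidden bit set iff at least one operand does. A sketch, with the bit values assumed rather than taken from the real headers:

    #include <cstdint>

    bool BothInternalizedStrings(uint8_t left_type, uint8_t right_type) {
      const uint8_t kIsNotStringMask = 0x80;        // assumed bit layout
      const uint8_t kIsNotInternalizedMask = 0x40;  // assumed bit layout
      return ((left_type | right_type) &
              (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
    }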
(...skipping 69 matching lines...)
   __ j(cond, &miss);

   // Check that both operands are strings. This leaves the instance
   // types loaded in tmp1 and tmp2.
   __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   __ movp(tmp3, tmp1);
   STATIC_ASSERT(kNotStringTag != 0);
-  __ or_(tmp3, tmp2);
+  __ orp(tmp3, tmp2);
   __ testb(tmp3, Immediate(kIsNotStringMask));
   __ j(not_zero, &miss);

   // Fast check for identical strings.
   Label not_same;
   __ cmpp(left, right);
   __ j(not_equal, &not_same, Label::kNear);
   STATIC_ASSERT(EQUAL == 0);
   STATIC_ASSERT(kSmiTag == 0);
   __ Move(rax, Smi::FromInt(EQUAL));
   __ ret(0);

   // Handle not identical strings.
   __ bind(&not_same);

   // Check that both strings are internalized strings. If they are, we're done
   // because we already know they are not identical. We also know they are both
   // strings.
   if (equality) {
     Label do_compare;
     STATIC_ASSERT(kInternalizedTag == 0);
-    __ or_(tmp1, tmp2);
+    __ orp(tmp1, tmp2);
     __ testb(tmp1, Immediate(kIsNotInternalizedMask));
     __ j(not_zero, &do_compare, Label::kNear);
     // Make sure rax is non-zero. At this point input operands are
     // guaranteed to be non-zero.
     ASSERT(right.is(rax));
     __ ret(0);
     __ bind(&do_compare);
   }
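The early ret(0) paths here and in the internalized-string stub above lean on representation: two distinct internalized strings can never be equal, and the right operand is a tagged heap pointer already sitting in rax, so its guaranteed-nonzero value doubles as the "not equal" result with no extra instruction. Roughly:

    #include <cstdint>

    // Sketch: identity decides equality for internalized strings, and any
    // nonzero return value means "not equal".
    intptr_t InternalizedEqualityResult(uintptr_t left, uintptr_t right) {
      if (left == right) return 0;             // EQUAL
      return static_cast<intptr_t>(right);     // nonzero tagged pointer
    }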

   // Check that both strings are sequential ASCII.
(...skipping 103 matching lines...)
   // undefined value), it guarantees the hash table doesn't contain the
   // property. It's true even if some slots represent deleted properties
   // (their names are the hole value).
   for (int i = 0; i < kInlinedProbes; i++) {
     // r0 points to properties hash.
     // Compute the masked index: (hash + i + i * i) & mask.
     Register index = r0;
     // Capacity is smi 2^n.
     __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
     __ decl(index);
-    __ and_(index,
+    __ andp(index,
             Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

     // Scale the index by multiplying by the entry size.
     ASSERT(NameDictionary::kEntrySize == 3);
     __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

     Register entity_name = r0;
     // Having undefined at this place means the name is not contained.
     ASSERT_EQ(kSmiTagSize, 1);
     __ movp(entity_name, Operand(properties,
(...skipping 50 matching lines...)
   __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
   __ decl(r0);

   for (int i = 0; i < kInlinedProbes; i++) {
     // Compute the masked index: (hash + i + i * i) & mask.
     __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
     __ shrl(r1, Immediate(Name::kHashShift));
     if (i > 0) {
       __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
     }
-    __ and_(r1, r0);
+    __ andp(r1, r0);

     // Scale the index by multiplying by the entry size.
     ASSERT(NameDictionary::kEntrySize == 3);
     __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

     // Check if the key is identical to the name.
     __ cmpp(name, Operand(elements, r1, times_pointer_size,
                           kElementsStartOffset - kHeapObjectTag));
     __ j(equal, done);
   }
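This loop and the negative-lookup probes above share one pattern: capacity is a power of two, so the decremented capacity is a mask and "& mask" is the cheap modulo, while the leap scales the slot index by the three-pointer entry size without a multiply. A sketch of one probe step, written with the quadratic offset the comments quote (the exact formula lives in NameDictionary::GetProbeOffset):

    #include <cstdint>

    uint32_t ProbeSlot(uint32_t hash, uint32_t capacity, uint32_t i) {
      uint32_t mask = capacity - 1;                // capacity is 2^n; the decl
      uint32_t index = (hash + i + i * i) & mask;  // the addl + andp
      return index * 3;  // kEntrySize == 3; leap(r1, [r1 + r1*2])
    }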
(...skipping 40 matching lines...)
   // property. It's true even if some slots represent deleted properties
   // (their names are the null value).
   StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                               kPointerSize);
   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
     // Compute the masked index: (hash + i + i * i) & mask.
     __ movp(scratch, args.GetArgumentOperand(1));
     if (i > 0) {
       __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
     }
-    __ and_(scratch, Operand(rsp, 0));
+    __ andp(scratch, Operand(rsp, 0));

     // Scale the index by multiplying by the entry size.
     ASSERT(NameDictionary::kEntrySize == 3);
     __ leap(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

     // Having undefined at this place means the name is not contained.
     __ movp(scratch, Operand(dictionary_,
                              index_,
                              times_pointer_size,
                              kElementsStartOffset - kHeapObjectTag));
(...skipping 158 matching lines...)

 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
     MacroAssembler* masm,
     OnNoNeedToInformIncrementalMarker on_no_need,
     Mode mode) {
   Label on_black;
   Label need_incremental;
   Label need_incremental_pop_object;

   __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
-  __ and_(regs_.scratch0(), regs_.object());
+  __ andp(regs_.scratch0(), regs_.object());
   __ movp(regs_.scratch1(),
           Operand(regs_.scratch0(),
                   MemoryChunk::kWriteBarrierCounterOffset));
   __ subp(regs_.scratch1(), Immediate(1));
   __ movp(Operand(regs_.scratch0(),
                   MemoryChunk::kWriteBarrierCounterOffset),
           regs_.scratch1());
   __ j(negative, &need_incremental);

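The andp with ~Page::kPageAlignmentMask rounds the object's address down to the start of its page, which is where the MemoryChunk header, and with it the write barrier counter, lives. A sketch of the address arithmetic, with the layout assumed for illustration:

    #include <cstdint>

    // Sketch: pages are (mask + 1)-byte aligned, so clearing the low bits of
    // any object address yields its MemoryChunk header.
    struct MemoryChunk {
      intptr_t write_barrier_counter;  // field offset is illustrative only
    };

    MemoryChunk* ChunkFromAddress(uintptr_t object_address,
                                  uintptr_t page_alignment_mask) {
      return reinterpret_cast<MemoryChunk*>(object_address & ~page_alignment_mask);
    }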
   // Let's look at the color of the object: If it is not black we don't have
(...skipping 417 matching lines...)
   Label no_info;
   // If the feedback vector is the undefined value call an array constructor
   // that doesn't use AllocationSites.
   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
   __ j(equal, &no_info);

   // Only look at the lower 16 bits of the transition info.
   __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
   __ SmiToInteger32(rdx, rdx);
   STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
-  __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
+  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

   __ bind(&no_info);
   GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
 }


 void InternalArrayConstructorStub::GenerateCase(
     MacroAssembler* masm, ElementsKind kind) {
   Label not_zero_case, not_one_case;
(...skipping 53 matching lines...)
     __ Check(equal, kUnexpectedInitialMapForArrayFunction);
   }

   // Figure out the right elements kind
   __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

   // Load the map's "bit field 2" into |result|. We only need the first byte,
   // but the following masking takes care of that anyway.
   __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
   // Retrieve elements_kind from bit field 2.
-  __ and_(rcx, Immediate(Map::kElementsKindMask));
+  __ andp(rcx, Immediate(Map::kElementsKindMask));
   __ shr(rcx, Immediate(Map::kElementsKindShift));

   if (FLAG_debug_code) {
     Label done;
     __ cmpl(rcx, Immediate(FAST_ELEMENTS));
     __ j(equal, &done);
     __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
     __ Assert(equal,
               kInvalidElementsKindForInternalArrayOrInternalPackedArray);
     __ bind(&done);
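Note the mask-then-shift order: Map::kElementsKindMask covers the field at its in-register position, so the code masks first and shifts down second; in the AllocationSite chunk above, ElementsKindBits::kShift == 0, so the shift vanishes and the mask alone suffices. A sketch with assumed field parameters, not the real Map constants:

    #include <cstdint>

    uint8_t ElementsKindFromBitField2(uint8_t bit_field2) {
      const unsigned kShift = 3;               // assumed
      const unsigned kMask = 0x1Fu << kShift;  // assumed in-place mask
      return static_cast<uint8_t>((bit_field2 & kMask) >> kShift);
    }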
(...skipping 184 matching lines...)
                            return_value_operand,
                            NULL);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
