OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
(...skipping 153 matching lines...)
164 FieldOperand(map, Map::kBitFieldOffset), | 164 FieldOperand(map, Map::kBitFieldOffset), |
165 Immediate((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit))); | 165 Immediate((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit))); |
166 __ j(not_zero, slow); | 166 __ j(not_zero, slow); |
167 } | 167 } |
168 | 168 |
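For reference, the tail of GenerateKeyedLoadReceiverCheck above bails to the slow path when either the access-check bit or the requested interceptor bit is set in the receiver map's bit field. A minimal, self-contained sketch of that test in plain C++ (the bit position constant is an illustrative stand-in, not the real Map layout):

#include <cstdint>

// Illustrative stand-in for Map::kIsAccessCheckNeeded; the real bit position
// is defined elsewhere in V8 and may differ.
constexpr int kIsAccessCheckNeededBit = 1;

// True when the stub must take the slow path: the receiver either needs an
// access check or has the relevant (named or indexed) interceptor.
bool NeedsSlowPath(uint8_t map_bit_field, int interceptor_bit) {
  const uint8_t mask = static_cast<uint8_t>((1 << kIsAccessCheckNeededBit) |
                                            (1 << interceptor_bit));
  return (map_bit_field & mask) != 0;  // testb + j(not_zero, slow)
}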
169 | 169 |
170 // Loads an indexed element from a fast case array. | 170 // Loads an indexed element from a fast case array. |
171 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | 171 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, |
172 Register key, Register elements, | 172 Register key, Register elements, |
173 Register scratch, Register result, | 173 Register scratch, Register result, |
174 Label* slow) { | 174 Label* slow, LanguageMode language_mode) { |
175 // Register use: | 175 // Register use: |
176 // | 176 // |
177 // receiver - holds the receiver on entry. | 177 // receiver - holds the receiver on entry. |
178 // Unchanged unless 'result' is the same register. | 178 // Unchanged unless 'result' is the same register. |
179 // | 179 // |
180 // key - holds the smi key on entry. | 180 // key - holds the smi key on entry. |
181 // Unchanged unless 'result' is the same register. | 181 // Unchanged unless 'result' is the same register. |
182 // | 182 // |
183 // result - holds the result on exit if the load succeeded. | 183 // result - holds the result on exit if the load succeeded. |
184 // Allowed to be the same as 'receiver' or 'key'. | 184 // Allowed to be the same as 'receiver' or 'key'. |
185 // Unchanged on bailout so 'receiver' and 'key' can be safely | 185 // Unchanged on bailout so 'receiver' and 'key' can be safely |
186 // used by further computation. | 186 // used by further computation. |
187 // | 187 // |
188 // Scratch registers: | 188 // Scratch registers: |
189 // | 189 // |
190 // elements - holds the elements of the receiver and its prototypes. | 190 // elements - holds the elements of the receiver and its prototypes. |
191 // | 191 // |
192 // scratch - used to hold maps, prototypes, and the loaded value. | 192 // scratch - used to hold maps, prototypes, and the loaded value. |
193 Label check_prototypes, check_next_prototype; | 193 Label check_prototypes, check_next_prototype; |
194 Label done, in_bounds, return_undefined; | 194 Label done, in_bounds, absent; |
195 | 195 |
196 __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset)); | 196 __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset)); |
197 __ AssertFastElements(elements); | 197 __ AssertFastElements(elements); |
198 // Check that the key (index) is within bounds. | 198 // Check that the key (index) is within bounds. |
199 __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset)); | 199 __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset)); |
200 // Unsigned comparison rejects negative indices. | 200 // Unsigned comparison rejects negative indices. |
201 __ j(below, &in_bounds); | 201 __ j(below, &in_bounds); |
202 | 202 |
203 // Out-of-bounds. Check the prototype chain to see if we can just return | 203 // Out-of-bounds. Check the prototype chain to see if we can just return |
204 // 'undefined'. | 204 // 'undefined'. |
205 __ SmiCompare(key, Smi::FromInt(0)); | 205 __ SmiCompare(key, Smi::FromInt(0)); |
206 __ j(less, slow); // Negative keys can't take the fast OOB path. | 206 __ j(less, slow); // Negative keys can't take the fast OOB path. |
207 __ bind(&check_prototypes); | 207 __ bind(&check_prototypes); |
208 __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); | 208 __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); |
209 __ bind(&check_next_prototype); | 209 __ bind(&check_next_prototype); |
210 __ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); | 210 __ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); |
211 // scratch: current prototype | 211 // scratch: current prototype |
212 __ CompareRoot(scratch, Heap::kNullValueRootIndex); | 212 __ CompareRoot(scratch, Heap::kNullValueRootIndex); |
213 __ j(equal, &return_undefined); | 213 __ j(equal, &absent); |
214 __ movp(elements, FieldOperand(scratch, JSObject::kElementsOffset)); | 214 __ movp(elements, FieldOperand(scratch, JSObject::kElementsOffset)); |
215 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 215 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
216 // elements: elements of current prototype | 216 // elements: elements of current prototype |
217 // scratch: map of current prototype | 217 // scratch: map of current prototype |
218 __ CmpInstanceType(scratch, JS_OBJECT_TYPE); | 218 __ CmpInstanceType(scratch, JS_OBJECT_TYPE); |
219 __ j(below, slow); | 219 __ j(below, slow); |
220 __ testb(FieldOperand(scratch, Map::kBitFieldOffset), | 220 __ testb(FieldOperand(scratch, Map::kBitFieldOffset), |
221 Immediate((1 << Map::kIsAccessCheckNeeded) | | 221 Immediate((1 << Map::kIsAccessCheckNeeded) | |
222 (1 << Map::kHasIndexedInterceptor))); | 222 (1 << Map::kHasIndexedInterceptor))); |
223 __ j(not_zero, slow); | 223 __ j(not_zero, slow); |
224 __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex); | 224 __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex); |
225 __ j(not_equal, slow); | 225 __ j(not_equal, slow); |
226 __ jmp(&check_next_prototype); | 226 __ jmp(&check_next_prototype); |
227 | 227 |
228 __ bind(&return_undefined); | 228 __ bind(&absent); |
229 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 229 if (is_strong(language_mode)) { |
230 __ jmp(&done); | 230 // Strong mode accesses must throw in this case, so call the runtime. |
| 231 __ jmp(slow); |
| 232 } else { |
| 233 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 234 __ jmp(&done); |
| 235 } |
231 | 236 |
232 __ bind(&in_bounds); | 237 __ bind(&in_bounds); |
233 // Fast case: Do the load. | 238 // Fast case: Do the load. |
234 SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2); | 239 SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2); |
235 __ movp(scratch, FieldOperand(elements, index.reg, index.scale, | 240 __ movp(scratch, FieldOperand(elements, index.reg, index.scale, |
236 FixedArray::kHeaderSize)); | 241 FixedArray::kHeaderSize)); |
237 __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex); | 242 __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex); |
238 // In case the loaded value is the_hole we have to check the prototype chain. | 243 // In case the loaded value is the_hole we have to check the prototype chain. |
239 __ j(equal, &check_prototypes); | 244 __ j(equal, &check_prototypes); |
240 __ Move(result, scratch); | 245 __ Move(result, scratch); |
(...skipping 26 matching lines...)
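The only behavioral change in GenerateFastArrayLoad is the new 'absent' exit: under strong mode the stub bails out to the runtime (which throws) instead of loading undefined. A simplified, self-contained C++ model of the control flow the stub emits; the struct and field names here are stand-ins invented for illustration, not V8 types, and prototype chains are assumed acyclic:

#include <cstddef>
#include <vector>

// Stand-in for the receiver/prototype state the stub inspects.
struct ObjectModel {
  std::vector<int> elements;               // fast elements backing store
  bool access_check_needed = false;        // Map::kIsAccessCheckNeeded
  bool indexed_interceptor = false;        // Map::kHasIndexedInterceptor
  bool is_js_object = true;                // instance type >= JS_OBJECT_TYPE
  const ObjectModel* prototype = nullptr;  // nullptr terminates the chain
};

enum class Outcome { kValue, kUndefined, kSlow };
constexpr int kTheHole = -1;               // stand-in for the_hole sentinel

Outcome ModelFastArrayLoad(const ObjectModel& receiver, long key,
                           bool strong_mode, int* result) {
  if (key >= 0 && key < static_cast<long>(receiver.elements.size())) {
    int value = receiver.elements[static_cast<size_t>(key)];
    if (value != kTheHole) {               // fast in-bounds hit
      *result = value;
      return Outcome::kValue;
    }
    // A hole is handled like an out-of-bounds index: walk the prototypes.
  } else if (key < 0) {
    return Outcome::kSlow;                 // negative keys never go fast OOB
  }
  for (const ObjectModel* proto = receiver.prototype;;
       proto = proto->prototype) {
    if (proto == nullptr) {
      // Element absent on the whole chain.  Strong mode must throw, so the
      // stub now jumps to the slow path instead of returning undefined.
      return strong_mode ? Outcome::kSlow : Outcome::kUndefined;
    }
    if (!proto->is_js_object || proto->access_check_needed ||
        proto->indexed_interceptor || !proto->elements.empty()) {
      return Outcome::kSlow;               // anything unusual -> runtime
    }
  }
}

In the sloppy/strict case the walk only proves that every prototype is an ordinary JSObject with empty fast elements, so returning undefined without consulting them is safe.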
267 // bit test is enough. | 272 // bit test is enough. |
268 STATIC_ASSERT(kNotInternalizedTag != 0); | 273 STATIC_ASSERT(kNotInternalizedTag != 0); |
269 __ testb(FieldOperand(map, Map::kInstanceTypeOffset), | 274 __ testb(FieldOperand(map, Map::kInstanceTypeOffset), |
270 Immediate(kIsNotInternalizedMask)); | 275 Immediate(kIsNotInternalizedMask)); |
271 __ j(not_zero, not_unique); | 276 __ j(not_zero, not_unique); |
272 | 277 |
273 __ bind(&unique); | 278 __ bind(&unique); |
274 } | 279 } |
275 | 280 |
276 | 281 |
277 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { | 282 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm, |
| 283 LanguageMode language_mode) { |
278 // The return address is on the stack. | 284 // The return address is on the stack. |
279 Label slow, check_name, index_smi, index_name, property_array_property; | 285 Label slow, check_name, index_smi, index_name, property_array_property; |
280 Label probe_dictionary, check_number_dictionary; | 286 Label probe_dictionary, check_number_dictionary; |
281 | 287 |
282 Register receiver = LoadDescriptor::ReceiverRegister(); | 288 Register receiver = LoadDescriptor::ReceiverRegister(); |
283 Register key = LoadDescriptor::NameRegister(); | 289 Register key = LoadDescriptor::NameRegister(); |
284 DCHECK(receiver.is(rdx)); | 290 DCHECK(receiver.is(rdx)); |
285 DCHECK(key.is(rcx)); | 291 DCHECK(key.is(rcx)); |
286 | 292 |
287 // Check that the key is a smi. | 293 // Check that the key is a smi. |
288 __ JumpIfNotSmi(key, &check_name); | 294 __ JumpIfNotSmi(key, &check_name); |
289 __ bind(&index_smi); | 295 __ bind(&index_smi); |
290 // Now the key is known to be a smi. This place is also jumped to from below | 296 // Now the key is known to be a smi. This place is also jumped to from below |
291 // where a numeric string is converted to a smi. | 297 // where a numeric string is converted to a smi. |
292 | 298 |
293 GenerateKeyedLoadReceiverCheck(masm, receiver, rax, | 299 GenerateKeyedLoadReceiverCheck(masm, receiver, rax, |
294 Map::kHasIndexedInterceptor, &slow); | 300 Map::kHasIndexedInterceptor, &slow); |
295 | 301 |
296 // Check the receiver's map to see if it has fast elements. | 302 // Check the receiver's map to see if it has fast elements. |
297 __ CheckFastElements(rax, &check_number_dictionary); | 303 __ CheckFastElements(rax, &check_number_dictionary); |
298 | 304 |
299 GenerateFastArrayLoad(masm, receiver, key, rax, rbx, rax, &slow); | 305 GenerateFastArrayLoad(masm, receiver, key, rax, rbx, rax, &slow, |
| 306 language_mode); |
300 Counters* counters = masm->isolate()->counters(); | 307 Counters* counters = masm->isolate()->counters(); |
301 __ IncrementCounter(counters->keyed_load_generic_smi(), 1); | 308 __ IncrementCounter(counters->keyed_load_generic_smi(), 1); |
302 __ ret(0); | 309 __ ret(0); |
303 | 310 |
304 __ bind(&check_number_dictionary); | 311 __ bind(&check_number_dictionary); |
305 __ SmiToInteger32(rbx, key); | 312 __ SmiToInteger32(rbx, key); |
306 __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset)); | 313 __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset)); |
307 | 314 |
308 // Check whether the elements object is a number dictionary. | 315 // Check whether the elements object is a number dictionary. |
309 // rbx: key as untagged int32 | 316 // rbx: key as untagged int32 |
310 // rax: elements | 317 // rax: elements |
311 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 318 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
312 Heap::kHashTableMapRootIndex); | 319 Heap::kHashTableMapRootIndex); |
313 __ j(not_equal, &slow); | 320 __ j(not_equal, &slow); |
314 __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax); | 321 __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax); |
315 __ ret(0); | 322 __ ret(0); |
316 | 323 |
317 __ bind(&slow); | 324 __ bind(&slow); |
318 // Slow case: Jump to runtime. | 325 // Slow case: Jump to runtime. |
319 __ IncrementCounter(counters->keyed_load_generic_slow(), 1); | 326 __ IncrementCounter(counters->keyed_load_generic_slow(), 1); |
320 GenerateRuntimeGetProperty(masm); | 327 GenerateSlow(masm); |
321 | 328 |
322 __ bind(&check_name); | 329 __ bind(&check_name); |
323 GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow); | 330 GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow); |
324 | 331 |
325 GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor, | 332 GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor, |
326 &slow); | 333 &slow); |
327 | 334 |
328 // If the receiver is a fast-case object, check the stub cache. Otherwise | 335 // If the receiver is a fast-case object, check the stub cache. Otherwise |
329 // probe the dictionary. | 336 // probe the dictionary. |
330 __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset)); | 337 __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset)); |
(...skipping 285 matching lines...)
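At this level, KeyedLoadIC::GenerateMegamorphic is a dispatcher over the kind of key and the kind of elements backing store. A simplified, self-contained sketch of that dispatch, with the receiver-level checks (access checks, interceptors) folded into the slow path; all names here are illustrative stand-ins, not V8 types:

// Possible destinations of the megamorphic keyed load dispatch above.
enum class KeyedLoadPath {
  kFastElements,      // smi key, fast elements -> GenerateFastArrayLoad
  kNumberDictionary,  // smi key, hash-table elements -> LoadFromNumberDictionary
  kNamedLookup,       // unique-name key -> stub cache / property dictionary
  kSlow               // everything else -> GenerateSlow (KeyedLoadIC_Slow)
};

struct ReceiverModel {
  bool has_fast_elements = true;
  bool elements_are_number_dictionary = false;
};

KeyedLoadPath DispatchKeyedLoad(const ReceiverModel& receiver, bool key_is_smi,
                                bool key_is_unique_name) {
  if (key_is_smi) {                        // JumpIfNotSmi(key, &check_name)
    if (receiver.has_fast_elements) return KeyedLoadPath::kFastElements;
    if (receiver.elements_are_number_dictionary)
      return KeyedLoadPath::kNumberDictionary;
    return KeyedLoadPath::kSlow;
  }
  // (In the real stub, a name with a cached array index is converted back to
  //  the smi path; that case is omitted here.)
  if (key_is_unique_name) return KeyedLoadPath::kNamedLookup;
  return KeyedLoadPath::kSlow;
}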
616 Label slow; | 623 Label slow; |
617 | 624 |
618 __ movp(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(), | 625 __ movp(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(), |
619 JSObject::kPropertiesOffset)); | 626 JSObject::kPropertiesOffset)); |
620 GenerateDictionaryLoad(masm, &slow, dictionary, | 627 GenerateDictionaryLoad(masm, &slow, dictionary, |
621 LoadDescriptor::NameRegister(), rbx, rdi, rax); | 628 LoadDescriptor::NameRegister(), rbx, rdi, rax); |
622 __ ret(0); | 629 __ ret(0); |
623 | 630 |
624 // Dictionary load failed, go slow (but don't miss). | 631 // Dictionary load failed, go slow (but don't miss). |
625 __ bind(&slow); | 632 __ bind(&slow); |
626 GenerateRuntimeGetProperty(masm); | 633 GenerateSlow(masm); |
627 } | 634 } |
628 | 635 |
629 | 636 |
630 static void LoadIC_PushArgs(MacroAssembler* masm) { | 637 static void LoadIC_PushArgs(MacroAssembler* masm) { |
631 Register receiver = LoadDescriptor::ReceiverRegister(); | 638 Register receiver = LoadDescriptor::ReceiverRegister(); |
632 Register name = LoadDescriptor::NameRegister(); | 639 Register name = LoadDescriptor::NameRegister(); |
633 Register slot = LoadDescriptor::SlotRegister(); | 640 Register slot = LoadDescriptor::SlotRegister(); |
634 Register vector = LoadWithVectorDescriptor::VectorRegister(); | 641 Register vector = LoadWithVectorDescriptor::VectorRegister(); |
635 DCHECK(!rdi.is(receiver) && !rdi.is(name) && !rdi.is(slot) && | 642 DCHECK(!rdi.is(receiver) && !rdi.is(name) && !rdi.is(slot) && |
636 !rdi.is(vector)); | 643 !rdi.is(vector)); |
(...skipping 16 matching lines...)
653 LoadIC_PushArgs(masm); | 660 LoadIC_PushArgs(masm); |
654 | 661 |
655 // Perform tail call to the entry. | 662 // Perform tail call to the entry. |
656 ExternalReference ref = | 663 ExternalReference ref = |
657 ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate()); | 664 ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate()); |
658 int arg_count = 4; | 665 int arg_count = 4; |
659 __ TailCallExternalReference(ref, arg_count, 1); | 666 __ TailCallExternalReference(ref, arg_count, 1); |
660 } | 667 } |
661 | 668 |
662 | 669 |
663 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 670 void LoadIC::GenerateSlow(MacroAssembler* masm) { |
664 // The return address is on the stack. | 671 // The return address is on the stack. |
665 Register receiver = LoadDescriptor::ReceiverRegister(); | 672 Register receiver = LoadDescriptor::ReceiverRegister(); |
666 Register name = LoadDescriptor::NameRegister(); | 673 Register name = LoadDescriptor::NameRegister(); |
667 DCHECK(!rbx.is(receiver) && !rbx.is(name)); | 674 DCHECK(!rbx.is(receiver) && !rbx.is(name)); |
668 | 675 |
669 __ PopReturnAddressTo(rbx); | 676 __ PopReturnAddressTo(rbx); |
670 __ Push(receiver); | 677 __ Push(receiver); |
671 __ Push(name); | 678 __ Push(name); |
672 __ PushReturnAddressFrom(rbx); | 679 __ PushReturnAddressFrom(rbx); |
673 | 680 |
674 // Perform tail call to the entry. | 681 // Perform tail call to the entry. |
675 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 682 ExternalReference ref = |
| 683 ExternalReference(IC_Utility(kLoadIC_Slow), masm->isolate()); |
| 684 int arg_count = 2; |
| 685 __ TailCallExternalReference(ref, arg_count, 1); |
676 } | 686 } |
677 | 687 |
678 | 688 |
679 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 689 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
680 // The return address is on the stack. | 690 // The return address is on the stack. |
681 Counters* counters = masm->isolate()->counters(); | 691 Counters* counters = masm->isolate()->counters(); |
682 __ IncrementCounter(counters->keyed_load_miss(), 1); | 692 __ IncrementCounter(counters->keyed_load_miss(), 1); |
683 | 693 |
684 LoadIC_PushArgs(masm); | 694 LoadIC_PushArgs(masm); |
685 | 695 |
686 // Perform tail call to the entry. | 696 // Perform tail call to the entry. |
687 ExternalReference ref = | 697 ExternalReference ref = |
688 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate()); | 698 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate()); |
689 int arg_count = 4; | 699 int arg_count = 4; |
690 __ TailCallExternalReference(ref, arg_count, 1); | 700 __ TailCallExternalReference(ref, arg_count, 1); |
691 } | 701 } |
692 | 702 |
693 | 703 |
694 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 704 void KeyedLoadIC::GenerateSlow(MacroAssembler* masm) { |
695 // The return address is on the stack. | 705 // The return address is on the stack. |
696 Register receiver = LoadDescriptor::ReceiverRegister(); | 706 Register receiver = LoadDescriptor::ReceiverRegister(); |
697 Register name = LoadDescriptor::NameRegister(); | 707 Register name = LoadDescriptor::NameRegister(); |
698 DCHECK(!rbx.is(receiver) && !rbx.is(name)); | 708 DCHECK(!rbx.is(receiver) && !rbx.is(name)); |
699 | 709 |
700 __ PopReturnAddressTo(rbx); | 710 __ PopReturnAddressTo(rbx); |
701 __ Push(receiver); | 711 __ Push(receiver); |
702 __ Push(name); | 712 __ Push(name); |
703 __ PushReturnAddressFrom(rbx); | 713 __ PushReturnAddressFrom(rbx); |
704 | 714 |
705 // Perform tail call to the entry. | 715 // Perform tail call to the entry. |
706 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 716 ExternalReference ref = |
| 717 ExternalReference(IC_Utility(kKeyedLoadIC_Slow), masm->isolate()); |
| 718 int arg_count = 2; |
| 719 __ TailCallExternalReference(ref, arg_count, 1); |
707 } | 720 } |
708 | 721 |
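LoadIC::GenerateSlow and KeyedLoadIC::GenerateSlow above share the same argument-shuffling pattern: pop the return address, push the two arguments, push the return address back, then tail-call the external reference. A self-contained sketch of that shuffle, using a plain vector as a stand-in for the machine stack and a callback as a stand-in for the tail-called runtime entry:

#include <cstdint>
#include <functional>
#include <vector>

// back() is the top of the stack, matching push/pop on the machine stack.
void ShuffleAndTailCall(std::vector<uint64_t>* stack, uint64_t receiver,
                        uint64_t name,
                        const std::function<void(std::vector<uint64_t>*)>& ref) {
  uint64_t return_address = stack->back();  // PopReturnAddressTo(rbx)
  stack->pop_back();
  stack->push_back(receiver);               // Push(receiver)
  stack->push_back(name);                   // Push(name)
  stack->push_back(return_address);         // PushReturnAddressFrom(rbx)
  ref(stack);                               // TailCallExternalReference(ref, 2, 1)
}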
709 | 722 |
710 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) { | 723 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) { |
711 // The return address is on the stack. | 724 // The return address is on the stack. |
712 | 725 |
713 // Get the receiver from the stack and probe the stub cache. | 726 // Get the receiver from the stack and probe the stub cache. |
714 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | 727 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
715 Code::ComputeHandlerFlags(Code::STORE_IC)); | 728 Code::ComputeHandlerFlags(Code::STORE_IC)); |
716 masm->isolate()->stub_cache()->GenerateProbe( | 729 masm->isolate()->stub_cache()->GenerateProbe( |
(...skipping 129 matching lines...)
846 Condition cc = | 859 Condition cc = |
847 (check == ENABLE_INLINED_SMI_CHECK) | 860 (check == ENABLE_INLINED_SMI_CHECK) |
848 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) | 861 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) |
849 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); | 862 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); |
850 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 863 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
851 } | 864 } |
852 } // namespace internal | 865 } // namespace internal |
853 } // namespace v8 | 866 } // namespace v8 |
854 | 867 |
855 #endif // V8_TARGET_ARCH_X64 | 868 #endif // V8_TARGET_ARCH_X64 |