Chromium Code Reviews

Unified Diff: src/ic/x64/ic-x64.cc

Issue 1199493002: Revert relanded strong property access CL (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 6 months ago

--- src/ic/x64/ic-x64.cc (before this patch)
+++ src/ic/x64/ic-x64.cc (after this patch)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_X64

 #include "src/codegen.h"
 #include "src/ic/ic.h"
(...skipping 153 matching lines...)
       FieldOperand(map, Map::kBitFieldOffset),
       Immediate((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
   __ j(not_zero, slow);
 }


 // Loads an indexed element from a fast case array.
 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                   Register key, Register elements,
                                   Register scratch, Register result,
-                                  Label* slow, LanguageMode language_mode) {
+                                  Label* slow) {
   // Register use:
   //
   // receiver - holds the receiver on entry.
   //            Unchanged unless 'result' is the same register.
   //
   // key      - holds the smi key on entry.
   //            Unchanged unless 'result' is the same register.
   //
   // result   - holds the result on exit if the load succeeded.
   //            Allowed to be the the same as 'receiver' or 'key'.
   //            Unchanged on bailout so 'receiver' and 'key' can be safely
   //            used by further computation.
   //
   // Scratch registers:
   //
   // elements - holds the elements of the receiver and its prototypes.
   //
   // scratch  - used to hold maps, prototypes, and the loaded value.
   Label check_prototypes, check_next_prototype;
-  Label done, in_bounds, absent;
+  Label done, in_bounds, return_undefined;

   __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset));
   __ AssertFastElements(elements);
   // Check that the key (index) is within bounds.
   __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
   // Unsigned comparison rejects negative indices.
   __ j(below, &in_bounds);

   // Out-of-bounds. Check the prototype chain to see if we can just return
   // 'undefined'.
   __ SmiCompare(key, Smi::FromInt(0));
   __ j(less, slow);  // Negative keys can't take the fast OOB path.
   __ bind(&check_prototypes);
   __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
   __ bind(&check_next_prototype);
   __ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
   // scratch: current prototype
   __ CompareRoot(scratch, Heap::kNullValueRootIndex);
-  __ j(equal, &absent);
+  __ j(equal, &return_undefined);
   __ movp(elements, FieldOperand(scratch, JSObject::kElementsOffset));
   __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
   // elements: elements of current prototype
   // scratch: map of current prototype
   __ CmpInstanceType(scratch, JS_OBJECT_TYPE);
   __ j(below, slow);
   __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
            Immediate((1 << Map::kIsAccessCheckNeeded) |
                      (1 << Map::kHasIndexedInterceptor)));
   __ j(not_zero, slow);
   __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
   __ j(not_equal, slow);
   __ jmp(&check_next_prototype);

-  __ bind(&absent);
-  if (is_strong(language_mode)) {
-    // Strong mode accesses must throw in this case, so call the runtime.
-    __ jmp(slow);
-  } else {
-    __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
-    __ jmp(&done);
-  }
+  __ bind(&return_undefined);
+  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
+  __ jmp(&done);

   __ bind(&in_bounds);
   // Fast case: Do the load.
   SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
   __ movp(scratch, FieldOperand(elements, index.reg, index.scale,
                                 FixedArray::kHeaderSize));
   __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
   // In case the loaded value is the_hole we have to check the prototype chain.
   __ j(equal, &check_prototypes);
   __ Move(result, scratch);
(...skipping 26 matching lines...)
   // bit test is enough.
   STATIC_ASSERT(kNotInternalizedTag != 0);
   __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
            Immediate(kIsNotInternalizedMask));
   __ j(not_zero, not_unique);

   __ bind(&unique);
 }


-void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm,
-                                      LanguageMode language_mode) {
+void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   // The return address is on the stack.
   Label slow, check_name, index_smi, index_name, property_array_property;
   Label probe_dictionary, check_number_dictionary;

   Register receiver = LoadDescriptor::ReceiverRegister();
   Register key = LoadDescriptor::NameRegister();
   DCHECK(receiver.is(rdx));
   DCHECK(key.is(rcx));

   // Check that the key is a smi.
   __ JumpIfNotSmi(key, &check_name);
   __ bind(&index_smi);
   // Now the key is known to be a smi. This place is also jumped to from below
   // where a numeric string is converted to a smi.

   GenerateKeyedLoadReceiverCheck(masm, receiver, rax,
                                  Map::kHasIndexedInterceptor, &slow);

   // Check the receiver's map to see if it has fast elements.
   __ CheckFastElements(rax, &check_number_dictionary);

-  GenerateFastArrayLoad(masm, receiver, key, rax, rbx, rax, &slow,
-                        language_mode);
+  GenerateFastArrayLoad(masm, receiver, key, rax, rbx, rax, &slow);
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
   __ ret(0);

   __ bind(&check_number_dictionary);
   __ SmiToInteger32(rbx, key);
   __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset));

   // Check whether the elements is a number dictionary.
   // rbx: key as untagged int32
   // rax: elements
   __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                  Heap::kHashTableMapRootIndex);
   __ j(not_equal, &slow);
   __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax);
   __ ret(0);

   __ bind(&slow);
   // Slow case: Jump to runtime.
   __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
-  GenerateSlow(masm);
+  GenerateRuntimeGetProperty(masm);

   __ bind(&check_name);
   GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow);

   GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor,
                                  &slow);

   // If the receiver is a fast-case object, check the stub cache. Otherwise
   // probe the dictionary.
   __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
(...skipping 285 matching lines...)
   Label slow;

   __ movp(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(),
                                    JSObject::kPropertiesOffset));
   GenerateDictionaryLoad(masm, &slow, dictionary,
                          LoadDescriptor::NameRegister(), rbx, rdi, rax);
   __ ret(0);

   // Dictionary load failed, go slow (but don't miss).
   __ bind(&slow);
-  GenerateSlow(masm);
+  GenerateRuntimeGetProperty(masm);
 }


 static void LoadIC_PushArgs(MacroAssembler* masm) {
   Register receiver = LoadDescriptor::ReceiverRegister();
   Register name = LoadDescriptor::NameRegister();
   Register slot = LoadDescriptor::SlotRegister();
   Register vector = LoadWithVectorDescriptor::VectorRegister();
   DCHECK(!rdi.is(receiver) && !rdi.is(name) && !rdi.is(slot) &&
          !rdi.is(vector));
(...skipping 16 matching lines...)
   LoadIC_PushArgs(masm);

   // Perform tail call to the entry.
   ExternalReference ref =
       ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
   int arg_count = 4;
   __ TailCallExternalReference(ref, arg_count, 1);
 }


-void LoadIC::GenerateSlow(MacroAssembler* masm) {
+void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
   // The return address is on the stack.
   Register receiver = LoadDescriptor::ReceiverRegister();
   Register name = LoadDescriptor::NameRegister();
   DCHECK(!rbx.is(receiver) && !rbx.is(name));

   __ PopReturnAddressTo(rbx);
   __ Push(receiver);
   __ Push(name);
   __ PushReturnAddressFrom(rbx);

   // Perform tail call to the entry.
-  ExternalReference ref =
-      ExternalReference(IC_Utility(kLoadIC_Slow), masm->isolate());
-  int arg_count = 2;
-  __ TailCallExternalReference(ref, arg_count, 1);
+  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
 }


 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // The return address is on the stack.
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->keyed_load_miss(), 1);

   LoadIC_PushArgs(masm);

   // Perform tail call to the entry.
   ExternalReference ref =
       ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
   int arg_count = 4;
   __ TailCallExternalReference(ref, arg_count, 1);
 }


-void KeyedLoadIC::GenerateSlow(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
   // The return address is on the stack.
   Register receiver = LoadDescriptor::ReceiverRegister();
   Register name = LoadDescriptor::NameRegister();
   DCHECK(!rbx.is(receiver) && !rbx.is(name));

   __ PopReturnAddressTo(rbx);
   __ Push(receiver);
   __ Push(name);
   __ PushReturnAddressFrom(rbx);

   // Perform tail call to the entry.
-  ExternalReference ref =
-      ExternalReference(IC_Utility(kKeyedLoadIC_Slow), masm->isolate());
-  int arg_count = 2;
-  __ TailCallExternalReference(ref, arg_count, 1);
+  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
 }


 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
   // The return address is on the stack.

   // Get the receiver from the stack and probe the stub cache.
   Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
       Code::ComputeHandlerFlags(Code::STORE_IC));
   masm->isolate()->stub_cache()->GenerateProbe(
(...skipping 129 matching lines...)
   Condition cc =
       (check == ENABLE_INLINED_SMI_CHECK)
           ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
           : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
   *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
 }
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_X64
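
For reference, the behavioral difference this diff reverts, as described by the removed branch in GenerateFastArrayLoad ("Strong mode accesses must throw in this case, so call the runtime") and by the GenerateSlow/GenerateRuntimeGetProperty swap: with the strong property access CL applied, an out-of-bounds keyed load in strong mode jumped to the slow path so the runtime could throw, while sloppy-mode code loaded undefined; after the revert, the absent-element path always returns undefined, and the slow paths tail-call Runtime::kGetProperty / Runtime::kKeyedGetProperty again instead of the kLoadIC_Slow / kKeyedLoadIC_Slow IC utilities. The following is a minimal C++ sketch of that control-flow difference, not V8 code; the type and function names here are hypothetical and exist only for illustration.

// A minimal sketch, assuming made-up stand-in types; it mirrors the absent-
// element path of GenerateFastArrayLoad before and after the revert.
enum class LanguageMode { kSloppy, kStrong };
enum class LoadResult { kUndefined, kBailToRuntime };

// Pre-revert behavior: strong mode defers to the runtime, which throws for a
// missing element; every other mode yields undefined.
LoadResult AbsentElementLoadWithStrongMode(LanguageMode mode) {
  if (mode == LanguageMode::kStrong) {
    return LoadResult::kBailToRuntime;  // corresponds to the removed "__ jmp(slow)"
  }
  return LoadResult::kUndefined;  // corresponds to "__ LoadRoot(result, kUndefinedValueRootIndex)"
}

// Post-revert behavior: the LanguageMode parameter is gone and the absent
// path unconditionally produces undefined.
LoadResult AbsentElementLoadReverted() { return LoadResult::kUndefined; }

The same removal of LanguageMode plumbing explains the call-site changes above: GenerateMegamorphic no longer threads a language_mode argument into GenerateFastArrayLoad, and LoadIC/KeyedLoadIC fall back to the plain property-lookup runtime functions.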