Chromium Code Reviews

Unified diff: src/a64/ic-a64.cc

Issue 155723005: A64: Synchronize with r19001. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 84 matching lines...)
   __ Tbnz(scratch1, Map::kIsAccessCheckNeeded, miss);
   __ Tbnz(scratch1, Map::kHasNamedInterceptor, miss);
 
   // Check that the properties dictionary is valid.
   __ Ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   __ Ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
   __ JumpIfNotRoot(scratch1, Heap::kHashTableMapRootIndex, miss);
 }
 
 
-// Helper function used from LoadIC/CallIC GenerateNormal.
+// Helper function used from LoadIC GenerateNormal.
 //
 // elements: Property dictionary. It is not clobbered if a jump to the miss
 //           label is done.
 // name:     Property name. It is not clobbered if a jump to the miss label is
 //           done
 // result:   Register for the result. It is only updated if a jump to the miss
 //           label is not done.
 // The scratch registers need to be different from elements, name and result.
 // The generated code assumes that the receiver has slow properties,
 // is not a global object and does not have interceptors.
(...skipping 303 matching lines...)
   __ B(hs, slow_case);
 
   __ Add(backing_store,
          backing_store,
          FixedArray::kHeaderSize - kHeapObjectTag);
   __ SmiUntag(scratch, key);
   return MemOperand(backing_store, scratch, LSL, kPointerSizeLog2);
 }
 
 
-Object* CallIC_Miss(Arguments args);
-
-void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
-                                               int argc,
-                                               Code::Kind kind,
-                                               ExtraICState extra_state) {
-  // ----------- S t a t e -------------
-  // -- x1 : receiver
-  // -- x2 : name
-  // -----------------------------------
-  Register receiver = x1;
-  Register name = x2;
-
-  Label number, non_number, non_string, boolean, probe, miss;
-
-  // Probe the stub cache.
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         MONOMORPHIC,
-                                         extra_state,
-                                         Code::NORMAL,
-                                         argc);
-  masm->isolate()->stub_cache()->GenerateProbe(
-      masm, flags, receiver, name, x3, x4, x5, x6);
-
-  // If the stub cache probing failed, the receiver might be a value.
-  // For value objects, we use the map of the prototype objects for
-  // the corresponding JSValue for the cache and that is what we need
-  // to probe.
-
-  // Check for number.
-  __ JumpIfSmi(receiver, &number);
-  Register receiver_type = x3;
-  __ JumpIfNotObjectType(receiver, x4, receiver_type, HEAP_NUMBER_TYPE,
-                         &non_number);
-
-  __ Bind(&number);
-  StubCompiler::GenerateLoadGlobalFunctionPrototype(
-      masm, Context::NUMBER_FUNCTION_INDEX, receiver);
-  __ B(&probe);
-
-  // Check for string.
-  __ Bind(&non_number);
-  __ Cmp(receiver_type, FIRST_NONSTRING_TYPE);
-  __ B(hs, &non_string);
-  StubCompiler::GenerateLoadGlobalFunctionPrototype(
-      masm, Context::STRING_FUNCTION_INDEX, receiver);
-  __ B(&probe);
-
-  // Check for boolean.
-  __ Bind(&non_string);
-  __ JumpIfRoot(receiver, Heap::kTrueValueRootIndex, &boolean);
-  __ JumpIfNotRoot(receiver, Heap::kFalseValueRootIndex, &miss);
-
-  __ Bind(&boolean);
-  StubCompiler::GenerateLoadGlobalFunctionPrototype(
-      masm, Context::BOOLEAN_FUNCTION_INDEX, receiver);
-
-  // Probe the stub cache for the value object.
-  __ Bind(&probe);
-  masm->isolate()->stub_cache()->GenerateProbe(
-      masm, flags, receiver, name, x3, x4, x5, x6);
-
-  __ Bind(&miss);
-  // Fall-through on miss.
-}
-
-
-static void GenerateFunctionTailCall(MacroAssembler* masm,
-                                     int argc,
-                                     Label* miss,
-                                     Register function,
-                                     Register scratch) {
-  ASSERT(!AreAliased(function, scratch));
-
-  // Check that the value is a JSFunction.
-  __ JumpIfSmi(function, miss);
-  __ JumpIfNotObjectType(function, scratch, scratch, JS_FUNCTION_TYPE, miss);
-
-  // Invoke the function.
-  ParameterCount actual(argc);
-  __ InvokeFunction(function, actual, JUMP_FUNCTION, NullCallWrapper());
-}
-
-
-void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
+void LoadIC::GenerateMegamorphic(MacroAssembler* masm,
+                                 ExtraICState extra_state) {
   // ----------- S t a t e -------------
   // -- x2 : name
   // -- lr : return address
-  // -----------------------------------
-  Label miss;
-  Register name = x2;
-
-  // Get the receiver of the function from the stack.
-  Register receiver = x1;
-  __ Peek(receiver, argc * kXRegSizeInBytes);
-
-  Register elements = x0;
-  GenerateNameDictionaryReceiverCheck(masm, receiver, elements, x3, x4, &miss);
-
-  // Search the dictionary.
-  Register function = x1;
-  GenerateDictionaryLoad(masm, &miss, elements, name, function, x3, x4);
-
-  GenerateFunctionTailCall(masm, argc, &miss, function, x4);
-
-  __ Bind(&miss);
-  // Fall-through on miss.
-}
-
-
-void CallICBase::GenerateMiss(MacroAssembler* masm,
-                              int argc,
-                              IC::UtilityId id,
-                              ExtraICState extra_state) {
-  // ----------- S t a t e -------------
-  // -- x2 : name
-  // -- lr : return address
-  // -----------------------------------
-  Isolate* isolate = masm->isolate();
-
-  if (id == IC::kCallIC_Miss) {
-    __ IncrementCounter(isolate->counters()->call_miss(), 1, x3, x4);
-  } else {
-    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, x3, x4);
-  }
-
-  // Get the receiver of the function from the stack.
-  __ Peek(x3, argc * kXRegSizeInBytes);
-
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-
-    // Push the receiver and the name of the function.
-    __ Push(x3, x2);
-
-    // Call the entry.
-    __ Mov(x0, 2);
-    __ Mov(x1, Operand(ExternalReference(IC_Utility(id), isolate)));
-
-    CEntryStub stub(1);
-    __ CallStub(&stub);
-
-    // Move result to x1 and leave the internal frame.
-    __ Mov(x1, x0);
-  }
-
-  // Check if the receiver is a global object of some sort.
-  // This can happen only for regular CallIC but not KeyedCallIC.
-  if (id == IC::kCallIC_Miss) {
-    Label invoke, global;
-    __ Peek(x2, argc * kPointerSize); // receiver
-    __ JumpIfSmi(x2, &invoke);
-    __ JumpIfObjectType(x2, x3, x3, JS_GLOBAL_OBJECT_TYPE, &global);
-    __ Cmp(x3, JS_BUILTINS_OBJECT_TYPE);
-    __ B(ne, &invoke);
-
-    // Patch the receiver on the stack.
-    __ Bind(&global);
-    __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
-    __ Poke(x2, argc * kXRegSizeInBytes);
-    __ Bind(&invoke);
-  }
-
-  // Invoke the function.
-  ParameterCount actual(argc);
-  __ InvokeFunction(x1, actual, JUMP_FUNCTION, NullCallWrapper());
-}
-
-
-void CallIC::GenerateMegamorphic(MacroAssembler* masm,
-                                 int argc,
-                                 ExtraICState extra_ic_state) {
-  // ----------- S t a t e -------------
-  // -- x2 : name
-  // -- lr : return address
-  // -----------------------------------
-
-  // Get the receiver of the function from the stack.
-  // GenerateMonomorphicCacheProbe expects the receiver to be in x1.
-  Register receiver = x1;
-  __ Peek(receiver, argc * kXRegSizeInBytes);
-
-  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
-  GenerateMiss(masm, argc, extra_ic_state);
-}
-
-
-void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  // -- x2 : name / key
-  // -- lr : return address
-  // -----------------------------------
-  Register key = x2;
-
-  // Get the receiver of the function from the stack.
-  Register receiver = x1;
-  __ Peek(receiver, argc * kXRegSizeInBytes);
-
-  Label key_is_not_smi;
-  Label key_is_smi;
-  Label slow_call;
-  Label not_fast_array;
-  Label slow_load;
-  Label do_call;
-  Label key_is_index_name;
-  Label lookup_monomorphic_cache;
-
-  __ JumpIfNotSmi(key, &key_is_not_smi);
-  __ Bind(&key_is_smi);
-  // Now the key is known to be a smi. This place is also jumped to from below
-  // where a numeric string is converted to a smi.
-  // Live values:
-  // x1: receiver
-  // x2: key
-  GenerateKeyedLoadReceiverCheck(masm, receiver, x10, x11,
-                                 Map::kHasIndexedInterceptor, &slow_call);
-
-  // Due to the requirements of some helpers, both 'function' and 'receiver' are
-  // mapped to x1. However, they are never live at the same time.
-  Register function = x1;
-  Register elements_map = x3;
-  Register elements = x4;
-  GenerateFastArrayLoad(masm, receiver, key, elements, elements_map, x10,
-                        function, &not_fast_array, &slow_load);
-  Counters* counters = masm->isolate()->counters();
-  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, x10, x11);
-
-  __ Bind(&do_call);
-  // Live values:
-  // x1: function
-  // x2: key
-  // GenerateFunctionTailCall requires that function is x1. This is enforced by
-  // MacroAssembler::InvokeFunction.
-  GenerateFunctionTailCall(masm, argc, &slow_call, function, x10);
-
-  // It should not be possible for execution to fall through a tail call.
-  if (__ emit_debug_code()) {
-    __ Unreachable();
-  }
-
-  __ Bind(&not_fast_array);
-  // Check whether the elements is a number dictionary.
-  // Live values:
-  // x1: receiver
-  // x2: key
-  // x3: elements map
-  // x4: elements
-  __ JumpIfNotRoot(elements_map, Heap::kHashTableMapRootIndex, &slow_load);
-  __ LoadFromNumberDictionary(&slow_load, elements, key, function,
-                              x10, x11, x12, x13);
-  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, x10, x11);
-  __ B(&do_call);
-
-  __ Bind(&slow_load);
-  // This branch is taken when calling KeyedCallIC_Miss (via GenerateMiss) is
-  // neither required nor beneficial.
-  // Live values:
-  // x1: receiver
-  // x2: key
-  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, x10, x11);
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    // The key needs to be preserved across the runtime call.
-    __ Push(key);
-    // Pass the receiver and the key as argument to KeyedGetProperty.
-    __ Push(receiver, key);
-    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
-    __ Pop(key);
-  }
-  // The return value is in x0 (as per AAPCS64).
-  __ Mov(function, x0);
-  __ B(&do_call);
-
-  __ Bind(&key_is_not_smi);
-  // The key isn't a SMI. Check to see if it's a name.
-  // Live values:
-  // x1: receiver
-  // x2: key
-  Register map = x6;
-  Register hash = x7;
-  GenerateKeyNameCheck(masm, key, map, hash, &key_is_index_name, &slow_call);
-
-  // If the check fell through, the key is known to be a unique name.
-  //
-  // If the receiver is a regular JS object with slow properties then do
-  // a quick inline probe of the receiver's dictionary.
-  // Otherwise do the monomorphic cache probe.
-  GenerateKeyedLoadReceiverCheck(masm, receiver, map, x10,
-      Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
-
-  __ Ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
-  __ Ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
-  __ JumpIfNotRoot(elements_map, Heap::kHashTableMapRootIndex,
-                   &lookup_monomorphic_cache);
-
-  GenerateDictionaryLoad(masm, &slow_load, elements, key, function, x10, x11);
-  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, x10, x11);
-  __ B(&do_call);
-
-  __ Bind(&lookup_monomorphic_cache);
-  // Live values:
-  // x1: receiver
-  // x2: key
-  // These assignments are expected by GenerateMonomorphicCacheProbe.
-  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, x10, x11);
-  ASSERT(receiver.Is(x1));
-  ASSERT(key.Is(x2));
-  GenerateMonomorphicCacheProbe(masm,
-                                argc,
-                                Code::KEYED_CALL_IC,
-                                kNoExtraICState);
-  // Fall through on miss.
-
-  __ Bind(&slow_call);
-  // This branch is taken if:
-  // - the receiver requires boxing or access check,
-  // - the key is neither smi nor a unique name,
-  // - the value loaded is not a function,
-  // - there is hope that the runtime will create a monomorphic call stub
-  //   that will get fetched next time.
-  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, x10, x11);
-  GenerateMiss(masm, argc);
-
-  __ Bind(&key_is_index_name);
-  // Live values:
-  // x1: receiver
-  // x2: key
-  // x7: hash
-  // The key is an array index string, so calculate its hash and derive the
-  // numerical index from it.
-  __ IndexFromHash(hash, key);
-  __ B(&key_is_smi);
-}
-
-
-void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  // -- x2 : name
-  // -- lr : return address
-  // -----------------------------------
-  Register name = x2;
-
-  // Check if the name is really a name.
-  Label miss;
-  __ JumpIfSmi(name, &miss);
-  __ IsObjectNameType(name, x0, &miss);
-  CallICBase::GenerateNormal(masm, argc);
-
-  __ Bind(&miss);
-  GenerateMiss(masm, argc);
-}
-
-
-void LoadIC::GenerateMegamorphic(MacroAssembler* masm, ContextualMode mode) {
-  // ----------- S t a t e -------------
-  // -- x2 : name
-  // -- lr : return address
   // -- x0 : receiver
   // -----------------------------------
 
   // Probe the stub cache.
-  ExtraICState extra_ic_state = LoadIC::ComputeExtraICState(mode);
   Code::Flags flags = Code::ComputeFlags(
-      Code::HANDLER, MONOMORPHIC, extra_ic_state,
+      Code::HANDLER, MONOMORPHIC, extra_state,
       Code::NORMAL, Code::LOAD_IC);
   masm->isolate()->stub_cache()->GenerateProbe(
       masm, flags, x0, x2, x3, x4, x5, x6);
 
   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
 }
 
 
 void LoadIC::GenerateNormal(MacroAssembler* masm) {
(...skipping 132 matching lines...)
   __ Add(x10, unmapped.base(), unmapped_offset);
   __ Mov(x11, value);
   __ RecordWrite(unmapped.base(), x10, x11,
                  kLRHasNotBeenSaved, kDontSaveFPRegs);
   __ Ret();
   __ Bind(&slow);
   GenerateMiss(masm);
 }
 
 
-void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
-                                             int argc) {
-  // ----------- S t a t e -------------
-  // -- x2 : key / name
-  // -- lr : return address
-  // -----------------------------------
-  Register key = x2;
-
-  Label slow, not_mapped, do_call;
-
-  // Get the receiver of the function from the stack.
-  Register map = x0;
-  Register function = x1;
-  Register receiver = x3;
-  __ Peek(receiver, argc * kXRegSizeInBytes);
-
-  MemOperand mapped_location =
-      GenerateMappedArgumentsLookup(masm, receiver, key, map, x10, x11,
-                                    &not_mapped, &slow);
-  // If we fell through, mapped_location will load the function.
-  __ Ldr(function, mapped_location);
-
-  __ Bind(&do_call);
-  // Live values:
-  // x1: function
-  // GenerateFunctionTailCall requires that function is x1. This is enforced by
-  // MacroAssembler::InvokeFunction.
-  GenerateFunctionTailCall(masm, argc, &slow, function, x10);
-
-  __ Bind(&not_mapped);
-  // The argument is not mapped, but 'map' has been populated.
-  MemOperand unmapped_location =
-      GenerateUnmappedArgumentsLookup(masm, key, map, x10, &slow);
-  // If we fell through, unmapped_location will load the function.
-  __ Ldr(function, unmapped_location);
-  // Check for the hole value before calling the function. For the mapped case,
-  // GenerateMappedArgumentsLookup does this check automatically. If we're not
-  // going to the slow case, we re-use the tail call code at do_call.
-  __ JumpIfNotRoot(function, Heap::kTheHoleValueRootIndex, &do_call);
-
-  __ Bind(&slow);
-  GenerateMiss(masm, argc);
-}
-
-
 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // ---------- S t a t e --------------
   // -- lr : return address
   // -- x0 : key
   // -- x1 : receiver
   // -----------------------------------
   Isolate* isolate = masm->isolate();
 
   __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, x10, x11);
 
(...skipping 809 matching lines...)
     ASSERT(to_patch->Mask(TestBranchMask) == TBNZ);
     // This is JumpIfSmi(smi_reg, branch_imm).
     patcher.tbz(smi_reg, 0, branch_imm);
   }
 }
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_A64
