Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(509)

Side by Side Diff: src/mips/stub-cache-mips.cc

Issue 185653004: Experimental parser: merge to r19637 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/mips/simulator-mips.cc ('k') | src/mksnapshot.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 269 matching lines...) Expand 10 before | Expand all | Expand 10 after
280 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); 280 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
281 } 281 }
282 282
283 283
// --- Diff hunk (left column = old rev, right column = patched rev) ---
// StubCompiler::GenerateDirectLoadGlobalFunctionPrototype: loads the prototype
// of global function `index` into `prototype`, jumping to `miss` on mismatch.
// Old code compared cp's global object against isolate->global_object();
// new code instead loads native_context()->get(index) and compares it against
// the expected JSFunction handle, so the check is per-function, not per-context.
284 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( 284 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
285 MacroAssembler* masm, 285 MacroAssembler* masm,
286 int index, 286 int index,
287 Register prototype, 287 Register prototype,
288 Label* miss) { 288 Label* miss) {
289 Isolate* isolate = masm->isolate(); 289 Isolate* isolate = masm->isolate();
// Old-only: compare the current context's global object with the isolate's.
290 // Check we're still in the same context.
291 __ lw(prototype,
292 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
293 ASSERT(!prototype.is(at));
294 __ li(at, isolate->global_object());
295 __ Branch(miss, ne, prototype, Operand(at));
296 // Get the global function with the given index. 290 // Get the global function with the given index.
297 Handle<JSFunction> function( 291 Handle<JSFunction> function(
298 JSFunction::cast(isolate->native_context()->get(index))); 292 JSFunction::cast(isolate->native_context()->get(index)));
293
// New-only: walk global object -> native context -> slot `index` at runtime
// and verify it is still the same function that was embedded at compile time.
// `prototype` is reused as the scratch register until the final loads below.
294 // Check we're still in the same context.
295 Register scratch = prototype;
296 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
297 __ lw(scratch, MemOperand(cp, offset));
298 __ lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
299 __ lw(scratch, MemOperand(scratch, Context::SlotOffset(index)));
300 __ li(at, function);
301 __ Branch(miss, ne, at, Operand(scratch));
302
// Common tail: initial map -> prototype. Relies on global functions always
// having an initial map (stated by the original comment below).
299 // Load its initial map. The global functions all have initial maps. 303 // Load its initial map. The global functions all have initial maps.
300 __ li(prototype, Handle<Map>(function->initial_map())); 304 __ li(prototype, Handle<Map>(function->initial_map()));
301 // Load the prototype from the initial map. 305 // Load the prototype from the initial map.
302 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); 306 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
303 } 307 }
304 308
305 309
306 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, 310 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
307 Register dst, 311 Register dst,
308 Register src, 312 Register src,
(...skipping 449 matching lines...) Expand 10 before | Expand all | Expand 10 after
758 Register name, 762 Register name,
759 Handle<JSObject> holder_obj, 763 Handle<JSObject> holder_obj,
760 IC::UtilityId id) { 764 IC::UtilityId id) {
761 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 765 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
762 __ CallExternalReference( 766 __ CallExternalReference(
763 ExternalReference(IC_Utility(id), masm->isolate()), 767 ExternalReference(IC_Utility(id), masm->isolate()),
764 StubCache::kInterceptorArgsLength); 768 StubCache::kInterceptorArgsLength);
765 } 769 }
766 770
767 771
// --- Diff hunk (left column = old rev, right column = patched rev) ---
// Fast API call machinery. The patch deletes the hand-rolled helpers
// (kFastApiCallArguments, ReserveSpaceForFastApiCall, FreeSpaceForFastApiCall,
// GenerateFastApiDirectCall) and replaces them with a single member
// StubCompiler::GenerateFastApiCall that pushes receiver+args, loads the
// CallApiFunctionStub ABI registers (callee=a0, call_data=t0, holder=a2,
// api_function_address=a1) and tail-calls CallApiFunctionStub.
// NOTE(review): left/right columns interleave per physical line below; read
// each column independently.
768 static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength; 772 // Generate call to api function.
773 void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
774 const CallOptimization& optimization,
775 Handle<Map> receiver_map,
776 Register receiver,
777 Register scratch_in,
778 bool is_store,
779 int argc,
780 Register* values) {
781 ASSERT(!receiver.is(scratch_in));
// New: receiver is stored deepest; arguments follow in reverse order.
782 // Preparing to push, adjust sp.
783 __ Subu(sp, sp, Operand((argc + 1) * kPointerSize));
784 __ sw(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver.
785 // Write the arguments to stack frame.
786 for (int i = 0; i < argc; i++) {
787 Register arg = values[argc-1-i];
788 ASSERT(!receiver.is(arg));
789 ASSERT(!scratch_in.is(arg));
790 __ sw(arg, MemOperand(sp, (argc-1-i) * kPointerSize)); // Push arg.
791 }
792 ASSERT(optimization.is_simple_api_call());
769 793
// Old (left): ReserveSpaceForFastApiCall pushed zero_reg placeholders that
// CheckPrototypes/GenerateFastApiDirectCall later filled in.
// New (right): resolve the API holder statically via LookupHolderOfExpectedType.
770 // Reserves space for the extra arguments to API function in the 794 // Abi for CallApiFunctionStub.
771 // caller's frame. 795 Register callee = a0;
772 // 796 Register call_data = t0;
773 // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall. 797 Register holder = a2;
774 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, 798 Register api_function_address = a1;
775 Register scratch) { 799
776 ASSERT(Smi::FromInt(0) == 0); 800 // Put holder in place.
777 for (int i = 0; i < kFastApiCallArguments; i++) { 801 CallOptimization::HolderLookup holder_lookup;
778 __ push(zero_reg); 802 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
803 receiver_map,
804 &holder_lookup);
805 switch (holder_lookup) {
806 case CallOptimization::kHolderIsReceiver:
807 __ Move(holder, receiver);
808 break;
809 case CallOptimization::kHolderFound:
810 __ li(holder, api_holder);
811 break;
812 case CallOptimization::kHolderNotFound:
813 UNREACHABLE();
814 break;
779 } 815 }
780 }
781 816
817 Isolate* isolate = masm->isolate();
818 Handle<JSFunction> function = optimization.constant_function();
819 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
820 Handle<Object> call_data_obj(api_call_info->data(), isolate);
782 821
783 // Undoes the effects of ReserveSpaceForFastApiCall. 822 // Put callee in place.
784 static void FreeSpaceForFastApiCall(MacroAssembler* masm) { 823 __ li(callee, function);
785 __ Drop(kFastApiCallArguments);
786 }
787 824
// New: call_data is loaded from the CallHandlerInfo if it may move (new
// space), embedded directly if in old space, or replaced by the undefined
// root (tracked via call_data_undefined for the stub's minor key).
788 825 bool call_data_undefined = false;
789 static void GenerateFastApiDirectCall(MacroAssembler* masm, 826 // Put call_data in place.
790 const CallOptimization& optimization, 827 if (isolate->heap()->InNewSpace(*call_data_obj)) {
791 int argc, 828 __ li(call_data, api_call_info);
792 bool restore_context) { 829 __ lw(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
793 // ----------- S t a t e ------------- 830 } else if (call_data_obj->IsUndefined()) {
794 // -- sp[0] - sp[24] : FunctionCallbackInfo, incl. 831 call_data_undefined = true;
795 // : holder (set by CheckPrototypes) 832 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
796 // -- sp[28] : last JS argument
797 // -- ...
798 // -- sp[(argc + 6) * 4] : first JS argument
799 // -- sp[(argc + 7) * 4] : receiver
800 // -----------------------------------
// Old (left): GenerateFastApiDirectCall built the FunctionCallbackInfo frame
// manually (context save, callee, data, isolate, return-value slots) and
// finished with CallApiFunctionAndReturn through a profiling thunk.
801 typedef FunctionCallbackArguments FCA;
802 // Save calling context.
803 __ sw(cp, MemOperand(sp, FCA::kContextSaveIndex * kPointerSize));
804 // Get the function and setup the context.
805 Handle<JSFunction> function = optimization.constant_function();
806 __ li(t1, function);
807 __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
808 __ sw(t1, MemOperand(sp, FCA::kCalleeIndex * kPointerSize));
809
810 // Construct the FunctionCallbackInfo.
811 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
812 Handle<Object> call_data(api_call_info->data(), masm->isolate());
813 if (masm->isolate()->heap()->InNewSpace(*call_data)) {
814 __ li(a0, api_call_info);
815 __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
816 } else { 833 } else {
817 __ li(t2, call_data); 834 __ li(call_data, call_data_obj);
818 } 835 }
819 // Store call data. 836 // Put api_function_address in place.
820 __ sw(t2, MemOperand(sp, FCA::kDataIndex * kPointerSize));
821 // Store isolate.
822 __ li(t3, Operand(ExternalReference::isolate_address(masm->isolate())));
823 __ sw(t3, MemOperand(sp, FCA::kIsolateIndex * kPointerSize));
824 // Store ReturnValue default and ReturnValue.
825 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
826 __ sw(t1, MemOperand(sp, FCA::kReturnValueOffset * kPointerSize));
827 __ sw(t1, MemOperand(sp, FCA::kReturnValueDefaultValueIndex * kPointerSize));
828
829 // Prepare arguments.
830 __ Move(a2, sp);
831
832 // Allocate the v8::Arguments structure in the arguments' space since
833 // it's not controlled by GC.
834 const int kApiStackSpace = 4;
835
836 FrameScope frame_scope(masm, StackFrame::MANUAL);
837 __ EnterExitFrame(false, kApiStackSpace);
838
839 // a0 = FunctionCallbackInfo&
840 // Arguments is built at sp + 1 (sp is a reserved spot for ra).
841 __ Addu(a0, sp, kPointerSize);
842 // FunctionCallbackInfo::implicit_args_
843 __ sw(a2, MemOperand(a0, 0 * kPointerSize));
844 // FunctionCallbackInfo::values_
845 __ Addu(t0, a2, Operand((kFastApiCallArguments - 1 + argc) * kPointerSize));
846 __ sw(t0, MemOperand(a0, 1 * kPointerSize));
847 // FunctionCallbackInfo::length_ = argc
848 __ li(t0, Operand(argc));
849 __ sw(t0, MemOperand(a0, 2 * kPointerSize));
850 // FunctionCallbackInfo::is_construct_call = 0
851 __ sw(zero_reg, MemOperand(a0, 3 * kPointerSize));
852
853 const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
// Shared: materialize the C callback address as an ExternalReference.
854 Address function_address = v8::ToCData<Address>(api_call_info->callback()); 837 Address function_address = v8::ToCData<Address>(api_call_info->callback());
855 ApiFunction fun(function_address); 838 ApiFunction fun(function_address);
856 ExternalReference::Type type = ExternalReference::DIRECT_API_CALL; 839 ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
857 ExternalReference ref = 840 ExternalReference ref =
858 ExternalReference(&fun, 841 ExternalReference(&fun,
859 type, 842 type,
860 masm->isolate()); 843 masm->isolate());
// Old (left): profiling thunk + CallApiFunctionAndReturn with optional
// context restore. New (right): delegate everything to CallApiFunctionStub.
861 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); 844 __ li(api_function_address, Operand(ref));
862 ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
863 ApiFunction thunk_fun(thunk_address);
864 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
865 masm->isolate());
866 845
867 AllowExternalCallThatCantCauseGC scope(masm); 846 // Jump to stub.
868 MemOperand context_restore_operand( 847 CallApiFunctionStub stub(is_store, call_data_undefined, argc);
869 fp, (2 + FCA::kContextSaveIndex) * kPointerSize); 848 __ TailCallStub(&stub);
870 MemOperand return_value_operand(
871 fp, (2 + FCA::kReturnValueOffset) * kPointerSize);
872
873 __ CallApiFunctionAndReturn(ref,
874 function_address,
875 thunk_ref,
876 a1,
877 kStackUnwindSpace,
878 return_value_operand,
879 restore_context ?
880 &context_restore_operand : NULL);
881 } 849 }
882 850
883 851
// --- Diff hunk (old revision only; this helper is deleted by the patch) ---
// Old file-static GenerateFastApiCall: reserves the FunctionCallbackArguments
// frame plus JS arguments on the stack (holder slot, receiver, then `values`
// in order), then delegates to GenerateFastApiDirectCall with
// restore_context == true. Superseded by the new member
// StubCompiler::GenerateFastApiCall earlier in this diff.
884 // Generate call to api function.
885 static void GenerateFastApiCall(MacroAssembler* masm,
886 const CallOptimization& optimization,
887 Register receiver,
888 Register scratch,
889 int argc,
890 Register* values) {
891 ASSERT(optimization.is_simple_api_call());
892 ASSERT(!receiver.is(scratch));
893
894 typedef FunctionCallbackArguments FCA;
895 const int stack_space = kFastApiCallArguments + argc + 1;
896 // Assign stack space for the call arguments.
897 __ Subu(sp, sp, Operand(stack_space * kPointerSize));
// NOTE(review): `receiver` is written to both the holder slot and the
// receiver slot here — receiver doubles as the API holder in this old path.
898 // Write holder to stack frame.
899 __ sw(receiver, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
900 // Write receiver to stack frame.
901 int index = stack_space - 1;
902 __ sw(receiver, MemOperand(sp, index-- * kPointerSize));
903 // Write the arguments to stack frame.
904 for (int i = 0; i < argc; i++) {
905 ASSERT(!receiver.is(values[i]));
906 ASSERT(!scratch.is(values[i]));
907 __ sw(values[i], MemOperand(sp, index-- * kPointerSize));
908 }
909
910 GenerateFastApiDirectCall(masm, optimization, argc, true);
911 }
912
913
// --- Diff hunk (old revision only; this whole class is deleted by the patch) ---
// CallInterceptorCompiler: compiles a call IC stub for a property that sits
// behind a named interceptor. Compile() dispatches to CompileCacheable() when
// the lookup resolved to a constant function (possibly a fast API call) and
// to CompileRegular() otherwise.
914 class CallInterceptorCompiler BASE_EMBEDDED {
915 public:
916 CallInterceptorCompiler(CallStubCompiler* stub_compiler,
917 const ParameterCount& arguments,
918 Register name)
919 : stub_compiler_(stub_compiler),
920 arguments_(arguments),
921 name_(name) {}
922
// Entry point. Requires a holder with a defined named-interceptor getter
// (asserted below); bails to `miss` for smi receivers.
923 void Compile(MacroAssembler* masm,
924 Handle<JSObject> object,
925 Handle<JSObject> holder,
926 Handle<Name> name,
927 LookupResult* lookup,
928 Register receiver,
929 Register scratch1,
930 Register scratch2,
931 Register scratch3,
932 Label* miss) {
933 ASSERT(holder->HasNamedInterceptor());
934 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
935
936 // Check that the receiver isn't a smi.
937 __ JumpIfSmi(receiver, miss);
938 CallOptimization optimization(lookup);
939 if (optimization.is_constant_call()) {
940 CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
941 holder, lookup, name, optimization, miss);
942 } else {
943 CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
944 name, holder, miss);
945 }
946 }
947
948 private:
// Constant-function path: optionally sets up a fast API call when the
// prototype depth to the expected type is known (depth1 via the receiver
// chain, depth2 via the interceptor holder chain).
949 void CompileCacheable(MacroAssembler* masm,
950 Handle<JSObject> object,
951 Register receiver,
952 Register scratch1,
953 Register scratch2,
954 Register scratch3,
955 Handle<JSObject> interceptor_holder,
956 LookupResult* lookup,
957 Handle<Name> name,
958 const CallOptimization& optimization,
959 Label* miss_label) {
960 ASSERT(optimization.is_constant_call());
961 ASSERT(!lookup->holder()->IsGlobalObject());
962 Counters* counters = masm->isolate()->counters();
963 int depth1 = kInvalidProtoDepth;
964 int depth2 = kInvalidProtoDepth;
965 bool can_do_fast_api_call = false;
966 if (optimization.is_simple_api_call() &&
967 !lookup->holder()->IsGlobalObject()) {
968 depth1 = optimization.GetPrototypeDepthOfExpectedType(
969 object, interceptor_holder);
970 if (depth1 == kInvalidProtoDepth) {
971 depth2 = optimization.GetPrototypeDepthOfExpectedType(
972 interceptor_holder, Handle<JSObject>(lookup->holder()));
973 }
974 can_do_fast_api_call =
975 depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
976 }
977
978 __ IncrementCounter(counters->call_const_interceptor(), 1,
979 scratch1, scratch2);
980
// Pre-reserve the API frame; it must be freed again on every path that
// does not end in GenerateFastApiDirectCall (see miss_cleanup below).
981 if (can_do_fast_api_call) {
982 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
983 scratch1, scratch2);
984 ReserveSpaceForFastApiCall(masm, scratch1);
985 }
986
987 // Check that the maps from receiver to interceptor's holder
988 // haven't changed and thus we can invoke interceptor.
989 Label miss_cleanup;
990 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
991 Register holder =
992 stub_compiler_->CheckPrototypes(
993 IC::CurrentTypeOf(object, masm->isolate()), receiver,
994 interceptor_holder, scratch1, scratch2, scratch3,
995 name, depth1, miss);
996
997 // Invoke an interceptor and if it provides a value,
998 // branch to |regular_invoke|.
999 Label regular_invoke;
1000 LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
1001 &regular_invoke);
1002
1003 // Interceptor returned nothing for this property. Try to use cached
1004 // constant function.
1005
1006 // Check that the maps from interceptor's holder to constant function's
1007 // holder haven't changed and thus we can use cached constant function.
1008 if (*interceptor_holder != lookup->holder()) {
1009 stub_compiler_->CheckPrototypes(
1010 IC::CurrentTypeOf(interceptor_holder, masm->isolate()), holder,
1011 handle(lookup->holder()), scratch1, scratch2, scratch3,
1012 name, depth2, miss);
1013 } else {
1014 // CheckPrototypes has a side effect of fetching a 'holder'
1015 // for API (object which is instanceof for the signature). It's
1016 // safe to omit it here, as if present, it should be fetched
1017 // by the previous CheckPrototypes.
1018 ASSERT(depth2 == kInvalidProtoDepth);
1019 }
1020
1021 // Invoke function.
1022 if (can_do_fast_api_call) {
1023 GenerateFastApiDirectCall(
1024 masm, optimization, arguments_.immediate(), false);
1025 } else {
1026 Handle<JSFunction> function = optimization.constant_function();
1027 __ Move(a0, receiver);
1028 stub_compiler_->GenerateJumpFunction(object, function);
1029 }
1030
1031 // Deferred code for fast API call case---clean preallocated space.
1032 if (can_do_fast_api_call) {
1033 __ bind(&miss_cleanup);
1034 FreeSpaceForFastApiCall(masm);
1035 __ Branch(miss_label);
1036 }
1037
1038 // Invoke a regular function.
1039 __ bind(&regular_invoke);
1040 if (can_do_fast_api_call) {
1041 FreeSpaceForFastApiCall(masm);
1042 }
1043 }
1044
// Non-constant path: check the prototype chain, then call the interceptor
// through the runtime (name_ preserved across the call).
1045 void CompileRegular(MacroAssembler* masm,
1046 Handle<JSObject> object,
1047 Register receiver,
1048 Register scratch1,
1049 Register scratch2,
1050 Register scratch3,
1051 Handle<Name> name,
1052 Handle<JSObject> interceptor_holder,
1053 Label* miss_label) {
1054 Register holder =
1055 stub_compiler_->CheckPrototypes(
1056 IC::CurrentTypeOf(object, masm->isolate()), receiver,
1057 interceptor_holder, scratch1, scratch2, scratch3, name, miss_label);
1058
1059 // Call a runtime function to load the interceptor property.
1060 FrameScope scope(masm, StackFrame::INTERNAL);
1061 // Save the name_ register across the call.
1062 __ push(name_);
1063
1064 CompileCallLoadPropertyWithInterceptor(
1065 masm, receiver, holder, name_, interceptor_holder,
1066 IC::kLoadPropertyWithInterceptorForCall);
1067
1068 // Restore the name_ register.
1069 __ pop(name_);
1070 // Leave the internal frame.
1071 }
1072
// Probes the interceptor ("Only" variant); result arrives in v0. Branches
// to |interceptor_succeeded| when v0 is not the no-result sentinel root.
1073 void LoadWithInterceptor(MacroAssembler* masm,
1074 Register receiver,
1075 Register holder,
1076 Handle<JSObject> holder_obj,
1077 Register scratch,
1078 Label* interceptor_succeeded) {
1079 {
1080 FrameScope scope(masm, StackFrame::INTERNAL);
1081
1082 __ Push(receiver, holder, name_);
1083 CompileCallLoadPropertyWithInterceptor(
1084 masm, receiver, holder, name_, holder_obj,
1085 IC::kLoadPropertyWithInterceptorOnly);
1086 __ pop(name_);
1087 __ pop(holder);
1088 __ pop(receiver);
1089 }
1090 // If interceptor returns no-result sentinel, call the constant function.
1091 __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
1092 __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
1093 }
1094
1095 CallStubCompiler* stub_compiler_;
1096 const ParameterCount& arguments_;
1097 Register name_;
1098 };
1099
1100
// Unchanged by the patch: tail-call into `code` via a code-target jump.
1101 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) { 852 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
1102 __ Jump(code, RelocInfo::CODE_TARGET); 853 __ Jump(code, RelocInfo::CODE_TARGET);
1103 } 854 }
1104 855
1105 856
1106 #undef __ 857 #undef __
1107 #define __ ACCESS_MASM(masm()) 858 #define __ ACCESS_MASM(masm())
1108 859
1109 860
1110 Register StubCompiler::CheckPrototypes(Handle<Type> type, 861 Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
1111 Register object_reg, 862 Register object_reg,
1112 Handle<JSObject> holder, 863 Handle<JSObject> holder,
1113 Register holder_reg, 864 Register holder_reg,
1114 Register scratch1, 865 Register scratch1,
1115 Register scratch2, 866 Register scratch2,
1116 Handle<Name> name, 867 Handle<Name> name,
1117 int save_at_depth,
1118 Label* miss, 868 Label* miss,
1119 PrototypeCheckType check) { 869 PrototypeCheckType check) {
1120 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate())); 870 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
1121 // Make sure that the type feedback oracle harvests the receiver map.
1122 // TODO(svenpanne) Remove this hack when all ICs are reworked.
1123 __ li(scratch1, Operand(receiver_map));
1124 871
1125 // Make sure there's no overlap between holder and object registers. 872 // Make sure there's no overlap between holder and object registers.
1126 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); 873 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1127 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) 874 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1128 && !scratch2.is(scratch1)); 875 && !scratch2.is(scratch1));
1129 876
1130 // Keep track of the current object in register reg. 877 // Keep track of the current object in register reg.
1131 Register reg = object_reg; 878 Register reg = object_reg;
1132 int depth = 0; 879 int depth = 0;
1133 880
1134 typedef FunctionCallbackArguments FCA;
1135 if (save_at_depth == depth) {
1136 __ sw(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
1137 }
1138
1139 Handle<JSObject> current = Handle<JSObject>::null(); 881 Handle<JSObject> current = Handle<JSObject>::null();
1140 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); 882 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
1141 Handle<JSObject> prototype = Handle<JSObject>::null(); 883 Handle<JSObject> prototype = Handle<JSObject>::null();
1142 Handle<Map> current_map = receiver_map; 884 Handle<Map> current_map = receiver_map;
1143 Handle<Map> holder_map(holder->map()); 885 Handle<Map> holder_map(holder->map());
1144 // Traverse the prototype chain and check the maps in the prototype chain for 886 // Traverse the prototype chain and check the maps in the prototype chain for
1145 // fast and global objects or do negative lookup for normal objects. 887 // fast and global objects or do negative lookup for normal objects.
1146 while (!current_map.is_identical_to(holder_map)) { 888 while (!current_map.is_identical_to(holder_map)) {
1147 ++depth; 889 ++depth;
1148 890
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
1194 if (heap()->InNewSpace(*prototype)) { 936 if (heap()->InNewSpace(*prototype)) {
1195 // The prototype is in new space; we cannot store a reference to it 937 // The prototype is in new space; we cannot store a reference to it
1196 // in the code. Load it from the map. 938 // in the code. Load it from the map.
1197 __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset)); 939 __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
1198 } else { 940 } else {
1199 // The prototype is in old space; load it directly. 941 // The prototype is in old space; load it directly.
1200 __ li(reg, Operand(prototype)); 942 __ li(reg, Operand(prototype));
1201 } 943 }
1202 } 944 }
1203 945
1204 if (save_at_depth == depth) {
1205 __ sw(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
1206 }
1207
1208 // Go to the next object in the prototype chain. 946 // Go to the next object in the prototype chain.
1209 current = prototype; 947 current = prototype;
1210 current_map = handle(current->map()); 948 current_map = handle(current->map());
1211 } 949 }
1212 950
1213 // Log the check depth. 951 // Log the check depth.
1214 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); 952 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
1215 953
1216 if (depth != 0 || check == CHECK_ALL_MAPS) { 954 if (depth != 0 || check == CHECK_ALL_MAPS) {
1217 // Check the holder map. 955 // Check the holder map.
(...skipping 28 matching lines...) Expand all
1246 Label success; 984 Label success;
1247 __ Branch(&success); 985 __ Branch(&success);
1248 GenerateRestoreName(masm(), miss, name); 986 GenerateRestoreName(masm(), miss, name);
1249 TailCallBuiltin(masm(), MissBuiltin(kind())); 987 TailCallBuiltin(masm(), MissBuiltin(kind()));
1250 __ bind(&success); 988 __ bind(&success);
1251 } 989 }
1252 } 990 }
1253 991
1254 992
1255 Register LoadStubCompiler::CallbackHandlerFrontend( 993 Register LoadStubCompiler::CallbackHandlerFrontend(
1256 Handle<Type> type, 994 Handle<HeapType> type,
1257 Register object_reg, 995 Register object_reg,
1258 Handle<JSObject> holder, 996 Handle<JSObject> holder,
1259 Handle<Name> name, 997 Handle<Name> name,
1260 Handle<Object> callback) { 998 Handle<Object> callback) {
1261 Label miss; 999 Label miss;
1262 1000
1263 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss); 1001 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
1264 1002
1265 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) { 1003 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
1266 ASSERT(!reg.is(scratch2())); 1004 ASSERT(!reg.is(scratch2()));
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
1317 1055
1318 1056
// Unchanged by the patch: return a constant load result in v0 (MIPS return
// value register) and ret.
1319 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) { 1057 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
1320 // Return the constant value. 1058 // Return the constant value.
1321 __ li(v0, value); 1059 __ li(v0, value);
1322 __ Ret(); 1060 __ Ret();
1323 } 1061 }
1324 1062
1325 1063
1326 void LoadStubCompiler::GenerateLoadCallback( 1064 void LoadStubCompiler::GenerateLoadCallback(
1327 const CallOptimization& call_optimization) {
1328 GenerateFastApiCall(
1329 masm(), call_optimization, receiver(), scratch3(), 0, NULL);
1330 }
1331
1332
1333 void LoadStubCompiler::GenerateLoadCallback(
1334 Register reg, 1065 Register reg,
1335 Handle<ExecutableAccessorInfo> callback) { 1066 Handle<ExecutableAccessorInfo> callback) {
1336 // Build AccessorInfo::args_ list on the stack and push property name below 1067 // Build AccessorInfo::args_ list on the stack and push property name below
1337 // the exit frame to make GC aware of them and store pointers to them. 1068 // the exit frame to make GC aware of them and store pointers to them.
1338 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); 1069 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
1339 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); 1070 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
1340 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); 1071 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
1341 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); 1072 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
1342 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); 1073 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
1343 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); 1074 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
(...skipping 15 matching lines...) Expand all
1359 __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize)); 1090 __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
1360 __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize)); 1091 __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
1361 __ li(scratch4(), 1092 __ li(scratch4(),
1362 Operand(ExternalReference::isolate_address(isolate()))); 1093 Operand(ExternalReference::isolate_address(isolate())));
1363 __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize)); 1094 __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
1364 __ sw(reg, MemOperand(sp, 1 * kPointerSize)); 1095 __ sw(reg, MemOperand(sp, 1 * kPointerSize));
1365 __ sw(name(), MemOperand(sp, 0 * kPointerSize)); 1096 __ sw(name(), MemOperand(sp, 0 * kPointerSize));
1366 __ Addu(scratch2(), sp, 1 * kPointerSize); 1097 __ Addu(scratch2(), sp, 1 * kPointerSize);
1367 1098
1368 __ mov(a2, scratch2()); // Saved in case scratch2 == a1. 1099 __ mov(a2, scratch2()); // Saved in case scratch2 == a1.
1369 __ mov(a0, sp); // (first argument - a0) = Handle<Name> 1100 // Abi for CallApiGetter.
1101 Register getter_address_reg = a2;
1370 1102
1371 const int kApiStackSpace = 1;
1372 FrameScope frame_scope(masm(), StackFrame::MANUAL);
1373 __ EnterExitFrame(false, kApiStackSpace);
1374
1375 // Create PropertyAccessorInfo instance on the stack above the exit frame with
1376 // scratch2 (internal::Object** args_) as the data.
1377 __ sw(a2, MemOperand(sp, kPointerSize));
1378 // (second argument - a1) = AccessorInfo&
1379 __ Addu(a1, sp, kPointerSize);
1380
1381 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
1382 Address getter_address = v8::ToCData<Address>(callback->getter()); 1103 Address getter_address = v8::ToCData<Address>(callback->getter());
1383 ApiFunction fun(getter_address); 1104 ApiFunction fun(getter_address);
1384 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL; 1105 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
1385 ExternalReference ref = ExternalReference(&fun, type, isolate()); 1106 ExternalReference ref = ExternalReference(&fun, type, isolate());
1107 __ li(getter_address_reg, Operand(ref));
1386 1108
1387 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); 1109 CallApiGetterStub stub;
1388 ExternalReference::Type thunk_type = 1110 __ TailCallStub(&stub);
1389 ExternalReference::PROFILING_GETTER_CALL;
1390 ApiFunction thunk_fun(thunk_address);
1391 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
1392 isolate());
1393 __ CallApiFunctionAndReturn(ref,
1394 getter_address,
1395 thunk_ref,
1396 a2,
1397 kStackUnwindSpace,
1398 MemOperand(fp, 6 * kPointerSize),
1399 NULL);
1400 } 1111 }
1401 1112
1402 1113
1403 void LoadStubCompiler::GenerateLoadInterceptor( 1114 void LoadStubCompiler::GenerateLoadInterceptor(
1404 Register holder_reg, 1115 Register holder_reg,
1405 Handle<Object> object, 1116 Handle<Object> object,
1406 Handle<JSObject> interceptor_holder, 1117 Handle<JSObject> interceptor_holder,
1407 LookupResult* lookup, 1118 LookupResult* lookup,
1408 Handle<Name> name) { 1119 Handle<Name> name) {
1409 ASSERT(interceptor_holder->HasNamedInterceptor()); 1120 ASSERT(interceptor_holder->HasNamedInterceptor());
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
1478 PushInterceptorArguments(masm(), receiver(), holder_reg, 1189 PushInterceptorArguments(masm(), receiver(), holder_reg,
1479 this->name(), interceptor_holder); 1190 this->name(), interceptor_holder);
1480 1191
1481 ExternalReference ref = ExternalReference( 1192 ExternalReference ref = ExternalReference(
1482 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate()); 1193 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
1483 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1); 1194 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
1484 } 1195 }
1485 } 1196 }
1486 1197
1487 1198
// --- Old revision only (deleted by the patch) ---
// For keyed call ICs the looked-up name arrives in a2; miss on mismatch.
1488 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
1489 if (kind_ == Code::KEYED_CALL_IC) {
1490 __ Branch(miss, ne, a2, Operand(name));
1491 }
1492 }
1493
1494
// --- Old revision only (deleted by the patch) ---
// Miss unless `function` is a non-smi heap object of JS_FUNCTION_TYPE;
// `scratch` is clobbered by GetObjectType.
1495 void CallStubCompiler::GenerateFunctionCheck(Register function,
1496 Register scratch,
1497 Label* miss) {
1498 __ JumpIfSmi(function, miss);
1499 __ GetObjectType(function, scratch, scratch);
1500 __ Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
1501 }
1502
1503
// --- Old revision only (deleted by the patch) ---
// Loads the cell's value into a1 and verifies it is still the expected
// function: via SharedFunctionInfo comparison when the function lives in new
// space (pointers into new space cannot be embedded in code), by direct
// handle comparison otherwise. Clobbers a3 and t0.
1504 void CallStubCompiler::GenerateLoadFunctionFromCell(
1505 Handle<Cell> cell,
1506 Handle<JSFunction> function,
1507 Label* miss) {
1508 // Get the value from the cell.
1509 __ li(a3, Operand(cell));
1510 __ lw(a1, FieldMemOperand(a3, Cell::kValueOffset));
1511
1512 // Check that the cell contains the same function.
1513 if (heap()->InNewSpace(*function)) {
1514 // We can't embed a pointer to a function in new space so we have
1515 // to verify that the shared function info is unchanged. This has
1516 // the nice side effect that multiple closures based on the same
1517 // function can all use this call IC. Before we load through the
1518 // function, we have to verify that it still is a function.
1519 GenerateFunctionCheck(a1, a3, miss);
1520
1521 // Check the shared function info. Make sure it hasn't changed.
1522 __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1523 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1524 __ Branch(miss, ne, t0, Operand(a3));
1525 } else {
1526 __ Branch(miss, ne, a1, Operand(function));
1527 }
1528 }
1529
1530
// Emits the miss path epilogue: tail-jumps to the generic call-miss stub
// matching this stub's argument count, kind (call vs. keyed call), and extra
// IC state, so the runtime can handle the failed case and repatch the IC.
1531 void CallStubCompiler::GenerateMissBranch() {
1532 Handle<Code> code =
1533 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1534 kind_,
1535 extra_state());
1536 __ Jump(code, RelocInfo::CODE_TARGET);
1537 }
1538
1539
// Compiles a call IC stub for the case where the callee is stored in a
// fast-mode property field of |object| (or a prototype up to |holder|):
// checks the receiver's map chain, loads the field into a1, then verifies it
// is a function and invokes it. Misses fall through to the generic stub.
1540 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
1541 Handle<JSObject> holder,
1542 PropertyIndex index,
1543 Handle<Name> name) {
1544 Label miss;
1545
1546 Register reg = HandlerFrontendHeader(
1547 object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Load the callee from the field (in-object or in the properties backing
// store) into a1 — the register GenerateJumpFunction requires.
1548 GenerateFastPropertyLoad(masm(), a1, reg, index.is_inobject(holder),
1549 index.translate(holder), Representation::Tagged());
1550 GenerateJumpFunction(object, a1, &miss);
1551
1552 HandlerFrontendFooter(&miss);
1553
1554 // Return the generated code.
1555 return GetCode(Code::FAST, name);
1556 }
1557
1558
// Custom call generator for Array.prototype.push. Handles the fast cases
// inline — argc == 0 (return length), and argc == 1 pushes into fast smi,
// object, or double elements, including growing new-space elements in place
// by kAllocationDelta slots — and otherwise tail-calls the C++ builtin.
// Returns a null handle when the receiver is not a plain extensible,
// unobserved JSArray, deferring to the regular call path.
1559 Handle<Code> CallStubCompiler::CompileArrayPushCall(
1560 Handle<Object> object,
1561 Handle<JSObject> holder,
1562 Handle<Cell> cell,
1563 Handle<JSFunction> function,
1564 Handle<String> name,
1565 Code::StubType type) {
1566 // If object is not an array or is observed or sealed, bail out to regular
1567 // call.
1568 if (!object->IsJSArray() ||
1569 !cell.is_null() ||
1570 Handle<JSArray>::cast(object)->map()->is_observed() ||
1571 !Handle<JSArray>::cast(object)->map()->is_extensible()) {
1572 return Handle<Code>::null();
1573 }
1574
1575 Label miss;
1576 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
1577 Register receiver = a0;
1578 Register scratch = a1;
1579
1580 const int argc = arguments().immediate();
1581
1582 if (argc == 0) {
1583 // Nothing to do, just return the length.
1584 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1585 __ DropAndRet(argc + 1);
1586 } else {
1587 Label call_builtin;
1588 if (argc == 1) { // Otherwise fall through to call the builtin.
1589 Label attempt_to_grow_elements, with_write_barrier, check_double;
1590
1591 Register elements = t2;
1592 Register end_elements = t1;
1593 // Get the elements array of the object.
1594 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1595
1596 // Check that the elements are in fast mode and writable.
1597 __ CheckMap(elements,
1598 scratch,
1599 Heap::kFixedArrayMapRootIndex,
1600 &check_double,
1601 DONT_DO_SMI_CHECK);
1602
1603 // Get the array's length into scratch and calculate new length.
1604 __ lw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1605 STATIC_ASSERT(kSmiTagSize == 1);
1606 STATIC_ASSERT(kSmiTag == 0);
// Length is a smi, so smi arithmetic adds argc directly to it.
1607 __ Addu(scratch, scratch, Operand(Smi::FromInt(argc)));
1608
1609 // Get the elements' length.
1610 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1611
1612 // Check if we could survive without allocation.
1613 __ Branch(&attempt_to_grow_elements, gt, scratch, Operand(t0));
1614
1615 // Check if value is a smi.
1616 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1617 __ JumpIfNotSmi(t0, &with_write_barrier);
1618
1619 // Save new length.
1620 __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1621
1622 // Store the value.
1623 // We may need a register containing the address end_elements below,
1624 // so write back the value in end_elements.
1625 __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize);
1626 __ Addu(end_elements, elements, end_elements);
1627 const int kEndElementsOffset =
1628 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1629 __ Addu(end_elements, end_elements, kEndElementsOffset);
1630 __ sw(t0, MemOperand(end_elements));
1631
// Smi stored into smi elements: no write barrier needed; return new length.
1632 // Check for a smi.
1633 __ mov(v0, scratch);
1634 __ DropAndRet(argc + 1);
1635
1636 __ bind(&check_double);
1637
1638 // Check that the elements are in fast mode and writable.
1639 __ CheckMap(elements,
1640 scratch,
1641 Heap::kFixedDoubleArrayMapRootIndex,
1642 &call_builtin,
1643 DONT_DO_SMI_CHECK);
1644
1645 // Get the array's length into scratch and calculate new length.
1646 __ lw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1647 STATIC_ASSERT(kSmiTagSize == 1);
1648 STATIC_ASSERT(kSmiTag == 0);
1649 __ Addu(scratch, scratch, Operand(Smi::FromInt(argc)));
1650
1651 // Get the elements' length.
1652 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1653
1654 // Check if we could survive without allocation.
1655 __ Branch(&call_builtin, gt, scratch, Operand(t0));
1656
// StoreNumberToDoubleElements bails to call_builtin for non-numbers.
1657 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1658 __ StoreNumberToDoubleElements(
1659 t0, scratch, elements, a3, t1, a2,
1660 &call_builtin, argc * kDoubleSize);
1661
1662 // Save new length.
1663 __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1664
1665 __ mov(v0, scratch);
1666 __ DropAndRet(argc + 1);
1667
// Non-smi pushed into (what we hoped were) smi elements: may need an
// elements-kind transition and a write barrier.
1668 __ bind(&with_write_barrier);
1669
1670 __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1671
1672 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
1673 Label fast_object, not_fast_object;
1674 __ CheckFastObjectElements(a3, t3, &not_fast_object);
1675 __ jmp(&fast_object);
1676 // In case of fast smi-only, convert to fast object, otherwise bail out.
1677 __ bind(&not_fast_object);
1678 __ CheckFastSmiElements(a3, t3, &call_builtin);
1679
// Pushing a heap number into smi-only elements would need the DOUBLE
// transition, which is not generated inline here — go to the builtin.
1680 __ lw(t3, FieldMemOperand(t0, HeapObject::kMapOffset));
1681 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
1682 __ Branch(&call_builtin, eq, t3, Operand(at));
// a0: receiver (comment previously said "edx", a leftover from the
// ia32 version this file was ported from)
1683 // a0: receiver
1684 // a3: map
1685 Label try_holey_map;
1686 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1687 FAST_ELEMENTS,
1688 a3,
1689 t3,
1690 &try_holey_map);
1691 __ mov(a2, receiver);
1692 ElementsTransitionGenerator::
1693 GenerateMapChangeElementsTransition(masm(),
1694 DONT_TRACK_ALLOCATION_SITE,
1695 NULL);
1696 __ jmp(&fast_object);
1697
1698 __ bind(&try_holey_map);
1699 __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
1700 FAST_HOLEY_ELEMENTS,
1701 a3,
1702 t3,
1703 &call_builtin);
1704 __ mov(a2, receiver);
1705 ElementsTransitionGenerator::
1706 GenerateMapChangeElementsTransition(masm(),
1707 DONT_TRACK_ALLOCATION_SITE,
1708 NULL);
1709 __ bind(&fast_object);
1710 } else {
1711 __ CheckFastObjectElements(a3, a3, &call_builtin);
1712 }
1713
1714 // Save new length.
1715 __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1716
1717 // Store the value.
1718 // We may need a register containing the address end_elements below,
1719 // so write back the value in end_elements.
1720 __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize);
1721 __ Addu(end_elements, elements, end_elements);
1722 __ Addu(end_elements, end_elements, kEndElementsOffset);
1723 __ sw(t0, MemOperand(end_elements));
1724
// Heap object stored into a FixedArray: record the slot for the GC.
1725 __ RecordWrite(elements,
1726 end_elements,
1727 t0,
1728 kRAHasNotBeenSaved,
1729 kDontSaveFPRegs,
1730 EMIT_REMEMBERED_SET,
1731 OMIT_SMI_CHECK);
1732 __ mov(v0, scratch);
1733 __ DropAndRet(argc + 1);
1734
1735 __ bind(&attempt_to_grow_elements);
1736 // scratch: array's length + 1.
1737 // t0: elements' length.
1738
1739 if (!FLAG_inline_new) {
1740 __ Branch(&call_builtin);
1741 }
1742
1743 __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize));
1744 // Growing elements that are SMI-only requires special handling in case
1745 // the new element is non-Smi. For now, delegate to the builtin.
1746 Label no_fast_elements_check;
1747 __ JumpIfSmi(a2, &no_fast_elements_check);
1748 __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1749 __ CheckFastObjectElements(t3, t3, &call_builtin);
1750 __ bind(&no_fast_elements_check);
1751
1752 ExternalReference new_space_allocation_top =
1753 ExternalReference::new_space_allocation_top_address(isolate());
1754 ExternalReference new_space_allocation_limit =
1755 ExternalReference::new_space_allocation_limit_address(isolate());
1756
// The elements can only be grown in place when they are the most recent
// new-space allocation, i.e. their end coincides with the allocation top.
1757 const int kAllocationDelta = 4;
1758 // Load top and check if it is the end of elements.
1759 __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize);
1760 __ Addu(end_elements, elements, end_elements);
1761 __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1762 __ li(t3, Operand(new_space_allocation_top));
1763 __ lw(a3, MemOperand(t3));
1764 __ Branch(&call_builtin, ne, end_elements, Operand(a3));
1765
1766 __ li(t5, Operand(new_space_allocation_limit));
1767 __ lw(t5, MemOperand(t5));
1768 __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
1769 __ Branch(&call_builtin, hi, a3, Operand(t5));
1770
1771 // We fit and could grow elements.
1772 // Update new_space_allocation_top.
1773 __ sw(a3, MemOperand(t3));
1774 // Push the argument.
1775 __ sw(a2, MemOperand(end_elements));
1776 // Fill the rest with holes.
1777 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
1778 for (int i = 1; i < kAllocationDelta; i++) {
1779 __ sw(a3, MemOperand(end_elements, i * kPointerSize));
1780 }
1781
1782 // Update elements' and array's sizes.
1783 __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1784 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1785 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1786
1787 // Elements are in new space, so write barrier is not required.
1788 __ mov(v0, scratch);
1789 __ DropAndRet(argc + 1);
1790 }
1791 __ bind(&call_builtin);
1792 __ TailCallExternalReference(
1793 ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1);
1794 }
1795
1796 HandlerFrontendFooter(&miss);
1797
1798 // Return the generated code.
1799 return GetCode(type, name);
1800 }
1801
1802
// Custom call generator for Array.prototype.pop on fast FixedArray elements:
// decrements the length, returns the last element, and refills its slot with
// the hole. Popping a hole (or length 0 going negative... length 0 branches
// to return_undefined) and any non-fast elements fall back to the C++
// builtin. Returns a null handle for receivers the fast path cannot handle.
1803 Handle<Code> CallStubCompiler::CompileArrayPopCall(
1804 Handle<Object> object,
1805 Handle<JSObject> holder,
1806 Handle<Cell> cell,
1807 Handle<JSFunction> function,
1808 Handle<String> name,
1809 Code::StubType type) {
1810 // If object is not an array or is observed or sealed, bail out to regular
1811 // call.
1812 if (!object->IsJSArray() ||
1813 !cell.is_null() ||
1814 Handle<JSArray>::cast(object)->map()->is_observed() ||
1815 !Handle<JSArray>::cast(object)->map()->is_extensible()) {
1816 return Handle<Code>::null();
1817 }
1818
1819 Label miss, return_undefined, call_builtin;
1820 Register receiver = a0;
1821 Register scratch = a1;
1822 Register elements = a3;
1823 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
1824
1825 // Get the elements array of the object.
1826 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1827
1828 // Check that the elements are in fast mode and writable.
1829 __ CheckMap(elements,
1830 scratch,
1831 Heap::kFixedArrayMapRootIndex,
1832 &call_builtin,
1833 DONT_DO_SMI_CHECK);
1834
// New length (as a smi) = old length - 1; negative means the array was
// empty, so pop() returns undefined.
1835 // Get the array's length into t0 and calculate new length.
1836 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1837 __ Subu(t0, t0, Operand(Smi::FromInt(1)));
1838 __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
1839
1840 // Get the last element.
1841 __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1842 STATIC_ASSERT(kSmiTagSize == 1);
1843 STATIC_ASSERT(kSmiTag == 0);
1844 // We can't address the last element in one operation. Compute the more
1845 // expensive shift first, and use an offset later on.
1846 __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
1847 __ Addu(elements, elements, t1);
1848 __ lw(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
// A hole at the end would require prototype-chain lookup; let the builtin
// handle that case.
1849 __ Branch(&call_builtin, eq, scratch, Operand(t2));
1850
1851 // Set the array's length.
1852 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1853
1854 // Fill with the hole.
1855 __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
1856 const int argc = arguments().immediate();
1857 __ mov(v0, scratch);
1858 __ DropAndRet(argc + 1);
1859
1860 __ bind(&return_undefined);
1861 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1862 __ DropAndRet(argc + 1);
1863
1864 __ bind(&call_builtin);
1865 __ TailCallExternalReference(
1866 ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
1867
1868 HandlerFrontendFooter(&miss);
1869
1870 // Return the generated code.
1871 return GetCode(type, name);
1872 }
1873
1874
// Compiles a monomorphic stub that calls a simple API (C++ callback)
// function directly, bypassing the normal JS call sequence. Bails out (null
// handle) for global receivers, cell-based callees, non-JSObject receivers,
// and receivers whose expected-type depth can't be established. Note the two
// miss labels: |miss_before_stack_reserved| is used before
// ReserveSpaceForFastApiCall adjusts the stack, |miss| afterwards so the
// reserved slots can be freed on the miss path.
1875 Handle<Code> CallStubCompiler::CompileFastApiCall(
1876 const CallOptimization& optimization,
1877 Handle<Object> object,
1878 Handle<JSObject> holder,
1879 Handle<Cell> cell,
1880 Handle<JSFunction> function,
1881 Handle<String> name) {
1882
1883 Counters* counters = isolate()->counters();
1884
1885 ASSERT(optimization.is_simple_api_call());
1886 // Bail out if object is a global object as we don't want to
1887 // repatch it to global receiver.
1888 if (object->IsGlobalObject()) return Handle<Code>::null();
1889 if (!cell.is_null()) return Handle<Code>::null();
1890 if (!object->IsJSObject()) return Handle<Code>::null();
1891 int depth = optimization.GetPrototypeDepthOfExpectedType(
1892 Handle<JSObject>::cast(object), holder);
1893 if (depth == kInvalidProtoDepth) return Handle<Code>::null();
1894
1895 Label miss, miss_before_stack_reserved;
1896
1897 GenerateNameCheck(name, &miss_before_stack_reserved);
1898
1899 // Get the receiver from the stack.
1900 const int argc = arguments().immediate();
1901 __ lw(a1, MemOperand(sp, argc * kPointerSize));
1902
1903 // Check that the receiver isn't a smi.
1904 __ JumpIfSmi(a1, &miss_before_stack_reserved);
1905
1906 __ IncrementCounter(counters->call_const(), 1, a0, a3);
1907 __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
1908
1909 ReserveSpaceForFastApiCall(masm(), a0);
1910
1911 // Check that the maps haven't changed and find a Holder as a side effect.
1912 CheckPrototypes(
1913 IC::CurrentTypeOf(object, isolate()),
1914 a1, holder, a0, a3, t0, name, depth, &miss);
1915
1916 GenerateFastApiDirectCall(masm(), optimization, argc, false);
1917
// Miss after the stack reservation: undo it before the generic miss jump
// (emitted by HandlerFrontendFooter below).
1918 __ bind(&miss);
1919 FreeSpaceForFastApiCall(masm());
1920
1921 HandlerFrontendFooter(&miss_before_stack_reserved);
1922
1923 // Return the generated code.
1924 return GetCode(function);
1925 }
1926
1927
// Jumps to |miss| unless |object| is exactly the true or false oddball.
// Uses the `at` scratch register for the root comparisons.
1928 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { 1199 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
1929 Label success; 1200 Label success;
1930 // Check that the object is a boolean. 1201 // Check that the object is a boolean.
1931 __ LoadRoot(at, Heap::kTrueValueRootIndex); 1202 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1932 __ Branch(&success, eq, object, Operand(at)); 1203 __ Branch(&success, eq, object, Operand(at));
1933 __ LoadRoot(at, Heap::kFalseValueRootIndex); 1204 __ LoadRoot(at, Heap::kFalseValueRootIndex);
1934 __ Branch(miss, ne, object, Operand(at)); 1205 __ Branch(miss, ne, object, Operand(at));
1935 __ bind(&success); 1206 __ bind(&success);
1936 } 1207 }
1937 1208
1938 1209
// When the receiver is a global object, overwrites the on-stack receiver
// slot with undefined — the implicit receiver for such calls — so the callee
// does not see the raw global object. No-op for other receivers. Clobbers a3.
1939 void CallStubCompiler::PatchImplicitReceiver(Handle<Object> object) {
1940 if (object->IsGlobalObject()) {
// Receiver sits below the arguments on the stack (argc slots up).
1941 const int argc = arguments().immediate();
1942 const int receiver_offset = argc * kPointerSize;
1943 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1944 __ sw(a3, MemOperand(sp, receiver_offset));
1945 }
1946 }
1947
1948
// Common prologue for call stubs: checks the (keyed) name, loads the
// receiver from the stack into a0, performs the receiver type check selected
// by |check| (map chain for plain receivers, or type + hidden-prototype map
// chain for string/symbol/number/boolean primitives), and returns the
// register holding the holder. All failure paths branch to |miss|.
1949 Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object,
1950 Handle<JSObject> holder,
1951 Handle<Name> name,
1952 CheckType check,
1953 Label* miss) {
1954 // ----------- S t a t e -------------
1955 // -- a2 : name
1956 // -- ra : return address
1957 // -----------------------------------
1958 GenerateNameCheck(name, miss);
1959
1960 Register reg = a0;
1961
1962 // Get the receiver from the stack.
1963 const int argc = arguments().immediate();
1964 const int receiver_offset = argc * kPointerSize;
1965 __ lw(a0, MemOperand(sp, receiver_offset));
1966
// A smi receiver is only acceptable for NUMBER_CHECK (smis are numbers).
1967 // Check that the receiver isn't a smi.
1968 if (check != NUMBER_CHECK) {
1969 __ JumpIfSmi(a0, miss);
1970 }
1971
1972 // Make sure that it's okay not to patch the on stack receiver
1973 // unless we're doing a receiver map check.
1974 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
1975 switch (check) {
1976 case RECEIVER_MAP_CHECK:
1977 __ IncrementCounter(isolate()->counters()->call_const(), 1, a1, a3);
1978
1979 // Check that the maps haven't changed.
1980 reg = CheckPrototypes(
1981 IC::CurrentTypeOf(object, isolate()),
1982 reg, holder, a1, a3, t0, name, miss);
1983 break;
1984
1985 case STRING_CHECK: {
1986 // Check that the object is a string.
1987 __ GetObjectType(reg, a3, a3);
1988 __ Branch(miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
// Loads String.prototype's function prototype into a1 for the chain
// walk performed after the switch.
1989 // Check that the maps starting from the prototype haven't changed.
1990 GenerateDirectLoadGlobalFunctionPrototype(
1991 masm(), Context::STRING_FUNCTION_INDEX, a1, miss);
1992 break;
1993 }
1994 case SYMBOL_CHECK: {
1995 // Check that the object is a symbol.
1996 __ GetObjectType(reg, a1, a3);
1997 __ Branch(miss, ne, a3, Operand(SYMBOL_TYPE));
1998 // Check that the maps starting from the prototype haven't changed.
1999 GenerateDirectLoadGlobalFunctionPrototype(
2000 masm(), Context::SYMBOL_FUNCTION_INDEX, a1, miss);
2001 break;
2002 }
2003 case NUMBER_CHECK: {
2004 Label fast;
2005 // Check that the object is a smi or a heap number.
2006 __ JumpIfSmi(reg, &fast);
2007 __ GetObjectType(reg, a3, a3);
2008 __ Branch(miss, ne, a3, Operand(HEAP_NUMBER_TYPE));
2009 __ bind(&fast);
2010 // Check that the maps starting from the prototype haven't changed.
2011 GenerateDirectLoadGlobalFunctionPrototype(
2012 masm(), Context::NUMBER_FUNCTION_INDEX, a1, miss);
2013 break;
2014 }
2015 case BOOLEAN_CHECK: {
2016 GenerateBooleanCheck(reg, miss);
2017
2018 // Check that the maps starting from the prototype haven't changed.
2019 GenerateDirectLoadGlobalFunctionPrototype(
2020 masm(), Context::BOOLEAN_FUNCTION_INDEX, a1, miss);
2021 break;
2022 }
2023 }
2024
// For primitive receivers, continue the map-chain check from the hidden
// prototype that GenerateDirectLoadGlobalFunctionPrototype left in a1.
2025 if (check != RECEIVER_MAP_CHECK) {
2026 Handle<Object> prototype(object->GetPrototype(isolate()), isolate());
2027 reg = CheckPrototypes(
2028 IC::CurrentTypeOf(prototype, isolate()),
2029 a1, holder, a1, a3, t0, name, miss);
2030 }
2031
2032 return reg;
2033 }
2034
2035
// Epilogue shared by call stubs: verifies |function| (required to be a1) is
// a JSFunction, patches the implicit receiver for global-object receivers,
// then tail-jumps into the function. Does not return to the stub.
2036 void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
2037 Register function,
2038 Label* miss) {
2039 ASSERT(function.is(a1));
2040 // Check that the function really is a function.
2041 GenerateFunctionCheck(function, a3, miss);
2042 PatchImplicitReceiver(object);
2043
2044 // Invoke the function.
2045 __ InvokeFunction(a1, arguments(), JUMP_FUNCTION, NullCallWrapper());
2046 }
2047
2048
// Compiles a call IC stub for a property behind a named interceptor: runs
// the interceptor (via CallInterceptorCompiler), takes its result in v0 as
// the callee, restores the receiver, and jumps to the function.
2049 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2050 Handle<JSObject> holder,
2051 Handle<Name> name) {
2052 Label miss;
2053
2054 GenerateNameCheck(name, &miss);
2055
2056 // Get the number of arguments.
2057 const int argc = arguments().immediate();
// Determine what (if anything) lies behind the interceptor so the
// interceptor compiler can specialize the lookup.
2058 LookupResult lookup(isolate());
2059 LookupPostInterceptor(holder, name, &lookup);
2060
2061 // Get the receiver from the stack.
2062 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2063
2064 CallInterceptorCompiler compiler(this, arguments(), a2);
2065 compiler.Compile(masm(), object, holder, name, &lookup, a1, a3, t0, a0,
2066 &miss);
2067
2068 // Move returned value, the function to call, to a1.
2069 __ mov(a1, v0);
2070 // Restore receiver.
2071 __ lw(a0, MemOperand(sp, argc * kPointerSize));
2072
2073 GenerateJumpFunction(object, a1, &miss);
2074
2075 HandlerFrontendFooter(&miss);
2076
2077 // Return the generated code.
2078 return GetCode(Code::FAST, name);
2079 }
2080
2081
// Compiles a call IC stub for a function stored in a global property cell.
// First tries a custom call generator (e.g. the Array push/pop stubs above);
// otherwise checks the receiver, loads and validates the callee from |cell|
// into a1, and jumps to it.
2082 Handle<Code> CallStubCompiler::CompileCallGlobal(
2083 Handle<JSObject> object,
2084 Handle<GlobalObject> holder,
2085 Handle<PropertyCell> cell,
2086 Handle<JSFunction> function,
2087 Handle<Name> name) {
2088 if (HasCustomCallGenerator(function)) {
2089 Handle<Code> code = CompileCustomCall(
2090 object, holder, cell, function, Handle<String>::cast(name),
2091 Code::NORMAL);
2092 // A null handle means bail out to the regular compiler code below.
2093 if (!code.is_null()) return code;
2094 }
2095
2096 Label miss;
2097 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2098 // Potentially loads a closure that matches the shared function info of the
2099 // function, rather than function.
2100 GenerateLoadFunctionFromCell(cell, function, &miss);
2101 Counters* counters = isolate()->counters();
2102 __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
// a1 was loaded by GenerateLoadFunctionFromCell; this overload of
// GenerateJumpFunction takes the known JSFunction handle.
2103 GenerateJumpFunction(object, a1, function);
2104 HandlerFrontendFooter(&miss);
2105
2106 // Return the generated code.
2107 return GetCode(Code::NORMAL, name);
2108 }
2109
2110
// Compiles a store stub for a property backed by an ExecutableAccessorInfo
// setter callback: after the map-chain check, pushes receiver, holder,
// callback info, name and value, then tail-calls the kStoreCallbackProperty
// runtime entry (5 arguments, 1 result).
2111 Handle<Code> StoreStubCompiler::CompileStoreCallback( 1210 Handle<Code> StoreStubCompiler::CompileStoreCallback(
2112 Handle<JSObject> object, 1211 Handle<JSObject> object,
2113 Handle<JSObject> holder, 1212 Handle<JSObject> holder,
2114 Handle<Name> name, 1213 Handle<Name> name,
2115 Handle<ExecutableAccessorInfo> callback) { 1214 Handle<ExecutableAccessorInfo> callback) {
2116 Register holder_reg = HandlerFrontend( 1215 Register holder_reg = HandlerFrontend(
2117 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name); 1216 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);
2118 1217
2119 // Stub never generated for non-global objects that require access 1218 // Stub never generated for non-global objects that require access
2120 // checks. 1219 // checks.
2121 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); 1220 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
2122 1221
2123 __ push(receiver()); // Receiver. 1222 __ Push(receiver(), holder_reg); // Receiver.
2124 __ push(holder_reg);
2125 __ li(at, Operand(callback)); // Callback info. 1223 __ li(at, Operand(callback)); // Callback info.
2126 __ push(at); 1224 __ push(at);
2127 __ li(at, Operand(name)); 1225 __ li(at, Operand(name));
2128 __ Push(at, value()); 1226 __ Push(at, value());
2129 1227
2130 // Do tail-call to the runtime system. 1228 // Do tail-call to the runtime system.
2131 ExternalReference store_callback_property = 1229 ExternalReference store_callback_property =
2132 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); 1230 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2133 __ TailCallExternalReference(store_callback_property, 5, 1); 1231 __ TailCallExternalReference(store_callback_property, 5, 1);
2134 1232
2135 // Return the generated code. 1233 // Return the generated code.
2136 return GetCode(kind(), Code::FAST, name); 1234 return GetCode(kind(), Code::FAST, name);
2137 } 1235 }
2138 1236
2139 1237
// Overload of CompileStoreCallback for setters that qualify as simple API
// calls: after the map-chain check, calls the API function directly with the
// stored value as its single argument instead of going through the runtime.
2140 Handle<Code> StoreStubCompiler::CompileStoreCallback(
2141 Handle<JSObject> object,
2142 Handle<JSObject> holder,
2143 Handle<Name> name,
2144 const CallOptimization& call_optimization) {
2145 HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
2146 receiver(), holder, name);
2147
// One-element argument array: the value being stored.
2148 Register values[] = { value() };
2149 GenerateFastApiCall(
2150 masm(), call_optimization, receiver(), scratch3(), 1, values);
2151
2152 // Return the generated code.
2153 return GetCode(kind(), Code::FAST, name);
2154 }
2155
2156
2157 #undef __ 1238 #undef __
2158 #define __ ACCESS_MASM(masm) 1239 #define __ ACCESS_MASM(masm)
2159 1240
2160 1241
2161 void StoreStubCompiler::GenerateStoreViaSetter( 1242 void StoreStubCompiler::GenerateStoreViaSetter(
2162 MacroAssembler* masm, 1243 MacroAssembler* masm,
1244 Handle<HeapType> type,
2163 Handle<JSFunction> setter) { 1245 Handle<JSFunction> setter) {
2164 // ----------- S t a t e ------------- 1246 // ----------- S t a t e -------------
2165 // -- a0 : value 1247 // -- a0 : value
2166 // -- a1 : receiver 1248 // -- a1 : receiver
2167 // -- a2 : name 1249 // -- a2 : name
2168 // -- ra : return address 1250 // -- ra : return address
2169 // ----------------------------------- 1251 // -----------------------------------
2170 { 1252 {
2171 FrameScope scope(masm, StackFrame::INTERNAL); 1253 FrameScope scope(masm, StackFrame::INTERNAL);
1254 Register receiver = a1;
1255 Register value = a0;
2172 1256
2173 // Save value register, so we can restore it later. 1257 // Save value register, so we can restore it later.
2174 __ push(a0); 1258 __ push(value);
2175 1259
2176 if (!setter.is_null()) { 1260 if (!setter.is_null()) {
2177 // Call the JavaScript setter with receiver and value on the stack. 1261 // Call the JavaScript setter with receiver and value on the stack.
2178 __ push(a1); 1262 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
2179 __ push(a0); 1263 // Swap in the global receiver.
1264 __ lw(receiver,
1265 FieldMemOperand(
1266 receiver, JSGlobalObject::kGlobalReceiverOffset));
1267 }
1268 __ Push(receiver, value);
2180 ParameterCount actual(1); 1269 ParameterCount actual(1);
2181 ParameterCount expected(setter); 1270 ParameterCount expected(setter);
2182 __ InvokeFunction(setter, expected, actual, 1271 __ InvokeFunction(setter, expected, actual,
2183 CALL_FUNCTION, NullCallWrapper()); 1272 CALL_FUNCTION, NullCallWrapper());
2184 } else { 1273 } else {
2185 // If we generate a global code snippet for deoptimization only, remember 1274 // If we generate a global code snippet for deoptimization only, remember
2186 // the place to continue after deoptimization. 1275 // the place to continue after deoptimization.
2187 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); 1276 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
2188 } 1277 }
2189 1278
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
2228 1317
2229 // Handle store cache miss. 1318 // Handle store cache miss.
2230 __ bind(&miss); 1319 __ bind(&miss);
2231 TailCallBuiltin(masm(), MissBuiltin(kind())); 1320 TailCallBuiltin(masm(), MissBuiltin(kind()));
2232 1321
2233 // Return the generated code. 1322 // Return the generated code.
2234 return GetCode(kind(), Code::FAST, name); 1323 return GetCode(kind(), Code::FAST, name);
2235 } 1324 }
2236 1325
2237 1326
2238 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<Type> type, 1327 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
2239 Handle<JSObject> last, 1328 Handle<JSObject> last,
2240 Handle<Name> name) { 1329 Handle<Name> name) {
2241 NonexistentHandlerFrontend(type, last, name); 1330 NonexistentHandlerFrontend(type, last, name);
2242 1331
2243 // Return undefined if maps of the full prototype chain is still the same. 1332 // Return undefined if maps of the full prototype chain is still the same.
2244 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); 1333 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2245 __ Ret(); 1334 __ Ret();
2246 1335
2247 // Return the generated code. 1336 // Return the generated code.
2248 return GetCode(kind(), Code::FAST, name); 1337 return GetCode(kind(), Code::FAST, name);
(...skipping 26 matching lines...) Expand all
2275 static Register registers[] = { a2, a1, a0, a3, t0, t1 }; 1364 static Register registers[] = { a2, a1, a0, a3, t0, t1 };
2276 return registers; 1365 return registers;
2277 } 1366 }
2278 1367
2279 1368
2280 #undef __ 1369 #undef __
2281 #define __ ACCESS_MASM(masm) 1370 #define __ ACCESS_MASM(masm)
2282 1371
2283 1372
// Emits a load that invokes a JavaScript getter: builds an internal frame,
// pushes the receiver (swapping in the global receiver for global-object
// receivers — right-column/new revision), and calls |getter|. A null
// |getter| only records the deopt continuation PC. The getter's result is
// returned to the IC caller.
2284 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm, 1373
1374 Handle<HeapType> type,
2285 Register receiver, 1375 Register receiver,
2286 Handle<JSFunction> getter) { 1376 Handle<JSFunction> getter) {
2287 // ----------- S t a t e ------------- 1377 // ----------- S t a t e -------------
2288 // -- a0 : receiver 1378 // -- a0 : receiver
2289 // -- a2 : name 1379 // -- a2 : name
2290 // -- ra : return address 1380 // -- ra : return address
2291 // ----------------------------------- 1381 // -----------------------------------
2292 { 1382 {
2293 FrameScope scope(masm, StackFrame::INTERNAL); 1383 FrameScope scope(masm, StackFrame::INTERNAL);
2294 1384
2295 if (!getter.is_null()) { 1385 if (!getter.is_null()) {
2296 // Call the JavaScript getter with the receiver on the stack. 1386 // Call the JavaScript getter with the receiver on the stack.
1387 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1388 // Swap in the global receiver.
1389 __ lw(receiver,
1390 FieldMemOperand(
1391 receiver, JSGlobalObject::kGlobalReceiverOffset));
1392 }
2297 __ push(receiver); 1393 __ push(receiver);
2298 ParameterCount actual(0); 1394 ParameterCount actual(0);
2299 ParameterCount expected(getter); 1395 ParameterCount expected(getter);
2300 __ InvokeFunction(getter, expected, actual, 1396 __ InvokeFunction(getter, expected, actual,
2301 CALL_FUNCTION, NullCallWrapper()); 1397 CALL_FUNCTION, NullCallWrapper());
2302 } else { 1398 } else {
2303 // If we generate a global code snippet for deoptimization only, remember 1399 // If we generate a global code snippet for deoptimization only, remember
2304 // the place to continue after deoptimization. 1400 // the place to continue after deoptimization.
2305 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); 1401 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
2306 } 1402 }
2307 1403
2308 // Restore context register. 1404 // Restore context register.
2309 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1405 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 
2310 } 1406 }
2311 __ Ret(); 1407 __ Ret();
2312 } 1408 }
2313 1409
2314 1410
2315 #undef __ 1411 #undef __
2316 #define __ ACCESS_MASM(masm()) 1412 #define __ ACCESS_MASM(masm())
2317 1413
2318 1414
// Compiles a load stub for a global property stored in a property cell:
// checks the receiver's map chain, loads the cell value into t0, misses on
// the hole for deletable properties, and returns the value in v0 (using the
// branch delay slot for the move). Note the diff moves HandlerFrontendFooter
// after the Ret in the new revision.
2319 Handle<Code> LoadStubCompiler::CompileLoadGlobal( 1415 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
2320 Handle<Type> type, 1416 Handle<HeapType> type,
2321 Handle<GlobalObject> global, 1417 Handle<GlobalObject> global,
2322 Handle<PropertyCell> cell, 1418 Handle<PropertyCell> cell,
2323 Handle<Name> name, 1419 Handle<Name> name,
2324 bool is_dont_delete) { 1420 bool is_dont_delete) {
2325 Label miss; 1421 Label miss;
2326 1422
2327 HandlerFrontendHeader(type, receiver(), global, name, &miss); 1423 HandlerFrontendHeader(type, receiver(), global, name, &miss);
2328 1424
2329 // Get the value from the cell. 1425 // Get the value from the cell.
2330 __ li(a3, Operand(cell)); 1426 __ li(a3, Operand(cell));
2331 __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset)); 1427 __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));
2332 1428
// The hole marks a deleted global property; only possible when the
// property is configurable (not DONT_DELETE).
2333 // Check for deleted property if property can actually be deleted. 1429 // Check for deleted property if property can actually be deleted.
2334 if (!is_dont_delete) { 1430 if (!is_dont_delete) {
2335 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 1431 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2336 __ Branch(&miss, eq, t0, Operand(at)); 1432 __ Branch(&miss, eq, t0, Operand(at));
2337 } 1433 }
2338 1434
2339 HandlerFrontendFooter(name, &miss);
2340
2341 Counters* counters = isolate()->counters(); 1435 Counters* counters = isolate()->counters();
2342 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3); 1436 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
// The mov executes in the Ret delay slot, putting the result in v0.
2343 __ Ret(USE_DELAY_SLOT); 1437 __ Ret(USE_DELAY_SLOT);
2344 __ mov(v0, t0); 1438 __ mov(v0, t0);
2345 1439
1440 HandlerFrontendFooter(name, &miss);
1441
2346 // Return the generated code. 1442 // Return the generated code.
2347 return GetCode(kind(), Code::NORMAL, name); 1443 return GetCode(kind(), Code::NORMAL, name);
2348 } 1444 }
2349 1445
2350 1446
2351 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC( 1447 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
2352 TypeHandleList* types, 1448 TypeHandleList* types,
2353 CodeHandleList* handlers, 1449 CodeHandleList* handlers,
2354 Handle<Name> name, 1450 Handle<Name> name,
2355 Code::StubType type, 1451 Code::StubType type,
2356 IcCheckType check) { 1452 IcCheckType check) {
2357 Label miss; 1453 Label miss;
2358 1454
2359 if (check == PROPERTY && 1455 if (check == PROPERTY &&
2360 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) { 1456 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
2361 __ Branch(&miss, ne, this->name(), Operand(name)); 1457 __ Branch(&miss, ne, this->name(), Operand(name));
2362 } 1458 }
2363 1459
2364 Label number_case; 1460 Label number_case;
1461 Register match = scratch1();
2365 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; 1462 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
2366 __ JumpIfSmi(receiver(), smi_target); 1463 __ JumpIfSmi(receiver(), smi_target, match); // Reg match is 0 if Smi.
2367 1464
2368 Register map_reg = scratch1(); 1465 Register map_reg = scratch2();
2369 1466
2370 int receiver_count = types->length(); 1467 int receiver_count = types->length();
2371 int number_of_handled_maps = 0; 1468 int number_of_handled_maps = 0;
2372 __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset)); 1469 __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
2373 for (int current = 0; current < receiver_count; ++current) { 1470 for (int current = 0; current < receiver_count; ++current) {
2374 Handle<Type> type = types->at(current); 1471 Handle<HeapType> type = types->at(current);
2375 Handle<Map> map = IC::TypeToMap(*type, isolate()); 1472 Handle<Map> map = IC::TypeToMap(*type, isolate());
2376 if (!map->is_deprecated()) { 1473 if (!map->is_deprecated()) {
2377 number_of_handled_maps++; 1474 number_of_handled_maps++;
2378 if (type->Is(Type::Number())) { 1475 // Check map and tail call if there's a match.
1476 // Separate compare from branch, to provide path for above JumpIfSmi().
1477 __ Subu(match, map_reg, Operand(map));
1478 if (type->Is(HeapType::Number())) {
2379 ASSERT(!number_case.is_unused()); 1479 ASSERT(!number_case.is_unused());
2380 __ bind(&number_case); 1480 __ bind(&number_case);
2381 } 1481 }
2382 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, 1482 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
2383 eq, map_reg, Operand(map)); 1483 eq, match, Operand(zero_reg));
2384 } 1484 }
2385 } 1485 }
2386 ASSERT(number_of_handled_maps != 0); 1486 ASSERT(number_of_handled_maps != 0);
2387 1487
2388 __ bind(&miss); 1488 __ bind(&miss);
2389 TailCallBuiltin(masm(), MissBuiltin(kind())); 1489 TailCallBuiltin(masm(), MissBuiltin(kind()));
2390 1490
2391 // Return the generated code. 1491 // Return the generated code.
2392 InlineCacheState state = 1492 InlineCacheState state =
2393 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; 1493 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after
2471 // ----------------------------------- 1571 // -----------------------------------
2472 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); 1572 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
2473 } 1573 }
2474 1574
2475 1575
2476 #undef __ 1576 #undef __
2477 1577
2478 } } // namespace v8::internal 1578 } } // namespace v8::internal
2479 1579
2480 #endif // V8_TARGET_ARCH_MIPS 1580 #endif // V8_TARGET_ARCH_MIPS
OLDNEW
« no previous file with comments | « src/mips/simulator-mips.cc ('k') | src/mksnapshot.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698