Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(13)

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 1689010: X64: Faster push/pop implementation. (Closed)
Patch Set: Addressed review comments. Created 10 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/x64/macro-assembler-x64.cc ('k') | test/mjsunit/array-pop.js » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 546 matching lines...) Expand 10 before | Expand all | Expand 10 after
557 557
558 // Reserves space for the extra arguments to FastHandleApiCall in the 558 // Reserves space for the extra arguments to FastHandleApiCall in the
559 // caller's frame. 559 // caller's frame.
560 // 560 //
561 // These arguments are set by CheckPrototypes and GenerateFastApiCall. 561 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
562 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { 562 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
563 // ----------- S t a t e ------------- 563 // ----------- S t a t e -------------
564 // -- rsp[0] : return address 564 // -- rsp[0] : return address
565 // -- rsp[8] : last argument in the internal frame of the caller 565 // -- rsp[8] : last argument in the internal frame of the caller
566 // ----------------------------------- 566 // -----------------------------------
567 __ pop(scratch); 567 __ movq(scratch, Operand(rsp, 0));
568 __ Push(Smi::FromInt(0)); 568 __ subq(rsp, Immediate(4 * kPointerSize));
569 __ Push(Smi::FromInt(0)); 569 __ movq(Operand(rsp, 0), scratch);
570 __ Push(Smi::FromInt(0)); 570 __ Move(scratch, Smi::FromInt(0));
571 __ Push(Smi::FromInt(0)); 571 __ movq(Operand(rsp, 1 * kPointerSize), scratch);
572 __ push(scratch); 572 __ movq(Operand(rsp, 2 * kPointerSize), scratch);
573 __ movq(Operand(rsp, 3 * kPointerSize), scratch);
574 __ movq(Operand(rsp, 4 * kPointerSize), scratch);
573 } 575 }
574 576
575 577
576 // Undoes the effects of ReserveSpaceForFastApiCall. 578 // Undoes the effects of ReserveSpaceForFastApiCall.
577 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { 579 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
578 // ----------- S t a t e ------------- 580 // ----------- S t a t e -------------
579 // -- rsp[0] : return address 581 // -- rsp[0] : return address
580 // -- rsp[8] : last fast api call extra argument 582 // -- rsp[8] : last fast api call extra argument
581 // -- ... 583 // -- ...
582 // -- rsp[32] : first fast api call extra argument 584 // -- rsp[32] : first fast api call extra argument
583 // -- rsp[40] : last argument in the internal frame 585 // -- rsp[40] : last argument in the internal frame
584 // ----------------------------------- 586 // -----------------------------------
585 __ pop(scratch); 587 __ movq(scratch, Operand(rsp, 0));
586 __ Drop(4); 588 __ movq(Operand(rsp, 4 * kPointerSize), scratch);
587 __ push(scratch); 589 __ addq(rsp, Immediate(kPointerSize * 4));
588 } 590 }
589 591
590 592
591 // Generates call to FastHandleApiCall builtin. 593 // Generates call to FastHandleApiCall builtin.
592 static void GenerateFastApiCall(MacroAssembler* masm, 594 static void GenerateFastApiCall(MacroAssembler* masm,
593 const CallOptimization& optimization, 595 const CallOptimization& optimization,
594 int argc) { 596 int argc) {
595 // ----------- S t a t e ------------- 597 // ----------- S t a t e -------------
596 // -- rsp[0] : return address 598 // -- rsp[0] : return address
597 // -- rsp[8] : object passing the type check 599 // -- rsp[8] : object passing the type check
(...skipping 248 matching lines...) Expand 10 before | Expand all | Expand 10 after
846 Factory::the_hole_value()); 848 Factory::the_hole_value());
847 __ j(not_equal, miss); 849 __ j(not_equal, miss);
848 return cell; 850 return cell;
849 } 851 }
850 852
851 853
852 #undef __ 854 #undef __
853 855
854 #define __ ACCESS_MASM((masm())) 856 #define __ ACCESS_MASM((masm()))
855 857
856
857 Object* CallStubCompiler::CompileArrayPushCall(Object* object,
858 JSObject* holder,
859 JSFunction* function,
860 String* name,
861 CheckType check) {
862 // ----------- S t a t e -------------
863 // rcx : function name
864 // rsp[0] : return address
865 // rsp[8] : argument argc
866 // rsp[16] : argument argc - 1
867 // ...
868 // rsp[argc * 8] : argument 1
869 // rsp[(argc + 1) * 8] : argument 0 = receiver
870 // -----------------------------------
871
872 // If object is not an array, bail out to regular call.
873 if (!object->IsJSArray()) {
874 return Heap::undefined_value();
875 }
876
877 // TODO(639): faster implementation.
878 ASSERT(check == RECEIVER_MAP_CHECK);
879
880 Label miss;
881
882 // Get the receiver from the stack.
883 const int argc = arguments().immediate();
884 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
885
886 // Check that the receiver isn't a smi.
887 __ JumpIfSmi(rdx, &miss);
888
889 // Check that the maps haven't changed.
890 CheckPrototypes(JSObject::cast(object), rdx, holder,
891 rbx, rax, name, &miss);
892
893 // Patch the receiver on the stack with the global proxy if
894 // necessary.
895 if (object->IsGlobalObject()) {
896 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
897 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
898 }
899
900 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
901 argc + 1,
902 1);
903
904 // Handle call cache miss.
905 __ bind(&miss);
906 Handle<Code> ic = ComputeCallMiss(arguments().immediate());
907 __ Jump(ic, RelocInfo::CODE_TARGET);
908
909 // Return the generated code.
910 String* function_name = NULL;
911 if (function->shared()->name()->IsString()) {
912 function_name = String::cast(function->shared()->name());
913 }
914 return GetCode(CONSTANT_FUNCTION, function_name);
915 }
916
917
918 Object* CallStubCompiler::CompileArrayPopCall(Object* object,
919 JSObject* holder,
920 JSFunction* function,
921 String* name,
922 CheckType check) {
923 // ----------- S t a t e -------------
924 // rcx : function name
925 // rsp[0] : return address
926 // rsp[8] : argument argc
927 // rsp[16] : argument argc - 1
928 // ...
929 // rsp[argc * 8] : argument 1
930 // rsp[(argc + 1) * 8] : argument 0 = receiver
931 // -----------------------------------
932
933 // If object is not an array, bail out to regular call.
934 if (!object->IsJSArray()) {
935 return Heap::undefined_value();
936 }
937
938 // TODO(642): faster implementation.
939 ASSERT(check == RECEIVER_MAP_CHECK);
940
941 Label miss;
942
943 // Get the receiver from the stack.
944 const int argc = arguments().immediate();
945 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
946
947 // Check that the receiver isn't a smi.
948 __ JumpIfSmi(rdx, &miss);
949
950 // Check that the maps haven't changed.
951 CheckPrototypes(JSObject::cast(object), rdx, holder,
952 rbx, rax, name, &miss);
953
954 // Patch the receiver on the stack with the global proxy if
955 // necessary.
956 if (object->IsGlobalObject()) {
957 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
958 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
959 }
960
961 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
962 argc + 1,
963 1);
964
965 // Handle call cache miss.
966 __ bind(&miss);
967 Handle<Code> ic = ComputeCallMiss(arguments().immediate());
968 __ Jump(ic, RelocInfo::CODE_TARGET);
969
970 // Return the generated code.
971 String* function_name = NULL;
972 if (function->shared()->name()->IsString()) {
973 function_name = String::cast(function->shared()->name());
974 }
975 return GetCode(CONSTANT_FUNCTION, function_name);
976 }
977
978
979 Object* CallStubCompiler::CompileCallConstant(Object* object, 858 Object* CallStubCompiler::CompileCallConstant(Object* object,
980 JSObject* holder, 859 JSObject* holder,
981 JSFunction* function, 860 JSFunction* function,
982 String* name, 861 String* name,
983 StubCompiler::CheckType check) { 862 StubCompiler::CheckType check) {
984 // ----------- S t a t e ------------- 863 // ----------- S t a t e -------------
985 // rcx : function name 864 // rcx : function name
986 // rsp[0] : return address 865 // rsp[0] : return address
987 // rsp[8] : argument argc 866 // rsp[8] : argument argc
988 // rsp[16] : argument argc - 1 867 // rsp[16] : argument argc - 1
(...skipping 194 matching lines...) Expand 10 before | Expand all | Expand 10 after
1183 // Handle call cache miss. 1062 // Handle call cache miss.
1184 __ bind(&miss); 1063 __ bind(&miss);
1185 Handle<Code> ic = ComputeCallMiss(arguments().immediate()); 1064 Handle<Code> ic = ComputeCallMiss(arguments().immediate());
1186 __ Jump(ic, RelocInfo::CODE_TARGET); 1065 __ Jump(ic, RelocInfo::CODE_TARGET);
1187 1066
1188 // Return the generated code. 1067 // Return the generated code.
1189 return GetCode(FIELD, name); 1068 return GetCode(FIELD, name);
1190 } 1069 }
1191 1070
1192 1071
1072 Object* CallStubCompiler::CompileArrayPushCall(Object* object,
1073 JSObject* holder,
1074 JSFunction* function,
1075 String* name,
1076 CheckType check) {
1077 // ----------- S t a t e -------------
1078 // -- rcx : name
1079 // -- rsp[0] : return address
1080 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1081 // -- ...
1082 // -- rsp[(argc + 1) * 8] : receiver
1083 // -----------------------------------
1084 ASSERT(check == RECEIVER_MAP_CHECK);
1085
1086 // If object is not an array, bail out to regular call.
1087 if (!object->IsJSArray()) {
1088 return Heap::undefined_value();
1089 }
1090
1091 Label miss;
1092
1093 // Get the receiver from the stack.
1094 const int argc = arguments().immediate();
1095 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1096
1097 // Check that the receiver isn't a smi.
1098 __ JumpIfSmi(rdx, &miss);
1099
1100 CheckPrototypes(JSObject::cast(object),
1101 rdx,
1102 holder,
1103 rbx,
1104 rax,
1105 name,
1106 &miss);
1107
1108 if (argc == 0) {
1109 // Noop, return the length.
1110 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1111 __ ret((argc + 1) * kPointerSize);
1112 } else {
1113 // Get the elements array of the object.
1114 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1115
1116 // Check that the elements are in fast mode (not dictionary).
1117 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
1118 Factory::fixed_array_map());
1119 __ j(not_equal, &miss);
1120
1121 if (argc == 1) { // Otherwise fall through to call builtin.
1122 Label call_builtin, exit, with_rset_update, attempt_to_grow_elements;
1123
1124 // Get the array's length into rax and calculate new length.
1125 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1126 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
1127 __ SmiAddConstant(rax, rax, Smi::FromInt(argc));
1128
1129 // Get the element's length into rcx.
1130 __ movl(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
1131 __ Integer32ToSmi(rcx, rcx);
1132
1133 // Check if we could survive without allocation.
1134 __ SmiCompare(rax, rcx);
1135 __ j(greater, &attempt_to_grow_elements);
1136
1137 // Save new length.
1138 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1139
1140 // Push the element.
1141 __ movq(rcx, Operand(rsp, argc * kPointerSize));
1142 SmiIndex index =
1143 masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size);
1144 __ lea(rdx, FieldOperand(rbx,
1145 index.reg, index.scale,
1146 FixedArray::kHeaderSize - argc * kPointerSize));
1147 __ movq(Operand(rdx, 0), rcx);
1148
1149 // Check if value is a smi.
1150 __ JumpIfNotSmi(rcx, &with_rset_update);
1151
1152 __ bind(&exit);
1153 __ ret((argc + 1) * kPointerSize);
1154
1155 __ bind(&with_rset_update);
1156
1157 __ InNewSpace(rbx, rcx, equal, &exit);
1158
1159 RecordWriteStub stub(rbx, rdx, rcx);
1160 __ CallStub(&stub);
1161 __ ret((argc + 1) * kPointerSize);
1162
1163 __ bind(&attempt_to_grow_elements);
1164 ExternalReference new_space_allocation_top =
1165 ExternalReference::new_space_allocation_top_address();
1166 ExternalReference new_space_allocation_limit =
1167 ExternalReference::new_space_allocation_limit_address();
1168
1169 const int kAllocationDelta = 4;
1170 // Load top.
1171 __ movq(rcx, new_space_allocation_top);
1172 __ movq(rcx, Operand(rcx, 0));
1173
1174 // Check if it's the end of elements.
1175 index = masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size);
1176 __ lea(rdx, FieldOperand(rbx,
1177 index.reg, index.scale,
1178 FixedArray::kHeaderSize - argc * kPointerSize));
1179 __ cmpq(rdx, rcx);
1180 __ j(not_equal, &call_builtin);
1181 __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
1182 __ movq(kScratchRegister, new_space_allocation_limit);
1183 __ cmpq(rcx, Operand(kScratchRegister, 0));
1184 __ j(above, &call_builtin);
1185
1186 // We fit and could grow elements.
1187 __ movq(kScratchRegister, new_space_allocation_top);
1188 __ movq(Operand(kScratchRegister, 0), rcx);
1189 __ movq(rcx, Operand(rsp, argc * kPointerSize));
1190
1191 // Push the argument...
1192 __ movq(Operand(rdx, 0), rcx);
1193 // ... and fill the rest with holes.
1194 __ Move(kScratchRegister, Factory::the_hole_value());
1195 for (int i = 1; i < kAllocationDelta; i++) {
1196 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
1197 }
1198
1199 // Restore receiver to rdx as finish sequence assumes it's here.
1200 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1201
1202 // Increment element's and array's sizes.
1203 __ addq(FieldOperand(rbx, FixedArray::kLengthOffset),
1204 Immediate(kAllocationDelta));
1205 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1206
1207 // Elements are in new space, so no remembered set updates are necessary.
1208 __ ret((argc + 1) * kPointerSize);
1209
1210 __ bind(&call_builtin);
1211 }
1212
1213 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
1214 argc + 1,
1215 1);
1216 }
1217
1218 __ bind(&miss);
1219
1220 Handle<Code> ic = ComputeCallMiss(arguments().immediate());
1221 __ jmp(ic, RelocInfo::CODE_TARGET);
1222
1223 // Return the generated code.
1224 String* function_name = NULL;
1225 if (function->shared()->name()->IsString()) {
1226 function_name = String::cast(function->shared()->name());
1227 }
1228 return GetCode(CONSTANT_FUNCTION, function_name);
1229 }
1230
1231
1232 Object* CallStubCompiler::CompileArrayPopCall(Object* object,
1233 JSObject* holder,
1234 JSFunction* function,
1235 String* name,
1236 CheckType check) {
1237 // ----------- S t a t e -------------
1238 // -- rcx : name
1239 // -- rsp[0] : return address
1240 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1241 // -- ...
1242 // -- rsp[(argc + 1) * 8] : receiver
1243 // -----------------------------------
1244 ASSERT(check == RECEIVER_MAP_CHECK);
1245
1246 // If object is not an array, bail out to regular call.
1247 if (!object->IsJSArray()) {
1248 return Heap::undefined_value();
1249 }
1250
1251 Label miss, return_undefined, call_builtin;
1252
1253 // Get the receiver from the stack.
1254 const int argc = arguments().immediate();
1255 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1256
1257 // Check that the receiver isn't a smi.
1258 __ JumpIfSmi(rdx, &miss);
1259
1260 CheckPrototypes(JSObject::cast(object), rdx,
1261 holder, rbx,
1262 rax, name, &miss);
1263
1264 // Get the elements array of the object.
1265 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1266
1267 // Check that the elements are in fast mode (not dictionary).
1268 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), Factory::fixed_array_map());
1269 __ j(not_equal, &miss);
1270
1271 // Get the array's length into rcx and calculate new length.
1272 __ movq(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
1273 __ SmiSubConstant(rcx, rcx, Smi::FromInt(1));
1274 __ SmiTest(rcx);
1275 __ j(negative, &return_undefined);
1276
1277 // Get the last element.
1278 __ Move(r9, Factory::the_hole_value());
1279 SmiIndex index =
1280 masm()->SmiToIndex(r8, rcx, times_pointer_size);
1281 __ movq(rax, FieldOperand(rbx,
1282 index.reg, index.scale,
1283 FixedArray::kHeaderSize));
1284 // Check if element is already the hole.
1285 __ cmpq(rax, r9);
1286 __ j(equal, &call_builtin);
1287
1288 // Set the array's length.
1289 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
1290
1291 // Fill with the hole and return original value.
1292 __ movq(FieldOperand(rbx,
1293 index.reg, index.scale,
1294 FixedArray::kHeaderSize),
1295 r9);
1296 __ ret((argc + 1) * kPointerSize);
1297
1298 __ bind(&return_undefined);
1299
1300 __ Move(rax, Factory::undefined_value());
1301 __ ret((argc + 1) * kPointerSize);
1302
1303 __ bind(&call_builtin);
1304 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
1305 argc + 1,
1306 1);
1307 __ bind(&miss);
1308
1309 Handle<Code> ic = ComputeCallMiss(arguments().immediate());
1310 __ jmp(ic, RelocInfo::CODE_TARGET);
1311
1312 // Return the generated code.
1313 String* function_name = NULL;
1314 if (function->shared()->name()->IsString()) {
1315 function_name = String::cast(function->shared()->name());
1316 }
1317 return GetCode(CONSTANT_FUNCTION, function_name);
1318 }
1319
1320
1321
1322
1193 Object* CallStubCompiler::CompileCallInterceptor(JSObject* object, 1323 Object* CallStubCompiler::CompileCallInterceptor(JSObject* object,
1194 JSObject* holder, 1324 JSObject* holder,
1195 String* name) { 1325 String* name) {
1196 // ----------- S t a t e ------------- 1326 // ----------- S t a t e -------------
1197 // rcx : function name 1327 // rcx : function name
1198 // rsp[0] : return address 1328 // rsp[0] : return address
1199 // rsp[8] : argument argc 1329 // rsp[8] : argument argc
1200 // rsp[16] : argument argc - 1 1330 // rsp[16] : argument argc - 1
1201 // ... 1331 // ...
1202 // rsp[argc * 8] : argument 1 1332 // rsp[argc * 8] : argument 1
(...skipping 833 matching lines...) Expand 10 before | Expand all | Expand 10 after
2036 Register StubCompiler::CheckPrototypes(JSObject* object, 2166 Register StubCompiler::CheckPrototypes(JSObject* object,
2037 Register object_reg, 2167 Register object_reg,
2038 JSObject* holder, 2168 JSObject* holder,
2039 Register holder_reg, 2169 Register holder_reg,
2040 Register scratch, 2170 Register scratch,
2041 String* name, 2171 String* name,
2042 int save_at_depth, 2172 int save_at_depth,
2043 Label* miss) { 2173 Label* miss) {
2044 // Check that the maps haven't changed. 2174 // Check that the maps haven't changed.
2045 Register result = 2175 Register result =
2046 __ CheckMaps(object, object_reg, holder, holder_reg, scratch, 2176 masm()->CheckMaps(object,
2047 save_at_depth, miss); 2177 object_reg,
2178 holder,
2179 holder_reg,
2180 scratch,
2181 save_at_depth,
2182 miss);
2048 2183
2049 // If we've skipped any global objects, it's not enough to verify 2184 // If we've skipped any global objects, it's not enough to verify
2050 // that their maps haven't changed. We also need to check that the 2185 // that their maps haven't changed. We also need to check that the
2051 // property cell for the property is still empty. 2186 // property cell for the property is still empty.
2052 while (object != holder) { 2187 while (object != holder) {
2053 if (object->IsGlobalObject()) { 2188 if (object->IsGlobalObject()) {
2054 Object* cell = GenerateCheckPropertyCell(masm(), 2189 Object* cell = GenerateCheckPropertyCell(masm(),
2055 GlobalObject::cast(object), 2190 GlobalObject::cast(object),
2056 name, 2191 name,
2057 scratch, 2192 scratch,
(...skipping 181 matching lines...) Expand 10 before | Expand all | Expand 10 after
2239 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); 2374 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
2240 2375
2241 // Return the generated code. 2376 // Return the generated code.
2242 return GetCode(); 2377 return GetCode();
2243 } 2378 }
2244 2379
2245 2380
2246 #undef __ 2381 #undef __
2247 2382
2248 } } // namespace v8::internal 2383 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/x64/macro-assembler-x64.cc ('k') | test/mjsunit/array-pop.js » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698