Chromium Code Reviews

Side by Side Diff: src/arm/builtins-arm.cc

Issue 190793002: Introduce FrameAndConstantPoolScope and ConstantPoolUnavailableScope. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Sync | Created 6 years, 9 months ago
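
In the listing below, context lines that were identical in the old and new columns of the side-by-side view are shown once with their line number; where the patch changes a line, the old version is prefixed with '-' and the new version with '+'. The change in this file is mechanical: every FrameScope that opened an INTERNAL or CONSTRUCT frame now opens a FrameAndConstantPoolScope instead, presumably so that constant pool state is set up and torn down together with the frame. A minimal sketch of the before/after pattern, using a hypothetical Runtime::kSomeFunction call in place of the real bodies:

    // Before: the scope only manages the frame.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(r0);
      __ CallRuntime(Runtime::kSomeFunction, 1);  // hypothetical runtime call
    }

    // After: the combined scope is assumed to also track constant pool
    // availability for the duration of the frame (e.g. when out-of-line
    // constant pools are in use), while keeping the same usage pattern.
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ push(r0);
      __ CallRuntime(Runtime::kSomeFunction, 1);  // hypothetical runtime call
    }
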
1     // Copyright 2012 the V8 project authors. All rights reserved.
2     // Redistribution and use in source and binary forms, with or without
3     // modification, are permitted provided that the following conditions are
4     // met:
5     //
6     //     * Redistributions of source code must retain the above copyright
7     //       notice, this list of conditions and the following disclaimer.
8     //     * Redistributions in binary form must reproduce the above
9     //       copyright notice, this list of conditions and the following
10    //       disclaimer in the documentation and/or other materials provided
(...skipping 243 matching lines...)
254     __ b(ne, &convert_argument);
255     __ mov(argument, r0);
256     __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
257     __ b(&argument_is_string);
258
259     // Invoke the conversion builtin and put the result into r2.
260     __ bind(&convert_argument);
261     __ push(function);  // Preserve the function.
262     __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
263     {
264 -     FrameScope scope(masm, StackFrame::INTERNAL);
264 +     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
265       __ push(r0);
266       __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
267     }
268     __ pop(function);
269     __ mov(argument, r0);
270     __ b(&argument_is_string);
271
272     // Load the empty string into r2, remove the receiver from the
273     // stack, and jump back to the case where the argument is a string.
274     __ bind(&no_arguments);
275     __ LoadRoot(argument, Heap::kempty_stringRootIndex);
276     __ Drop(1);
277     __ b(&argument_is_string);
278
279     // At this point the argument is already a string. Call runtime to
280     // create a string wrapper.
281     __ bind(&gc_required);
282     __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
283     {
284 -     FrameScope scope(masm, StackFrame::INTERNAL);
284 +     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
285       __ push(argument);
286       __ CallRuntime(Runtime::kNewStringWrapper, 1);
287     }
288     __ Ret();
289   }
290
291
292   static void CallRuntimePassFunction(
293       MacroAssembler* masm, Runtime::FunctionId function_id) {
294 -   FrameScope scope(masm, StackFrame::INTERNAL);
294 +   FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
295     // Push a copy of the function onto the stack.
296     __ push(r1);
297     // Push function as parameter to the runtime call.
298     __ Push(r1);
299
300     __ CallRuntime(function_id, 1);
301     // Restore receiver.
302     __ pop(r1);
303   }
(...skipping 41 matching lines...)
346     //  -- sp[...]: constructor arguments
347     // -----------------------------------
348
349     // Should never count constructions for api objects.
350     ASSERT(!is_api_function || !count_constructions);
351
352     Isolate* isolate = masm->isolate();
353
354     // Enter a construct frame.
355     {
356 -     FrameScope scope(masm, StackFrame::CONSTRUCT);
356 +     FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
357
358       // Preserve the two incoming parameters on the stack.
359       __ SmiTag(r0);
360       __ push(r0);  // Smi-tagged arguments count.
361       __ push(r1);  // Constructor function.
362
363       // Try to allocate the object without transitioning into C code. If any of
364       // the preconditions is not met, the code bails out to the runtime call.
365       Label rt_call, allocated;
366       if (FLAG_inline_new) {
(...skipping 399 matching lines...)
766   }
767
768
769   void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
770     CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
771     GenerateTailCallToReturnedCode(masm);
772   }
773
774
775   static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
776 -   FrameScope scope(masm, StackFrame::INTERNAL);
776 +   FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
777     // Push a copy of the function onto the stack.
778     __ push(r1);
779     // Push function as parameter to the runtime call.
780     __ Push(r1);
781     // Whether to compile in a background thread.
782     __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
783
784     __ CallRuntime(Runtime::kCompileOptimized, 2);
785     // Restore receiver.
786     __ pop(r1);
(...skipping 75 matching lines...)
862
863
864   void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
865     GenerateMakeCodeYoungAgainCommon(masm);
866   }
867
868
869   static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
870                                                SaveFPRegsMode save_doubles) {
871     {
872 -     FrameScope scope(masm, StackFrame::INTERNAL);
872 +     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
873
874       // Preserve registers across notification, this is important for compiled
875       // stubs that tail call the runtime on deopts passing their parameters in
876       // registers.
877       __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
878       // Pass the function and deoptimization type to the runtime system.
879       __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
880       __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
881     }
882
883     __ add(sp, sp, Operand(kPointerSize));  // Ignore state
884     __ mov(pc, lr);  // Jump to miss handler
885   }
886
887
888   void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
889     Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
890   }
891
892
893   void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
894     Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
895   }
896
897
898   static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
899                                                Deoptimizer::BailoutType type) {
900     {
901 -     FrameScope scope(masm, StackFrame::INTERNAL);
901 +     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
902       // Pass the function and deoptimization type to the runtime system.
903       __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
904       __ push(r0);
905       __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
906     }
907
908     // Get the full codegen state from the stack and untag it -> r6.
909     __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
910     __ SmiUntag(r6);
911     // Switch on the state.
(...skipping 27 matching lines...)
939
940   void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
941     Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
942   }
943
944
945   void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
946     // Lookup the function in the JavaScript frame.
947     __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
948     {
949 -     FrameScope scope(masm, StackFrame::INTERNAL);
949 +     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
950       // Pass function as argument.
951       __ push(r0);
952       __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
953     }
954
955     // If the code object is null, just return to the unoptimized code.
956     Label skip;
957     __ cmp(r0, Operand(Smi::FromInt(0)));
958     __ b(ne, &skip);
959     __ Ret();
(...skipping 19 matching lines...)
979   }
980
981
982   void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
983     // We check the stack limit as indicator that recompilation might be done.
984     Label ok;
985     __ LoadRoot(ip, Heap::kStackLimitRootIndex);
986     __ cmp(sp, Operand(ip));
987     __ b(hs, &ok);
988     {
989 -     FrameScope scope(masm, StackFrame::INTERNAL);
989 +     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
990       __ CallRuntime(Runtime::kStackGuard, 0);
991     }
992     __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
993             RelocInfo::CODE_TARGET);
994
995     __ bind(&ok);
996     __ Ret();
997   }
998
999
(...skipping 54 matching lines...)
1054     __ b(eq, &use_global_receiver);
1055
1056     STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1057     __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
1058     __ b(ge, &shift_arguments);
1059
1060     __ bind(&convert_to_object);
1061
1062     {
1063       // Enter an internal frame in order to preserve argument count.
1064 -     FrameScope scope(masm, StackFrame::INTERNAL);
1064 +     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1065       __ SmiTag(r0);
1066       __ push(r0);
1067
1068       __ push(r2);
1069       __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1070       __ mov(r2, r0);
1071
1072       __ pop(r0);
1073       __ SmiUntag(r0);
1074
(...skipping 106 matching lines...)
1181   void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1182     const int kIndexOffset =
1183         StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1184     const int kLimitOffset =
1185         StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1186     const int kArgsOffset = 2 * kPointerSize;
1187     const int kRecvOffset = 3 * kPointerSize;
1188     const int kFunctionOffset = 4 * kPointerSize;
1189
1190     {
1191 -     FrameScope frame_scope(masm, StackFrame::INTERNAL);
1191 +     FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
1192
1193       __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
1194       __ push(r0);
1195       __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
1196       __ push(r0);
1197       __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1198
1199       // Check the stack for overflow. We are not trying to catch
1200       // interruptions (e.g. debug break and preemption) here, so the "real stack
1201       // limit" is checked.
(...skipping 265 matching lines...)
1467     __ bind(&dont_adapt_arguments);
1468     __ Jump(r3);
1469   }
1470
1471
1472   #undef __
1473
1474   } }  // namespace v8::internal
1475
1476   #endif  // V8_TARGET_ARCH_ARM
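
The CL title also introduces ConstantPoolUnavailableScope, which is not used in this file. A rough sketch of the idea, assuming a RAII guard over the assembler's constant pool availability; the accessor names below are assumptions for illustration, not quoted from the CL:

    // Sketch only: marks the constant pool as unusable for code emitted while
    // the scope is alive, then restores the previous state on destruction.
    class ConstantPoolUnavailableScope {
     public:
      explicit ConstantPoolUnavailableScope(Assembler* assembler)
          : assembler_(assembler),
            old_available_(assembler->is_constant_pool_available()) {  // assumed accessor
        assembler_->set_constant_pool_available(false);                // assumed accessor
      }
      ~ConstantPoolUnavailableScope() {
        assembler_->set_constant_pool_available(old_available_);
      }

     private:
      Assembler* assembler_;
      bool old_available_;
    };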
