OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 271 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
282 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); | 282 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); |
283 { | 283 { |
284 FrameScope scope(masm, StackFrame::INTERNAL); | 284 FrameScope scope(masm, StackFrame::INTERNAL); |
285 __ push(argument); | 285 __ push(argument); |
286 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 286 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
287 } | 287 } |
288 __ Ret(); | 288 __ Ret(); |
289 } | 289 } |
290 | 290 |
291 | 291 |
292 static void CallRuntimePassFunction(MacroAssembler* masm, | 292 static void CallRuntimePassFunctionAndTailCall( |
titzer
2013/12/09 14:49:28
Maybe split this into a CallRuntimeWithFunction and a separate tail-call helper? [comment truncated by diff viewer]
Yang
2013/12/10 11:22:04
Done.
| |
293 Runtime::FunctionId function_id) { | 293 MacroAssembler* masm, Runtime::FunctionId function_id) { |
294 FrameScope scope(masm, StackFrame::INTERNAL); | 294 { FrameScope scope(masm, StackFrame::INTERNAL); |
295 // Push a copy of the function onto the stack. | 295 // Push a copy of the function onto the stack. |
296 __ push(r1); | 296 __ push(r1); |
297 // Push call kind information and function as parameter to the runtime call. | 297 // Push call kind information and function as parameter to the runtime call. |
298 __ Push(r5, r1); | 298 __ Push(r5, r1); |
299 | 299 |
300 __ CallRuntime(function_id, 1); | 300 __ CallRuntime(function_id, 1); |
301 // Restore call kind information. | 301 // Restore call kind information. |
302 __ pop(r5); | 302 __ pop(r5); |
303 // Restore receiver. | 303 // Restore receiver. |
304 __ pop(r1); | 304 __ pop(r1); |
305 } | |
306 | |
307 // Tail call to returned code. | |
308 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
309 __ Jump(r0); | |
305 } | 310 } |
306 | 311 |
307 | 312 |
308 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 313 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
309 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 314 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
310 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); | 315 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); |
311 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); | 316 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); |
312 __ Jump(r2); | 317 __ Jump(r2); |
313 } | 318 } |
314 | 319 |
315 | 320 |
316 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) { | 321 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
317 // Checking whether the queued function is ready for install is optional, | 322 // Checking whether the queued function is ready for install is optional, |
318 // since we come across interrupts and stack checks elsewhere. However, | 323 // since we come across interrupts and stack checks elsewhere. However, |
319 // not checking may delay installing ready functions, and always checking | 324 // not checking may delay installing ready functions, and always checking |
320 // would be quite expensive. A good compromise is to first check against | 325 // would be quite expensive. A good compromise is to first check against |
321 // stack limit as a cue for an interrupt signal. | 326 // stack limit as a cue for an interrupt signal. |
322 Label ok; | 327 Label ok; |
323 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 328 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
324 __ cmp(sp, Operand(ip)); | 329 __ cmp(sp, Operand(ip)); |
325 __ b(hs, &ok); | 330 __ b(hs, &ok); |
326 | 331 |
327 CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode); | 332 CallRuntimePassFunctionAndTailCall(masm, Runtime::kTryInstallOptimizedCode); |
328 // Tail call to returned code. | |
329 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
330 __ Jump(r0); | |
331 | 333 |
332 __ bind(&ok); | 334 __ bind(&ok); |
333 GenerateTailCallToSharedCode(masm); | 335 GenerateTailCallToSharedCode(masm); |
334 } | 336 } |
335 | 337 |
336 | 338 |
337 void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) { | |
338 CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile); | |
339 GenerateTailCallToSharedCode(masm); | |
340 } | |
341 | |
342 | |
343 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 339 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
344 bool is_api_function, | 340 bool is_api_function, |
345 bool count_constructions) { | 341 bool count_constructions) { |
346 // ----------- S t a t e ------------- | 342 // ----------- S t a t e ------------- |
347 // -- r0 : number of arguments | 343 // -- r0 : number of arguments |
348 // -- r1 : constructor function | 344 // -- r1 : constructor function |
349 // -- lr : return address | 345 // -- lr : return address |
350 // -- sp[...]: constructor arguments | 346 // -- sp[...]: constructor arguments |
351 // ----------------------------------- | 347 // ----------------------------------- |
352 | 348 |
(...skipping 415 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
768 Generate_JSEntryTrampolineHelper(masm, false); | 764 Generate_JSEntryTrampolineHelper(masm, false); |
769 } | 765 } |
770 | 766 |
771 | 767 |
772 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 768 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
773 Generate_JSEntryTrampolineHelper(masm, true); | 769 Generate_JSEntryTrampolineHelper(masm, true); |
774 } | 770 } |
775 | 771 |
776 | 772 |
777 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { | 773 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { |
778 CallRuntimePassFunction(masm, Runtime::kLazyCompile); | 774 CallRuntimePassFunctionAndTailCall(masm, Runtime::kCompileUnoptimized); |
779 // Do a tail-call of the compiled function. | |
780 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
781 __ Jump(r2); | |
782 } | 775 } |
783 | 776 |
784 | 777 |
785 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { | 778 static void CallCompileOptimizedAndTailCall(MacroAssembler* masm, |
786 CallRuntimePassFunction(masm, Runtime::kLazyRecompile); | 779 bool concurrent) { |
787 // Do a tail-call of the compiled function. | 780 { FrameScope scope(masm, StackFrame::INTERNAL); |
titzer
2013/12/09 14:49:28
This is an almost exact copy of CallRuntimePassFunctionAndTailCall. [comment truncated by diff viewer]
Yang
2013/12/10 11:22:04
I thought it would make things unnecessarily complicated. [comment truncated by diff viewer]
| |
788 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 781 // Push a copy of the function onto the stack. |
789 __ Jump(r2); | 782 __ push(r1); |
783 // Push call kind information and function as parameter to the runtime call. | |
784 __ Push(r5, r1); | |
785 // Whether to compile in a background thread. | |
786 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | |
787 | |
788 __ CallRuntime(Runtime::kCompileOptimized, 2); | |
789 // Restore call kind information. | |
790 __ pop(r5); | |
791 // Restore receiver. | |
792 __ pop(r1); | |
793 } | |
794 | |
795 // Tail call to returned code. | |
796 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
797 __ Jump(r0); | |
790 } | 798 } |
791 | 799 |
792 | 800 |
801 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | |
802 CallCompileOptimizedAndTailCall(masm, false); | |
803 } | |
804 | |
805 | |
806 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | |
807 CallCompileOptimizedAndTailCall(masm, true); | |
808 } | |
809 | |
810 | |
793 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 811 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
794 // For now, we are relying on the fact that make_code_young doesn't do any | 812 // For now, we are relying on the fact that make_code_young doesn't do any |
795 // garbage collection which allows us to save/restore the registers without | 813 // garbage collection which allows us to save/restore the registers without |
796 // worrying about which of them contain pointers. We also don't build an | 814 // worrying about which of them contain pointers. We also don't build an |
797 // internal frame to make the code faster, since we shouldn't have to do stack | 815 // internal frame to make the code faster, since we shouldn't have to do stack |
798 // crawls in MakeCodeYoung. This seems a bit fragile. | 816 // crawls in MakeCodeYoung. This seems a bit fragile. |
799 | 817 |
800 // The following registers must be saved and restored when calling through to | 818 // The following registers must be saved and restored when calling through to |
801 // the runtime: | 819 // the runtime: |
802 // r0 - contains return address (beginning of patch sequence) | 820 // r0 - contains return address (beginning of patch sequence) |
(...skipping 670 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1473 __ bind(&dont_adapt_arguments); | 1491 __ bind(&dont_adapt_arguments); |
1474 __ Jump(r3); | 1492 __ Jump(r3); |
1475 } | 1493 } |
1476 | 1494 |
1477 | 1495 |
1478 #undef __ | 1496 #undef __ |
1479 | 1497 |
1480 } } // namespace v8::internal | 1498 } } // namespace v8::internal |
1481 | 1499 |
1482 #endif // V8_TARGET_ARCH_ARM | 1500 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |