| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 56 matching lines...) | |
| 67     ASSERT(extra_args == NO_EXTRA_ARGUMENTS); | 67     ASSERT(extra_args == NO_EXTRA_ARGUMENTS); | 
| 68   } | 68   } | 
| 69 | 69 | 
| 70   // JumpToExternalReference expects eax to contain the number of arguments | 70   // JumpToExternalReference expects eax to contain the number of arguments | 
| 71   // including the receiver and the extra arguments. | 71   // including the receiver and the extra arguments. | 
| 72   __ add(eax, Immediate(num_extra_args + 1)); | 72   __ add(eax, Immediate(num_extra_args + 1)); | 
| 73   __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | 73   __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | 
| 74 } | 74 } | 
| 75 | 75 | 
| 76 | 76 | 
| 77 static void CallRuntimePassFunction(MacroAssembler* masm, | 77 static void CallRuntimePassFunction( | 
| 78                                     Runtime::FunctionId function_id) { | 78     MacroAssembler* masm, Runtime::FunctionId function_id) { | 
| 79   FrameScope scope(masm, StackFrame::INTERNAL); | 79   FrameScope scope(masm, StackFrame::INTERNAL); | 
| 80   // Push a copy of the function. | 80   // Push a copy of the function. | 
| 81   __ push(edi); | 81   __ push(edi); | 
| 82   // Push call kind information. | 82   // Push call kind information. | 
| 83   __ push(ecx); | 83   __ push(ecx); | 
| 84   // Function is also the parameter to the runtime call. | 84   // Function is also the parameter to the runtime call. | 
| 85   __ push(edi); | 85   __ push(edi); | 
| 86 | 86 | 
| 87   __ CallRuntime(function_id, 1); | 87   __ CallRuntime(function_id, 1); | 
| 88   // Restore call kind information. | 88   // Restore call kind information. | 
| 89   __ pop(ecx); | 89   __ pop(ecx); | 
| 90   // Restore receiver. | 90   // Restore receiver. | 
| 91   __ pop(edi); | 91   __ pop(edi); | 
| 92 } | 92 } | 
| 93 | 93 | 
| 94 | 94 | 
| 95 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 95 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 
| 96   __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 96   __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 
| 97   __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kCodeOffset)); | 97   __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kCodeOffset)); | 
| 98   __ lea(eax, FieldOperand(eax, Code::kHeaderSize)); | 98   __ lea(eax, FieldOperand(eax, Code::kHeaderSize)); | 
| 99   __ jmp(eax); | 99   __ jmp(eax); | 
| 100 } | 100 } | 
| 101 | 101 | 
| 102 | 102 | 
| 103 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) { | 103 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { | 
|  | 104   __ lea(eax, FieldOperand(eax, Code::kHeaderSize)); | 
|  | 105   __ jmp(eax); | 
|  | 106 } | 
|  | 107 | 
|  | 108 | 
|  | 109 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 
| 104   // Checking whether the queued function is ready for install is optional, | 110   // Checking whether the queued function is ready for install is optional, | 
| 105   // since we come across interrupts and stack checks elsewhere.  However, | 111   // since we come across interrupts and stack checks elsewhere.  However, | 
| 106   // not checking may delay installing ready functions, and always checking | 112   // not checking may delay installing ready functions, and always checking | 
| 107   // would be quite expensive.  A good compromise is to first check against | 113   // would be quite expensive.  A good compromise is to first check against | 
| 108   // stack limit as a cue for an interrupt signal. | 114   // stack limit as a cue for an interrupt signal. | 
| 109   Label ok; | 115   Label ok; | 
| 110   ExternalReference stack_limit = | 116   ExternalReference stack_limit = | 
| 111       ExternalReference::address_of_stack_limit(masm->isolate()); | 117       ExternalReference::address_of_stack_limit(masm->isolate()); | 
| 112   __ cmp(esp, Operand::StaticVariable(stack_limit)); | 118   __ cmp(esp, Operand::StaticVariable(stack_limit)); | 
| 113   __ j(above_equal, &ok, Label::kNear); | 119   __ j(above_equal, &ok, Label::kNear); | 
| 114 | 120 | 
| 115   CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode); | 121   CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 
| 116   // Tail call to returned code. | 122   GenerateTailCallToReturnedCode(masm); | 
| 117   __ lea(eax, FieldOperand(eax, Code::kHeaderSize)); |  | 
| 118   __ jmp(eax); |  | 
| 119 | 123 | 
| 120   __ bind(&ok); | 124   __ bind(&ok); | 
| 121   GenerateTailCallToSharedCode(masm); | 125   GenerateTailCallToSharedCode(masm); | 
| 122 } | 126 } | 
| 123 | 127 | 
| 124 | 128 | 
| 125 void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) { |  | 
| 126   CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile); |  | 
| 127   GenerateTailCallToSharedCode(masm); |  | 
| 128 } |  | 
| 129 |  | 
| 130 |  | 
| 131 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 129 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 
| 132                                            bool is_api_function, | 130                                            bool is_api_function, | 
| 133                                            bool count_constructions) { | 131                                            bool count_constructions) { | 
| 134   // ----------- S t a t e ------------- | 132   // ----------- S t a t e ------------- | 
| 135   //  -- eax: number of arguments | 133   //  -- eax: number of arguments | 
| 136   //  -- edi: constructor function | 134   //  -- edi: constructor function | 
| 137   // ----------------------------------- | 135   // ----------------------------------- | 
| 138 | 136 | 
| 139   // Should never count constructions for api objects. | 137   // Should never count constructions for api objects. | 
| 140   ASSERT(!is_api_function || !count_constructions); | 138   ASSERT(!is_api_function || !count_constructions); | 
| (...skipping 361 matching lines...) | |
| 502 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 500 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 
| 503   Generate_JSEntryTrampolineHelper(masm, false); | 501   Generate_JSEntryTrampolineHelper(masm, false); | 
| 504 } | 502 } | 
| 505 | 503 | 
| 506 | 504 | 
| 507 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 505 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 
| 508   Generate_JSEntryTrampolineHelper(masm, true); | 506   Generate_JSEntryTrampolineHelper(masm, true); | 
| 509 } | 507 } | 
| 510 | 508 | 
| 511 | 509 | 
| 512 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { | 510 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 
| 513   CallRuntimePassFunction(masm, Runtime::kLazyCompile); | 511   CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); | 
| 514   // Do a tail-call of the compiled function. | 512   GenerateTailCallToReturnedCode(masm); | 
| 515   __ lea(eax, FieldOperand(eax, Code::kHeaderSize)); |  | 
| 516   __ jmp(eax); |  | 
| 517 } | 513 } | 
| 518 | 514 | 
| 519 | 515 | 
| 520 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { | 516 | 
| 521   CallRuntimePassFunction(masm, Runtime::kLazyRecompile); | 517 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | 
| 522   // Do a tail-call of the compiled function. | 518   FrameScope scope(masm, StackFrame::INTERNAL); | 
| 523   __ lea(eax, FieldOperand(eax, Code::kHeaderSize)); | 519   // Push a copy of the function. | 
| 524   __ jmp(eax); | 520   __ push(edi); | 
|  | 521   // Push call kind information. | 
|  | 522   __ push(ecx); | 
|  | 523   // Function is also the parameter to the runtime call. | 
|  | 524   __ push(edi); | 
|  | 525   // Whether to compile in a background thread. | 
|  | 526   __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | 
|  | 527 | 
|  | 528   __ CallRuntime(Runtime::kCompileOptimized, 2); | 
|  | 529   // Restore call kind information. | 
|  | 530   __ pop(ecx); | 
|  | 531   // Restore receiver. | 
|  | 532   __ pop(edi); | 
| 525 } | 533 } | 
| 526 | 534 | 
| 527 | 535 | 
|  | 536 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 
|  | 537   CallCompileOptimized(masm, false); | 
|  | 538   GenerateTailCallToReturnedCode(masm); | 
|  | 539 } | 
|  | 540 | 
|  | 541 | 
|  | 542 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | 
|  | 543   CallCompileOptimized(masm, true); | 
|  | 544   GenerateTailCallToReturnedCode(masm); | 
|  | 545 } | 
|  | 546 | 
|  | 547 | 
| 528 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 548 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 
| 529   // For now, we are relying on the fact that make_code_young doesn't do any | 549   // For now, we are relying on the fact that make_code_young doesn't do any | 
| 530   // garbage collection which allows us to save/restore the registers without | 550   // garbage collection which allows us to save/restore the registers without | 
| 531   // worrying about which of them contain pointers. We also don't build an | 551   // worrying about which of them contain pointers. We also don't build an | 
| 532   // internal frame to make the code faster, since we shouldn't have to do stack | 552   // internal frame to make the code faster, since we shouldn't have to do stack | 
| 533   // crawls in MakeCodeYoung. This seems a bit fragile. | 553   // crawls in MakeCodeYoung. This seems a bit fragile. | 
| 534 | 554 | 
| 535   // Re-execute the code that was patched back to the young age when | 555   // Re-execute the code that was patched back to the young age when | 
| 536   // the stub returns. | 556   // the stub returns. | 
| 537   __ sub(Operand(esp, 0), Immediate(5)); | 557   __ sub(Operand(esp, 0), Immediate(5)); | 
| (...skipping 826 matching lines...) | |
| 1364 | 1384 | 
| 1365   __ bind(&ok); | 1385   __ bind(&ok); | 
| 1366   __ ret(0); | 1386   __ ret(0); | 
| 1367 } | 1387 } | 
| 1368 | 1388 | 
| 1369 #undef __ | 1389 #undef __ | 
| 1370 } | 1390 } | 
| 1371 }  // namespace v8::internal | 1391 }  // namespace v8::internal | 
| 1372 | 1392 | 
| 1373 #endif  // V8_TARGET_ARCH_IA32 | 1393 #endif  // V8_TARGET_ARCH_IA32 | 