OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4227 matching lines...) |
4238 __ cmp(scratch, kSymbolTag | kStringTag); | 4238 __ cmp(scratch, kSymbolTag | kStringTag); |
4239 __ j(not_equal, label); | 4239 __ j(not_equal, label); |
4240 } | 4240 } |
4241 | 4241 |
4242 | 4242 |
4243 void StackCheckStub::Generate(MacroAssembler* masm) { | 4243 void StackCheckStub::Generate(MacroAssembler* masm) { |
4244 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); | 4244 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); |
4245 } | 4245 } |
4246 | 4246 |
4247 | 4247 |
| 4248 void CallFunctionStub::FinishCode(Code* code) { |
| 4249 code->set_has_function_cache(RecordCallTarget()); |
| 4250 } |
| 4251 |
| 4252 |
| 4253 void CallFunctionStub::Clear(Heap* heap, Address address) { |
| 4254 ASSERT(Memory::uint8_at(address + kPointerSize) == Assembler::kTestEaxByte); |
| 4255 // 1 ~ size of the test eax opcode. |
| 4256 Object* cell = Memory::Object_at(address + kPointerSize + 1); |
| 4257 // Low-level because clearing happens during GC. |
| 4258 reinterpret_cast<JSGlobalPropertyCell*>(cell)->set_value( |
| 4259 RawUninitializedSentinel(heap)); |
| 4260 } |
| 4261 |
| 4262 |
| 4263 Object* CallFunctionStub::GetCachedValue(Address address) { |
| 4264 ASSERT(Memory::uint8_at(address + kPointerSize) == Assembler::kTestEaxByte); |
| 4265 // 1 ~ size of the test eax opcode. |
| 4266 Object* cell = Memory::Object_at(address + kPointerSize + 1); |
| 4267 return JSGlobalPropertyCell::cast(cell)->value(); |
| 4268 } |
| 4269 |
| 4270 |
4248 void CallFunctionStub::Generate(MacroAssembler* masm) { | 4271 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 4272 Isolate* isolate = masm->isolate(); |
4249 Label slow, non_function; | 4273 Label slow, non_function; |
4250 | 4274 |
4251 // The receiver might implicitly be the global object. This is | 4275 // The receiver might implicitly be the global object. This is |
4252 // indicated by passing the hole as the receiver to the call | 4276 // indicated by passing the hole as the receiver to the call |
4253 // function stub. | 4277 // function stub. |
4254 if (ReceiverMightBeImplicit()) { | 4278 if (ReceiverMightBeImplicit()) { |
4255 Label call; | 4279 Label receiver_ok; |
4256 // Get the receiver from the stack. | 4280 // Get the receiver from the stack. |
4257 // +1 ~ return address | 4281 // +1 ~ return address |
4258 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize)); | 4282 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize)); |
4259 // Call as function is indicated with the hole. | 4283 // Call as function is indicated with the hole. |
4260 __ cmp(eax, masm->isolate()->factory()->the_hole_value()); | 4284 __ cmp(eax, isolate->factory()->the_hole_value()); |
4261 __ j(not_equal, &call, Label::kNear); | 4285 __ j(not_equal, &receiver_ok, Label::kNear); |
4262 // Patch the receiver on the stack with the global receiver object. | 4286 // Patch the receiver on the stack with the global receiver object. |
4263 __ mov(ebx, GlobalObjectOperand()); | 4287 __ mov(ebx, GlobalObjectOperand()); |
4264 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset)); | 4288 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset)); |
4265 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx); | 4289 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx); |
4266 __ bind(&call); | 4290 __ bind(&receiver_ok); |
4267 } | 4291 } |
4268 | 4292 |
4269 // Get the function to call from the stack. | 4293 // Get the function to call from the stack. |
4270 // +2 ~ receiver, return address | 4294 // +2 ~ receiver, return address |
4271 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize)); | 4295 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize)); |
4272 | 4296 |
4273 // Check that the function really is a JavaScript function. | 4297 // Check that the function really is a JavaScript function. |
4274 __ JumpIfSmi(edi, &non_function); | 4298 __ JumpIfSmi(edi, &non_function); |
4275 // Goto slow case if we do not have a function. | 4299 // Goto slow case if we do not have a function. |
4276 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); | 4300 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); |
4277 __ j(not_equal, &slow); | 4301 __ j(not_equal, &slow); |
4278 | 4302 |
| 4303 if (RecordCallTarget()) { |
| 4304 // Cache the called function in a global property cell in the |
| 4305 // instruction stream after the call. Cache states are uninitialized, |
| 4306 // monomorphic (indicated by a JSFunction), and megamorphic. |
| 4307 Label initialize, call; |
| 4308 // Load the cache cell address into ebx and the cache state into ecx. |
| 4309 __ mov(ebx, Operand(esp, 0)); // Return address. |
| 4310 __ mov(ebx, Operand(ebx, 1)); // 1 ~ sizeof 'test eax' opcode in bytes. |
| 4311 __ mov(ecx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset)); |
| 4312 |
| 4313 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 4314 // function without changing the state. |
| 4315 __ cmp(ecx, Operand(edi)); |
| 4316 __ j(equal, &call, Label::kNear); |
| 4317 __ cmp(Operand(ecx), Immediate(MegamorphicSentinel(isolate))); |
| 4318 __ j(equal, &call, Label::kNear); |
| 4319 |
| 4320     // A monomorphic miss (i.e., here the cache is not uninitialized) goes |
| 4321 // megamorphic. |
| 4322 __ cmp(Operand(ecx), Immediate(UninitializedSentinel(isolate))); |
| 4323 __ j(equal, &initialize, Label::kNear); |
| 4324 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), |
| 4325 Immediate(MegamorphicSentinel(isolate))); |
| 4326 __ jmp(&call, Label::kNear); |
| 4327 |
| 4328 // An uninitialized cache is patched with the function. |
| 4329 __ bind(&initialize); |
| 4330 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), edi); |
| 4331 |
| 4332 __ bind(&call); |
| 4333 } |
| 4334 |
4279 // Fast-case: Just invoke the function. | 4335 // Fast-case: Just invoke the function. |
4280 ParameterCount actual(argc_); | 4336 ParameterCount actual(argc_); |
4281 | 4337 |
4282 if (ReceiverMightBeImplicit()) { | 4338 if (ReceiverMightBeImplicit()) { |
4283 Label call_as_function; | 4339 Label call_as_function; |
4284 __ cmp(eax, masm->isolate()->factory()->the_hole_value()); | 4340 __ cmp(eax, isolate->factory()->the_hole_value()); |
4285 __ j(equal, &call_as_function); | 4341 __ j(equal, &call_as_function); |
4286 __ InvokeFunction(edi, | 4342 __ InvokeFunction(edi, |
4287 actual, | 4343 actual, |
4288 JUMP_FUNCTION, | 4344 JUMP_FUNCTION, |
4289 NullCallWrapper(), | 4345 NullCallWrapper(), |
4290 CALL_AS_METHOD); | 4346 CALL_AS_METHOD); |
4291 __ bind(&call_as_function); | 4347 __ bind(&call_as_function); |
4292 } | 4348 } |
4293 __ InvokeFunction(edi, | 4349 __ InvokeFunction(edi, |
4294 actual, | 4350 actual, |
4295 JUMP_FUNCTION, | 4351 JUMP_FUNCTION, |
4296 NullCallWrapper(), | 4352 NullCallWrapper(), |
4297 CALL_AS_FUNCTION); | 4353 CALL_AS_FUNCTION); |
4298 | 4354 |
4299 // Slow-case: Non-function called. | 4355 // Slow-case: Non-function called. |
4300 __ bind(&slow); | 4356 __ bind(&slow); |
| 4357 if (RecordCallTarget()) { |
| 4358 // If there is a call target cache, mark it megamorphic in the |
| 4359 // non-function case. |
| 4360 __ mov(ebx, Operand(esp, 0)); |
| 4361 __ mov(ebx, Operand(ebx, 1)); |
| 4362 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), |
| 4363 Immediate(MegamorphicSentinel(isolate))); |
| 4364 } |
4301 // Check for function proxy. | 4365 // Check for function proxy. |
4302 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); | 4366 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); |
4303 __ j(not_equal, &non_function); | 4367 __ j(not_equal, &non_function); |
4304 __ pop(ecx); | 4368 __ pop(ecx); |
4305 __ push(edi); // put proxy as additional argument under return address | 4369 __ push(edi); // put proxy as additional argument under return address |
4306 __ push(ecx); | 4370 __ push(ecx); |
4307 __ Set(eax, Immediate(argc_ + 1)); | 4371 __ Set(eax, Immediate(argc_ + 1)); |
4308 __ Set(ebx, Immediate(0)); | 4372 __ Set(ebx, Immediate(0)); |
4309 __ SetCallKind(ecx, CALL_AS_FUNCTION); | 4373 __ SetCallKind(ecx, CALL_AS_FUNCTION); |
4310 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); | 4374 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); |
4311 { | 4375 { |
4312 Handle<Code> adaptor = | 4376 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); |
4313 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | |
4314 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 4377 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
4315 } | 4378 } |
4316 | 4379 |
4317 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 4380 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
4318 // of the original receiver from the call site). | 4381 // of the original receiver from the call site). |
4319 __ bind(&non_function); | 4382 __ bind(&non_function); |
4320 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); | 4383 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); |
4321 __ Set(eax, Immediate(argc_)); | 4384 __ Set(eax, Immediate(argc_)); |
4322 __ Set(ebx, Immediate(0)); | 4385 __ Set(ebx, Immediate(0)); |
4323 __ SetCallKind(ecx, CALL_AS_METHOD); | 4386 __ SetCallKind(ecx, CALL_AS_METHOD); |
4324 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); | 4387 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); |
4325 Handle<Code> adaptor = | 4388 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); |
4326 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | |
4327 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 4389 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
4328 } | 4390 } |
4329 | 4391 |
4330 | 4392 |
4331 bool CEntryStub::NeedsImmovableCode() { | 4393 bool CEntryStub::NeedsImmovableCode() { |
4332 return false; | 4394 return false; |
4333 } | 4395 } |
4334 | 4396 |
4335 | 4397 |
4336 bool CEntryStub::CompilingCallsToThisStubIsGCSafe() { | 4398 bool CEntryStub::CompilingCallsToThisStubIsGCSafe() { |
(...skipping 2503 matching lines...) |
6840 | 6902 |
6841 // Fall through when we need to inform the incremental marker. | 6903 // Fall through when we need to inform the incremental marker. |
6842 } | 6904 } |
6843 | 6905 |
6844 | 6906 |
6845 #undef __ | 6907 #undef __ |
6846 | 6908 |
6847 } } // namespace v8::internal | 6909 } } // namespace v8::internal |
6848 | 6910 |
6849 #endif // V8_TARGET_ARCH_IA32 | 6911 #endif // V8_TARGET_ARCH_IA32 |