| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 227 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 238 __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); | 238 __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); |
| 239 } | 239 } |
| 240 | 240 |
| 241 | 241 |
| 242 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( | 242 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( |
| 243 MacroAssembler* masm, | 243 MacroAssembler* masm, |
| 244 int index, | 244 int index, |
| 245 Register prototype, | 245 Register prototype, |
| 246 Label* miss) { | 246 Label* miss) { |
| 247 Isolate* isolate = masm->isolate(); | 247 Isolate* isolate = masm->isolate(); |
| 248 // Check we're still in the same context. | |
| 249 __ Move(prototype, isolate->global_object()); | |
| 250 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), | |
| 251 prototype); | |
| 252 __ j(not_equal, miss); | |
| 253 // Get the global function with the given index. | 248 // Get the global function with the given index. |
| 254 Handle<JSFunction> function( | 249 Handle<JSFunction> function( |
| 255 JSFunction::cast(isolate->native_context()->get(index))); | 250 JSFunction::cast(isolate->native_context()->get(index))); |
| 251 |
| 252 // Check we're still in the same context. |
| 253 Register scratch = prototype; |
| 254 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); |
| 255 __ movp(scratch, Operand(rsi, offset)); |
| 256 __ movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); |
| 257 __ Cmp(Operand(scratch, Context::SlotOffset(index)), function); |
| 258 __ j(not_equal, miss); |
| 259 |
| 256 // Load its initial map. The global functions all have initial maps. | 260 // Load its initial map. The global functions all have initial maps. |
| 257 __ Move(prototype, Handle<Map>(function->initial_map())); | 261 __ Move(prototype, Handle<Map>(function->initial_map())); |
| 258 // Load the prototype from the initial map. | 262 // Load the prototype from the initial map. |
| 259 __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); | 263 __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); |
| 260 } | 264 } |
| 261 | 265 |
| 262 | 266 |
| 263 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, | 267 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, |
| 264 Register receiver, | 268 Register receiver, |
| 265 Register scratch, | 269 Register scratch, |
| (...skipping 164 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 430 Address function_address = v8::ToCData<Address>(api_call_info->callback()); | 434 Address function_address = v8::ToCData<Address>(api_call_info->callback()); |
| 431 __ Move( | 435 __ Move( |
| 432 api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE); | 436 api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE); |
| 433 | 437 |
| 434 // Jump to stub. | 438 // Jump to stub. |
| 435 CallApiFunctionStub stub(restore_context, call_data_undefined, argc); | 439 CallApiFunctionStub stub(restore_context, call_data_undefined, argc); |
| 436 __ TailCallStub(&stub); | 440 __ TailCallStub(&stub); |
| 437 } | 441 } |
| 438 | 442 |
| 439 | 443 |
| 440 // Generates call to API function. | |
| 441 static void GenerateFastApiCall(MacroAssembler* masm, | |
| 442 const CallOptimization& optimization, | |
| 443 int argc, | |
| 444 Handle<Map> map_to_holder, | |
| 445 CallOptimization::HolderLookup holder_lookup) { | |
| 446 Counters* counters = masm->isolate()->counters(); | |
| 447 __ IncrementCounter(counters->call_const_fast_api(), 1); | |
| 448 | |
| 449 // Move holder to a register | |
| 450 Register holder_reg = rax; | |
| 451 switch (holder_lookup) { | |
| 452 case CallOptimization::kHolderIsReceiver: | |
| 453 { | |
| 454 ASSERT(map_to_holder.is_null()); | |
| 455 StackArgumentsAccessor args(rsp, argc); | |
| 456 __ movp(holder_reg, args.GetReceiverOperand()); | |
| 457 } | |
| 458 break; | |
| 459 case CallOptimization::kHolderIsPrototypeOfMap: | |
| 460 { | |
| 461 Handle<JSObject> holder(JSObject::cast(map_to_holder->prototype())); | |
| 462 if (!masm->isolate()->heap()->InNewSpace(*holder)) { | |
| 463 __ Move(holder_reg, holder); | |
| 464 } else { | |
| 465 __ Move(holder_reg, map_to_holder); | |
| 466 __ movp(holder_reg, FieldOperand(holder_reg, Map::kPrototypeOffset)); | |
| 467 } | |
| 468 } | |
| 469 break; | |
| 470 case CallOptimization::kHolderNotFound: | |
| 471 UNREACHABLE(); | |
| 472 } | |
| 473 GenerateFastApiCallBody(masm, | |
| 474 optimization, | |
| 475 argc, | |
| 476 holder_reg, | |
| 477 false); | |
| 478 } | |
| 479 | |
| 480 | |
| 481 // Generate call to api function. | 444 // Generate call to api function. |
| 482 static void GenerateFastApiCall(MacroAssembler* masm, | 445 static void GenerateFastApiCall(MacroAssembler* masm, |
| 483 const CallOptimization& optimization, | 446 const CallOptimization& optimization, |
| 484 Register receiver, | 447 Register receiver, |
| 485 Register scratch1, | 448 Register scratch1, |
| 486 int argc, | 449 int argc, |
| 487 Register* values) { | 450 Register* values) { |
| 488 __ PopReturnAddressTo(scratch1); | 451 __ PopReturnAddressTo(scratch1); |
| 489 // receiver | 452 // receiver |
| 490 __ push(receiver); | 453 __ push(receiver); |
| 491 // Write the arguments to stack frame. | 454 // Write the arguments to stack frame. |
| 492 for (int i = 0; i < argc; i++) { | 455 for (int i = 0; i < argc; i++) { |
| 493 Register arg = values[argc-1-i]; | 456 Register arg = values[argc-1-i]; |
| 494 ASSERT(!receiver.is(arg)); | 457 ASSERT(!receiver.is(arg)); |
| 495 ASSERT(!scratch1.is(arg)); | 458 ASSERT(!scratch1.is(arg)); |
| 496 __ push(arg); | 459 __ push(arg); |
| 497 } | 460 } |
| 498 __ PushReturnAddressFrom(scratch1); | 461 __ PushReturnAddressFrom(scratch1); |
| 499 // Stack now matches JSFunction abi. | 462 // Stack now matches JSFunction abi. |
| 500 GenerateFastApiCallBody(masm, | 463 GenerateFastApiCallBody(masm, |
| 501 optimization, | 464 optimization, |
| 502 argc, | 465 argc, |
| 503 receiver, | 466 receiver, |
| 504 true); | 467 true); |
| 505 } | 468 } |
| 506 | 469 |
| 507 | 470 |
| 508 class CallInterceptorCompiler BASE_EMBEDDED { | |
| 509 public: | |
| 510 CallInterceptorCompiler(CallStubCompiler* stub_compiler, | |
| 511 const ParameterCount& arguments, | |
| 512 Register name) | |
| 513 : stub_compiler_(stub_compiler), | |
| 514 arguments_(arguments), | |
| 515 name_(name) {} | |
| 516 | |
| 517 void Compile(MacroAssembler* masm, | |
| 518 Handle<JSObject> object, | |
| 519 Handle<JSObject> holder, | |
| 520 Handle<Name> name, | |
| 521 LookupResult* lookup, | |
| 522 Register receiver, | |
| 523 Register scratch1, | |
| 524 Register scratch2, | |
| 525 Register scratch3, | |
| 526 Label* miss) { | |
| 527 ASSERT(holder->HasNamedInterceptor()); | |
| 528 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined()); | |
| 529 | |
| 530 // Check that the receiver isn't a smi. | |
| 531 __ JumpIfSmi(receiver, miss); | |
| 532 | |
| 533 CallOptimization optimization(lookup); | |
| 534 if (optimization.is_constant_call()) { | |
| 535 CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3, | |
| 536 holder, lookup, name, optimization, miss); | |
| 537 } else { | |
| 538 CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3, | |
| 539 name, holder, miss); | |
| 540 } | |
| 541 } | |
| 542 | |
| 543 private: | |
| 544 void CompileCacheable(MacroAssembler* masm, | |
| 545 Handle<JSObject> object, | |
| 546 Register receiver, | |
| 547 Register scratch1, | |
| 548 Register scratch2, | |
| 549 Register scratch3, | |
| 550 Handle<JSObject> interceptor_holder, | |
| 551 LookupResult* lookup, | |
| 552 Handle<Name> name, | |
| 553 const CallOptimization& optimization, | |
| 554 Label* miss_label) { | |
| 555 ASSERT(optimization.is_constant_call()); | |
| 556 ASSERT(!lookup->holder()->IsGlobalObject()); | |
| 557 | |
| 558 Counters* counters = masm->isolate()->counters(); | |
| 559 __ IncrementCounter(counters->call_const_interceptor(), 1); | |
| 560 | |
| 561 // Check that the maps from receiver to interceptor's holder | |
| 562 // haven't changed and thus we can invoke interceptor. | |
| 563 Label miss_cleanup; | |
| 564 Register holder = | |
| 565 stub_compiler_->CheckPrototypes( | |
| 566 IC::CurrentTypeOf(object, masm->isolate()), receiver, | |
| 567 interceptor_holder, scratch1, scratch2, scratch3, | |
| 568 name, miss_label); | |
| 569 | |
| 570 // Invoke an interceptor and if it provides a value, | |
| 571 // branch to |regular_invoke|. | |
| 572 Label regular_invoke; | |
| 573 LoadWithInterceptor(masm, receiver, holder, interceptor_holder, | |
| 574 &regular_invoke); | |
| 575 | |
| 576 // Interceptor returned nothing for this property. Try to use cached | |
| 577 // constant function. | |
| 578 | |
| 579 // Check that the maps from interceptor's holder to constant function's | |
| 580 // holder haven't changed and thus we can use cached constant function. | |
| 581 if (*interceptor_holder != lookup->holder()) { | |
| 582 stub_compiler_->CheckPrototypes( | |
| 583 IC::CurrentTypeOf(interceptor_holder, masm->isolate()), holder, | |
| 584 handle(lookup->holder()), scratch1, scratch2, scratch3, | |
| 585 name, miss_label); | |
| 586 } | |
| 587 | |
| 588 Handle<Map> lookup_map; | |
| 589 CallOptimization::HolderLookup holder_lookup = | |
| 590 CallOptimization::kHolderNotFound; | |
| 591 if (optimization.is_simple_api_call() && | |
| 592 !lookup->holder()->IsGlobalObject()) { | |
| 593 lookup_map = optimization.LookupHolderOfExpectedType( | |
| 594 object, object, interceptor_holder, &holder_lookup); | |
| 595 if (holder_lookup == CallOptimization::kHolderNotFound) { | |
| 596 lookup_map = | |
| 597 optimization.LookupHolderOfExpectedType( | |
| 598 object, | |
| 599 interceptor_holder, | |
| 600 Handle<JSObject>(lookup->holder()), | |
| 601 &holder_lookup); | |
| 602 } | |
| 603 } | |
| 604 | |
| 605 // Invoke function. | |
| 606 if (holder_lookup != CallOptimization::kHolderNotFound) { | |
| 607 int argc = arguments_.immediate(); | |
| 608 GenerateFastApiCall(masm, | |
| 609 optimization, | |
| 610 argc, | |
| 611 lookup_map, | |
| 612 holder_lookup); | |
| 613 } else { | |
| 614 Handle<JSFunction> fun = optimization.constant_function(); | |
| 615 stub_compiler_->GenerateJumpFunction(object, fun); | |
| 616 } | |
| 617 | |
| 618 // Invoke a regular function. | |
| 619 __ bind(&regular_invoke); | |
| 620 } | |
| 621 | |
| 622 void CompileRegular(MacroAssembler* masm, | |
| 623 Handle<JSObject> object, | |
| 624 Register receiver, | |
| 625 Register scratch1, | |
| 626 Register scratch2, | |
| 627 Register scratch3, | |
| 628 Handle<Name> name, | |
| 629 Handle<JSObject> interceptor_holder, | |
| 630 Label* miss_label) { | |
| 631 Register holder = | |
| 632 stub_compiler_->CheckPrototypes( | |
| 633 IC::CurrentTypeOf(object, masm->isolate()), receiver, | |
| 634 interceptor_holder, scratch1, scratch2, scratch3, name, miss_label); | |
| 635 | |
| 636 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 637 // Save the name_ register across the call. | |
| 638 __ push(name_); | |
| 639 | |
| 640 CompileCallLoadPropertyWithInterceptor( | |
| 641 masm, receiver, holder, name_, interceptor_holder, | |
| 642 IC::kLoadPropertyWithInterceptorForCall); | |
| 643 | |
| 644 // Restore the name_ register. | |
| 645 __ pop(name_); | |
| 646 | |
| 647 // Leave the internal frame. | |
| 648 } | |
| 649 | |
| 650 void LoadWithInterceptor(MacroAssembler* masm, | |
| 651 Register receiver, | |
| 652 Register holder, | |
| 653 Handle<JSObject> holder_obj, | |
| 654 Label* interceptor_succeeded) { | |
| 655 { | |
| 656 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 657 __ push(receiver); | |
| 658 __ push(holder); | |
| 659 __ push(name_); | |
| 660 | |
| 661 CompileCallLoadPropertyWithInterceptor( | |
| 662 masm, receiver, holder, name_, holder_obj, | |
| 663 IC::kLoadPropertyWithInterceptorOnly); | |
| 664 | |
| 665 __ pop(name_); | |
| 666 __ pop(holder); | |
| 667 __ pop(receiver); | |
| 668 // Leave the internal frame. | |
| 669 } | |
| 670 | |
| 671 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex); | |
| 672 __ j(not_equal, interceptor_succeeded); | |
| 673 } | |
| 674 | |
| 675 CallStubCompiler* stub_compiler_; | |
| 676 const ParameterCount& arguments_; | |
| 677 Register name_; | |
| 678 }; | |
| 679 | |
| 680 | |
| 681 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm, | 471 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm, |
| 682 Label* label, | 472 Label* label, |
| 683 Handle<Name> name) { | 473 Handle<Name> name) { |
| 684 if (!label->is_unused()) { | 474 if (!label->is_unused()) { |
| 685 __ bind(label); | 475 __ bind(label); |
| 686 __ Move(this->name(), name); | 476 __ Move(this->name(), name); |
| 687 } | 477 } |
| 688 } | 478 } |
| 689 | 479 |
| 690 | 480 |
| (...skipping 520 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1211 ASSERT(!kScratchRegister.is(reg)); | 1001 ASSERT(!kScratchRegister.is(reg)); |
| 1212 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); | 1002 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); |
| 1213 __ push(kScratchRegister); // return value | 1003 __ push(kScratchRegister); // return value |
| 1214 __ push(kScratchRegister); // return value default | 1004 __ push(kScratchRegister); // return value default |
| 1215 __ PushAddress(ExternalReference::isolate_address(isolate())); | 1005 __ PushAddress(ExternalReference::isolate_address(isolate())); |
| 1216 __ push(reg); // holder | 1006 __ push(reg); // holder |
| 1217 __ push(name()); // name | 1007 __ push(name()); // name |
| 1218 // Save a pointer to where we pushed the arguments pointer. This will be | 1008 // Save a pointer to where we pushed the arguments pointer. This will be |
| 1219 // passed as the const PropertyAccessorInfo& to the C++ callback. | 1009 // passed as the const PropertyAccessorInfo& to the C++ callback. |
| 1220 | 1010 |
| 1221 Address getter_address = v8::ToCData<Address>(callback->getter()); | |
| 1222 | |
| 1223 #if defined(__MINGW64__) || defined(_WIN64) | |
| 1224 Register getter_arg = r8; | |
| 1225 Register accessor_info_arg = rdx; | |
| 1226 Register name_arg = rcx; | |
| 1227 #else | |
| 1228 Register getter_arg = rdx; | |
| 1229 Register accessor_info_arg = rsi; | |
| 1230 Register name_arg = rdi; | |
| 1231 #endif | |
| 1232 | |
| 1233 ASSERT(!name_arg.is(scratch4())); | |
| 1234 __ movp(name_arg, rsp); | |
| 1235 __ PushReturnAddressFrom(scratch4()); | 1011 __ PushReturnAddressFrom(scratch4()); |
| 1236 | 1012 |
| 1237 // v8::Arguments::values_ and handler for name. | 1013 // Abi for CallApiGetter |
| 1238 const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1; | |
| 1239 | |
| 1240 // Allocate v8::AccessorInfo in non-GCed stack space. | |
| 1241 const int kArgStackSpace = 1; | |
| 1242 | |
| 1243 __ PrepareCallApiFunction(kArgStackSpace); | |
| 1244 __ lea(rax, Operand(name_arg, 1 * kPointerSize)); | |
| 1245 | |
| 1246 // v8::PropertyAccessorInfo::args_. | |
| 1247 __ movp(StackSpaceOperand(0), rax); | |
| 1248 | |
| 1249 // The context register (rsi) has been saved in PrepareCallApiFunction and | |
| 1250 // could be used to pass arguments. | |
| 1251 __ lea(accessor_info_arg, StackSpaceOperand(0)); | |
| 1252 | |
| 1253 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); | |
| 1254 | |
| 1255 Register api_function_address = r8; | 1014 Register api_function_address = r8; |
| 1256 // It's okay if api_function_address == getter_arg | 1015 Address getter_address = v8::ToCData<Address>(callback->getter()); |
| 1257 // but not accessor_info_arg or name_arg | |
| 1258 ASSERT(!api_function_address.is(accessor_info_arg) && | |
| 1259 !api_function_address.is(name_arg)); | |
| 1260 | |
| 1261 __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE); | 1016 __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE); |
| 1262 | 1017 |
| 1263 // The name handler is counted as an argument. | 1018 CallApiGetterStub stub; |
| 1264 StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength); | 1019 __ TailCallStub(&stub); |
| 1265 Operand return_value_operand = args.GetArgumentOperand( | |
| 1266 PropertyCallbackArguments::kArgsLength - 1 - | |
| 1267 PropertyCallbackArguments::kReturnValueOffset); | |
| 1268 __ CallApiFunctionAndReturn(api_function_address, | |
| 1269 thunk_address, | |
| 1270 getter_arg, | |
| 1271 kStackSpace, | |
| 1272 return_value_operand, | |
| 1273 NULL); | |
| 1274 } | 1020 } |
| 1275 | 1021 |
| 1276 | 1022 |
| 1277 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) { | 1023 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) { |
| 1278 // Return the constant value. | 1024 // Return the constant value. |
| 1279 __ Move(rax, value); | 1025 __ Move(rax, value); |
| 1280 __ ret(0); | 1026 __ ret(0); |
| 1281 } | 1027 } |
| 1282 | 1028 |
| 1283 | 1029 |
| (...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1365 this->name(), interceptor_holder); | 1111 this->name(), interceptor_holder); |
| 1366 __ PushReturnAddressFrom(scratch2()); | 1112 __ PushReturnAddressFrom(scratch2()); |
| 1367 | 1113 |
| 1368 ExternalReference ref = ExternalReference( | 1114 ExternalReference ref = ExternalReference( |
| 1369 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate()); | 1115 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate()); |
| 1370 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1); | 1116 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1); |
| 1371 } | 1117 } |
| 1372 } | 1118 } |
| 1373 | 1119 |
| 1374 | 1120 |
| 1375 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) { | |
| 1376 if (kind_ == Code::KEYED_CALL_IC) { | |
| 1377 __ Cmp(rcx, name); | |
| 1378 __ j(not_equal, miss); | |
| 1379 } | |
| 1380 } | |
| 1381 | |
| 1382 | |
| 1383 void CallStubCompiler::GenerateFunctionCheck(Register function, | |
| 1384 Register scratch, | |
| 1385 Label* miss) { | |
| 1386 __ JumpIfSmi(function, miss); | |
| 1387 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); | |
| 1388 __ j(not_equal, miss); | |
| 1389 } | |
| 1390 | |
| 1391 | |
| 1392 void CallStubCompiler::GenerateLoadFunctionFromCell( | |
| 1393 Handle<Cell> cell, | |
| 1394 Handle<JSFunction> function, | |
| 1395 Label* miss) { | |
| 1396 // Get the value from the cell. | |
| 1397 __ Move(rdi, cell); | |
| 1398 __ movp(rdi, FieldOperand(rdi, Cell::kValueOffset)); | |
| 1399 | |
| 1400 // Check that the cell contains the same function. | |
| 1401 if (heap()->InNewSpace(*function)) { | |
| 1402 // We can't embed a pointer to a function in new space so we have | |
| 1403 // to verify that the shared function info is unchanged. This has | |
| 1404 // the nice side effect that multiple closures based on the same | |
| 1405 // function can all use this call IC. Before we load through the | |
| 1406 // function, we have to verify that it still is a function. | |
| 1407 GenerateFunctionCheck(rdi, rax, miss); | |
| 1408 | |
| 1409 // Check the shared function info. Make sure it hasn't changed. | |
| 1410 __ Move(rax, Handle<SharedFunctionInfo>(function->shared())); | |
| 1411 __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax); | |
| 1412 } else { | |
| 1413 __ Cmp(rdi, function); | |
| 1414 } | |
| 1415 __ j(not_equal, miss); | |
| 1416 } | |
| 1417 | |
| 1418 | |
| 1419 void CallStubCompiler::GenerateMissBranch() { | |
| 1420 Handle<Code> code = | |
| 1421 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(), | |
| 1422 kind_, | |
| 1423 extra_state()); | |
| 1424 __ Jump(code, RelocInfo::CODE_TARGET); | |
| 1425 } | |
| 1426 | |
| 1427 | |
| 1428 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, | |
| 1429 Handle<JSObject> holder, | |
| 1430 PropertyIndex index, | |
| 1431 Handle<Name> name) { | |
| 1432 Label miss; | |
| 1433 | |
| 1434 Register reg = HandlerFrontendHeader( | |
| 1435 object, holder, name, RECEIVER_MAP_CHECK, &miss); | |
| 1436 | |
| 1437 GenerateFastPropertyLoad(masm(), rdi, reg, index.is_inobject(holder), | |
| 1438 index.translate(holder), Representation::Tagged()); | |
| 1439 GenerateJumpFunction(object, rdi, &miss); | |
| 1440 | |
| 1441 HandlerFrontendFooter(&miss); | |
| 1442 | |
| 1443 // Return the generated code. | |
| 1444 return GetCode(Code::FAST, name); | |
| 1445 } | |
| 1446 | |
| 1447 | |
| 1448 Handle<Code> CallStubCompiler::CompileFastApiCall( | |
| 1449 const CallOptimization& optimization, | |
| 1450 Handle<Object> object, | |
| 1451 Handle<JSObject> holder, | |
| 1452 Handle<Cell> cell, | |
| 1453 Handle<JSFunction> function, | |
| 1454 Handle<String> name) { | |
| 1455 ASSERT(optimization.is_simple_api_call()); | |
| 1456 // Bail out if object is a global object as we don't want to | |
| 1457 // repatch it to global receiver. | |
| 1458 if (object->IsGlobalObject()) return Handle<Code>::null(); | |
| 1459 if (!cell.is_null()) return Handle<Code>::null(); | |
| 1460 if (!object->IsJSObject()) return Handle<Code>::null(); | |
| 1461 Handle<JSObject> receiver = Handle<JSObject>::cast(object); | |
| 1462 CallOptimization::HolderLookup holder_lookup = | |
| 1463 CallOptimization::kHolderNotFound; | |
| 1464 Handle<Map> lookup_map = optimization.LookupHolderOfExpectedType( | |
| 1465 receiver, receiver, holder, &holder_lookup); | |
| 1466 if (holder_lookup == CallOptimization::kHolderNotFound) { | |
| 1467 return Handle<Code>::null(); | |
| 1468 } | |
| 1469 | |
| 1470 Label miss; | |
| 1471 GenerateNameCheck(name, &miss); | |
| 1472 | |
| 1473 const int argc = arguments().immediate(); | |
| 1474 StackArgumentsAccessor args(rsp, argc); | |
| 1475 __ movp(rdx, args.GetReceiverOperand()); | |
| 1476 | |
| 1477 // Check that the receiver isn't a smi. | |
| 1478 __ JumpIfSmi(rdx, &miss); | |
| 1479 | |
| 1480 Counters* counters = isolate()->counters(); | |
| 1481 __ IncrementCounter(counters->call_const(), 1); | |
| 1482 | |
| 1483 // Check that the maps haven't changed and find a Holder as a side effect. | |
| 1484 CheckPrototypes(IC::CurrentTypeOf(object, isolate()), rdx, holder, | |
| 1485 rbx, rax, rdi, name, &miss); | |
| 1486 | |
| 1487 GenerateFastApiCall(masm(), optimization, argc, lookup_map, holder_lookup); | |
| 1488 | |
| 1489 HandlerFrontendFooter(&miss); | |
| 1490 | |
| 1491 // Return the generated code. | |
| 1492 return GetCode(function); | |
| 1493 } | |
| 1494 | |
| 1495 | |
| 1496 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { | 1121 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { |
| 1497 Label success; | 1122 Label success; |
| 1498 // Check that the object is a boolean. | 1123 // Check that the object is a boolean. |
| 1499 __ CompareRoot(object, Heap::kTrueValueRootIndex); | 1124 __ Cmp(object, factory()->true_value()); |
| 1500 __ j(equal, &success); | 1125 __ j(equal, &success); |
| 1501 __ CompareRoot(object, Heap::kFalseValueRootIndex); | 1126 __ Cmp(object, factory()->false_value()); |
| 1502 __ j(not_equal, miss); | 1127 __ j(not_equal, miss); |
| 1503 __ bind(&success); | 1128 __ bind(&success); |
| 1504 } | 1129 } |
| 1505 | 1130 |
| 1506 | 1131 |
| 1507 void CallStubCompiler::PatchImplicitReceiver(Handle<Object> object) { | |
| 1508 if (object->IsGlobalObject()) { | |
| 1509 StackArgumentsAccessor args(rsp, arguments()); | |
| 1510 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); | |
| 1511 __ movp(args.GetReceiverOperand(), rdx); | |
| 1512 } | |
| 1513 } | |
| 1514 | |
| 1515 | |
| 1516 Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object, | |
| 1517 Handle<JSObject> holder, | |
| 1518 Handle<Name> name, | |
| 1519 CheckType check, | |
| 1520 Label* miss) { | |
| 1521 GenerateNameCheck(name, miss); | |
| 1522 | |
| 1523 Register reg = rdx; | |
| 1524 | |
| 1525 StackArgumentsAccessor args(rsp, arguments()); | |
| 1526 __ movp(reg, args.GetReceiverOperand()); | |
| 1527 | |
| 1528 // Check that the receiver isn't a smi. | |
| 1529 if (check != NUMBER_CHECK) { | |
| 1530 __ JumpIfSmi(reg, miss); | |
| 1531 } | |
| 1532 | |
| 1533 // Make sure that it's okay not to patch the on stack receiver | |
| 1534 // unless we're doing a receiver map check. | |
| 1535 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); | |
| 1536 | |
| 1537 Counters* counters = isolate()->counters(); | |
| 1538 switch (check) { | |
| 1539 case RECEIVER_MAP_CHECK: | |
| 1540 __ IncrementCounter(counters->call_const(), 1); | |
| 1541 | |
| 1542 // Check that the maps haven't changed. | |
| 1543 reg = CheckPrototypes(IC::CurrentTypeOf(object, isolate()), reg, holder, | |
| 1544 rbx, rax, rdi, name, miss); | |
| 1545 break; | |
| 1546 | |
| 1547 case STRING_CHECK: { | |
| 1548 // Check that the object is a string. | |
| 1549 __ CmpObjectType(reg, FIRST_NONSTRING_TYPE, rax); | |
| 1550 __ j(above_equal, miss); | |
| 1551 // Check that the maps starting from the prototype haven't changed. | |
| 1552 GenerateDirectLoadGlobalFunctionPrototype( | |
| 1553 masm(), Context::STRING_FUNCTION_INDEX, rax, miss); | |
| 1554 break; | |
| 1555 } | |
| 1556 case SYMBOL_CHECK: { | |
| 1557 // Check that the object is a symbol. | |
| 1558 __ CmpObjectType(reg, SYMBOL_TYPE, rax); | |
| 1559 __ j(not_equal, miss); | |
| 1560 // Check that the maps starting from the prototype haven't changed. | |
| 1561 GenerateDirectLoadGlobalFunctionPrototype( | |
| 1562 masm(), Context::SYMBOL_FUNCTION_INDEX, rax, miss); | |
| 1563 break; | |
| 1564 } | |
| 1565 case NUMBER_CHECK: { | |
| 1566 Label fast; | |
| 1567 // Check that the object is a smi or a heap number. | |
| 1568 __ JumpIfSmi(reg, &fast); | |
| 1569 __ CmpObjectType(reg, HEAP_NUMBER_TYPE, rax); | |
| 1570 __ j(not_equal, miss); | |
| 1571 __ bind(&fast); | |
| 1572 // Check that the maps starting from the prototype haven't changed. | |
| 1573 GenerateDirectLoadGlobalFunctionPrototype( | |
| 1574 masm(), Context::NUMBER_FUNCTION_INDEX, rax, miss); | |
| 1575 break; | |
| 1576 } | |
| 1577 case BOOLEAN_CHECK: { | |
| 1578 GenerateBooleanCheck(reg, miss); | |
| 1579 // Check that the maps starting from the prototype haven't changed. | |
| 1580 GenerateDirectLoadGlobalFunctionPrototype( | |
| 1581 masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, miss); | |
| 1582 break; | |
| 1583 } | |
| 1584 } | |
| 1585 | |
| 1586 if (check != RECEIVER_MAP_CHECK) { | |
| 1587 Handle<Object> prototype(object->GetPrototype(isolate()), isolate()); | |
| 1588 reg = CheckPrototypes( | |
| 1589 IC::CurrentTypeOf(prototype, isolate()), | |
| 1590 rax, holder, rbx, rdx, rdi, name, miss); | |
| 1591 } | |
| 1592 | |
| 1593 return reg; | |
| 1594 } | |
| 1595 | |
| 1596 | |
| 1597 void CallStubCompiler::GenerateJumpFunction(Handle<Object> object, | |
| 1598 Register function, | |
| 1599 Label* miss) { | |
| 1600 // Check that the function really is a function. | |
| 1601 GenerateFunctionCheck(function, rbx, miss); | |
| 1602 | |
| 1603 if (!function.is(rdi)) __ movp(rdi, function); | |
| 1604 PatchImplicitReceiver(object); | |
| 1605 | |
| 1606 // Invoke the function. | |
| 1607 __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION, NullCallWrapper()); | |
| 1608 } | |
| 1609 | |
| 1610 | |
| 1611 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object, | |
| 1612 Handle<JSObject> holder, | |
| 1613 Handle<Name> name) { | |
| 1614 Label miss; | |
| 1615 GenerateNameCheck(name, &miss); | |
| 1616 | |
| 1617 LookupResult lookup(isolate()); | |
| 1618 LookupPostInterceptor(holder, name, &lookup); | |
| 1619 | |
| 1620 // Get the receiver from the stack. | |
| 1621 StackArgumentsAccessor args(rsp, arguments()); | |
| 1622 __ movp(rdx, args.GetReceiverOperand()); | |
| 1623 | |
| 1624 CallInterceptorCompiler compiler(this, arguments(), rcx); | |
| 1625 compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax, | |
| 1626 &miss); | |
| 1627 | |
| 1628 // Restore receiver. | |
| 1629 __ movp(rdx, args.GetReceiverOperand()); | |
| 1630 | |
| 1631 GenerateJumpFunction(object, rax, &miss); | |
| 1632 | |
| 1633 HandlerFrontendFooter(&miss); | |
| 1634 | |
| 1635 // Return the generated code. | |
| 1636 return GetCode(Code::FAST, name); | |
| 1637 } | |
| 1638 | |
| 1639 | |
| 1640 Handle<Code> CallStubCompiler::CompileCallGlobal( | |
| 1641 Handle<JSObject> object, | |
| 1642 Handle<GlobalObject> holder, | |
| 1643 Handle<PropertyCell> cell, | |
| 1644 Handle<JSFunction> function, | |
| 1645 Handle<Name> name) { | |
| 1646 if (HasCustomCallGenerator(function)) { | |
| 1647 Handle<Code> code = CompileCustomCall( | |
| 1648 object, holder, cell, function, Handle<String>::cast(name), | |
| 1649 Code::NORMAL); | |
| 1650 // A null handle means bail out to the regular compiler code below. | |
| 1651 if (!code.is_null()) return code; | |
| 1652 } | |
| 1653 | |
| 1654 Label miss; | |
| 1655 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); | |
| 1656 // Potentially loads a closure that matches the shared function info of the | |
| 1657 // function, rather than function. | |
| 1658 GenerateLoadFunctionFromCell(cell, function, &miss); | |
| 1659 Counters* counters = isolate()->counters(); | |
| 1660 __ IncrementCounter(counters->call_global_inline(), 1); | |
| 1661 GenerateJumpFunction(object, rdi, function); | |
| 1662 HandlerFrontendFooter(&miss); | |
| 1663 | |
| 1664 // Return the generated code. | |
| 1665 return GetCode(Code::NORMAL, name); | |
| 1666 } | |
| 1667 | |
| 1668 | |
| 1669 Handle<Code> StoreStubCompiler::CompileStoreCallback( | 1132 Handle<Code> StoreStubCompiler::CompileStoreCallback( |
| 1670 Handle<JSObject> object, | 1133 Handle<JSObject> object, |
| 1671 Handle<JSObject> holder, | 1134 Handle<JSObject> holder, |
| 1672 Handle<Name> name, | 1135 Handle<Name> name, |
| 1673 Handle<ExecutableAccessorInfo> callback) { | 1136 Handle<ExecutableAccessorInfo> callback) { |
| 1674 Register holder_reg = HandlerFrontend( | 1137 Register holder_reg = HandlerFrontend( |
| 1675 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name); | 1138 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name); |
| 1676 | 1139 |
| 1677 __ PopReturnAddressTo(scratch1()); | 1140 __ PopReturnAddressTo(scratch1()); |
| 1678 __ push(receiver()); | 1141 __ push(receiver()); |
| (...skipping 232 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1911 | 1374 |
| 1912 // Check for deleted property if property can actually be deleted. | 1375 // Check for deleted property if property can actually be deleted. |
| 1913 if (!is_dont_delete) { | 1376 if (!is_dont_delete) { |
| 1914 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); | 1377 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); |
| 1915 __ j(equal, &miss); | 1378 __ j(equal, &miss); |
| 1916 } else if (FLAG_debug_code) { | 1379 } else if (FLAG_debug_code) { |
| 1917 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); | 1380 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); |
| 1918 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole); | 1381 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole); |
| 1919 } | 1382 } |
| 1920 | 1383 |
| 1921 HandlerFrontendFooter(name, &miss); | |
| 1922 | |
| 1923 Counters* counters = isolate()->counters(); | 1384 Counters* counters = isolate()->counters(); |
| 1924 __ IncrementCounter(counters->named_load_global_stub(), 1); | 1385 __ IncrementCounter(counters->named_load_global_stub(), 1); |
| 1925 __ movp(rax, rbx); | 1386 __ movp(rax, rbx); |
| 1926 __ ret(0); | 1387 __ ret(0); |
| 1927 | 1388 |
| 1389 HandlerFrontendFooter(name, &miss); |
| 1390 |
| 1928 // Return the generated code. | 1391 // Return the generated code. |
| 1929 return GetCode(kind(), Code::NORMAL, name); | 1392 return GetCode(kind(), Code::NORMAL, name); |
| 1930 } | 1393 } |
| 1931 | 1394 |
| 1932 | 1395 |
| 1933 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC( | 1396 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC( |
| 1934 TypeHandleList* types, | 1397 TypeHandleList* types, |
| 1935 CodeHandleList* handlers, | 1398 CodeHandleList* handlers, |
| 1936 Handle<Name> name, | 1399 Handle<Name> name, |
| 1937 Code::StubType type, | 1400 Code::StubType type, |
| (...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2022 // ----------------------------------- | 1485 // ----------------------------------- |
| 2023 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 1486 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
| 2024 } | 1487 } |
| 2025 | 1488 |
| 2026 | 1489 |
| 2027 #undef __ | 1490 #undef __ |
| 2028 | 1491 |
| 2029 } } // namespace v8::internal | 1492 } } // namespace v8::internal |
| 2030 | 1493 |
| 2031 #endif // V8_TARGET_ARCH_X64 | 1494 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |