OLD | NEW |
---|---|
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 461 matching lines...) | |
472 descriptor->param_representations_ = representations; | 472 descriptor->param_representations_ = representations; |
473 } | 473 } |
474 } | 474 } |
475 | 475 |
476 | 476 |
477 #define __ ACCESS_MASM(masm) | 477 #define __ ACCESS_MASM(masm) |
478 | 478 |
479 | 479 |
480 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { | 480 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { |
481 // Update the static counter each time a new code stub is generated. | 481 // Update the static counter each time a new code stub is generated. |
482 Isolate* isolate = masm->isolate(); | 482 isolate()->counters()->code_stubs()->Increment(); |
483 isolate->counters()->code_stubs()->Increment(); | |
484 | 483 |
485 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); | 484 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate()); |
486 int param_count = descriptor->register_param_count_; | 485 int param_count = descriptor->register_param_count_; |
487 { | 486 { |
488 // Call the runtime system in a fresh internal frame. | 487 // Call the runtime system in a fresh internal frame. |
489 FrameScope scope(masm, StackFrame::INTERNAL); | 488 FrameScope scope(masm, StackFrame::INTERNAL); |
490 ASSERT(descriptor->register_param_count_ == 0 || | 489 ASSERT(descriptor->register_param_count_ == 0 || |
491 rax.is(descriptor->register_params_[param_count - 1])); | 490 rax.is(descriptor->register_params_[param_count - 1])); |
492 // Push arguments | 491 // Push arguments |
493 for (int i = 0; i < param_count; ++i) { | 492 for (int i = 0; i < param_count; ++i) { |
494 __ Push(descriptor->register_params_[i]); | 493 __ Push(descriptor->register_params_[i]); |
495 } | 494 } |
496 ExternalReference miss = descriptor->miss_handler(); | 495 ExternalReference miss = descriptor->miss_handler(); |
497 __ CallExternalReference(miss, descriptor->register_param_count_); | 496 __ CallExternalReference(miss, descriptor->register_param_count_); |
498 } | 497 } |
499 | 498 |
500 __ Ret(); | 499 __ Ret(); |
501 } | 500 } |
502 | 501 |
503 | 502 |
504 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 503 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
505 __ PushCallerSaved(save_doubles_); | 504 __ PushCallerSaved(save_doubles_); |
506 const int argument_count = 1; | 505 const int argument_count = 1; |
507 __ PrepareCallCFunction(argument_count); | 506 __ PrepareCallCFunction(argument_count); |
508 __ LoadAddress(arg_reg_1, | 507 __ LoadAddress(arg_reg_1, |
509 ExternalReference::isolate_address(masm->isolate())); | 508 ExternalReference::isolate_address(isolate())); |
510 | 509 |
511 AllowExternalCallThatCantCauseGC scope(masm); | 510 AllowExternalCallThatCantCauseGC scope(masm); |
512 __ CallCFunction( | 511 __ CallCFunction( |
513 ExternalReference::store_buffer_overflow_function(masm->isolate()), | 512 ExternalReference::store_buffer_overflow_function(isolate()), |
514 argument_count); | 513 argument_count); |
515 __ PopCallerSaved(save_doubles_); | 514 __ PopCallerSaved(save_doubles_); |
516 __ ret(0); | 515 __ ret(0); |
517 } | 516 } |
518 | 517 |
519 | 518 |
520 class FloatingPointHelper : public AllStatic { | 519 class FloatingPointHelper : public AllStatic { |
521 public: | 520 public: |
522 enum ConvertUndefined { | 521 enum ConvertUndefined { |
523 CONVERT_UNDEFINED_TO_ZERO, | 522 CONVERT_UNDEFINED_TO_ZERO, |
(...skipping 340 matching lines...) | |
864 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. | 863 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. |
865 __ xorps(double_scratch2, double_scratch2); | 864 __ xorps(double_scratch2, double_scratch2); |
866 __ ucomisd(double_scratch2, double_result); | 865 __ ucomisd(double_scratch2, double_result); |
867 // double_exponent aliased as double_scratch2 has already been overwritten | 866 // double_exponent aliased as double_scratch2 has already been overwritten |
868 // and may not have contained the exponent value in the first place when the | 867 // and may not have contained the exponent value in the first place when the |
869 // input was a smi. We reset it with exponent value before bailing out. | 868 // input was a smi. We reset it with exponent value before bailing out. |
870 __ j(not_equal, &done); | 869 __ j(not_equal, &done); |
871 __ Cvtlsi2sd(double_exponent, exponent); | 870 __ Cvtlsi2sd(double_exponent, exponent); |
872 | 871 |
873 // Returning or bailing out. | 872 // Returning or bailing out. |
874 Counters* counters = masm->isolate()->counters(); | 873 Counters* counters = isolate()->counters(); |
875 if (exponent_type_ == ON_STACK) { | 874 if (exponent_type_ == ON_STACK) { |
876 // The arguments are still on the stack. | 875 // The arguments are still on the stack. |
877 __ bind(&call_runtime); | 876 __ bind(&call_runtime); |
878 __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); | 877 __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); |
879 | 878 |
880 // The stub is called from non-optimized code, which expects the result | 879 // The stub is called from non-optimized code, which expects the result |
881 // as heap number in rax. | 880 // as heap number in rax. |
882 __ bind(&done); | 881 __ bind(&done); |
883 __ AllocateHeapNumber(rax, rcx, &call_runtime); | 882 __ AllocateHeapNumber(rax, rcx, &call_runtime); |
884 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); | 883 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); |
885 __ IncrementCounter(counters->math_pow(), 1); | 884 __ IncrementCounter(counters->math_pow(), 1); |
886 __ ret(2 * kPointerSize); | 885 __ ret(2 * kPointerSize); |
887 } else { | 886 } else { |
888 __ bind(&call_runtime); | 887 __ bind(&call_runtime); |
889 // Move base to the correct argument register. Exponent is already in xmm1. | 888 // Move base to the correct argument register. Exponent is already in xmm1. |
890 __ movsd(xmm0, double_base); | 889 __ movsd(xmm0, double_base); |
891 ASSERT(double_exponent.is(xmm1)); | 890 ASSERT(double_exponent.is(xmm1)); |
892 { | 891 { |
893 AllowExternalCallThatCantCauseGC scope(masm); | 892 AllowExternalCallThatCantCauseGC scope(masm); |
894 __ PrepareCallCFunction(2); | 893 __ PrepareCallCFunction(2); |
895 __ CallCFunction( | 894 __ CallCFunction( |
896 ExternalReference::power_double_double_function(masm->isolate()), 2); | 895 ExternalReference::power_double_double_function(isolate()), 2); |
897 } | 896 } |
898 // Return value is in xmm0. | 897 // Return value is in xmm0. |
899 __ movsd(double_result, xmm0); | 898 __ movsd(double_result, xmm0); |
900 | 899 |
901 __ bind(&done); | 900 __ bind(&done); |
902 __ IncrementCounter(counters->math_pow(), 1); | 901 __ IncrementCounter(counters->math_pow(), 1); |
903 __ ret(0); | 902 __ ret(0); |
904 } | 903 } |
905 } | 904 } |
906 | 905 |
907 | 906 |
908 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 907 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
909 Label miss; | 908 Label miss; |
910 Register receiver; | 909 Register receiver; |
911 if (kind() == Code::KEYED_LOAD_IC) { | 910 if (kind() == Code::KEYED_LOAD_IC) { |
912 // ----------- S t a t e ------------- | 911 // ----------- S t a t e ------------- |
913 // -- rax : key | 912 // -- rax : key |
914 // -- rdx : receiver | 913 // -- rdx : receiver |
915 // -- rsp[0] : return address | 914 // -- rsp[0] : return address |
916 // ----------------------------------- | 915 // ----------------------------------- |
917 __ Cmp(rax, masm->isolate()->factory()->prototype_string()); | 916 __ Cmp(rax, isolate()->factory()->prototype_string()); |
918 __ j(not_equal, &miss); | 917 __ j(not_equal, &miss); |
919 receiver = rdx; | 918 receiver = rdx; |
920 } else { | 919 } else { |
921 ASSERT(kind() == Code::LOAD_IC); | 920 ASSERT(kind() == Code::LOAD_IC); |
922 // ----------- S t a t e ------------- | 921 // ----------- S t a t e ------------- |
923 // -- rax : receiver | 922 // -- rax : receiver |
924 // -- rcx : name | 923 // -- rcx : name |
925 // -- rsp[0] : return address | 924 // -- rsp[0] : return address |
926 // ----------------------------------- | 925 // ----------------------------------- |
927 receiver = rax; | 926 receiver = rax; |
(...skipping 65 matching lines...) | |
993 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 992 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
994 // Stack layout: | 993 // Stack layout: |
995 // rsp[0] : return address | 994 // rsp[0] : return address |
996 // rsp[8] : number of parameters (tagged) | 995 // rsp[8] : number of parameters (tagged) |
997 // rsp[16] : receiver displacement | 996 // rsp[16] : receiver displacement |
998 // rsp[24] : function | 997 // rsp[24] : function |
999 // Registers used over the whole function: | 998 // Registers used over the whole function: |
1000 // rbx: the mapped parameter count (untagged) | 999 // rbx: the mapped parameter count (untagged) |
1001 // rax: the allocated object (tagged). | 1000 // rax: the allocated object (tagged). |
1002 | 1001 |
1003 Factory* factory = masm->isolate()->factory(); | 1002 Factory* factory = isolate()->factory(); |
1004 | 1003 |
1005 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1004 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1006 __ SmiToInteger64(rbx, args.GetArgumentOperand(2)); | 1005 __ SmiToInteger64(rbx, args.GetArgumentOperand(2)); |
1007 // rbx = parameter count (untagged) | 1006 // rbx = parameter count (untagged) |
1008 | 1007 |
1009 // Check if the calling frame is an arguments adaptor frame. | 1008 // Check if the calling frame is an arguments adaptor frame. |
1010 Label runtime; | 1009 Label runtime; |
1011 Label adaptor_frame, try_allocate; | 1010 Label adaptor_frame, try_allocate; |
1012 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 1011 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
1013 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 1012 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
(...skipping 342 matching lines...) | |
1356 SUBJECT_STRING_ARGUMENT_INDEX, | 1355 SUBJECT_STRING_ARGUMENT_INDEX, |
1357 PREVIOUS_INDEX_ARGUMENT_INDEX, | 1356 PREVIOUS_INDEX_ARGUMENT_INDEX, |
1358 LAST_MATCH_INFO_ARGUMENT_INDEX, | 1357 LAST_MATCH_INFO_ARGUMENT_INDEX, |
1359 REG_EXP_EXEC_ARGUMENT_COUNT | 1358 REG_EXP_EXEC_ARGUMENT_COUNT |
1360 }; | 1359 }; |
1361 | 1360 |
1362 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT, | 1361 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT, |
1363 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1362 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1364 Label runtime; | 1363 Label runtime; |
1365 // Ensure that a RegExp stack is allocated. | 1364 // Ensure that a RegExp stack is allocated. |
1366 Isolate* isolate = masm->isolate(); | |
1367 ExternalReference address_of_regexp_stack_memory_address = | 1365 ExternalReference address_of_regexp_stack_memory_address = |
1368 ExternalReference::address_of_regexp_stack_memory_address(isolate); | 1366 ExternalReference::address_of_regexp_stack_memory_address(isolate()); |
1369 ExternalReference address_of_regexp_stack_memory_size = | 1367 ExternalReference address_of_regexp_stack_memory_size = |
1370 ExternalReference::address_of_regexp_stack_memory_size(isolate); | 1368 ExternalReference::address_of_regexp_stack_memory_size(isolate()); |
1371 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); | 1369 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); |
1372 __ testp(kScratchRegister, kScratchRegister); | 1370 __ testp(kScratchRegister, kScratchRegister); |
1373 __ j(zero, &runtime); | 1371 __ j(zero, &runtime); |
1374 | 1372 |
1375 // Check that the first argument is a JSRegExp object. | 1373 // Check that the first argument is a JSRegExp object. |
1376 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); | 1374 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); |
1377 __ JumpIfSmi(rax, &runtime); | 1375 __ JumpIfSmi(rax, &runtime); |
1378 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); | 1376 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); |
1379 __ j(not_equal, &runtime); | 1377 __ j(not_equal, &runtime); |
1380 | 1378 |
(...skipping 131 matching lines...) | |
1512 __ JumpIfNotSmi(rbx, &runtime); | 1510 __ JumpIfNotSmi(rbx, &runtime); |
1513 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); | 1511 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); |
1514 __ j(above_equal, &runtime); | 1512 __ j(above_equal, &runtime); |
1515 __ SmiToInteger64(rbx, rbx); | 1513 __ SmiToInteger64(rbx, rbx); |
1516 | 1514 |
1517 // rdi: subject string | 1515 // rdi: subject string |
1518 // rbx: previous index | 1516 // rbx: previous index |
1519 // rcx: encoding of subject string (1 if ASCII 0 if two_byte); | 1517 // rcx: encoding of subject string (1 if ASCII 0 if two_byte); |
1520 // r11: code | 1518 // r11: code |
1521 // All checks done. Now push arguments for native regexp code. | 1519 // All checks done. Now push arguments for native regexp code. |
1522 Counters* counters = masm->isolate()->counters(); | 1520 Counters* counters = isolate()->counters(); |
1523 __ IncrementCounter(counters->regexp_entry_native(), 1); | 1521 __ IncrementCounter(counters->regexp_entry_native(), 1); |
1524 | 1522 |
1525 // Isolates: note we add an additional parameter here (isolate pointer). | 1523 // Isolates: note we add an additional parameter here (isolate pointer). |
1526 static const int kRegExpExecuteArguments = 9; | 1524 static const int kRegExpExecuteArguments = 9; |
1527 int argument_slots_on_stack = | 1525 int argument_slots_on_stack = |
1528 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments); | 1526 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments); |
1529 __ EnterApiExitFrame(argument_slots_on_stack); | 1527 __ EnterApiExitFrame(argument_slots_on_stack); |
1530 | 1528 |
1531 // Argument 9: Pass current isolate address. | 1529 // Argument 9: Pass current isolate address. |
1532 __ LoadAddress(kScratchRegister, | 1530 __ LoadAddress(kScratchRegister, |
1533 ExternalReference::isolate_address(masm->isolate())); | 1531 ExternalReference::isolate_address(isolate())); |
1534 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), | 1532 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), |
1535 kScratchRegister); | 1533 kScratchRegister); |
1536 | 1534 |
1537 // Argument 8: Indicate that this is a direct call from JavaScript. | 1535 // Argument 8: Indicate that this is a direct call from JavaScript. |
1538 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), | 1536 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), |
1539 Immediate(1)); | 1537 Immediate(1)); |
1540 | 1538 |
1541 // Argument 7: Start (high end) of backtracking stack memory area. | 1539 // Argument 7: Start (high end) of backtracking stack memory area. |
1542 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); | 1540 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); |
1543 __ movp(r9, Operand(kScratchRegister, 0)); | 1541 __ movp(r9, Operand(kScratchRegister, 0)); |
1544 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); | 1542 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); |
1545 __ addp(r9, Operand(kScratchRegister, 0)); | 1543 __ addp(r9, Operand(kScratchRegister, 0)); |
1546 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); | 1544 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); |
1547 | 1545 |
1548 // Argument 6: Set the number of capture registers to zero to force global | 1546 // Argument 6: Set the number of capture registers to zero to force global |
1549 // regexps to behave as non-global. This does not affect non-global regexps. | 1547 // regexps to behave as non-global. This does not affect non-global regexps. |
1550 // Argument 6 is passed in r9 on Linux and on the stack on Windows. | 1548 // Argument 6 is passed in r9 on Linux and on the stack on Windows. |
1551 #ifdef _WIN64 | 1549 #ifdef _WIN64 |
1552 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), | 1550 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), |
1553 Immediate(0)); | 1551 Immediate(0)); |
1554 #else | 1552 #else |
1555 __ Set(r9, 0); | 1553 __ Set(r9, 0); |
1556 #endif | 1554 #endif |
1557 | 1555 |
1558 // Argument 5: static offsets vector buffer. | 1556 // Argument 5: static offsets vector buffer. |
1559 __ LoadAddress(r8, | 1557 __ LoadAddress( |
1560 ExternalReference::address_of_static_offsets_vector(isolate)); | 1558 r8, ExternalReference::address_of_static_offsets_vector(isolate())); |
1561 // Argument 5 passed in r8 on Linux and on the stack on Windows. | 1559 // Argument 5 passed in r8 on Linux and on the stack on Windows. |
1562 #ifdef _WIN64 | 1560 #ifdef _WIN64 |
1563 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8); | 1561 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8); |
1564 #endif | 1562 #endif |
1565 | 1563 |
1566 // rdi: subject string | 1564 // rdi: subject string |
1567 // rbx: previous index | 1565 // rbx: previous index |
1568 // rcx: encoding of subject string (1 if ASCII 0 if two_byte); | 1566 // rcx: encoding of subject string (1 if ASCII 0 if two_byte); |
1569 // r11: code | 1567 // r11: code |
1570 // r14: slice offset | 1568 // r14: slice offset |
(...skipping 104 matching lines...) | |
1675 kDontSaveFPRegs); | 1673 kDontSaveFPRegs); |
1676 __ movp(rax, rcx); | 1674 __ movp(rax, rcx); |
1677 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); | 1675 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); |
1678 __ RecordWriteField(rbx, | 1676 __ RecordWriteField(rbx, |
1679 RegExpImpl::kLastInputOffset, | 1677 RegExpImpl::kLastInputOffset, |
1680 rax, | 1678 rax, |
1681 rdi, | 1679 rdi, |
1682 kDontSaveFPRegs); | 1680 kDontSaveFPRegs); |
1683 | 1681 |
1684 // Get the static offsets vector filled by the native regexp code. | 1682 // Get the static offsets vector filled by the native regexp code. |
1685 __ LoadAddress(rcx, | 1683 __ LoadAddress( |
1686 ExternalReference::address_of_static_offsets_vector(isolate)); | 1684 rcx, ExternalReference::address_of_static_offsets_vector(isolate())); |
1687 | 1685 |
1688 // rbx: last_match_info backing store (FixedArray) | 1686 // rbx: last_match_info backing store (FixedArray) |
1689 // rcx: offsets vector | 1687 // rcx: offsets vector |
1690 // rdx: number of capture registers | 1688 // rdx: number of capture registers |
1691 Label next_capture, done; | 1689 Label next_capture, done; |
1692 // Capture register counter starts from number of capture registers and | 1690 // Capture register counter starts from number of capture registers and |
1693 // counts down until wrapping after zero. | 1691 // counts down until wrapping after zero. |
1694 __ bind(&next_capture); | 1692 __ bind(&next_capture); |
1695 __ subp(rdx, Immediate(1)); | 1693 __ subp(rdx, Immediate(1)); |
1696 __ j(negative, &done, Label::kNear); | 1694 __ j(negative, &done, Label::kNear); |
(...skipping 12 matching lines...) | |
1709 // Return last match info. | 1707 // Return last match info. |
1710 __ movp(rax, r15); | 1708 __ movp(rax, r15); |
1711 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); | 1709 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); |
1712 | 1710 |
1713 __ bind(&exception); | 1711 __ bind(&exception); |
1714 // Result must now be exception. If there is no pending exception already a | 1712 // Result must now be exception. If there is no pending exception already a |
1715 // stack overflow (on the backtrack stack) was detected in RegExp code but | 1713 // stack overflow (on the backtrack stack) was detected in RegExp code but |
1716 // haven't created the exception yet. Handle that in the runtime system. | 1714 // haven't created the exception yet. Handle that in the runtime system. |
1717 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | 1715 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
1718 ExternalReference pending_exception_address( | 1716 ExternalReference pending_exception_address( |
1719 Isolate::kPendingExceptionAddress, isolate); | 1717 Isolate::kPendingExceptionAddress, isolate()); |
1720 Operand pending_exception_operand = | 1718 Operand pending_exception_operand = |
1721 masm->ExternalOperand(pending_exception_address, rbx); | 1719 masm->ExternalOperand(pending_exception_address, rbx); |
1722 __ movp(rax, pending_exception_operand); | 1720 __ movp(rax, pending_exception_operand); |
1723 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); | 1721 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); |
1724 __ cmpp(rax, rdx); | 1722 __ cmpp(rax, rdx); |
1725 __ j(equal, &runtime); | 1723 __ j(equal, &runtime); |
1726 __ movp(pending_exception_operand, rdx); | 1724 __ movp(pending_exception_operand, rdx); |
1727 | 1725 |
1728 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); | 1726 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); |
1729 Label termination_exception; | 1727 Label termination_exception; |
(...skipping 92 matching lines...) | |
1822 FieldOperand(scratch, Map::kInstanceTypeOffset)); | 1820 FieldOperand(scratch, Map::kInstanceTypeOffset)); |
1823 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); | 1821 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
1824 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); | 1822 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); |
1825 __ j(not_zero, label); | 1823 __ j(not_zero, label); |
1826 } | 1824 } |
1827 | 1825 |
1828 | 1826 |
1829 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { | 1827 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { |
1830 Label check_unequal_objects, done; | 1828 Label check_unequal_objects, done; |
1831 Condition cc = GetCondition(); | 1829 Condition cc = GetCondition(); |
1832 Factory* factory = masm->isolate()->factory(); | 1830 Factory* factory = isolate()->factory(); |
1833 | 1831 |
1834 Label miss; | 1832 Label miss; |
1835 CheckInputType(masm, rdx, left_, &miss); | 1833 CheckInputType(masm, rdx, left_, &miss); |
1836 CheckInputType(masm, rax, right_, &miss); | 1834 CheckInputType(masm, rax, right_, &miss); |
1837 | 1835 |
1838 // Compare two smis. | 1836 // Compare two smis. |
1839 Label non_smi, smi_done; | 1837 Label non_smi, smi_done; |
1840 __ JumpIfNotBothSmi(rax, rdx, &non_smi); | 1838 __ JumpIfNotBothSmi(rax, rdx, &non_smi); |
1841 __ subp(rdx, rax); | 1839 __ subp(rdx, rax); |
1842 __ j(no_overflow, &smi_done); | 1840 __ j(no_overflow, &smi_done); |
(...skipping 313 matching lines...) | |
2156 FrameScope scope(masm, StackFrame::INTERNAL); | 2154 FrameScope scope(masm, StackFrame::INTERNAL); |
2157 | 2155 |
2158 // Arguments register must be smi-tagged to call out. | 2156 // Arguments register must be smi-tagged to call out. |
2159 __ Integer32ToSmi(rax, rax); | 2157 __ Integer32ToSmi(rax, rax); |
2160 __ Push(rax); | 2158 __ Push(rax); |
2161 __ Push(rdi); | 2159 __ Push(rdi); |
2162 __ Integer32ToSmi(rdx, rdx); | 2160 __ Integer32ToSmi(rdx, rdx); |
2163 __ Push(rdx); | 2161 __ Push(rdx); |
2164 __ Push(rbx); | 2162 __ Push(rbx); |
2165 | 2163 |
2166 CreateAllocationSiteStub create_stub; | 2164 CreateAllocationSiteStub create_stub(isolate); |
2167 __ CallStub(&create_stub); | 2165 __ CallStub(&create_stub); |
2168 | 2166 |
2169 __ Pop(rbx); | 2167 __ Pop(rbx); |
2170 __ Pop(rdx); | 2168 __ Pop(rdx); |
2171 __ Pop(rdi); | 2169 __ Pop(rdi); |
2172 __ Pop(rax); | 2170 __ Pop(rax); |
2173 __ SmiToInteger32(rax, rax); | 2171 __ SmiToInteger32(rax, rax); |
2174 } | 2172 } |
2175 __ jmp(&done_no_smi_convert); | 2173 __ jmp(&done_no_smi_convert); |
2176 | 2174 |
(...skipping 18 matching lines...) | |
2195 | 2193 |
2196 __ bind(&done_no_smi_convert); | 2194 __ bind(&done_no_smi_convert); |
2197 } | 2195 } |
2198 | 2196 |
2199 | 2197 |
2200 void CallFunctionStub::Generate(MacroAssembler* masm) { | 2198 void CallFunctionStub::Generate(MacroAssembler* masm) { |
2201 // rbx : feedback vector | 2199 // rbx : feedback vector |
2202 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback | 2200 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback |
2203 // vector (Smi) | 2201 // vector (Smi) |
2204 // rdi : the function to call | 2202 // rdi : the function to call |
2205 Isolate* isolate = masm->isolate(); | |
2206 Label slow, non_function, wrap, cont; | 2203 Label slow, non_function, wrap, cont; |
2207 StackArgumentsAccessor args(rsp, argc_); | 2204 StackArgumentsAccessor args(rsp, argc_); |
2208 | 2205 |
2209 if (NeedsChecks()) { | 2206 if (NeedsChecks()) { |
2210 // Check that the function really is a JavaScript function. | 2207 // Check that the function really is a JavaScript function. |
2211 __ JumpIfSmi(rdi, &non_function); | 2208 __ JumpIfSmi(rdi, &non_function); |
2212 | 2209 |
2213 // Goto slow case if we do not have a function. | 2210 // Goto slow case if we do not have a function. |
2214 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 2211 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
2215 __ j(not_equal, &slow); | 2212 __ j(not_equal, &slow); |
(...skipping 45 matching lines...) | |
2261 if (NeedsChecks()) { | 2258 if (NeedsChecks()) { |
2262 // Slow-case: Non-function called. | 2259 // Slow-case: Non-function called. |
2263 __ bind(&slow); | 2260 __ bind(&slow); |
2264 if (RecordCallTarget()) { | 2261 if (RecordCallTarget()) { |
2265 // If there is a call target cache, mark it megamorphic in the | 2262 // If there is a call target cache, mark it megamorphic in the |
2266 // non-function case. MegamorphicSentinel is an immortal immovable | 2263 // non-function case. MegamorphicSentinel is an immortal immovable |
2267 // object (megamorphic symbol) so no write barrier is needed. | 2264 // object (megamorphic symbol) so no write barrier is needed. |
2268 __ SmiToInteger32(rdx, rdx); | 2265 __ SmiToInteger32(rdx, rdx); |
2269 __ Move(FieldOperand(rbx, rdx, times_pointer_size, | 2266 __ Move(FieldOperand(rbx, rdx, times_pointer_size, |
2270 FixedArray::kHeaderSize), | 2267 FixedArray::kHeaderSize), |
2271 TypeFeedbackInfo::MegamorphicSentinel(isolate)); | 2268 TypeFeedbackInfo::MegamorphicSentinel(isolate())); |
2272 __ Integer32ToSmi(rdx, rdx); | 2269 __ Integer32ToSmi(rdx, rdx); |
2273 } | 2270 } |
2274 // Check for function proxy. | 2271 // Check for function proxy. |
2275 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); | 2272 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); |
2276 __ j(not_equal, &non_function); | 2273 __ j(not_equal, &non_function); |
2277 __ PopReturnAddressTo(rcx); | 2274 __ PopReturnAddressTo(rcx); |
2278 __ Push(rdi); // put proxy as additional argument under return address | 2275 __ Push(rdi); // put proxy as additional argument under return address |
2279 __ PushReturnAddressFrom(rcx); | 2276 __ PushReturnAddressFrom(rcx); |
2280 __ Set(rax, argc_ + 1); | 2277 __ Set(rax, argc_ + 1); |
2281 __ Set(rbx, 0); | 2278 __ Set(rbx, 0); |
2282 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 2279 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
2283 { | 2280 { |
2284 Handle<Code> adaptor = | 2281 Handle<Code> adaptor = |
2285 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 2282 isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
2286 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 2283 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
2287 } | 2284 } |
2288 | 2285 |
2289 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 2286 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
2290 // of the original receiver from the call site). | 2287 // of the original receiver from the call site). |
2291 __ bind(&non_function); | 2288 __ bind(&non_function); |
2292 __ movp(args.GetReceiverOperand(), rdi); | 2289 __ movp(args.GetReceiverOperand(), rdi); |
2293 __ Set(rax, argc_); | 2290 __ Set(rax, argc_); |
2294 __ Set(rbx, 0); | 2291 __ Set(rbx, 0); |
2295 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); | 2292 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
2296 Handle<Code> adaptor = | 2293 Handle<Code> adaptor = |
2297 isolate->builtins()->ArgumentsAdaptorTrampoline(); | 2294 isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
2298 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 2295 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
2299 } | 2296 } |
2300 | 2297 |
2301 if (CallAsMethod()) { | 2298 if (CallAsMethod()) { |
2302 __ bind(&wrap); | 2299 __ bind(&wrap); |
2303 // Wrap the receiver and patch it back onto the stack. | 2300 // Wrap the receiver and patch it back onto the stack. |
2304 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | 2301 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
2305 __ Push(rdi); | 2302 __ Push(rdi); |
2306 __ Push(rax); | 2303 __ Push(rax); |
2307 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 2304 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
(...skipping 59 matching lines...) | |
2367 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); | 2364 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); |
2368 __ j(not_equal, &non_function_call); | 2365 __ j(not_equal, &non_function_call); |
2369 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); | 2366 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); |
2370 __ jmp(&do_call); | 2367 __ jmp(&do_call); |
2371 | 2368 |
2372 __ bind(&non_function_call); | 2369 __ bind(&non_function_call); |
2373 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 2370 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
2374 __ bind(&do_call); | 2371 __ bind(&do_call); |
2375 // Set expected number of arguments to zero (not changing rax). | 2372 // Set expected number of arguments to zero (not changing rax). |
2376 __ Set(rbx, 0); | 2373 __ Set(rbx, 0); |
2377 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 2374 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
2378 RelocInfo::CODE_TARGET); | 2375 RelocInfo::CODE_TARGET); |
2379 } | 2376 } |
2380 | 2377 |
2381 | 2378 |
2382 bool CEntryStub::NeedsImmovableCode() { | 2379 bool CEntryStub::NeedsImmovableCode() { |
2383 return false; | 2380 return false; |
2384 } | 2381 } |
2385 | 2382 |
2386 | 2383 |
2387 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2384 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
2388 CEntryStub::GenerateAheadOfTime(isolate); | 2385 CEntryStub::GenerateAheadOfTime(isolate); |
2389 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2386 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
2390 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2387 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
2391 // It is important that the store buffer overflow stubs are generated first. | 2388 // It is important that the store buffer overflow stubs are generated first. |
2392 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 2389 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
2393 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 2390 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
2394 BinaryOpICStub::GenerateAheadOfTime(isolate); | 2391 BinaryOpICStub::GenerateAheadOfTime(isolate); |
2395 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 2392 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
2396 } | 2393 } |
2397 | 2394 |
2398 | 2395 |
2399 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 2396 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
2400 } | 2397 } |
2401 | 2398 |
2402 | 2399 |
2403 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { | 2400 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
2404 CEntryStub stub(1, kDontSaveFPRegs); | 2401 CEntryStub stub(isolate, 1, kDontSaveFPRegs); |
2405 stub.GetCode(isolate); | 2402 stub.GetCode(isolate); |
2406 CEntryStub save_doubles(1, kSaveFPRegs); | 2403 CEntryStub save_doubles(isolate, 1, kSaveFPRegs); |
2407 save_doubles.GetCode(isolate); | 2404 save_doubles.GetCode(isolate); |
2408 } | 2405 } |
2409 | 2406 |
2410 | 2407 |
2411 void CEntryStub::Generate(MacroAssembler* masm) { | 2408 void CEntryStub::Generate(MacroAssembler* masm) { |
2412 // rax: number of arguments including receiver | 2409 // rax: number of arguments including receiver |
2413 // rbx: pointer to C function (C callee-saved) | 2410 // rbx: pointer to C function (C callee-saved) |
2414 // rbp: frame pointer of calling JS frame (restored after C call) | 2411 // rbp: frame pointer of calling JS frame (restored after C call) |
2415 // rsp: stack pointer (restored after C call) | 2412 // rsp: stack pointer (restored after C call) |
2416 // rsi: current context (restored) | 2413 // rsi: current context (restored) |
(...skipping 26 matching lines...) | |
2443 // Call C function. | 2440 // Call C function. |
2444 #ifdef _WIN64 | 2441 #ifdef _WIN64 |
2445 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. | 2442 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. |
2446 // Pass argv and argc as two parameters. The arguments object will | 2443 // Pass argv and argc as two parameters. The arguments object will |
2447 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION(). | 2444 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION(). |
2448 if (result_size_ < 2) { | 2445 if (result_size_ < 2) { |
2449 // Pass a pointer to the Arguments object as the first argument. | 2446 // Pass a pointer to the Arguments object as the first argument. |
2450 // Return result in single register (rax). | 2447 // Return result in single register (rax). |
2451 __ movp(rcx, r14); // argc. | 2448 __ movp(rcx, r14); // argc. |
2452 __ movp(rdx, r15); // argv. | 2449 __ movp(rdx, r15); // argv. |
2453 __ Move(r8, ExternalReference::isolate_address(masm->isolate())); | 2450 __ Move(r8, ExternalReference::isolate_address(isolate())); |
2454 } else { | 2451 } else { |
2455 ASSERT_EQ(2, result_size_); | 2452 ASSERT_EQ(2, result_size_); |
2456 // Pass a pointer to the result location as the first argument. | 2453 // Pass a pointer to the result location as the first argument. |
2457 __ leap(rcx, StackSpaceOperand(2)); | 2454 __ leap(rcx, StackSpaceOperand(2)); |
2458 // Pass a pointer to the Arguments object as the second argument. | 2455 // Pass a pointer to the Arguments object as the second argument. |
2459 __ movp(rdx, r14); // argc. | 2456 __ movp(rdx, r14); // argc. |
2460 __ movp(r8, r15); // argv. | 2457 __ movp(r8, r15); // argv. |
2461 __ Move(r9, ExternalReference::isolate_address(masm->isolate())); | 2458 __ Move(r9, ExternalReference::isolate_address(isolate())); |
2462 } | 2459 } |
2463 | 2460 |
2464 #else // _WIN64 | 2461 #else // _WIN64 |
2465 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9. | 2462 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9. |
2466 __ movp(rdi, r14); // argc. | 2463 __ movp(rdi, r14); // argc. |
2467 __ movp(rsi, r15); // argv. | 2464 __ movp(rsi, r15); // argv. |
2468 __ Move(rdx, ExternalReference::isolate_address(masm->isolate())); | 2465 __ Move(rdx, ExternalReference::isolate_address(isolate())); |
2469 #endif | 2466 #endif |
2470 __ call(rbx); | 2467 __ call(rbx); |
2471 // Result is in rax - do not destroy this register! | 2468 // Result is in rax - do not destroy this register! |
2472 | 2469 |
2473 #ifdef _WIN64 | 2470 #ifdef _WIN64 |
2474 // If return value is on the stack, pop it to registers. | 2471 // If return value is on the stack, pop it to registers. |
2475 if (result_size_ > 1) { | 2472 if (result_size_ > 1) { |
2476 ASSERT_EQ(2, result_size_); | 2473 ASSERT_EQ(2, result_size_); |
2477 // Read result values stored on stack. Result is stored | 2474 // Read result values stored on stack. Result is stored |
2478 // above the four argument mirror slots and the two | 2475 // above the four argument mirror slots and the two |
(...skipping 12 matching lines...) | |
2491 __ int3(); | 2488 __ int3(); |
2492 __ bind(&okay); | 2489 __ bind(&okay); |
2493 } | 2490 } |
2494 | 2491 |
2495 // Check result for exception sentinel. | 2492 // Check result for exception sentinel. |
2496 Label exception_returned; | 2493 Label exception_returned; |
2497 __ CompareRoot(rax, Heap::kExceptionRootIndex); | 2494 __ CompareRoot(rax, Heap::kExceptionRootIndex); |
2498 __ j(equal, &exception_returned); | 2495 __ j(equal, &exception_returned); |
2499 | 2496 |
2500 ExternalReference pending_exception_address( | 2497 ExternalReference pending_exception_address( |
2501 Isolate::kPendingExceptionAddress, masm->isolate()); | 2498 Isolate::kPendingExceptionAddress, isolate()); |
2502 | 2499 |
2503 // Check that there is no pending exception, otherwise we | 2500 // Check that there is no pending exception, otherwise we |
2504 // should have returned the exception sentinel. | 2501 // should have returned the exception sentinel. |
2505 if (FLAG_debug_code) { | 2502 if (FLAG_debug_code) { |
2506 Label okay; | 2503 Label okay; |
2507 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); | 2504 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); |
2508 Operand pending_exception_operand = | 2505 Operand pending_exception_operand = |
2509 masm->ExternalOperand(pending_exception_address); | 2506 masm->ExternalOperand(pending_exception_address); |
2510 __ cmpp(r14, pending_exception_operand); | 2507 __ cmpp(r14, pending_exception_operand); |
2511 __ j(equal, &okay, Label::kNear); | 2508 __ j(equal, &okay, Label::kNear); |
(...skipping 76 matching lines...) | |
2588 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14); | 2585 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14); |
2589 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15); | 2586 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15); |
2590 #endif | 2587 #endif |
2591 | 2588 |
2592 // Set up the roots and smi constant registers. | 2589 // Set up the roots and smi constant registers. |
2593 // Needs to be done before any further smi loads. | 2590 // Needs to be done before any further smi loads. |
2594 __ InitializeSmiConstantRegister(); | 2591 __ InitializeSmiConstantRegister(); |
2595 __ InitializeRootRegister(); | 2592 __ InitializeRootRegister(); |
2596 } | 2593 } |
2597 | 2594 |
2598 Isolate* isolate = masm->isolate(); | |
2599 | |
2600 // Save copies of the top frame descriptor on the stack. | 2595 // Save copies of the top frame descriptor on the stack. |
2601 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate); | 2596 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate()); |
2602 { | 2597 { |
2603 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); | 2598 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); |
2604 __ Push(c_entry_fp_operand); | 2599 __ Push(c_entry_fp_operand); |
2605 } | 2600 } |
2606 | 2601 |
2607 // If this is the outermost JS call, set js_entry_sp value. | 2602 // If this is the outermost JS call, set js_entry_sp value. |
2608 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); | 2603 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); |
2609 __ Load(rax, js_entry_sp); | 2604 __ Load(rax, js_entry_sp); |
2610 __ testp(rax, rax); | 2605 __ testp(rax, rax); |
2611 __ j(not_zero, ¬_outermost_js); | 2606 __ j(not_zero, ¬_outermost_js); |
2612 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); | 2607 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); |
2613 __ movp(rax, rbp); | 2608 __ movp(rax, rbp); |
2614 __ Store(js_entry_sp, rax); | 2609 __ Store(js_entry_sp, rax); |
2615 Label cont; | 2610 Label cont; |
2616 __ jmp(&cont); | 2611 __ jmp(&cont); |
2617 __ bind(¬_outermost_js); | 2612 __ bind(¬_outermost_js); |
2618 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); | 2613 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); |
2619 __ bind(&cont); | 2614 __ bind(&cont); |
2620 | 2615 |
2621 // Jump to a faked try block that does the invoke, with a faked catch | 2616 // Jump to a faked try block that does the invoke, with a faked catch |
2622 // block that sets the pending exception. | 2617 // block that sets the pending exception. |
2623 __ jmp(&invoke); | 2618 __ jmp(&invoke); |
2624 __ bind(&handler_entry); | 2619 __ bind(&handler_entry); |
2625 handler_offset_ = handler_entry.pos(); | 2620 handler_offset_ = handler_entry.pos(); |
2626 // Caught exception: Store result (exception) in the pending exception | 2621 // Caught exception: Store result (exception) in the pending exception |
2627 // field in the JSEnv and return a failure sentinel. | 2622 // field in the JSEnv and return a failure sentinel. |
2628 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, | 2623 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, |
2629 isolate); | 2624 isolate()); |
2630 __ Store(pending_exception, rax); | 2625 __ Store(pending_exception, rax); |
2631 __ LoadRoot(rax, Heap::kExceptionRootIndex); | 2626 __ LoadRoot(rax, Heap::kExceptionRootIndex); |
2632 __ jmp(&exit); | 2627 __ jmp(&exit); |
2633 | 2628 |
2634 // Invoke: Link this frame into the handler chain. There's only one | 2629 // Invoke: Link this frame into the handler chain. There's only one |
2635 // handler block in this code object, so its index is 0. | 2630 // handler block in this code object, so its index is 0. |
2636 __ bind(&invoke); | 2631 __ bind(&invoke); |
2637 __ PushTryHandler(StackHandler::JS_ENTRY, 0); | 2632 __ PushTryHandler(StackHandler::JS_ENTRY, 0); |
2638 | 2633 |
2639 // Clear any pending exceptions. | 2634 // Clear any pending exceptions. |
2640 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); | 2635 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); |
2641 __ Store(pending_exception, rax); | 2636 __ Store(pending_exception, rax); |
2642 | 2637 |
2643 // Fake a receiver (NULL). | 2638 // Fake a receiver (NULL). |
2644 __ Push(Immediate(0)); // receiver | 2639 __ Push(Immediate(0)); // receiver |
2645 | 2640 |
2646 // Invoke the function by calling through JS entry trampoline builtin and | 2641 // Invoke the function by calling through JS entry trampoline builtin and |
2647 // pop the faked function when we return. We load the address from an | 2642 // pop the faked function when we return. We load the address from an |
2648 // external reference instead of inlining the call target address directly | 2643 // external reference instead of inlining the call target address directly |
2649 // in the code, because the builtin stubs may not have been generated yet | 2644 // in the code, because the builtin stubs may not have been generated yet |
2650 // at the time this code is generated. | 2645 // at the time this code is generated. |
2651 if (is_construct) { | 2646 if (is_construct) { |
2652 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, | 2647 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, |
2653 isolate); | 2648 isolate()); |
2654 __ Load(rax, construct_entry); | 2649 __ Load(rax, construct_entry); |
2655 } else { | 2650 } else { |
2656 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate); | 2651 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate()); |
2657 __ Load(rax, entry); | 2652 __ Load(rax, entry); |
2658 } | 2653 } |
2659 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); | 2654 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); |
2660 __ call(kScratchRegister); | 2655 __ call(kScratchRegister); |
2661 | 2656 |
2662 // Unlink this frame from the handler chain. | 2657 // Unlink this frame from the handler chain. |
2663 __ PopTryHandler(); | 2658 __ PopTryHandler(); |
2664 | 2659 |
2665 __ bind(&exit); | 2660 __ bind(&exit); |
2666 // Check if the current stack frame is marked as the outermost JS frame. | 2661 // Check if the current stack frame is marked as the outermost JS frame. |
(...skipping 503 matching lines...) | |
3170 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime); | 3165 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime); |
3171 | 3166 |
3172 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. | 3167 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. |
3173 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset)); | 3168 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset)); |
3174 Label not_original_string; | 3169 Label not_original_string; |
3175 // Shorter than original string's length: an actual substring. | 3170 // Shorter than original string's length: an actual substring. |
3176 __ j(below, ¬_original_string, Label::kNear); | 3171 __ j(below, ¬_original_string, Label::kNear); |
3177 // Longer than original string's length or negative: unsafe arguments. | 3172 // Longer than original string's length or negative: unsafe arguments. |
3178 __ j(above, &runtime); | 3173 __ j(above, &runtime); |
3179 // Return original string. | 3174 // Return original string. |
3180 Counters* counters = masm->isolate()->counters(); | 3175 Counters* counters = isolate()->counters(); |
3181 __ IncrementCounter(counters->sub_string_native(), 1); | 3176 __ IncrementCounter(counters->sub_string_native(), 1); |
3182 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); | 3177 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); |
3183 __ bind(¬_original_string); | 3178 __ bind(¬_original_string); |
3184 | 3179 |
3185 Label single_char; | 3180 Label single_char; |
3186 __ SmiCompare(rcx, Smi::FromInt(1)); | 3181 __ SmiCompare(rcx, Smi::FromInt(1)); |
3187 __ j(equal, &single_char); | 3182 __ j(equal, &single_char); |
3188 | 3183 |
3189 __ SmiToInteger32(rcx, rcx); | 3184 __ SmiToInteger32(rcx, rcx); |
3190 | 3185 |
(...skipping 324 matching lines...) | |
3515 | 3510 |
3516 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 3511 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
3517 __ movp(rdx, args.GetArgumentOperand(0)); // left | 3512 __ movp(rdx, args.GetArgumentOperand(0)); // left |
3518 __ movp(rax, args.GetArgumentOperand(1)); // right | 3513 __ movp(rax, args.GetArgumentOperand(1)); // right |
3519 | 3514 |
3520 // Check for identity. | 3515 // Check for identity. |
3521 Label not_same; | 3516 Label not_same; |
3522 __ cmpp(rdx, rax); | 3517 __ cmpp(rdx, rax); |
3523 __ j(not_equal, ¬_same, Label::kNear); | 3518 __ j(not_equal, ¬_same, Label::kNear); |
3524 __ Move(rax, Smi::FromInt(EQUAL)); | 3519 __ Move(rax, Smi::FromInt(EQUAL)); |
3525 Counters* counters = masm->isolate()->counters(); | 3520 Counters* counters = isolate()->counters(); |
3526 __ IncrementCounter(counters->string_compare_native(), 1); | 3521 __ IncrementCounter(counters->string_compare_native(), 1); |
3527 __ ret(2 * kPointerSize); | 3522 __ ret(2 * kPointerSize); |
3528 | 3523 |
3529 __ bind(¬_same); | 3524 __ bind(¬_same); |
3530 | 3525 |
3531 // Check that both are sequential ASCII strings. | 3526 // Check that both are sequential ASCII strings. |
3532 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); | 3527 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); |
3533 | 3528 |
3534 // Inline comparison of ASCII strings. | 3529 // Inline comparison of ASCII strings. |
3535 __ IncrementCounter(counters->string_compare_native(), 1); | 3530 __ IncrementCounter(counters->string_compare_native(), 1); |
3536 // Drop arguments from the stack | 3531 // Drop arguments from the stack |
3537 __ PopReturnAddressTo(rcx); | 3532 __ PopReturnAddressTo(rcx); |
3538 __ addp(rsp, Immediate(2 * kPointerSize)); | 3533 __ addp(rsp, Immediate(2 * kPointerSize)); |
3539 __ PushReturnAddressFrom(rcx); | 3534 __ PushReturnAddressFrom(rcx); |
3540 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); | 3535 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); |
3541 | 3536 |
3542 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 3537 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
3543 // tagged as a small integer. | 3538 // tagged as a small integer. |
3544 __ bind(&runtime); | 3539 __ bind(&runtime); |
3545 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1); | 3540 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1); |
3546 } | 3541 } |
3547 | 3542 |
3548 | 3543 |
3549 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 3544 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
3550 // ----------- S t a t e ------------- | 3545 // ----------- S t a t e ------------- |
3551 // -- rdx : left | 3546 // -- rdx : left |
3552 // -- rax : right | 3547 // -- rax : right |
3553 // -- rsp[0] : return address | 3548 // -- rsp[0] : return address |
3554 // ----------------------------------- | 3549 // ----------------------------------- |
3555 Isolate* isolate = masm->isolate(); | |
3556 | |
Michael Starzinger
2014/04/23 13:58:21
nit: Can we get the empty newline back for readability?
Sven Panne
2014/04/24 06:05:34
Done.
| |
3557 // Load rcx with the allocation site. We stick an undefined dummy value here | 3550 // Load rcx with the allocation site. We stick an undefined dummy value here |
3558 // and replace it with the real allocation site later when we instantiate this | 3551 // and replace it with the real allocation site later when we instantiate this |
3559 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). | 3552 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). |
3560 __ Move(rcx, handle(isolate->heap()->undefined_value())); | 3553 __ Move(rcx, handle(isolate()->heap()->undefined_value())); |
3561 | 3554 |
3562 // Make sure that we actually patched the allocation site. | 3555 // Make sure that we actually patched the allocation site. |
3563 if (FLAG_debug_code) { | 3556 if (FLAG_debug_code) { |
3564 __ testb(rcx, Immediate(kSmiTagMask)); | 3557 __ testb(rcx, Immediate(kSmiTagMask)); |
3565 __ Assert(not_equal, kExpectedAllocationSite); | 3558 __ Assert(not_equal, kExpectedAllocationSite); |
3566 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), | 3559 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), |
3567 isolate->factory()->allocation_site_map()); | 3560 isolate()->factory()->allocation_site_map()); |
3568 __ Assert(equal, kExpectedAllocationSite); | 3561 __ Assert(equal, kExpectedAllocationSite); |
3569 } | 3562 } |
3570 | 3563 |
3571 // Tail call into the stub that handles binary operations with allocation | 3564 // Tail call into the stub that handles binary operations with allocation |
3572 // sites. | 3565 // sites. |
3573 BinaryOpWithAllocationSiteStub stub(state_); | 3566 BinaryOpWithAllocationSiteStub stub(isolate(), state_); |
3574 __ TailCallStub(&stub); | 3567 __ TailCallStub(&stub); |
3575 } | 3568 } |
3576 | 3569 |
3577 | 3570 |
3578 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 3571 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
3579 ASSERT(state_ == CompareIC::SMI); | 3572 ASSERT(state_ == CompareIC::SMI); |
3580 Label miss; | 3573 Label miss; |
3581 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); | 3574 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); |
3582 | 3575 |
3583 if (GetCondition() == equal) { | 3576 if (GetCondition() == equal) { |
(...skipping 25 matching lines...) | |
3609 if (left_ == CompareIC::SMI) { | 3602 if (left_ == CompareIC::SMI) { |
3610 __ JumpIfNotSmi(rdx, &miss); | 3603 __ JumpIfNotSmi(rdx, &miss); |
3611 } | 3604 } |
3612 if (right_ == CompareIC::SMI) { | 3605 if (right_ == CompareIC::SMI) { |
3613 __ JumpIfNotSmi(rax, &miss); | 3606 __ JumpIfNotSmi(rax, &miss); |
3614 } | 3607 } |
3615 | 3608 |
3616 // Load left and right operand. | 3609 // Load left and right operand. |
3617 Label done, left, left_smi, right_smi; | 3610 Label done, left, left_smi, right_smi; |
3618 __ JumpIfSmi(rax, &right_smi, Label::kNear); | 3611 __ JumpIfSmi(rax, &right_smi, Label::kNear); |
3619 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map()); | 3612 __ CompareMap(rax, isolate()->factory()->heap_number_map()); |
3620 __ j(not_equal, &maybe_undefined1, Label::kNear); | 3613 __ j(not_equal, &maybe_undefined1, Label::kNear); |
3621 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | 3614 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
3622 __ jmp(&left, Label::kNear); | 3615 __ jmp(&left, Label::kNear); |
3623 __ bind(&right_smi); | 3616 __ bind(&right_smi); |
3624 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. | 3617 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. |
3625 __ Cvtlsi2sd(xmm1, rcx); | 3618 __ Cvtlsi2sd(xmm1, rcx); |
3626 | 3619 |
3627 __ bind(&left); | 3620 __ bind(&left); |
3628 __ JumpIfSmi(rdx, &left_smi, Label::kNear); | 3621 __ JumpIfSmi(rdx, &left_smi, Label::kNear); |
3629 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map()); | 3622 __ CompareMap(rdx, isolate()->factory()->heap_number_map()); |
3630 __ j(not_equal, &maybe_undefined2, Label::kNear); | 3623 __ j(not_equal, &maybe_undefined2, Label::kNear); |
3631 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); | 3624 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); |
3632 __ jmp(&done); | 3625 __ jmp(&done); |
3633 __ bind(&left_smi); | 3626 __ bind(&left_smi); |
3634 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. | 3627 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. |
3635 __ Cvtlsi2sd(xmm0, rcx); | 3628 __ Cvtlsi2sd(xmm0, rcx); |
3636 | 3629 |
3637 __ bind(&done); | 3630 __ bind(&done); |
3638 // Compare operands | 3631 // Compare operands |
3639 __ ucomisd(xmm0, xmm1); | 3632 __ ucomisd(xmm0, xmm1); |
3640 | 3633 |
3641 // Don't base result on EFLAGS when a NaN is involved. | 3634 // Don't base result on EFLAGS when a NaN is involved. |
3642 __ j(parity_even, &unordered, Label::kNear); | 3635 __ j(parity_even, &unordered, Label::kNear); |
3643 | 3636 |
3644 // Return a result of -1, 0, or 1, based on EFLAGS. | 3637 // Return a result of -1, 0, or 1, based on EFLAGS. |
3645 // Performing mov, because xor would destroy the flag register. | 3638 // Performing mov, because xor would destroy the flag register. |
3646 __ movl(rax, Immediate(0)); | 3639 __ movl(rax, Immediate(0)); |
3647 __ movl(rcx, Immediate(0)); | 3640 __ movl(rcx, Immediate(0)); |
3648 __ setcc(above, rax); // Add one to zero if carry clear and not equal. | 3641 __ setcc(above, rax); // Add one to zero if carry clear and not equal. |
3649 __ sbbp(rax, rcx); // Subtract one if below (aka. carry set). | 3642 __ sbbp(rax, rcx); // Subtract one if below (aka. carry set). |
3650 __ ret(0); | 3643 __ ret(0); |
3651 | 3644 |
3652 __ bind(&unordered); | 3645 __ bind(&unordered); |
3653 __ bind(&generic_stub); | 3646 __ bind(&generic_stub); |
3654 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, | 3647 ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC, |
3655 CompareIC::GENERIC); | 3648 CompareIC::GENERIC); |
3656 __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 3649 __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); |
3657 | 3650 |
3658 __ bind(&maybe_undefined1); | 3651 __ bind(&maybe_undefined1); |
3659 if (Token::IsOrderedRelationalCompareOp(op_)) { | 3652 if (Token::IsOrderedRelationalCompareOp(op_)) { |
3660 __ Cmp(rax, masm->isolate()->factory()->undefined_value()); | 3653 __ Cmp(rax, isolate()->factory()->undefined_value()); |
3661 __ j(not_equal, &miss); | 3654 __ j(not_equal, &miss); |
3662 __ JumpIfSmi(rdx, &unordered); | 3655 __ JumpIfSmi(rdx, &unordered); |
3663 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); | 3656 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); |
3664 __ j(not_equal, &maybe_undefined2, Label::kNear); | 3657 __ j(not_equal, &maybe_undefined2, Label::kNear); |
3665 __ jmp(&unordered); | 3658 __ jmp(&unordered); |
3666 } | 3659 } |
3667 | 3660 |
3668 __ bind(&maybe_undefined2); | 3661 __ bind(&maybe_undefined2); |
3669 if (Token::IsOrderedRelationalCompareOp(op_)) { | 3662 if (Token::IsOrderedRelationalCompareOp(op_)) { |
3670 __ Cmp(rdx, masm->isolate()->factory()->undefined_value()); | 3663 __ Cmp(rdx, isolate()->factory()->undefined_value()); |
3671 __ j(equal, &unordered); | 3664 __ j(equal, &unordered); |
3672 } | 3665 } |
3673 | 3666 |
3674 __ bind(&miss); | 3667 __ bind(&miss); |
3675 GenerateMiss(masm); | 3668 GenerateMiss(masm); |
3676 } | 3669 } |
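For reference, the ucomisd/setcc/sbb sequence above encodes the comparison result as -1, 0, or 1 without branching: after ucomisd, CF is set when the left operand is below the right, setcc(above) produces 1 only when neither CF nor ZF is set, and the sbb then subtracts CF back out. NaN operands raise the parity flag and are routed to the generic stub instead. A minimal, standalone C++ sketch of the same encoding (portable arithmetic instead of EFLAGS; the function name and the "unordered" sentinel are made up for illustration, this is not V8 code):

#include <cmath>
#include <cstdio>

// Returns -1, 0, or 1 for left < right, left == right, left > right.
// NaN operands are "unordered" and handled separately, just as the stub
// defers to the generic CompareIC when the parity flag signals a NaN.
int ThreeWayCompare(double left, double right) {
  if (std::isnan(left) || std::isnan(right)) {
    return 2;  // sentinel for "unordered" in this sketch only
  }
  // (left > right) - (left < right) is the branch-free -1/0/1 encoding
  // that the setcc/sbb pair computes from the flags.
  return (left > right) - (left < right);
}

int main() {
  std::printf("%d %d %d %d\n",
              ThreeWayCompare(1.0, 2.0),              // -1
              ThreeWayCompare(2.0, 2.0),              //  0
              ThreeWayCompare(3.0, 2.0),              //  1
              ThreeWayCompare(0.0, std::nan("")));    //  2 (unordered)
  return 0;
}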
3677 | 3670 |
3678 | 3671 |
3679 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { | 3672 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { |
3680 ASSERT(state_ == CompareIC::INTERNALIZED_STRING); | 3673 ASSERT(state_ == CompareIC::INTERNALIZED_STRING); |
(...skipping 205 matching lines...) | |
3886 | 3879 |
3887 __ bind(&miss); | 3880 __ bind(&miss); |
3888 GenerateMiss(masm); | 3881 GenerateMiss(masm); |
3889 } | 3882 } |
3890 | 3883 |
3891 | 3884 |
3892 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 3885 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
3893 { | 3886 { |
3894 // Call the runtime system in a fresh internal frame. | 3887 // Call the runtime system in a fresh internal frame. |
3895 ExternalReference miss = | 3888 ExternalReference miss = |
3896 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); | 3889 ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate()); |
3897 | 3890 |
3898 FrameScope scope(masm, StackFrame::INTERNAL); | 3891 FrameScope scope(masm, StackFrame::INTERNAL); |
3899 __ Push(rdx); | 3892 __ Push(rdx); |
3900 __ Push(rax); | 3893 __ Push(rax); |
3901 __ Push(rdx); | 3894 __ Push(rdx); |
3902 __ Push(rax); | 3895 __ Push(rax); |
3903 __ Push(Smi::FromInt(op_)); | 3896 __ Push(Smi::FromInt(op_)); |
3904 __ CallExternalReference(miss, 3); | 3897 __ CallExternalReference(miss, 3); |
3905 | 3898 |
3906 // Compute the entry point of the rewritten stub. | 3899 // Compute the entry point of the rewritten stub. |
(...skipping 52 matching lines...) | |
3959 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); | 3952 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); |
3960 __ j(equal, &good, Label::kNear); | 3953 __ j(equal, &good, Label::kNear); |
3961 | 3954 |
3962 // Check if the entry name is not a unique name. | 3955 // Check if the entry name is not a unique name. |
3963 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); | 3956 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); |
3964 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), | 3957 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), |
3965 miss); | 3958 miss); |
3966 __ bind(&good); | 3959 __ bind(&good); |
3967 } | 3960 } |
3968 | 3961 |
3969 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); | 3962 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0, |
3963 NEGATIVE_LOOKUP); | |
3970 __ Push(Handle<Object>(name)); | 3964 __ Push(Handle<Object>(name)); |
3971 __ Push(Immediate(name->Hash())); | 3965 __ Push(Immediate(name->Hash())); |
3972 __ CallStub(&stub); | 3966 __ CallStub(&stub); |
3973 __ testp(r0, r0); | 3967 __ testp(r0, r0); |
3974 __ j(not_zero, miss); | 3968 __ j(not_zero, miss); |
3975 __ jmp(done); | 3969 __ jmp(done); |
3976 } | 3970 } |
3977 | 3971 |
3978 | 3972 |
3979 // Probe the name dictionary in the |elements| register. Jump to the | 3973 // Probe the name dictionary in the |elements| register. Jump to the |
(...skipping 29 matching lines...) | |
4009 // Scale the index by multiplying by the entry size. | 4003 // Scale the index by multiplying by the entry size. |
4010 ASSERT(NameDictionary::kEntrySize == 3); | 4004 ASSERT(NameDictionary::kEntrySize == 3); |
4011 __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 | 4005 __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 |
4012 | 4006 |
4013 // Check if the key is identical to the name. | 4007 // Check if the key is identical to the name. |
4014 __ cmpp(name, Operand(elements, r1, times_pointer_size, | 4008 __ cmpp(name, Operand(elements, r1, times_pointer_size, |
4015 kElementsStartOffset - kHeapObjectTag)); | 4009 kElementsStartOffset - kHeapObjectTag)); |
4016 __ j(equal, done); | 4010 __ j(equal, done); |
4017 } | 4011 } |
4018 | 4012 |
4019 NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP); | 4013 NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1, |
4014 POSITIVE_LOOKUP); | |
4020 __ Push(name); | 4015 __ Push(name); |
4021 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset)); | 4016 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset)); |
4022 __ shrl(r0, Immediate(Name::kHashShift)); | 4017 __ shrl(r0, Immediate(Name::kHashShift)); |
4023 __ Push(r0); | 4018 __ Push(r0); |
4024 __ CallStub(&stub); | 4019 __ CallStub(&stub); |
4025 | 4020 |
4026 __ testp(r0, r0); | 4021 __ testp(r0, r0); |
4027 __ j(zero, miss); | 4022 __ j(zero, miss); |
4028 __ jmp(done); | 4023 __ jmp(done); |
4029 } | 4024 } |
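The leap(r1, Operand(r1, r1, times_2, 0)) above is the usual shift-and-add way to multiply the probe index by NameDictionary::kEntrySize (3) without an imul: r1 * 3 = r1 + r1 * 2. A small standalone C++ sketch of the same address arithmetic; the layout constants here are assumptions that merely mirror what this code relies on, not the real V8 header values:

#include <cstdio>

// Illustrative layout constants (assumptions for this sketch only).
constexpr int kEntrySize = 3;           // slots per dictionary entry
constexpr int kElementsStartIndex = 3;  // header slots before the entries
constexpr int kPointerSize = 8;         // x64

// Byte offset of the key slot of entry |index| within the elements array,
// computed the way the stub does: index * 3 via shift-and-add (lea), then
// scaled by the pointer size when forming the memory operand.
int KeySlotOffset(int index) {
  static_assert(kEntrySize == 3, "the shift-and-add below assumes 3 slots");
  int slot = index + (index << 1);  // index * kEntrySize, as lea r1,[r1+r1*2]
  return (kElementsStartIndex + slot) * kPointerSize;
}

int main() {
  std::printf("entry 0 -> %d, entry 5 -> %d\n", KeySlotOffset(0), KeySlotOffset(5));
  return 0;
}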
(...skipping 40 matching lines...) | |
4070 // Scale the index by multiplying by the entry size. | 4065 // Scale the index by multiplying by the entry size. |
4071 ASSERT(NameDictionary::kEntrySize == 3); | 4066 ASSERT(NameDictionary::kEntrySize == 3); |
4072 __ leap(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 4067 __ leap(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. |
4073 | 4068 |
4074 // Finding undefined at this entry means the name is not in the dictionary. | 4069 // Finding undefined at this entry means the name is not in the dictionary. |
4075 __ movp(scratch, Operand(dictionary_, | 4070 __ movp(scratch, Operand(dictionary_, |
4076 index_, | 4071 index_, |
4077 times_pointer_size, | 4072 times_pointer_size, |
4078 kElementsStartOffset - kHeapObjectTag)); | 4073 kElementsStartOffset - kHeapObjectTag)); |
4079 | 4074 |
4080 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); | 4075 __ Cmp(scratch, isolate()->factory()->undefined_value()); |
4081 __ j(equal, ¬_in_dictionary); | 4076 __ j(equal, ¬_in_dictionary); |
4082 | 4077 |
4083 // Stop if we found the property. | 4078 // Stop if we found the property. |
4084 __ cmpp(scratch, args.GetArgumentOperand(0)); | 4079 __ cmpp(scratch, args.GetArgumentOperand(0)); |
4085 __ j(equal, &in_dictionary); | 4080 __ j(equal, &in_dictionary); |
4086 | 4081 |
4087 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 4082 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
4088 // If we hit a key that is not a unique name during negative | 4083 // If we hit a key that is not a unique name during negative |
4089 // lookup we have to bailout as this key might be equal to the | 4084 // lookup we have to bailout as this key might be equal to the |
4090 // key we are looking for. | 4085 // key we are looking for. |
(...skipping 22 matching lines...) | |
4113 | 4108 |
4114 __ bind(¬_in_dictionary); | 4109 __ bind(¬_in_dictionary); |
4115 __ movp(scratch, Immediate(0)); | 4110 __ movp(scratch, Immediate(0)); |
4116 __ Drop(1); | 4111 __ Drop(1); |
4117 __ ret(2 * kPointerSize); | 4112 __ ret(2 * kPointerSize); |
4118 } | 4113 } |
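The probe loop this stub emits is ordinary open addressing: each probe masks the offset-adjusted hash by the capacity, loads the key slot of that entry, and stops on either the undefined sentinel (not found) or a key match (found). A compact C++ sketch of that control flow, using placeholder key types and probe offsets rather than V8's real ones, and comparing keys by identity the way internalized names allow:

#include <cstdint>
#include <optional>
#include <vector>

// Placeholder "key" type; nullptr stands in for the undefined sentinel.
using Key = const char*;

struct Dictionary {
  std::vector<Key> keys;  // one key slot per entry; nullptr == empty
  uint32_t Capacity() const { return static_cast<uint32_t>(keys.size()); }
};

// Illustrative probe offsets; the stub uses its own fixed probe schedule.
constexpr uint32_t kProbeOffsets[] = {0, 1, 3, 6, 10, 15, 21, 28};

// Returns the entry index if |name| is present, nullopt if a probe hits the
// undefined sentinel first (the "not_in_dictionary" exit in the stub).
std::optional<uint32_t> Probe(const Dictionary& dict, Key name, uint32_t hash) {
  const uint32_t mask = dict.Capacity() - 1;  // capacity is a power of two
  for (uint32_t probe : kProbeOffsets) {
    uint32_t index = (hash + probe) & mask;
    Key entry = dict.keys[index];
    if (entry == nullptr) return std::nullopt;  // undefined => not contained
    if (entry == name) return index;            // found the property
    // Otherwise keep probing; during negative lookup the stub also bails out
    // here if the colliding key is not a unique name.
  }
  return std::nullopt;  // out of probes; the real stub handles this case separately
}

int main() {
  Dictionary dict{std::vector<Key>(8, nullptr)};
  Key name = "x";  // a single key object, compared by identity
  dict.keys[5] = name;
  bool ok = Probe(dict, name, 5).has_value() && !Probe(dict, "y", 2).has_value();
  return ok ? 0 : 1;
}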
4119 | 4114 |
4120 | 4115 |
4121 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 4116 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
4122 Isolate* isolate) { | 4117 Isolate* isolate) { |
4123 StoreBufferOverflowStub stub1(kDontSaveFPRegs); | 4118 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs); |
4124 stub1.GetCode(isolate); | 4119 stub1.GetCode(isolate); |
4125 StoreBufferOverflowStub stub2(kSaveFPRegs); | 4120 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); |
4126 stub2.GetCode(isolate); | 4121 stub2.GetCode(isolate); |
4127 } | 4122 } |
4128 | 4123 |
4129 | 4124 |
4130 bool CodeStub::CanUseFPRegisters() { | 4125 bool CodeStub::CanUseFPRegisters() { |
4131 return true; // Always have SSE2 on x64. | 4126 return true; // Always have SSE2 on x64. |
4132 } | 4127 } |
4133 | 4128 |
4134 | 4129 |
4135 // Takes the input in 3 registers: address_, value_, and object_. A pointer to | 4130 // Takes the input in 3 registers: address_, value_, and object_. A pointer to |
(...skipping 79 matching lines...) | |
4215 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); | 4210 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); |
4216 Register address = | 4211 Register address = |
4217 arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address(); | 4212 arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address(); |
4218 ASSERT(!address.is(regs_.object())); | 4213 ASSERT(!address.is(regs_.object())); |
4219 ASSERT(!address.is(arg_reg_1)); | 4214 ASSERT(!address.is(arg_reg_1)); |
4220 __ Move(address, regs_.address()); | 4215 __ Move(address, regs_.address()); |
4221 __ Move(arg_reg_1, regs_.object()); | 4216 __ Move(arg_reg_1, regs_.object()); |
4222 // TODO(gc) Can we just set address arg2 in the beginning? | 4217 // TODO(gc) Can we just set address arg2 in the beginning? |
4223 __ Move(arg_reg_2, address); | 4218 __ Move(arg_reg_2, address); |
4224 __ LoadAddress(arg_reg_3, | 4219 __ LoadAddress(arg_reg_3, |
4225 ExternalReference::isolate_address(masm->isolate())); | 4220 ExternalReference::isolate_address(isolate())); |
4226 int argument_count = 3; | 4221 int argument_count = 3; |
4227 | 4222 |
4228 AllowExternalCallThatCantCauseGC scope(masm); | 4223 AllowExternalCallThatCantCauseGC scope(masm); |
4229 __ PrepareCallCFunction(argument_count); | 4224 __ PrepareCallCFunction(argument_count); |
4230 __ CallCFunction( | 4225 __ CallCFunction( |
4231 ExternalReference::incremental_marking_record_write_function( | 4226 ExternalReference::incremental_marking_record_write_function(isolate()), |
4232 masm->isolate()), | |
4233 argument_count); | 4227 argument_count); |
4234 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); | 4228 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); |
4235 } | 4229 } |
4236 | 4230 |
4237 | 4231 |
4238 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( | 4232 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( |
4239 MacroAssembler* masm, | 4233 MacroAssembler* masm, |
4240 OnNoNeedToInformIncrementalMarker on_no_need, | 4234 OnNoNeedToInformIncrementalMarker on_no_need, |
4241 Mode mode) { | 4235 Mode mode) { |
4242 Label on_black; | 4236 Label on_black; |
(...skipping 157 matching lines...) | |
4400 __ StoreNumberToDoubleElements(rax, | 4394 __ StoreNumberToDoubleElements(rax, |
4401 r9, | 4395 r9, |
4402 r11, | 4396 r11, |
4403 xmm0, | 4397 xmm0, |
4404 &slow_elements); | 4398 &slow_elements); |
4405 __ ret(0); | 4399 __ ret(0); |
4406 } | 4400 } |
4407 | 4401 |
4408 | 4402 |
4409 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 4403 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
4410 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); | 4404 CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); |
4411 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 4405 __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET); |
4412 int parameter_count_offset = | 4406 int parameter_count_offset = |
4413 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 4407 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
4414 __ movp(rbx, MemOperand(rbp, parameter_count_offset)); | 4408 __ movp(rbx, MemOperand(rbp, parameter_count_offset)); |
4415 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 4409 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
4416 __ PopReturnAddressTo(rcx); | 4410 __ PopReturnAddressTo(rcx); |
4417 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE | 4411 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE |
4418 ? kPointerSize | 4412 ? kPointerSize |
4419 : 0; | 4413 : 0; |
4420 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); | 4414 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); |
4421 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. | 4415 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. |
4422 } | 4416 } |
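After the CEntry call returns, the trampoline drops the caller's stack parameters: the leap advances rsp by parameter_count * kPointerSize, plus one extra slot in JS function stub mode, and then control returns through the address saved in rcx. A tiny standalone C++ sketch of that arithmetic; the constants and the assumption that the extra slot is the receiver are illustrative, not taken from the real headers:

#include <cstdio>

constexpr int kPointerSize = 8;  // x64

enum StubFunctionMode { NOT_JS_FUNCTION_STUB_MODE, JS_FUNCTION_STUB_MODE };

// Number of bytes the leap(rsp, ...) above effectively adds to rsp.
int StackBytesToDrop(int parameter_count, StubFunctionMode mode) {
  // Extra slot dropped in JS function stub mode (assumed to be the receiver).
  int additional = (mode == JS_FUNCTION_STUB_MODE) ? kPointerSize : 0;
  return parameter_count * kPointerSize + additional;
}

int main() {
  // e.g. 3 stack parameters plus the extra slot in JS function stub mode.
  std::printf("%d bytes\n", StackBytesToDrop(3, JS_FUNCTION_STUB_MODE));  // 32
  return 0;
}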
4423 | 4417 |
4424 | 4418 |
4425 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4419 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4426 if (masm->isolate()->function_entry_hook() != NULL) { | 4420 if (masm->isolate()->function_entry_hook() != NULL) { |
4427 ProfileEntryHookStub stub; | 4421 ProfileEntryHookStub stub(masm->isolate()); |
4428 masm->CallStub(&stub); | 4422 masm->CallStub(&stub); |
4429 } | 4423 } |
4430 } | 4424 } |
4431 | 4425 |
4432 | 4426 |
4433 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 4427 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
4434 // This stub can be called from essentially anywhere, so it needs to save | 4428 // This stub can be called from essentially anywhere, so it needs to save |
4435 // all volatile and callee-save registers. | 4429 // all volatile and callee-save registers. |
4436 const size_t kNumSavedRegisters = 2; | 4430 const size_t kNumSavedRegisters = 2; |
4437 __ pushq(arg_reg_1); | 4431 __ pushq(arg_reg_1); |
4438 __ pushq(arg_reg_2); | 4432 __ pushq(arg_reg_2); |
4439 | 4433 |
4440 // Calculate the original stack pointer and store it in the second arg. | 4434 // Calculate the original stack pointer and store it in the second arg. |
4441 __ leap(arg_reg_2, | 4435 __ leap(arg_reg_2, |
4442 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize)); | 4436 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize)); |
4443 | 4437 |
4444 // Calculate the function address and store it in the first arg. | 4438 // Calculate the function address and store it in the first arg. |
4445 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); | 4439 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); |
4446 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); | 4440 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); |
4447 | 4441 |
4448 // Save the remainder of the volatile registers. | 4442 // Save the remainder of the volatile registers. |
4449 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); | 4443 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); |
4450 | 4444 |
4451 // Call the entry hook function. | 4445 // Call the entry hook function. |
4452 __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), | 4446 __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()), |
4453 Assembler::RelocInfoNone()); | 4447 Assembler::RelocInfoNone()); |
4454 | 4448 |
4455 AllowExternalCallThatCantCauseGC scope(masm); | 4449 AllowExternalCallThatCantCauseGC scope(masm); |
4456 | 4450 |
4457 const int kArgumentCount = 2; | 4451 const int kArgumentCount = 2; |
4458 __ PrepareCallCFunction(kArgumentCount); | 4452 __ PrepareCallCFunction(kArgumentCount); |
4459 __ CallCFunction(rax, kArgumentCount); | 4453 __ CallCFunction(rax, kArgumentCount); |
4460 | 4454 |
4461 // Restore volatile regs. | 4455 // Restore volatile regs. |
4462 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); | 4456 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); |
4463 __ popq(arg_reg_2); | 4457 __ popq(arg_reg_2); |
4464 __ popq(arg_reg_1); | 4458 __ popq(arg_reg_1); |
4465 | 4459 |
4466 __ Ret(); | 4460 __ Ret(); |
4467 } | 4461 } |
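The two address computations above recover (1) the stack pointer as it was before the stub pushed anything, by skipping the two saved argument registers and the pushed return address, and (2) the address of the call instruction that entered the stub, by backing the return address up by the short-call instruction length. A standalone C++ sketch of the same offset arithmetic; the sizes are the x64 values this file assumes, and the struct and helper names are invented for the sketch:

#include <cstdint>
#include <cstdio>

constexpr std::uintptr_t kRegisterSize = 8;
constexpr std::uintptr_t kPCOnStackSize = 8;
constexpr std::uintptr_t kShortCallInstructionLength = 5;  // e8 + rel32
constexpr std::uintptr_t kNumSavedRegisters = 2;           // arg_reg_1, arg_reg_2

struct EntryHookArgs {
  std::uintptr_t function_address;  // first argument to the entry hook
  std::uintptr_t stack_pointer;     // second argument: caller's original rsp
};

EntryHookArgs ComputeEntryHookArgs(std::uintptr_t rsp_after_pushes,
                                   std::uintptr_t return_address) {
  EntryHookArgs args;
  // Original rsp: skip the two saved registers and the pushed return address.
  args.stack_pointer =
      rsp_after_pushes + kNumSavedRegisters * kRegisterSize + kPCOnStackSize;
  // The call site sits kShortCallInstructionLength bytes before the return address.
  args.function_address = return_address - kShortCallInstructionLength;
  return args;
}

int main() {
  EntryHookArgs a = ComputeEntryHookArgs(0x7fff0000, 0x401005);
  std::printf("sp=%#jx call=%#jx\n",
              static_cast<std::uintmax_t>(a.stack_pointer),
              static_cast<std::uintmax_t>(a.function_address));
  return 0;
}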
4468 | 4462 |
4469 | 4463 |
4470 template<class T> | 4464 template<class T> |
4471 static void CreateArrayDispatch(MacroAssembler* masm, | 4465 static void CreateArrayDispatch(MacroAssembler* masm, |
4472 AllocationSiteOverrideMode mode) { | 4466 AllocationSiteOverrideMode mode) { |
4473 if (mode == DISABLE_ALLOCATION_SITES) { | 4467 if (mode == DISABLE_ALLOCATION_SITES) { |
4474 T stub(GetInitialFastElementsKind(), mode); | 4468 T stub(masm->isolate(), GetInitialFastElementsKind(), mode); |
4475 __ TailCallStub(&stub); | 4469 __ TailCallStub(&stub); |
4476 } else if (mode == DONT_OVERRIDE) { | 4470 } else if (mode == DONT_OVERRIDE) { |
4477 int last_index = GetSequenceIndexFromFastElementsKind( | 4471 int last_index = GetSequenceIndexFromFastElementsKind( |
4478 TERMINAL_FAST_ELEMENTS_KIND); | 4472 TERMINAL_FAST_ELEMENTS_KIND); |
4479 for (int i = 0; i <= last_index; ++i) { | 4473 for (int i = 0; i <= last_index; ++i) { |
4480 Label next; | 4474 Label next; |
4481 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 4475 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
4482 __ cmpl(rdx, Immediate(kind)); | 4476 __ cmpl(rdx, Immediate(kind)); |
4483 __ j(not_equal, &next); | 4477 __ j(not_equal, &next); |
4484 T stub(kind); | 4478 T stub(masm->isolate(), kind); |
4485 __ TailCallStub(&stub); | 4479 __ TailCallStub(&stub); |
4486 __ bind(&next); | 4480 __ bind(&next); |
4487 } | 4481 } |
4488 | 4482 |
4489 // If we reached this point there is a problem. | 4483 // If we reached this point there is a problem. |
4490 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 4484 __ Abort(kUnexpectedElementsKindInArrayConstructor); |
4491 } else { | 4485 } else { |
4492 UNREACHABLE(); | 4486 UNREACHABLE(); |
4493 } | 4487 } |
4494 } | 4488 } |
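CreateArrayDispatch emits a simple linear dispatch: with allocation sites disabled there is only one stub to tail-call, otherwise it compares the elements kind in rdx against each kind in sequence order, tail-calls the first match, and aborts if nothing matched. A standalone C++ sketch of that shape, with a stand-in enum and handlers instead of real stubs (the names are illustrative):

#include <cstdio>
#include <cstdlib>

// Stand-ins for the dispatch inputs (illustrative, not the real V8 enums).
enum ElementsKind { FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS,
                    FAST_ELEMENTS, FAST_HOLEY_ELEMENTS, kNumKinds };
enum AllocationSiteOverrideMode { DONT_OVERRIDE, DISABLE_ALLOCATION_SITES };

void TailCallStubForKind(ElementsKind kind) {
  std::printf("tail-calling constructor stub for kind %d\n", kind);
}

// Mirrors CreateArrayDispatch: one fixed stub when allocation sites are
// disabled; otherwise try each kind in sequence order and call the first
// match, aborting if none matches.
void CreateArrayDispatch(ElementsKind kind_in_rdx,
                         AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    TailCallStubForKind(FAST_SMI_ELEMENTS);  // the "initial" kind in this sketch
    return;
  }
  for (int i = 0; i < kNumKinds; ++i) {
    if (kind_in_rdx == static_cast<ElementsKind>(i)) {
      TailCallStubForKind(kind_in_rdx);
      return;
    }
  }
  std::abort();  // kUnexpectedElementsKindInArrayConstructor
}

int main() {
  CreateArrayDispatch(FAST_HOLEY_ELEMENTS, DONT_OVERRIDE);
  return 0;
}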
(...skipping 28 matching lines...) | |
4523 // look at the first argument | 4517 // look at the first argument |
4524 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 4518 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
4525 __ movp(rcx, args.GetArgumentOperand(0)); | 4519 __ movp(rcx, args.GetArgumentOperand(0)); |
4526 __ testp(rcx, rcx); | 4520 __ testp(rcx, rcx); |
4527 __ j(zero, &normal_sequence); | 4521 __ j(zero, &normal_sequence); |
4528 | 4522 |
4529 if (mode == DISABLE_ALLOCATION_SITES) { | 4523 if (mode == DISABLE_ALLOCATION_SITES) { |
4530 ElementsKind initial = GetInitialFastElementsKind(); | 4524 ElementsKind initial = GetInitialFastElementsKind(); |
4531 ElementsKind holey_initial = GetHoleyElementsKind(initial); | 4525 ElementsKind holey_initial = GetHoleyElementsKind(initial); |
4532 | 4526 |
4533 ArraySingleArgumentConstructorStub stub_holey(holey_initial, | 4527 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), |
4528 holey_initial, | |
4534 DISABLE_ALLOCATION_SITES); | 4529 DISABLE_ALLOCATION_SITES); |
4535 __ TailCallStub(&stub_holey); | 4530 __ TailCallStub(&stub_holey); |
4536 | 4531 |
4537 __ bind(&normal_sequence); | 4532 __ bind(&normal_sequence); |
4538 ArraySingleArgumentConstructorStub stub(initial, | 4533 ArraySingleArgumentConstructorStub stub(masm->isolate(), |
4534 initial, | |
4539 DISABLE_ALLOCATION_SITES); | 4535 DISABLE_ALLOCATION_SITES); |
4540 __ TailCallStub(&stub); | 4536 __ TailCallStub(&stub); |
4541 } else if (mode == DONT_OVERRIDE) { | 4537 } else if (mode == DONT_OVERRIDE) { |
4542 // We are going to create a holey array, but our kind is non-holey. | 4538 // We are going to create a holey array, but our kind is non-holey. |
4543 // Fix kind and retry (only if we have an allocation site in the slot). | 4539 // Fix kind and retry (only if we have an allocation site in the slot). |
4544 __ incl(rdx); | 4540 __ incl(rdx); |
4545 | 4541 |
4546 if (FLAG_debug_code) { | 4542 if (FLAG_debug_code) { |
4547 Handle<Map> allocation_site_map = | 4543 Handle<Map> allocation_site_map = |
4548 masm->isolate()->factory()->allocation_site_map(); | 4544 masm->isolate()->factory()->allocation_site_map(); |
4549 __ Cmp(FieldOperand(rbx, 0), allocation_site_map); | 4545 __ Cmp(FieldOperand(rbx, 0), allocation_site_map); |
4550 __ Assert(equal, kExpectedAllocationSite); | 4546 __ Assert(equal, kExpectedAllocationSite); |
4551 } | 4547 } |
4552 | 4548 |
4553 // Save the resulting elements kind in type info. We can't just store rdx | 4549 // Save the resulting elements kind in type info. We can't just store rdx |
4554 // in the AllocationSite::transition_info field because elements kind is | 4550 // in the AllocationSite::transition_info field because elements kind is |
4555 // restricted to a portion of the field...upper bits need to be left alone. | 4551 // restricted to a portion of the field...upper bits need to be left alone. |
4556 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 4552 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
4557 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset), | 4553 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset), |
4558 Smi::FromInt(kFastElementsKindPackedToHoley)); | 4554 Smi::FromInt(kFastElementsKindPackedToHoley)); |
4559 | 4555 |
4560 __ bind(&normal_sequence); | 4556 __ bind(&normal_sequence); |
4561 int last_index = GetSequenceIndexFromFastElementsKind( | 4557 int last_index = GetSequenceIndexFromFastElementsKind( |
4562 TERMINAL_FAST_ELEMENTS_KIND); | 4558 TERMINAL_FAST_ELEMENTS_KIND); |
4563 for (int i = 0; i <= last_index; ++i) { | 4559 for (int i = 0; i <= last_index; ++i) { |
4564 Label next; | 4560 Label next; |
4565 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 4561 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
4566 __ cmpl(rdx, Immediate(kind)); | 4562 __ cmpl(rdx, Immediate(kind)); |
4567 __ j(not_equal, &next); | 4563 __ j(not_equal, &next); |
4568 ArraySingleArgumentConstructorStub stub(kind); | 4564 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); |
4569 __ TailCallStub(&stub); | 4565 __ TailCallStub(&stub); |
4570 __ bind(&next); | 4566 __ bind(&next); |
4571 } | 4567 } |
4572 | 4568 |
4573 // If we reached this point there is a problem. | 4569 // If we reached this point there is a problem. |
4574 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 4570 __ Abort(kUnexpectedElementsKindInArrayConstructor); |
4575 } else { | 4571 } else { |
4576 UNREACHABLE(); | 4572 UNREACHABLE(); |
4577 } | 4573 } |
4578 } | 4574 } |
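The SmiAddConstant above works because the elements kind occupies the low bits of the AllocationSite transition_info field (the STATIC_ASSERT checks the shift is 0) and each holey kind is exactly the matching packed kind plus one, so adding kFastElementsKindPackedToHoley flips packed to holey without disturbing the upper bits. A small C++ sketch of that bitfield update, with made-up field widths standing in for the real encoding:

#include <cassert>
#include <cstdint>

// Illustrative encoding: low 5 bits hold the elements kind, the rest of the
// word holds unrelated allocation-site state that must be preserved.
constexpr uint32_t kElementsKindShift = 0;   // mirrors ElementsKindBits::kShift == 0
constexpr uint32_t kElementsKindBits  = 5;
constexpr uint32_t kElementsKindMask  = ((1u << kElementsKindBits) - 1) << kElementsKindShift;

// In the fast-kind sequence, holey == packed + 1.
constexpr uint32_t kFastElementsKindPackedToHoley = 1;

uint32_t TransitionPackedToHoley(uint32_t transition_info) {
  // Because the kind sits at shift 0 and packed + 1 never carries out of the
  // field for a packed kind, a plain add updates only the kind bits.
  return transition_info + kFastElementsKindPackedToHoley;
}

int main() {
  uint32_t info = (0xABCDu << kElementsKindBits) | 2u;  // upper state + packed kind 2
  uint32_t updated = TransitionPackedToHoley(info);
  assert((updated & kElementsKindMask) == 3u);                            // now holey
  assert((updated & ~kElementsKindMask) == (info & ~kElementsKindMask));  // rest intact
  return 0;
}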
4579 | 4575 |
4580 | 4576 |
4581 template<class T> | 4577 template<class T> |
4582 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 4578 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
4583 int to_index = GetSequenceIndexFromFastElementsKind( | 4579 int to_index = GetSequenceIndexFromFastElementsKind( |
4584 TERMINAL_FAST_ELEMENTS_KIND); | 4580 TERMINAL_FAST_ELEMENTS_KIND); |
4585 for (int i = 0; i <= to_index; ++i) { | 4581 for (int i = 0; i <= to_index; ++i) { |
4586 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 4582 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
4587 T stub(kind); | 4583 T stub(isolate, kind); |
4588 stub.GetCode(isolate); | 4584 stub.GetCode(isolate); |
4589 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { | 4585 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { |
4590 T stub1(kind, DISABLE_ALLOCATION_SITES); | 4586 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); |
4591 stub1.GetCode(isolate); | 4587 stub1.GetCode(isolate); |
4592 } | 4588 } |
4593 } | 4589 } |
4594 } | 4590 } |
4595 | 4591 |
4596 | 4592 |
4597 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { | 4593 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { |
4598 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( | 4594 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( |
4599 isolate); | 4595 isolate); |
4600 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( | 4596 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( |
4601 isolate); | 4597 isolate); |
4602 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( | 4598 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( |
4603 isolate); | 4599 isolate); |
4604 } | 4600 } |
4605 | 4601 |
4606 | 4602 |
4607 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( | 4603 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( |
4608 Isolate* isolate) { | 4604 Isolate* isolate) { |
4609 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; | 4605 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; |
4610 for (int i = 0; i < 2; i++) { | 4606 for (int i = 0; i < 2; i++) { |
4611 // For internal arrays we only need a few stub variants | 4607 // For internal arrays we only need a few stub variants |
4612 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); | 4608 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); |
4613 stubh1.GetCode(isolate); | 4609 stubh1.GetCode(isolate); |
4614 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); | 4610 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); |
4615 stubh2.GetCode(isolate); | 4611 stubh2.GetCode(isolate); |
4616 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); | 4612 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); |
4617 stubh3.GetCode(isolate); | 4613 stubh3.GetCode(isolate); |
4618 } | 4614 } |
4619 } | 4615 } |
4620 | 4616 |
4621 | 4617 |
4622 void ArrayConstructorStub::GenerateDispatchToArrayStub( | 4618 void ArrayConstructorStub::GenerateDispatchToArrayStub( |
4623 MacroAssembler* masm, | 4619 MacroAssembler* masm, |
4624 AllocationSiteOverrideMode mode) { | 4620 AllocationSiteOverrideMode mode) { |
4625 if (argument_count_ == ANY) { | 4621 if (argument_count_ == ANY) { |
4626 Label not_zero_case, not_one_case; | 4622 Label not_zero_case, not_one_case; |
(...skipping 63 matching lines...) | |
4690 } | 4686 } |
4691 | 4687 |
4692 | 4688 |
4693 void InternalArrayConstructorStub::GenerateCase( | 4689 void InternalArrayConstructorStub::GenerateCase( |
4694 MacroAssembler* masm, ElementsKind kind) { | 4690 MacroAssembler* masm, ElementsKind kind) { |
4695 Label not_zero_case, not_one_case; | 4691 Label not_zero_case, not_one_case; |
4696 Label normal_sequence; | 4692 Label normal_sequence; |
4697 | 4693 |
4698 __ testp(rax, rax); | 4694 __ testp(rax, rax); |
4699 __ j(not_zero, ¬_zero_case); | 4695 __ j(not_zero, ¬_zero_case); |
4700 InternalArrayNoArgumentConstructorStub stub0(kind); | 4696 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); |
4701 __ TailCallStub(&stub0); | 4697 __ TailCallStub(&stub0); |
4702 | 4698 |
4703 __ bind(¬_zero_case); | 4699 __ bind(¬_zero_case); |
4704 __ cmpl(rax, Immediate(1)); | 4700 __ cmpl(rax, Immediate(1)); |
4705 __ j(greater, ¬_one_case); | 4701 __ j(greater, ¬_one_case); |
4706 | 4702 |
4707 if (IsFastPackedElementsKind(kind)) { | 4703 if (IsFastPackedElementsKind(kind)) { |
4708 // We might need to create a holey array, so | 4704 // We might need to create a holey array, so |
4709 // look at the first argument. | 4705 // look at the first argument. |
4710 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 4706 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
4711 __ movp(rcx, args.GetArgumentOperand(0)); | 4707 __ movp(rcx, args.GetArgumentOperand(0)); |
4712 __ testp(rcx, rcx); | 4708 __ testp(rcx, rcx); |
4713 __ j(zero, &normal_sequence); | 4709 __ j(zero, &normal_sequence); |
4714 | 4710 |
4715 InternalArraySingleArgumentConstructorStub | 4711 InternalArraySingleArgumentConstructorStub |
4716 stub1_holey(GetHoleyElementsKind(kind)); | 4712 stub1_holey(isolate(), GetHoleyElementsKind(kind)); |
4717 __ TailCallStub(&stub1_holey); | 4713 __ TailCallStub(&stub1_holey); |
4718 } | 4714 } |
4719 | 4715 |
4720 __ bind(&normal_sequence); | 4716 __ bind(&normal_sequence); |
4721 InternalArraySingleArgumentConstructorStub stub1(kind); | 4717 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); |
4722 __ TailCallStub(&stub1); | 4718 __ TailCallStub(&stub1); |
4723 | 4719 |
4724 __ bind(¬_one_case); | 4720 __ bind(¬_one_case); |
4725 InternalArrayNArgumentsConstructorStub stubN(kind); | 4721 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); |
4726 __ TailCallStub(&stubN); | 4722 __ TailCallStub(&stubN); |
4727 } | 4723 } |
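GenerateCase splits on the argument count in rax: zero arguments take the no-argument stub, exactly one argument takes the single-argument stub (upgraded to the holey variant when the kind is packed and the length argument is non-zero), and anything else takes the N-argument stub. A compact C++ sketch of that decision tree; the stub calls and helper predicates are stand-ins, not V8's:

#include <cstdio>

enum ElementsKind { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };

bool IsFastPackedElementsKind(ElementsKind kind) { return kind == FAST_ELEMENTS; }
ElementsKind GetHoleyElementsKind(ElementsKind) { return FAST_HOLEY_ELEMENTS; }

void TailCallNoArgumentStub(ElementsKind k)     { std::printf("0-arg stub, kind %d\n", k); }
void TailCallSingleArgumentStub(ElementsKind k) { std::printf("1-arg stub, kind %d\n", k); }
void TailCallNArgumentsStub(ElementsKind k)     { std::printf("N-arg stub, kind %d\n", k); }

// Mirrors the argc dispatch in GenerateCase; |first_arg| plays the role of the
// length value the stub reads from the stack when argc == 1.
void DispatchInternalArrayConstructor(int argc, int first_arg, ElementsKind kind) {
  if (argc == 0) { TailCallNoArgumentStub(kind); return; }
  if (argc == 1) {
    if (IsFastPackedElementsKind(kind) && first_arg != 0) {
      // A non-zero length means the new array starts out with holes.
      TailCallSingleArgumentStub(GetHoleyElementsKind(kind));
      return;
    }
    TailCallSingleArgumentStub(kind);
    return;
  }
  TailCallNArgumentsStub(kind);
}

int main() {
  DispatchInternalArrayConstructor(1, 4, FAST_ELEMENTS);  // takes the holey variant
  return 0;
}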
4728 | 4724 |
4729 | 4725 |
4730 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { | 4726 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { |
4731 // ----------- S t a t e ------------- | 4727 // ----------- S t a t e ------------- |
4732 // -- rax : argc | 4728 // -- rax : argc |
4733 // -- rdi : constructor | 4729 // -- rdi : constructor |
4734 // -- rsp[0] : return address | 4730 // -- rsp[0] : return address |
4735 // -- rsp[8] : last argument | 4731 // -- rsp[8] : last argument |
(...skipping 95 matching lines...) | |
4831 Register scratch = call_data; | 4827 Register scratch = call_data; |
4832 if (!call_data_undefined) { | 4828 if (!call_data_undefined) { |
4833 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); | 4829 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); |
4834 } | 4830 } |
4835 // return value | 4831 // return value |
4836 __ Push(scratch); | 4832 __ Push(scratch); |
4837 // return value default | 4833 // return value default |
4838 __ Push(scratch); | 4834 __ Push(scratch); |
4839 // isolate | 4835 // isolate |
4840 __ Move(scratch, | 4836 __ Move(scratch, |
4841 ExternalReference::isolate_address(masm->isolate())); | 4837 ExternalReference::isolate_address(isolate())); |
4842 __ Push(scratch); | 4838 __ Push(scratch); |
4843 // holder | 4839 // holder |
4844 __ Push(holder); | 4840 __ Push(holder); |
4845 | 4841 |
4846 __ movp(scratch, rsp); | 4842 __ movp(scratch, rsp); |
4847 // Push return address back on stack. | 4843 // Push return address back on stack. |
4848 __ PushReturnAddressFrom(return_address); | 4844 __ PushReturnAddressFrom(return_address); |
4849 | 4845 |
4850 // Allocate the v8::Arguments structure in the arguments' space since | 4846 // Allocate the v8::Arguments structure in the arguments' space since |
4851 // it's not controlled by GC. | 4847 // it's not controlled by GC. |
(...skipping 102 matching lines...) | |
4954 return_value_operand, | 4950 return_value_operand, |
4955 NULL); | 4951 NULL); |
4956 } | 4952 } |
4957 | 4953 |
4958 | 4954 |
4959 #undef __ | 4955 #undef __ |
4960 | 4956 |
4961 } } // namespace v8::internal | 4957 } } // namespace v8::internal |
4962 | 4958 |
4963 #endif // V8_TARGET_ARCH_X64 | 4959 #endif // V8_TARGET_ARCH_X64 |