Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 246643014: CodeStubs contain their corresponding Isolate* now. (part 1) (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Feedback. Rebased. Created 6 years, 8 months ago
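
The mechanical change in this patch: CodeStub now stores the Isolate* it was created with, so stub generator code calls the stub's own isolate() accessor instead of masm->isolate(), and stub constructors take an Isolate* as their first argument (visible below in, e.g., CEntryStub stub(isolate, 1, kDontSaveFPRegs)). The following is a minimal, compilable sketch of that pattern; Isolate here is a stand-in struct and ExampleStub a hypothetical subclass, not the real declarations from src/code-stubs.h.

// sketch.cc -- illustrative stand-ins only, not the real V8 classes.
struct Isolate {};  // stands in for v8::internal::Isolate

class CodeStub {
 public:
  // The pattern this CL introduces: a stub is bound to its isolate at
  // construction time...
  explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}

  // ...so generator code can write isolate() where it previously had to
  // write masm->isolate().
  Isolate* isolate() const { return isolate_; }

 private:
  Isolate* isolate_;
};

class ExampleStub : public CodeStub {  // hypothetical subclass
 public:
  ExampleStub(Isolate* isolate, int result_size)
      : CodeStub(isolate), result_size_(result_size) {}
  int result_size() const { return result_size_; }

 private:
  int result_size_;
};

int main() {
  Isolate isolate;
  // Before this CL: ExampleStub stub(1); and generators used masm->isolate().
  // After: the isolate is passed once, up front.
  ExampleStub stub(&isolate, 1);
  return stub.result_size() == 1 ? 0 : 1;
}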
OLD | NEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 461 matching lines...)
472 descriptor->param_representations_ = representations; 472 descriptor->param_representations_ = representations;
473 } 473 }
474 } 474 }
475 475
476 476
477 #define __ ACCESS_MASM(masm) 477 #define __ ACCESS_MASM(masm)
478 478
479 479
480 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { 480 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
481 // Update the static counter each time a new code stub is generated. 481 // Update the static counter each time a new code stub is generated.
482 Isolate* isolate = masm->isolate(); 482 isolate()->counters()->code_stubs()->Increment();
483 isolate->counters()->code_stubs()->Increment();
484 483
485 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); 484 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
486 int param_count = descriptor->register_param_count_; 485 int param_count = descriptor->register_param_count_;
487 { 486 {
488 // Call the runtime system in a fresh internal frame. 487 // Call the runtime system in a fresh internal frame.
489 FrameScope scope(masm, StackFrame::INTERNAL); 488 FrameScope scope(masm, StackFrame::INTERNAL);
490 ASSERT(descriptor->register_param_count_ == 0 || 489 ASSERT(descriptor->register_param_count_ == 0 ||
491 rax.is(descriptor->register_params_[param_count - 1])); 490 rax.is(descriptor->register_params_[param_count - 1]));
492 // Push arguments 491 // Push arguments
493 for (int i = 0; i < param_count; ++i) { 492 for (int i = 0; i < param_count; ++i) {
494 __ Push(descriptor->register_params_[i]); 493 __ Push(descriptor->register_params_[i]);
495 } 494 }
496 ExternalReference miss = descriptor->miss_handler(); 495 ExternalReference miss = descriptor->miss_handler();
497 __ CallExternalReference(miss, descriptor->register_param_count_); 496 __ CallExternalReference(miss, descriptor->register_param_count_);
498 } 497 }
499 498
500 __ Ret(); 499 __ Ret();
501 } 500 }
502 501
503 502
504 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { 503 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
505 __ PushCallerSaved(save_doubles_); 504 __ PushCallerSaved(save_doubles_);
506 const int argument_count = 1; 505 const int argument_count = 1;
507 __ PrepareCallCFunction(argument_count); 506 __ PrepareCallCFunction(argument_count);
508 __ LoadAddress(arg_reg_1, 507 __ LoadAddress(arg_reg_1,
509 ExternalReference::isolate_address(masm->isolate())); 508 ExternalReference::isolate_address(isolate()));
510 509
511 AllowExternalCallThatCantCauseGC scope(masm); 510 AllowExternalCallThatCantCauseGC scope(masm);
512 __ CallCFunction( 511 __ CallCFunction(
513 ExternalReference::store_buffer_overflow_function(masm->isolate()), 512 ExternalReference::store_buffer_overflow_function(isolate()),
514 argument_count); 513 argument_count);
515 __ PopCallerSaved(save_doubles_); 514 __ PopCallerSaved(save_doubles_);
516 __ ret(0); 515 __ ret(0);
517 } 516 }
518 517
519 518
520 class FloatingPointHelper : public AllStatic { 519 class FloatingPointHelper : public AllStatic {
521 public: 520 public:
522 enum ConvertUndefined { 521 enum ConvertUndefined {
523 CONVERT_UNDEFINED_TO_ZERO, 522 CONVERT_UNDEFINED_TO_ZERO,
(...skipping 340 matching lines...)
864 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. 863 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
865 __ xorps(double_scratch2, double_scratch2); 864 __ xorps(double_scratch2, double_scratch2);
866 __ ucomisd(double_scratch2, double_result); 865 __ ucomisd(double_scratch2, double_result);
867 // double_exponent aliased as double_scratch2 has already been overwritten 866 // double_exponent aliased as double_scratch2 has already been overwritten
868 // and may not have contained the exponent value in the first place when the 867 // and may not have contained the exponent value in the first place when the
869 // input was a smi. We reset it with the exponent value before bailing out. 868 // input was a smi. We reset it with the exponent value before bailing out.
870 __ j(not_equal, &done); 869 __ j(not_equal, &done);
871 __ Cvtlsi2sd(double_exponent, exponent); 870 __ Cvtlsi2sd(double_exponent, exponent);
872 871
873 // Returning or bailing out. 872 // Returning or bailing out.
874 Counters* counters = masm->isolate()->counters(); 873 Counters* counters = isolate()->counters();
875 if (exponent_type_ == ON_STACK) { 874 if (exponent_type_ == ON_STACK) {
876 // The arguments are still on the stack. 875 // The arguments are still on the stack.
877 __ bind(&call_runtime); 876 __ bind(&call_runtime);
878 __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); 877 __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1);
879 878
880 // The stub is called from non-optimized code, which expects the result 879 // The stub is called from non-optimized code, which expects the result
881 // as a heap number in rax. 880 // as a heap number in rax.
882 __ bind(&done); 881 __ bind(&done);
883 __ AllocateHeapNumber(rax, rcx, &call_runtime); 882 __ AllocateHeapNumber(rax, rcx, &call_runtime);
884 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); 883 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
885 __ IncrementCounter(counters->math_pow(), 1); 884 __ IncrementCounter(counters->math_pow(), 1);
886 __ ret(2 * kPointerSize); 885 __ ret(2 * kPointerSize);
887 } else { 886 } else {
888 __ bind(&call_runtime); 887 __ bind(&call_runtime);
889 // Move base to the correct argument register. Exponent is already in xmm1. 888 // Move base to the correct argument register. Exponent is already in xmm1.
890 __ movsd(xmm0, double_base); 889 __ movsd(xmm0, double_base);
891 ASSERT(double_exponent.is(xmm1)); 890 ASSERT(double_exponent.is(xmm1));
892 { 891 {
893 AllowExternalCallThatCantCauseGC scope(masm); 892 AllowExternalCallThatCantCauseGC scope(masm);
894 __ PrepareCallCFunction(2); 893 __ PrepareCallCFunction(2);
895 __ CallCFunction( 894 __ CallCFunction(
896 ExternalReference::power_double_double_function(masm->isolate()), 2); 895 ExternalReference::power_double_double_function(isolate()), 2);
897 } 896 }
898 // Return value is in xmm0. 897 // Return value is in xmm0.
899 __ movsd(double_result, xmm0); 898 __ movsd(double_result, xmm0);
900 899
901 __ bind(&done); 900 __ bind(&done);
902 __ IncrementCounter(counters->math_pow(), 1); 901 __ IncrementCounter(counters->math_pow(), 1);
903 __ ret(0); 902 __ ret(0);
904 } 903 }
905 } 904 }
906 905
907 906
908 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { 907 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
909 Label miss; 908 Label miss;
910 Register receiver; 909 Register receiver;
911 if (kind() == Code::KEYED_LOAD_IC) { 910 if (kind() == Code::KEYED_LOAD_IC) {
912 // ----------- S t a t e ------------- 911 // ----------- S t a t e -------------
913 // -- rax : key 912 // -- rax : key
914 // -- rdx : receiver 913 // -- rdx : receiver
915 // -- rsp[0] : return address 914 // -- rsp[0] : return address
916 // ----------------------------------- 915 // -----------------------------------
917 __ Cmp(rax, masm->isolate()->factory()->prototype_string()); 916 __ Cmp(rax, isolate()->factory()->prototype_string());
918 __ j(not_equal, &miss); 917 __ j(not_equal, &miss);
919 receiver = rdx; 918 receiver = rdx;
920 } else { 919 } else {
921 ASSERT(kind() == Code::LOAD_IC); 920 ASSERT(kind() == Code::LOAD_IC);
922 // ----------- S t a t e ------------- 921 // ----------- S t a t e -------------
923 // -- rax : receiver 922 // -- rax : receiver
924 // -- rcx : name 923 // -- rcx : name
925 // -- rsp[0] : return address 924 // -- rsp[0] : return address
926 // ----------------------------------- 925 // -----------------------------------
927 receiver = rax; 926 receiver = rax;
(...skipping 65 matching lines...)
993 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { 992 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
994 // Stack layout: 993 // Stack layout:
995 // rsp[0] : return address 994 // rsp[0] : return address
996 // rsp[8] : number of parameters (tagged) 995 // rsp[8] : number of parameters (tagged)
997 // rsp[16] : receiver displacement 996 // rsp[16] : receiver displacement
998 // rsp[24] : function 997 // rsp[24] : function
999 // Registers used over the whole function: 998 // Registers used over the whole function:
1000 // rbx: the mapped parameter count (untagged) 999 // rbx: the mapped parameter count (untagged)
1001 // rax: the allocated object (tagged). 1000 // rax: the allocated object (tagged).
1002 1001
1003 Factory* factory = masm->isolate()->factory(); 1002 Factory* factory = isolate()->factory();
1004 1003
1005 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); 1004 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
1006 __ SmiToInteger64(rbx, args.GetArgumentOperand(2)); 1005 __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
1007 // rbx = parameter count (untagged) 1006 // rbx = parameter count (untagged)
1008 1007
1009 // Check if the calling frame is an arguments adaptor frame. 1008 // Check if the calling frame is an arguments adaptor frame.
1010 Label runtime; 1009 Label runtime;
1011 Label adaptor_frame, try_allocate; 1010 Label adaptor_frame, try_allocate;
1012 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 1011 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
1013 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); 1012 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
(...skipping 342 matching lines...)
1356 SUBJECT_STRING_ARGUMENT_INDEX, 1355 SUBJECT_STRING_ARGUMENT_INDEX,
1357 PREVIOUS_INDEX_ARGUMENT_INDEX, 1356 PREVIOUS_INDEX_ARGUMENT_INDEX,
1358 LAST_MATCH_INFO_ARGUMENT_INDEX, 1357 LAST_MATCH_INFO_ARGUMENT_INDEX,
1359 REG_EXP_EXEC_ARGUMENT_COUNT 1358 REG_EXP_EXEC_ARGUMENT_COUNT
1360 }; 1359 };
1361 1360
1362 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT, 1361 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
1363 ARGUMENTS_DONT_CONTAIN_RECEIVER); 1362 ARGUMENTS_DONT_CONTAIN_RECEIVER);
1364 Label runtime; 1363 Label runtime;
1365 // Ensure that a RegExp stack is allocated. 1364 // Ensure that a RegExp stack is allocated.
1366 Isolate* isolate = masm->isolate();
1367 ExternalReference address_of_regexp_stack_memory_address = 1365 ExternalReference address_of_regexp_stack_memory_address =
1368 ExternalReference::address_of_regexp_stack_memory_address(isolate); 1366 ExternalReference::address_of_regexp_stack_memory_address(isolate());
1369 ExternalReference address_of_regexp_stack_memory_size = 1367 ExternalReference address_of_regexp_stack_memory_size =
1370 ExternalReference::address_of_regexp_stack_memory_size(isolate); 1368 ExternalReference::address_of_regexp_stack_memory_size(isolate());
1371 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); 1369 __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
1372 __ testp(kScratchRegister, kScratchRegister); 1370 __ testp(kScratchRegister, kScratchRegister);
1373 __ j(zero, &runtime); 1371 __ j(zero, &runtime);
1374 1372
1375 // Check that the first argument is a JSRegExp object. 1373 // Check that the first argument is a JSRegExp object.
1376 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); 1374 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
1377 __ JumpIfSmi(rax, &runtime); 1375 __ JumpIfSmi(rax, &runtime);
1378 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); 1376 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
1379 __ j(not_equal, &runtime); 1377 __ j(not_equal, &runtime);
1380 1378
(...skipping 131 matching lines...)
1512 __ JumpIfNotSmi(rbx, &runtime); 1510 __ JumpIfNotSmi(rbx, &runtime);
1513 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); 1511 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
1514 __ j(above_equal, &runtime); 1512 __ j(above_equal, &runtime);
1515 __ SmiToInteger64(rbx, rbx); 1513 __ SmiToInteger64(rbx, rbx);
1516 1514
1517 // rdi: subject string 1515 // rdi: subject string
1518 // rbx: previous index 1516 // rbx: previous index
1519 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte). 1517 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte).
1520 // r11: code 1518 // r11: code
1521 // All checks done. Now push arguments for native regexp code. 1519 // All checks done. Now push arguments for native regexp code.
1522 Counters* counters = masm->isolate()->counters(); 1520 Counters* counters = isolate()->counters();
1523 __ IncrementCounter(counters->regexp_entry_native(), 1); 1521 __ IncrementCounter(counters->regexp_entry_native(), 1);
1524 1522
1525 // Isolates: note we add an additional parameter here (isolate pointer). 1523 // Isolates: note we add an additional parameter here (isolate pointer).
1526 static const int kRegExpExecuteArguments = 9; 1524 static const int kRegExpExecuteArguments = 9;
1527 int argument_slots_on_stack = 1525 int argument_slots_on_stack =
1528 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments); 1526 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
1529 __ EnterApiExitFrame(argument_slots_on_stack); 1527 __ EnterApiExitFrame(argument_slots_on_stack);
1530 1528
1531 // Argument 9: Pass current isolate address. 1529 // Argument 9: Pass current isolate address.
1532 __ LoadAddress(kScratchRegister, 1530 __ LoadAddress(kScratchRegister,
1533 ExternalReference::isolate_address(masm->isolate())); 1531 ExternalReference::isolate_address(isolate()));
1534 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), 1532 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
1535 kScratchRegister); 1533 kScratchRegister);
1536 1534
1537 // Argument 8: Indicate that this is a direct call from JavaScript. 1535 // Argument 8: Indicate that this is a direct call from JavaScript.
1538 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), 1536 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
1539 Immediate(1)); 1537 Immediate(1));
1540 1538
1541 // Argument 7: Start (high end) of backtracking stack memory area. 1539 // Argument 7: Start (high end) of backtracking stack memory area.
1542 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); 1540 __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
1543 __ movp(r9, Operand(kScratchRegister, 0)); 1541 __ movp(r9, Operand(kScratchRegister, 0));
1544 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); 1542 __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
1545 __ addp(r9, Operand(kScratchRegister, 0)); 1543 __ addp(r9, Operand(kScratchRegister, 0));
1546 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); 1544 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);
1547 1545
1548 // Argument 6: Set the number of capture registers to zero to force global 1546 // Argument 6: Set the number of capture registers to zero to force global
1549 // regexps to behave as non-global. This does not affect non-global regexps. 1547 // regexps to behave as non-global. This does not affect non-global regexps.
1550 // Argument 6 is passed in r9 on Linux and on the stack on Windows. 1548 // Argument 6 is passed in r9 on Linux and on the stack on Windows.
1551 #ifdef _WIN64 1549 #ifdef _WIN64
1552 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), 1550 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
1553 Immediate(0)); 1551 Immediate(0));
1554 #else 1552 #else
1555 __ Set(r9, 0); 1553 __ Set(r9, 0);
1556 #endif 1554 #endif
1557 1555
1558 // Argument 5: static offsets vector buffer. 1556 // Argument 5: static offsets vector buffer.
1559 __ LoadAddress(r8, 1557 __ LoadAddress(
1560 ExternalReference::address_of_static_offsets_vector(isolate)); 1558 r8, ExternalReference::address_of_static_offsets_vector(isolate()));
1561 // Argument 5 is passed in r8 on Linux and on the stack on Windows. 1559 // Argument 5 is passed in r8 on Linux and on the stack on Windows.
1562 #ifdef _WIN64 1560 #ifdef _WIN64
1563 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8); 1561 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
1564 #endif 1562 #endif
1565 1563
1566 // rdi: subject string 1564 // rdi: subject string
1567 // rbx: previous index 1565 // rbx: previous index
1568 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte). 1566 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte).
1569 // r11: code 1567 // r11: code
1570 // r14: slice offset 1568 // r14: slice offset
(...skipping 104 matching lines...)
1675 kDontSaveFPRegs); 1673 kDontSaveFPRegs);
1676 __ movp(rax, rcx); 1674 __ movp(rax, rcx);
1677 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); 1675 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
1678 __ RecordWriteField(rbx, 1676 __ RecordWriteField(rbx,
1679 RegExpImpl::kLastInputOffset, 1677 RegExpImpl::kLastInputOffset,
1680 rax, 1678 rax,
1681 rdi, 1679 rdi,
1682 kDontSaveFPRegs); 1680 kDontSaveFPRegs);
1683 1681
1684 // Get the static offsets vector filled by the native regexp code. 1682 // Get the static offsets vector filled by the native regexp code.
1685 __ LoadAddress(rcx, 1683 __ LoadAddress(
1686 ExternalReference::address_of_static_offsets_vector(isolate)); 1684 rcx, ExternalReference::address_of_static_offsets_vector(isolate()));
1687 1685
1688 // rbx: last_match_info backing store (FixedArray) 1686 // rbx: last_match_info backing store (FixedArray)
1689 // rcx: offsets vector 1687 // rcx: offsets vector
1690 // rdx: number of capture registers 1688 // rdx: number of capture registers
1691 Label next_capture, done; 1689 Label next_capture, done;
1692 // Capture register counter starts from the number of capture registers and 1690 // Capture register counter starts from the number of capture registers and
1693 // counts down until wrapping after zero. 1691 // counts down until wrapping after zero.
1694 __ bind(&next_capture); 1692 __ bind(&next_capture);
1695 __ subp(rdx, Immediate(1)); 1693 __ subp(rdx, Immediate(1));
1696 __ j(negative, &done, Label::kNear); 1694 __ j(negative, &done, Label::kNear);
(...skipping 12 matching lines...)
1709 // Return last match info. 1707 // Return last match info.
1710 __ movp(rax, r15); 1708 __ movp(rax, r15);
1711 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); 1709 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
1712 1710
1713 __ bind(&exception); 1711 __ bind(&exception);
1714 // Result must now be exception. If there is no pending exception, a stack 1712 // Result must now be exception. If there is no pending exception, a stack
1715 // overflow (on the backtrack stack) was detected in RegExp code, but the 1713 // overflow (on the backtrack stack) was detected in RegExp code, but the
1716 // exception has not been created yet. Handle that in the runtime system. 1714 // exception has not been created yet. Handle that in the runtime system.
1717 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 1715 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
1718 ExternalReference pending_exception_address( 1716 ExternalReference pending_exception_address(
1719 Isolate::kPendingExceptionAddress, isolate); 1717 Isolate::kPendingExceptionAddress, isolate());
1720 Operand pending_exception_operand = 1718 Operand pending_exception_operand =
1721 masm->ExternalOperand(pending_exception_address, rbx); 1719 masm->ExternalOperand(pending_exception_address, rbx);
1722 __ movp(rax, pending_exception_operand); 1720 __ movp(rax, pending_exception_operand);
1723 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); 1721 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
1724 __ cmpp(rax, rdx); 1722 __ cmpp(rax, rdx);
1725 __ j(equal, &runtime); 1723 __ j(equal, &runtime);
1726 __ movp(pending_exception_operand, rdx); 1724 __ movp(pending_exception_operand, rdx);
1727 1725
1728 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); 1726 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
1729 Label termination_exception; 1727 Label termination_exception;
(...skipping 92 matching lines...)
1822 FieldOperand(scratch, Map::kInstanceTypeOffset)); 1820 FieldOperand(scratch, Map::kInstanceTypeOffset));
1823 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); 1821 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
1824 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); 1822 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
1825 __ j(not_zero, label); 1823 __ j(not_zero, label);
1826 } 1824 }
1827 1825
1828 1826
1829 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { 1827 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
1830 Label check_unequal_objects, done; 1828 Label check_unequal_objects, done;
1831 Condition cc = GetCondition(); 1829 Condition cc = GetCondition();
1832 Factory* factory = masm->isolate()->factory(); 1830 Factory* factory = isolate()->factory();
1833 1831
1834 Label miss; 1832 Label miss;
1835 CheckInputType(masm, rdx, left_, &miss); 1833 CheckInputType(masm, rdx, left_, &miss);
1836 CheckInputType(masm, rax, right_, &miss); 1834 CheckInputType(masm, rax, right_, &miss);
1837 1835
1838 // Compare two smis. 1836 // Compare two smis.
1839 Label non_smi, smi_done; 1837 Label non_smi, smi_done;
1840 __ JumpIfNotBothSmi(rax, rdx, &non_smi); 1838 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
1841 __ subp(rdx, rax); 1839 __ subp(rdx, rax);
1842 __ j(no_overflow, &smi_done); 1840 __ j(no_overflow, &smi_done);
(...skipping 313 matching lines...)
2156 FrameScope scope(masm, StackFrame::INTERNAL); 2154 FrameScope scope(masm, StackFrame::INTERNAL);
2157 2155
2158 // Arguments register must be smi-tagged to call out. 2156 // Arguments register must be smi-tagged to call out.
2159 __ Integer32ToSmi(rax, rax); 2157 __ Integer32ToSmi(rax, rax);
2160 __ Push(rax); 2158 __ Push(rax);
2161 __ Push(rdi); 2159 __ Push(rdi);
2162 __ Integer32ToSmi(rdx, rdx); 2160 __ Integer32ToSmi(rdx, rdx);
2163 __ Push(rdx); 2161 __ Push(rdx);
2164 __ Push(rbx); 2162 __ Push(rbx);
2165 2163
2166 CreateAllocationSiteStub create_stub; 2164 CreateAllocationSiteStub create_stub(isolate);
2167 __ CallStub(&create_stub); 2165 __ CallStub(&create_stub);
2168 2166
2169 __ Pop(rbx); 2167 __ Pop(rbx);
2170 __ Pop(rdx); 2168 __ Pop(rdx);
2171 __ Pop(rdi); 2169 __ Pop(rdi);
2172 __ Pop(rax); 2170 __ Pop(rax);
2173 __ SmiToInteger32(rax, rax); 2171 __ SmiToInteger32(rax, rax);
2174 } 2172 }
2175 __ jmp(&done_no_smi_convert); 2173 __ jmp(&done_no_smi_convert);
2176 2174
(...skipping 18 matching lines...)
2195 2193
2196 __ bind(&done_no_smi_convert); 2194 __ bind(&done_no_smi_convert);
2197 } 2195 }
2198 2196
2199 2197
2200 void CallFunctionStub::Generate(MacroAssembler* masm) { 2198 void CallFunctionStub::Generate(MacroAssembler* masm) {
2201 // rbx : feedback vector 2199 // rbx : feedback vector
2202 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback 2200 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
2203 // vector (Smi) 2201 // vector (Smi)
2204 // rdi : the function to call 2202 // rdi : the function to call
2205 Isolate* isolate = masm->isolate();
2206 Label slow, non_function, wrap, cont; 2203 Label slow, non_function, wrap, cont;
2207 StackArgumentsAccessor args(rsp, argc_); 2204 StackArgumentsAccessor args(rsp, argc_);
2208 2205
2209 if (NeedsChecks()) { 2206 if (NeedsChecks()) {
2210 // Check that the function really is a JavaScript function. 2207 // Check that the function really is a JavaScript function.
2211 __ JumpIfSmi(rdi, &non_function); 2208 __ JumpIfSmi(rdi, &non_function);
2212 2209
2213 // Goto slow case if we do not have a function. 2210 // Goto slow case if we do not have a function.
2214 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2211 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2215 __ j(not_equal, &slow); 2212 __ j(not_equal, &slow);
(...skipping 45 matching lines...)
2261 if (NeedsChecks()) { 2258 if (NeedsChecks()) {
2262 // Slow-case: Non-function called. 2259 // Slow-case: Non-function called.
2263 __ bind(&slow); 2260 __ bind(&slow);
2264 if (RecordCallTarget()) { 2261 if (RecordCallTarget()) {
2265 // If there is a call target cache, mark it megamorphic in the 2262 // If there is a call target cache, mark it megamorphic in the
2266 // non-function case. MegamorphicSentinel is an immortal immovable 2263 // non-function case. MegamorphicSentinel is an immortal immovable
2267 // object (megamorphic symbol) so no write barrier is needed. 2264 // object (megamorphic symbol) so no write barrier is needed.
2268 __ SmiToInteger32(rdx, rdx); 2265 __ SmiToInteger32(rdx, rdx);
2269 __ Move(FieldOperand(rbx, rdx, times_pointer_size, 2266 __ Move(FieldOperand(rbx, rdx, times_pointer_size,
2270 FixedArray::kHeaderSize), 2267 FixedArray::kHeaderSize),
2271 TypeFeedbackInfo::MegamorphicSentinel(isolate)); 2268 TypeFeedbackInfo::MegamorphicSentinel(isolate()));
2272 __ Integer32ToSmi(rdx, rdx); 2269 __ Integer32ToSmi(rdx, rdx);
2273 } 2270 }
2274 // Check for function proxy. 2271 // Check for function proxy.
2275 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 2272 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2276 __ j(not_equal, &non_function); 2273 __ j(not_equal, &non_function);
2277 __ PopReturnAddressTo(rcx); 2274 __ PopReturnAddressTo(rcx);
2278 __ Push(rdi); // put proxy as additional argument under return address 2275 __ Push(rdi); // put proxy as additional argument under return address
2279 __ PushReturnAddressFrom(rcx); 2276 __ PushReturnAddressFrom(rcx);
2280 __ Set(rax, argc_ + 1); 2277 __ Set(rax, argc_ + 1);
2281 __ Set(rbx, 0); 2278 __ Set(rbx, 0);
2282 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 2279 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
2283 { 2280 {
2284 Handle<Code> adaptor = 2281 Handle<Code> adaptor =
2285 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 2282 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2286 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2283 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2287 } 2284 }
2288 2285
2289 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 2286 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2290 // of the original receiver from the call site). 2287 // of the original receiver from the call site).
2291 __ bind(&non_function); 2288 __ bind(&non_function);
2292 __ movp(args.GetReceiverOperand(), rdi); 2289 __ movp(args.GetReceiverOperand(), rdi);
2293 __ Set(rax, argc_); 2290 __ Set(rax, argc_);
2294 __ Set(rbx, 0); 2291 __ Set(rbx, 0);
2295 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 2292 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
2296 Handle<Code> adaptor = 2293 Handle<Code> adaptor =
2297 isolate->builtins()->ArgumentsAdaptorTrampoline(); 2294 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2298 __ Jump(adaptor, RelocInfo::CODE_TARGET); 2295 __ Jump(adaptor, RelocInfo::CODE_TARGET);
2299 } 2296 }
2300 2297
2301 if (CallAsMethod()) { 2298 if (CallAsMethod()) {
2302 __ bind(&wrap); 2299 __ bind(&wrap);
2303 // Wrap the receiver and patch it back onto the stack. 2300 // Wrap the receiver and patch it back onto the stack.
2304 { FrameScope frame_scope(masm, StackFrame::INTERNAL); 2301 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2305 __ Push(rdi); 2302 __ Push(rdi);
2306 __ Push(rax); 2303 __ Push(rax);
2307 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 2304 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
(...skipping 59 matching lines...)
2367 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 2364 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2368 __ j(not_equal, &non_function_call); 2365 __ j(not_equal, &non_function_call);
2369 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); 2366 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2370 __ jmp(&do_call); 2367 __ jmp(&do_call);
2371 2368
2372 __ bind(&non_function_call); 2369 __ bind(&non_function_call);
2373 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 2370 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2374 __ bind(&do_call); 2371 __ bind(&do_call);
2375 // Set expected number of arguments to zero (not changing rax). 2372 // Set expected number of arguments to zero (not changing rax).
2376 __ Set(rbx, 0); 2373 __ Set(rbx, 0);
2377 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 2374 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2378 RelocInfo::CODE_TARGET); 2375 RelocInfo::CODE_TARGET);
2379 } 2376 }
2380 2377
2381 2378
2382 bool CEntryStub::NeedsImmovableCode() { 2379 bool CEntryStub::NeedsImmovableCode() {
2383 return false; 2380 return false;
2384 } 2381 }
2385 2382
2386 2383
2387 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 2384 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2388 CEntryStub::GenerateAheadOfTime(isolate); 2385 CEntryStub::GenerateAheadOfTime(isolate);
2389 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 2386 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2390 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 2387 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2391 // It is important that the store buffer overflow stubs are generated first. 2388 // It is important that the store buffer overflow stubs are generated first.
2392 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); 2389 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2393 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); 2390 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2394 BinaryOpICStub::GenerateAheadOfTime(isolate); 2391 BinaryOpICStub::GenerateAheadOfTime(isolate);
2395 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); 2392 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2396 } 2393 }
2397 2394
2398 2395
2399 void CodeStub::GenerateFPStubs(Isolate* isolate) { 2396 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2400 } 2397 }
2401 2398
2402 2399
2403 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { 2400 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2404 CEntryStub stub(1, kDontSaveFPRegs); 2401 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
2405 stub.GetCode(isolate); 2402 stub.GetCode(isolate);
2406 CEntryStub save_doubles(1, kSaveFPRegs); 2403 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
2407 save_doubles.GetCode(isolate); 2404 save_doubles.GetCode(isolate);
2408 } 2405 }
2409 2406
2410 2407
2411 void CEntryStub::Generate(MacroAssembler* masm) { 2408 void CEntryStub::Generate(MacroAssembler* masm) {
2412 // rax: number of arguments including receiver 2409 // rax: number of arguments including receiver
2413 // rbx: pointer to C function (C callee-saved) 2410 // rbx: pointer to C function (C callee-saved)
2414 // rbp: frame pointer of calling JS frame (restored after C call) 2411 // rbp: frame pointer of calling JS frame (restored after C call)
2415 // rsp: stack pointer (restored after C call) 2412 // rsp: stack pointer (restored after C call)
2416 // rsi: current context (restored) 2413 // rsi: current context (restored)
(...skipping 26 matching lines...)
2443 // Call C function. 2440 // Call C function.
2444 #ifdef _WIN64 2441 #ifdef _WIN64
2445 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. 2442 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
2446 // Pass argv and argc as two parameters. The arguments object will 2443 // Pass argv and argc as two parameters. The arguments object will
2447 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION(). 2444 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION().
2448 if (result_size_ < 2) { 2445 if (result_size_ < 2) {
2449 // Pass a pointer to the Arguments object as the first argument. 2446 // Pass a pointer to the Arguments object as the first argument.
2450 // Return result in single register (rax). 2447 // Return result in single register (rax).
2451 __ movp(rcx, r14); // argc. 2448 __ movp(rcx, r14); // argc.
2452 __ movp(rdx, r15); // argv. 2449 __ movp(rdx, r15); // argv.
2453 __ Move(r8, ExternalReference::isolate_address(masm->isolate())); 2450 __ Move(r8, ExternalReference::isolate_address(isolate()));
2454 } else { 2451 } else {
2455 ASSERT_EQ(2, result_size_); 2452 ASSERT_EQ(2, result_size_);
2456 // Pass a pointer to the result location as the first argument. 2453 // Pass a pointer to the result location as the first argument.
2457 __ leap(rcx, StackSpaceOperand(2)); 2454 __ leap(rcx, StackSpaceOperand(2));
2458 // Pass a pointer to the Arguments object as the second argument. 2455 // Pass a pointer to the Arguments object as the second argument.
2459 __ movp(rdx, r14); // argc. 2456 __ movp(rdx, r14); // argc.
2460 __ movp(r8, r15); // argv. 2457 __ movp(r8, r15); // argv.
2461 __ Move(r9, ExternalReference::isolate_address(masm->isolate())); 2458 __ Move(r9, ExternalReference::isolate_address(isolate()));
2462 } 2459 }
2463 2460
2464 #else // _WIN64 2461 #else // _WIN64
2465 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9. 2462 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
2466 __ movp(rdi, r14); // argc. 2463 __ movp(rdi, r14); // argc.
2467 __ movp(rsi, r15); // argv. 2464 __ movp(rsi, r15); // argv.
2468 __ Move(rdx, ExternalReference::isolate_address(masm->isolate())); 2465 __ Move(rdx, ExternalReference::isolate_address(isolate()));
2469 #endif 2466 #endif
2470 __ call(rbx); 2467 __ call(rbx);
2471 // Result is in rax - do not destroy this register! 2468 // Result is in rax - do not destroy this register!
2472 2469
2473 #ifdef _WIN64 2470 #ifdef _WIN64
2474 // If return value is on the stack, pop it to registers. 2471 // If return value is on the stack, pop it to registers.
2475 if (result_size_ > 1) { 2472 if (result_size_ > 1) {
2476 ASSERT_EQ(2, result_size_); 2473 ASSERT_EQ(2, result_size_);
2477 // Read result values stored on stack. Result is stored 2474 // Read result values stored on stack. Result is stored
2478 // above the four argument mirror slots and the two 2475 // above the four argument mirror slots and the two
(...skipping 12 matching lines...)
2491 __ int3(); 2488 __ int3();
2492 __ bind(&okay); 2489 __ bind(&okay);
2493 } 2490 }
2494 2491
2495 // Check result for exception sentinel. 2492 // Check result for exception sentinel.
2496 Label exception_returned; 2493 Label exception_returned;
2497 __ CompareRoot(rax, Heap::kExceptionRootIndex); 2494 __ CompareRoot(rax, Heap::kExceptionRootIndex);
2498 __ j(equal, &exception_returned); 2495 __ j(equal, &exception_returned);
2499 2496
2500 ExternalReference pending_exception_address( 2497 ExternalReference pending_exception_address(
2501 Isolate::kPendingExceptionAddress, masm->isolate()); 2498 Isolate::kPendingExceptionAddress, isolate());
2502 2499
2503 // Check that there is no pending exception, otherwise we 2500 // Check that there is no pending exception, otherwise we
2504 // should have returned the exception sentinel. 2501 // should have returned the exception sentinel.
2505 if (FLAG_debug_code) { 2502 if (FLAG_debug_code) {
2506 Label okay; 2503 Label okay;
2507 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); 2504 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
2508 Operand pending_exception_operand = 2505 Operand pending_exception_operand =
2509 masm->ExternalOperand(pending_exception_address); 2506 masm->ExternalOperand(pending_exception_address);
2510 __ cmpp(r14, pending_exception_operand); 2507 __ cmpp(r14, pending_exception_operand);
2511 __ j(equal, &okay, Label::kNear); 2508 __ j(equal, &okay, Label::kNear);
(...skipping 76 matching lines...)
2588 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14); 2585 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
2589 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15); 2586 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
2590 #endif 2587 #endif
2591 2588
2592 // Set up the roots and smi constant registers. 2589 // Set up the roots and smi constant registers.
2593 // Needs to be done before any further smi loads. 2590 // Needs to be done before any further smi loads.
2594 __ InitializeSmiConstantRegister(); 2591 __ InitializeSmiConstantRegister();
2595 __ InitializeRootRegister(); 2592 __ InitializeRootRegister();
2596 } 2593 }
2597 2594
2598 Isolate* isolate = masm->isolate();
2599
2600 // Save copies of the top frame descriptor on the stack. 2595 // Save copies of the top frame descriptor on the stack.
2601 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate); 2596 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
2602 { 2597 {
2603 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); 2598 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2604 __ Push(c_entry_fp_operand); 2599 __ Push(c_entry_fp_operand);
2605 } 2600 }
2606 2601
2607 // If this is the outermost JS call, set js_entry_sp value. 2602 // If this is the outermost JS call, set js_entry_sp value.
2608 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); 2603 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
2609 __ Load(rax, js_entry_sp); 2604 __ Load(rax, js_entry_sp);
2610 __ testp(rax, rax); 2605 __ testp(rax, rax);
2611 __ j(not_zero, &not_outermost_js); 2606 __ j(not_zero, &not_outermost_js);
2612 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 2607 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2613 __ movp(rax, rbp); 2608 __ movp(rax, rbp);
2614 __ Store(js_entry_sp, rax); 2609 __ Store(js_entry_sp, rax);
2615 Label cont; 2610 Label cont;
2616 __ jmp(&cont); 2611 __ jmp(&cont);
2617 __ bind(&not_outermost_js); 2612 __ bind(&not_outermost_js);
2618 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); 2613 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
2619 __ bind(&cont); 2614 __ bind(&cont);
2620 2615
2621 // Jump to a faked try block that does the invoke, with a faked catch 2616 // Jump to a faked try block that does the invoke, with a faked catch
2622 // block that sets the pending exception. 2617 // block that sets the pending exception.
2623 __ jmp(&invoke); 2618 __ jmp(&invoke);
2624 __ bind(&handler_entry); 2619 __ bind(&handler_entry);
2625 handler_offset_ = handler_entry.pos(); 2620 handler_offset_ = handler_entry.pos();
2626 // Caught exception: Store result (exception) in the pending exception 2621 // Caught exception: Store result (exception) in the pending exception
2627 // field in the JSEnv and return a failure sentinel. 2622 // field in the JSEnv and return a failure sentinel.
2628 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, 2623 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2629 isolate); 2624 isolate());
2630 __ Store(pending_exception, rax); 2625 __ Store(pending_exception, rax);
2631 __ LoadRoot(rax, Heap::kExceptionRootIndex); 2626 __ LoadRoot(rax, Heap::kExceptionRootIndex);
2632 __ jmp(&exit); 2627 __ jmp(&exit);
2633 2628
2634 // Invoke: Link this frame into the handler chain. There's only one 2629 // Invoke: Link this frame into the handler chain. There's only one
2635 // handler block in this code object, so its index is 0. 2630 // handler block in this code object, so its index is 0.
2636 __ bind(&invoke); 2631 __ bind(&invoke);
2637 __ PushTryHandler(StackHandler::JS_ENTRY, 0); 2632 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2638 2633
2639 // Clear any pending exceptions. 2634 // Clear any pending exceptions.
2640 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); 2635 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
2641 __ Store(pending_exception, rax); 2636 __ Store(pending_exception, rax);
2642 2637
2643 // Fake a receiver (NULL). 2638 // Fake a receiver (NULL).
2644 __ Push(Immediate(0)); // receiver 2639 __ Push(Immediate(0)); // receiver
2645 2640
2646 // Invoke the function by calling through JS entry trampoline builtin and 2641 // Invoke the function by calling through JS entry trampoline builtin and
2647 // pop the faked function when we return. We load the address from an 2642 // pop the faked function when we return. We load the address from an
2648 // external reference instead of inlining the call target address directly 2643 // external reference instead of inlining the call target address directly
2649 // in the code, because the builtin stubs may not have been generated yet 2644 // in the code, because the builtin stubs may not have been generated yet
2650 // at the time this code is generated. 2645 // at the time this code is generated.
2651 if (is_construct) { 2646 if (is_construct) {
2652 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, 2647 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2653 isolate); 2648 isolate());
2654 __ Load(rax, construct_entry); 2649 __ Load(rax, construct_entry);
2655 } else { 2650 } else {
2656 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate); 2651 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
2657 __ Load(rax, entry); 2652 __ Load(rax, entry);
2658 } 2653 }
2659 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); 2654 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
2660 __ call(kScratchRegister); 2655 __ call(kScratchRegister);
2661 2656
2662 // Unlink this frame from the handler chain. 2657 // Unlink this frame from the handler chain.
2663 __ PopTryHandler(); 2658 __ PopTryHandler();
2664 2659
2665 __ bind(&exit); 2660 __ bind(&exit);
2666 // Check if the current stack frame is marked as the outermost JS frame. 2661 // Check if the current stack frame is marked as the outermost JS frame.
(...skipping 503 matching lines...)
3170 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime); 3165 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
3171 3166
3172 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. 3167 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
3173 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset)); 3168 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
3174 Label not_original_string; 3169 Label not_original_string;
3175 // Shorter than original string's length: an actual substring. 3170 // Shorter than original string's length: an actual substring.
3176 __ j(below, &not_original_string, Label::kNear); 3171 __ j(below, &not_original_string, Label::kNear);
3177 // Longer than original string's length or negative: unsafe arguments. 3172 // Longer than original string's length or negative: unsafe arguments.
3178 __ j(above, &runtime); 3173 __ j(above, &runtime);
3179 // Return original string. 3174 // Return original string.
3180 Counters* counters = masm->isolate()->counters(); 3175 Counters* counters = isolate()->counters();
3181 __ IncrementCounter(counters->sub_string_native(), 1); 3176 __ IncrementCounter(counters->sub_string_native(), 1);
3182 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); 3177 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3183 __ bind(&not_original_string); 3178 __ bind(&not_original_string);
3184 3179
3185 Label single_char; 3180 Label single_char;
3186 __ SmiCompare(rcx, Smi::FromInt(1)); 3181 __ SmiCompare(rcx, Smi::FromInt(1));
3187 __ j(equal, &single_char); 3182 __ j(equal, &single_char);
3188 3183
3189 __ SmiToInteger32(rcx, rcx); 3184 __ SmiToInteger32(rcx, rcx);
3190 3185
(...skipping 324 matching lines...)
3515 3510
3516 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); 3511 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
3517 __ movp(rdx, args.GetArgumentOperand(0)); // left 3512 __ movp(rdx, args.GetArgumentOperand(0)); // left
3518 __ movp(rax, args.GetArgumentOperand(1)); // right 3513 __ movp(rax, args.GetArgumentOperand(1)); // right
3519 3514
3520 // Check for identity. 3515 // Check for identity.
3521 Label not_same; 3516 Label not_same;
3522 __ cmpp(rdx, rax); 3517 __ cmpp(rdx, rax);
3523 __ j(not_equal, &not_same, Label::kNear); 3518 __ j(not_equal, &not_same, Label::kNear);
3524 __ Move(rax, Smi::FromInt(EQUAL)); 3519 __ Move(rax, Smi::FromInt(EQUAL));
3525 Counters* counters = masm->isolate()->counters(); 3520 Counters* counters = isolate()->counters();
3526 __ IncrementCounter(counters->string_compare_native(), 1); 3521 __ IncrementCounter(counters->string_compare_native(), 1);
3527 __ ret(2 * kPointerSize); 3522 __ ret(2 * kPointerSize);
3528 3523
3529 __ bind(&not_same); 3524 __ bind(&not_same);
3530 3525
3531 // Check that both are sequential ASCII strings. 3526 // Check that both are sequential ASCII strings.
3532 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); 3527 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
3533 3528
3534 // Inline comparison of ASCII strings. 3529 // Inline comparison of ASCII strings.
3535 __ IncrementCounter(counters->string_compare_native(), 1); 3530 __ IncrementCounter(counters->string_compare_native(), 1);
3536 // Drop arguments from the stack 3531 // Drop arguments from the stack
3537 __ PopReturnAddressTo(rcx); 3532 __ PopReturnAddressTo(rcx);
3538 __ addp(rsp, Immediate(2 * kPointerSize)); 3533 __ addp(rsp, Immediate(2 * kPointerSize));
3539 __ PushReturnAddressFrom(rcx); 3534 __ PushReturnAddressFrom(rcx);
3540 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); 3535 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
3541 3536
3542 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 3537 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3543 // tagged as a small integer. 3538 // tagged as a small integer.
3544 __ bind(&runtime); 3539 __ bind(&runtime);
3545 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1); 3540 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
3546 } 3541 }
3547 3542
3548 3543
3549 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 3544 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3550 // ----------- S t a t e ------------- 3545 // ----------- S t a t e -------------
3551 // -- rdx : left 3546 // -- rdx : left
3552 // -- rax : right 3547 // -- rax : right
3553 // -- rsp[0] : return address 3548 // -- rsp[0] : return address
3554 // ----------------------------------- 3549 // -----------------------------------
3555 Isolate* isolate = masm->isolate();
3556 3550
3557 // Load rcx with the allocation site. We stick an undefined dummy value here 3551 // Load rcx with the allocation site. We stick an undefined dummy value here
3558 // and replace it with the real allocation site later when we instantiate this 3552 // and replace it with the real allocation site later when we instantiate this
3559 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). 3553 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3560 __ Move(rcx, handle(isolate->heap()->undefined_value())); 3554 __ Move(rcx, handle(isolate()->heap()->undefined_value()));
3561 3555
3562 // Make sure that we actually patched the allocation site. 3556 // Make sure that we actually patched the allocation site.
3563 if (FLAG_debug_code) { 3557 if (FLAG_debug_code) {
3564 __ testb(rcx, Immediate(kSmiTagMask)); 3558 __ testb(rcx, Immediate(kSmiTagMask));
3565 __ Assert(not_equal, kExpectedAllocationSite); 3559 __ Assert(not_equal, kExpectedAllocationSite);
3566 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), 3560 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
3567 isolate->factory()->allocation_site_map()); 3561 isolate()->factory()->allocation_site_map());
3568 __ Assert(equal, kExpectedAllocationSite); 3562 __ Assert(equal, kExpectedAllocationSite);
3569 } 3563 }
3570 3564
3571 // Tail call into the stub that handles binary operations with allocation 3565 // Tail call into the stub that handles binary operations with allocation
3572 // sites. 3566 // sites.
3573 BinaryOpWithAllocationSiteStub stub(state_); 3567 BinaryOpWithAllocationSiteStub stub(isolate(), state_);
3574 __ TailCallStub(&stub); 3568 __ TailCallStub(&stub);
3575 } 3569 }
3576 3570
3577 3571
3578 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { 3572 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
3579 ASSERT(state_ == CompareIC::SMI); 3573 ASSERT(state_ == CompareIC::SMI);
3580 Label miss; 3574 Label miss;
3581 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); 3575 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
3582 3576
3583 if (GetCondition() == equal) { 3577 if (GetCondition() == equal) {
(...skipping 25 matching lines...)
3609 if (left_ == CompareIC::SMI) { 3603 if (left_ == CompareIC::SMI) {
3610 __ JumpIfNotSmi(rdx, &miss); 3604 __ JumpIfNotSmi(rdx, &miss);
3611 } 3605 }
3612 if (right_ == CompareIC::SMI) { 3606 if (right_ == CompareIC::SMI) {
3613 __ JumpIfNotSmi(rax, &miss); 3607 __ JumpIfNotSmi(rax, &miss);
3614 } 3608 }
3615 3609
3616 // Load left and right operand. 3610 // Load left and right operand.
3617 Label done, left, left_smi, right_smi; 3611 Label done, left, left_smi, right_smi;
3618 __ JumpIfSmi(rax, &right_smi, Label::kNear); 3612 __ JumpIfSmi(rax, &right_smi, Label::kNear);
3619 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map()); 3613 __ CompareMap(rax, isolate()->factory()->heap_number_map());
3620 __ j(not_equal, &maybe_undefined1, Label::kNear); 3614 __ j(not_equal, &maybe_undefined1, Label::kNear);
3621 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); 3615 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
3622 __ jmp(&left, Label::kNear); 3616 __ jmp(&left, Label::kNear);
3623 __ bind(&right_smi); 3617 __ bind(&right_smi);
3624 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. 3618 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet.
3625 __ Cvtlsi2sd(xmm1, rcx); 3619 __ Cvtlsi2sd(xmm1, rcx);
3626 3620
3627 __ bind(&left); 3621 __ bind(&left);
3628 __ JumpIfSmi(rdx, &left_smi, Label::kNear); 3622 __ JumpIfSmi(rdx, &left_smi, Label::kNear);
3629 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map()); 3623 __ CompareMap(rdx, isolate()->factory()->heap_number_map());
3630 __ j(not_equal, &maybe_undefined2, Label::kNear); 3624 __ j(not_equal, &maybe_undefined2, Label::kNear);
3631 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 3625 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
3632 __ jmp(&done); 3626 __ jmp(&done);
3633 __ bind(&left_smi); 3627 __ bind(&left_smi);
3634 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. 3628 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet.
3635 __ Cvtlsi2sd(xmm0, rcx); 3629 __ Cvtlsi2sd(xmm0, rcx);
3636 3630
3637 __ bind(&done); 3631 __ bind(&done);
3638 // Compare operands 3632 // Compare operands
3639 __ ucomisd(xmm0, xmm1); 3633 __ ucomisd(xmm0, xmm1);
3640 3634
3641 // Don't base result on EFLAGS when a NaN is involved. 3635 // Don't base result on EFLAGS when a NaN is involved.
3642 __ j(parity_even, &unordered, Label::kNear); 3636 __ j(parity_even, &unordered, Label::kNear);
3643 3637
3644 // Return a result of -1, 0, or 1, based on EFLAGS. 3638 // Return a result of -1, 0, or 1, based on EFLAGS.
3645 // Performing mov, because xor would destroy the flag register. 3639 // Performing mov, because xor would destroy the flag register.
3646 __ movl(rax, Immediate(0)); 3640 __ movl(rax, Immediate(0));
3647 __ movl(rcx, Immediate(0)); 3641 __ movl(rcx, Immediate(0));
3648 __ setcc(above, rax); // Add one to zero if carry clear and not equal. 3642 __ setcc(above, rax); // Add one to zero if carry clear and not equal.
3649 __ sbbp(rax, rcx); // Subtract one if below (aka. carry set). 3643 __ sbbp(rax, rcx); // Subtract one if below (aka. carry set).
3650 __ ret(0); 3644 __ ret(0);
3651 3645
3652 __ bind(&unordered); 3646 __ bind(&unordered);
3653 __ bind(&generic_stub); 3647 __ bind(&generic_stub);
3654 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, 3648 ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
3655 CompareIC::GENERIC); 3649 CompareIC::GENERIC);
3656 __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); 3650 __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
3657 3651
3658 __ bind(&maybe_undefined1); 3652 __ bind(&maybe_undefined1);
3659 if (Token::IsOrderedRelationalCompareOp(op_)) { 3653 if (Token::IsOrderedRelationalCompareOp(op_)) {
3660 __ Cmp(rax, masm->isolate()->factory()->undefined_value()); 3654 __ Cmp(rax, isolate()->factory()->undefined_value());
3661 __ j(not_equal, &miss); 3655 __ j(not_equal, &miss);
3662 __ JumpIfSmi(rdx, &unordered); 3656 __ JumpIfSmi(rdx, &unordered);
3663 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); 3657 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
3664 __ j(not_equal, &maybe_undefined2, Label::kNear); 3658 __ j(not_equal, &maybe_undefined2, Label::kNear);
3665 __ jmp(&unordered); 3659 __ jmp(&unordered);
3666 } 3660 }
3667 3661
3668 __ bind(&maybe_undefined2); 3662 __ bind(&maybe_undefined2);
3669 if (Token::IsOrderedRelationalCompareOp(op_)) { 3663 if (Token::IsOrderedRelationalCompareOp(op_)) {
3670 __ Cmp(rdx, masm->isolate()->factory()->undefined_value()); 3664 __ Cmp(rdx, isolate()->factory()->undefined_value());
3671 __ j(equal, &unordered); 3665 __ j(equal, &unordered);
3672 } 3666 }
3673 3667
3674 __ bind(&miss); 3668 __ bind(&miss);
3675 GenerateMiss(masm); 3669 GenerateMiss(masm);
3676 } 3670 }
3677 3671
3678 3672
3679 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { 3673 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3680 ASSERT(state_ == CompareIC::INTERNALIZED_STRING); 3674 ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
(...skipping 205 matching lines...)
3886 3880
3887 __ bind(&miss); 3881 __ bind(&miss);
3888 GenerateMiss(masm); 3882 GenerateMiss(masm);
3889 } 3883 }
3890 3884
3891 3885
3892 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { 3886 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
3893 { 3887 {
3894 // Call the runtime system in a fresh internal frame. 3888 // Call the runtime system in a fresh internal frame.
3895 ExternalReference miss = 3889 ExternalReference miss =
3896 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); 3890 ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
3897 3891
3898 FrameScope scope(masm, StackFrame::INTERNAL); 3892 FrameScope scope(masm, StackFrame::INTERNAL);
3899 __ Push(rdx); 3893 __ Push(rdx);
3900 __ Push(rax); 3894 __ Push(rax);
3901 __ Push(rdx); 3895 __ Push(rdx);
3902 __ Push(rax); 3896 __ Push(rax);
3903 __ Push(Smi::FromInt(op_)); 3897 __ Push(Smi::FromInt(op_));
3904 __ CallExternalReference(miss, 3); 3898 __ CallExternalReference(miss, 3);
3905 3899
3906 // Compute the entry point of the rewritten stub. 3900 // Compute the entry point of the rewritten stub.
(...skipping 52 matching lines...)
3959 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); 3953 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
3960 __ j(equal, &good, Label::kNear); 3954 __ j(equal, &good, Label::kNear);
3961 3955
3962 // Check if the entry name is not a unique name. 3956 // Check if the entry name is not a unique name.
3963 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); 3957 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
3964 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), 3958 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
3965 miss); 3959 miss);
3966 __ bind(&good); 3960 __ bind(&good);
3967 } 3961 }
3968 3962
3969 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); 3963 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
3964 NEGATIVE_LOOKUP);
3970 __ Push(Handle<Object>(name)); 3965 __ Push(Handle<Object>(name));
3971 __ Push(Immediate(name->Hash())); 3966 __ Push(Immediate(name->Hash()));
3972 __ CallStub(&stub); 3967 __ CallStub(&stub);
3973 __ testp(r0, r0); 3968 __ testp(r0, r0);
3974 __ j(not_zero, miss); 3969 __ j(not_zero, miss);
3975 __ jmp(done); 3970 __ jmp(done);
3976 } 3971 }
3977 3972
3978 3973
3979 // Probe the name dictionary in the |elements| register. Jump to the 3974 // Probe the name dictionary in the |elements| register. Jump to the
(...skipping 29 matching lines...)
4009 // Scale the index by multiplying by the entry size. 4004 // Scale the index by multiplying by the entry size.
4010 ASSERT(NameDictionary::kEntrySize == 3); 4005 ASSERT(NameDictionary::kEntrySize == 3);
4011 __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 4006 __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3
4012 4007
4013 // Check if the key is identical to the name. 4008 // Check if the key is identical to the name.
4014 __ cmpp(name, Operand(elements, r1, times_pointer_size, 4009 __ cmpp(name, Operand(elements, r1, times_pointer_size,
4015 kElementsStartOffset - kHeapObjectTag)); 4010 kElementsStartOffset - kHeapObjectTag));
4016 __ j(equal, done); 4011 __ j(equal, done);
4017 } 4012 }
4018 4013
4019 NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP); 4014 NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
4015 POSITIVE_LOOKUP);
4020 __ Push(name); 4016 __ Push(name);
4021 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset)); 4017 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
4022 __ shrl(r0, Immediate(Name::kHashShift)); 4018 __ shrl(r0, Immediate(Name::kHashShift));
4023 __ Push(r0); 4019 __ Push(r0);
4024 __ CallStub(&stub); 4020 __ CallStub(&stub);
4025 4021
4026 __ testp(r0, r0); 4022 __ testp(r0, r0);
4027 __ j(zero, miss); 4023 __ j(zero, miss);
4028 __ jmp(done); 4024 __ jmp(done);
4029 } 4025 }
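A minimal sketch of the probe walk the loop above performs, assuming V8's usual quadratic probe offsets (i * (i + 1) / 2) and the 3-word dictionary entries the ASSERT pins down; the leap(r1, r1, times_2) is exactly the "entry * 3" scaling below.

#include <cstdint>
#include <cstdio>

const uint32_t kEntrySize = 3;  // mirrors NameDictionary::kEntrySize

// Element index of the i-th probe for `hash` in a power-of-two capacity.
uint32_t ProbeElementIndex(uint32_t hash, uint32_t capacity, uint32_t i) {
  uint32_t mask = capacity - 1;             // capacity is a power of two
  uint32_t probe_offset = i * (i + 1) / 2;  // quadratic probing, assumed
  uint32_t entry = (hash + probe_offset) & mask;
  return entry * kEntrySize;                // r1 = r1 * 3 in the stub
}

int main() {
  for (uint32_t i = 0; i < 4; ++i)
    std::printf("probe %u -> element %u\n", i,
                ProbeElementIndex(0x1234u, 64, i));
  return 0;
}
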
(...skipping 40 matching lines...)
4070 // Scale the index by multiplying by the entry size. 4066 // Scale the index by multiplying by the entry size.
4071 ASSERT(NameDictionary::kEntrySize == 3); 4067 ASSERT(NameDictionary::kEntrySize == 3);
4072 __ leap(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. 4068 __ leap(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
4073 4069
4074 // Having undefined at this place means the name is not contained. 4070 // Having undefined at this place means the name is not contained.
4075 __ movp(scratch, Operand(dictionary_, 4071 __ movp(scratch, Operand(dictionary_,
4076 index_, 4072 index_,
4077 times_pointer_size, 4073 times_pointer_size,
4078 kElementsStartOffset - kHeapObjectTag)); 4074 kElementsStartOffset - kHeapObjectTag));
4079 4075
4080 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); 4076 __ Cmp(scratch, isolate()->factory()->undefined_value());
4081 __ j(equal, &not_in_dictionary); 4077 __ j(equal, &not_in_dictionary);
4082 4078
4083 // Stop if found the property. 4079 // Stop if found the property.
4084 __ cmpp(scratch, args.GetArgumentOperand(0)); 4080 __ cmpp(scratch, args.GetArgumentOperand(0));
4085 __ j(equal, &in_dictionary); 4081 __ j(equal, &in_dictionary);
4086 4082
4087 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { 4083 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
4088 // If we hit a key that is not a unique name during negative 4084 // If we hit a key that is not a unique name during negative
4089 // lookup we have to bail out, as this key might be equal to the 4085 // lookup we have to bail out, as this key might be equal to the
4090 // key we are looking for. 4086 // key we are looking for.
(...skipping 22 matching lines...)
4113 4109
4114 __ bind(&not_in_dictionary); 4110 __ bind(&not_in_dictionary);
4115 __ movp(scratch, Immediate(0)); 4111 __ movp(scratch, Immediate(0));
4116 __ Drop(1); 4112 __ Drop(1);
4117 __ ret(2 * kPointerSize); 4113 __ ret(2 * kPointerSize);
4118 } 4114 }
4119 4115
4120 4116
4121 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( 4117 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
4122 Isolate* isolate) { 4118 Isolate* isolate) {
4123 StoreBufferOverflowStub stub1(kDontSaveFPRegs); 4119 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
4124 stub1.GetCode(isolate); 4120 stub1.GetCode(isolate);
4125 StoreBufferOverflowStub stub2(kSaveFPRegs); 4121 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
4126 stub2.GetCode(isolate); 4122 stub2.GetCode(isolate);
4127 } 4123 }
4128 4124
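GenerateFixedRegStubsAheadOfTime shows the migration state of part 1 in miniature: each FP-save variant is now constructed with its isolate, yet GetCode(isolate) still takes the same isolate again, a redundancy that later parts of this series can presumably drop. A stand-in sketch of the loop shape (types and names here are illustrative, not the real V8 declarations):

#include <cstdio>
#include <initializer_list>

struct Isolate {};
enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };

struct StoreBufferOverflowStubSketch {
  StoreBufferOverflowStubSketch(Isolate* isolate, SaveFPRegsMode mode)
      : isolate(isolate), mode(mode) {}
  void GetCode(Isolate* which) {  // compiles and caches the stub's code
    (void)which;  // same isolate passed twice during part 1 of the CL
    std::printf("pregenerated stub, save_fp=%d\n", mode == kSaveFPRegs);
  }
  Isolate* isolate;
  SaveFPRegsMode mode;
};

void GenerateFixedRegStubsAheadOfTimeSketch(Isolate* isolate) {
  for (SaveFPRegsMode mode : {kDontSaveFPRegs, kSaveFPRegs}) {
    StoreBufferOverflowStubSketch stub(isolate, mode);
    stub.GetCode(isolate);
  }
}

int main() {
  Isolate isolate;
  GenerateFixedRegStubsAheadOfTimeSketch(&isolate);
  return 0;
}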
4129 4125
4130 bool CodeStub::CanUseFPRegisters() { 4126 bool CodeStub::CanUseFPRegisters() {
4131 return true; // Always have SSE2 on x64. 4127 return true; // Always have SSE2 on x64.
4132 } 4128 }
4133 4129
4134 4130
4134 // Takes the input in 3 registers: address_, value_ and object_. A pointer to 4130 // Takes the input in 3 registers: address_, value_ and object_. A pointer to
(...skipping 79 matching lines...)
4215 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); 4211 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4216 Register address = 4212 Register address =
4217 arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address(); 4213 arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
4218 ASSERT(!address.is(regs_.object())); 4214 ASSERT(!address.is(regs_.object()));
4219 ASSERT(!address.is(arg_reg_1)); 4215 ASSERT(!address.is(arg_reg_1));
4220 __ Move(address, regs_.address()); 4216 __ Move(address, regs_.address());
4221 __ Move(arg_reg_1, regs_.object()); 4217 __ Move(arg_reg_1, regs_.object());
4222 // TODO(gc) Can we just set address arg2 in the beginning? 4218 // TODO(gc) Can we just set address arg2 in the beginning?
4223 __ Move(arg_reg_2, address); 4219 __ Move(arg_reg_2, address);
4224 __ LoadAddress(arg_reg_3, 4220 __ LoadAddress(arg_reg_3,
4225 ExternalReference::isolate_address(masm->isolate())); 4221 ExternalReference::isolate_address(isolate()));
4226 int argument_count = 3; 4222 int argument_count = 3;
4227 4223
4228 AllowExternalCallThatCantCauseGC scope(masm); 4224 AllowExternalCallThatCantCauseGC scope(masm);
4229 __ PrepareCallCFunction(argument_count); 4225 __ PrepareCallCFunction(argument_count);
4230 __ CallCFunction( 4226 __ CallCFunction(
4231 ExternalReference::incremental_marking_record_write_function( 4227 ExternalReference::incremental_marking_record_write_function(isolate()),
4232 masm->isolate()),
4233 argument_count); 4228 argument_count);
4234 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); 4229 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
4235 } 4230 }
4236 4231
4237 4232
4238 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( 4233 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4239 MacroAssembler* masm, 4234 MacroAssembler* masm,
4240 OnNoNeedToInformIncrementalMarker on_no_need, 4235 OnNoNeedToInformIncrementalMarker on_no_need,
4241 Mode mode) { 4236 Mode mode) {
4242 Label on_black; 4237 Label on_black;
(...skipping 157 matching lines...)
4400 __ StoreNumberToDoubleElements(rax, 4395 __ StoreNumberToDoubleElements(rax,
4401 r9, 4396 r9,
4402 r11, 4397 r11,
4403 xmm0, 4398 xmm0,
4404 &slow_elements); 4399 &slow_elements);
4405 __ ret(0); 4400 __ ret(0);
4406 } 4401 }
4407 4402
4408 4403
4409 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { 4404 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4410 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); 4405 CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
4411 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); 4406 __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
4412 int parameter_count_offset = 4407 int parameter_count_offset =
4413 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; 4408 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
4414 __ movp(rbx, MemOperand(rbp, parameter_count_offset)); 4409 __ movp(rbx, MemOperand(rbp, parameter_count_offset));
4415 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 4410 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4416 __ PopReturnAddressTo(rcx); 4411 __ PopReturnAddressTo(rcx);
4417 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE 4412 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
4418 ? kPointerSize 4413 ? kPointerSize
4419 : 0; 4414 : 0;
4420 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); 4415 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
4421 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. 4416 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack.
4422 } 4417 }
4423 4418
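The arithmetic of the stack fix-up above, as a minimal sketch: after the trampoline frame is torn down and the return address popped into rcx, rsp must step over the caller's stack parameters (count fetched into rbx) plus, as the extra kPointerSize suggests, one additional slot when the stub ran in JS_FUNCTION_STUB_MODE.

#include <cstddef>
#include <cstdio>

const size_t kPointerSize = 8;  // x64

size_t BytesToDrop(size_t caller_param_count, bool js_function_stub_mode) {
  size_t additional = js_function_stub_mode ? kPointerSize : 0;
  return caller_param_count * kPointerSize + additional;
}

int main() {
  std::printf("%zu bytes dropped for 3 params in JS mode\n",
              BytesToDrop(3, true));  // 32
  return 0;
}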
4424 4419
4425 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 4420 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4426 if (masm->isolate()->function_entry_hook() != NULL) { 4421 if (masm->isolate()->function_entry_hook() != NULL) {
4427 ProfileEntryHookStub stub; 4422 ProfileEntryHookStub stub(masm->isolate());
4428 masm->CallStub(&stub); 4423 masm->CallStub(&stub);
4429 } 4424 }
4430 } 4425 }
4431 4426
4432 4427
4433 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 4428 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4434 // This stub can be called from essentially anywhere, so it needs to save 4429 // This stub can be called from essentially anywhere, so it needs to save
4435 // all volatile and callee-save registers. 4430 // all volatile and callee-save registers.
4436 const size_t kNumSavedRegisters = 2; 4431 const size_t kNumSavedRegisters = 2;
4437 __ pushq(arg_reg_1); 4432 __ pushq(arg_reg_1);
4438 __ pushq(arg_reg_2); 4433 __ pushq(arg_reg_2);
4439 4434
4440 // Calculate the original stack pointer and store it in the second arg. 4435 // Calculate the original stack pointer and store it in the second arg.
4441 __ leap(arg_reg_2, 4436 __ leap(arg_reg_2,
4442 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize)); 4437 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));
4443 4438
4444 // Calculate the function address to the first arg. 4439 // Calculate the function address to the first arg.
4445 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); 4440 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
4446 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); 4441 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
4447 4442
4448 // Save the remainder of the volatile registers. 4443 // Save the remainder of the volatile registers.
4449 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); 4444 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
4450 4445
4451 // Call the entry hook function. 4446 // Call the entry hook function.
4452 __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), 4447 __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
4453 Assembler::RelocInfoNone()); 4448 Assembler::RelocInfoNone());
4454 4449
4455 AllowExternalCallThatCantCauseGC scope(masm); 4450 AllowExternalCallThatCantCauseGC scope(masm);
4456 4451
4457 const int kArgumentCount = 2; 4452 const int kArgumentCount = 2;
4458 __ PrepareCallCFunction(kArgumentCount); 4453 __ PrepareCallCFunction(kArgumentCount);
4459 __ CallCFunction(rax, kArgumentCount); 4454 __ CallCFunction(rax, kArgumentCount);
4460 4455
4461 // Restore volatile regs. 4456 // Restore volatile regs.
4462 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); 4457 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
4463 __ popq(arg_reg_2); 4458 __ popq(arg_reg_2);
4464 __ popq(arg_reg_1); 4459 __ popq(arg_reg_1);
4465 4460
4466 __ Ret(); 4461 __ Ret();
4467 } 4462 }
4468 4463
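The C-side shape of the hook invoked above: V8's public API (v8.h) declares the entry hook as taking the entered function's address and the location of the return address on the stack, which matches the two arguments the stub assembles (the first derived by subtracting the short-call instruction length from the stub's own return address). A sketch of a hook an embedder might install, with hypothetical addresses for illustration:

#include <cstdint>
#include <cstdio>

typedef void (*FunctionEntryHook)(uintptr_t function,
                                  uintptr_t return_addr_location);

void LoggingEntryHook(uintptr_t function, uintptr_t return_addr_location) {
  std::printf("entered code object at %#llx (return slot at %#llx)\n",
              (unsigned long long)function,
              (unsigned long long)return_addr_location);
}

int main() {
  FunctionEntryHook hook = &LoggingEntryHook;
  hook(0x1000, 0x7fff0000);  // hypothetical values, for illustration only
  return 0;
}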
4469 4464
4470 template<class T> 4465 template<class T>
4471 static void CreateArrayDispatch(MacroAssembler* masm, 4466 static void CreateArrayDispatch(MacroAssembler* masm,
4472 AllocationSiteOverrideMode mode) { 4467 AllocationSiteOverrideMode mode) {
4473 if (mode == DISABLE_ALLOCATION_SITES) { 4468 if (mode == DISABLE_ALLOCATION_SITES) {
4474 T stub(GetInitialFastElementsKind(), mode); 4469 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
4475 __ TailCallStub(&stub); 4470 __ TailCallStub(&stub);
4476 } else if (mode == DONT_OVERRIDE) { 4471 } else if (mode == DONT_OVERRIDE) {
4477 int last_index = GetSequenceIndexFromFastElementsKind( 4472 int last_index = GetSequenceIndexFromFastElementsKind(
4478 TERMINAL_FAST_ELEMENTS_KIND); 4473 TERMINAL_FAST_ELEMENTS_KIND);
4479 for (int i = 0; i <= last_index; ++i) { 4474 for (int i = 0; i <= last_index; ++i) {
4480 Label next; 4475 Label next;
4481 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4476 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4482 __ cmpl(rdx, Immediate(kind)); 4477 __ cmpl(rdx, Immediate(kind));
4483 __ j(not_equal, &next); 4478 __ j(not_equal, &next);
4484 T stub(kind); 4479 T stub(masm->isolate(), kind);
4485 __ TailCallStub(&stub); 4480 __ TailCallStub(&stub);
4486 __ bind(&next); 4481 __ bind(&next);
4487 } 4482 }
4488 4483
4489 // If we reached this point there is a problem. 4484 // If we reached this point there is a problem.
4490 __ Abort(kUnexpectedElementsKindInArrayConstructor); 4485 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4491 } else { 4486 } else {
4492 UNREACHABLE(); 4487 UNREACHABLE();
4493 } 4488 }
4494 } 4489 }
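A stand-in sketch of CreateArrayDispatch above: walk the fast ElementsKind sequence, compare each against the kind held in rdx, and tail-call the stub specialised for the first match, aborting if nothing matches. The kind numbering and sequence length here are illustrative, not V8's actual values.

#include <cstdio>

const int kLastSequenceIndex = 5;  // TERMINAL_FAST_ELEMENTS_KIND, assumed

void TailCallStubForKind(int kind) {
  std::printf("tail-calling array constructor stub for kind %d\n", kind);
}

void CreateArrayDispatchSketch(int kind_in_rdx) {
  for (int i = 0; i <= kLastSequenceIndex; ++i) {
    int kind = i;  // GetFastElementsKindFromSequenceIndex(i), simplified
    if (kind_in_rdx == kind) {  // cmpl(rdx, Immediate(kind)); j(not_equal)
      TailCallStubForKind(kind);
      return;
    }
  }
  std::printf("abort: unexpected elements kind %d\n", kind_in_rdx);
}

int main() {
  CreateArrayDispatchSketch(3);
  return 0;
}
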
(...skipping 28 matching lines...)
4523 // look at the first argument 4518 // look at the first argument
4524 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); 4519 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4525 __ movp(rcx, args.GetArgumentOperand(0)); 4520 __ movp(rcx, args.GetArgumentOperand(0));
4526 __ testp(rcx, rcx); 4521 __ testp(rcx, rcx);
4527 __ j(zero, &normal_sequence); 4522 __ j(zero, &normal_sequence);
4528 4523
4529 if (mode == DISABLE_ALLOCATION_SITES) { 4524 if (mode == DISABLE_ALLOCATION_SITES) {
4530 ElementsKind initial = GetInitialFastElementsKind(); 4525 ElementsKind initial = GetInitialFastElementsKind();
4531 ElementsKind holey_initial = GetHoleyElementsKind(initial); 4526 ElementsKind holey_initial = GetHoleyElementsKind(initial);
4532 4527
4533 ArraySingleArgumentConstructorStub stub_holey(holey_initial, 4528 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4529 holey_initial,
4534 DISABLE_ALLOCATION_SITES); 4530 DISABLE_ALLOCATION_SITES);
4535 __ TailCallStub(&stub_holey); 4531 __ TailCallStub(&stub_holey);
4536 4532
4537 __ bind(&normal_sequence); 4533 __ bind(&normal_sequence);
4538 ArraySingleArgumentConstructorStub stub(initial, 4534 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4535 initial,
4539 DISABLE_ALLOCATION_SITES); 4536 DISABLE_ALLOCATION_SITES);
4540 __ TailCallStub(&stub); 4537 __ TailCallStub(&stub);
4541 } else if (mode == DONT_OVERRIDE) { 4538 } else if (mode == DONT_OVERRIDE) {
4542 // We are going to create a holey array, but our kind is non-holey. 4539 // We are going to create a holey array, but our kind is non-holey.
4543 // Fix kind and retry (only if we have an allocation site in the slot). 4540 // Fix kind and retry (only if we have an allocation site in the slot).
4544 __ incl(rdx); 4541 __ incl(rdx);
4545 4542
4546 if (FLAG_debug_code) { 4543 if (FLAG_debug_code) {
4547 Handle<Map> allocation_site_map = 4544 Handle<Map> allocation_site_map =
4548 masm->isolate()->factory()->allocation_site_map(); 4545 masm->isolate()->factory()->allocation_site_map();
4549 __ Cmp(FieldOperand(rbx, 0), allocation_site_map); 4546 __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
4550 __ Assert(equal, kExpectedAllocationSite); 4547 __ Assert(equal, kExpectedAllocationSite);
4551 } 4548 }
4552 4549
4553 // Save the resulting elements kind in type info. We can't just store r3 4550 // Save the resulting elements kind in type info. We can't just store r3
4554 // in the AllocationSite::transition_info field because elements kind is 4551 // in the AllocationSite::transition_info field because elements kind is
4555 // restricted to a portion of the field...upper bits need to be left alone. 4552 // restricted to a portion of the field...upper bits need to be left alone.
4556 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 4553 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4557 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset), 4554 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
4558 Smi::FromInt(kFastElementsKindPackedToHoley)); 4555 Smi::FromInt(kFastElementsKindPackedToHoley));
4559 4556
4560 __ bind(&normal_sequence); 4557 __ bind(&normal_sequence);
4561 int last_index = GetSequenceIndexFromFastElementsKind( 4558 int last_index = GetSequenceIndexFromFastElementsKind(
4562 TERMINAL_FAST_ELEMENTS_KIND); 4559 TERMINAL_FAST_ELEMENTS_KIND);
4563 for (int i = 0; i <= last_index; ++i) { 4560 for (int i = 0; i <= last_index; ++i) {
4564 Label next; 4561 Label next;
4565 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4562 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4566 __ cmpl(rdx, Immediate(kind)); 4563 __ cmpl(rdx, Immediate(kind));
4567 __ j(not_equal, &next); 4564 __ j(not_equal, &next);
4568 ArraySingleArgumentConstructorStub stub(kind); 4565 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4569 __ TailCallStub(&stub); 4566 __ TailCallStub(&stub);
4570 __ bind(&next); 4567 __ bind(&next);
4571 } 4568 }
4572 4569
4573 // If we reached this point there is a problem. 4570 // If we reached this point there is a problem.
4574 __ Abort(kUnexpectedElementsKindInArrayConstructor); 4571 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4575 } else { 4572 } else {
4576 UNREACHABLE(); 4573 UNREACHABLE();
4577 } 4574 }
4578 } 4575 }
4579 4576
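A sketch of the packed-to-holey fix-up in CreateArrayDispatchOneArgument above. The incl(rdx) relies on each holey fast kind sitting one past its packed variant, and because ElementsKindBits start at bit 0 of the AllocationSite transition info, adding the packed-to-holey delta to the whole field moves only the kind bits (the upper bits stay untouched, as the comment demands). The delta value is assumed from the incl; smi tagging is elided here.

#include <cstdio>

const int kFastElementsKindPackedToHoleySketch = 1;  // assumed, per incl(rdx)

int ToHoley(int packed_kind) {
  return packed_kind + kFastElementsKindPackedToHoleySketch;
}

int UpdateTransitionInfo(int transition_info) {
  // ElementsKindBits::kShift == 0, so plain addition touches only the kind.
  return transition_info + kFastElementsKindPackedToHoleySketch;
}

int main() {
  std::printf("kind %d -> %d, transition info %d -> %d\n",
              2, ToHoley(2), 2, UpdateTransitionInfo(2));
  return 0;
}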
4580 4577
4581 template<class T> 4578 template<class T>
4582 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { 4579 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4583 int to_index = GetSequenceIndexFromFastElementsKind( 4580 int to_index = GetSequenceIndexFromFastElementsKind(
4584 TERMINAL_FAST_ELEMENTS_KIND); 4581 TERMINAL_FAST_ELEMENTS_KIND);
4585 for (int i = 0; i <= to_index; ++i) { 4582 for (int i = 0; i <= to_index; ++i) {
4586 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4583 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4587 T stub(kind); 4584 T stub(isolate, kind);
4588 stub.GetCode(isolate); 4585 stub.GetCode(isolate);
4589 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { 4586 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4590 T stub1(kind, DISABLE_ALLOCATION_SITES); 4587 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4591 stub1.GetCode(isolate); 4588 stub1.GetCode(isolate);
4592 } 4589 }
4593 } 4590 }
4594 } 4591 }
4595 4592
4596 4593
4597 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { 4594 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4598 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( 4595 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4599 isolate); 4596 isolate);
4600 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( 4597 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4601 isolate); 4598 isolate);
4602 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( 4599 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4603 isolate); 4600 isolate);
4604 } 4601 }
4605 4602
4606 4603
4607 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( 4604 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4608 Isolate* isolate) { 4605 Isolate* isolate) {
4609 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; 4606 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4610 for (int i = 0; i < 2; i++) { 4607 for (int i = 0; i < 2; i++) {
4611 // For internal arrays we only need a few things 4608 // For internal arrays we only need a few things
4612 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); 4609 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4613 stubh1.GetCode(isolate); 4610 stubh1.GetCode(isolate);
4614 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); 4611 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4615 stubh2.GetCode(isolate); 4612 stubh2.GetCode(isolate);
4616 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); 4613 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4617 stubh3.GetCode(isolate); 4614 stubh3.GetCode(isolate);
4618 } 4615 }
4619 } 4616 }
4620 4617
4621 4618
4622 void ArrayConstructorStub::GenerateDispatchToArrayStub( 4619 void ArrayConstructorStub::GenerateDispatchToArrayStub(
4623 MacroAssembler* masm, 4620 MacroAssembler* masm,
4624 AllocationSiteOverrideMode mode) { 4621 AllocationSiteOverrideMode mode) {
4625 if (argument_count_ == ANY) { 4622 if (argument_count_ == ANY) {
4626 Label not_zero_case, not_one_case; 4623 Label not_zero_case, not_one_case;
(...skipping 63 matching lines...)
4690 } 4687 }
4691 4688
4692 4689
4693 void InternalArrayConstructorStub::GenerateCase( 4690 void InternalArrayConstructorStub::GenerateCase(
4694 MacroAssembler* masm, ElementsKind kind) { 4691 MacroAssembler* masm, ElementsKind kind) {
4695 Label not_zero_case, not_one_case; 4692 Label not_zero_case, not_one_case;
4696 Label normal_sequence; 4693 Label normal_sequence;
4697 4694
4698 __ testp(rax, rax); 4695 __ testp(rax, rax);
4699 __ j(not_zero, &not_zero_case); 4696 __ j(not_zero, &not_zero_case);
4700 InternalArrayNoArgumentConstructorStub stub0(kind); 4697 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4701 __ TailCallStub(&stub0); 4698 __ TailCallStub(&stub0);
4702 4699
4703 __ bind(&not_zero_case); 4700 __ bind(&not_zero_case);
4704 __ cmpl(rax, Immediate(1)); 4701 __ cmpl(rax, Immediate(1));
4705 __ j(greater, &not_one_case); 4702 __ j(greater, &not_one_case);
4706 4703
4707 if (IsFastPackedElementsKind(kind)) { 4704 if (IsFastPackedElementsKind(kind)) {
4708 // We might need to create a holey array 4705 // We might need to create a holey array
4709 // look at the first argument 4706 // look at the first argument
4710 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); 4707 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4711 __ movp(rcx, args.GetArgumentOperand(0)); 4708 __ movp(rcx, args.GetArgumentOperand(0));
4712 __ testp(rcx, rcx); 4709 __ testp(rcx, rcx);
4713 __ j(zero, &normal_sequence); 4710 __ j(zero, &normal_sequence);
4714 4711
4715 InternalArraySingleArgumentConstructorStub 4712 InternalArraySingleArgumentConstructorStub
4716 stub1_holey(GetHoleyElementsKind(kind)); 4713 stub1_holey(isolate(), GetHoleyElementsKind(kind));
4717 __ TailCallStub(&stub1_holey); 4714 __ TailCallStub(&stub1_holey);
4718 } 4715 }
4719 4716
4720 __ bind(&normal_sequence); 4717 __ bind(&normal_sequence);
4721 InternalArraySingleArgumentConstructorStub stub1(kind); 4718 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4722 __ TailCallStub(&stub1); 4719 __ TailCallStub(&stub1);
4723 4720
4724 __ bind(&not_one_case); 4721 __ bind(&not_one_case);
4725 InternalArrayNArgumentsConstructorStub stubN(kind); 4722 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4726 __ TailCallStub(&stubN); 4723 __ TailCallStub(&stubN);
4727 } 4724 }
4728 4725
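A stand-in sketch of GenerateCase above: select the constructor stub by argument count, upgrading a packed kind to its holey variant when the single argument (the requested length) is nonzero. Kind handling is simplified to a flag here; the real code dispatches on ElementsKind.

#include <cstdio>

void TailCall(const char* stub, bool holey) {
  std::printf("tail-call %s stub (%s)\n", stub,
              holey ? "holey" : "packed");
}

void GenerateCaseSketch(int argc, long first_arg, bool kind_is_packed) {
  if (argc == 0) {                          // testp(rax, rax)
    TailCall("NoArgument", !kind_is_packed);
  } else if (argc == 1) {                   // cmpl(rax, Immediate(1))
    if (kind_is_packed && first_arg != 0) {
      TailCall("SingleArgument", true);     // the stub1_holey path
    } else {
      TailCall("SingleArgument", !kind_is_packed);  // normal_sequence
    }
  } else {
    TailCall("NArguments", !kind_is_packed);
  }
}

int main() {
  GenerateCaseSketch(1, 4, /*kind_is_packed=*/true);
  return 0;
}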
4729 4726
4730 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 4727 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4731 // ----------- S t a t e ------------- 4728 // ----------- S t a t e -------------
4732 // -- rax : argc 4729 // -- rax : argc
4733 // -- rdi : constructor 4730 // -- rdi : constructor
4734 // -- rsp[0] : return address 4731 // -- rsp[0] : return address
4735 // -- rsp[8] : last argument 4732 // -- rsp[8] : last argument
(...skipping 95 matching lines...)
4831 Register scratch = call_data; 4828 Register scratch = call_data;
4832 if (!call_data_undefined) { 4829 if (!call_data_undefined) {
4833 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); 4830 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4834 } 4831 }
4835 // return value 4832 // return value
4836 __ Push(scratch); 4833 __ Push(scratch);
4837 // return value default 4834 // return value default
4838 __ Push(scratch); 4835 __ Push(scratch);
4839 // isolate 4836 // isolate
4840 __ Move(scratch, 4837 __ Move(scratch,
4841 ExternalReference::isolate_address(masm->isolate())); 4838 ExternalReference::isolate_address(isolate()));
4842 __ Push(scratch); 4839 __ Push(scratch);
4843 // holder 4840 // holder
4844 __ Push(holder); 4841 __ Push(holder);
4845 4842
4846 __ movp(scratch, rsp); 4843 __ movp(scratch, rsp);
4847 // Push return address back on stack. 4844 // Push return address back on stack.
4848 __ PushReturnAddressFrom(return_address); 4845 __ PushReturnAddressFrom(return_address);
4849 4846
4850 // Allocate the v8::Arguments structure in the arguments' space since 4847 // Allocate the v8::Arguments structure in the arguments' space since
4851 // it's not controlled by GC. 4848 // it's not controlled by GC.
(...skipping 102 matching lines...)
4954 return_value_operand, 4951 return_value_operand,
4955 NULL); 4952 NULL);
4956 } 4953 }
4957 4954
4958 4955
4959 #undef __ 4956 #undef __
4960 4957
4961 } } // namespace v8::internal 4958 } } // namespace v8::internal
4962 4959
4963 #endif // V8_TARGET_ARCH_X64 4960 #endif // V8_TARGET_ARCH_X64