OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 429 matching lines...)
440 | 440 |
441 void FastNewContextStub::Generate(MacroAssembler* masm) { | 441 void FastNewContextStub::Generate(MacroAssembler* masm) { |
442 // Try to allocate the context in new space. | 442 // Try to allocate the context in new space. |
443 Label gc; | 443 Label gc; |
444 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 444 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
445 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, | 445 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, |
446 rax, rbx, rcx, &gc, TAG_OBJECT); | 446 rax, rbx, rcx, &gc, TAG_OBJECT); |
447 | 447 |
448 // Get the function from the stack. | 448 // Get the function from the stack. |
449 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 449 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
450 __ movq(rcx, args.GetArgumentOperand(0)); | 450 __ movp(rcx, args.GetArgumentOperand(0)); |
451 | 451 |
452 // Set up the object header. | 452 // Set up the object header. |
453 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); | 453 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); |
454 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 454 __ movp(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
455 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 455 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
456 | 456 |
457 // Set up the fixed slots. | 457 // Set up the fixed slots. |
458 __ Set(rbx, 0); // Set to NULL. | 458 __ Set(rbx, 0); // Set to NULL. |
459 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); | 459 __ movp(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); |
460 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); | 460 __ movp(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); |
461 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); | 461 __ movp(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); |
462 | 462 |
463 // Copy the global object from the previous context. | 463 // Copy the global object from the previous context. |
464 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 464 __ movp(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
465 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx); | 465 __ movp(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx); |
466 | 466 |
467 // Initialize the rest of the slots to undefined. | 467 // Initialize the rest of the slots to undefined. |
468 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); | 468 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); |
469 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | 469 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { |
470 __ movq(Operand(rax, Context::SlotOffset(i)), rbx); | 470 __ movp(Operand(rax, Context::SlotOffset(i)), rbx); |
471 } | 471 } |
472 | 472 |
473 // Return and remove the on-stack parameter. | 473 // Return and remove the on-stack parameter. |
474 __ movq(rsi, rax); | 474 __ movp(rsi, rax); |
475 __ ret(1 * kPointerSize); | 475 __ ret(1 * kPointerSize); |
476 | 476 |
477 // Need to collect. Call into runtime system. | 477 // Need to collect. Call into runtime system. |
478 __ bind(&gc); | 478 __ bind(&gc); |
479 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); | 479 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); |
480 } | 480 } |
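
Note: the movq→movp renames in this stub (and throughout the CL) separate pointer-width moves from moves that must be 64-bit, so the backend can later serve an x32-style ABI where pointers are 4 bytes. A minimal sketch of the idea, not the actual MacroAssembler definition:

    // Hypothetical dispatch on pointer width; the real movp in
    // macro-assembler-x64 may be defined differently.
    void MacroAssembler::movp(Register dst, const Operand& src) {
      if (kPointerSize == kInt64Size) {
        movq(dst, src);  // 64-bit pointers: ordinary x64 move.
      } else {
        movl(dst, src);  // 32-bit pointers (x32-style ABI).
      }
    }
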
481 | 481 |
482 | 482 |
483 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | 483 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { |
484 // Stack layout on entry: | 484 // Stack layout on entry: |
485 // | 485 // |
486 // [rsp + (1 * kPointerSize)] : function | 486 // [rsp + (1 * kPointerSize)] : function |
487 // [rsp + (2 * kPointerSize)] : serialized scope info | 487 // [rsp + (2 * kPointerSize)] : serialized scope info |
488 | 488 |
489 // Try to allocate the context in new space. | 489 // Try to allocate the context in new space. |
490 Label gc; | 490 Label gc; |
491 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 491 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
492 __ Allocate(FixedArray::SizeFor(length), | 492 __ Allocate(FixedArray::SizeFor(length), |
493 rax, rbx, rcx, &gc, TAG_OBJECT); | 493 rax, rbx, rcx, &gc, TAG_OBJECT); |
494 | 494 |
495 // Get the function from the stack. | 495 // Get the function from the stack. |
496 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 496 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
497 __ movq(rcx, args.GetArgumentOperand(1)); | 497 __ movp(rcx, args.GetArgumentOperand(1)); |
498 // Get the serialized scope info from the stack. | 498 // Get the serialized scope info from the stack. |
499 __ movq(rbx, args.GetArgumentOperand(0)); | 499 __ movp(rbx, args.GetArgumentOperand(0)); |
500 | 500 |
501 // Set up the object header. | 501 // Set up the object header. |
502 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); | 502 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); |
503 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 503 __ movp(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
504 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 504 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
505 | 505 |
506 // If this block context is nested in the native context we get a smi | 506 // If this block context is nested in the native context we get a smi |
507 // sentinel instead of a function. The block context should get the | 507 // sentinel instead of a function. The block context should get the |
508 // canonical empty function of the native context as its closure which | 508 // canonical empty function of the native context as its closure which |
509 // we still have to look up. | 509 // we still have to look up. |
510 Label after_sentinel; | 510 Label after_sentinel; |
511 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); | 511 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); |
512 if (FLAG_debug_code) { | 512 if (FLAG_debug_code) { |
513 __ cmpq(rcx, Immediate(0)); | 513 __ cmpq(rcx, Immediate(0)); |
514 __ Assert(equal, kExpected0AsASmiSentinel); | 514 __ Assert(equal, kExpected0AsASmiSentinel); |
515 } | 515 } |
516 __ movq(rcx, GlobalObjectOperand()); | 516 __ movp(rcx, GlobalObjectOperand()); |
517 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); | 517 __ movp(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); |
518 __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); | 518 __ movp(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); |
519 __ bind(&after_sentinel); | 519 __ bind(&after_sentinel); |
520 | 520 |
521 // Set up the fixed slots. | 521 // Set up the fixed slots. |
522 __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); | 522 __ movp(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); |
523 __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); | 523 __ movp(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); |
524 __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); | 524 __ movp(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); |
525 | 525 |
526 // Copy the global object from the previous context. | 526 // Copy the global object from the previous context. |
527 __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); | 527 __ movp(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); |
528 __ movq(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx); | 528 __ movp(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx); |
529 | 529 |
530 // Initialize the rest of the slots to the hole value. | 530 // Initialize the rest of the slots to the hole value. |
531 __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex); | 531 __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex); |
532 for (int i = 0; i < slots_; i++) { | 532 for (int i = 0; i < slots_; i++) { |
533 __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx); | 533 __ movp(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx); |
534 } | 534 } |
535 | 535 |
536 // Return and remove the on-stack parameter. | 536 // Return and remove the on-stack parameter. |
537 __ movq(rsi, rax); | 537 __ movp(rsi, rax); |
538 __ ret(2 * kPointerSize); | 538 __ ret(2 * kPointerSize); |
539 | 539 |
540 // Need to collect. Call into runtime system. | 540 // Need to collect. Call into runtime system. |
541 __ bind(&gc); | 541 __ bind(&gc); |
542 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 542 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
543 } | 543 } |
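
Note: both context stubs lean on Context::SlotOffset to turn a slot index into a byte offset off the tagged context pointer. A sketch of the arithmetic, assuming the usual FixedArray layout (the actual definition lives in contexts.h):

    // The -kHeapObjectTag term compensates for the tag bit that is
    // already set in the base register holding the context.
    static int SlotOffset(int index) {
      return FixedArray::kHeaderSize + index * kPointerSize - kHeapObjectTag;
    }
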
544 | 544 |
545 | 545 |
546 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 546 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
547 __ PushCallerSaved(save_doubles_); | 547 __ PushCallerSaved(save_doubles_); |
(...skipping 145 matching lines...)
693 const Register base = rax; | 693 const Register base = rax; |
694 const Register scratch = rcx; | 694 const Register scratch = rcx; |
695 const XMMRegister double_result = xmm3; | 695 const XMMRegister double_result = xmm3; |
696 const XMMRegister double_base = xmm2; | 696 const XMMRegister double_base = xmm2; |
697 const XMMRegister double_exponent = xmm1; | 697 const XMMRegister double_exponent = xmm1; |
698 const XMMRegister double_scratch = xmm4; | 698 const XMMRegister double_scratch = xmm4; |
699 | 699 |
700 Label call_runtime, done, exponent_not_smi, int_exponent; | 700 Label call_runtime, done, exponent_not_smi, int_exponent; |
701 | 701 |
702 // Save 1 in double_result - we need this several times later on. | 702 // Save 1 in double_result - we need this several times later on. |
703 __ movq(scratch, Immediate(1)); | 703 __ movp(scratch, Immediate(1)); |
704 __ Cvtlsi2sd(double_result, scratch); | 704 __ Cvtlsi2sd(double_result, scratch); |
705 | 705 |
706 if (exponent_type_ == ON_STACK) { | 706 if (exponent_type_ == ON_STACK) { |
707 Label base_is_smi, unpack_exponent; | 707 Label base_is_smi, unpack_exponent; |
708 // The exponent and base are supplied as arguments on the stack. | 708 // The exponent and base are supplied as arguments on the stack. |
709 // This can only happen if the stub is called from non-optimized code. | 709 // This can only happen if the stub is called from non-optimized code. |
710 // Load input parameters from stack. | 710 // Load input parameters from stack. |
711 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 711 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
712 __ movq(base, args.GetArgumentOperand(0)); | 712 __ movp(base, args.GetArgumentOperand(0)); |
713 __ movq(exponent, args.GetArgumentOperand(1)); | 713 __ movp(exponent, args.GetArgumentOperand(1)); |
714 __ JumpIfSmi(base, &base_is_smi, Label::kNear); | 714 __ JumpIfSmi(base, &base_is_smi, Label::kNear); |
715 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), | 715 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), |
716 Heap::kHeapNumberMapRootIndex); | 716 Heap::kHeapNumberMapRootIndex); |
717 __ j(not_equal, &call_runtime); | 717 __ j(not_equal, &call_runtime); |
718 | 718 |
719 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); | 719 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); |
720 __ jmp(&unpack_exponent, Label::kNear); | 720 __ jmp(&unpack_exponent, Label::kNear); |
721 | 721 |
722 __ bind(&base_is_smi); | 722 __ bind(&base_is_smi); |
723 __ SmiToInteger32(base, base); | 723 __ SmiToInteger32(base, base); |
(...skipping 139 matching lines...)
863 __ bind(&fast_power_failed); | 863 __ bind(&fast_power_failed); |
864 __ fninit(); | 864 __ fninit(); |
865 __ addq(rsp, Immediate(kDoubleSize)); | 865 __ addq(rsp, Immediate(kDoubleSize)); |
866 __ jmp(&call_runtime); | 866 __ jmp(&call_runtime); |
867 } | 867 } |
868 | 868 |
869 // Calculate power with integer exponent. | 869 // Calculate power with integer exponent. |
870 __ bind(&int_exponent); | 870 __ bind(&int_exponent); |
871 const XMMRegister double_scratch2 = double_exponent; | 871 const XMMRegister double_scratch2 = double_exponent; |
872 // Back up exponent as we need to check if exponent is negative later. | 872 // Back up exponent as we need to check if exponent is negative later. |
873 __ movq(scratch, exponent); // Back up exponent. | 873 __ movp(scratch, exponent); // Back up exponent. |
874 __ movsd(double_scratch, double_base); // Back up base. | 874 __ movsd(double_scratch, double_base); // Back up base. |
875 __ movsd(double_scratch2, double_result); // Load double_exponent with 1. | 875 __ movsd(double_scratch2, double_result); // Load double_exponent with 1. |
876 | 876 |
877 // Get absolute value of exponent. | 877 // Get absolute value of exponent. |
878 Label no_neg, while_true, while_false; | 878 Label no_neg, while_true, while_false; |
879 __ testl(scratch, scratch); | 879 __ testl(scratch, scratch); |
880 __ j(positive, &no_neg, Label::kNear); | 880 __ j(positive, &no_neg, Label::kNear); |
881 __ negl(scratch); | 881 __ negl(scratch); |
882 __ bind(&no_neg); | 882 __ bind(&no_neg); |
883 | 883 |
(...skipping 147 matching lines...)
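
Note: the elided lines complete the int_exponent path above, which is exponentiation by squaring. In C++ terms (a sketch; the stub additionally bails out to the runtime in corner cases such as results that underflow to zero):

    double PowInt(double base, int exponent) {
      int e = exponent < 0 ? -exponent : exponent;  // matches the negl above
      double result = 1.0;                          // the saved 1 in double_result
      while (e != 0) {
        if (e & 1) result *= base;  // multiply in the low bit
        base *= base;               // square every iteration
        e >>= 1;
      }
      return exponent < 0 ? 1.0 / result : result;
    }
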
1031 // Check that the receiver isn't a smi. | 1031 // Check that the receiver isn't a smi. |
1032 __ JumpIfSmi(receiver, &miss); | 1032 __ JumpIfSmi(receiver, &miss); |
1033 | 1033 |
1034 // Check that the object is a JS array. | 1034 // Check that the object is a JS array. |
1035 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch); | 1035 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch); |
1036 __ j(not_equal, &miss); | 1036 __ j(not_equal, &miss); |
1037 | 1037 |
1038 // Check that elements are FixedArray. | 1038 // Check that elements are FixedArray. |
1039 // We rely on StoreIC_ArrayLength below to deal with all types of | 1039 // We rely on StoreIC_ArrayLength below to deal with all types of |
1040 // fast elements (including COW). | 1040 // fast elements (including COW). |
1041 __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset)); | 1041 __ movp(scratch, FieldOperand(receiver, JSArray::kElementsOffset)); |
1042 __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch); | 1042 __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch); |
1043 __ j(not_equal, &miss); | 1043 __ j(not_equal, &miss); |
1044 | 1044 |
1045 // Check that the array has fast properties, otherwise the length | 1045 // Check that the array has fast properties, otherwise the length |
1046 // property might have been redefined. | 1046 // property might have been redefined. |
1047 __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset)); | 1047 __ movp(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset)); |
1048 __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset), | 1048 __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset), |
1049 Heap::kHashTableMapRootIndex); | 1049 Heap::kHashTableMapRootIndex); |
1050 __ j(equal, &miss); | 1050 __ j(equal, &miss); |
1051 | 1051 |
1052 // Check that value is a smi. | 1052 // Check that value is a smi. |
1053 __ JumpIfNotSmi(value, &miss); | 1053 __ JumpIfNotSmi(value, &miss); |
1054 | 1054 |
1055 // Prepare tail call to StoreIC_ArrayLength. | 1055 // Prepare tail call to StoreIC_ArrayLength. |
1056 __ PopReturnAddressTo(scratch); | 1056 __ PopReturnAddressTo(scratch); |
1057 __ push(receiver); | 1057 __ push(receiver); |
(...skipping 16 matching lines...)
1074 | 1074 |
1075 // Check that the key is a smi. | 1075 // Check that the key is a smi. |
1076 Label slow; | 1076 Label slow; |
1077 __ JumpIfNotSmi(rdx, &slow); | 1077 __ JumpIfNotSmi(rdx, &slow); |
1078 | 1078 |
1079 // Check if the calling frame is an arguments adaptor frame. We look at the | 1079 // Check if the calling frame is an arguments adaptor frame. We look at the |
1080 // context offset, and if the frame is not a regular one, then we find a | 1080 // context offset, and if the frame is not a regular one, then we find a |
1081 // Smi instead of the context. We can't use SmiCompare here, because that | 1081 // Smi instead of the context. We can't use SmiCompare here, because that |
1082 // only works for comparing two smis. | 1082 // only works for comparing two smis. |
1083 Label adaptor; | 1083 Label adaptor; |
1084 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 1084 __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
1085 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset), | 1085 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset), |
1086 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 1086 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
1087 __ j(equal, &adaptor); | 1087 __ j(equal, &adaptor); |
1088 | 1088 |
1089 // Check index against formal parameters count limit passed in | 1089 // Check index against formal parameters count limit passed in |
1090 // through register rax. Use unsigned comparison to get negative | 1090 // through register rax. Use unsigned comparison to get negative |
1091 // check for free. | 1091 // check for free. |
1092 __ cmpq(rdx, rax); | 1092 __ cmpq(rdx, rax); |
1093 __ j(above_equal, &slow); | 1093 __ j(above_equal, &slow); |
1094 | 1094 |
1095 // Read the argument from the stack and return it. | 1095 // Read the argument from the stack and return it. |
1096 __ SmiSub(rax, rax, rdx); | 1096 __ SmiSub(rax, rax, rdx); |
1097 __ SmiToInteger32(rax, rax); | 1097 __ SmiToInteger32(rax, rax); |
1098 StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1098 StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1099 __ movq(rax, args.GetArgumentOperand(0)); | 1099 __ movp(rax, args.GetArgumentOperand(0)); |
1100 __ Ret(); | 1100 __ Ret(); |
1101 | 1101 |
1102 // Arguments adaptor case: Check index against actual arguments | 1102 // Arguments adaptor case: Check index against actual arguments |
1103 // limit found in the arguments adaptor frame. Use unsigned | 1103 // limit found in the arguments adaptor frame. Use unsigned |
1104 // comparison to get negative check for free. | 1104 // comparison to get negative check for free. |
1105 __ bind(&adaptor); | 1105 __ bind(&adaptor); |
1106 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1106 __ movp(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
1107 __ cmpq(rdx, rcx); | 1107 __ cmpq(rdx, rcx); |
1108 __ j(above_equal, &slow); | 1108 __ j(above_equal, &slow); |
1109 | 1109 |
1110 // Read the argument from the stack and return it. | 1110 // Read the argument from the stack and return it. |
1111 __ SmiSub(rcx, rcx, rdx); | 1111 __ SmiSub(rcx, rcx, rdx); |
1112 __ SmiToInteger32(rcx, rcx); | 1112 __ SmiToInteger32(rcx, rcx); |
1113 StackArgumentsAccessor adaptor_args(rbx, rcx, | 1113 StackArgumentsAccessor adaptor_args(rbx, rcx, |
1114 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1114 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1115 __ movq(rax, adaptor_args.GetArgumentOperand(0)); | 1115 __ movp(rax, adaptor_args.GetArgumentOperand(0)); |
1116 __ Ret(); | 1116 __ Ret(); |
1117 | 1117 |
1118 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 1118 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
1119 // by calling the runtime system. | 1119 // by calling the runtime system. |
1120 __ bind(&slow); | 1120 __ bind(&slow); |
1121 __ PopReturnAddressTo(rbx); | 1121 __ PopReturnAddressTo(rbx); |
1122 __ push(rdx); | 1122 __ push(rdx); |
1123 __ PushReturnAddressFrom(rbx); | 1123 __ PushReturnAddressFrom(rbx); |
1124 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); | 1124 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); |
1125 } | 1125 } |
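
Note: the SmiToInteger32/SmiSub pairs above rely on the x64 smi encoding, where the 32-bit payload sits in the upper half of the word and the tag is 0. Tag and untag are plain shifts (sketch; constants assumed from v8globals.h, kSmiShift == 32):

    intptr_t SmiTag(int32_t value) {
      return static_cast<intptr_t>(value) << 32;  // payload in the high half
    }
    int32_t SmiUntag(intptr_t smi) {
      return static_cast<int32_t>(smi >> 32);     // arithmetic shift back down
    }
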
(...skipping 11 matching lines...)
1137 | 1137 |
1138 Factory* factory = masm->isolate()->factory(); | 1138 Factory* factory = masm->isolate()->factory(); |
1139 | 1139 |
1140 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1140 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1141 __ SmiToInteger64(rbx, args.GetArgumentOperand(2)); | 1141 __ SmiToInteger64(rbx, args.GetArgumentOperand(2)); |
1142 // rbx = parameter count (untagged) | 1142 // rbx = parameter count (untagged) |
1143 | 1143 |
1144 // Check if the calling frame is an arguments adaptor frame. | 1144 // Check if the calling frame is an arguments adaptor frame. |
1145 Label runtime; | 1145 Label runtime; |
1146 Label adaptor_frame, try_allocate; | 1146 Label adaptor_frame, try_allocate; |
1147 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 1147 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
1148 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 1148 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
1149 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 1149 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
1150 __ j(equal, &adaptor_frame); | 1150 __ j(equal, &adaptor_frame); |
1151 | 1151 |
1152 // No adaptor, parameter count = argument count. | 1152 // No adaptor, parameter count = argument count. |
1153 __ movq(rcx, rbx); | 1153 __ movp(rcx, rbx); |
1154 __ jmp(&try_allocate, Label::kNear); | 1154 __ jmp(&try_allocate, Label::kNear); |
1155 | 1155 |
1156 // We have an adaptor frame. Patch the parameters pointer. | 1156 // We have an adaptor frame. Patch the parameters pointer. |
1157 __ bind(&adaptor_frame); | 1157 __ bind(&adaptor_frame); |
1158 __ SmiToInteger64(rcx, | 1158 __ SmiToInteger64(rcx, |
1159 Operand(rdx, | 1159 Operand(rdx, |
1160 ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1160 ArgumentsAdaptorFrameConstants::kLengthOffset)); |
1161 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, | 1161 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, |
1162 StandardFrameConstants::kCallerSPOffset)); | 1162 StandardFrameConstants::kCallerSPOffset)); |
1163 __ movq(args.GetArgumentOperand(1), rdx); | 1163 __ movp(args.GetArgumentOperand(1), rdx); |
1164 | 1164 |
1165 // rbx = parameter count (untagged) | 1165 // rbx = parameter count (untagged) |
1166 // rcx = argument count (untagged) | 1166 // rcx = argument count (untagged) |
1167 // Compute the mapped parameter count = min(rbx, rcx) in rbx. | 1167 // Compute the mapped parameter count = min(rbx, rcx) in rbx. |
1168 __ cmpq(rbx, rcx); | 1168 __ cmpq(rbx, rcx); |
1169 __ j(less_equal, &try_allocate, Label::kNear); | 1169 __ j(less_equal, &try_allocate, Label::kNear); |
1170 __ movq(rbx, rcx); | 1170 __ movp(rbx, rcx); |
1171 | 1171 |
1172 __ bind(&try_allocate); | 1172 __ bind(&try_allocate); |
1173 | 1173 |
1174 // Compute the sizes of backing store, parameter map, and arguments object. | 1174 // Compute the sizes of backing store, parameter map, and arguments object. |
1175 // 1. Parameter map, has 2 extra words containing context and backing store. | 1175 // 1. Parameter map, has 2 extra words containing context and backing store. |
1176 const int kParameterMapHeaderSize = | 1176 const int kParameterMapHeaderSize = |
1177 FixedArray::kHeaderSize + 2 * kPointerSize; | 1177 FixedArray::kHeaderSize + 2 * kPointerSize; |
1178 Label no_parameter_map; | 1178 Label no_parameter_map; |
1179 __ xor_(r8, r8); | 1179 __ xor_(r8, r8); |
1180 __ testq(rbx, rbx); | 1180 __ testq(rbx, rbx); |
1181 __ j(zero, &no_parameter_map, Label::kNear); | 1181 __ j(zero, &no_parameter_map, Label::kNear); |
1182 __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize)); | 1182 __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize)); |
1183 __ bind(&no_parameter_map); | 1183 __ bind(&no_parameter_map); |
1184 | 1184 |
1185 // 2. Backing store. | 1185 // 2. Backing store. |
1186 __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize)); | 1186 __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize)); |
1187 | 1187 |
1188 // 3. Arguments object. | 1188 // 3. Arguments object. |
1189 __ addq(r8, Immediate(Heap::kArgumentsObjectSize)); | 1189 __ addq(r8, Immediate(Heap::kArgumentsObjectSize)); |
1190 | 1190 |
1191 // Do the allocation of all three objects in one go. | 1191 // Do the allocation of all three objects in one go. |
1192 __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT); | 1192 __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT); |
1193 | 1193 |
1194 // rax = address of new object(s) (tagged) | 1194 // rax = address of new object(s) (tagged) |
1195 // rcx = argument count (untagged) | 1195 // rcx = argument count (untagged) |
1196 // Get the arguments boilerplate from the current native context into rdi. | 1196 // Get the arguments boilerplate from the current native context into rdi. |
1197 Label has_mapped_parameters, copy; | 1197 Label has_mapped_parameters, copy; |
1198 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 1198 __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
1199 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); | 1199 __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); |
1200 __ testq(rbx, rbx); | 1200 __ testq(rbx, rbx); |
1201 __ j(not_zero, &has_mapped_parameters, Label::kNear); | 1201 __ j(not_zero, &has_mapped_parameters, Label::kNear); |
1202 | 1202 |
1203 const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX; | 1203 const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX; |
1204 __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex))); | 1204 __ movp(rdi, Operand(rdi, Context::SlotOffset(kIndex))); |
1205 __ jmp(&copy, Label::kNear); | 1205 __ jmp(&copy, Label::kNear); |
1206 | 1206 |
1207 const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX; | 1207 const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX; |
1208 __ bind(&has_mapped_parameters); | 1208 __ bind(&has_mapped_parameters); |
1209 __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex))); | 1209 __ movp(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex))); |
1210 __ bind(©); | 1210 __ bind(©); |
1211 | 1211 |
1212 // rax = address of new object (tagged) | 1212 // rax = address of new object (tagged) |
1213 // rbx = mapped parameter count (untagged) | 1213 // rbx = mapped parameter count (untagged) |
1214 // rcx = argument count (untagged) | 1214 // rcx = argument count (untagged) |
1215 // rdi = address of boilerplate object (tagged) | 1215 // rdi = address of boilerplate object (tagged) |
1216 // Copy the JS object part. | 1216 // Copy the JS object part. |
1217 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 1217 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
1218 __ movq(rdx, FieldOperand(rdi, i)); | 1218 __ movp(rdx, FieldOperand(rdi, i)); |
1219 __ movq(FieldOperand(rax, i), rdx); | 1219 __ movp(FieldOperand(rax, i), rdx); |
1220 } | 1220 } |
1221 | 1221 |
1222 // Set up the callee in-object property. | 1222 // Set up the callee in-object property. |
1223 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); | 1223 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); |
1224 __ movq(rdx, args.GetArgumentOperand(0)); | 1224 __ movp(rdx, args.GetArgumentOperand(0)); |
1225 __ movq(FieldOperand(rax, JSObject::kHeaderSize + | 1225 __ movp(FieldOperand(rax, JSObject::kHeaderSize + |
1226 Heap::kArgumentsCalleeIndex * kPointerSize), | 1226 Heap::kArgumentsCalleeIndex * kPointerSize), |
1227 rdx); | 1227 rdx); |
1228 | 1228 |
1229 // Use the length (smi tagged) and set that as an in-object property too. | 1229 // Use the length (smi tagged) and set that as an in-object property too. |
1230 // Note: rcx is tagged from here on. | 1230 // Note: rcx is tagged from here on. |
1231 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 1231 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
1232 __ Integer32ToSmi(rcx, rcx); | 1232 __ Integer32ToSmi(rcx, rcx); |
1233 __ movq(FieldOperand(rax, JSObject::kHeaderSize + | 1233 __ movp(FieldOperand(rax, JSObject::kHeaderSize + |
1234 Heap::kArgumentsLengthIndex * kPointerSize), | 1234 Heap::kArgumentsLengthIndex * kPointerSize), |
1235 rcx); | 1235 rcx); |
1236 | 1236 |
1237 // Set up the elements pointer in the allocated arguments object. | 1237 // Set up the elements pointer in the allocated arguments object. |
1238 // If we allocated a parameter map, rdi will point there, otherwise to the | 1238 // If we allocated a parameter map, rdi will point there, otherwise to the |
1239 // backing store. | 1239 // backing store. |
1240 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); | 1240 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); |
1241 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); | 1241 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi); |
1242 | 1242 |
1243 // rax = address of new object (tagged) | 1243 // rax = address of new object (tagged) |
1244 // rbx = mapped parameter count (untagged) | 1244 // rbx = mapped parameter count (untagged) |
1245 // rcx = argument count (tagged) | 1245 // rcx = argument count (tagged) |
1246 // rdi = address of parameter map or backing store (tagged) | 1246 // rdi = address of parameter map or backing store (tagged) |
1247 | 1247 |
1248 // Initialize parameter map. If there are no mapped arguments, we're done. | 1248 // Initialize parameter map. If there are no mapped arguments, we're done. |
1249 Label skip_parameter_map; | 1249 Label skip_parameter_map; |
1250 __ testq(rbx, rbx); | 1250 __ testq(rbx, rbx); |
1251 __ j(zero, &skip_parameter_map); | 1251 __ j(zero, &skip_parameter_map); |
1252 | 1252 |
1253 __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex); | 1253 __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex); |
1254 // rbx contains the untagged argument count. Add 2 and tag to write. | 1254 // rbx contains the untagged argument count. Add 2 and tag to write. |
1255 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); | 1255 __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); |
1256 __ Integer64PlusConstantToSmi(r9, rbx, 2); | 1256 __ Integer64PlusConstantToSmi(r9, rbx, 2); |
1257 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), r9); | 1257 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9); |
1258 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi); | 1258 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi); |
1259 __ lea(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); | 1259 __ lea(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); |
1260 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9); | 1260 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9); |
1261 | 1261 |
1262 // Copy the parameter slots and the holes in the arguments. | 1262 // Copy the parameter slots and the holes in the arguments. |
1263 // We need to fill in mapped_parameter_count slots. They index the context, | 1263 // We need to fill in mapped_parameter_count slots. They index the context, |
1264 // where parameters are stored in reverse order, at | 1264 // where parameters are stored in reverse order, at |
1265 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 | 1265 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 |
1266 // The mapped parameters thus need to get indices | 1266 // The mapped parameters thus need to get indices |
1267 // MIN_CONTEXT_SLOTS+parameter_count-1 .. | 1267 // MIN_CONTEXT_SLOTS+parameter_count-1 .. |
1268 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count | 1268 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count |
1269 // We loop from right to left. | 1269 // We loop from right to left. |
1270 Label parameters_loop, parameters_test; | 1270 Label parameters_loop, parameters_test; |
1271 | 1271 |
1272 // Load tagged parameter count into r9. | 1272 // Load tagged parameter count into r9. |
1273 __ Integer32ToSmi(r9, rbx); | 1273 __ Integer32ToSmi(r9, rbx); |
1274 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); | 1274 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); |
1275 __ addq(r8, args.GetArgumentOperand(2)); | 1275 __ addq(r8, args.GetArgumentOperand(2)); |
1276 __ subq(r8, r9); | 1276 __ subq(r8, r9); |
1277 __ Move(r11, factory->the_hole_value()); | 1277 __ Move(r11, factory->the_hole_value()); |
1278 __ movq(rdx, rdi); | 1278 __ movp(rdx, rdi); |
1279 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); | 1279 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); |
1280 // r9 = loop variable (tagged) | 1280 // r9 = loop variable (tagged) |
1281 // r8 = mapping index (tagged) | 1281 // r8 = mapping index (tagged) |
1282 // r11 = the hole value | 1282 // r11 = the hole value |
1283 // rdx = address of parameter map (tagged) | 1283 // rdx = address of parameter map (tagged) |
1284 // rdi = address of backing store (tagged) | 1284 // rdi = address of backing store (tagged) |
1285 __ jmp(&parameters_test, Label::kNear); | 1285 __ jmp(&parameters_test, Label::kNear); |
1286 | 1286 |
1287 __ bind(&parameters_loop); | 1287 __ bind(&parameters_loop); |
1288 __ SmiSubConstant(r9, r9, Smi::FromInt(1)); | 1288 __ SmiSubConstant(r9, r9, Smi::FromInt(1)); |
1289 __ SmiToInteger64(kScratchRegister, r9); | 1289 __ SmiToInteger64(kScratchRegister, r9); |
1290 __ movq(FieldOperand(rdx, kScratchRegister, | 1290 __ movp(FieldOperand(rdx, kScratchRegister, |
1291 times_pointer_size, | 1291 times_pointer_size, |
1292 kParameterMapHeaderSize), | 1292 kParameterMapHeaderSize), |
1293 r8); | 1293 r8); |
1294 __ movq(FieldOperand(rdi, kScratchRegister, | 1294 __ movp(FieldOperand(rdi, kScratchRegister, |
1295 times_pointer_size, | 1295 times_pointer_size, |
1296 FixedArray::kHeaderSize), | 1296 FixedArray::kHeaderSize), |
1297 r11); | 1297 r11); |
1298 __ SmiAddConstant(r8, r8, Smi::FromInt(1)); | 1298 __ SmiAddConstant(r8, r8, Smi::FromInt(1)); |
1299 __ bind(&parameters_test); | 1299 __ bind(&parameters_test); |
1300 __ SmiTest(r9); | 1300 __ SmiTest(r9); |
1301 __ j(not_zero, &parameters_loop, Label::kNear); | 1301 __ j(not_zero, &parameters_loop, Label::kNear); |
1302 | 1302 |
1303 __ bind(&skip_parameter_map); | 1303 __ bind(&skip_parameter_map); |
1304 | 1304 |
1305 // rcx = argument count (tagged) | 1305 // rcx = argument count (tagged) |
1306 // rdi = address of backing store (tagged) | 1306 // rdi = address of backing store (tagged) |
1307 // Copy arguments header and remaining slots (if there are any). | 1307 // Copy arguments header and remaining slots (if there are any). |
1308 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), | 1308 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), |
1309 factory->fixed_array_map()); | 1309 factory->fixed_array_map()); |
1310 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); | 1310 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
1311 | 1311 |
1312 Label arguments_loop, arguments_test; | 1312 Label arguments_loop, arguments_test; |
1313 __ movq(r8, rbx); | 1313 __ movp(r8, rbx); |
1314 __ movq(rdx, args.GetArgumentOperand(1)); | 1314 __ movp(rdx, args.GetArgumentOperand(1)); |
1315 // Untag rcx for the loop below. | 1315 // Untag rcx for the loop below. |
1316 __ SmiToInteger64(rcx, rcx); | 1316 __ SmiToInteger64(rcx, rcx); |
1317 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0)); | 1317 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0)); |
1318 __ subq(rdx, kScratchRegister); | 1318 __ subq(rdx, kScratchRegister); |
1319 __ jmp(&arguments_test, Label::kNear); | 1319 __ jmp(&arguments_test, Label::kNear); |
1320 | 1320 |
1321 __ bind(&arguments_loop); | 1321 __ bind(&arguments_loop); |
1322 __ subq(rdx, Immediate(kPointerSize)); | 1322 __ subq(rdx, Immediate(kPointerSize)); |
1323 __ movq(r9, Operand(rdx, 0)); | 1323 __ movp(r9, Operand(rdx, 0)); |
1324 __ movq(FieldOperand(rdi, r8, | 1324 __ movp(FieldOperand(rdi, r8, |
1325 times_pointer_size, | 1325 times_pointer_size, |
1326 FixedArray::kHeaderSize), | 1326 FixedArray::kHeaderSize), |
1327 r9); | 1327 r9); |
1328 __ addq(r8, Immediate(1)); | 1328 __ addq(r8, Immediate(1)); |
1329 | 1329 |
1330 __ bind(&arguments_test); | 1330 __ bind(&arguments_test); |
1331 __ cmpq(r8, rcx); | 1331 __ cmpq(r8, rcx); |
1332 __ j(less, &arguments_loop, Label::kNear); | 1332 __ j(less, &arguments_loop, Label::kNear); |
1333 | 1333 |
1334 // Return and remove the on-stack parameters. | 1334 // Return and remove the on-stack parameters. |
1335 __ ret(3 * kPointerSize); | 1335 __ ret(3 * kPointerSize); |
1336 | 1336 |
1337 // Do the runtime call to allocate the arguments object. | 1337 // Do the runtime call to allocate the arguments object. |
1338 // rcx = argument count (untagged) | 1338 // rcx = argument count (untagged) |
1339 __ bind(&runtime); | 1339 __ bind(&runtime); |
1340 __ Integer32ToSmi(rcx, rcx); | 1340 __ Integer32ToSmi(rcx, rcx); |
1341 __ movq(args.GetArgumentOperand(2), rcx); // Patch argument count. | 1341 __ movp(args.GetArgumentOperand(2), rcx); // Patch argument count. |
1342 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 1342 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
1343 } | 1343 } |
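
Note: the single __ Allocate above covers three objects at once; the size computed in r8 can be restated in C++ as a sketch (mirroring the rbx/rcx/r8 arithmetic, names hypothetical):

    int NewNonStrictFastSize(int param_count, int arg_count) {
      int mapped = param_count < arg_count ? param_count : arg_count;
      // 1. Parameter map: header plus two extra words (context, backing
      //    store) plus one slot per mapped parameter; skipped if none.
      int map_size = mapped == 0
          ? 0
          : FixedArray::kHeaderSize + (2 + mapped) * kPointerSize;
      // 2. Backing store: one slot per actual argument.
      int store_size = FixedArray::kHeaderSize + arg_count * kPointerSize;
      // 3. The arguments object itself.
      return map_size + store_size + Heap::kArgumentsObjectSize;
    }
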
1344 | 1344 |
1345 | 1345 |
1346 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { | 1346 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { |
1347 // rsp[0] : return address | 1347 // rsp[0] : return address |
1348 // rsp[8] : number of parameters | 1348 // rsp[8] : number of parameters |
1349 // rsp[16] : receiver displacement | 1349 // rsp[16] : receiver displacement |
1350 // rsp[24] : function | 1350 // rsp[24] : function |
1351 | 1351 |
1352 // Check if the calling frame is an arguments adaptor frame. | 1352 // Check if the calling frame is an arguments adaptor frame. |
1353 Label runtime; | 1353 Label runtime; |
1354 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 1354 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
1355 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 1355 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
1356 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 1356 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
1357 __ j(not_equal, &runtime); | 1357 __ j(not_equal, &runtime); |
1358 | 1358 |
1359 // Patch the arguments.length and the parameters pointer. | 1359 // Patch the arguments.length and the parameters pointer. |
1360 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1360 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1361 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1361 __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
1362 __ movq(args.GetArgumentOperand(2), rcx); | 1362 __ movp(args.GetArgumentOperand(2), rcx); |
1363 __ SmiToInteger64(rcx, rcx); | 1363 __ SmiToInteger64(rcx, rcx); |
1364 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, | 1364 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, |
1365 StandardFrameConstants::kCallerSPOffset)); | 1365 StandardFrameConstants::kCallerSPOffset)); |
1366 __ movq(args.GetArgumentOperand(1), rdx); | 1366 __ movp(args.GetArgumentOperand(1), rdx); |
1367 | 1367 |
1368 __ bind(&runtime); | 1368 __ bind(&runtime); |
1369 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 1369 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
1370 } | 1370 } |
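
Note: all of these slow paths detect an arguments adaptor frame the same way: a regular frame keeps a context pointer at kContextOffset, while an adaptor frame stores a smi sentinel there. In C++ terms (sketch; helper names assumed):

    bool IsArgumentsAdaptorFrame(Address caller_fp) {
      Object* marker = Memory::Object_at(
          caller_fp + StandardFrameConstants::kContextOffset);
      return marker == Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR);
    }
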
1371 | 1371 |
1372 | 1372 |
1373 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 1373 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
1374 // rsp[0] : return address | 1374 // rsp[0] : return address |
1375 // rsp[8] : number of parameters | 1375 // rsp[8] : number of parameters |
1376 // rsp[16] : receiver displacement | 1376 // rsp[16] : receiver displacement |
1377 // rsp[24] : function | 1377 // rsp[24] : function |
1378 | 1378 |
1379 // Check if the calling frame is an arguments adaptor frame. | 1379 // Check if the calling frame is an arguments adaptor frame. |
1380 Label adaptor_frame, try_allocate, runtime; | 1380 Label adaptor_frame, try_allocate, runtime; |
1381 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 1381 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
1382 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 1382 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
1383 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 1383 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
1384 __ j(equal, &adaptor_frame); | 1384 __ j(equal, &adaptor_frame); |
1385 | 1385 |
1386 // Get the length from the frame. | 1386 // Get the length from the frame. |
1387 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1387 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1388 __ movq(rcx, args.GetArgumentOperand(2)); | 1388 __ movp(rcx, args.GetArgumentOperand(2)); |
1389 __ SmiToInteger64(rcx, rcx); | 1389 __ SmiToInteger64(rcx, rcx); |
1390 __ jmp(&try_allocate); | 1390 __ jmp(&try_allocate); |
1391 | 1391 |
1392 // Patch the arguments.length and the parameters pointer. | 1392 // Patch the arguments.length and the parameters pointer. |
1393 __ bind(&adaptor_frame); | 1393 __ bind(&adaptor_frame); |
1394 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1394 __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
1395 __ movq(args.GetArgumentOperand(2), rcx); | 1395 __ movp(args.GetArgumentOperand(2), rcx); |
1396 __ SmiToInteger64(rcx, rcx); | 1396 __ SmiToInteger64(rcx, rcx); |
1397 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, | 1397 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, |
1398 StandardFrameConstants::kCallerSPOffset)); | 1398 StandardFrameConstants::kCallerSPOffset)); |
1399 __ movq(args.GetArgumentOperand(1), rdx); | 1399 __ movp(args.GetArgumentOperand(1), rdx); |
1400 | 1400 |
1401 // Try the new space allocation. Start out with computing the size of | 1401 // Try the new space allocation. Start out with computing the size of |
1402 // the arguments object and the elements array. | 1402 // the arguments object and the elements array. |
1403 Label add_arguments_object; | 1403 Label add_arguments_object; |
1404 __ bind(&try_allocate); | 1404 __ bind(&try_allocate); |
1405 __ testq(rcx, rcx); | 1405 __ testq(rcx, rcx); |
1406 __ j(zero, &add_arguments_object, Label::kNear); | 1406 __ j(zero, &add_arguments_object, Label::kNear); |
1407 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); | 1407 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); |
1408 __ bind(&add_arguments_object); | 1408 __ bind(&add_arguments_object); |
1409 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); | 1409 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); |
1410 | 1410 |
1411 // Do the allocation of both objects in one go. | 1411 // Do the allocation of both objects in one go. |
1412 __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); | 1412 __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); |
1413 | 1413 |
1414 // Get the arguments boilerplate from the current native context. | 1414 // Get the arguments boilerplate from the current native context. |
1415 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 1415 __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
1416 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); | 1416 __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); |
1417 const int offset = | 1417 const int offset = |
1418 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); | 1418 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); |
1419 __ movq(rdi, Operand(rdi, offset)); | 1419 __ movp(rdi, Operand(rdi, offset)); |
1420 | 1420 |
1421 // Copy the JS object part. | 1421 // Copy the JS object part. |
1422 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 1422 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
1423 __ movq(rbx, FieldOperand(rdi, i)); | 1423 __ movp(rbx, FieldOperand(rdi, i)); |
1424 __ movq(FieldOperand(rax, i), rbx); | 1424 __ movp(FieldOperand(rax, i), rbx); |
1425 } | 1425 } |
1426 | 1426 |
1427 // Get the length (smi tagged) and set that as an in-object property too. | 1427 // Get the length (smi tagged) and set that as an in-object property too. |
1428 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 1428 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
1429 __ movq(rcx, args.GetArgumentOperand(2)); | 1429 __ movp(rcx, args.GetArgumentOperand(2)); |
1430 __ movq(FieldOperand(rax, JSObject::kHeaderSize + | 1430 __ movp(FieldOperand(rax, JSObject::kHeaderSize + |
1431 Heap::kArgumentsLengthIndex * kPointerSize), | 1431 Heap::kArgumentsLengthIndex * kPointerSize), |
1432 rcx); | 1432 rcx); |
1433 | 1433 |
1434 // If there are no actual arguments, we're done. | 1434 // If there are no actual arguments, we're done. |
1435 Label done; | 1435 Label done; |
1436 __ testq(rcx, rcx); | 1436 __ testq(rcx, rcx); |
1437 __ j(zero, &done); | 1437 __ j(zero, &done); |
1438 | 1438 |
1439 // Get the parameters pointer from the stack. | 1439 // Get the parameters pointer from the stack. |
1440 __ movq(rdx, args.GetArgumentOperand(1)); | 1440 __ movp(rdx, args.GetArgumentOperand(1)); |
1441 | 1441 |
1442 // Set up the elements pointer in the allocated arguments object and | 1442 // Set up the elements pointer in the allocated arguments object and |
1443 // initialize the header in the elements fixed array. | 1443 // initialize the header in the elements fixed array. |
1444 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); | 1444 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); |
1445 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); | 1445 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi); |
1446 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); | 1446 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); |
1447 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); | 1447 __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); |
1448 | 1448 |
1449 | 1449 |
1450 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); | 1450 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
1451 // Untag the length for the loop below. | 1451 // Untag the length for the loop below. |
1452 __ SmiToInteger64(rcx, rcx); | 1452 __ SmiToInteger64(rcx, rcx); |
1453 | 1453 |
1454 // Copy the fixed array slots. | 1454 // Copy the fixed array slots. |
1455 Label loop; | 1455 Label loop; |
1456 __ bind(&loop); | 1456 __ bind(&loop); |
1457 __ movq(rbx, Operand(rdx, -1 * kPointerSize)); // Skip receiver. | 1457 __ movp(rbx, Operand(rdx, -1 * kPointerSize)); // Skip receiver. |
1458 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), rbx); | 1458 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize), rbx); |
1459 __ addq(rdi, Immediate(kPointerSize)); | 1459 __ addq(rdi, Immediate(kPointerSize)); |
1460 __ subq(rdx, Immediate(kPointerSize)); | 1460 __ subq(rdx, Immediate(kPointerSize)); |
1461 __ decq(rcx); | 1461 __ decq(rcx); |
1462 __ j(not_zero, &loop); | 1462 __ j(not_zero, &loop); |
1463 | 1463 |
1464 // Return and remove the on-stack parameters. | 1464 // Return and remove the on-stack parameters. |
1465 __ bind(&done); | 1465 __ bind(&done); |
1466 __ ret(3 * kPointerSize); | 1466 __ ret(3 * kPointerSize); |
1467 | 1467 |
1468 // Do the runtime call to allocate the arguments object. | 1468 // Do the runtime call to allocate the arguments object. |
(...skipping 32 matching lines...)
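
Note: the strict-mode stub above sizes only two objects, since strict arguments never alias formal parameters and need no parameter map. A sketch of the rcx computation at try_allocate (name hypothetical):

    int NewStrictSize(int arg_count) {
      // Elements array is skipped entirely when there are no arguments.
      int store_size = arg_count == 0
          ? 0
          : FixedArray::kHeaderSize + arg_count * kPointerSize;
      return store_size + Heap::kArgumentsObjectSizeStrict;
    }
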
1501 Isolate* isolate = masm->isolate(); | 1501 Isolate* isolate = masm->isolate(); |
1502 ExternalReference address_of_regexp_stack_memory_address = | 1502 ExternalReference address_of_regexp_stack_memory_address = |
1503 ExternalReference::address_of_regexp_stack_memory_address(isolate); | 1503 ExternalReference::address_of_regexp_stack_memory_address(isolate); |
1504 ExternalReference address_of_regexp_stack_memory_size = | 1504 ExternalReference address_of_regexp_stack_memory_size = |
1505 ExternalReference::address_of_regexp_stack_memory_size(isolate); | 1505 ExternalReference::address_of_regexp_stack_memory_size(isolate); |
1506 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); | 1506 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); |
1507 __ testq(kScratchRegister, kScratchRegister); | 1507 __ testq(kScratchRegister, kScratchRegister); |
1508 __ j(zero, &runtime); | 1508 __ j(zero, &runtime); |
1509 | 1509 |
1510 // Check that the first argument is a JSRegExp object. | 1510 // Check that the first argument is a JSRegExp object. |
1511 __ movq(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); | 1511 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); |
1512 __ JumpIfSmi(rax, &runtime); | 1512 __ JumpIfSmi(rax, &runtime); |
1513 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); | 1513 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); |
1514 __ j(not_equal, &runtime); | 1514 __ j(not_equal, &runtime); |
1515 | 1515 |
1516 // Check that the RegExp has been compiled (data contains a fixed array). | 1516 // Check that the RegExp has been compiled (data contains a fixed array). |
1517 __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset)); | 1517 __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset)); |
1518 if (FLAG_debug_code) { | 1518 if (FLAG_debug_code) { |
1519 Condition is_smi = masm->CheckSmi(rax); | 1519 Condition is_smi = masm->CheckSmi(rax); |
1520 __ Check(NegateCondition(is_smi), | 1520 __ Check(NegateCondition(is_smi), |
1521 kUnexpectedTypeForRegExpDataFixedArrayExpected); | 1521 kUnexpectedTypeForRegExpDataFixedArrayExpected); |
1522 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister); | 1522 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister); |
1523 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); | 1523 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); |
1524 } | 1524 } |
1525 | 1525 |
1526 // rax: RegExp data (FixedArray) | 1526 // rax: RegExp data (FixedArray) |
1527 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. | 1527 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. |
1528 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset)); | 1528 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset)); |
1529 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); | 1529 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); |
1530 __ j(not_equal, &runtime); | 1530 __ j(not_equal, &runtime); |
1531 | 1531 |
1532 // rax: RegExp data (FixedArray) | 1532 // rax: RegExp data (FixedArray) |
1533 // Check that the number of captures fits in the static offsets vector buffer. | 1533 // Check that the number of captures fits in the static offsets vector buffer. |
1534 __ SmiToInteger32(rdx, | 1534 __ SmiToInteger32(rdx, |
1535 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset)); | 1535 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset)); |
1536 // Check (number_of_captures + 1) * 2 <= offsets vector size | 1536 // Check (number_of_captures + 1) * 2 <= offsets vector size |
1537 // Or number_of_captures <= offsets vector size / 2 - 1 | 1537 // Or number_of_captures <= offsets vector size / 2 - 1 |
1538 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2); | 1538 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2); |
1539 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1)); | 1539 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1)); |
1540 __ j(above, &runtime); | 1540 __ j(above, &runtime); |
1541 | 1541 |
1542 // Reset offset for possibly sliced string. | 1542 // Reset offset for possibly sliced string. |
1543 __ Set(r14, 0); | 1543 __ Set(r14, 0); |
1544 __ movq(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); | 1544 __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); |
1545 __ JumpIfSmi(rdi, &runtime); | 1545 __ JumpIfSmi(rdi, &runtime); |
1546 __ movq(r15, rdi); // Make a copy of the original subject string. | 1546 __ movp(r15, rdi); // Make a copy of the original subject string. |
1547 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); | 1547 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
1548 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 1548 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
1549 // rax: RegExp data (FixedArray) | 1549 // rax: RegExp data (FixedArray) |
1550 // rdi: subject string | 1550 // rdi: subject string |
1551 // r15: subject string | 1551 // r15: subject string |
1552 // Handle subject string according to its encoding and representation: | 1552 // Handle subject string according to its encoding and representation: |
1553 // (1) Sequential two byte? If yes, go to (9). | 1553 // (1) Sequential two byte? If yes, go to (9). |
1554 // (2) Sequential one byte? If yes, go to (6). | 1554 // (2) Sequential one byte? If yes, go to (6). |
1555 // (3) Anything but sequential or cons? If yes, go to (7). | 1555 // (3) Anything but sequential or cons? If yes, go to (7). |
1556 // (4) Cons string. If the string is flat, replace subject with first string. | 1556 // (4) Cons string. If the string is flat, replace subject with first string. |
1557 // Otherwise bailout. | 1557 // Otherwise bailout. |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1597 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag); | 1597 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag); |
1598 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag); | 1598 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag); |
1599 __ cmpq(rbx, Immediate(kExternalStringTag)); | 1599 __ cmpq(rbx, Immediate(kExternalStringTag)); |
1600 __ j(greater_equal, &not_seq_nor_cons); // Go to (7). | 1600 __ j(greater_equal, &not_seq_nor_cons); // Go to (7). |
1601 | 1601 |
1602 // (4) Cons string. Check that it's flat. | 1602 // (4) Cons string. Check that it's flat. |
1603 // Replace subject with first string and reload instance type. | 1603 // Replace subject with first string and reload instance type. |
1604 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset), | 1604 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset), |
1605 Heap::kempty_stringRootIndex); | 1605 Heap::kempty_stringRootIndex); |
1606 __ j(not_equal, &runtime); | 1606 __ j(not_equal, &runtime); |
1607 __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset)); | 1607 __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset)); |
1608 __ bind(&check_underlying); | 1608 __ bind(&check_underlying); |
1609 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); | 1609 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
1610 __ movq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 1610 __ movp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
1611 | 1611 |
1612 // (5a) Is subject sequential two byte? If yes, go to (9). | 1612 // (5a) Is subject sequential two byte? If yes, go to (9). |
1613 __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask)); | 1613 __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask)); |
1614 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); | 1614 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); |
1615 __ j(zero, &seq_two_byte_string); // Go to (9). | 1615 __ j(zero, &seq_two_byte_string); // Go to (9). |
1616 // (5b) Is subject external? If yes, go to (8). | 1616 // (5b) Is subject external? If yes, go to (8). |
1617 __ testb(rbx, Immediate(kStringRepresentationMask)); | 1617 __ testb(rbx, Immediate(kStringRepresentationMask)); |
1618 // The underlying external string is never a short external string. | 1618 // The underlying external string is never a short external string. |
1619 STATIC_CHECK(ExternalString::kMaxShortLength < ConsString::kMinLength); | 1619 STATIC_CHECK(ExternalString::kMaxShortLength < ConsString::kMinLength); |
1620 STATIC_CHECK(ExternalString::kMaxShortLength < SlicedString::kMinLength); | 1620 STATIC_CHECK(ExternalString::kMaxShortLength < SlicedString::kMinLength); |
1621 __ j(not_zero, &external_string); // Go to (8) | 1621 __ j(not_zero, &external_string); // Go to (8) |
1622 | 1622 |
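The testb/j pairs in steps (5a) and (5b) classify the string by masking its instance type. A hedged C++ sketch of step (5a); the mask and tag values mirror V8's objects.h of this era but should be treated as illustrative:

    #include <cstdint>

    constexpr uint8_t kStringRepresentationMask = 0x03;  // seq/cons/ext/sliced
    constexpr uint8_t kStringEncodingMask = 0x04;        // one byte vs two byte
    constexpr uint8_t kSeqStringTag = 0x00;
    constexpr uint8_t kTwoByteStringTag = 0x00;

    // Mirrors testb(rbx, representation | encoding) followed by j(zero, ...):
    // both tags being zero lets a single masked test answer the question.
    bool IsSequentialTwoByte(uint8_t instance_type) {
      return (instance_type &
              (kStringRepresentationMask | kStringEncodingMask)) ==
             (kSeqStringTag | kTwoByteStringTag);
    }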
1623 // (6) One byte sequential. Load regexp code for one byte. | 1623 // (6) One byte sequential. Load regexp code for one byte. |
1624 __ bind(&seq_one_byte_string); | 1624 __ bind(&seq_one_byte_string); |
1625 // rax: RegExp data (FixedArray) | 1625 // rax: RegExp data (FixedArray) |
1626 __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset)); | 1626 __ movp(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset)); |
1627 __ Set(rcx, 1); // Type is one byte. | 1627 __ Set(rcx, 1); // Type is one byte. |
1628 | 1628 |
1629 // (E) Carry on. String handling is done. | 1629 // (E) Carry on. String handling is done. |
1630 __ bind(&check_code); | 1630 __ bind(&check_code); |
1631 // r11: irregexp code | 1631 // r11: irregexp code |
1632 // Check that the irregexp code has been generated for the actual string | 1632 // Check that the irregexp code has been generated for the actual string |
1633 // encoding. If it has, the field contains a code object; otherwise it | 1633 // encoding. If it has, the field contains a code object; otherwise it |
1634 // contains a smi (code flushing support). | 1634 // contains a smi (code flushing support). |
1635 __ JumpIfSmi(r11, &runtime); | 1635 __ JumpIfSmi(r11, &runtime); |
1636 | 1636 |
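The JumpIfSmi guard above is just V8's pointer-tag test: if code flushing has replaced the compiled code with a smi, the stub must fall back to the runtime. A minimal sketch of the tag test, assuming V8's standard tagging (smi tag 0 in the low bit, heap objects tagged 1):

    #include <cstdint>

    // A tagged word is a smi iff its low bit is clear; JumpIfSmi branches
    // on exactly this condition.
    bool IsSmi(uint64_t tagged_value) {
      return (tagged_value & 1) == 0;  // kSmiTag == 0, kSmiTagMask == 1
    }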
1637 // rdi: sequential subject string (or look-alike, external string) | 1637 // rdi: sequential subject string (or look-alike, external string) |
1638 // r15: original subject string | 1638 // r15: original subject string |
1639 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte); | 1639 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte); |
1640 // r11: code | 1640 // r11: code |
1641 // Load used arguments before starting to push arguments for call to native | 1641 // Load used arguments before starting to push arguments for call to native |
1642 // RegExp code, to avoid dealing with a changing stack height. | 1642 // RegExp code, to avoid dealing with a changing stack height. |
1643 // We have to use r15 instead of rdi to load the length because rdi might | 1643 // We have to use r15 instead of rdi to load the length because rdi might |
1644 // have been only made to look like a sequential string when it actually | 1644 // have been only made to look like a sequential string when it actually |
1645 // is an external string. | 1645 // is an external string. |
1646 __ movq(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX)); | 1646 __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX)); |
1647 __ JumpIfNotSmi(rbx, &runtime); | 1647 __ JumpIfNotSmi(rbx, &runtime); |
1648 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); | 1648 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); |
1649 __ j(above_equal, &runtime); | 1649 __ j(above_equal, &runtime); |
1650 __ SmiToInteger64(rbx, rbx); | 1650 __ SmiToInteger64(rbx, rbx); |
1651 | 1651 |
1652 // rdi: subject string | 1652 // rdi: subject string |
1653 // rbx: previous index | 1653 // rbx: previous index |
1654 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte); | 1654 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte); |
1655 // r11: code | 1655 // r11: code |
1656 // All checks done. Now push arguments for native regexp code. | 1656 // All checks done. Now push arguments for native regexp code. |
(...skipping 11 matching lines...)
1668 ExternalReference::isolate_address(masm->isolate())); | 1668 ExternalReference::isolate_address(masm->isolate())); |
1669 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), | 1669 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), |
1670 kScratchRegister); | 1670 kScratchRegister); |
1671 | 1671 |
1672 // Argument 8: Indicate that this is a direct call from JavaScript. | 1672 // Argument 8: Indicate that this is a direct call from JavaScript. |
1673 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), | 1673 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), |
1674 Immediate(1)); | 1674 Immediate(1)); |
1675 | 1675 |
1676 // Argument 7: Start (high end) of backtracking stack memory area. | 1676 // Argument 7: Start (high end) of backtracking stack memory area. |
1677 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); | 1677 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); |
1678 __ movq(r9, Operand(kScratchRegister, 0)); | 1678 __ movp(r9, Operand(kScratchRegister, 0)); |
1679 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); | 1679 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); |
1680 __ addq(r9, Operand(kScratchRegister, 0)); | 1680 __ addq(r9, Operand(kScratchRegister, 0)); |
1681 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); | 1681 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); |
1682 | 1682 |
1683 // Argument 6: Set the number of capture registers to zero to force global | 1683 // Argument 6: Set the number of capture registers to zero to force global |
1684 // regexps to behave as non-global. This does not affect non-global regexps. | 1684 // regexps to behave as non-global. This does not affect non-global regexps. |
1685 // Argument 6 is passed in r9 on Linux and on the stack on Windows. | 1685 // Argument 6 is passed in r9 on Linux and on the stack on Windows. |
1686 #ifdef _WIN64 | 1686 #ifdef _WIN64 |
1687 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), | 1687 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), |
1688 Immediate(0)); | 1688 Immediate(0)); |
(...skipping 10 matching lines...)
1699 #endif | 1699 #endif |
1700 | 1700 |
1701 // rdi: subject string | 1701 // rdi: subject string |
1702 // rbx: previous index | 1702 // rbx: previous index |
1703 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte); | 1703 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte); |
1704 // r11: code | 1704 // r11: code |
1705 // r14: slice offset | 1705 // r14: slice offset |
1706 // r15: original subject string | 1706 // r15: original subject string |
1707 | 1707 |
1708 // Argument 2: Previous index. | 1708 // Argument 2: Previous index. |
1709 __ movq(arg_reg_2, rbx); | 1709 __ movp(arg_reg_2, rbx); |
1710 | 1710 |
1711 // Argument 4: End of string data | 1711 // Argument 4: End of string data |
1712 // Argument 3: Start of string data | 1712 // Argument 3: Start of string data |
1713 Label setup_two_byte, setup_rest, got_length, length_not_from_slice; | 1713 Label setup_two_byte, setup_rest, got_length, length_not_from_slice; |
1714 // Prepare start and end index of the input. | 1714 // Prepare start and end index of the input. |
1715 // Load the length from the original sliced string if that is the case. | 1715 // Load the length from the original sliced string if that is the case. |
1716 __ addq(rbx, r14); | 1716 __ addq(rbx, r14); |
1717 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset)); | 1717 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset)); |
1718 __ addq(r14, arg_reg_3); // Using arg3 as scratch. | 1718 __ addq(r14, arg_reg_3); // Using arg3 as scratch. |
1719 | 1719 |
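The three instructions above fold the slice offset into both indices, so the regexp code can work directly on the parent string's characters. A worked example with illustrative values (not V8 code):

    // Sliced subject at offset 5 into its parent, subject length 10,
    // previous match ended at index 3.
    void SliceIndexExample() {
      int slice_offset = 5;     // r14
      int previous_index = 3;   // rbx, after SmiToInteger64
      int subject_length = 10;  // loaded from r15's length field
      int start = previous_index + slice_offset;  // addq(rbx, r14)       -> 8
      int end = slice_offset + subject_length;    // addq(r14, arg_reg_3) -> 15
      (void)start;
      (void)end;
    }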
(...skipping 12 matching lines...)
1732 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize)); | 1732 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize)); |
1733 __ lea(arg_reg_3, | 1733 __ lea(arg_reg_3, |
1734 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize)); | 1734 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize)); |
1735 __ bind(&setup_rest); | 1735 __ bind(&setup_rest); |
1736 | 1736 |
1737 // Argument 1: Original subject string. | 1737 // Argument 1: Original subject string. |
1738 // The original subject is in the previous stack frame. Therefore we have to | 1738 // The original subject is in the previous stack frame. Therefore we have to |
1739 // use rbp, which points exactly to one pointer size below the previous rsp. | 1739 // use rbp, which points exactly to one pointer size below the previous rsp. |
1740 // (Because creating a new stack frame pushes the previous rbp onto the stack | 1740 // (Because creating a new stack frame pushes the previous rbp onto the stack |
1741 // and thereby moves up rsp by one kPointerSize.) | 1741 // and thereby moves up rsp by one kPointerSize.) |
1742 __ movq(arg_reg_1, r15); | 1742 __ movp(arg_reg_1, r15); |
1743 | 1743 |
1744 // Locate the code entry and call it. | 1744 // Locate the code entry and call it. |
1745 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 1745 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
1746 __ call(r11); | 1746 __ call(r11); |
1747 | 1747 |
1748 __ LeaveApiExitFrame(true); | 1748 __ LeaveApiExitFrame(true); |
1749 | 1749 |
1750 // Check the result. | 1750 // Check the result. |
1751 Label success; | 1751 Label success; |
1752 Label exception; | 1752 Label exception; |
1753 __ cmpl(rax, Immediate(1)); | 1753 __ cmpl(rax, Immediate(1)); |
1754 // We expect exactly one result since we force the called regexp to behave | 1754 // We expect exactly one result since we force the called regexp to behave |
1755 // as non-global. | 1755 // as non-global. |
1756 __ j(equal, &success, Label::kNear); | 1756 __ j(equal, &success, Label::kNear); |
1757 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); | 1757 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); |
1758 __ j(equal, &exception); | 1758 __ j(equal, &exception); |
1759 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); | 1759 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); |
1760 // If none of the above, it can only be retry. | 1760 // If none of the above, it can only be retry. |
1761 // Handle that in the runtime system. | 1761 // Handle that in the runtime system. |
1762 __ j(not_equal, &runtime); | 1762 __ j(not_equal, &runtime); |
1763 | 1763 |
1764 // For failure return null. | 1764 // For failure return null. |
1765 __ LoadRoot(rax, Heap::kNullValueRootIndex); | 1765 __ LoadRoot(rax, Heap::kNullValueRootIndex); |
1766 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); | 1766 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); |
1767 | 1767 |
1768 // Load RegExp data. | 1768 // Load RegExp data. |
1769 __ bind(&success); | 1769 __ bind(&success); |
1770 __ movq(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); | 1770 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); |
1771 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); | 1771 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); |
1772 __ SmiToInteger32(rax, | 1772 __ SmiToInteger32(rax, |
1773 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); | 1773 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); |
1774 // Calculate number of capture registers (number_of_captures + 1) * 2. | 1774 // Calculate number of capture registers (number_of_captures + 1) * 2. |
1775 __ leal(rdx, Operand(rax, rax, times_1, 2)); | 1775 __ leal(rdx, Operand(rax, rax, times_1, 2)); |
1776 | 1776 |
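The leal above is the usual x86 multiply-add idiom: rdx = rax + rax*1 + 2, i.e. 2 * number_of_captures + 2. In plain C++ terms:

    // One (start, end) register pair per capture group, plus one pair for
    // the whole match; matches leal(rdx, Operand(rax, rax, times_1, 2)).
    int CaptureRegisters(int number_of_captures) {
      return (number_of_captures + 1) * 2;
    }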
1777 // rdx: Number of capture registers | 1777 // rdx: Number of capture registers |
1778 // Check that the fourth object is a JSArray object. | 1778 // Check that the fourth object is a JSArray object. |
1779 __ movq(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX)); | 1779 __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX)); |
1780 __ JumpIfSmi(r15, &runtime); | 1780 __ JumpIfSmi(r15, &runtime); |
1781 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister); | 1781 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister); |
1782 __ j(not_equal, &runtime); | 1782 __ j(not_equal, &runtime); |
1783 // Check that the JSArray is in fast case. | 1783 // Check that the JSArray is in fast case. |
1784 __ movq(rbx, FieldOperand(r15, JSArray::kElementsOffset)); | 1784 __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset)); |
1785 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); | 1785 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset)); |
1786 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex); | 1786 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex); |
1787 __ j(not_equal, &runtime); | 1787 __ j(not_equal, &runtime); |
1788 // Check that the last match info has space for the capture registers and the | 1788 // Check that the last match info has space for the capture registers and the |
1789 // additional information. Ensure no overflow in add. | 1789 // additional information. Ensure no overflow in add. |
1790 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); | 1790 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); |
1791 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); | 1791 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); |
1792 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead)); | 1792 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead)); |
1793 __ cmpl(rdx, rax); | 1793 __ cmpl(rdx, rax); |
1794 __ j(greater, &runtime); | 1794 __ j(greater, &runtime); |
1795 | 1795 |
1796 // rbx: last_match_info backing store (FixedArray) | 1796 // rbx: last_match_info backing store (FixedArray) |
1797 // rdx: number of capture registers | 1797 // rdx: number of capture registers |
1798 // Store the capture count. | 1798 // Store the capture count. |
1799 __ Integer32ToSmi(kScratchRegister, rdx); | 1799 __ Integer32ToSmi(kScratchRegister, rdx); |
1800 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset), | 1800 __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset), |
1801 kScratchRegister); | 1801 kScratchRegister); |
1802 // Store last subject and last input. | 1802 // Store last subject and last input. |
1803 __ movq(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); | 1803 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); |
1804 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); | 1804 __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); |
1805 __ movq(rcx, rax); | 1805 __ movp(rcx, rax); |
1806 __ RecordWriteField(rbx, | 1806 __ RecordWriteField(rbx, |
1807 RegExpImpl::kLastSubjectOffset, | 1807 RegExpImpl::kLastSubjectOffset, |
1808 rax, | 1808 rax, |
1809 rdi, | 1809 rdi, |
1810 kDontSaveFPRegs); | 1810 kDontSaveFPRegs); |
1811 __ movq(rax, rcx); | 1811 __ movp(rax, rcx); |
1812 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); | 1812 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); |
1813 __ RecordWriteField(rbx, | 1813 __ RecordWriteField(rbx, |
1814 RegExpImpl::kLastInputOffset, | 1814 RegExpImpl::kLastInputOffset, |
1815 rax, | 1815 rax, |
1816 rdi, | 1816 rdi, |
1817 kDontSaveFPRegs); | 1817 kDontSaveFPRegs); |
1818 | 1818 |
1819 // Get the static offsets vector filled by the native regexp code. | 1819 // Get the static offsets vector filled by the native regexp code. |
1820 __ LoadAddress(rcx, | 1820 __ LoadAddress(rcx, |
1821 ExternalReference::address_of_static_offsets_vector(isolate)); | 1821 ExternalReference::address_of_static_offsets_vector(isolate)); |
1822 | 1822 |
1823 // rbx: last_match_info backing store (FixedArray) | 1823 // rbx: last_match_info backing store (FixedArray) |
1824 // rcx: offsets vector | 1824 // rcx: offsets vector |
1825 // rdx: number of capture registers | 1825 // rdx: number of capture registers |
1826 Label next_capture, done; | 1826 Label next_capture, done; |
1827 // Capture register counter starts from number of capture registers and | 1827 // Capture register counter starts from number of capture registers and |
1828 // counts down until wrapping after zero. | 1828 // counts down until wrapping after zero. |
1829 __ bind(&next_capture); | 1829 __ bind(&next_capture); |
1830 __ subq(rdx, Immediate(1)); | 1830 __ subq(rdx, Immediate(1)); |
1831 __ j(negative, &done, Label::kNear); | 1831 __ j(negative, &done, Label::kNear); |
1832 // Read the value from the static offsets vector buffer and make it a smi. | 1832 // Read the value from the static offsets vector buffer and make it a smi. |
1833 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); | 1833 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); |
1834 __ Integer32ToSmi(rdi, rdi); | 1834 __ Integer32ToSmi(rdi, rdi); |
1835 // Store the smi value in the last match info. | 1835 // Store the smi value in the last match info. |
1836 __ movq(FieldOperand(rbx, | 1836 __ movp(FieldOperand(rbx, |
1837 rdx, | 1837 rdx, |
1838 times_pointer_size, | 1838 times_pointer_size, |
1839 RegExpImpl::kFirstCaptureOffset), | 1839 RegExpImpl::kFirstCaptureOffset), |
1840 rdi); | 1840 rdi); |
1841 __ jmp(&next_capture); | 1841 __ jmp(&next_capture); |
1842 __ bind(&done); | 1842 __ bind(&done); |
1843 | 1843 |
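A C-level rendering of the copy loop above may help; the array types stand in for the FixedArray backing store and the static offsets vector, and the smi encoding shown (payload in the upper 32 bits) is the x64 scheme of this era:

    #include <cstdint>

    // Integer32ToSmi on x64: place the 32-bit payload in the upper half.
    int64_t ToSmi(int32_t value) {
      return static_cast<int64_t>(static_cast<uint64_t>(value) << 32);
    }

    // The loop counts down from the register count and stops after zero.
    void CopyCaptures(int64_t* last_match_info, const int32_t* offsets,
                      int capture_register_count) {
      for (int i = capture_register_count - 1; i >= 0; i--) {
        last_match_info[i] = ToSmi(offsets[i]);  // movl, Integer32ToSmi, movp
      }
    }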
1844 // Return last match info. | 1844 // Return last match info. |
1845 __ movq(rax, r15); | 1845 __ movp(rax, r15); |
1846 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); | 1846 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); |
1847 | 1847 |
1848 __ bind(&exception); | 1848 __ bind(&exception); |
1849 // Result must now be exception. If there is no pending exception already, a | 1849 // Result must now be exception. If there is no pending exception already, a |
1850 // stack overflow (on the backtrack stack) was detected in RegExp code, but | 1850 // stack overflow (on the backtrack stack) was detected in RegExp code, but |
1851 // the exception has not been created yet. Handle that in the runtime system. | 1851 // the exception has not been created yet. Handle that in the runtime system. |
1852 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | 1852 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
1853 ExternalReference pending_exception_address( | 1853 ExternalReference pending_exception_address( |
1854 Isolate::kPendingExceptionAddress, isolate); | 1854 Isolate::kPendingExceptionAddress, isolate); |
1855 Operand pending_exception_operand = | 1855 Operand pending_exception_operand = |
1856 masm->ExternalOperand(pending_exception_address, rbx); | 1856 masm->ExternalOperand(pending_exception_address, rbx); |
1857 __ movq(rax, pending_exception_operand); | 1857 __ movp(rax, pending_exception_operand); |
1858 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); | 1858 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); |
1859 __ cmpq(rax, rdx); | 1859 __ cmpq(rax, rdx); |
1860 __ j(equal, &runtime); | 1860 __ j(equal, &runtime); |
1861 __ movq(pending_exception_operand, rdx); | 1861 __ movp(pending_exception_operand, rdx); |
1862 | 1862 |
1863 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); | 1863 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); |
1864 Label termination_exception; | 1864 Label termination_exception; |
1865 __ j(equal, &termination_exception, Label::kNear); | 1865 __ j(equal, &termination_exception, Label::kNear); |
1866 __ Throw(rax); | 1866 __ Throw(rax); |
1867 | 1867 |
1868 __ bind(&termination_exception); | 1868 __ bind(&termination_exception); |
1869 __ ThrowUncatchable(rax); | 1869 __ ThrowUncatchable(rax); |
1870 | 1870 |
1871 // Do the runtime call to execute the regexp. | 1871 // Do the runtime call to execute the regexp. |
1872 __ bind(&runtime); | 1872 __ bind(&runtime); |
1873 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 1873 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); |
1874 | 1874 |
1875 // Deferred code for string handling. | 1875 // Deferred code for string handling. |
1876 // (7) Not a long external string? If yes, go to (10). | 1876 // (7) Not a long external string? If yes, go to (10). |
1877 __ bind(&not_seq_nor_cons); | 1877 __ bind(&not_seq_nor_cons); |
1878 // Compare flags are still set from (3). | 1878 // Compare flags are still set from (3). |
1879 __ j(greater, &not_long_external, Label::kNear); // Go to (10). | 1879 __ j(greater, &not_long_external, Label::kNear); // Go to (10). |
1880 | 1880 |
1881 // (8) External string. Short external strings have been ruled out. | 1881 // (8) External string. Short external strings have been ruled out. |
1882 __ bind(&external_string); | 1882 __ bind(&external_string); |
1883 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); | 1883 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
1884 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 1884 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
1885 if (FLAG_debug_code) { | 1885 if (FLAG_debug_code) { |
1886 // Assert that we do not have a cons or slice (indirect strings) here. | 1886 // Assert that we do not have a cons or slice (indirect strings) here. |
1887 // Sequential strings have already been ruled out. | 1887 // Sequential strings have already been ruled out. |
1888 __ testb(rbx, Immediate(kIsIndirectStringMask)); | 1888 __ testb(rbx, Immediate(kIsIndirectStringMask)); |
1889 __ Assert(zero, kExternalStringExpectedButNotFound); | 1889 __ Assert(zero, kExternalStringExpectedButNotFound); |
1890 } | 1890 } |
1891 __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); | 1891 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); |
1892 // Move the pointer so that offset-wise, it looks like a sequential string. | 1892 // Move the pointer so that offset-wise, it looks like a sequential string. |
1893 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); | 1893 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); |
1894 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 1894 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
1895 STATIC_ASSERT(kTwoByteStringTag == 0); | 1895 STATIC_ASSERT(kTwoByteStringTag == 0); |
1896 // (8a) Is the external string one byte? If yes, go to (6). | 1896 // (8a) Is the external string one byte? If yes, go to (6). |
1897 __ testb(rbx, Immediate(kStringEncodingMask)); | 1897 __ testb(rbx, Immediate(kStringEncodingMask)); |
1898 __ j(not_zero, &seq_one_byte_string); // Go to (6). | 1898 __ j(not_zero, &seq_one_byte_string); // Go to (6). |
1899 | 1899 |
1900 // rdi: subject string (flat two-byte) | 1900 // rdi: subject string (flat two-byte) |
1901 // rax: RegExp data (FixedArray) | 1901 // rax: RegExp data (FixedArray) |
1902 // (9) Two byte sequential. Load regexp code for two byte. Go to (E). | 1902 // (9) Two byte sequential. Load regexp code for two byte. Go to (E). |
1903 __ bind(&seq_two_byte_string); | 1903 __ bind(&seq_two_byte_string); |
1904 __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset)); | 1904 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset)); |
1905 __ Set(rcx, 0); // Type is two byte. | 1905 __ Set(rcx, 0); // Type is two byte. |
1906 __ jmp(&check_code); // Go to (E). | 1906 __ jmp(&check_code); // Go to (E). |
1907 | 1907 |
1908 // (10) Not a string or a short external string? If yes, bail out to runtime. | 1908 // (10) Not a string or a short external string? If yes, bail out to runtime. |
1909 __ bind(&not_long_external); | 1909 __ bind(&not_long_external); |
1910 // Catch non-string subject or short external string. | 1910 // Catch non-string subject or short external string. |
1911 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0); | 1911 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0); |
1912 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask)); | 1912 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask)); |
1913 __ j(not_zero, &runtime); | 1913 __ j(not_zero, &runtime); |
1914 | 1914 |
1915 // (11) Sliced string. Replace subject with parent. Go to (5a). | 1915 // (11) Sliced string. Replace subject with parent. Go to (5a). |
1916 // Load offset into r14 and replace subject string with parent. | 1916 // Load offset into r14 and replace subject string with parent. |
1917 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset)); | 1917 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset)); |
1918 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); | 1918 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); |
1919 __ jmp(&check_underlying); | 1919 __ jmp(&check_underlying); |
1920 #endif // V8_INTERPRETED_REGEXP | 1920 #endif // V8_INTERPRETED_REGEXP |
1921 } | 1921 } |
1922 | 1922 |
1923 | 1923 |
1924 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { | 1924 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { |
1925 const int kMaxInlineLength = 100; | 1925 const int kMaxInlineLength = 100; |
1926 Label slowcase; | 1926 Label slowcase; |
1927 Label done; | 1927 Label done; |
1928 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1928 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1929 __ movq(r8, args.GetArgumentOperand(0)); | 1929 __ movp(r8, args.GetArgumentOperand(0)); |
1930 __ JumpIfNotSmi(r8, &slowcase); | 1930 __ JumpIfNotSmi(r8, &slowcase); |
1931 __ SmiToInteger32(rbx, r8); | 1931 __ SmiToInteger32(rbx, r8); |
1932 __ cmpl(rbx, Immediate(kMaxInlineLength)); | 1932 __ cmpl(rbx, Immediate(kMaxInlineLength)); |
1933 __ j(above, &slowcase); | 1933 __ j(above, &slowcase); |
1934 // Smi-tagging is equivalent to multiplying by 2. | 1934 // Smi-tagging is equivalent to multiplying by 2. |
1935 STATIC_ASSERT(kSmiTag == 0); | 1935 STATIC_ASSERT(kSmiTag == 0); |
1936 STATIC_ASSERT(kSmiTagSize == 1); | 1936 STATIC_ASSERT(kSmiTagSize == 1); |
1937 // Allocate RegExpResult followed by FixedArray with size in rbx. | 1937 // Allocate RegExpResult followed by FixedArray with size in rbx. |
1938 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] | 1938 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] |
1939 // Elements: [Map][Length][..elements..] | 1939 // Elements: [Map][Length][..elements..] |
1940 __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize, | 1940 __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize, |
1941 times_pointer_size, | 1941 times_pointer_size, |
1942 rbx, // In: Number of elements. | 1942 rbx, // In: Number of elements. |
1943 rax, // Out: Start of allocation (tagged). | 1943 rax, // Out: Start of allocation (tagged). |
1944 rcx, // Out: End of allocation. | 1944 rcx, // Out: End of allocation. |
1945 rdx, // Scratch register | 1945 rdx, // Scratch register |
1946 &slowcase, | 1946 &slowcase, |
1947 TAG_OBJECT); | 1947 TAG_OBJECT); |
1948 // rax: Start of allocated area, object-tagged. | 1948 // rax: Start of allocated area, object-tagged. |
1949 // rbx: Number of array elements as int32. | 1949 // rbx: Number of array elements as int32. |
1950 // r8: Number of array elements as smi. | 1950 // r8: Number of array elements as smi. |
1951 | 1951 |
1952 // Set JSArray map to global.regexp_result_map(). | 1952 // Set JSArray map to global.regexp_result_map(). |
1953 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); | 1953 __ movp(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); |
1954 __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset)); | 1954 __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset)); |
1955 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX)); | 1955 __ movp(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX)); |
1956 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx); | 1956 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rdx); |
1957 | 1957 |
1958 // Set empty properties FixedArray. | 1958 // Set empty properties FixedArray. |
1959 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); | 1959 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); |
1960 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); | 1960 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); |
1961 | 1961 |
1962 // Set elements to point to FixedArray allocated right after the JSArray. | 1962 // Set elements to point to FixedArray allocated right after the JSArray. |
1963 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); | 1963 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); |
1964 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); | 1964 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx); |
1965 | 1965 |
1966 // Set input, index and length fields from arguments. | 1966 // Set input, index and length fields from arguments. |
1967 __ movq(r8, args.GetArgumentOperand(2)); | 1967 __ movp(r8, args.GetArgumentOperand(2)); |
1968 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); | 1968 __ movp(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); |
1969 __ movq(r8, args.GetArgumentOperand(1)); | 1969 __ movp(r8, args.GetArgumentOperand(1)); |
1970 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); | 1970 __ movp(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); |
1971 __ movq(r8, args.GetArgumentOperand(0)); | 1971 __ movp(r8, args.GetArgumentOperand(0)); |
1972 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8); | 1972 __ movp(FieldOperand(rax, JSArray::kLengthOffset), r8); |
1973 | 1973 |
1974 // Fill out the elements FixedArray. | 1974 // Fill out the elements FixedArray. |
1975 // rax: JSArray. | 1975 // rax: JSArray. |
1976 // rcx: FixedArray. | 1976 // rcx: FixedArray. |
1977 // rbx: Number of elements in array as int32. | 1977 // rbx: Number of elements in array as int32. |
1978 | 1978 |
1979 // Set map. | 1979 // Set map. |
1980 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); | 1980 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); |
1981 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister); | 1981 __ movp(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister); |
1982 // Set length. | 1982 // Set length. |
1983 __ Integer32ToSmi(rdx, rbx); | 1983 __ Integer32ToSmi(rdx, rbx); |
1984 __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx); | 1984 __ movp(FieldOperand(rcx, FixedArray::kLengthOffset), rdx); |
1985 // Fill contents of fixed-array with undefined. | 1985 // Fill contents of fixed-array with undefined. |
1986 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); | 1986 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); |
1987 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize)); | 1987 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize)); |
1988 // Fill fixed array elements with undefined. | 1988 // Fill fixed array elements with undefined. |
1989 // rax: JSArray. | 1989 // rax: JSArray. |
1990 // rbx: Number of elements in array that remains to be filled, as int32. | 1990 // rbx: Number of elements in array that remains to be filled, as int32. |
1991 // rcx: Start of elements in FixedArray. | 1991 // rcx: Start of elements in FixedArray. |
1992 // rdx: undefined. | 1992 // rdx: undefined. |
1993 Label loop; | 1993 Label loop; |
1994 __ testl(rbx, rbx); | 1994 __ testl(rbx, rbx); |
1995 __ bind(&loop); | 1995 __ bind(&loop); |
1996 __ j(less_equal, &done); // Jump if rbx is negative or zero. | 1996 __ j(less_equal, &done); // Jump if rbx is negative or zero. |
1997 __ subl(rbx, Immediate(1)); | 1997 __ subl(rbx, Immediate(1)); |
1998 __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx); | 1998 __ movp(Operand(rcx, rbx, times_pointer_size, 0), rdx); |
1999 __ jmp(&loop); | 1999 __ jmp(&loop); |
2000 | 2000 |
2001 __ bind(&done); | 2001 __ bind(&done); |
2002 __ ret(3 * kPointerSize); | 2002 __ ret(3 * kPointerSize); |
2003 | 2003 |
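The fill loop above initializes every element slot before the object can be observed. Its C-level equivalent (the pointer types are stand-ins for tagged values):

    // Count rbx down and store the undefined sentinel into each slot;
    // j(less_equal) exits once the counter reaches zero.
    void FillWithUndefined(void** elements, int count, void* undefined_value) {
      for (int i = count - 1; i >= 0; i--) {
        elements[i] = undefined_value;  // movp rdx -> rcx + i * kPointerSize
      }
    }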
2004 __ bind(&slowcase); | 2004 __ bind(&slowcase); |
2005 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); | 2005 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); |
2006 } | 2006 } |
2007 | 2007 |
2008 | 2008 |
(...skipping 21 matching lines...)
2030 // hydrogen doesn't care, the stub doesn't have to care either. | 2030 // hydrogen doesn't care, the stub doesn't have to care either. |
2031 __ bind(&ok); | 2031 __ bind(&ok); |
2032 } | 2032 } |
2033 | 2033 |
2034 | 2034 |
2035 static void BranchIfNotInternalizedString(MacroAssembler* masm, | 2035 static void BranchIfNotInternalizedString(MacroAssembler* masm, |
2036 Label* label, | 2036 Label* label, |
2037 Register object, | 2037 Register object, |
2038 Register scratch) { | 2038 Register scratch) { |
2039 __ JumpIfSmi(object, label); | 2039 __ JumpIfSmi(object, label); |
2040 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); | 2040 __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset)); |
2041 __ movzxbq(scratch, | 2041 __ movzxbq(scratch, |
2042 FieldOperand(scratch, Map::kInstanceTypeOffset)); | 2042 FieldOperand(scratch, Map::kInstanceTypeOffset)); |
2043 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); | 2043 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
2044 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); | 2044 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); |
2045 __ j(not_zero, label); | 2045 __ j(not_zero, label); |
2046 } | 2046 } |
2047 | 2047 |
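Because kInternalizedTag and kStringTag are both zero (per the STATIC_ASSERT above), one masked test rules out non-strings and non-internalized strings at once. A sketch with this era's mask values, shown here only for illustration:

    #include <cstdint>

    constexpr uint8_t kIsNotStringMask = 0x80;        // illustrative value
    constexpr uint8_t kIsNotInternalizedMask = 0x40;  // illustrative value

    // Mirrors the testb/j(not_zero, label) pair above.
    bool IsInternalizedString(uint8_t instance_type) {
      return (instance_type &
              (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
    }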
2048 | 2048 |
2049 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { | 2049 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { |
2050 Label check_unequal_objects, done; | 2050 Label check_unequal_objects, done; |
2051 Condition cc = GetCondition(); | 2051 Condition cc = GetCondition(); |
2052 Factory* factory = masm->isolate()->factory(); | 2052 Factory* factory = masm->isolate()->factory(); |
2053 | 2053 |
2054 Label miss; | 2054 Label miss; |
2055 CheckInputType(masm, rdx, left_, &miss); | 2055 CheckInputType(masm, rdx, left_, &miss); |
2056 CheckInputType(masm, rax, right_, &miss); | 2056 CheckInputType(masm, rax, right_, &miss); |
2057 | 2057 |
2058 // Compare two smis. | 2058 // Compare two smis. |
2059 Label non_smi, smi_done; | 2059 Label non_smi, smi_done; |
2060 __ JumpIfNotBothSmi(rax, rdx, &non_smi); | 2060 __ JumpIfNotBothSmi(rax, rdx, &non_smi); |
2061 __ subq(rdx, rax); | 2061 __ subq(rdx, rax); |
2062 __ j(no_overflow, &smi_done); | 2062 __ j(no_overflow, &smi_done); |
2063 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. | 2063 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. |
2064 __ bind(&smi_done); | 2064 __ bind(&smi_done); |
2065 __ movq(rax, rdx); | 2065 __ movp(rax, rdx); |
2066 __ ret(0); | 2066 __ ret(0); |
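The smi fast path above subtracts the operands and, on signed overflow, bit-flips the result so its sign is correct again; the result cannot become zero, since overflow implies the operands differ. A sketch using the GCC/Clang overflow builtin:

    #include <cstdint>

    int64_t SmiCompareResult(int64_t left, int64_t right) {
      int64_t diff;
      if (!__builtin_sub_overflow(left, right, &diff)) return diff;
      return ~diff;  // matches not_(rdx): flips the wrong sign, never zero
    }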
2067 __ bind(&non_smi); | 2067 __ bind(&non_smi); |
2068 | 2068 |
2069 // The compare stub returns a positive, negative, or zero 64-bit integer | 2069 // The compare stub returns a positive, negative, or zero 64-bit integer |
2070 // value in rax, corresponding to result of comparing the two inputs. | 2070 // value in rax, corresponding to result of comparing the two inputs. |
2071 // NOTICE! This code is only reached after a smi-fast-case check, so | 2071 // NOTICE! This code is only reached after a smi-fast-case check, so |
2072 // it is certain that at least one operand isn't a smi. | 2072 // it is certain that at least one operand isn't a smi. |
2073 | 2073 |
2074 // Two identical objects are equal unless they are both NaN or undefined. | 2074 // Two identical objects are equal unless they are both NaN or undefined. |
2075 { | 2075 { |
(...skipping 58 matching lines...)
2134 { | 2134 { |
2135 Label not_smis; | 2135 Label not_smis; |
2136 __ SelectNonSmi(rbx, rax, rdx, &not_smis); | 2136 __ SelectNonSmi(rbx, rax, rdx, &not_smis); |
2137 | 2137 |
2138 // Check if the non-smi operand is a heap number. | 2138 // Check if the non-smi operand is a heap number. |
2139 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), | 2139 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), |
2140 factory->heap_number_map()); | 2140 factory->heap_number_map()); |
2141 // If heap number, handle it in the slow case. | 2141 // If heap number, handle it in the slow case. |
2142 __ j(equal, &slow); | 2142 __ j(equal, &slow); |
2143 // Return non-equal. ebx (the lower half of rbx) is not zero. | 2143 // Return non-equal. ebx (the lower half of rbx) is not zero. |
2144 __ movq(rax, rbx); | 2144 __ movp(rax, rbx); |
2145 __ ret(0); | 2145 __ ret(0); |
2146 | 2146 |
2147 __ bind(&not_smis); | 2147 __ bind(&not_smis); |
2148 } | 2148 } |
2149 | 2149 |
2150 // If either operand is a JSObject or an oddball value, then they are not | 2150 // If either operand is a JSObject or an oddball value, then they are not |
2151 // equal since their pointers are different. | 2151 // equal since their pointers are different. |
2152 // There is no test for undetectability in strict equality. | 2152 // There is no test for undetectability in strict equality. |
2153 | 2153 |
2154 // If the first object is a JS object, we have done pointer comparison. | 2154 // If the first object is a JS object, we have done pointer comparison. |
(...skipping 157 matching lines...)
2312 // Cache the called function in a global property cell. Cache states | 2312 // Cache the called function in a global property cell. Cache states |
2313 // are uninitialized, monomorphic (indicated by a JSFunction), and | 2313 // are uninitialized, monomorphic (indicated by a JSFunction), and |
2314 // megamorphic. | 2314 // megamorphic. |
2315 // rax : number of arguments to the construct function | 2315 // rax : number of arguments to the construct function |
2316 // rbx : cache cell for call target | 2316 // rbx : cache cell for call target |
2317 // rdi : the function to call | 2317 // rdi : the function to call |
2318 Isolate* isolate = masm->isolate(); | 2318 Isolate* isolate = masm->isolate(); |
2319 Label initialize, done, miss, megamorphic, not_array_function; | 2319 Label initialize, done, miss, megamorphic, not_array_function; |
2320 | 2320 |
2321 // Load the cache state into rcx. | 2321 // Load the cache state into rcx. |
2322 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); | 2322 __ movp(rcx, FieldOperand(rbx, Cell::kValueOffset)); |
2323 | 2323 |
2324 // A monomorphic cache hit or an already megamorphic state: invoke the | 2324 // A monomorphic cache hit or an already megamorphic state: invoke the |
2325 // function without changing the state. | 2325 // function without changing the state. |
2326 __ cmpq(rcx, rdi); | 2326 __ cmpq(rcx, rdi); |
2327 __ j(equal, &done); | 2327 __ j(equal, &done); |
2328 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); | 2328 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); |
2329 __ j(equal, &done); | 2329 __ j(equal, &done); |
2330 | 2330 |
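The two equality checks above implement a small state machine over the cache cell. Sketched here with an enum for clarity; the real cell holds a JSFunction or sentinel values, not an enum:

    enum class CacheState { kUninitialized, kMonomorphic, kMegamorphic };

    CacheState Transition(CacheState state, bool same_function) {
      if (state == CacheState::kUninitialized) return CacheState::kMonomorphic;
      if (state == CacheState::kMonomorphic && !same_function)
        return CacheState::kMegamorphic;
      return state;  // monomorphic hit or already megamorphic: no change
    }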
2331 // If we came here, we need to see if we are the array function. | 2331 // If we came here, we need to see if we are the array function. |
2332 // If we didn't have a matching function, and we didn't find the megamorph | 2332 // If we didn't have a matching function, and we didn't find the megamorph |
(...skipping 46 matching lines...)
2379 __ CallStub(&create_stub); | 2379 __ CallStub(&create_stub); |
2380 | 2380 |
2381 __ pop(rbx); | 2381 __ pop(rbx); |
2382 __ pop(rdi); | 2382 __ pop(rdi); |
2383 __ pop(rax); | 2383 __ pop(rax); |
2384 __ SmiToInteger32(rax, rax); | 2384 __ SmiToInteger32(rax, rax); |
2385 } | 2385 } |
2386 __ jmp(&done); | 2386 __ jmp(&done); |
2387 | 2387 |
2388 __ bind(&not_array_function); | 2388 __ bind(&not_array_function); |
2389 __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi); | 2389 __ movp(FieldOperand(rbx, Cell::kValueOffset), rdi); |
2390 // No need for a write barrier here - cells are rescanned. | 2390 // No need for a write barrier here - cells are rescanned. |
2391 | 2391 |
2392 __ bind(&done); | 2392 __ bind(&done); |
2393 } | 2393 } |
2394 | 2394 |
2395 | 2395 |
2396 void CallFunctionStub::Generate(MacroAssembler* masm) { | 2396 void CallFunctionStub::Generate(MacroAssembler* masm) { |
2397 // rbx : cache cell for call target | 2397 // rbx : cache cell for call target |
2398 // rdi : the function to call | 2398 // rdi : the function to call |
2399 Isolate* isolate = masm->isolate(); | 2399 Isolate* isolate = masm->isolate(); |
(...skipping 36 matching lines...)
2436 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 2436 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
2437 { | 2437 { |
2438 Handle<Code> adaptor = | 2438 Handle<Code> adaptor = |
2439 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 2439 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
2440 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 2440 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
2441 } | 2441 } |
2442 | 2442 |
2443 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 2443 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
2444 // of the original receiver from the call site). | 2444 // of the original receiver from the call site). |
2445 __ bind(&non_function); | 2445 __ bind(&non_function); |
2446 __ movq(args.GetReceiverOperand(), rdi); | 2446 __ movp(args.GetReceiverOperand(), rdi); |
2447 __ Set(rax, argc_); | 2447 __ Set(rax, argc_); |
2448 __ Set(rbx, 0); | 2448 __ Set(rbx, 0); |
2449 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); | 2449 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
2450 Handle<Code> adaptor = | 2450 Handle<Code> adaptor = |
2451 isolate->builtins()->ArgumentsAdaptorTrampoline(); | 2451 isolate->builtins()->ArgumentsAdaptorTrampoline(); |
2452 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 2452 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
2453 } | 2453 } |
2454 | 2454 |
2455 | 2455 |
2456 void CallConstructStub::Generate(MacroAssembler* masm) { | 2456 void CallConstructStub::Generate(MacroAssembler* masm) { |
2457 // rax : number of arguments | 2457 // rax : number of arguments |
2458 // rbx : cache cell for call target | 2458 // rbx : cache cell for call target |
2459 // rdi : constructor function | 2459 // rdi : constructor function |
2460 Label slow, non_function_call; | 2460 Label slow, non_function_call; |
2461 | 2461 |
2462 // Check that function is not a smi. | 2462 // Check that function is not a smi. |
2463 __ JumpIfSmi(rdi, &non_function_call); | 2463 __ JumpIfSmi(rdi, &non_function_call); |
2464 // Check that function is a JSFunction. | 2464 // Check that function is a JSFunction. |
2465 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 2465 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
2466 __ j(not_equal, &slow); | 2466 __ j(not_equal, &slow); |
2467 | 2467 |
2468 if (RecordCallTarget()) { | 2468 if (RecordCallTarget()) { |
2469 GenerateRecordCallTarget(masm); | 2469 GenerateRecordCallTarget(masm); |
2470 } | 2470 } |
2471 | 2471 |
2472 // Jump to the function-specific construct stub. | 2472 // Jump to the function-specific construct stub. |
2473 Register jmp_reg = rcx; | 2473 Register jmp_reg = rcx; |
2474 __ movq(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 2474 __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
2475 __ movq(jmp_reg, FieldOperand(jmp_reg, | 2475 __ movp(jmp_reg, FieldOperand(jmp_reg, |
2476 SharedFunctionInfo::kConstructStubOffset)); | 2476 SharedFunctionInfo::kConstructStubOffset)); |
2477 __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize)); | 2477 __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize)); |
2478 __ jmp(jmp_reg); | 2478 __ jmp(jmp_reg); |
2479 | 2479 |
2480 // rdi: called object | 2480 // rdi: called object |
2481 // rax: number of arguments | 2481 // rax: number of arguments |
2482 // rcx: object map | 2482 // rcx: object map |
2483 Label do_call; | 2483 Label do_call; |
2484 __ bind(&slow); | 2484 __ bind(&slow); |
2485 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); | 2485 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); |
(...skipping 37 matching lines...)
2523 stub.GetCode(isolate); | 2523 stub.GetCode(isolate); |
2524 CEntryStub save_doubles(1, kSaveFPRegs); | 2524 CEntryStub save_doubles(1, kSaveFPRegs); |
2525 save_doubles.GetCode(isolate); | 2525 save_doubles.GetCode(isolate); |
2526 } | 2526 } |
2527 | 2527 |
2528 | 2528 |
2529 static void JumpIfOOM(MacroAssembler* masm, | 2529 static void JumpIfOOM(MacroAssembler* masm, |
2530 Register value, | 2530 Register value, |
2531 Register scratch, | 2531 Register scratch, |
2532 Label* oom_label) { | 2532 Label* oom_label) { |
2533 __ movq(scratch, value); | 2533 __ movp(scratch, value); |
2534 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); | 2534 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); |
2535 STATIC_ASSERT(kFailureTag == 3); | 2535 STATIC_ASSERT(kFailureTag == 3); |
2536 __ and_(scratch, Immediate(0xf)); | 2536 __ and_(scratch, Immediate(0xf)); |
2537 __ cmpq(scratch, Immediate(0xf)); | 2537 __ cmpq(scratch, Immediate(0xf)); |
2538 __ j(equal, oom_label); | 2538 __ j(equal, oom_label); |
2539 } | 2539 } |
2540 | 2540 |
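JumpIfOOM works because of the two STATIC_ASSERTs: kFailureTag == 3 (0b11) fills the low two bits of a failure value and OUT_OF_MEMORY_EXCEPTION == 3 (0b11) fills the next two, so an OOM failure ends in 0b1111. The and_/cmpq pair tests exactly that nibble:

    #include <cstdint>

    // (type << 2) | kFailureTag == (3 << 2) | 3 == 0xf for OOM failures.
    bool IsOutOfMemoryFailure(uint64_t value) {
      return (value & 0xf) == 0xf;
    }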
2541 | 2541 |
2542 void CEntryStub::GenerateCore(MacroAssembler* masm, | 2542 void CEntryStub::GenerateCore(MacroAssembler* masm, |
2543 Label* throw_normal_exception, | 2543 Label* throw_normal_exception, |
(...skipping 18 matching lines...)
2562 if (FLAG_debug_code) { | 2562 if (FLAG_debug_code) { |
2563 __ CheckStackAlignment(); | 2563 __ CheckStackAlignment(); |
2564 } | 2564 } |
2565 | 2565 |
2566 if (do_gc) { | 2566 if (do_gc) { |
2567 // Pass failure code returned from last attempt as first argument to | 2567 // Pass failure code returned from last attempt as first argument to |
2568 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the | 2568 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the |
2569 // stack is known to be aligned. This function takes one argument which is | 2569 // stack is known to be aligned. This function takes one argument which is |
2570 // passed in register. | 2570 // passed in register. |
2571 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate())); | 2571 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate())); |
2572 __ movq(arg_reg_1, rax); | 2572 __ movp(arg_reg_1, rax); |
2573 __ Move(kScratchRegister, | 2573 __ Move(kScratchRegister, |
2574 ExternalReference::perform_gc_function(masm->isolate())); | 2574 ExternalReference::perform_gc_function(masm->isolate())); |
2575 __ call(kScratchRegister); | 2575 __ call(kScratchRegister); |
2576 } | 2576 } |
2577 | 2577 |
2578 ExternalReference scope_depth = | 2578 ExternalReference scope_depth = |
2579 ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); | 2579 ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); |
2580 if (always_allocate_scope) { | 2580 if (always_allocate_scope) { |
2581 Operand scope_depth_operand = masm->ExternalOperand(scope_depth); | 2581 Operand scope_depth_operand = masm->ExternalOperand(scope_depth); |
2582 __ incl(scope_depth_operand); | 2582 __ incl(scope_depth_operand); |
2583 } | 2583 } |
2584 | 2584 |
2585 // Call C function. | 2585 // Call C function. |
2586 #ifdef _WIN64 | 2586 #ifdef _WIN64 |
2587 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. | 2587 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. |
2588 // Pass argv and argc as two parameters. The arguments object will | 2588 // Pass argv and argc as two parameters. The arguments object will |
2589 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION(). | 2589 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION(). |
2590 if (result_size_ < 2) { | 2590 if (result_size_ < 2) { |
2591 // Pass a pointer to the Arguments object as the first argument. | 2591 // Pass a pointer to the Arguments object as the first argument. |
2592 // Return result in single register (rax). | 2592 // Return result in single register (rax). |
2593 __ movq(rcx, r14); // argc. | 2593 __ movp(rcx, r14); // argc. |
2594 __ movq(rdx, r15); // argv. | 2594 __ movp(rdx, r15); // argv. |
2595 __ Move(r8, ExternalReference::isolate_address(masm->isolate())); | 2595 __ Move(r8, ExternalReference::isolate_address(masm->isolate())); |
2596 } else { | 2596 } else { |
2597 ASSERT_EQ(2, result_size_); | 2597 ASSERT_EQ(2, result_size_); |
2598 // Pass a pointer to the result location as the first argument. | 2598 // Pass a pointer to the result location as the first argument. |
2599 __ lea(rcx, StackSpaceOperand(2)); | 2599 __ lea(rcx, StackSpaceOperand(2)); |
2600 // Pass a pointer to the Arguments object as the second argument. | 2600 // Pass a pointer to the Arguments object as the second argument. |
2601 __ movq(rdx, r14); // argc. | 2601 __ movp(rdx, r14); // argc. |
2602 __ movq(r8, r15); // argv. | 2602 __ movp(r8, r15); // argv. |
2603 __ Move(r9, ExternalReference::isolate_address(masm->isolate())); | 2603 __ Move(r9, ExternalReference::isolate_address(masm->isolate())); |
2604 } | 2604 } |
2605 | 2605 |
2606 #else // _WIN64 | 2606 #else // _WIN64 |
2607 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9. | 2607 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9. |
2608 __ movq(rdi, r14); // argc. | 2608 __ movp(rdi, r14); // argc. |
2609 __ movq(rsi, r15); // argv. | 2609 __ movp(rsi, r15); // argv. |
2610 __ Move(rdx, ExternalReference::isolate_address(masm->isolate())); | 2610 __ Move(rdx, ExternalReference::isolate_address(masm->isolate())); |
2611 #endif | 2611 #endif |
2612 __ call(rbx); | 2612 __ call(rbx); |
2613 // Result is in rax - do not destroy this register! | 2613 // Result is in rax - do not destroy this register! |
2614 | 2614 |
2615 if (always_allocate_scope) { | 2615 if (always_allocate_scope) { |
2616 Operand scope_depth_operand = masm->ExternalOperand(scope_depth); | 2616 Operand scope_depth_operand = masm->ExternalOperand(scope_depth); |
2617 __ decl(scope_depth_operand); | 2617 __ decl(scope_depth_operand); |
2618 } | 2618 } |
2619 | 2619 |
(...skipping 30 matching lines...)
2650 __ j(zero, &retry, Label::kNear); | 2650 __ j(zero, &retry, Label::kNear); |
2651 | 2651 |
2652 // Special handling of out of memory exceptions. | 2652 // Special handling of out of memory exceptions. |
2653 JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception); | 2653 JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception); |
2654 | 2654 |
2655 // Retrieve the pending exception. | 2655 // Retrieve the pending exception. |
2656 ExternalReference pending_exception_address( | 2656 ExternalReference pending_exception_address( |
2657 Isolate::kPendingExceptionAddress, masm->isolate()); | 2657 Isolate::kPendingExceptionAddress, masm->isolate()); |
2658 Operand pending_exception_operand = | 2658 Operand pending_exception_operand = |
2659 masm->ExternalOperand(pending_exception_address); | 2659 masm->ExternalOperand(pending_exception_address); |
2660 __ movq(rax, pending_exception_operand); | 2660 __ movp(rax, pending_exception_operand); |
2661 | 2661 |
2662 // See if we just retrieved an OOM exception. | 2662 // See if we just retrieved an OOM exception. |
2663 JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception); | 2663 JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception); |
2664 | 2664 |
2665 // Clear the pending exception. | 2665 // Clear the pending exception. |
2666 pending_exception_operand = | 2666 pending_exception_operand = |
2667 masm->ExternalOperand(pending_exception_address); | 2667 masm->ExternalOperand(pending_exception_address); |
2668 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); | 2668 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); |
2669 __ movq(pending_exception_operand, rdx); | 2669 __ movp(pending_exception_operand, rdx); |
2670 | 2670 |
2671 // Special handling of termination exceptions which are uncatchable | 2671 // Special handling of termination exceptions which are uncatchable |
2672 // by javascript code. | 2672 // by javascript code. |
2673 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); | 2673 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); |
2674 __ j(equal, throw_termination_exception); | 2674 __ j(equal, throw_termination_exception); |
2675 | 2675 |
2676 // Handle normal exception. | 2676 // Handle normal exception. |
2677 __ jmp(throw_normal_exception); | 2677 __ jmp(throw_normal_exception); |
2678 | 2678 |
2679 // Retry. | 2679 // Retry. |
(...skipping 94 matching lines...)
2774 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { | 2774 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { |
2775 Label invoke, handler_entry, exit; | 2775 Label invoke, handler_entry, exit; |
2776 Label not_outermost_js, not_outermost_js_2; | 2776 Label not_outermost_js, not_outermost_js_2; |
2777 | 2777 |
2778 ProfileEntryHookStub::MaybeCallEntryHook(masm); | 2778 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
2779 | 2779 |
2780 { // NOLINT. Scope block confuses linter. | 2780 { // NOLINT. Scope block confuses linter. |
2781 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); | 2781 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); |
2782 // Set up frame. | 2782 // Set up frame. |
2783 __ push(rbp); | 2783 __ push(rbp); |
2784 __ movq(rbp, rsp); | 2784 __ movp(rbp, rsp); |
2785 | 2785 |
2786 // Push the stack frame type marker twice. | 2786 // Push the stack frame type marker twice. |
2787 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 2787 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
2788 // Scratch register is neither callee-save, nor an argument register on any | 2788 // Scratch register is neither callee-save, nor an argument register on any |
2789 // platform. It's free to use at this point. | 2789 // platform. It's free to use at this point. |
2790 // Cannot use smi-register for loading yet. | 2790 // Cannot use smi-register for loading yet. |
2791 __ Move(kScratchRegister, Smi::FromInt(marker), RelocInfo::NONE64); | 2791 __ Move(kScratchRegister, Smi::FromInt(marker), RelocInfo::NONE64); |
2792 __ push(kScratchRegister); // context slot | 2792 __ push(kScratchRegister); // context slot |
2793 __ push(kScratchRegister); // function slot | 2793 __ push(kScratchRegister); // function slot |
2794 // Save callee-saved registers (X64/Win64 calling conventions). | 2794 // Save callee-saved registers (X64/Win64 calling conventions). |
(...skipping 36 matching lines...)
2831 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); | 2831 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); |
2832 __ push(c_entry_fp_operand); | 2832 __ push(c_entry_fp_operand); |
2833 } | 2833 } |
2834 | 2834 |
2835 // If this is the outermost JS call, set js_entry_sp value. | 2835 // If this is the outermost JS call, set js_entry_sp value. |
2836 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); | 2836 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); |
2837 __ Load(rax, js_entry_sp); | 2837 __ Load(rax, js_entry_sp); |
2838 __ testq(rax, rax); | 2838 __ testq(rax, rax); |
2839 __ j(not_zero, ¬_outermost_js); | 2839 __ j(not_zero, ¬_outermost_js); |
2840 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); | 2840 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); |
2841 __ movq(rax, rbp); | 2841 __ movp(rax, rbp); |
2842 __ Store(js_entry_sp, rax); | 2842 __ Store(js_entry_sp, rax); |
2843 Label cont; | 2843 Label cont; |
2844 __ jmp(&cont); | 2844 __ jmp(&cont); |
2845 __ bind(¬_outermost_js); | 2845 __ bind(¬_outermost_js); |
2846 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); | 2846 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); |
2847 __ bind(&cont); | 2847 __ bind(&cont); |
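
For readers tracing the entry bookkeeping above, a minimal C++ sketch of the idea (the variable `js_entry_sp`, the enum values, and `EnterJS` are illustrative stand-ins, not V8 API): the slot stays zero until the first JS frame is entered, and only that first entry records its frame pointer and pushes the OUTERMOST marker.

```cpp
#include <cstdint>

static uintptr_t js_entry_sp = 0;  // stand-in for the ExternalReference slot

enum Marker { INNER_JSENTRY_FRAME, OUTERMOST_JSENTRY_FRAME };

Marker EnterJS(uintptr_t frame_pointer) {
  if (js_entry_sp == 0) {            // Load(rax, js_entry_sp); testq rax, rax
    js_entry_sp = frame_pointer;     // movp rax, rbp; Store(js_entry_sp, rax)
    return OUTERMOST_JSENTRY_FRAME;  // Push(Smi OUTERMOST_JSENTRY_FRAME)
  }
  return INNER_JSENTRY_FRAME;        // Push(Smi INNER_JSENTRY_FRAME)
}
```

The exit path at `not_outermost_js_2` further down is the mirror image: only the frame that pushed the OUTERMOST marker clears `js_entry_sp` back to zero.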
2848 | 2848 |
2849 // Jump to a faked try block that does the invoke, with a faked catch | 2849 // Jump to a faked try block that does the invoke, with a faked catch |
2850 // block that sets the pending exception. | 2850 // block that sets the pending exception. |
2851 __ jmp(&invoke); | 2851 __ jmp(&invoke); |
(...skipping 37 matching lines...) |
2889 | 2889 |
2890 // Unlink this frame from the handler chain. | 2890 // Unlink this frame from the handler chain. |
2891 __ PopTryHandler(); | 2891 __ PopTryHandler(); |
2892 | 2892 |
2893 __ bind(&exit); | 2893 __ bind(&exit); |
2894 // Check if the current stack frame is marked as the outermost JS frame. | 2894 // Check if the current stack frame is marked as the outermost JS frame. |
2895 __ pop(rbx); | 2895 __ pop(rbx); |
2896 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); | 2896 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); |
2897 __ j(not_equal, ¬_outermost_js_2); | 2897 __ j(not_equal, ¬_outermost_js_2); |
2898 __ Move(kScratchRegister, js_entry_sp); | 2898 __ Move(kScratchRegister, js_entry_sp); |
2899 __ movq(Operand(kScratchRegister, 0), Immediate(0)); | 2899 __ movp(Operand(kScratchRegister, 0), Immediate(0)); |
2900 __ bind(¬_outermost_js_2); | 2900 __ bind(¬_outermost_js_2); |
2901 | 2901 |
2902 // Restore the top frame descriptor from the stack. | 2902 // Restore the top frame descriptor from the stack. |
2903 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); | 2903 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); |
2904 __ pop(c_entry_fp_operand); | 2904 __ pop(c_entry_fp_operand); |
2905 } | 2905 } |
2906 | 2906 |
2907 // Restore callee-saved registers (X64 conventions). | 2907 // Restore callee-saved registers (X64 conventions). |
2908 #ifdef _WIN64 | 2908 #ifdef _WIN64 |
2909 // On Win64, XMM6-XMM15 are callee-save. | 2909 // On Win64, XMM6-XMM15 are callee-save. |
(...skipping 56 matching lines...) |
2966 // before the offset of the hole value in the root array. | 2966 // before the offset of the hole value in the root array. |
2967 static const unsigned int kWordBeforeResultValue = 0x458B4906; | 2967 static const unsigned int kWordBeforeResultValue = 0x458B4906; |
2968 // Only the inline check flag is supported on X64. | 2968 // Only the inline check flag is supported on X64. |
2969 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck()); | 2969 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck()); |
2970 int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0; | 2970 int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0; |
2971 | 2971 |
2972 // Get the object - go slow case if it's a smi. | 2972 // Get the object - go slow case if it's a smi. |
2973 Label slow; | 2973 Label slow; |
2974 StackArgumentsAccessor args(rsp, 2 + extra_argument_offset, | 2974 StackArgumentsAccessor args(rsp, 2 + extra_argument_offset, |
2975 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 2975 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
2976 __ movq(rax, args.GetArgumentOperand(0)); | 2976 __ movp(rax, args.GetArgumentOperand(0)); |
2977 __ JumpIfSmi(rax, &slow); | 2977 __ JumpIfSmi(rax, &slow); |
2978 | 2978 |
2979 // Check that the left-hand side is a JS object. Leave its map in rax. | 2979 // Check that the left-hand side is a JS object. Leave its map in rax. |
2980 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); | 2980 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); |
2981 __ j(below, &slow); | 2981 __ j(below, &slow); |
2982 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); | 2982 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); |
2983 __ j(above, &slow); | 2983 __ j(above, &slow); |
2984 | 2984 |
2985 // Get the prototype of the function. | 2985 // Get the prototype of the function. |
2986 __ movq(rdx, args.GetArgumentOperand(1)); | 2986 __ movp(rdx, args.GetArgumentOperand(1)); |
2987 // rdx is function, rax is map. | 2987 // rdx is function, rax is map. |
2988 | 2988 |
2989 // If there is a call site cache, don't look in the global cache, but do the | 2989 // If there is a call site cache, don't look in the global cache, but do the |
2990 // real lookup and update the call site cache. | 2990 // real lookup and update the call site cache. |
2991 if (!HasCallSiteInlineCheck()) { | 2991 if (!HasCallSiteInlineCheck()) { |
2992 // Look up the function and the map in the instanceof cache. | 2992 // Look up the function and the map in the instanceof cache. |
2993 Label miss; | 2993 Label miss; |
2994 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 2994 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
2995 __ j(not_equal, &miss, Label::kNear); | 2995 __ j(not_equal, &miss, Label::kNear); |
2996 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 2996 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
(...skipping 14 matching lines...) |
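
The lookup that begins above (its tail sits in the elided lines) consults a single-entry memo kept in three root-array slots. A hedged sketch of the scheme, with illustrative types in place of the real root slots:

```cpp
// Single-entry (function, map) -> answer memo; a mismatch on either key is a
// miss and falls through to the real prototype walk.
struct InstanceofCache {
  const void* function = nullptr;
  const void* map = nullptr;
  bool answer = false;
};

static InstanceofCache instanceof_cache;

bool LookupInstanceofCache(const void* function, const void* map, bool* answer) {
  if (function != instanceof_cache.function) return false;  // j(not_equal, &miss)
  if (map != instanceof_cache.map) return false;            // j(not_equal, &miss)
  *answer = instanceof_cache.answer;  // cached answer is returned directly
  return true;
}
```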
3011 | 3011 |
3012 // Register mapping: | 3012 // Register mapping: |
3013 // rax is object map. | 3013 // rax is object map. |
3014 // rdx is function. | 3014 // rdx is function. |
3015 // rbx is function prototype. | 3015 // rbx is function prototype. |
3016 if (!HasCallSiteInlineCheck()) { | 3016 if (!HasCallSiteInlineCheck()) { |
3017 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 3017 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
3018 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 3018 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
3019 } else { | 3019 } else { |
3020 // Get return address and delta to inlined map check. | 3020 // Get return address and delta to inlined map check. |
3021 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); | 3021 __ movp(kScratchRegister, StackOperandForReturnAddress(0)); |
3022 __ subq(kScratchRegister, args.GetArgumentOperand(2)); | 3022 __ subq(kScratchRegister, args.GetArgumentOperand(2)); |
3023 if (FLAG_debug_code) { | 3023 if (FLAG_debug_code) { |
3024 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); | 3024 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); |
3025 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); | 3025 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); |
3026 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck); | 3026 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck); |
3027 } | 3027 } |
3028 __ movq(kScratchRegister, | 3028 __ movp(kScratchRegister, |
3029 Operand(kScratchRegister, kOffsetToMapCheckValue)); | 3029 Operand(kScratchRegister, kOffsetToMapCheckValue)); |
3030 __ movq(Operand(kScratchRegister, 0), rax); | 3030 __ movp(Operand(kScratchRegister, 0), rax); |
3031 } | 3031 } |
3032 | 3032 |
3033 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); | 3033 __ movp(rcx, FieldOperand(rax, Map::kPrototypeOffset)); |
3034 | 3034 |
3035 // Loop through the prototype chain looking for the function prototype. | 3035 // Loop through the prototype chain looking for the function prototype. |
3036 Label loop, is_instance, is_not_instance; | 3036 Label loop, is_instance, is_not_instance; |
3037 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); | 3037 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); |
3038 __ bind(&loop); | 3038 __ bind(&loop); |
3039 __ cmpq(rcx, rbx); | 3039 __ cmpq(rcx, rbx); |
3040 __ j(equal, &is_instance, Label::kNear); | 3040 __ j(equal, &is_instance, Label::kNear); |
3041 __ cmpq(rcx, kScratchRegister); | 3041 __ cmpq(rcx, kScratchRegister); |
3042 // The code at is_not_instance assumes that kScratchRegister contains a | 3042 // The code at is_not_instance assumes that kScratchRegister contains a |
3043 // non-zero GCable value (the null object in this case). | 3043 // non-zero GCable value (the null object in this case). |
3044 __ j(equal, &is_not_instance, Label::kNear); | 3044 __ j(equal, &is_not_instance, Label::kNear); |
3045 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); | 3045 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); |
3046 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); | 3046 __ movp(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); |
3047 __ jmp(&loop); | 3047 __ jmp(&loop); |
3048 | 3048 |
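
The loop above is an ordinary prototype-chain walk. A minimal C++ rendering of the same control flow (the `Object`/`Map` structs are illustrative, not V8's object layout):

```cpp
struct Map;
struct Object { Map* map; };
struct Map { Object* prototype; };

bool IsInstance(Map* object_map, Object* function_prototype, Object* null_object) {
  Object* current = object_map->prototype;           // movp rcx, FieldOperand(rax, kPrototypeOffset)
  while (true) {
    if (current == function_prototype) return true;  // cmpq rcx, rbx -> is_instance
    if (current == null_object) return false;        // cmpq rcx, kScratchRegister -> is_not_instance
    current = current->map->prototype;               // two movp loads, then jmp &loop
  }
}
```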
3049 __ bind(&is_instance); | 3049 __ bind(&is_instance); |
3050 if (!HasCallSiteInlineCheck()) { | 3050 if (!HasCallSiteInlineCheck()) { |
3051 __ xorl(rax, rax); | 3051 __ xorl(rax, rax); |
3052 // Store bitwise zero in the cache. This is a Smi in GC terms. | 3052 // Store bitwise zero in the cache. This is a Smi in GC terms. |
3053 STATIC_ASSERT(kSmiTag == 0); | 3053 STATIC_ASSERT(kSmiTag == 0); |
3054 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 3054 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); |
3055 } else { | 3055 } else { |
3056 // Store offset of true in the root array at the inline check site. | 3056 // Store offset of true in the root array at the inline check site. |
3057 int true_offset = 0x100 + | 3057 int true_offset = 0x100 + |
3058 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; | 3058 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
3059 // Assert it is a 1-byte signed value. | 3059 // Assert it is a 1-byte signed value. |
3060 ASSERT(true_offset >= 0 && true_offset < 0x100); | 3060 ASSERT(true_offset >= 0 && true_offset < 0x100); |
3061 __ movl(rax, Immediate(true_offset)); | 3061 __ movl(rax, Immediate(true_offset)); |
3062 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); | 3062 __ movp(kScratchRegister, StackOperandForReturnAddress(0)); |
3063 __ subq(kScratchRegister, args.GetArgumentOperand(2)); | 3063 __ subq(kScratchRegister, args.GetArgumentOperand(2)); |
3064 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | 3064 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
3065 if (FLAG_debug_code) { | 3065 if (FLAG_debug_code) { |
3066 __ movl(rax, Immediate(kWordBeforeResultValue)); | 3066 __ movl(rax, Immediate(kWordBeforeResultValue)); |
3067 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 3067 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
3068 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); | 3068 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); |
3069 } | 3069 } |
3070 __ Set(rax, 0); | 3070 __ Set(rax, 0); |
3071 } | 3071 } |
3072 __ ret((2 + extra_argument_offset) * kPointerSize); | 3072 __ ret((2 + extra_argument_offset) * kPointerSize); |
3073 | 3073 |
3074 __ bind(&is_not_instance); | 3074 __ bind(&is_not_instance); |
3075 if (!HasCallSiteInlineCheck()) { | 3075 if (!HasCallSiteInlineCheck()) { |
3076 // We have to store a non-zero value in the cache. | 3076 // We have to store a non-zero value in the cache. |
3077 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 3077 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
3078 } else { | 3078 } else { |
3079 // Store offset of false in the root array at the inline check site. | 3079 // Store offset of false in the root array at the inline check site. |
3080 int false_offset = 0x100 + | 3080 int false_offset = 0x100 + |
3081 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; | 3081 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
3082 // Assert it is a 1-byte signed value. | 3082 // Assert it is a 1-byte signed value. |
3083 ASSERT(false_offset >= 0 && false_offset < 0x100); | 3083 ASSERT(false_offset >= 0 && false_offset < 0x100); |
3084 __ movl(rax, Immediate(false_offset)); | 3084 __ movl(rax, Immediate(false_offset)); |
3085 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); | 3085 __ movp(kScratchRegister, StackOperandForReturnAddress(0)); |
3086 __ subq(kScratchRegister, args.GetArgumentOperand(2)); | 3086 __ subq(kScratchRegister, args.GetArgumentOperand(2)); |
3087 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | 3087 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
3088 if (FLAG_debug_code) { | 3088 if (FLAG_debug_code) { |
3089 __ movl(rax, Immediate(kWordBeforeResultValue)); | 3089 __ movl(rax, Immediate(kWordBeforeResultValue)); |
3090 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 3090 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
3091 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); | 3091 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); |
3092 } | 3092 } |
3093 } | 3093 } |
3094 __ ret((2 + extra_argument_offset) * kPointerSize); | 3094 __ ret((2 + extra_argument_offset) * kPointerSize); |
3095 | 3095 |
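
Both result paths above patch the inlined call site rather than going through the cache: the byte operand of a `movb` at the call site is overwritten with the root-array offset of true or false. A hedged sketch of the arithmetic and the poke (helper names are invented for illustration):

```cpp
#include <cassert>
#include <cstdint>

// Mirrors: 0x100 + (root_index << kPointerSizeLog2) - kRootRegisterBias,
// asserted above to fit in a single byte.
int RootByteOffset(int root_index, int pointer_size_log2, int root_register_bias) {
  int offset = 0x100 + (root_index << pointer_size_log2) - root_register_bias;
  assert(offset >= 0 && offset < 0x100);
  return offset;
}

// The movb store into the instruction stream, kOffsetToResultValue bytes
// past the patched map-check site.
void PatchResultByte(uint8_t* map_check_site, int offset_to_result, int byte_value) {
  map_check_site[offset_to_result] = static_cast<uint8_t>(byte_value);
}
```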
(...skipping 22 matching lines...) |
3118 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 3118 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
3119 Label flat_string; | 3119 Label flat_string; |
3120 Label ascii_string; | 3120 Label ascii_string; |
3121 Label got_char_code; | 3121 Label got_char_code; |
3122 Label sliced_string; | 3122 Label sliced_string; |
3123 | 3123 |
3124 // If the receiver is a smi, trigger the non-string case. | 3124 // If the receiver is a smi, trigger the non-string case. |
3125 __ JumpIfSmi(object_, receiver_not_string_); | 3125 __ JumpIfSmi(object_, receiver_not_string_); |
3126 | 3126 |
3127 // Fetch the instance type of the receiver into result register. | 3127 // Fetch the instance type of the receiver into result register. |
3128 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset)); | 3128 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); |
3129 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); | 3129 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); |
3130 // If the receiver is not a string, trigger the non-string case. | 3130 // If the receiver is not a string, trigger the non-string case. |
3131 __ testb(result_, Immediate(kIsNotStringMask)); | 3131 __ testb(result_, Immediate(kIsNotStringMask)); |
3132 __ j(not_zero, receiver_not_string_); | 3132 __ j(not_zero, receiver_not_string_); |
3133 | 3133 |
3134 // If the index is not a smi, trigger the non-smi case. | 3134 // If the index is not a smi, trigger the non-smi case. |
3135 __ JumpIfNotSmi(index_, &index_not_smi_); | 3135 __ JumpIfNotSmi(index_, &index_not_smi_); |
3136 __ bind(&got_smi_index_); | 3136 __ bind(&got_smi_index_); |
3137 | 3137 |
3138 // Check for index out of range. | 3138 // Check for index out of range. |
(...skipping 29 matching lines...) |
3168 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 3168 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
3169 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 3169 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); |
3170 } else { | 3170 } else { |
3171 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 3171 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
3172 // NumberToSmi discards numbers that are not exact integers. | 3172 // NumberToSmi discards numbers that are not exact integers. |
3173 __ CallRuntime(Runtime::kNumberToSmi, 1); | 3173 __ CallRuntime(Runtime::kNumberToSmi, 1); |
3174 } | 3174 } |
3175 if (!index_.is(rax)) { | 3175 if (!index_.is(rax)) { |
3176 // Save the conversion result before the pop instructions below | 3176 // Save the conversion result before the pop instructions below |
3177 // have a chance to overwrite it. | 3177 // have a chance to overwrite it. |
3178 __ movq(index_, rax); | 3178 __ movp(index_, rax); |
3179 } | 3179 } |
3180 __ pop(object_); | 3180 __ pop(object_); |
3181 // Reload the instance type. | 3181 // Reload the instance type. |
3182 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset)); | 3182 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); |
3183 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); | 3183 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); |
3184 call_helper.AfterCall(masm); | 3184 call_helper.AfterCall(masm); |
3185 // If index is still not a smi, it must be out of range. | 3185 // If index is still not a smi, it must be out of range. |
3186 __ JumpIfNotSmi(index_, index_out_of_range_); | 3186 __ JumpIfNotSmi(index_, index_out_of_range_); |
3187 // Otherwise, return to the fast path. | 3187 // Otherwise, return to the fast path. |
3188 __ jmp(&got_smi_index_); | 3188 __ jmp(&got_smi_index_); |
3189 | 3189 |
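
The slow path above leans on the runtime to canonicalize the index. A sketch of what `Runtime::kNumberToSmi` is relied on for here, per the comment above ("discards numbers that are not exact integers"); the 31-bit smi range used below is an assumption of this sketch:

```cpp
#include <cmath>
#include <cstdint>
#include <optional>

std::optional<int32_t> NumberToSmi(double index) {
  // A nullopt result corresponds to JumpIfNotSmi -> index_out_of_range_.
  if (std::floor(index) != index) return std::nullopt;  // not an exact integer
  if (index < -1073741824.0 || index > 1073741823.0) return std::nullopt;
  return static_cast<int32_t>(index);                   // back to the fast path
}
```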
3190 // Call runtime. We get here when the receiver is a string and the | 3190 // Call runtime. We get here when the receiver is a string and the |
3191 // index is a number, but the code for getting the actual character | 3191 // index is a number, but the code for getting the actual character |
3192 // is too complex (e.g., when the string needs to be flattened). | 3192 // is too complex (e.g., when the string needs to be flattened). |
3193 __ bind(&call_runtime_); | 3193 __ bind(&call_runtime_); |
3194 call_helper.BeforeCall(masm); | 3194 call_helper.BeforeCall(masm); |
3195 __ push(object_); | 3195 __ push(object_); |
3196 __ Integer32ToSmi(index_, index_); | 3196 __ Integer32ToSmi(index_, index_); |
3197 __ push(index_); | 3197 __ push(index_); |
3198 __ CallRuntime(Runtime::kStringCharCodeAt, 2); | 3198 __ CallRuntime(Runtime::kStringCharCodeAt, 2); |
3199 if (!result_.is(rax)) { | 3199 if (!result_.is(rax)) { |
3200 __ movq(result_, rax); | 3200 __ movp(result_, rax); |
3201 } | 3201 } |
3202 call_helper.AfterCall(masm); | 3202 call_helper.AfterCall(masm); |
3203 __ jmp(&exit_); | 3203 __ jmp(&exit_); |
3204 | 3204 |
3205 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); | 3205 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); |
3206 } | 3206 } |
3207 | 3207 |
3208 | 3208 |
3209 // ------------------------------------------------------------------------- | 3209 // ------------------------------------------------------------------------- |
3210 // StringCharFromCodeGenerator | 3210 // StringCharFromCodeGenerator |
3211 | 3211 |
3212 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { | 3212 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { |
3213 // Fast case of Heap::LookupSingleCharacterStringFromCode. | 3213 // Fast case of Heap::LookupSingleCharacterStringFromCode. |
3214 __ JumpIfNotSmi(code_, &slow_case_); | 3214 __ JumpIfNotSmi(code_, &slow_case_); |
3215 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode)); | 3215 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode)); |
3216 __ j(above, &slow_case_); | 3216 __ j(above, &slow_case_); |
3217 | 3217 |
3218 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); | 3218 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); |
3219 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2); | 3219 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2); |
3220 __ movq(result_, FieldOperand(result_, index.reg, index.scale, | 3220 __ movp(result_, FieldOperand(result_, index.reg, index.scale, |
3221 FixedArray::kHeaderSize)); | 3221 FixedArray::kHeaderSize)); |
3222 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); | 3222 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); |
3223 __ j(equal, &slow_case_); | 3223 __ j(equal, &slow_case_); |
3224 __ bind(&exit_); | 3224 __ bind(&exit_); |
3225 } | 3225 } |
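
The fast path above is a plain cache lookup keyed by character code. A compact sketch with illustrative types (`nullptr` stands in for the undefined root that routes to `slow_case_`):

```cpp
#include <array>
#include <cstddef>

struct String {};

constexpr int kMaxOneByteCharCode = 0xff;
// Assumed pre-filled single-character string cache.
std::array<String*, kMaxOneByteCharCode + 1> single_character_cache{};

String* CharFromCodeFast(int code) {
  if (code < 0 || code > kMaxOneByteCharCode) return nullptr;      // -> slow_case_
  return single_character_cache[static_cast<std::size_t>(code)];   // nullptr -> slow_case_
}
```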
3226 | 3226 |
3227 | 3227 |
3228 void StringCharFromCodeGenerator::GenerateSlow( | 3228 void StringCharFromCodeGenerator::GenerateSlow( |
3229 MacroAssembler* masm, | 3229 MacroAssembler* masm, |
3230 const RuntimeCallHelper& call_helper) { | 3230 const RuntimeCallHelper& call_helper) { |
3231 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); | 3231 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); |
3232 | 3232 |
3233 __ bind(&slow_case_); | 3233 __ bind(&slow_case_); |
3234 call_helper.BeforeCall(masm); | 3234 call_helper.BeforeCall(masm); |
3235 __ push(code_); | 3235 __ push(code_); |
3236 __ CallRuntime(Runtime::kCharFromCode, 1); | 3236 __ CallRuntime(Runtime::kCharFromCode, 1); |
3237 if (!result_.is(rax)) { | 3237 if (!result_.is(rax)) { |
3238 __ movq(result_, rax); | 3238 __ movp(result_, rax); |
3239 } | 3239 } |
3240 call_helper.AfterCall(masm); | 3240 call_helper.AfterCall(masm); |
3241 __ jmp(&exit_); | 3241 __ jmp(&exit_); |
3242 | 3242 |
3243 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 3243 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); |
3244 } | 3244 } |
3245 | 3245 |
3246 | 3246 |
3247 void StringAddStub::Generate(MacroAssembler* masm) { | 3247 void StringAddStub::Generate(MacroAssembler* masm) { |
3248 Label call_runtime, call_builtin; | 3248 Label call_runtime, call_builtin; |
3249 Builtins::JavaScript builtin_id = Builtins::ADD; | 3249 Builtins::JavaScript builtin_id = Builtins::ADD; |
3250 | 3250 |
3251 // Load the two arguments. | 3251 // Load the two arguments. |
3252 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 3252 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
3253 __ movq(rax, args.GetArgumentOperand(0)); // First argument (left). | 3253 __ movp(rax, args.GetArgumentOperand(0)); // First argument (left). |
3254 __ movq(rdx, args.GetArgumentOperand(1)); // Second argument (right). | 3254 __ movp(rdx, args.GetArgumentOperand(1)); // Second argument (right). |
3255 | 3255 |
3256 // Make sure that both arguments are strings if not known in advance. | 3256 // Make sure that both arguments are strings if not known in advance. |
3257 // Otherwise, at least one of the arguments is definitely a string, | 3257 // Otherwise, at least one of the arguments is definitely a string, |
3258 // and we convert the one that is not known to be a string. | 3258 // and we convert the one that is not known to be a string. |
3259 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) { | 3259 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) { |
3260 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT); | 3260 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT); |
3261 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT); | 3261 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT); |
3262 __ JumpIfSmi(rax, &call_runtime); | 3262 __ JumpIfSmi(rax, &call_runtime); |
3263 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); | 3263 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); |
3264 __ j(above_equal, &call_runtime); | 3264 __ j(above_equal, &call_runtime); |
(...skipping 12 matching lines...) |
3277 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi, | 3277 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi, |
3278 &call_builtin); | 3278 &call_builtin); |
3279 builtin_id = Builtins::STRING_ADD_LEFT; | 3279 builtin_id = Builtins::STRING_ADD_LEFT; |
3280 } | 3280 } |
3281 | 3281 |
3282 // Both arguments are strings. | 3282 // Both arguments are strings. |
3283 // rax: first string | 3283 // rax: first string |
3284 // rdx: second string | 3284 // rdx: second string |
3285 // Check if either of the strings is empty. In that case, return the other. | 3285 // Check if either of the strings is empty. In that case, return the other. |
3286 Label second_not_zero_length, both_not_zero_length; | 3286 Label second_not_zero_length, both_not_zero_length; |
3287 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset)); | 3287 __ movp(rcx, FieldOperand(rdx, String::kLengthOffset)); |
3288 __ SmiTest(rcx); | 3288 __ SmiTest(rcx); |
3289 __ j(not_zero, &second_not_zero_length, Label::kNear); | 3289 __ j(not_zero, &second_not_zero_length, Label::kNear); |
3290 // Second string is empty; result is the first string, which is already in rax. | 3290 // Second string is empty; result is the first string, which is already in rax. |
3291 Counters* counters = masm->isolate()->counters(); | 3291 Counters* counters = masm->isolate()->counters(); |
3292 __ IncrementCounter(counters->string_add_native(), 1); | 3292 __ IncrementCounter(counters->string_add_native(), 1); |
3293 __ ret(2 * kPointerSize); | 3293 __ ret(2 * kPointerSize); |
3294 __ bind(&second_not_zero_length); | 3294 __ bind(&second_not_zero_length); |
3295 __ movq(rbx, FieldOperand(rax, String::kLengthOffset)); | 3295 __ movp(rbx, FieldOperand(rax, String::kLengthOffset)); |
3296 __ SmiTest(rbx); | 3296 __ SmiTest(rbx); |
3297 __ j(not_zero, &both_not_zero_length, Label::kNear); | 3297 __ j(not_zero, &both_not_zero_length, Label::kNear); |
3298 // First string is empty; result is the second string, which is in rdx. | 3298 // First string is empty; result is the second string, which is in rdx. |
3299 __ movq(rax, rdx); | 3299 __ movp(rax, rdx); |
3300 __ IncrementCounter(counters->string_add_native(), 1); | 3300 __ IncrementCounter(counters->string_add_native(), 1); |
3301 __ ret(2 * kPointerSize); | 3301 __ ret(2 * kPointerSize); |
3302 | 3302 |
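
The two early returns above avoid any allocation when one operand is empty. The same shape in plain C++ (the `left + right` fallback is only a stand-in for the cons-string and flat-copy paths that follow, not what the stub does):

```cpp
#include <string>

std::string StringAddFastPath(const std::string& left, const std::string& right) {
  if (right.empty()) return left;   // second operand empty: return first as-is
  if (left.empty()) return right;   // first operand empty: return second as-is
  return left + right;              // stand-in for the non-trivial paths below
}
```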
3303 // Both strings are non-empty. | 3303 // Both strings are non-empty. |
3304 // rax: first string | 3304 // rax: first string |
3305 // rbx: length of first string | 3305 // rbx: length of first string |
3306 // rcx: length of second string | 3306 // rcx: length of second string |
3307 // rdx: second string | 3307 // rdx: second string |
3308 // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS) | 3308 // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS) |
3309 // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS) | 3309 // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS) |
3310 Label string_add_flat_result, longer_than_two; | 3310 Label string_add_flat_result, longer_than_two; |
3311 __ bind(&both_not_zero_length); | 3311 __ bind(&both_not_zero_length); |
3312 | 3312 |
3313 // If the arguments were known to be strings, the maps were not loaded into | 3313 // If the arguments were known to be strings, the maps were not loaded into |
3314 // r8 and r9 by the code above. | 3314 // r8 and r9 by the code above. |
3315 if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) { | 3315 if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) { |
3316 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset)); | 3316 __ movp(r8, FieldOperand(rax, HeapObject::kMapOffset)); |
3317 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); | 3317 __ movp(r9, FieldOperand(rdx, HeapObject::kMapOffset)); |
3318 } | 3318 } |
3319 // Get the instance types of the two strings as they will be needed soon. | 3319 // Get the instance types of the two strings as they will be needed soon. |
3320 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset)); | 3320 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset)); |
3321 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset)); | 3321 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset)); |
3322 | 3322 |
3323 // Look at the length of the result of adding the two strings. | 3323 // Look at the length of the result of adding the two strings. |
3324 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2); | 3324 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2); |
3325 __ SmiAdd(rbx, rbx, rcx); | 3325 __ SmiAdd(rbx, rbx, rcx); |
3326 // Use the string table when adding two one-character strings, as it | 3326 // Use the string table when adding two one-character strings, as it |
3327 // helps later optimizations to return an internalized string here. | 3327 // helps later optimizations to return an internalized string here. |
(...skipping 51 matching lines...) |
3379 __ and_(rcx, r9); | 3379 __ and_(rcx, r9); |
3380 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0); | 3380 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0); |
3381 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); | 3381 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); |
3382 __ testl(rcx, Immediate(kStringEncodingMask)); | 3382 __ testl(rcx, Immediate(kStringEncodingMask)); |
3383 __ j(zero, &non_ascii); | 3383 __ j(zero, &non_ascii); |
3384 __ bind(&ascii_data); | 3384 __ bind(&ascii_data); |
3385 // Allocate an ASCII cons string. | 3385 // Allocate an ASCII cons string. |
3386 __ AllocateAsciiConsString(rcx, rdi, no_reg, &call_runtime); | 3386 __ AllocateAsciiConsString(rcx, rdi, no_reg, &call_runtime); |
3387 __ bind(&allocated); | 3387 __ bind(&allocated); |
3388 // Fill the fields of the cons string. | 3388 // Fill the fields of the cons string. |
3389 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx); | 3389 __ movp(FieldOperand(rcx, ConsString::kLengthOffset), rbx); |
3390 __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset), | 3390 __ movp(FieldOperand(rcx, ConsString::kHashFieldOffset), |
3391 Immediate(String::kEmptyHashField)); | 3391 Immediate(String::kEmptyHashField)); |
3392 | 3392 |
3393 Label skip_write_barrier, after_writing; | 3393 Label skip_write_barrier, after_writing; |
3394 ExternalReference high_promotion_mode = ExternalReference:: | 3394 ExternalReference high_promotion_mode = ExternalReference:: |
3395 new_space_high_promotion_mode_active_address(masm->isolate()); | 3395 new_space_high_promotion_mode_active_address(masm->isolate()); |
3396 __ Load(rbx, high_promotion_mode); | 3396 __ Load(rbx, high_promotion_mode); |
3397 __ testb(rbx, Immediate(1)); | 3397 __ testb(rbx, Immediate(1)); |
3398 __ j(zero, &skip_write_barrier); | 3398 __ j(zero, &skip_write_barrier); |
3399 | 3399 |
3400 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax); | 3400 __ movp(FieldOperand(rcx, ConsString::kFirstOffset), rax); |
3401 __ RecordWriteField(rcx, | 3401 __ RecordWriteField(rcx, |
3402 ConsString::kFirstOffset, | 3402 ConsString::kFirstOffset, |
3403 rax, | 3403 rax, |
3404 rbx, | 3404 rbx, |
3405 kDontSaveFPRegs); | 3405 kDontSaveFPRegs); |
3406 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx); | 3406 __ movp(FieldOperand(rcx, ConsString::kSecondOffset), rdx); |
3407 __ RecordWriteField(rcx, | 3407 __ RecordWriteField(rcx, |
3408 ConsString::kSecondOffset, | 3408 ConsString::kSecondOffset, |
3409 rdx, | 3409 rdx, |
3410 rbx, | 3410 rbx, |
3411 kDontSaveFPRegs); | 3411 kDontSaveFPRegs); |
3412 __ jmp(&after_writing); | 3412 __ jmp(&after_writing); |
3413 | 3413 |
3414 __ bind(&skip_write_barrier); | 3414 __ bind(&skip_write_barrier); |
3415 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax); | 3415 __ movp(FieldOperand(rcx, ConsString::kFirstOffset), rax); |
3416 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx); | 3416 __ movp(FieldOperand(rcx, ConsString::kSecondOffset), rdx); |
3417 | 3417 |
3418 __ bind(&after_writing); | 3418 __ bind(&after_writing); |
3419 | 3419 |
3420 __ movq(rax, rcx); | 3420 __ movp(rax, rcx); |
3421 __ IncrementCounter(counters->string_add_native(), 1); | 3421 __ IncrementCounter(counters->string_add_native(), 1); |
3422 __ ret(2 * kPointerSize); | 3422 __ ret(2 * kPointerSize); |
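
The branch above exists because a cons string allocated under high promotion mode may land in old space, where pointer stores into it must be reported to the GC. A hedged sketch of the decision (types illustrative):

```cpp
struct HeapObject {};
struct ConsString { HeapObject* first; HeapObject* second; };

void InitConsString(ConsString* cons, HeapObject* first, HeapObject* second,
                    bool high_promotion_mode_active) {
  cons->first = first;    // movp FieldOperand(rcx, kFirstOffset), rax
  cons->second = second;  // movp FieldOperand(rcx, kSecondOffset), rdx
  if (high_promotion_mode_active) {
    // RecordWriteField(...) for both stores: the cons string may be in old
    // space while its operands are in new space.
  }
}
```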
3423 __ bind(&non_ascii); | 3423 __ bind(&non_ascii); |
3424 // At least one of the strings is two-byte. Check whether it happens | 3424 // At least one of the strings is two-byte. Check whether it happens |
3425 // to contain only one-byte characters. | 3425 // to contain only one-byte characters. |
3426 // rcx: first instance type AND second instance type. | 3426 // rcx: first instance type AND second instance type. |
3427 // r8: first instance type. | 3427 // r8: first instance type. |
3428 // r9: second instance type. | 3428 // r9: second instance type. |
3429 __ testb(rcx, Immediate(kOneByteDataHintMask)); | 3429 __ testb(rcx, Immediate(kOneByteDataHintMask)); |
3430 __ j(not_zero, &ascii_data); | 3430 __ j(not_zero, &ascii_data); |
(...skipping 21 matching lines...) |
3452 | 3452 |
3453 __ SmiToInteger32(r14, FieldOperand(rax, SeqString::kLengthOffset)); | 3453 __ SmiToInteger32(r14, FieldOperand(rax, SeqString::kLengthOffset)); |
3454 // r14: length of first string | 3454 // r14: length of first string |
3455 STATIC_ASSERT(kSeqStringTag == 0); | 3455 STATIC_ASSERT(kSeqStringTag == 0); |
3456 __ testb(r8, Immediate(kStringRepresentationMask)); | 3456 __ testb(r8, Immediate(kStringRepresentationMask)); |
3457 __ j(zero, &first_is_sequential, Label::kNear); | 3457 __ j(zero, &first_is_sequential, Label::kNear); |
3458 // Rule out short external string and load string resource. | 3458 // Rule out short external string and load string resource. |
3459 STATIC_ASSERT(kShortExternalStringTag != 0); | 3459 STATIC_ASSERT(kShortExternalStringTag != 0); |
3460 __ testb(r8, Immediate(kShortExternalStringMask)); | 3460 __ testb(r8, Immediate(kShortExternalStringMask)); |
3461 __ j(not_zero, &call_runtime); | 3461 __ j(not_zero, &call_runtime); |
3462 __ movq(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset)); | 3462 __ movp(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset)); |
3463 __ jmp(&first_prepared, Label::kNear); | 3463 __ jmp(&first_prepared, Label::kNear); |
3464 __ bind(&first_is_sequential); | 3464 __ bind(&first_is_sequential); |
3465 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); | 3465 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); |
3466 __ lea(rcx, FieldOperand(rax, SeqOneByteString::kHeaderSize)); | 3466 __ lea(rcx, FieldOperand(rax, SeqOneByteString::kHeaderSize)); |
3467 __ bind(&first_prepared); | 3467 __ bind(&first_prepared); |
3468 | 3468 |
3469 // Check whether both strings have same encoding. | 3469 // Check whether both strings have same encoding. |
3470 __ xorl(r8, r9); | 3470 __ xorl(r8, r9); |
3471 __ testb(r8, Immediate(kStringEncodingMask)); | 3471 __ testb(r8, Immediate(kStringEncodingMask)); |
3472 __ j(not_zero, &call_runtime); | 3472 __ j(not_zero, &call_runtime); |
3473 | 3473 |
3474 __ SmiToInteger32(r15, FieldOperand(rdx, SeqString::kLengthOffset)); | 3474 __ SmiToInteger32(r15, FieldOperand(rdx, SeqString::kLengthOffset)); |
3475 // r15: length of second string | 3475 // r15: length of second string |
3476 STATIC_ASSERT(kSeqStringTag == 0); | 3476 STATIC_ASSERT(kSeqStringTag == 0); |
3477 __ testb(r9, Immediate(kStringRepresentationMask)); | 3477 __ testb(r9, Immediate(kStringRepresentationMask)); |
3478 __ j(zero, &second_is_sequential, Label::kNear); | 3478 __ j(zero, &second_is_sequential, Label::kNear); |
3479 // Rule out short external string and load string resource. | 3479 // Rule out short external string and load string resource. |
3480 STATIC_ASSERT(kShortExternalStringTag != 0); | 3480 STATIC_ASSERT(kShortExternalStringTag != 0); |
3481 __ testb(r9, Immediate(kShortExternalStringMask)); | 3481 __ testb(r9, Immediate(kShortExternalStringMask)); |
3482 __ j(not_zero, &call_runtime); | 3482 __ j(not_zero, &call_runtime); |
3483 __ movq(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset)); | 3483 __ movp(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset)); |
3484 __ jmp(&second_prepared, Label::kNear); | 3484 __ jmp(&second_prepared, Label::kNear); |
3485 __ bind(&second_is_sequential); | 3485 __ bind(&second_is_sequential); |
3486 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); | 3486 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); |
3487 __ lea(rdx, FieldOperand(rdx, SeqOneByteString::kHeaderSize)); | 3487 __ lea(rdx, FieldOperand(rdx, SeqOneByteString::kHeaderSize)); |
3488 __ bind(&second_prepared); | 3488 __ bind(&second_prepared); |
3489 | 3489 |
3490 Label non_ascii_string_add_flat_result; | 3490 Label non_ascii_string_add_flat_result; |
3491 // r9: instance type of second string | 3491 // r9: instance type of second string |
3492 // First string and second string have the same encoding. | 3492 // First string and second string have the same encoding. |
3493 STATIC_ASSERT(kTwoByteStringTag == 0); | 3493 STATIC_ASSERT(kTwoByteStringTag == 0); |
(...skipping 71 matching lines...) |
3565 // First check if the argument is already a string. | 3565 // First check if the argument is already a string. |
3566 Label not_string, done; | 3566 Label not_string, done; |
3567 __ JumpIfSmi(arg, ¬_string); | 3567 __ JumpIfSmi(arg, ¬_string); |
3568 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1); | 3568 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1); |
3569 __ j(below, &done); | 3569 __ j(below, &done); |
3570 | 3570 |
3571 // Check the number to string cache. | 3571 // Check the number to string cache. |
3572 __ bind(¬_string); | 3572 __ bind(¬_string); |
3573 // Puts the cached result into scratch1. | 3573 // Puts the cached result into scratch1. |
3574 __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, slow); | 3574 __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, slow); |
3575 __ movq(arg, scratch1); | 3575 __ movp(arg, scratch1); |
3576 __ movq(Operand(rsp, stack_offset), arg); | 3576 __ movp(Operand(rsp, stack_offset), arg); |
3577 __ bind(&done); | 3577 __ bind(&done); |
3578 } | 3578 } |
3579 | 3579 |
3580 | 3580 |
3581 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, | 3581 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, |
3582 Register dest, | 3582 Register dest, |
3583 Register src, | 3583 Register src, |
3584 Register count, | 3584 Register count, |
3585 bool ascii) { | 3585 bool ascii) { |
3586 Label loop; | 3586 Label loop; |
(...skipping 136 matching lines...) |
3723 for (int i = 0; i < kProbes; i++) { | 3723 for (int i = 0; i < kProbes; i++) { |
3724 // Calculate entry in string table. | 3724 // Calculate entry in string table. |
3725 __ movl(scratch, hash); | 3725 __ movl(scratch, hash); |
3726 if (i > 0) { | 3726 if (i > 0) { |
3727 __ addl(scratch, Immediate(StringTable::GetProbeOffset(i))); | 3727 __ addl(scratch, Immediate(StringTable::GetProbeOffset(i))); |
3728 } | 3728 } |
3729 __ andl(scratch, mask); | 3729 __ andl(scratch, mask); |
3730 | 3730 |
3731 // Load the entry from the string table. | 3731 // Load the entry from the string table. |
3732 STATIC_ASSERT(StringTable::kEntrySize == 1); | 3732 STATIC_ASSERT(StringTable::kEntrySize == 1); |
3733 __ movq(candidate, | 3733 __ movp(candidate, |
3734 FieldOperand(string_table, | 3734 FieldOperand(string_table, |
3735 scratch, | 3735 scratch, |
3736 times_pointer_size, | 3736 times_pointer_size, |
3737 StringTable::kElementsStartOffset)); | 3737 StringTable::kElementsStartOffset)); |
3738 | 3738 |
3739 // If entry is undefined no string with this hash can be found. | 3739 // If entry is undefined no string with this hash can be found. |
3740 Label is_string; | 3740 Label is_string; |
3741 __ CmpObjectType(candidate, ODDBALL_TYPE, map); | 3741 __ CmpObjectType(candidate, ODDBALL_TYPE, map); |
3742 __ j(not_equal, &is_string, Label::kNear); | 3742 __ j(not_equal, &is_string, Label::kNear); |
3743 | 3743 |
(...skipping 31 matching lines...) |
3775 __ bind(&next_probe[i]); | 3775 __ bind(&next_probe[i]); |
3776 } | 3776 } |
3777 | 3777 |
3778 // No matching 2 character string found by probing. | 3778 // No matching 2 character string found by probing. |
3779 __ jmp(not_found); | 3779 __ jmp(not_found); |
3780 | 3780 |
3781 // Scratch register contains result when we fall through to here. | 3781 // Scratch register contains result when we fall through to here. |
3782 Register result = candidate; | 3782 Register result = candidate; |
3783 __ bind(&found_in_string_table); | 3783 __ bind(&found_in_string_table); |
3784 if (!result.is(rax)) { | 3784 if (!result.is(rax)) { |
3785 __ movq(rax, result); | 3785 __ movp(rax, result); |
3786 } | 3786 } |
3787 } | 3787 } |
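
The probe sequence above re-derives an index from the hash on every attempt. A sketch of the indexing (the quadratic shape of `StringTable::GetProbeOffset` is an assumption of this sketch; the stub treats it as an opaque per-probe offset):

```cpp
#include <cstdint>

uint32_t ProbeIndex(uint32_t hash, uint32_t mask, uint32_t i) {
  uint32_t probe_offset = (i + i * i) / 2;  // assumed GetProbeOffset(i) shape
  return (hash + probe_offset) & mask;      // addl scratch, offset; andl scratch, mask
}
```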
3788 | 3788 |
3789 | 3789 |
3790 void StringHelper::GenerateHashInit(MacroAssembler* masm, | 3790 void StringHelper::GenerateHashInit(MacroAssembler* masm, |
3791 Register hash, | 3791 Register hash, |
3792 Register character, | 3792 Register character, |
3793 Register scratch) { | 3793 Register scratch) { |
3794 // hash = (seed + character) + ((seed + character) << 10); | 3794 // hash = (seed + character) + ((seed + character) << 10); |
3795 __ LoadRoot(scratch, Heap::kHashSeedRootIndex); | 3795 __ LoadRoot(scratch, Heap::kHashSeedRootIndex); |
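
A worked rendering of the seeding formula in the comment above (only this first step is shown; the add-character and finalization steps live in the elided lines):

```cpp
#include <cstdint>

uint32_t HashInit(uint32_t seed, uint32_t character) {
  uint32_t hash = seed + character;
  return hash + (hash << 10);  // (seed + character) + ((seed + character) << 10)
}
```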
(...skipping 63 matching lines...) |
3859 STRING_ARGUMENT_INDEX, | 3859 STRING_ARGUMENT_INDEX, |
3860 FROM_ARGUMENT_INDEX, | 3860 FROM_ARGUMENT_INDEX, |
3861 TO_ARGUMENT_INDEX, | 3861 TO_ARGUMENT_INDEX, |
3862 SUB_STRING_ARGUMENT_COUNT | 3862 SUB_STRING_ARGUMENT_COUNT |
3863 }; | 3863 }; |
3864 | 3864 |
3865 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT, | 3865 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT, |
3866 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 3866 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
3867 | 3867 |
3868 // Make sure first argument is a string. | 3868 // Make sure first argument is a string. |
3869 __ movq(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX)); | 3869 __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX)); |
3870 STATIC_ASSERT(kSmiTag == 0); | 3870 STATIC_ASSERT(kSmiTag == 0); |
3871 __ testl(rax, Immediate(kSmiTagMask)); | 3871 __ testl(rax, Immediate(kSmiTagMask)); |
3872 __ j(zero, &runtime); | 3872 __ j(zero, &runtime); |
3873 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); | 3873 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); |
3874 __ j(NegateCondition(is_string), &runtime); | 3874 __ j(NegateCondition(is_string), &runtime); |
3875 | 3875 |
3876 // rax: string | 3876 // rax: string |
3877 // rbx: instance type | 3877 // rbx: instance type |
3878 // Calculate length of sub string using the smi values. | 3878 // Calculate length of sub string using the smi values. |
3879 __ movq(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX)); | 3879 __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX)); |
3880 __ movq(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX)); | 3880 __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX)); |
3881 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime); | 3881 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime); |
3882 | 3882 |
3883 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. | 3883 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. |
3884 __ cmpq(rcx, FieldOperand(rax, String::kLengthOffset)); | 3884 __ cmpq(rcx, FieldOperand(rax, String::kLengthOffset)); |
3885 Label not_original_string; | 3885 Label not_original_string; |
3886 // Shorter than original string's length: an actual substring. | 3886 // Shorter than original string's length: an actual substring. |
3887 __ j(below, ¬_original_string, Label::kNear); | 3887 __ j(below, ¬_original_string, Label::kNear); |
3888 // Longer than original string's length or negative: unsafe arguments. | 3888 // Longer than original string's length or negative: unsafe arguments. |
3889 __ j(above, &runtime); | 3889 __ j(above, &runtime); |
3890 // Return original string. | 3890 // Return original string. |
(...skipping 21 matching lines...) |
3912 __ testb(rbx, Immediate(kIsIndirectStringMask)); | 3912 __ testb(rbx, Immediate(kIsIndirectStringMask)); |
3913 __ j(zero, &seq_or_external_string, Label::kNear); | 3913 __ j(zero, &seq_or_external_string, Label::kNear); |
3914 | 3914 |
3915 __ testb(rbx, Immediate(kSlicedNotConsMask)); | 3915 __ testb(rbx, Immediate(kSlicedNotConsMask)); |
3916 __ j(not_zero, &sliced_string, Label::kNear); | 3916 __ j(not_zero, &sliced_string, Label::kNear); |
3917 // Cons string. Check whether it is flat, then fetch first part. | 3917 // Cons string. Check whether it is flat, then fetch first part. |
3918 // Flat cons strings have an empty second part. | 3918 // Flat cons strings have an empty second part. |
3919 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset), | 3919 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset), |
3920 Heap::kempty_stringRootIndex); | 3920 Heap::kempty_stringRootIndex); |
3921 __ j(not_equal, &runtime); | 3921 __ j(not_equal, &runtime); |
3922 __ movq(rdi, FieldOperand(rax, ConsString::kFirstOffset)); | 3922 __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset)); |
3923 // Update instance type. | 3923 // Update instance type. |
3924 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); | 3924 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
3925 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 3925 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
3926 __ jmp(&underlying_unpacked, Label::kNear); | 3926 __ jmp(&underlying_unpacked, Label::kNear); |
3927 | 3927 |
3928 __ bind(&sliced_string); | 3928 __ bind(&sliced_string); |
3929 // Sliced string. Fetch parent and correct start index by offset. | 3929 // Sliced string. Fetch parent and correct start index by offset. |
3930 __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset)); | 3930 __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset)); |
3931 __ movq(rdi, FieldOperand(rax, SlicedString::kParentOffset)); | 3931 __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset)); |
3932 // Update instance type. | 3932 // Update instance type. |
3933 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); | 3933 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
3934 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 3934 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
3935 __ jmp(&underlying_unpacked, Label::kNear); | 3935 __ jmp(&underlying_unpacked, Label::kNear); |
3936 | 3936 |
3937 __ bind(&seq_or_external_string); | 3937 __ bind(&seq_or_external_string); |
3938 // Sequential or external string. Just move string to the correct register. | 3938 // Sequential or external string. Just move string to the correct register. |
3939 __ movq(rdi, rax); | 3939 __ movp(rdi, rax); |
3940 | 3940 |
3941 __ bind(&underlying_unpacked); | 3941 __ bind(&underlying_unpacked); |
3942 | 3942 |
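
The three paths that converge on `underlying_unpacked` above reduce any string to a sequential or external backing store plus an adjusted start index. The same logic as a C++ sketch (the `Str` struct is illustrative, not V8's layout):

```cpp
struct Str {
  enum Kind { kSeqOrExternal, kCons, kSliced } kind;
  Str* first;   // cons: first part (only flat cons strings reach here)
  Str* parent;  // sliced: underlying string
  int offset;   // sliced: start offset into the parent
};

Str* UnpackUnderlying(Str* s, int* start_index) {
  switch (s->kind) {
    case Str::kCons:
      return s->first;             // second part checked to be empty above
    case Str::kSliced:
      *start_index += s->offset;   // addq rdx, SlicedString::kOffsetOffset
      return s->parent;            // movp rdi, SlicedString::kParentOffset
    default:
      return s;                    // sequential/external: movp rdi, rax
  }
}
```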
3943 if (FLAG_string_slices) { | 3943 if (FLAG_string_slices) { |
3944 Label copy_routine; | 3944 Label copy_routine; |
3945 // rdi: underlying subject string | 3945 // rdi: underlying subject string |
3946 // rbx: instance type of underlying subject string | 3946 // rbx: instance type of underlying subject string |
3947 // rdx: adjusted start index (smi) | 3947 // rdx: adjusted start index (smi) |
3948 // rcx: length | 3948 // rcx: length |
3949 // If coming from the make_two_character_string path, the string | 3949 // If coming from the make_two_character_string path, the string |
(...skipping 10 matching lines...) |
3960 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0); | 3960 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0); |
3961 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); | 3961 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); |
3962 __ testb(rbx, Immediate(kStringEncodingMask)); | 3962 __ testb(rbx, Immediate(kStringEncodingMask)); |
3963 __ j(zero, &two_byte_slice, Label::kNear); | 3963 __ j(zero, &two_byte_slice, Label::kNear); |
3964 __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime); | 3964 __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime); |
3965 __ jmp(&set_slice_header, Label::kNear); | 3965 __ jmp(&set_slice_header, Label::kNear); |
3966 __ bind(&two_byte_slice); | 3966 __ bind(&two_byte_slice); |
3967 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime); | 3967 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime); |
3968 __ bind(&set_slice_header); | 3968 __ bind(&set_slice_header); |
3969 __ Integer32ToSmi(rcx, rcx); | 3969 __ Integer32ToSmi(rcx, rcx); |
3970 __ movq(FieldOperand(rax, SlicedString::kLengthOffset), rcx); | 3970 __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx); |
3971 __ movq(FieldOperand(rax, SlicedString::kHashFieldOffset), | 3971 __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset), |
3972 Immediate(String::kEmptyHashField)); | 3972 Immediate(String::kEmptyHashField)); |
3973 __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi); | 3973 __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi); |
3974 __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx); | 3974 __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx); |
3975 __ IncrementCounter(counters->sub_string_native(), 1); | 3975 __ IncrementCounter(counters->sub_string_native(), 1); |
3976 __ ret(3 * kPointerSize); | 3976 __ ret(3 * kPointerSize); |
3977 | 3977 |
3978 __ bind(©_routine); | 3978 __ bind(©_routine); |
3979 } | 3979 } |
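
The slice path above trades copying for a constant-size header write. A sketch of the header it fills in (field names mirror the offsets used above; the hash-field value 0 is only a placeholder for `String::kEmptyHashField`, whose actual encoding is not shown here):

```cpp
struct SlicedStringHeader {
  int length;          // SlicedString::kLengthOffset
  int hash_field;      // SlicedString::kHashFieldOffset
  const void* parent;  // SlicedString::kParentOffset
  int offset;          // SlicedString::kOffsetOffset
};

SlicedStringHeader MakeSlice(const void* parent, int offset, int length) {
  // Mirrors the four movp stores above; no characters are copied.
  return SlicedStringHeader{length, /*String::kEmptyHashField*/ 0, parent, offset};
}
```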
3980 | 3980 |
3981 // rdi: underlying subject string | 3981 // rdi: underlying subject string |
3982 // rbx: instance type of underlying subject string | 3982 // rbx: instance type of underlying subject string |
3983 // rdx: adjusted start index (smi) | 3983 // rdx: adjusted start index (smi) |
3984 // rcx: length | 3984 // rcx: length |
3985 // The subject string can only be external or sequential string of either | 3985 // The subject string can only be external or sequential string of either |
3986 // encoding at this point. | 3986 // encoding at this point. |
3987 Label two_byte_sequential, sequential_string; | 3987 Label two_byte_sequential, sequential_string; |
3988 STATIC_ASSERT(kExternalStringTag != 0); | 3988 STATIC_ASSERT(kExternalStringTag != 0); |
3989 STATIC_ASSERT(kSeqStringTag == 0); | 3989 STATIC_ASSERT(kSeqStringTag == 0); |
3990 __ testb(rbx, Immediate(kExternalStringTag)); | 3990 __ testb(rbx, Immediate(kExternalStringTag)); |
3991 __ j(zero, &sequential_string); | 3991 __ j(zero, &sequential_string); |
3992 | 3992 |
3993 // Handle external string. | 3993 // Handle external string. |
3994 // Rule out short external strings. | 3994 // Rule out short external strings. |
3995 STATIC_ASSERT(kShortExternalStringTag != 0); | 3995 STATIC_ASSERT(kShortExternalStringTag != 0); |
3996 __ testb(rbx, Immediate(kShortExternalStringMask)); | 3996 __ testb(rbx, Immediate(kShortExternalStringMask)); |
3997 __ j(not_zero, &runtime); | 3997 __ j(not_zero, &runtime); |
3998 __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); | 3998 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); |
3999 // Move the pointer so that offset-wise, it looks like a sequential string. | 3999 // Move the pointer so that offset-wise, it looks like a sequential string. |
4000 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); | 4000 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); |
4001 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 4001 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
4002 | 4002 |
4003 __ bind(&sequential_string); | 4003 __ bind(&sequential_string); |
4004 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0); | 4004 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0); |
4005 __ testb(rbx, Immediate(kStringEncodingMask)); | 4005 __ testb(rbx, Immediate(kStringEncodingMask)); |
4006 __ j(zero, &two_byte_sequential); | 4006 __ j(zero, &two_byte_sequential); |
4007 | 4007 |
4008 // Allocate the result. | 4008 // Allocate the result. |
4009 __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime); | 4009 __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime); |
4010 | 4010 |
4011 // rax: result string | 4011 // rax: result string |
4012 // rcx: result string length | 4012 // rcx: result string length |
4013 __ movq(r14, rsi); // rsi used by following code. | 4013 __ movp(r14, rsi); // rsi used by following code. |
4014 { // Locate character of sub string start. | 4014 { // Locate character of sub string start. |
4015 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1); | 4015 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1); |
4016 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, | 4016 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, |
4017 SeqOneByteString::kHeaderSize - kHeapObjectTag)); | 4017 SeqOneByteString::kHeaderSize - kHeapObjectTag)); |
4018 } | 4018 } |
4019 // Locate first character of result. | 4019 // Locate first character of result. |
4020 __ lea(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize)); | 4020 __ lea(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize)); |
4021 | 4021 |
4022 // rax: result string | 4022 // rax: result string |
4023 // rcx: result length | 4023 // rcx: result length |
4024 // rdi: first character of result | 4024 // rdi: first character of result |
4025 // rsi: character of sub string start | 4025 // rsi: character of sub string start |
4026 // r14: original value of rsi | 4026 // r14: original value of rsi |
4027 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true); | 4027 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true); |
4028 __ movq(rsi, r14); // Restore rsi. | 4028 __ movp(rsi, r14); // Restore rsi. |
4029 __ IncrementCounter(counters->sub_string_native(), 1); | 4029 __ IncrementCounter(counters->sub_string_native(), 1); |
4030 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); | 4030 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); |
4031 | 4031 |
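
`GenerateCopyCharactersREP`, called above with `true` for the one-byte case and below with `false` for the two-byte case, is a bulk character copy. Functionally it amounts to the following sketch (the real helper uses `rep movs` with the registers set up above):

```cpp
#include <cstddef>
#include <cstring>

void CopyCharacters(void* dest, const void* src, std::size_t count, bool ascii) {
  std::memcpy(dest, src, count * (ascii ? 1 : 2));  // 1 or 2 bytes per character
}
```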
4032 __ bind(&two_byte_sequential); | 4032 __ bind(&two_byte_sequential); |
4033 // Allocate the result. | 4033 // Allocate the result. |
4034 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime); | 4034 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime); |
4035 | 4035 |
4036 // rax: result string | 4036 // rax: result string |
4037 // rcx: result string length | 4037 // rcx: result string length |
4038 __ movq(r14, rsi); // rsi used by following code. | 4038 __ movp(r14, rsi); // rsi used by following code. |
4039 { // Locate character of sub string start. | 4039 { // Locate character of sub string start. |
4040 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2); | 4040 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2); |
4041 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, | 4041 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, |
4042 SeqOneByteString::kHeaderSize - kHeapObjectTag)); | 4042 SeqOneByteString::kHeaderSize - kHeapObjectTag)); |
4043 } | 4043 } |
4044 // Locate first character of result. | 4044 // Locate first character of result. |
4045 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); | 4045 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); |
4046 | 4046 |
4047 // rax: result string | 4047 // rax: result string |
4048 // rcx: result length | 4048 // rcx: result length |
4049 // rdi: first character of result | 4049 // rdi: first character of result |
4050 // rsi: character of sub string start | 4050 // rsi: character of sub string start |
4051 // r14: original value of rsi | 4051 // r14: original value of rsi |
4052 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false); | 4052 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false); |
4053 __ movq(rsi, r14); // Restore rsi. | 4053 __ movp(rsi, r14); // Restore rsi. |
4054 __ IncrementCounter(counters->sub_string_native(), 1); | 4054 __ IncrementCounter(counters->sub_string_native(), 1); |
4055 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); | 4055 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); |
4056 | 4056 |
4057 // Just jump to runtime to create the sub string. | 4057 // Just jump to runtime to create the sub string. |
4058 __ bind(&runtime); | 4058 __ bind(&runtime); |
4059 __ TailCallRuntime(Runtime::kSubString, 3, 1); | 4059 __ TailCallRuntime(Runtime::kSubString, 3, 1); |
4060 | 4060 |
4061 __ bind(&single_char); | 4061 __ bind(&single_char); |
4062 // rax: string | 4062 // rax: string |
4063 // rbx: instance type | 4063 // rbx: instance type |
4064 // rcx: sub string length (smi) | 4064 // rcx: sub string length (smi) |
4065 // rdx: from index (smi) | 4065 // rdx: from index (smi) |
4066 StringCharAtGenerator generator( | 4066 StringCharAtGenerator generator( |
4067 rax, rdx, rcx, rax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER); | 4067 rax, rdx, rcx, rax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER); |
4068 generator.GenerateFast(masm); | 4068 generator.GenerateFast(masm); |
4069 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); | 4069 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); |
4070 generator.SkipSlow(masm, &runtime); | 4070 generator.SkipSlow(masm, &runtime); |
4071 } | 4071 } |
4072 | 4072 |
4073 | 4073 |
4074 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, | 4074 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, |
4075 Register left, | 4075 Register left, |
4076 Register right, | 4076 Register right, |
4077 Register scratch1, | 4077 Register scratch1, |
4078 Register scratch2) { | 4078 Register scratch2) { |
4079 Register length = scratch1; | 4079 Register length = scratch1; |
4080 | 4080 |
4081 // Compare lengths. | 4081 // Compare lengths. |
4082 Label check_zero_length; | 4082 Label check_zero_length; |
4083 __ movq(length, FieldOperand(left, String::kLengthOffset)); | 4083 __ movp(length, FieldOperand(left, String::kLengthOffset)); |
4084 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset)); | 4084 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset)); |
4085 __ j(equal, &check_zero_length, Label::kNear); | 4085 __ j(equal, &check_zero_length, Label::kNear); |
4086 __ Move(rax, Smi::FromInt(NOT_EQUAL)); | 4086 __ Move(rax, Smi::FromInt(NOT_EQUAL)); |
4087 __ ret(0); | 4087 __ ret(0); |
4088 | 4088 |
4089 // Check if the length is zero. | 4089 // Check if the length is zero. |
4090 Label compare_chars; | 4090 Label compare_chars; |
4091 __ bind(&check_zero_length); | 4091 __ bind(&check_zero_length); |
4092 STATIC_ASSERT(kSmiTag == 0); | 4092 STATIC_ASSERT(kSmiTag == 0); |
4093 __ SmiTest(length); | 4093 __ SmiTest(length); |
(...skipping 23 matching lines...) |
4117 Register right, | 4117 Register right, |
4118 Register scratch1, | 4118 Register scratch1, |
4119 Register scratch2, | 4119 Register scratch2, |
4120 Register scratch3, | 4120 Register scratch3, |
4121 Register scratch4) { | 4121 Register scratch4) { |
4122 // Ensure that you can always subtract a string length from a non-negative | 4122 // Ensure that you can always subtract a string length from a non-negative |
4123 // number (e.g. another length). | 4123 // number (e.g. another length). |
4124 STATIC_ASSERT(String::kMaxLength < 0x7fffffff); | 4124 STATIC_ASSERT(String::kMaxLength < 0x7fffffff); |
4125 | 4125 |
4126 // Find minimum length and length difference. | 4126 // Find minimum length and length difference. |
4127 __ movq(scratch1, FieldOperand(left, String::kLengthOffset)); | 4127 __ movp(scratch1, FieldOperand(left, String::kLengthOffset)); |
4128 __ movq(scratch4, scratch1); | 4128 __ movp(scratch4, scratch1); |
4129 __ SmiSub(scratch4, | 4129 __ SmiSub(scratch4, |
4130 scratch4, | 4130 scratch4, |
4131 FieldOperand(right, String::kLengthOffset)); | 4131 FieldOperand(right, String::kLengthOffset)); |
4132 // Register scratch4 now holds left.length - right.length. | 4132 // Register scratch4 now holds left.length - right.length. |
4133 const Register length_difference = scratch4; | 4133 const Register length_difference = scratch4; |
4134 Label left_shorter; | 4134 Label left_shorter; |
4135 __ j(less, &left_shorter, Label::kNear); | 4135 __ j(less, &left_shorter, Label::kNear); |
4136 // The right string isn't longer than the left one. | 4136 // The right string isn't longer than the left one. |
4137 // Get the right string's length by subtracting the (non-negative) difference | 4137 // Get the right string's length by subtracting the (non-negative) difference |
4138 // from the left string's length. | 4138 // from the left string's length. |
(...skipping 79 matching lines...) |
4218 | 4218 |
4219 void StringCompareStub::Generate(MacroAssembler* masm) { | 4219 void StringCompareStub::Generate(MacroAssembler* masm) { |
4220 Label runtime; | 4220 Label runtime; |
4221 | 4221 |
4222 // Stack frame on entry. | 4222 // Stack frame on entry. |
4223 // rsp[0] : return address | 4223 // rsp[0] : return address |
4224 // rsp[8] : right string | 4224 // rsp[8] : right string |
4225 // rsp[16] : left string | 4225 // rsp[16] : left string |
4226 | 4226 |
4227 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 4227 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
4228 __ movq(rdx, args.GetArgumentOperand(0)); // left | 4228 __ movp(rdx, args.GetArgumentOperand(0)); // left |
4229 __ movq(rax, args.GetArgumentOperand(1)); // right | 4229 __ movp(rax, args.GetArgumentOperand(1)); // right |
4230 | 4230 |
4231 // Check for identity. | 4231 // Check for identity. |
4232 Label not_same; | 4232 Label not_same; |
4233 __ cmpq(rdx, rax); | 4233 __ cmpq(rdx, rax); |
4234 __ j(not_equal, ¬_same, Label::kNear); | 4234 __ j(not_equal, ¬_same, Label::kNear); |
4235 __ Move(rax, Smi::FromInt(EQUAL)); | 4235 __ Move(rax, Smi::FromInt(EQUAL)); |
4236 Counters* counters = masm->isolate()->counters(); | 4236 Counters* counters = masm->isolate()->counters(); |
4237 __ IncrementCounter(counters->string_compare_native(), 1); | 4237 __ IncrementCounter(counters->string_compare_native(), 1); |
4238 __ ret(2 * kPointerSize); | 4238 __ ret(2 * kPointerSize); |
4239 | 4239 |
(...skipping 54 matching lines...) |
4294 if (GetCondition() == equal) { | 4294 if (GetCondition() == equal) { |
4295 // For equality we do not care about the sign of the result. | 4295 // For equality we do not care about the sign of the result. |
4296 __ subq(rax, rdx); | 4296 __ subq(rax, rdx); |
4297 } else { | 4297 } else { |
4298 Label done; | 4298 Label done; |
4299 __ subq(rdx, rax); | 4299 __ subq(rdx, rax); |
4300 __ j(no_overflow, &done, Label::kNear); | 4300 __ j(no_overflow, &done, Label::kNear); |
4301 // Correct sign of result in case of overflow. | 4301 // Correct sign of result in case of overflow. |
4302 __ not_(rdx); | 4302 __ not_(rdx); |
4303 __ bind(&done); | 4303 __ bind(&done); |
4304 __ movq(rax, rdx); | 4304 __ movp(rax, rdx); |
4305 } | 4305 } |
4306 __ ret(0); | 4306 __ ret(0); |
4307 | 4307 |
4308 __ bind(&miss); | 4308 __ bind(&miss); |
4309 GenerateMiss(masm); | 4309 GenerateMiss(masm); |
4310 } | 4310 } |
4311 | 4311 |
4312 | 4312 |
4313 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { | 4313 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { |
4314 ASSERT(state_ == CompareIC::NUMBER); | 4314 ASSERT(state_ == CompareIC::NUMBER); |
(...skipping 81 matching lines...) |
4396 Register right = rax; | 4396 Register right = rax; |
4397 Register tmp1 = rcx; | 4397 Register tmp1 = rcx; |
4398 Register tmp2 = rbx; | 4398 Register tmp2 = rbx; |
4399 | 4399 |
4400 // Check that both operands are heap objects. | 4400 // Check that both operands are heap objects. |
4401 Label miss; | 4401 Label miss; |
4402 Condition cond = masm->CheckEitherSmi(left, right, tmp1); | 4402 Condition cond = masm->CheckEitherSmi(left, right, tmp1); |
4403 __ j(cond, &miss, Label::kNear); | 4403 __ j(cond, &miss, Label::kNear); |
4404 | 4404 |
4405 // Check that both operands are internalized strings. | 4405 // Check that both operands are internalized strings. |
4406 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 4406 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
4407 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 4407 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
4408 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 4408 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
4409 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 4409 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
4410 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); | 4410 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
4411 __ or_(tmp1, tmp2); | 4411 __ or_(tmp1, tmp2); |
4412 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); | 4412 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); |
4413 __ j(not_zero, &miss, Label::kNear); | 4413 __ j(not_zero, &miss, Label::kNear); |
4414 | 4414 |
4415 // Internalized strings are compared by identity. | 4415 // Internalized strings are compared by identity. |
4416 Label done; | 4416 Label done; |
4417 __ cmpq(left, right); | 4417 __ cmpq(left, right); |
(...skipping 22 matching lines...) |
4440 Register tmp1 = rcx; | 4440 Register tmp1 = rcx; |
4441 Register tmp2 = rbx; | 4441 Register tmp2 = rbx; |
4442 | 4442 |
4443 // Check that both operands are heap objects. | 4443 // Check that both operands are heap objects. |
4444 Label miss; | 4444 Label miss; |
4445 Condition cond = masm->CheckEitherSmi(left, right, tmp1); | 4445 Condition cond = masm->CheckEitherSmi(left, right, tmp1); |
4446 __ j(cond, &miss, Label::kNear); | 4446 __ j(cond, &miss, Label::kNear); |
4447 | 4447 |
4448 // Check that both operands are unique names. This leaves the instance | 4448 // Check that both operands are unique names. This leaves the instance |
4449 // types loaded in tmp1 and tmp2. | 4449 // types loaded in tmp1 and tmp2. |
4450 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 4450 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
4451 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 4451 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
4452 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 4452 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
4453 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 4453 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
4454 | 4454 |
4455 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); | 4455 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); |
4456 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); | 4456 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); |
4457 | 4457 |
4458 // Unique names are compared by identity. | 4458 // Unique names are compared by identity. |
4459 Label done; | 4459 Label done; |
4460 __ cmpq(left, right); | 4460 __ cmpq(left, right); |
4461 // Make sure rax is non-zero. At this point input operands are | 4461 // Make sure rax is non-zero. At this point input operands are |
(...skipping 23 matching lines...) |
4485 Register tmp1 = rcx; | 4485 Register tmp1 = rcx; |
4486 Register tmp2 = rbx; | 4486 Register tmp2 = rbx; |
4487 Register tmp3 = rdi; | 4487 Register tmp3 = rdi; |
4488 | 4488 |
4489 // Check that both operands are heap objects. | 4489 // Check that both operands are heap objects. |
4490 Condition cond = masm->CheckEitherSmi(left, right, tmp1); | 4490 Condition cond = masm->CheckEitherSmi(left, right, tmp1); |
4491 __ j(cond, &miss); | 4491 __ j(cond, &miss); |
4492 | 4492 |
4493 // Check that both operands are strings. This leaves the instance | 4493 // Check that both operands are strings. This leaves the instance |
4494 // types loaded in tmp1 and tmp2. | 4494 // types loaded in tmp1 and tmp2. |
4495 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 4495 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
4496 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 4496 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
4497 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 4497 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
4498 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 4498 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
4499 __ movq(tmp3, tmp1); | 4499 __ movp(tmp3, tmp1); |
4500 STATIC_ASSERT(kNotStringTag != 0); | 4500 STATIC_ASSERT(kNotStringTag != 0); |
4501 __ or_(tmp3, tmp2); | 4501 __ or_(tmp3, tmp2); |
4502 __ testb(tmp3, Immediate(kIsNotStringMask)); | 4502 __ testb(tmp3, Immediate(kIsNotStringMask)); |
4503 __ j(not_zero, &miss); | 4503 __ j(not_zero, &miss); |
4504 | 4504 |
4505 // Fast check for identical strings. | 4505 // Fast check for identical strings. |
4506 Label not_same; | 4506 Label not_same; |
4507 __ cmpq(left, right); | 4507 __ cmpq(left, right); |
4508 __ j(not_equal, ¬_same, Label::kNear); | 4508 __ j(not_equal, ¬_same, Label::kNear); |
4509 STATIC_ASSERT(EQUAL == 0); | 4509 STATIC_ASSERT(EQUAL == 0); |
(...skipping 68 matching lines...) |
4578 __ bind(&miss); | 4578 __ bind(&miss); |
4579 GenerateMiss(masm); | 4579 GenerateMiss(masm); |
4580 } | 4580 } |
4581 | 4581 |
4582 | 4582 |
4583 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) { | 4583 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) { |
4584 Label miss; | 4584 Label miss; |
4585 Condition either_smi = masm->CheckEitherSmi(rdx, rax); | 4585 Condition either_smi = masm->CheckEitherSmi(rdx, rax); |
4586 __ j(either_smi, &miss, Label::kNear); | 4586 __ j(either_smi, &miss, Label::kNear); |
4587 | 4587 |
4588 __ movq(rcx, FieldOperand(rax, HeapObject::kMapOffset)); | 4588 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset)); |
4589 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); | 4589 __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); |
4590 __ Cmp(rcx, known_map_); | 4590 __ Cmp(rcx, known_map_); |
4591 __ j(not_equal, &miss, Label::kNear); | 4591 __ j(not_equal, &miss, Label::kNear); |
4592 __ Cmp(rbx, known_map_); | 4592 __ Cmp(rbx, known_map_); |
4593 __ j(not_equal, &miss, Label::kNear); | 4593 __ j(not_equal, &miss, Label::kNear); |
4594 | 4594 |
4595 __ subq(rax, rdx); | 4595 __ subq(rax, rdx); |
4596 __ ret(0); | 4596 __ ret(0); |
4597 | 4597 |
4598 __ bind(&miss); | 4598 __ bind(&miss); |
4599 GenerateMiss(masm); | 4599 GenerateMiss(masm); |
(...skipping 47 matching lines...) |
4647 __ and_(index, | 4647 __ and_(index, |
4648 Immediate(name->Hash() + NameDictionary::GetProbeOffset(i))); | 4648 Immediate(name->Hash() + NameDictionary::GetProbeOffset(i))); |
4649 | 4649 |
4650 // Scale the index by multiplying by the entry size. | 4650 // Scale the index by multiplying by the entry size. |
4651 ASSERT(NameDictionary::kEntrySize == 3); | 4651 ASSERT(NameDictionary::kEntrySize == 3); |
4652 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. | 4652 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. |
4653 | 4653 |
4654 Register entity_name = r0; | 4654 Register entity_name = r0; |
4655 // Having undefined in this slot means the name is not contained. | 4655 // Having undefined in this slot means the name is not contained. |
4656 ASSERT_EQ(kSmiTagSize, 1); | 4656 ASSERT_EQ(kSmiTagSize, 1); |
4657 __ movq(entity_name, Operand(properties, | 4657 __ movp(entity_name, Operand(properties, |
4658 index, | 4658 index, |
4659 times_pointer_size, | 4659 times_pointer_size, |
4660 kElementsStartOffset - kHeapObjectTag)); | 4660 kElementsStartOffset - kHeapObjectTag)); |
4661 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); | 4661 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); |
4662 __ j(equal, done); | 4662 __ j(equal, done); |
4663 | 4663 |
4664 // Stop if we found the property. | 4664 // Stop if we found the property. |
4665 __ Cmp(entity_name, Handle<Name>(name)); | 4665 __ Cmp(entity_name, Handle<Name>(name)); |
4666 __ j(equal, miss); | 4666 __ j(equal, miss); |
4667 | 4667 |
4668 Label good; | 4668 Label good; |
4669 // Check for the hole and skip. | 4669 // Check for the hole and skip. |
4670 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); | 4670 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); |
4671 __ j(equal, &good, Label::kNear); | 4671 __ j(equal, &good, Label::kNear); |
4672 | 4672 |
4673 // Check if the entry name is not a unique name. | 4673 // Check if the entry name is not a unique name. |
4674 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); | 4674 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); |
4675 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), | 4675 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), |
4676 miss); | 4676 miss); |
4677 __ bind(&good); | 4677 __ bind(&good); |
4678 } | 4678 } |
4679 | 4679 |
4680 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); | 4680 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); |
4681 __ Push(Handle<Object>(name)); | 4681 __ Push(Handle<Object>(name)); |
4682 __ push(Immediate(name->Hash())); | 4682 __ push(Immediate(name->Hash())); |
4683 __ CallStub(&stub); | 4683 __ CallStub(&stub); |
4684 __ testq(r0, r0); | 4684 __ testq(r0, r0); |
(...skipping 80 matching lines...) |
4765 | 4765 |
4766 // If the names of the slots in the range from 1 to kProbes - 1 for the | 4766 // If the names of the slots in the range from 1 to kProbes - 1 for the |
4767 // hash value are not equal to the name and the kProbes-th slot is not | 4767 // hash value are not equal to the name and the kProbes-th slot is not |
4768 // used (its name is the undefined value), the hash table is guaranteed | 4768 // used (its name is the undefined value), the hash table is guaranteed |
4769 // not to contain the property. This holds even if some slots represent | 4769 // not to contain the property. This holds even if some slots represent |
4770 // deleted properties (their names are the null value). | 4770 // deleted properties (their names are the null value). |
4771 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER, | 4771 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER, |
4772 kPointerSize); | 4772 kPointerSize); |
4773 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 4773 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
4774 // Compute the masked index: (hash + i + i * i) & mask. | 4774 // Compute the masked index: (hash + i + i * i) & mask. |
4775 __ movq(scratch, args.GetArgumentOperand(1)); | 4775 __ movp(scratch, args.GetArgumentOperand(1)); |
4776 if (i > 0) { | 4776 if (i > 0) { |
4777 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); | 4777 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); |
4778 } | 4778 } |
4779 __ and_(scratch, Operand(rsp, 0)); | 4779 __ and_(scratch, Operand(rsp, 0)); |
4780 | 4780 |
4781 // Scale the index by multiplying by the entry size. | 4781 // Scale the index by multiplying by the entry size. |
4782 ASSERT(NameDictionary::kEntrySize == 3); | 4782 ASSERT(NameDictionary::kEntrySize == 3); |
4783 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 4783 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. |
4784 | 4784 |
4785 // Having undefined in this slot means the name is not contained. | 4785 // Having undefined in this slot means the name is not contained. |
4786 __ movq(scratch, Operand(dictionary_, | 4786 __ movp(scratch, Operand(dictionary_, |
4787 index_, | 4787 index_, |
4788 times_pointer_size, | 4788 times_pointer_size, |
4789 kElementsStartOffset - kHeapObjectTag)); | 4789 kElementsStartOffset - kHeapObjectTag)); |
4790 | 4790 |
4791 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); | 4791 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); |
4792 __ j(equal, ¬_in_dictionary); | 4792 __ j(equal, ¬_in_dictionary); |
4793 | 4793 |
4794 // Stop if we found the property. | 4794 // Stop if we found the property. |
4795 __ cmpq(scratch, args.GetArgumentOperand(0)); | 4795 __ cmpq(scratch, args.GetArgumentOperand(0)); |
4796 __ j(equal, &in_dictionary); | 4796 __ j(equal, &in_dictionary); |
4797 | 4797 |
4798 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 4798 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
4799 // If we hit a key that is not a unique name during negative | 4799 // If we hit a key that is not a unique name during negative |
4800 // lookup, we have to bail out, as this key might be equal to the | 4800 // lookup, we have to bail out, as this key might be equal to the |
4801 // key we are looking for. | 4801 // key we are looking for. |
4802 | 4802 |
4803 // Check if the entry name is not a unique name. | 4803 // Check if the entry name is not a unique name. |
4804 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 4804 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
4805 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), | 4805 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), |
4806 &maybe_in_dictionary); | 4806 &maybe_in_dictionary); |
4807 } | 4807 } |
4808 } | 4808 } |
4809 | 4809 |
4810 __ bind(&maybe_in_dictionary); | 4810 __ bind(&maybe_in_dictionary); |
4811 // If we are doing negative lookup then probing failure should be | 4811 // If we are doing negative lookup then probing failure should be |
4812 // treated as a lookup success. For positive lookup probing failure | 4812 // treated as a lookup success. For positive lookup probing failure |
4813 // should be treated as lookup failure. | 4813 // should be treated as lookup failure. |
4814 if (mode_ == POSITIVE_LOOKUP) { | 4814 if (mode_ == POSITIVE_LOOKUP) { |
4815 __ movq(scratch, Immediate(0)); | 4815 __ movp(scratch, Immediate(0)); |
4816 __ Drop(1); | 4816 __ Drop(1); |
4817 __ ret(2 * kPointerSize); | 4817 __ ret(2 * kPointerSize); |
4818 } | 4818 } |
4819 | 4819 |
4820 __ bind(&in_dictionary); | 4820 __ bind(&in_dictionary); |
4821 __ movq(scratch, Immediate(1)); | 4821 __ movp(scratch, Immediate(1)); |
4822 __ Drop(1); | 4822 __ Drop(1); |
4823 __ ret(2 * kPointerSize); | 4823 __ ret(2 * kPointerSize); |
4824 | 4824 |
4825 __ bind(¬_in_dictionary); | 4825 __ bind(¬_in_dictionary); |
4826 __ movq(scratch, Immediate(0)); | 4826 __ movp(scratch, Immediate(0)); |
4827 __ Drop(1); | 4827 __ Drop(1); |
4828 __ ret(2 * kPointerSize); | 4828 __ ret(2 * kPointerSize); |
4829 } | 4829 } |
4830 | 4830 |
4831 | 4831 |
4832 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 4832 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
4833 Isolate* isolate) { | 4833 Isolate* isolate) { |
4834 StoreBufferOverflowStub stub1(kDontSaveFPRegs); | 4834 StoreBufferOverflowStub stub1(kDontSaveFPRegs); |
4835 stub1.GetCode(isolate); | 4835 stub1.GetCode(isolate); |
4836 StoreBufferOverflowStub stub2(kSaveFPRegs); | 4836 StoreBufferOverflowStub stub2(kSaveFPRegs); |
(...skipping 44 matching lines...) |
4881 masm->set_byte_at(2, kFiveByteNopInstruction); | 4881 masm->set_byte_at(2, kFiveByteNopInstruction); |
4882 } | 4882 } |
4883 | 4883 |
4884 | 4884 |
4885 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { | 4885 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { |
4886 regs_.Save(masm); | 4886 regs_.Save(masm); |
4887 | 4887 |
4888 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | 4888 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { |
4889 Label dont_need_remembered_set; | 4889 Label dont_need_remembered_set; |
4890 | 4890 |
4891 __ movq(regs_.scratch0(), Operand(regs_.address(), 0)); | 4891 __ movp(regs_.scratch0(), Operand(regs_.address(), 0)); |
4892 __ JumpIfNotInNewSpace(regs_.scratch0(), | 4892 __ JumpIfNotInNewSpace(regs_.scratch0(), |
4893 regs_.scratch0(), | 4893 regs_.scratch0(), |
4894 &dont_need_remembered_set); | 4894 &dont_need_remembered_set); |
4895 | 4895 |
4896 __ CheckPageFlag(regs_.object(), | 4896 __ CheckPageFlag(regs_.object(), |
4897 regs_.scratch0(), | 4897 regs_.scratch0(), |
4898 1 << MemoryChunk::SCAN_ON_SCAVENGE, | 4898 1 << MemoryChunk::SCAN_ON_SCAVENGE, |
4899 not_zero, | 4899 not_zero, |
4900 &dont_need_remembered_set); | 4900 &dont_need_remembered_set); |
4901 | 4901 |
(...skipping 53 matching lines...) |
4955 | 4955 |
4956 | 4956 |
4957 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( | 4957 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( |
4958 MacroAssembler* masm, | 4958 MacroAssembler* masm, |
4959 OnNoNeedToInformIncrementalMarker on_no_need, | 4959 OnNoNeedToInformIncrementalMarker on_no_need, |
4960 Mode mode) { | 4960 Mode mode) { |
4961 Label on_black; | 4961 Label on_black; |
4962 Label need_incremental; | 4962 Label need_incremental; |
4963 Label need_incremental_pop_object; | 4963 Label need_incremental_pop_object; |
4964 | 4964 |
4965 __ movq(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask)); | 4965 __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask)); |
4966 __ and_(regs_.scratch0(), regs_.object()); | 4966 __ and_(regs_.scratch0(), regs_.object()); |
4967 __ movq(regs_.scratch1(), | 4967 __ movp(regs_.scratch1(), |
4968 Operand(regs_.scratch0(), | 4968 Operand(regs_.scratch0(), |
4969 MemoryChunk::kWriteBarrierCounterOffset)); | 4969 MemoryChunk::kWriteBarrierCounterOffset)); |
4970 __ subq(regs_.scratch1(), Immediate(1)); | 4970 __ subq(regs_.scratch1(), Immediate(1)); |
4971 __ movq(Operand(regs_.scratch0(), | 4971 __ movp(Operand(regs_.scratch0(), |
4972 MemoryChunk::kWriteBarrierCounterOffset), | 4972 MemoryChunk::kWriteBarrierCounterOffset), |
4973 regs_.scratch1()); | 4973 regs_.scratch1()); |
4974 __ j(negative, &need_incremental); | 4974 __ j(negative, &need_incremental); |
4975 | 4975 |
4976 // Let's look at the color of the object: If it is not black we don't have | 4976 // Let's look at the color of the object: If it is not black we don't have |
4977 // to inform the incremental marker. | 4977 // to inform the incremental marker. |
4978 __ JumpIfBlack(regs_.object(), | 4978 __ JumpIfBlack(regs_.object(), |
4979 regs_.scratch0(), | 4979 regs_.scratch0(), |
4980 regs_.scratch1(), | 4980 regs_.scratch1(), |
4981 &on_black, | 4981 &on_black, |
4982 Label::kNear); | 4982 Label::kNear); |
4983 | 4983 |
4984 regs_.Restore(masm); | 4984 regs_.Restore(masm); |
4985 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { | 4985 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
4986 __ RememberedSetHelper(object_, | 4986 __ RememberedSetHelper(object_, |
4987 address_, | 4987 address_, |
4988 value_, | 4988 value_, |
4989 save_fp_regs_mode_, | 4989 save_fp_regs_mode_, |
4990 MacroAssembler::kReturnAtEnd); | 4990 MacroAssembler::kReturnAtEnd); |
4991 } else { | 4991 } else { |
4992 __ ret(0); | 4992 __ ret(0); |
4993 } | 4993 } |
4994 | 4994 |
4995 __ bind(&on_black); | 4995 __ bind(&on_black); |
4996 | 4996 |
4997 // Get the value from the slot. | 4997 // Get the value from the slot. |
4998 __ movq(regs_.scratch0(), Operand(regs_.address(), 0)); | 4998 __ movp(regs_.scratch0(), Operand(regs_.address(), 0)); |
4999 | 4999 |
5000 if (mode == INCREMENTAL_COMPACTION) { | 5000 if (mode == INCREMENTAL_COMPACTION) { |
5001 Label ensure_not_white; | 5001 Label ensure_not_white; |
5002 | 5002 |
5003 __ CheckPageFlag(regs_.scratch0(), // Contains value. | 5003 __ CheckPageFlag(regs_.scratch0(), // Contains value. |
5004 regs_.scratch1(), // Scratch. | 5004 regs_.scratch1(), // Scratch. |
5005 MemoryChunk::kEvacuationCandidateMask, | 5005 MemoryChunk::kEvacuationCandidateMask, |
5006 zero, | 5006 zero, |
5007 &ensure_not_white, | 5007 &ensure_not_white, |
5008 Label::kNear); | 5008 Label::kNear); |
(...skipping 48 matching lines...) |
5057 // ----------------------------------- | 5057 // ----------------------------------- |
5058 | 5058 |
5059 Label element_done; | 5059 Label element_done; |
5060 Label double_elements; | 5060 Label double_elements; |
5061 Label smi_element; | 5061 Label smi_element; |
5062 Label slow_elements; | 5062 Label slow_elements; |
5063 Label fast_elements; | 5063 Label fast_elements; |
5064 | 5064 |
5065 // Get array literal index, array literal and its map. | 5065 // Get array literal index, array literal and its map. |
5066 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 5066 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
5067 __ movq(rdx, args.GetArgumentOperand(1)); | 5067 __ movp(rdx, args.GetArgumentOperand(1)); |
5068 __ movq(rbx, args.GetArgumentOperand(0)); | 5068 __ movp(rbx, args.GetArgumentOperand(0)); |
5069 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset)); | 5069 __ movp(rdi, FieldOperand(rbx, JSObject::kMapOffset)); |
5070 | 5070 |
5071 __ CheckFastElements(rdi, &double_elements); | 5071 __ CheckFastElements(rdi, &double_elements); |
5072 | 5072 |
5073 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS | 5073 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS |
5074 __ JumpIfSmi(rax, &smi_element); | 5074 __ JumpIfSmi(rax, &smi_element); |
5075 __ CheckFastSmiElements(rdi, &fast_elements); | 5075 __ CheckFastSmiElements(rdi, &fast_elements); |
5076 | 5076 |
5077 // Storing into the array literal requires an elements transition. Call | 5077 // Storing into the array literal requires an elements transition. Call |
5078 // into the runtime. | 5078 // into the runtime. |
5079 | 5079 |
5080 __ bind(&slow_elements); | 5080 __ bind(&slow_elements); |
5081 __ PopReturnAddressTo(rdi); | 5081 __ PopReturnAddressTo(rdi); |
5082 __ push(rbx); | 5082 __ push(rbx); |
5083 __ push(rcx); | 5083 __ push(rcx); |
5084 __ push(rax); | 5084 __ push(rax); |
5085 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 5085 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
5086 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); | 5086 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); |
5087 __ push(rdx); | 5087 __ push(rdx); |
5088 __ PushReturnAddressFrom(rdi); | 5088 __ PushReturnAddressFrom(rdi); |
5089 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); | 5089 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); |
5090 | 5090 |
5091 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object. | 5091 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object. |
5092 __ bind(&fast_elements); | 5092 __ bind(&fast_elements); |
5093 __ SmiToInteger32(kScratchRegister, rcx); | 5093 __ SmiToInteger32(kScratchRegister, rcx); |
5094 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); | 5094 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); |
5095 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size, | 5095 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size, |
5096 FixedArrayBase::kHeaderSize)); | 5096 FixedArrayBase::kHeaderSize)); |
5097 __ movq(Operand(rcx, 0), rax); | 5097 __ movp(Operand(rcx, 0), rax); |
5098 // Update the write barrier for the array store. | 5098 // Update the write barrier for the array store. |
5099 __ RecordWrite(rbx, rcx, rax, | 5099 __ RecordWrite(rbx, rcx, rax, |
5100 kDontSaveFPRegs, | 5100 kDontSaveFPRegs, |
5101 EMIT_REMEMBERED_SET, | 5101 EMIT_REMEMBERED_SET, |
5102 OMIT_SMI_CHECK); | 5102 OMIT_SMI_CHECK); |
5103 __ ret(0); | 5103 __ ret(0); |
5104 | 5104 |
5105 // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or | 5105 // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or |
5106 // FAST_*_ELEMENTS, and value is Smi. | 5106 // FAST_*_ELEMENTS, and value is Smi. |
5107 __ bind(&smi_element); | 5107 __ bind(&smi_element); |
5108 __ SmiToInteger32(kScratchRegister, rcx); | 5108 __ SmiToInteger32(kScratchRegister, rcx); |
5109 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); | 5109 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); |
5110 __ movq(FieldOperand(rbx, kScratchRegister, times_pointer_size, | 5110 __ movp(FieldOperand(rbx, kScratchRegister, times_pointer_size, |
5111 FixedArrayBase::kHeaderSize), rax); | 5111 FixedArrayBase::kHeaderSize), rax); |
5112 __ ret(0); | 5112 __ ret(0); |
5113 | 5113 |
5114 // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS. | 5114 // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS. |
5115 __ bind(&double_elements); | 5115 __ bind(&double_elements); |
5116 | 5116 |
5117 __ movq(r9, FieldOperand(rbx, JSObject::kElementsOffset)); | 5117 __ movp(r9, FieldOperand(rbx, JSObject::kElementsOffset)); |
5118 __ SmiToInteger32(r11, rcx); | 5118 __ SmiToInteger32(r11, rcx); |
5119 __ StoreNumberToDoubleElements(rax, | 5119 __ StoreNumberToDoubleElements(rax, |
5120 r9, | 5120 r9, |
5121 r11, | 5121 r11, |
5122 xmm0, | 5122 xmm0, |
5123 &slow_elements); | 5123 &slow_elements); |
5124 __ ret(0); | 5124 __ ret(0); |
5125 } | 5125 } |
5126 | 5126 |
5127 | 5127 |
5128 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 5128 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
5129 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); | 5129 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); |
5130 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 5130 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
5131 int parameter_count_offset = | 5131 int parameter_count_offset = |
5132 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 5132 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
5133 __ movq(rbx, MemOperand(rbp, parameter_count_offset)); | 5133 __ movp(rbx, MemOperand(rbp, parameter_count_offset)); |
5134 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 5134 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
5135 __ PopReturnAddressTo(rcx); | 5135 __ PopReturnAddressTo(rcx); |
5136 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE | 5136 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE |
5137 ? kPointerSize | 5137 ? kPointerSize |
5138 : 0; | 5138 : 0; |
5139 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); | 5139 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); |
5140 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. | 5140 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. |
5141 } | 5141 } |
5142 | 5142 |
5143 | 5143 |
5144 void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) { | 5144 void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) { |
5145 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); | 5145 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); |
5146 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 5146 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
5147 __ movq(rdi, rax); | 5147 __ movp(rdi, rax); |
5148 int parameter_count_offset = | 5148 int parameter_count_offset = |
5149 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 5149 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
5150 __ movq(rax, MemOperand(rbp, parameter_count_offset)); | 5150 __ movp(rax, MemOperand(rbp, parameter_count_offset)); |
5151 // The parameter count above includes the receiver for the arguments passed | 5151 // The parameter count above includes the receiver for the arguments passed |
5152 // to the deoptimization handler. Subtract the receiver to get the parameter | 5152 // to the deoptimization handler. Subtract the receiver to get the parameter |
5153 // count for the call. | 5153 // count for the call. |
5154 __ subl(rax, Immediate(1)); | 5154 __ subl(rax, Immediate(1)); |
5155 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 5155 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
5156 ParameterCount argument_count(rax); | 5156 ParameterCount argument_count(rax); |
5157 __ InvokeFunction(rdi, argument_count, JUMP_FUNCTION, NullCallWrapper()); | 5157 __ InvokeFunction(rdi, argument_count, JUMP_FUNCTION, NullCallWrapper()); |
5158 } | 5158 } |
5159 | 5159 |
5160 | 5160 |
(...skipping 10 matching lines...) |
5171 // all volatile and callee-save registers. | 5171 // all volatile and callee-save registers. |
5172 const size_t kNumSavedRegisters = 2; | 5172 const size_t kNumSavedRegisters = 2; |
5173 __ push(arg_reg_1); | 5173 __ push(arg_reg_1); |
5174 __ push(arg_reg_2); | 5174 __ push(arg_reg_2); |
5175 | 5175 |
5176 // Calculate the original stack pointer and store it in the second arg. | 5176 // Calculate the original stack pointer and store it in the second arg. |
5177 __ lea(arg_reg_2, | 5177 __ lea(arg_reg_2, |
5178 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize)); | 5178 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize)); |
5179 | 5179 |
5180 // Calculate the function address into the first arg. | 5180 // Calculate the function address into the first arg. |
5181 __ movq(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); | 5181 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); |
5182 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); | 5182 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); |
5183 | 5183 |
5184 // Save the remainder of the volatile registers. | 5184 // Save the remainder of the volatile registers. |
5185 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); | 5185 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); |
5186 | 5186 |
5187 // Call the entry hook function. | 5187 // Call the entry hook function. |
5188 __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), | 5188 __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), |
5189 RelocInfo::NONE64); | 5189 RelocInfo::NONE64); |
5190 | 5190 |
5191 AllowExternalCallThatCantCauseGC scope(masm); | 5191 AllowExternalCallThatCantCauseGC scope(masm); |
(...skipping 59 matching lines...) |
5251 ASSERT(FAST_DOUBLE_ELEMENTS == 4); | 5251 ASSERT(FAST_DOUBLE_ELEMENTS == 4); |
5252 ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); | 5252 ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); |
5253 | 5253 |
5254 // Is the low bit set? If so, the kind is already holey, and that is good. | 5254 // Is the low bit set? If so, the kind is already holey, and that is good. |
5255 __ testb(rdx, Immediate(1)); | 5255 __ testb(rdx, Immediate(1)); |
5256 __ j(not_zero, &normal_sequence); | 5256 __ j(not_zero, &normal_sequence); |
5257 } | 5257 } |
5258 | 5258 |
5259 // Look at the first argument. | 5259 // Look at the first argument. |
5260 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 5260 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
5261 __ movq(rcx, args.GetArgumentOperand(0)); | 5261 __ movp(rcx, args.GetArgumentOperand(0)); |
5262 __ testq(rcx, rcx); | 5262 __ testq(rcx, rcx); |
5263 __ j(zero, &normal_sequence); | 5263 __ j(zero, &normal_sequence); |
5264 | 5264 |
5265 if (mode == DISABLE_ALLOCATION_SITES) { | 5265 if (mode == DISABLE_ALLOCATION_SITES) { |
5266 ElementsKind initial = GetInitialFastElementsKind(); | 5266 ElementsKind initial = GetInitialFastElementsKind(); |
5267 ElementsKind holey_initial = GetHoleyElementsKind(initial); | 5267 ElementsKind holey_initial = GetHoleyElementsKind(initial); |
5268 | 5268 |
5269 ArraySingleArgumentConstructorStub stub_holey(holey_initial, | 5269 ArraySingleArgumentConstructorStub stub_holey(holey_initial, |
5270 DISABLE_ALLOCATION_SITES); | 5270 DISABLE_ALLOCATION_SITES); |
5271 __ TailCallStub(&stub_holey); | 5271 __ TailCallStub(&stub_holey); |
5272 | 5272 |
5273 __ bind(&normal_sequence); | 5273 __ bind(&normal_sequence); |
5274 ArraySingleArgumentConstructorStub stub(initial, | 5274 ArraySingleArgumentConstructorStub stub(initial, |
5275 DISABLE_ALLOCATION_SITES); | 5275 DISABLE_ALLOCATION_SITES); |
5276 __ TailCallStub(&stub); | 5276 __ TailCallStub(&stub); |
5277 } else if (mode == DONT_OVERRIDE) { | 5277 } else if (mode == DONT_OVERRIDE) { |
5278 // We are going to create a holey array, but our kind is non-holey. | 5278 // We are going to create a holey array, but our kind is non-holey. |
5279 // Fix kind and retry (only if we have an allocation site in the cell). | 5279 // Fix kind and retry (only if we have an allocation site in the cell). |
5280 __ incl(rdx); | 5280 __ incl(rdx); |
5281 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); | 5281 __ movp(rcx, FieldOperand(rbx, Cell::kValueOffset)); |
5282 if (FLAG_debug_code) { | 5282 if (FLAG_debug_code) { |
5283 Handle<Map> allocation_site_map = | 5283 Handle<Map> allocation_site_map = |
5284 masm->isolate()->factory()->allocation_site_map(); | 5284 masm->isolate()->factory()->allocation_site_map(); |
5285 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); | 5285 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); |
5286 __ Assert(equal, kExpectedAllocationSiteInCell); | 5286 __ Assert(equal, kExpectedAllocationSiteInCell); |
5287 } | 5287 } |
5288 | 5288 |
5289 // Save the resulting elements kind in type info. We can't just store rdx | 5289 // Save the resulting elements kind in type info. We can't just store rdx |
5290 // in the AllocationSite::transition_info field, because the elements kind | 5290 // in the AllocationSite::transition_info field, because the elements kind |
5291 // is restricted to a portion of the field; the upper bits must be left alone. | 5291 // is restricted to a portion of the field; the upper bits must be left alone. |
(...skipping 101 matching lines...) |
5393 // ----------------------------------- | 5393 // ----------------------------------- |
5394 Handle<Object> undefined_sentinel( | 5394 Handle<Object> undefined_sentinel( |
5395 masm->isolate()->heap()->undefined_value(), | 5395 masm->isolate()->heap()->undefined_value(), |
5396 masm->isolate()); | 5396 masm->isolate()); |
5397 | 5397 |
5398 if (FLAG_debug_code) { | 5398 if (FLAG_debug_code) { |
5399 // The array construct code is only set for the global and natives | 5399 // The array construct code is only set for the global and natives |
5400 // builtin Array functions which always have maps. | 5400 // builtin Array functions which always have maps. |
5401 | 5401 |
5402 // Initial map for the builtin Array function should be a map. | 5402 // Initial map for the builtin Array function should be a map. |
5403 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 5403 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
5404 // The Smi check below catches both a NULL pointer and a Smi. | 5404 // The Smi check below catches both a NULL pointer and a Smi. |
5405 STATIC_ASSERT(kSmiTag == 0); | 5405 STATIC_ASSERT(kSmiTag == 0); |
5406 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); | 5406 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); |
5407 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); | 5407 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); |
5408 __ CmpObjectType(rcx, MAP_TYPE, rcx); | 5408 __ CmpObjectType(rcx, MAP_TYPE, rcx); |
5409 __ Check(equal, kUnexpectedInitialMapForArrayFunction); | 5409 __ Check(equal, kUnexpectedInitialMapForArrayFunction); |
5410 | 5410 |
5411 // We should either have undefined in rbx or a valid cell | 5411 // We should either have undefined in rbx or a valid cell |
5412 Label okay_here; | 5412 Label okay_here; |
5413 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 5413 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
5414 __ Cmp(rbx, undefined_sentinel); | 5414 __ Cmp(rbx, undefined_sentinel); |
5415 __ j(equal, &okay_here); | 5415 __ j(equal, &okay_here); |
5416 __ Cmp(FieldOperand(rbx, 0), cell_map); | 5416 __ Cmp(FieldOperand(rbx, 0), cell_map); |
5417 __ Assert(equal, kExpectedPropertyCellInRegisterRbx); | 5417 __ Assert(equal, kExpectedPropertyCellInRegisterRbx); |
5418 __ bind(&okay_here); | 5418 __ bind(&okay_here); |
5419 } | 5419 } |
5420 | 5420 |
5421 Label no_info; | 5421 Label no_info; |
5422 // If the type cell is undefined, or contains anything other than an | 5422 // If the type cell is undefined, or contains anything other than an |
5423 // AllocationSite, call an array constructor that doesn't use AllocationSites. | 5423 // AllocationSite, call an array constructor that doesn't use AllocationSites. |
5424 __ Cmp(rbx, undefined_sentinel); | 5424 __ Cmp(rbx, undefined_sentinel); |
5425 __ j(equal, &no_info); | 5425 __ j(equal, &no_info); |
5426 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); | 5426 __ movp(rdx, FieldOperand(rbx, Cell::kValueOffset)); |
5427 __ Cmp(FieldOperand(rdx, 0), | 5427 __ Cmp(FieldOperand(rdx, 0), |
5428 masm->isolate()->factory()->allocation_site_map()); | 5428 masm->isolate()->factory()->allocation_site_map()); |
5429 __ j(not_equal, &no_info); | 5429 __ j(not_equal, &no_info); |
5430 | 5430 |
5431 // Only look at the lower 16 bits of the transition info. | 5431 // Only look at the lower 16 bits of the transition info. |
5432 __ movq(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset)); | 5432 __ movp(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset)); |
5433 __ SmiToInteger32(rdx, rdx); | 5433 __ SmiToInteger32(rdx, rdx); |
5434 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 5434 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
5435 __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); | 5435 __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); |
5436 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 5436 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
5437 | 5437 |
5438 __ bind(&no_info); | 5438 __ bind(&no_info); |
5439 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); | 5439 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); |
5440 } | 5440 } |
5441 | 5441 |
5442 | 5442 |
5443 void InternalArrayConstructorStub::GenerateCase( | 5443 void InternalArrayConstructorStub::GenerateCase( |
5444 MacroAssembler* masm, ElementsKind kind) { | 5444 MacroAssembler* masm, ElementsKind kind) { |
5445 Label not_zero_case, not_one_case; | 5445 Label not_zero_case, not_one_case; |
5446 Label normal_sequence; | 5446 Label normal_sequence; |
5447 | 5447 |
5448 __ testq(rax, rax); | 5448 __ testq(rax, rax); |
5449 __ j(not_zero, ¬_zero_case); | 5449 __ j(not_zero, ¬_zero_case); |
5450 InternalArrayNoArgumentConstructorStub stub0(kind); | 5450 InternalArrayNoArgumentConstructorStub stub0(kind); |
5451 __ TailCallStub(&stub0); | 5451 __ TailCallStub(&stub0); |
5452 | 5452 |
5453 __ bind(¬_zero_case); | 5453 __ bind(¬_zero_case); |
5454 __ cmpl(rax, Immediate(1)); | 5454 __ cmpl(rax, Immediate(1)); |
5455 __ j(greater, ¬_one_case); | 5455 __ j(greater, ¬_one_case); |
5456 | 5456 |
5457 if (IsFastPackedElementsKind(kind)) { | 5457 if (IsFastPackedElementsKind(kind)) { |
5458 // We might need to create a holey array; | 5458 // We might need to create a holey array; |
5459 // look at the first argument. | 5459 // look at the first argument. |
5460 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 5460 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
5461 __ movq(rcx, args.GetArgumentOperand(0)); | 5461 __ movp(rcx, args.GetArgumentOperand(0)); |
5462 __ testq(rcx, rcx); | 5462 __ testq(rcx, rcx); |
5463 __ j(zero, &normal_sequence); | 5463 __ j(zero, &normal_sequence); |
5464 | 5464 |
5465 InternalArraySingleArgumentConstructorStub | 5465 InternalArraySingleArgumentConstructorStub |
5466 stub1_holey(GetHoleyElementsKind(kind)); | 5466 stub1_holey(GetHoleyElementsKind(kind)); |
5467 __ TailCallStub(&stub1_holey); | 5467 __ TailCallStub(&stub1_holey); |
5468 } | 5468 } |
5469 | 5469 |
5470 __ bind(&normal_sequence); | 5470 __ bind(&normal_sequence); |
5471 InternalArraySingleArgumentConstructorStub stub1(kind); | 5471 InternalArraySingleArgumentConstructorStub stub1(kind); |
(...skipping 12 matching lines...) |
5484 // -- rdi : constructor | 5484 // -- rdi : constructor |
5485 // -- rsp[0] : return address | 5485 // -- rsp[0] : return address |
5486 // -- rsp[8] : last argument | 5486 // -- rsp[8] : last argument |
5487 // ----------------------------------- | 5487 // ----------------------------------- |
5488 | 5488 |
5489 if (FLAG_debug_code) { | 5489 if (FLAG_debug_code) { |
5490 // The array construct code is only set for the global and natives | 5490 // The array construct code is only set for the global and natives |
5491 // builtin Array functions which always have maps. | 5491 // builtin Array functions which always have maps. |
5492 | 5492 |
5493 // Initial map for the builtin Array function should be a map. | 5493 // Initial map for the builtin Array function should be a map. |
5494 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 5494 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
5495 // The Smi check below catches both a NULL pointer and a Smi. | 5495 // The Smi check below catches both a NULL pointer and a Smi. |
5496 STATIC_ASSERT(kSmiTag == 0); | 5496 STATIC_ASSERT(kSmiTag == 0); |
5497 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); | 5497 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); |
5498 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); | 5498 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); |
5499 __ CmpObjectType(rcx, MAP_TYPE, rcx); | 5499 __ CmpObjectType(rcx, MAP_TYPE, rcx); |
5500 __ Check(equal, kUnexpectedInitialMapForArrayFunction); | 5500 __ Check(equal, kUnexpectedInitialMapForArrayFunction); |
5501 } | 5501 } |
5502 | 5502 |
5503 // Figure out the right elements kind | 5503 // Figure out the right elements kind |
5504 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 5504 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
5505 | 5505 |
5506 // Load the map's "bit field 2" into rcx. We only need the first byte, | 5506 // Load the map's "bit field 2" into rcx. We only need the first byte, |
5507 // but the following masking takes care of that anyway. | 5507 // but the following masking takes care of that anyway. |
5508 __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset)); | 5508 __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset)); |
5509 // Retrieve elements_kind from bit field 2. | 5509 // Retrieve elements_kind from bit field 2. |
5510 __ and_(rcx, Immediate(Map::kElementsKindMask)); | 5510 __ and_(rcx, Immediate(Map::kElementsKindMask)); |
5511 __ shr(rcx, Immediate(Map::kElementsKindShift)); | 5511 __ shr(rcx, Immediate(Map::kElementsKindShift)); |
5512 | 5512 |
5513 if (FLAG_debug_code) { | 5513 if (FLAG_debug_code) { |
5514 Label done; | 5514 Label done; |
(...skipping 13 matching lines...) |
5528 __ bind(&fast_elements_case); | 5528 __ bind(&fast_elements_case); |
5529 GenerateCase(masm, FAST_ELEMENTS); | 5529 GenerateCase(masm, FAST_ELEMENTS); |
5530 } | 5530 } |
5531 | 5531 |
5532 | 5532 |
5533 #undef __ | 5533 #undef __ |
5534 | 5534 |
5535 } } // namespace v8::internal | 5535 } } // namespace v8::internal |
5536 | 5536 |
5537 #endif // V8_TARGET_ARCH_X64 | 5537 #endif // V8_TARGET_ARCH_X64 |