Chromium Code Reviews

Side by Side Diff: src/a64/code-stubs-a64.cc

Issue 149413010: A64: Synchronize with r16024. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 561 matching lines...)
572 __ Str(obj_length, FieldMemOperand(context, FixedArray::kLengthOffset)); 572 __ Str(obj_length, FieldMemOperand(context, FixedArray::kLengthOffset));
573 573
574 // If this block context is nested in the native context we get a smi 574 // If this block context is nested in the native context we get a smi
575 // sentinel instead of a function. The block context should get the 575 // sentinel instead of a function. The block context should get the
576 // canonical empty function of the native context as its closure which we 576 // canonical empty function of the native context as its closure which we
577 // still have to look up. 577 // still have to look up.
578 Label after_sentinel; 578 Label after_sentinel;
579 __ JumpIfNotSmi(function, &after_sentinel); 579 __ JumpIfNotSmi(function, &after_sentinel);
580 if (FLAG_debug_code) { 580 if (FLAG_debug_code) {
581 __ Cmp(function, 0); 581 __ Cmp(function, 0);
582 __ Assert(eq, "Expected 0 as a Smi sentinel"); 582 __ Assert(eq, kExpected0AsASmiSentinel);
583 } 583 }
584 584
585 Register global_ctx = x14; 585 Register global_ctx = x14;
586 __ Ldr(global_ctx, FieldMemOperand(global_obj, 586 __ Ldr(global_ctx, FieldMemOperand(global_obj,
587 GlobalObject::kNativeContextOffset)); 587 GlobalObject::kNativeContextOffset));
588 __ Ldr(function, ContextMemOperand(global_ctx, Context::CLOSURE_INDEX)); 588 __ Ldr(function, ContextMemOperand(global_ctx, Context::CLOSURE_INDEX));
589 __ Bind(&after_sentinel); 589 __ Bind(&after_sentinel);
590 590
591 // Store the global object from the previous context, and set up the fixed 591 // Store the global object from the previous context, and set up the fixed
592 // slots. 592 // slots.
(...skipping 95 matching lines...)
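Note: the recurring change throughout this file is the one visible above: Assert and Abort now take a BailoutReason enum constant (kExpected0AsASmiSentinel and friends) instead of an ad-hoc message string, matching upstream r16024. A minimal standalone sketch of that pattern, with invented names rather than V8's actual declarations:

#include <cstdio>
#include <cstdlib>

// Hypothetical enum standing in for V8's BailoutReason constants; the real
// list pairs each constant with its message in one central table.
enum BailoutReason {
  kExpected0AsASmiSentinel,
  kExpectedNonIdenticalObjects,
};

static const char* const kBailoutMessages[] = {
  "Expected 0 as a Smi sentinel",
  "Expected non-identical objects",
};

// Stands in for MacroAssembler::Assert: on failure, abort with the message
// looked up from the table, so call sites carry an enum, not a string.
static void AssertReason(bool condition, BailoutReason reason) {
  if (!condition) {
    std::fprintf(stderr, "abort: %s\n", kBailoutMessages[reason]);
    std::abort();
  }
}

int main() {
  AssertReason(true, kExpected0AsASmiSentinel);  // holds, so nothing happens
  return 0;
}

One motivation is that each call site then carries a small immediate rather than a string pointer, and all debug messages stay greppable in a single place.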
688 if (cond == le) { 688 if (cond == le) {
689 __ Mov(result, GREATER); 689 __ Mov(result, GREATER);
690 } else { 690 } else {
691 __ Mov(result, LESS); 691 __ Mov(result, LESS);
692 } 692 }
693 __ Ret(); 693 __ Ret();
694 } 694 }
695 695
696 // No fall through here. 696 // No fall through here.
697 if (FLAG_debug_code) { 697 if (FLAG_debug_code) {
698 __ Abort("We should never reach this code."); 698 __ Unreachable();
699 } 699 }
700 700
701 __ Bind(&not_identical); 701 __ Bind(&not_identical);
702 } 702 }
703 703
704 704
705 // See call site for description. 705 // See call site for description.
706 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, 706 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
707 Register left, 707 Register left,
708 Register right, 708 Register right,
709 Register left_type, 709 Register left_type,
710 Register right_type, 710 Register right_type,
711 Register scratch) { 711 Register scratch) {
712 ASSERT(!AreAliased(left, right, left_type, right_type, scratch)); 712 ASSERT(!AreAliased(left, right, left_type, right_type, scratch));
713 713
714 if (masm->emit_debug_code()) { 714 if (masm->emit_debug_code()) {
715 // We assume that the arguments are not identical. 715 // We assume that the arguments are not identical.
716 __ Cmp(left, right); 716 __ Cmp(left, right);
717 __ Assert(ne, "Expected non-identical objects."); 717 __ Assert(ne, kExpectedNonIdenticalObjects);
718 } 718 }
719 719
720 // If either operand is a JS object or an oddball value, then they are not 720 // If either operand is a JS object or an oddball value, then they are not
721 // equal since their pointers are different. 721 // equal since their pointers are different.
722 // There is no test for undetectability in strict equality. 722 // There is no test for undetectability in strict equality.
723 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE); 723 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
724 Label right_non_object; 724 Label right_non_object;
725 725
726 __ Cmp(right_type, FIRST_SPEC_OBJECT_TYPE); 726 __ Cmp(right_type, FIRST_SPEC_OBJECT_TYPE);
727 __ B(lt, &right_non_object); 727 __ B(lt, &right_non_object);
(...skipping 301 matching lines...)
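Note: EmitStrictTwoHeapObjectCompare leans on the comment above: under strict equality a spec object or oddball is equal only to itself, so once both operands are known to be heap objects of those types, pointer identity decides the comparison. A trivial host-side model of that reasoning (types invented for illustration):

#include <cassert>

struct HeapObject { int payload; };

// Strict equality on spec objects and oddballs is reference equality: two
// distinct allocations are never equal, whatever their contents.
static bool StrictEquals(const HeapObject* left, const HeapObject* right) {
  return left == right;
}

int main() {
  HeapObject a{7};
  HeapObject b{7};  // same contents, different identity
  assert(StrictEquals(&a, &a));
  assert(!StrictEquals(&a, &b));
  return 0;
}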
1029 if (cond == eq) { 1029 if (cond == eq) {
1030 StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, rhs, 1030 StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, rhs,
1031 x10, x11, x12); 1031 x10, x11, x12);
1032 } else { 1032 } else {
1033 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, lhs, rhs, 1033 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, lhs, rhs,
1034 x10, x11, x12, x13); 1034 x10, x11, x12, x13);
1035 } 1035 }
1036 1036
1037 // Never fall through to here. 1037 // Never fall through to here.
1038 if (FLAG_debug_code) { 1038 if (FLAG_debug_code) {
1039 __ Abort("We should never reach this code."); 1039 __ Unreachable();
1040 } 1040 }
1041 1041
1042 __ Bind(&slow); 1042 __ Bind(&slow);
1043 1043
1044 __ Push(lhs, rhs); 1044 __ Push(lhs, rhs);
1045 // Figure out which native to call and set up the arguments. 1045 // Figure out which native to call and set up the arguments.
1046 Builtins::JavaScript native; 1046 Builtins::JavaScript native;
1047 if (cond == eq) { 1047 if (cond == eq) {
1048 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; 1048 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
1049 } else { 1049 } else {
(...skipping 529 matching lines...)
1579 1579
1580 // Code falls through if the result is not returned as either a smi or heap 1580 // Code falls through if the result is not returned as either a smi or heap
1581 // number. 1581 // number.
1582 __ Bind(&right_arg_changed); 1582 __ Bind(&right_arg_changed);
1583 GenerateTypeTransition(masm); 1583 GenerateTypeTransition(masm);
1584 1584
1585 __ Bind(&call_runtime); 1585 __ Bind(&call_runtime);
1586 #ifdef DEBUG 1586 #ifdef DEBUG
1587 if (masm->emit_debug_code()) { 1587 if (masm->emit_debug_code()) {
1588 __ Cmp(saved_left, x1); 1588 __ Cmp(saved_left, x1);
1589 __ Assert(eq, "lhs has been clobbered."); 1589 __ Assert(eq, kLhsHasBeenClobbered);
1590 __ Cmp(saved_right, x0); 1590 __ Cmp(saved_right, x0);
1591 __ Assert(eq, "lhs has been clobbered."); 1591 __ Assert(eq, kRhsHasBeenClobbered);
1592 } 1592 }
1593 #endif 1593 #endif
1594 { 1594 {
1595 FrameScope scope(masm, StackFrame::INTERNAL); 1595 FrameScope scope(masm, StackFrame::INTERNAL);
1596 GenerateRegisterArgsPush(masm); 1596 GenerateRegisterArgsPush(masm);
1597 GenerateCallRuntime(masm); 1597 GenerateCallRuntime(masm);
1598 } 1598 }
1599 __ Ret(); 1599 __ Ret();
1600 } 1600 }
1601 1601
(...skipping 167 matching lines...)
1769 ASSERT(!AreAliased(result, heap_number_map, scratch1, scratch2)); 1769 ASSERT(!AreAliased(result, heap_number_map, scratch1, scratch2));
1770 1770
1771 if ((mode == OVERWRITE_LEFT) || (mode == OVERWRITE_RIGHT)) { 1771 if ((mode == OVERWRITE_LEFT) || (mode == OVERWRITE_RIGHT)) {
1772 Label skip_allocation, allocated; 1772 Label skip_allocation, allocated;
1773 Register overwritable_operand = (mode == OVERWRITE_LEFT) ? x1 : x0; 1773 Register overwritable_operand = (mode == OVERWRITE_LEFT) ? x1 : x0;
1774 if (masm->emit_debug_code()) { 1774 if (masm->emit_debug_code()) {
1775 // Check that the overwritable operand is a Smi or a HeapNumber. 1775 // Check that the overwritable operand is a Smi or a HeapNumber.
1776 Label ok; 1776 Label ok;
1777 __ JumpIfSmi(overwritable_operand, &ok); 1777 __ JumpIfSmi(overwritable_operand, &ok);
1778 __ JumpIfHeapNumber(overwritable_operand, &ok); 1778 __ JumpIfHeapNumber(overwritable_operand, &ok);
1779 __ Abort("The overwritable operand should be a HeapNumber"); 1779 __ Abort(kExpectedSmiOrHeapNumber);
1780 __ Bind(&ok); 1780 __ Bind(&ok);
1781 } 1781 }
1782 // If the overwritable operand is already a HeapNumber, we can skip 1782 // If the overwritable operand is already a HeapNumber, we can skip
1783 // allocation of a heap number. 1783 // allocation of a heap number.
1784 __ JumpIfNotSmi(overwritable_operand, &skip_allocation); 1784 __ JumpIfNotSmi(overwritable_operand, &skip_allocation);
1785 // Allocate a heap number for the result. 1785 // Allocate a heap number for the result.
1786 __ AllocateHeapNumber(result, gc_required, scratch1, scratch2, 1786 __ AllocateHeapNumber(result, gc_required, scratch1, scratch2,
1787 heap_number_map); 1787 heap_number_map);
1788 __ B(&allocated); 1788 __ B(&allocated);
1789 __ Bind(&skip_allocation); 1789 __ Bind(&skip_allocation);
(...skipping 356 matching lines...)
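Note: the OVERWRITE_LEFT/OVERWRITE_RIGHT path above avoids allocating when the overwritable operand is already a HeapNumber; only a smi operand forces a fresh allocation. A sketch of that reuse-or-allocate decision, with invented types (nullptr modelling a smi operand, which has no heap box to reuse):

#include <cassert>

struct HeapNumber { double value; };

static HeapNumber* ResultHeapNumber(HeapNumber* overwritable) {
  if (overwritable != nullptr) {
    return overwritable;       // models the skip_allocation branch
  }
  return new HeapNumber{0.0};  // models __ AllocateHeapNumber(...)
}

int main() {
  HeapNumber boxed = {3.5};
  assert(ResultHeapNumber(&boxed) == &boxed);     // HeapNumber reused in place
  HeapNumber* fresh = ResultHeapNumber(nullptr);  // smi operand: allocate
  assert(fresh != &boxed);
  delete fresh;
  return 0;
}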
2146 // A64 simulator does not currently simulate FPCR (where the rounding 2146 // A64 simulator does not currently simulate FPCR (where the rounding
2147 // mode is set), so test the operation with some debug code. 2147 // mode is set), so test the operation with some debug code.
2148 if (masm->emit_debug_code()) { 2148 if (masm->emit_debug_code()) {
2149 Register temp = masm->Tmp1(); 2149 Register temp = masm->Tmp1();
2150 // d5 zero_double The value +0.0 as a double. 2150 // d5 zero_double The value +0.0 as a double.
2151 __ Fneg(scratch0_double, zero_double); 2151 __ Fneg(scratch0_double, zero_double);
2152 // Verify that we correctly generated +0.0 and -0.0. 2152 // Verify that we correctly generated +0.0 and -0.0.
2153 // bits(+0.0) = 0x0000000000000000 2153 // bits(+0.0) = 0x0000000000000000
2154 // bits(-0.0) = 0x8000000000000000 2154 // bits(-0.0) = 0x8000000000000000
2155 __ Fmov(temp, zero_double); 2155 __ Fmov(temp, zero_double);
2156 __ CheckRegisterIsClear(temp, "Could not generate +0.0."); 2156 __ CheckRegisterIsClear(temp, kCouldNotGenerateZero);
2157 __ Fmov(temp, scratch0_double); 2157 __ Fmov(temp, scratch0_double);
2158 __ Eor(temp, temp, kDSignMask); 2158 __ Eor(temp, temp, kDSignMask);
2159 __ CheckRegisterIsClear(temp, "Could not generate -0.0."); 2159 __ CheckRegisterIsClear(temp, kCouldNotGenerateNegativeZero);
2160 // Check that -0.0 + 0.0 == +0.0. 2160 // Check that -0.0 + 0.0 == +0.0.
2161 __ Fadd(scratch0_double, scratch0_double, zero_double); 2161 __ Fadd(scratch0_double, scratch0_double, zero_double);
2162 __ Fmov(temp, scratch0_double); 2162 __ Fmov(temp, scratch0_double);
2163 __ CheckRegisterIsClear(temp, "-0.0 + 0.0 did not produce +0.0."); 2163 __ CheckRegisterIsClear(temp, kExpectedPositiveZero);
2164 } 2164 }
2165 2165
2166 // If base is -INFINITY, make it +INFINITY. 2166 // If base is -INFINITY, make it +INFINITY.
2167 // * Calculate base - base: All infinities will become NaNs since both 2167 // * Calculate base - base: All infinities will become NaNs since both
2168 // -INFINITY+INFINITY and +INFINITY-INFINITY are NaN in A64. 2168 // -INFINITY+INFINITY and +INFINITY-INFINITY are NaN in A64.
2169 // * If the result is NaN, calculate abs(base). 2169 // * If the result is NaN, calculate abs(base).
2170 __ Fsub(scratch0_double, base_double, base_double); 2170 __ Fsub(scratch0_double, base_double, base_double);
2171 __ Fcmp(scratch0_double, 0.0); 2171 __ Fcmp(scratch0_double, 0.0);
2172 __ Fabs(scratch1_double, base_double); 2172 __ Fabs(scratch1_double, base_double);
2173 __ Fcsel(base_double, scratch1_double, base_double, vs); 2173 __ Fcsel(base_double, scratch1_double, base_double, vs);
(...skipping 251 matching lines...)
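Note: the debug block above pins down two floating-point facts the stub depends on, and the Fsub/Fcsel sequence canonicalizes an infinite base. Both are easy to verify on the host; this is an illustration of the arithmetic, not V8 code:

#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>
#include <limits>

static uint64_t BitsOf(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));  // read the IEEE-754 bit pattern
  return bits;
}

int main() {
  // bits(+0.0) = 0, bits(-0.0) = 0x8000000000000000, and -0.0 + 0.0 == +0.0
  // under the default round-to-nearest mode (only round-toward-negative
  // yields -0.0), which is what the CheckRegisterIsClear calls verify above.
  const double zero = 0.0;
  const double neg_zero = -zero;  // like Fneg(zero_double)
  assert(BitsOf(zero) == UINT64_C(0x0000000000000000));
  assert(BitsOf(neg_zero) == UINT64_C(0x8000000000000000));
  assert(BitsOf(neg_zero + zero) == UINT64_C(0));

  // base - base is NaN exactly when base is infinite (or NaN), and the
  // Fcsel on "vs" (unordered flags from the Fcmp) then selects fabs(base).
  const double inf = std::numeric_limits<double>::infinity();
  double base = -inf;
  double diff = base - base;                     // NaN: -inf + inf
  if (std::isnan(diff)) base = std::fabs(base);  // models Fcsel(..., vs)
  assert(base == inf);
  return 0;
}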
2425 Label return_location; 2425 Label return_location;
2426 __ Adr(x12, &return_location); 2426 __ Adr(x12, &return_location);
2427 __ Poke(x12, 0); 2427 __ Poke(x12, 0);
2428 if (__ emit_debug_code()) { 2428 if (__ emit_debug_code()) {
2429 // Verify that the slot below fp[kSPOffset]-8 points to the return location 2429 // Verify that the slot below fp[kSPOffset]-8 points to the return location
2430 // (currently in x12). 2430 // (currently in x12).
2431 Register temp = masm->Tmp1(); 2431 Register temp = masm->Tmp1();
2432 __ Ldr(temp, MemOperand(fp, ExitFrameConstants::kSPOffset)); 2432 __ Ldr(temp, MemOperand(fp, ExitFrameConstants::kSPOffset));
2433 __ Ldr(temp, MemOperand(temp, -static_cast<int64_t>(kXRegSizeInBytes))); 2433 __ Ldr(temp, MemOperand(temp, -static_cast<int64_t>(kXRegSizeInBytes)));
2434 __ Cmp(temp, x12); 2434 __ Cmp(temp, x12);
2435 __ Check(eq, "fp[kSPOffset]-8 does not hold the return address."); 2435 __ Check(eq, kReturnAddressNotFoundInFrame);
2436 } 2436 }
2437 2437
2438 // Call the builtin. 2438 // Call the builtin.
2439 __ Blr(target); 2439 __ Blr(target);
2440 __ Bind(&return_location); 2440 __ Bind(&return_location);
2441 const Register& result = x0; 2441 const Register& result = x0;
2442 2442
2443 if (always_allocate) { 2443 if (always_allocate) {
2444 __ Mov(x10, Operand(scope_depth)); 2444 __ Mov(x10, Operand(scope_depth));
2445 __ Ldr(x11, MemOperand(x10)); 2445 __ Ldr(x11, MemOperand(x10));
(...skipping 1304 matching lines...)
3750 ASSERT(jssp.Is(__ StackPointer())); 3750 ASSERT(jssp.Is(__ StackPointer()));
3751 __ Peek(jsregexp_object, kJSRegExpOffset); 3751 __ Peek(jsregexp_object, kJSRegExpOffset);
3752 __ JumpIfSmi(jsregexp_object, &runtime); 3752 __ JumpIfSmi(jsregexp_object, &runtime);
3753 __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime); 3753 __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime);
3754 3754
3755 // Check that the RegExp has been compiled (data contains a fixed array). 3755 // Check that the RegExp has been compiled (data contains a fixed array).
3756 __ Ldr(regexp_data, FieldMemOperand(jsregexp_object, JSRegExp::kDataOffset)); 3756 __ Ldr(regexp_data, FieldMemOperand(jsregexp_object, JSRegExp::kDataOffset));
3757 if (FLAG_debug_code) { 3757 if (FLAG_debug_code) {
3758 STATIC_ASSERT(kSmiTag == 0); 3758 STATIC_ASSERT(kSmiTag == 0);
3759 __ Tst(regexp_data, kSmiTagMask); 3759 __ Tst(regexp_data, kSmiTagMask);
3760 __ Check(ne, "Unexpected type for RegExp data, FixedArray expected"); 3760 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
3761 __ CompareObjectType(regexp_data, x10, x10, FIXED_ARRAY_TYPE); 3761 __ CompareObjectType(regexp_data, x10, x10, FIXED_ARRAY_TYPE);
3762 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected"); 3762 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
3763 } 3763 }
3764 3764
3765 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. 3765 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
3766 __ Ldr(x10, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); 3766 __ Ldr(x10, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
3767 __ Cmp(x10, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); 3767 __ Cmp(x10, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
3768 __ B(ne, &runtime); 3768 __ B(ne, &runtime);
3769 3769
3770 // Check that the number of captures fits in the static offsets vector buffer. 3770 // Check that the number of captures fits in the static offsets vector buffer.
3771 // We always have at least one capture for the whole match, plus additional 3771 // We always have at least one capture for the whole match, plus additional
3772 // ones due to capturing parentheses. A capture takes 2 registers. 3772 // ones due to capturing parentheses. A capture takes 2 registers.
(...skipping 379 matching lines...)
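Note: the FLAG_debug_code block above relies on V8's pointer tagging: with kSmiTag == 0 in the low bit and heap object pointers tagged with 1, "Tst(regexp_data, kSmiTagMask)" is non-zero exactly for heap objects. A standalone model of the test (tag constants copied from that scheme, sample values invented):

#include <cassert>
#include <cstdint>

const uint64_t kSmiTagMask = 1;
const uint64_t kSmiTag = 0;
const uint64_t kHeapObjectTag = 1;

// Models "__ Tst(value, kSmiTagMask)" followed by "__ Check(ne, ...)":
// the check passes only when the tagged value is a heap object.
static bool IsHeapObject(uint64_t tagged) {
  return (tagged & kSmiTagMask) != kSmiTag;
}

int main() {
  // On A64 a smi keeps its 32-bit payload in the upper word; the low bit
  // stays 0 either way, which is all this test looks at.
  const uint64_t smi = UINT64_C(123) << 32;
  const uint64_t heap_ptr = UINT64_C(0x7f00deadbee0) | kHeapObjectTag;
  assert(!IsHeapObject(smi));
  assert(IsHeapObject(heap_ptr));
  return 0;
}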
4152 __ B(ne, &not_long_external); // Go to (8). 4152 __ B(ne, &not_long_external); // Go to (8).
4153 4153
4154 // (7) External string. Make it, offset-wise, look like a sequential string. 4154 // (7) External string. Make it, offset-wise, look like a sequential string.
4155 __ Bind(&external_string); 4155 __ Bind(&external_string);
4156 if (masm->emit_debug_code()) { 4156 if (masm->emit_debug_code()) {
4157 // Assert that we do not have a cons or slice (indirect strings) here. 4157 // Assert that we do not have a cons or slice (indirect strings) here.
4158 // Sequential strings have already been ruled out. 4158 // Sequential strings have already been ruled out.
4159 __ Ldr(x10, FieldMemOperand(subject, HeapObject::kMapOffset)); 4159 __ Ldr(x10, FieldMemOperand(subject, HeapObject::kMapOffset));
4160 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset)); 4160 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset));
4161 __ Tst(x10, kIsIndirectStringMask); 4161 __ Tst(x10, kIsIndirectStringMask);
4162 __ Check(eq, "external string expected, but cons or sliced string found"); 4162 __ Check(eq, kExternalStringExpectedButNotFound);
4163 __ And(x10, x10, kStringRepresentationMask); 4163 __ And(x10, x10, kStringRepresentationMask);
4164 __ Cmp(x10, 0); 4164 __ Cmp(x10, 0);
4165 __ Check(ne, "external string expected, but sequential string found"); 4165 __ Check(ne, kExternalStringExpectedButNotFound);
4166 } 4166 }
4167 __ Ldr(subject, 4167 __ Ldr(subject,
4168 FieldMemOperand(subject, ExternalString::kResourceDataOffset)); 4168 FieldMemOperand(subject, ExternalString::kResourceDataOffset));
4169 // Move the pointer so that offset-wise, it looks like a sequential string. 4169 // Move the pointer so that offset-wise, it looks like a sequential string.
4170 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); 4170 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
4171 __ Sub(subject, subject, SeqTwoByteString::kHeaderSize - kHeapObjectTag); 4171 __ Sub(subject, subject, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
4172 __ B(&seq_string); // Go to (5). 4172 __ B(&seq_string); // Go to (5).
4173 4173
4174 // (8) If this is a short external string or not a string, bail out to 4174 // (8) If this is a short external string or not a string, bail out to
4175 // runtime. 4175 // runtime.
(...skipping 352 matching lines...)
4528 result_, 4528 result_,
4529 &call_runtime_); 4529 &call_runtime_);
4530 __ SmiTag(result_); 4530 __ SmiTag(result_);
4531 __ Bind(&exit_); 4531 __ Bind(&exit_);
4532 } 4532 }
4533 4533
4534 4534
4535 void StringCharCodeAtGenerator::GenerateSlow( 4535 void StringCharCodeAtGenerator::GenerateSlow(
4536 MacroAssembler* masm, 4536 MacroAssembler* masm,
4537 const RuntimeCallHelper& call_helper) { 4537 const RuntimeCallHelper& call_helper) {
4538 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); 4538 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
4539 4539
4540 __ Bind(&index_not_smi_); 4540 __ Bind(&index_not_smi_);
4541 // If index is a heap number, try converting it to an integer. 4541 // If index is a heap number, try converting it to an integer.
4542 __ CheckMap(index_, 4542 __ CheckMap(index_,
4543 result_, 4543 result_,
4544 Heap::kHeapNumberMapRootIndex, 4544 Heap::kHeapNumberMapRootIndex,
4545 index_not_number_, 4545 index_not_number_,
4546 DONT_DO_SMI_CHECK); 4546 DONT_DO_SMI_CHECK);
4547 call_helper.BeforeCall(masm); 4547 call_helper.BeforeCall(masm);
4548 // Save object_ on the stack and pass index_ as argument for runtime call. 4548 // Save object_ on the stack and pass index_ as argument for runtime call.
(...skipping 24 matching lines...)
4573 // is too complex (e.g., when the string needs to be flattened). 4573 // is too complex (e.g., when the string needs to be flattened).
4574 __ Bind(&call_runtime_); 4574 __ Bind(&call_runtime_);
4575 call_helper.BeforeCall(masm); 4575 call_helper.BeforeCall(masm);
4576 __ SmiTag(index_); 4576 __ SmiTag(index_);
4577 __ Push(object_, index_); 4577 __ Push(object_, index_);
4578 __ CallRuntime(Runtime::kStringCharCodeAt, 2); 4578 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
4579 __ Mov(result_, x0); 4579 __ Mov(result_, x0);
4580 call_helper.AfterCall(masm); 4580 call_helper.AfterCall(masm);
4581 __ B(&exit_); 4581 __ B(&exit_);
4582 4582
4583 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); 4583 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
4584 } 4584 }
4585 4585
4586 4586
4587 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { 4587 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
4588 __ JumpIfNotSmi(code_, &slow_case_); 4588 __ JumpIfNotSmi(code_, &slow_case_);
4589 __ Cmp(code_, Operand(Smi::FromInt(String::kMaxOneByteCharCode))); 4589 __ Cmp(code_, Operand(Smi::FromInt(String::kMaxOneByteCharCode)));
4590 __ B(hi, &slow_case_); 4590 __ B(hi, &slow_case_);
4591 4591
4592 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); 4592 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
4593 // At this point code register contains smi tagged ASCII char code. 4593 // At this point code register contains smi tagged ASCII char code.
4594 STATIC_ASSERT(kSmiShift > kPointerSizeLog2); 4594 STATIC_ASSERT(kSmiShift > kPointerSizeLog2);
4595 __ Add(result_, result_, Operand(code_, LSR, kSmiShift - kPointerSizeLog2)); 4595 __ Add(result_, result_, Operand(code_, LSR, kSmiShift - kPointerSizeLog2));
4596 __ Ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); 4596 __ Ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
4597 __ JumpIfRoot(result_, Heap::kUndefinedValueRootIndex, &slow_case_); 4597 __ JumpIfRoot(result_, Heap::kUndefinedValueRootIndex, &slow_case_);
4598 __ Bind(&exit_); 4598 __ Bind(&exit_);
4599 } 4599 }
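Note: GenerateFast above turns a smi-tagged character code straight into a byte offset with a single shift. Because STATIC_ASSERT(kSmiShift > kPointerSizeLog2) holds, untagging (shift right by kSmiShift) and scaling to pointer size (shift left by kPointerSizeLog2) fold into one LSR by their difference. A sketch under the A64 assumptions (kSmiShift == 32, 8-byte pointers):

#include <cassert>
#include <cstdint>

const int kSmiShift = 32;        // smi payload lives in the upper 32 bits
const int kPointerSizeLog2 = 3;  // 8-byte pointers

static uint64_t SmiTag(uint32_t value) {
  return static_cast<uint64_t>(value) << kSmiShift;
}

// (smi >> kSmiShift) << kPointerSizeLog2, folded into one right shift,
// exactly like "Operand(code_, LSR, kSmiShift - kPointerSizeLog2)" above.
static uint64_t SmiToByteOffset(uint64_t smi) {
  return smi >> (kSmiShift - kPointerSizeLog2);
}

int main() {
  assert(SmiToByteOffset(SmiTag(7)) == 7 * 8);  // element 7 -> 56-byte offset
  return 0;
}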
4600 4600
4601 4601
4602 void StringCharFromCodeGenerator::GenerateSlow( 4602 void StringCharFromCodeGenerator::GenerateSlow(
4603 MacroAssembler* masm, 4603 MacroAssembler* masm,
4604 const RuntimeCallHelper& call_helper) { 4604 const RuntimeCallHelper& call_helper) {
4605 __ Abort("Unexpected fallthrough to CharFromCode slow case"); 4605 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
4606 4606
4607 __ Bind(&slow_case_); 4607 __ Bind(&slow_case_);
4608 call_helper.BeforeCall(masm); 4608 call_helper.BeforeCall(masm);
4609 __ Push(code_); 4609 __ Push(code_);
4610 __ CallRuntime(Runtime::kCharFromCode, 1); 4610 __ CallRuntime(Runtime::kCharFromCode, 1);
4611 __ Mov(result_, x0); 4611 __ Mov(result_, x0);
4612 call_helper.AfterCall(masm); 4612 call_helper.AfterCall(masm);
4613 __ B(&exit_); 4613 __ B(&exit_);
4614 4614
4615 __ Abort("Unexpected fallthrough from CharFromCode slow case"); 4615 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
4616 } 4616 }
4617 4617
4618 4618
4619 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { 4619 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4620 // Inputs are in x0 (lhs) and x1 (rhs). 4620 // Inputs are in x0 (lhs) and x1 (rhs).
4621 ASSERT(state_ == CompareIC::SMI); 4621 ASSERT(state_ == CompareIC::SMI);
4622 ASM_LOCATION("ICCompareStub[Smis]"); 4622 ASM_LOCATION("ICCompareStub[Smis]");
4623 Label miss; 4623 Label miss;
4624 // Bail out (to 'miss') unless both x0 and x1 are smis. 4624 // Bail out (to 'miss') unless both x0 and x1 are smis.
4625 __ JumpIfEitherNotSmi(x0, x1, &miss); 4625 __ JumpIfEitherNotSmi(x0, x1, &miss);
(...skipping 518 matching lines...)
5144 // If entry is undefined no string with this hash can be found. 5144 // If entry is undefined no string with this hash can be found.
5145 Label is_string; 5145 Label is_string;
5146 Register type = scratch; 5146 Register type = scratch;
5147 __ JumpIfNotObjectType(candidate, type, type, ODDBALL_TYPE, &is_string); 5147 __ JumpIfNotObjectType(candidate, type, type, ODDBALL_TYPE, &is_string);
5148 5148
5149 __ Cmp(undefined, candidate); 5149 __ Cmp(undefined, candidate);
5150 __ B(eq, not_found); 5150 __ B(eq, not_found);
5151 // Must be the hole (deleted entry). 5151 // Must be the hole (deleted entry).
5152 if (FLAG_debug_code) { 5152 if (FLAG_debug_code) {
5153 __ CompareRoot(candidate, Heap::kTheHoleValueRootIndex); 5153 __ CompareRoot(candidate, Heap::kTheHoleValueRootIndex);
5154 __ Assert(eq, "oddball in string table is not undefined or the hole"); 5154 __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole);
5155 } 5155 }
5156 __ B(&next_probe[i]); 5156 __ B(&next_probe[i]);
5157 5157
5158 __ Bind(&is_string); 5158 __ Bind(&is_string);
5159 5159
5160 // Check that the candidate is a non-external ASCII string. The instance 5160 // Check that the candidate is a non-external ASCII string. The instance
5161 // type is still in the type register from the CompareObjectType 5161 // type is still in the type register from the CompareObjectType
5162 // operation. 5162 // operation.
5163 __ JumpIfInstanceTypeIsNotSequentialAscii(type, type, &next_probe[i]); 5163 __ JumpIfInstanceTypeIsNotSequentialAscii(type, type, &next_probe[i]);
5164 5164
(...skipping 1611 matching lines...)
6776 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); 6776 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
6777 // TODO(jbramley): Is this the best way to handle this? Can we make the tail 6777 // TODO(jbramley): Is this the best way to handle this? Can we make the tail
6778 // calls conditional, rather than hopping over each one? 6778 // calls conditional, rather than hopping over each one?
6779 __ CompareAndBranch(kind, candidate_kind, ne, &next); 6779 __ CompareAndBranch(kind, candidate_kind, ne, &next);
6780 T stub(candidate_kind); 6780 T stub(candidate_kind);
6781 __ TailCallStub(&stub); 6781 __ TailCallStub(&stub);
6782 __ Bind(&next); 6782 __ Bind(&next);
6783 } 6783 }
6784 6784
6785 // If we reached this point there is a problem. 6785 // If we reached this point there is a problem.
6786 __ Abort("Unexpected ElementsKind in array constructor"); 6786 __ Abort(kUnexpectedElementsKindInArrayConstructor);
6787 } 6787 }
6788 6788
6789 6789
6790 // TODO(jbramley): If this needs to be a special case, make it a proper template 6790 // TODO(jbramley): If this needs to be a special case, make it a proper template
6791 // specialization, and not a separate function. 6791 // specialization, and not a separate function.
6792 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { 6792 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
6793 // x0 - argc 6793 // x0 - argc
6794 // x1 - constructor? 6794 // x1 - constructor?
6795 // x2 - type info cell 6795 // x2 - type info cell
6796 // x3 - kind 6796 // x3 - kind
(...skipping 42 matching lines...)
6839 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); 6839 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
6840 // TODO(jbramley): Is this the best way to handle this? Can we make the tail 6840 // TODO(jbramley): Is this the best way to handle this? Can we make the tail
6841 // calls conditional, rather than hopping over each one? 6841 // calls conditional, rather than hopping over each one?
6842 __ CompareAndBranch(kind, candidate_kind, ne, &next); 6842 __ CompareAndBranch(kind, candidate_kind, ne, &next);
6843 ArraySingleArgumentConstructorStub stub(candidate_kind); 6843 ArraySingleArgumentConstructorStub stub(candidate_kind);
6844 __ TailCallStub(&stub); 6844 __ TailCallStub(&stub);
6845 __ Bind(&next); 6845 __ Bind(&next);
6846 } 6846 }
6847 6847
6848 // If we reached this point there is a problem. 6848 // If we reached this point there is a problem.
6849 __ Abort("Unexpected ElementsKind in array constructor"); 6849 __ Abort(kUnexpectedElementsKindInArrayConstructor);
6850 } 6850 }
6851 6851
6852 6852
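Note: both dispatch loops above walk the fast ElementsKind sequence, compare the runtime kind against each candidate, and tail-call the matching stub; falling off the end aborts. A simplified model of that control flow (kind values and stub body invented; V8 tail-calls real code stubs here):

#include <cstdio>
#include <cstdlib>

enum ElementsKind { FAST_SMI_ELEMENTS, FAST_ELEMENTS, FAST_DOUBLE_ELEMENTS, kKindCount };

static void StubForKind(ElementsKind kind) {
  std::printf("stub for kind %d\n", static_cast<int>(kind));
}

static void ArrayDispatch(ElementsKind kind) {
  for (int i = 0; i < kKindCount; ++i) {
    const ElementsKind candidate = static_cast<ElementsKind>(i);
    if (kind == candidate) {   // models CompareAndBranch(kind, ..., ne, &next)
      StubForKind(candidate);  // models __ TailCallStub(&stub)
      return;
    }
  }
  // models __ Abort(kUnexpectedElementsKindInArrayConstructor)
  std::fprintf(stderr, "Unexpected ElementsKind in array constructor\n");
  std::abort();
}

int main() {
  ArrayDispatch(FAST_ELEMENTS);
  return 0;
}

The TODO about conditional tail calls would replace the branch-over-each-stub shape with a conditional branch directly to each stub, but the observable dispatch would stay the same.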
6853 template<class T> 6853 template<class T>
6854 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { 6854 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
6855 int to_index = GetSequenceIndexFromFastElementsKind( 6855 int to_index = GetSequenceIndexFromFastElementsKind(
6856 TERMINAL_FAST_ELEMENTS_KIND); 6856 TERMINAL_FAST_ELEMENTS_KIND);
6857 for (int i = 0; i <= to_index; ++i) { 6857 for (int i = 0; i <= to_index; ++i) {
6858 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 6858 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
6859 T stub(kind); 6859 T stub(kind);
(...skipping 48 matching lines...)
6908 // builtin Array functions which always have maps. 6908 // builtin Array functions which always have maps.
6909 6909
6910 Label unexpected_map, map_ok; 6910 Label unexpected_map, map_ok;
6911 // Initial map for the builtin Array function should be a map. 6911 // Initial map for the builtin Array function should be a map.
6912 __ Ldr(x10, FieldMemOperand(constructor, 6912 __ Ldr(x10, FieldMemOperand(constructor,
6913 JSFunction::kPrototypeOrInitialMapOffset)); 6913 JSFunction::kPrototypeOrInitialMapOffset));
6914 // Will both indicate a NULL and a Smi. 6914 // Will both indicate a NULL and a Smi.
6915 __ JumpIfSmi(x10, &unexpected_map); 6915 __ JumpIfSmi(x10, &unexpected_map);
6916 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); 6916 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok);
6917 __ Bind(&unexpected_map); 6917 __ Bind(&unexpected_map);
6918 __ Abort("Unexpected initial map for Array function"); 6918 __ Abort(kUnexpectedInitialMapForArrayFunction);
6919 __ Bind(&map_ok); 6919 __ Bind(&map_ok);
6920 6920
6921 // In type_info_cell, we expect either undefined or a valid Cell. 6921 // In type_info_cell, we expect either undefined or a valid Cell.
6922 Label okay_here; 6922 Label okay_here;
6923 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); 6923 Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
6924 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &okay_here); 6924 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &okay_here);
6925 __ Ldr(x10, FieldMemOperand(type_info_cell, Cell::kMapOffset)); 6925 __ Ldr(x10, FieldMemOperand(type_info_cell, Cell::kMapOffset));
6926 __ Cmp(x10, Operand(cell_map)); 6926 __ Cmp(x10, Operand(cell_map));
6927 __ Assert(eq, "Expected property cell in type_info_cell"); 6927 __ Assert(eq, kExpectedPropertyCellInTypeInfoCell);
6928 __ Bind(&okay_here); 6928 __ Bind(&okay_here);
6929 } 6929 }
6930 6930
6931 Register kind = x3; 6931 Register kind = x3;
6932 Label no_info, switch_ready; 6932 Label no_info, switch_ready;
6933 // Get the elements kind and case on that. 6933 // Get the elements kind and case on that.
6934 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &no_info); 6934 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &no_info);
6935 __ Ldr(kind, FieldMemOperand(type_info_cell, PropertyCell::kValueOffset)); 6935 __ Ldr(kind, FieldMemOperand(type_info_cell, PropertyCell::kValueOffset));
6936 6936
6937 // The type cell may have undefined in its value. 6937 // The type cell may have undefined in its value.
(...skipping 95 matching lines...)
7033 // builtin Array functions which always have maps. 7033 // builtin Array functions which always have maps.
7034 7034
7035 Label unexpected_map, map_ok; 7035 Label unexpected_map, map_ok;
7036 // Initial map for the builtin Array function should be a map. 7036 // Initial map for the builtin Array function should be a map.
7037 __ Ldr(x10, FieldMemOperand(constructor, 7037 __ Ldr(x10, FieldMemOperand(constructor,
7038 JSFunction::kPrototypeOrInitialMapOffset)); 7038 JSFunction::kPrototypeOrInitialMapOffset));
7039 // Will both indicate a NULL and a Smi. 7039 // Will both indicate a NULL and a Smi.
7040 __ JumpIfSmi(x10, &unexpected_map); 7040 __ JumpIfSmi(x10, &unexpected_map);
7041 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); 7041 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok);
7042 __ Bind(&unexpected_map); 7042 __ Bind(&unexpected_map);
7043 __ Abort("Unexpected initial map for Array function"); 7043 __ Abort(kUnexpectedInitialMapForArrayFunction);
7044 __ Bind(&map_ok); 7044 __ Bind(&map_ok);
7045 } 7045 }
7046 7046
7047 Register kind = w3; 7047 Register kind = w3;
7048 // Figure out the right elements kind 7048 // Figure out the right elements kind
7049 __ Ldr(x10, FieldMemOperand(constructor, 7049 __ Ldr(x10, FieldMemOperand(constructor,
7050 JSFunction::kPrototypeOrInitialMapOffset)); 7050 JSFunction::kPrototypeOrInitialMapOffset));
7051 7051
7052 // TODO(jbramley): Add a helper function to read elements kind from an 7052 // TODO(jbramley): Add a helper function to read elements kind from an
7053 // existing map. 7053 // existing map.
7054 // Load the map's "bit field 2" into result. 7054 // Load the map's "bit field 2" into result.
7055 __ Ldr(kind, FieldMemOperand(x10, Map::kBitField2Offset)); 7055 __ Ldr(kind, FieldMemOperand(x10, Map::kBitField2Offset));
7056 // Retrieve elements_kind from bit field 2. 7056 // Retrieve elements_kind from bit field 2.
7057 __ Ubfx(kind, kind, Map::kElementsKindShift, Map::kElementsKindBitCount); 7057 __ Ubfx(kind, kind, Map::kElementsKindShift, Map::kElementsKindBitCount);
7058 7058
7059 if (FLAG_debug_code) { 7059 if (FLAG_debug_code) {
7060 Label done; 7060 Label done;
7061 __ Cmp(x3, FAST_ELEMENTS); 7061 __ Cmp(x3, FAST_ELEMENTS);
7062 __ Ccmp(x3, FAST_HOLEY_ELEMENTS, ZFlag, ne); 7062 __ Ccmp(x3, FAST_HOLEY_ELEMENTS, ZFlag, ne);
7063 __ Assert(eq, 7063 __ Assert(eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray);
7064 "Invalid ElementsKind for InternalArray or InternalPackedArray");
7065 } 7064 }
7066 7065
7067 Label fast_elements_case; 7066 Label fast_elements_case;
7068 __ CompareAndBranch(kind, FAST_ELEMENTS, eq, &fast_elements_case); 7067 __ CompareAndBranch(kind, FAST_ELEMENTS, eq, &fast_elements_case);
7069 GenerateCase(masm, FAST_HOLEY_ELEMENTS); 7068 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
7070 7069
7071 __ Bind(&fast_elements_case); 7070 __ Bind(&fast_elements_case);
7072 GenerateCase(masm, FAST_ELEMENTS); 7071 GenerateCase(masm, FAST_ELEMENTS);
7073 } 7072 }
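Note: the FLAG_debug_code block above uses A64's conditional compare to assert "kind is FAST_ELEMENTS or FAST_HOLEY_ELEMENTS" without a branch: Ccmp(x3, FAST_HOLEY_ELEMENTS, ZFlag, ne) performs the second compare only if the first one was not-equal, and otherwise forces the Z flag set. A host-side model of those flag semantics (enum values illustrative):

#include <cassert>

// Models Cmp(x, a), then Ccmp(x, b, ZFlag, ne), then a check of "eq":
// the final Z flag is set iff x == a || x == b.
static bool CmpCcmpEq(int x, int a, int b) {
  const bool z_after_cmp = (x == a);  // Cmp(x, a)
  // Ccmp: condition "ne" true -> really compare; false -> NZCV := ZFlag.
  const bool z_after_ccmp = z_after_cmp ? true : (x == b);
  return z_after_ccmp;                // Assert(eq, ...) reads Z
}

int main() {
  enum { FAST_ELEMENTS = 2, FAST_HOLEY_ELEMENTS = 3, DICTIONARY_ELEMENTS = 6 };
  assert(CmpCcmpEq(FAST_ELEMENTS, FAST_ELEMENTS, FAST_HOLEY_ELEMENTS));
  assert(CmpCcmpEq(FAST_HOLEY_ELEMENTS, FAST_ELEMENTS, FAST_HOLEY_ELEMENTS));
  assert(!CmpCcmpEq(DICTIONARY_ELEMENTS, FAST_ELEMENTS, FAST_HOLEY_ELEMENTS));
  return 0;
}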
7074 7073
7075 7074
7076 #undef __ 7075 #undef __
7077 7076
7078 } } // namespace v8::internal 7077 } } // namespace v8::internal
7079 7078
7080 #endif // V8_TARGET_ARCH_A64 7079 #endif // V8_TARGET_ARCH_A64