Chromium Code Reviews

Side by Side Diff: src/a64/macro-assembler-a64.cc

Issue 149413010: A64: Synchronize with r16024. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 747 matching lines...)
758 ASSERT((count * size) % 16 == 0); 758 ASSERT((count * size) % 16 == 0);
759 } 759 }
760 } 760 }
761 761
762 762
763 void MacroAssembler::Poke(const CPURegister& src, const Operand& offset) { 763 void MacroAssembler::Poke(const CPURegister& src, const Operand& offset) {
764 if (offset.IsImmediate()) { 764 if (offset.IsImmediate()) {
765 ASSERT(offset.immediate() >= 0); 765 ASSERT(offset.immediate() >= 0);
766 } else if (emit_debug_code()) { 766 } else if (emit_debug_code()) {
767 Cmp(xzr, offset); 767 Cmp(xzr, offset);
768 Check(le, "Poke offset is negative."); 768 Check(le, kStackAccessBelowStackPointer);
769 } 769 }
770 770
771 Str(src, MemOperand(StackPointer(), offset)); 771 Str(src, MemOperand(StackPointer(), offset));
772 } 772 }
773 773
774 774
775 void MacroAssembler::Peek(const CPURegister& dst, const Operand& offset) { 775 void MacroAssembler::Peek(const CPURegister& dst, const Operand& offset) {
776 if (offset.IsImmediate()) { 776 if (offset.IsImmediate()) {
777 ASSERT(offset.immediate() >= 0); 777 ASSERT(offset.immediate() >= 0);
778 } else if (emit_debug_code()) { 778 } else if (emit_debug_code()) {
779 Cmp(xzr, offset); 779 Cmp(xzr, offset);
780 Check(le, "Peek offset is negative."); 780 Check(le, kStackAccessBelowStackPointer);
781 } 781 }
782 782
783 Ldr(dst, MemOperand(StackPointer(), offset)); 783 Ldr(dst, MemOperand(StackPointer(), offset));
784 } 784 }
785 785
786 786
787 void MacroAssembler::PushCalleeSavedRegisters() { 787 void MacroAssembler::PushCalleeSavedRegisters() {
788 // Ensure that the macro-assembler doesn't use any scratch registers. 788 // Ensure that the macro-assembler doesn't use any scratch registers.
789 InstructionAccurateScope scope(this); 789 InstructionAccurateScope scope(this);
790 790
(...skipping 43 matching lines...)
834 834
835 void MacroAssembler::AssertStackConsistency() { 835 void MacroAssembler::AssertStackConsistency() {
836 if (emit_debug_code() && !csp.Is(StackPointer())) { 836 if (emit_debug_code() && !csp.Is(StackPointer())) {
837 if (csp.Is(StackPointer())) { 837 if (csp.Is(StackPointer())) {
838 // TODO(jbramley): Check for csp alignment if it is the stack pointer. 838 // TODO(jbramley): Check for csp alignment if it is the stack pointer.
839 } else { 839 } else {
840 // TODO(jbramley): Currently we cannot use this assertion in Push because 840 // TODO(jbramley): Currently we cannot use this assertion in Push because
841 // some calling code assumes that the flags are preserved. For an example, 841 // some calling code assumes that the flags are preserved. For an example,
842 // look at Builtins::Generate_ArgumentsAdaptorTrampoline. 842 // look at Builtins::Generate_ArgumentsAdaptorTrampoline.
843 Cmp(csp, StackPointer()); 843 Cmp(csp, StackPointer());
844 Check(ls, "The current stack pointer is below csp."); 844 Check(ls, kTheCurrentStackPointerIsBelowCsp);
845 } 845 }
846 } 846 }
847 } 847 }
848 848
849 849
850 void MacroAssembler::LoadRoot(Register destination, 850 void MacroAssembler::LoadRoot(Register destination,
851 Heap::RootListIndex index) { 851 Heap::RootListIndex index) {
852 // TODO(jbramley): Most root values are constants, and can be synthesized 852 // TODO(jbramley): Most root values are constants, and can be synthesized
853 // without a load. Refer to the ARM back end for details. 853 // without a load. Refer to the ARM back end for details.
854 Ldr(destination, MemOperand(root, index << kPointerSizeLog2)); 854 Ldr(destination, MemOperand(root, index << kPointerSizeLog2));
(...skipping 20 matching lines...)
875 if (isolate()->heap()->InNewSpace(*object)) { 875 if (isolate()->heap()->InNewSpace(*object)) {
876 Handle<Cell> cell = isolate()->factory()->NewCell(object); 876 Handle<Cell> cell = isolate()->factory()->NewCell(object);
877 Mov(result, Operand(cell)); 877 Mov(result, Operand(cell));
878 Ldr(result, FieldMemOperand(result, Cell::kValueOffset)); 878 Ldr(result, FieldMemOperand(result, Cell::kValueOffset));
879 } else { 879 } else {
880 Mov(result, Operand(object)); 880 Mov(result, Operand(object));
881 } 881 }
882 } 882 }
883 883
884 884
885 void MacroAssembler::CheckForInvalidValuesInCalleeSavedRegs(RegList list) {
886 if (emit_debug_code()) {
887 // Only check for callee-saved registers.
888 // TODO(jbramley): Why? We still don't want caller-saved registers to be
889 // pushed with invalid values. Perhaps we need a
890 // CheckForInvalidValuesInRegs for other cases.
891 Label invalid, ok;
892 list &= kJSCalleeSavedRegList;
893 for (unsigned i = kFirstCalleeSavedRegisterIndex; list != 0; i++) {
894 if (list & (1 << i)) {
895 // Clear the current register from the list.
896 list &= ~(1 << i);
897 Register current = Register(i, kXRegSize);
898 Label smi;
899 JumpIfSmi(current, &smi);
900 // TODO(all): Better check for invalid values in callee-saved registers.
901 // Check that the register is not in [0, 4 KB].
902 // This catches odd (untagged) integers.
903 // We should actually check that the pointer is valid.
904 Cmp(current, 4 * KB);
905 B(hs, &invalid);
906 Bind(&smi);
907 }
908 }
909 B(&ok);
910 Bind(&invalid);
911 Abort("Invalid value in a callee saved register.");
912 Bind(&ok);
913 }
914 }
915
916
917 void MacroAssembler::LoadInstanceDescriptors(Register map, 885 void MacroAssembler::LoadInstanceDescriptors(Register map,
918 Register descriptors) { 886 Register descriptors) {
919 Ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset)); 887 Ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
920 } 888 }
921 889
922 890
923 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) { 891 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
924 Ldr(dst, FieldMemOperand(map, Map::kBitField3Offset)); 892 Ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
925 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst); 893 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
926 } 894 }
(...skipping 235 matching lines...)
1162 __ Eor(scratch, smi, Operand(smi, ASR, kXRegSize - 1)); 1130 __ Eor(scratch, smi, Operand(smi, ASR, kXRegSize - 1));
1163 // Add 1 or do nothing depending on the sign of the argument. 1131 // Add 1 or do nothing depending on the sign of the argument.
1164 __ Adds(smi, scratch, Operand(smi, LSR, kXRegSize - 1)); 1132 __ Adds(smi, scratch, Operand(smi, LSR, kXRegSize - 1));
1165 1133
1166 // If the result is still negative, go to the slow case. 1134 // If the result is still negative, go to the slow case.
1167 // This only happens for the most negative smi. 1135 // This only happens for the most negative smi.
1168 __ B(mi, slow); 1136 __ B(mi, slow);
1169 } 1137 }
1170 1138
1171 1139
1172 void MacroAssembler::AssertSmi(Register object, char const* fail_message) { 1140 void MacroAssembler::AssertSmi(Register object, BailoutReason reason) {
1173 if (emit_debug_code()) { 1141 if (emit_debug_code()) {
1174 STATIC_ASSERT(kSmiTag == 0); 1142 STATIC_ASSERT(kSmiTag == 0);
1175 Tst(object, kSmiTagMask); 1143 Tst(object, kSmiTagMask);
1176 Check(eq, fail_message); 1144 Check(eq, reason);
1177 } 1145 }
1178 } 1146 }
1179 1147
1180 1148
1181 void MacroAssembler::AssertNotSmi(Register object, char const* fail_message) { 1149 void MacroAssembler::AssertNotSmi(Register object, BailoutReason reason) {
1182 if (emit_debug_code()) { 1150 if (emit_debug_code()) {
1183 STATIC_ASSERT(kSmiTag == 0); 1151 STATIC_ASSERT(kSmiTag == 0);
1184 Tst(object, kSmiTagMask); 1152 Tst(object, kSmiTagMask);
1185 Check(ne, fail_message); 1153 Check(ne, reason);
1186 } 1154 }
1187 } 1155 }
1188 1156
1189 1157
1190 void MacroAssembler::AssertName(Register object) { 1158 void MacroAssembler::AssertName(Register object) {
1191 if (emit_debug_code()) { 1159 if (emit_debug_code()) {
1192 STATIC_ASSERT(kSmiTag == 0); 1160 STATIC_ASSERT(kSmiTag == 0);
1193 // TODO(jbramley): Add AbortIfSmi and related functions. 1161 // TODO(jbramley): Add AbortIfSmi and related functions.
1194 Label not_smi; 1162 Label not_smi;
1195 JumpIfNotSmi(object, &not_smi); 1163 JumpIfNotSmi(object, &not_smi);
1196 Abort("Operand is a smi and not a name"); 1164 Abort(kOperandIsASmiAndNotAName);
1197 Bind(&not_smi); 1165 Bind(&not_smi);
1198 1166
1199 Ldr(Tmp1(), FieldMemOperand(object, HeapObject::kMapOffset)); 1167 Ldr(Tmp1(), FieldMemOperand(object, HeapObject::kMapOffset));
1200 CompareInstanceType(Tmp1(), Tmp1(), LAST_NAME_TYPE); 1168 CompareInstanceType(Tmp1(), Tmp1(), LAST_NAME_TYPE);
1201 Check(ls, "Operand is not a name"); 1169 Check(ls, kOperandIsNotAName);
1202 } 1170 }
1203 } 1171 }
1204 1172
1205 1173
1206 void MacroAssembler::AssertString(Register object) { 1174 void MacroAssembler::AssertString(Register object) {
1207 if (emit_debug_code()) { 1175 if (emit_debug_code()) {
1208 Register temp = Tmp1(); 1176 Register temp = Tmp1();
1209 STATIC_ASSERT(kSmiTag == 0); 1177 STATIC_ASSERT(kSmiTag == 0);
1210 Tst(object, kSmiTagMask); 1178 Tst(object, kSmiTagMask);
1211 Check(ne, "Operand is a smi and not a string"); 1179 Check(ne, kOperandIsASmiAndNotAString);
1212 Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset)); 1180 Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset));
1213 CompareInstanceType(temp, temp, FIRST_NONSTRING_TYPE); 1181 CompareInstanceType(temp, temp, FIRST_NONSTRING_TYPE);
1214 Check(lo, "Operand is not a string"); 1182 Check(lo, kOperandIsNotAString);
1215 } 1183 }
1216 } 1184 }
1217 1185
1218
1219 void MacroAssembler::AssertRootValue(Register src,
1220 Heap::RootListIndex root_value_index,
1221 const char* message) {
1222 if (emit_debug_code()) {
1223 CompareRoot(src, root_value_index);
1224 Check(eq, message);
1225 }
1226 }
1227
1228 1186
1229 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) { 1187 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
1230 ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs. 1188 ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
1231 Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id); 1189 Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);
1232 } 1190 }
1233 1191
1234 1192
1235 void MacroAssembler::TailCallStub(CodeStub* stub) { 1193 void MacroAssembler::TailCallStub(CodeStub* stub) {
1236 ASSERT(allow_stub_calls_ || 1194 ASSERT(allow_stub_calls_ ||
1237 stub->CompilingCallsToThisStubIsGCSafe(isolate())); 1195 stub->CompilingCallsToThisStubIsGCSafe(isolate()));
(...skipping 148 matching lines...)
1386 } 1344 }
1387 // load value from ReturnValue 1345 // load value from ReturnValue
1388 Ldr(x0, MemOperand(fp, return_value_offset_from_fp * kPointerSize)); 1346 Ldr(x0, MemOperand(fp, return_value_offset_from_fp * kPointerSize));
1389 Bind(&return_value_loaded); 1347 Bind(&return_value_loaded);
1390 // No more valid handles (the result handle was the last one). Restore 1348 // No more valid handles (the result handle was the last one). Restore
1391 // previous handle scope. 1349 // previous handle scope.
1392 Str(next_address_reg, MemOperand(handle_scope_base, kNextOffset)); 1350 Str(next_address_reg, MemOperand(handle_scope_base, kNextOffset));
1393 if (emit_debug_code()) { 1351 if (emit_debug_code()) {
1394 Ldr(w1, MemOperand(handle_scope_base, kLevelOffset)); 1352 Ldr(w1, MemOperand(handle_scope_base, kLevelOffset));
1395 Cmp(w1, level_reg); 1353 Cmp(w1, level_reg);
1396 Check(eq, "Unexpected level after return from api call"); 1354 Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
1397 } 1355 }
1398 Sub(level_reg, level_reg, 1); 1356 Sub(level_reg, level_reg, 1);
1399 Str(level_reg, MemOperand(handle_scope_base, kLevelOffset)); 1357 Str(level_reg, MemOperand(handle_scope_base, kLevelOffset));
1400 Ldr(x1, MemOperand(handle_scope_base, kLimitOffset)); 1358 Ldr(x1, MemOperand(handle_scope_base, kLimitOffset));
1401 Cmp(limit_reg, x1); 1359 Cmp(limit_reg, x1);
1402 B(ne, &delete_allocated_handles); 1360 B(ne, &delete_allocated_handles);
1403 1361
1404 Bind(&leave_exit_frame); 1362 Bind(&leave_exit_frame);
1405 // Restore callee-saved registers. 1363 // Restore callee-saved registers.
1406 Peek(x19, (spill_offset + 0) * kXRegSizeInBytes); 1364 Peek(x19, (spill_offset + 0) * kXRegSizeInBytes);
(...skipping 203 matching lines...)
1610 if (emit_debug_code()) { 1568 if (emit_debug_code()) {
1611 // Because the stack pointer must be aligned on a 16-byte boundary, the 1569 // Because the stack pointer must be aligned on a 16-byte boundary, the
1612 // aligned csp can be up to 12 bytes below the jssp. This is the case 1570 // aligned csp can be up to 12 bytes below the jssp. This is the case
1613 // where we only pushed one W register on top of an aligned jssp. 1571 // where we only pushed one W register on top of an aligned jssp.
1614 Register temp = Tmp1(); 1572 Register temp = Tmp1();
1615 ASSERT(ActivationFrameAlignment() == 16); 1573 ASSERT(ActivationFrameAlignment() == 16);
1616 Sub(temp, csp, old_stack_pointer); 1574 Sub(temp, csp, old_stack_pointer);
1617 // We want temp <= 0 && temp >= -12. 1575 // We want temp <= 0 && temp >= -12.
1618 Cmp(temp, 0); 1576 Cmp(temp, 0);
1619 Ccmp(temp, -12, NFlag, le); 1577 Ccmp(temp, -12, NFlag, le);
1620 Check(ge, "The stack was corrupted by MacroAssembler::Call()."); 1578 Check(ge, kTheStackWasCorruptedByMacroAssemblerCall);
1621 } 1579 }
1622 SetStackPointer(old_stack_pointer); 1580 SetStackPointer(old_stack_pointer);
1623 } 1581 }
1624 } 1582 }
1625 1583
1626 1584
1627 void MacroAssembler::Jump(Register target) { 1585 void MacroAssembler::Jump(Register target) {
1628 Br(target); 1586 Br(target);
1629 } 1587 }
1630 1588
(...skipping 376 matching lines...)
2007 // handled separately. 1965 // handled separately.
2008 ASSERT(!temps.IncludesAliasOf(dst)); 1966 ASSERT(!temps.IncludesAliasOf(dst));
2009 ASSERT(!temps.IncludesAliasOf(src)); 1967 ASSERT(!temps.IncludesAliasOf(src));
2010 ASSERT(!temps.IncludesAliasOf(Tmp0())); 1968 ASSERT(!temps.IncludesAliasOf(Tmp0()));
2011 ASSERT(!temps.IncludesAliasOf(Tmp1())); 1969 ASSERT(!temps.IncludesAliasOf(Tmp1()));
2012 ASSERT(!temps.IncludesAliasOf(xzr)); 1970 ASSERT(!temps.IncludesAliasOf(xzr));
2013 ASSERT(!AreAliased(dst, src, Tmp0(), Tmp1())); 1971 ASSERT(!AreAliased(dst, src, Tmp0(), Tmp1()));
2014 1972
2015 if (emit_debug_code()) { 1973 if (emit_debug_code()) {
2016 Cmp(dst, src); 1974 Cmp(dst, src);
2017 Check(ne, "In CopyFields, the destination is the same as the source."); 1975 Check(ne, kTheSourceAndDestinationAreTheSame);
2018 } 1976 }
2019 1977
2020 // The value of 'count' at which a loop will be generated (if there are 1978 // The value of 'count' at which a loop will be generated (if there are
2021 // enough scratch registers). 1979 // enough scratch registers).
2022 static const unsigned kLoopThreshold = 8; 1980 static const unsigned kLoopThreshold = 8;
2023 1981
2024 ASSERT(!temps.IsEmpty()); 1982 ASSERT(!temps.IsEmpty());
2025 Register scratch1 = Register(temps.PopLowestIndex()); 1983 Register scratch1 = Register(temps.PopLowestIndex());
2026 Register scratch2 = Register(temps.PopLowestIndex()); 1984 Register scratch2 = Register(temps.PopLowestIndex());
2027 Register scratch3 = Register(temps.PopLowestIndex()); 1985 Register scratch3 = Register(temps.PopLowestIndex());
(...skipping 15 matching lines...)
2043 Register length, 2001 Register length,
2044 Register scratch, 2002 Register scratch,
2045 CopyHint hint) { 2003 CopyHint hint) {
2046 ASSERT(!AreAliased(src, dst, length, scratch)); 2004 ASSERT(!AreAliased(src, dst, length, scratch));
2047 2005
2048 // TODO(all): Implement a faster copy function, and use hint to determine 2006 // TODO(all): Implement a faster copy function, and use hint to determine
2049 // which algorithm to use for copies. 2007 // which algorithm to use for copies.
2050 if (emit_debug_code()) { 2008 if (emit_debug_code()) {
2051 // Check copy length. 2009 // Check copy length.
2052 Cmp(length, 0); 2010 Cmp(length, 0);
2053 Assert(ge, "Copy length < 0"); 2011 // TODO(all): Add this error code to objects.h.
2012 // Assert(ge, kCopyLengthIsBelowZero);
2013 Assert(ge, kUnknown);
2054 2014
2055 // Check src and dst buffers don't overlap. 2015 // Check src and dst buffers don't overlap.
2056 Add(scratch, src, length); // Calculate end of src buffer. 2016 Add(scratch, src, length); // Calculate end of src buffer.
2057 Cmp(scratch, dst); 2017 Cmp(scratch, dst);
2058 Add(scratch, dst, length); // Calculate end of dst buffer. 2018 Add(scratch, dst, length); // Calculate end of dst buffer.
2059 Ccmp(scratch, src, ZFlag, gt); 2019 Ccmp(scratch, src, ZFlag, gt);
2060 Assert(le, "CopyBytes src and dst buffers overlap"); 2020 // TODO(all): Add this error code to objects.h.
2021 // Assert(le, kCopyBytesSrcAndDstBuffersOverlap);
2022 Assert(le, kUnknown);
2061 } 2023 }
2062 2024
2063 Label loop, done; 2025 Label loop, done;
2064 Cbz(length, &done); 2026 Cbz(length, &done);
2065 2027
2066 Bind(&loop); 2028 Bind(&loop);
2067 Sub(length, length, 1); 2029 Sub(length, length, 1);
2068 Ldrb(scratch, MemOperand(src, 1, PostIndex)); 2030 Ldrb(scratch, MemOperand(src, 1, PostIndex));
2069 Strb(scratch, MemOperand(dst, 1, PostIndex)); 2031 Strb(scratch, MemOperand(dst, 1, PostIndex));
2070 Cbnz(length, &loop); 2032 Cbnz(length, &loop);
(...skipping 22 matching lines...)
2093 Register scratch2, 2055 Register scratch2,
2094 Label* failure, 2056 Label* failure,
2095 SmiCheckType smi_check) { 2057 SmiCheckType smi_check) {
2096 2058
2097 if (smi_check == DO_SMI_CHECK) { 2059 if (smi_check == DO_SMI_CHECK) {
2098 JumpIfEitherSmi(first, second, failure); 2060 JumpIfEitherSmi(first, second, failure);
2099 } else if (emit_debug_code()) { 2061 } else if (emit_debug_code()) {
2100 ASSERT(smi_check == DONT_DO_SMI_CHECK); 2062 ASSERT(smi_check == DONT_DO_SMI_CHECK);
2101 Label not_smi; 2063 Label not_smi;
2102 JumpIfEitherSmi(first, second, NULL, &not_smi); 2064 JumpIfEitherSmi(first, second, NULL, &not_smi);
2103 Abort("At least one input is a smi."); 2065
2066 // TODO(all): Add this error code to objects.h.
2067 // Abort(kAtLeastOneInputIsASmi);
2068 Abort(kUnknown);
2069
2104 Bind(&not_smi); 2070 Bind(&not_smi);
2105 } 2071 }
2106 2072
2107 // Test that both first and second are sequential ASCII strings. 2073 // Test that both first and second are sequential ASCII strings.
2108 Ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset)); 2074 Ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
2109 Ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset)); 2075 Ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
2110 Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); 2076 Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
2111 Ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); 2077 Ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));
2112 2078
2113 JumpIfEitherInstanceTypeIsNotSequentialAscii(scratch1, 2079 JumpIfEitherInstanceTypeIsNotSequentialAscii(scratch1,
(...skipping 436 matching lines...)
2550 Cmp(exponent, HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 32); 2516 Cmp(exponent, HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 32);
2551 CzeroX(result, ge); 2517 CzeroX(result, ge);
2552 B(ge, &done); 2518 B(ge, &done);
2553 2519
2554 // The Fcvtzs sequence handles all cases except where the conversion causes 2520 // The Fcvtzs sequence handles all cases except where the conversion causes
2555 // signed overflow in the int64_t target. Since we've already handled 2521 // signed overflow in the int64_t target. Since we've already handled
2556 // exponents >= 84, we can guarantee that 63 <= exponent < 84. 2522 // exponents >= 84, we can guarantee that 63 <= exponent < 84.
2557 2523
2558 if (emit_debug_code()) { 2524 if (emit_debug_code()) {
2559 Cmp(exponent, HeapNumber::kExponentBias + 63); 2525 Cmp(exponent, HeapNumber::kExponentBias + 63);
2560 Check(ge, "This input should have been handled by the FPU."); 2526 // TODO(all): Add this error code to objects.h.
2527 // Check(ge, kThisInputShouldHaveBeenHandledByTheFPU);
2528 Check(ge, kUnknown);
2561 } 2529 }
2562 2530
2563 // Isolate the mantissa bits, and set the implicit '1'. 2531 // Isolate the mantissa bits, and set the implicit '1'.
2564 Register mantissa = scratch2; 2532 Register mantissa = scratch2;
2565 Ubfx(mantissa, result, 0, HeapNumber::kMantissaBits); 2533 Ubfx(mantissa, result, 0, HeapNumber::kMantissaBits);
2566 Orr(mantissa, mantissa, 1UL << HeapNumber::kMantissaBits); 2534 Orr(mantissa, mantissa, 1UL << HeapNumber::kMantissaBits);
2567 2535
2568 // Negate the mantissa if necessary. 2536 // Negate the mantissa if necessary.
2569 Tst(result, kXSignMask); 2537 Tst(result, kXSignMask);
2570 Cneg(mantissa, mantissa, ne); 2538 Cneg(mantissa, mantissa, ne);
(...skipping 28 matching lines...)
2599 void MacroAssembler::HeapNumberECMA262ToInt32(Register result, 2567 void MacroAssembler::HeapNumberECMA262ToInt32(Register result,
2600 Register heap_number, 2568 Register heap_number,
2601 Register scratch1, 2569 Register scratch1,
2602 Register scratch2, 2570 Register scratch2,
2603 DoubleRegister double_scratch, 2571 DoubleRegister double_scratch,
2604 ECMA262ToInt32Result format) { 2572 ECMA262ToInt32Result format) {
2605 if (emit_debug_code()) { 2573 if (emit_debug_code()) {
2606 // Verify we indeed have a HeapNumber. 2574 // Verify we indeed have a HeapNumber.
2607 Label ok; 2575 Label ok;
2608 JumpIfHeapNumber(heap_number, &ok); 2576 JumpIfHeapNumber(heap_number, &ok);
2609 Abort("A HeapNumber is expected as input."); 2577 // TODO(all): Add this error code to objects.h.
2578 // Abort(kExpectedHeapNumber);
2579 Abort(kUnknown);
2610 Bind(&ok); 2580 Bind(&ok);
2611 } 2581 }
2612 2582
2613 Ldr(double_scratch, FieldMemOperand(heap_number, HeapNumber::kValueOffset)); 2583 Ldr(double_scratch, FieldMemOperand(heap_number, HeapNumber::kValueOffset));
2614 ECMA262ToInt32(result, double_scratch, scratch1, scratch2, format); 2584 ECMA262ToInt32(result, double_scratch, scratch1, scratch2, format);
2615 } 2585 }
2616 2586
2617 2587
2618 void MacroAssembler::EnterFrame(StackFrame::Type type) { 2588 void MacroAssembler::EnterFrame(StackFrame::Type type) {
2619 ASSERT(jssp.Is(StackPointer())); 2589 ASSERT(jssp.Is(StackPointer()));
(...skipping 294 matching lines...)
2914 Mov(top_address, Operand(heap_allocation_top)); 2884 Mov(top_address, Operand(heap_allocation_top));
2915 2885
2916 if ((flags & RESULT_CONTAINS_TOP) == 0) { 2886 if ((flags & RESULT_CONTAINS_TOP) == 0) {
2917 // Load allocation top into result and the allocation limit. 2887 // Load allocation top into result and the allocation limit.
2918 Ldp(result, allocation_limit, MemOperand(top_address)); 2888 Ldp(result, allocation_limit, MemOperand(top_address));
2919 } else { 2889 } else {
2920 if (emit_debug_code()) { 2890 if (emit_debug_code()) {
2921 // Assert that result actually contains top on entry. 2891 // Assert that result actually contains top on entry.
2922 Ldr(Tmp0(), MemOperand(top_address)); 2892 Ldr(Tmp0(), MemOperand(top_address));
2923 Cmp(result, Tmp0()); 2893 Cmp(result, Tmp0());
2924 Check(eq, "Unexpected allocation top."); 2894 Check(eq, kUnexpectedAllocationTop);
2925 } 2895 }
2926 // Load the allocation limit. 'result' already contains the allocation top. 2896 // Load the allocation limit. 'result' already contains the allocation top.
2927 Ldr(allocation_limit, MemOperand(top_address, limit - top)); 2897 Ldr(allocation_limit, MemOperand(top_address, limit - top));
2928 } 2898 }
2929 2899
2930 // We can ignore DOUBLE_ALIGNMENT flags here because doubles and pointers have 2900 // We can ignore DOUBLE_ALIGNMENT flags here because doubles and pointers have
2931 // the same alignment on A64. 2901 // the same alignment on A64.
2932 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment); 2902 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
2933 2903
2934 // Calculate new top and bail out if new space is exhausted. 2904 // Calculate new top and bail out if new space is exhausted.
(...skipping 48 matching lines...)
2983 Mov(top_address, Operand(heap_allocation_top)); 2953 Mov(top_address, Operand(heap_allocation_top));
2984 2954
2985 if ((flags & RESULT_CONTAINS_TOP) == 0) { 2955 if ((flags & RESULT_CONTAINS_TOP) == 0) {
2986 // Load allocation top into result and the allocation limit. 2956 // Load allocation top into result and the allocation limit.
2987 Ldp(result, allocation_limit, MemOperand(top_address)); 2957 Ldp(result, allocation_limit, MemOperand(top_address));
2988 } else { 2958 } else {
2989 if (emit_debug_code()) { 2959 if (emit_debug_code()) {
2990 // Assert that result actually contains top on entry. 2960 // Assert that result actually contains top on entry.
2991 Ldr(Tmp0(), MemOperand(top_address)); 2961 Ldr(Tmp0(), MemOperand(top_address));
2992 Cmp(result, Tmp0()); 2962 Cmp(result, Tmp0());
2993 Check(eq, "Unexpected allocation top."); 2963 Check(eq, kUnexpectedAllocationTop);
2994 } 2964 }
2995 // Load the allocation limit. 'result' already contains the allocation top. 2965 // Load the allocation limit. 'result' already contains the allocation top.
2996 Ldr(allocation_limit, MemOperand(top_address, limit - top)); 2966 Ldr(allocation_limit, MemOperand(top_address, limit - top));
2997 } 2967 }
2998 2968
2999 // We can ignore DOUBLE_ALIGNMENT flags here because doubles and pointers have 2969 // We can ignore DOUBLE_ALIGNMENT flags here because doubles and pointers have
3000 // the same alignment on A64. 2970 // the same alignment on A64.
3001 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment); 2971 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
3002 2972
3003 // Calculate new top and bail out if new space is exhausted 2973 // Calculate new top and bail out if new space is exhausted
3004 if ((flags & SIZE_IN_WORDS) != 0) { 2974 if ((flags & SIZE_IN_WORDS) != 0) {
3005 Adds(Tmp1(), result, Operand(object_size, LSL, kPointerSizeLog2)); 2975 Adds(Tmp1(), result, Operand(object_size, LSL, kPointerSizeLog2));
3006 } else { 2976 } else {
3007 Adds(Tmp1(), result, object_size); 2977 Adds(Tmp1(), result, object_size);
3008 } 2978 }
3009 2979
3010 if (emit_debug_code()) { 2980 if (emit_debug_code()) {
3011 Tst(Tmp1(), kObjectAlignmentMask); 2981 Tst(Tmp1(), kObjectAlignmentMask);
3012 Check(eq, "Unaligned allocation in new space"); 2982 Check(eq, kUnalignedAllocationInNewSpace);
3013 } 2983 }
3014 2984
3015 B(vs, gc_required); 2985 B(vs, gc_required);
3016 Cmp(Tmp1(), allocation_limit); 2986 Cmp(Tmp1(), allocation_limit);
3017 B(hi, gc_required); 2987 B(hi, gc_required);
3018 Str(Tmp1(), MemOperand(top_address)); 2988 Str(Tmp1(), MemOperand(top_address));
3019 2989
3020 // Tag the object if requested. 2990 // Tag the object if requested.
3021 if ((flags & TAG_OBJECT) != 0) { 2991 if ((flags & TAG_OBJECT) != 0) {
3022 Orr(result, result, kHeapObjectTag); 2992 Orr(result, result, kHeapObjectTag);
3023 } 2993 }
3024 } 2994 }
3025 2995
3026 2996
3027 void MacroAssembler::UndoAllocationInNewSpace(Register object, 2997 void MacroAssembler::UndoAllocationInNewSpace(Register object,
3028 Register scratch) { 2998 Register scratch) {
3029 ExternalReference new_space_allocation_top = 2999 ExternalReference new_space_allocation_top =
3030 ExternalReference::new_space_allocation_top_address(isolate()); 3000 ExternalReference::new_space_allocation_top_address(isolate());
3031 3001
3032 // Make sure the object has no tag before resetting top. 3002 // Make sure the object has no tag before resetting top.
3033 Bic(object, object, kHeapObjectTagMask); 3003 Bic(object, object, kHeapObjectTagMask);
3034 #ifdef DEBUG 3004 #ifdef DEBUG
3035 // Check that the object un-allocated is below the current top. 3005 // Check that the object un-allocated is below the current top.
3036 Mov(scratch, Operand(new_space_allocation_top)); 3006 Mov(scratch, Operand(new_space_allocation_top));
3037 Ldr(scratch, MemOperand(scratch)); 3007 Ldr(scratch, MemOperand(scratch));
3038 Cmp(object, scratch); 3008 Cmp(object, scratch);
3039 Check(lt, "Trying to undo allocation of non allocated memory."); 3009 // TODO(all): Add this error code to objects.h.
3010 // Check(lt, kTryingToUndoAllocationOfNonAllocatedMemory);
3011 Check(lt, kUnknown);
3040 #endif 3012 #endif
3041 // Write the address of the object to un-allocate as the current top. 3013 // Write the address of the object to un-allocate as the current top.
3042 Mov(scratch, Operand(new_space_allocation_top)); 3014 Mov(scratch, Operand(new_space_allocation_top));
3043 Str(object, MemOperand(scratch)); 3015 Str(object, MemOperand(scratch));
3044 } 3016 }
3045 3017
3046 3018
3047 void MacroAssembler::AllocateTwoByteString(Register result, 3019 void MacroAssembler::AllocateTwoByteString(Register result,
3048 Register length, 3020 Register length,
3049 Register scratch1, 3021 Register scratch1,
(...skipping 546 matching lines...)
3596 3568
3597 Label same_contexts; 3569 Label same_contexts;
3598 3570
3599 ASSERT(!AreAliased(holder_reg, scratch)); 3571 ASSERT(!AreAliased(holder_reg, scratch));
3600 3572
3601 // Load current lexical context from the stack frame. 3573 // Load current lexical context from the stack frame.
3602 Ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3574 Ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
3603 // In debug mode, make sure the lexical context is set. 3575 // In debug mode, make sure the lexical context is set.
3604 #ifdef DEBUG 3576 #ifdef DEBUG
3605 Cmp(scratch, 0); 3577 Cmp(scratch, 0);
3606 Check(ne, "we should not have an empty lexical context"); 3578 Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
3607 #endif 3579 #endif
3608 3580
3609 // Load the native context of the current context. 3581 // Load the native context of the current context.
3610 int offset = 3582 int offset =
3611 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; 3583 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
3612 Ldr(scratch, FieldMemOperand(scratch, offset)); 3584 Ldr(scratch, FieldMemOperand(scratch, offset));
3613 Ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); 3585 Ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
3614 3586
3615 // Check the context is a native context. 3587 // Check the context is a native context.
3616 if (emit_debug_code()) { 3588 if (emit_debug_code()) {
3617 // Read the first word and compare to the global_context_map. 3589 // Read the first word and compare to the global_context_map.
3618 Register temp = Tmp1(); 3590 Register temp = Tmp1();
3619 Ldr(temp, FieldMemOperand(scratch, HeapObject::kMapOffset)); 3591 Ldr(temp, FieldMemOperand(scratch, HeapObject::kMapOffset));
3620 CompareRoot(temp, Heap::kNativeContextMapRootIndex); 3592 CompareRoot(temp, Heap::kNativeContextMapRootIndex);
3621 Check(eq, "JSGlobalObject::native_context should be a native context."); 3593 // TODO(all): Add this error code to objects.h.
3594 // Check(eq, kExpectedNativeContext);
3595 Check(eq, kUnknown);
3622 } 3596 }
3623 3597
3624 // Check if both contexts are the same. 3598 // Check if both contexts are the same.
3625 ldr(Tmp0(), FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3599 ldr(Tmp0(), FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3626 cmp(scratch, Tmp0()); 3600 cmp(scratch, Tmp0());
3627 b(&same_contexts, eq); 3601 b(&same_contexts, eq);
3628 3602
3629 // Check the context is a native context. 3603 // Check the context is a native context.
3630 if (emit_debug_code()) { 3604 if (emit_debug_code()) {
3631 // Move Tmp0() into a different register, as CompareRoot will use it. 3605 // Move Tmp0() into a different register, as CompareRoot will use it.
3632 Register temp = Tmp1(); 3606 Register temp = Tmp1();
3633 mov(temp, Tmp0()); 3607 mov(temp, Tmp0());
3634 CompareRoot(temp, Heap::kNullValueRootIndex); 3608 CompareRoot(temp, Heap::kNullValueRootIndex);
3635 Check(ne, "JSGlobalProxy::context() should not be null."); 3609 // TODO(all): Add this error code to objects.h.
3610 // Check(ne, kExpectedNonNullContext);
3611 Check(ne, kUnknown);
3636 3612
3637 Ldr(temp, FieldMemOperand(temp, HeapObject::kMapOffset)); 3613 Ldr(temp, FieldMemOperand(temp, HeapObject::kMapOffset));
3638 CompareRoot(temp, Heap::kNativeContextMapRootIndex); 3614 CompareRoot(temp, Heap::kNativeContextMapRootIndex);
3639 Check(eq, "JSGlobalObject::native_context should be a native context."); 3615 // TODO(all): Add this error code to objects.h.
3616 // Check(eq, kExpectedNativeContext);
3617 Check(eq, kUnknown);
3640 3618
3641 // Let's consider that Tmp0() has been clobbered by the MacroAssembler. 3619 // Let's consider that Tmp0() has been clobbered by the MacroAssembler.
3642 // We reload it with its value. 3620 // We reload it with its value.
3643 ldr(Tmp0(), FieldMemOperand(holder_reg, 3621 ldr(Tmp0(), FieldMemOperand(holder_reg,
3644 JSGlobalProxy::kNativeContextOffset)); 3622 JSGlobalProxy::kNativeContextOffset));
3645 } 3623 }
3646 3624
3647 // Check that the security token in the calling global object is 3625 // Check that the security token in the calling global object is
3648 // compatible with the security token in the receiving global 3626 // compatible with the security token in the receiving global
3649 // object. 3627 // object.
(...skipping 107 matching lines...)
3757 void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. 3735 void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
3758 Register address, 3736 Register address,
3759 Register scratch, 3737 Register scratch,
3760 SaveFPRegsMode fp_mode, 3738 SaveFPRegsMode fp_mode,
3761 RememberedSetFinalAction and_then) { 3739 RememberedSetFinalAction and_then) {
3762 ASSERT(!AreAliased(object, address, scratch)); 3740 ASSERT(!AreAliased(object, address, scratch));
3763 Label done, store_buffer_overflow; 3741 Label done, store_buffer_overflow;
3764 if (emit_debug_code()) { 3742 if (emit_debug_code()) {
3765 Label ok; 3743 Label ok;
3766 JumpIfNotInNewSpace(object, &ok); 3744 JumpIfNotInNewSpace(object, &ok);
3767 Abort("Remembered set pointer is in new space"); 3745 // TODO(all): Add this error code to objects.h.
3746 // Abort(kRememberedSetPointerIsInNewSpace);
3747 Abort(kUnknown);
3768 bind(&ok); 3748 bind(&ok);
3769 } 3749 }
3770 // Load store buffer top. 3750 // Load store buffer top.
3771 Mov(Tmp0(), Operand(ExternalReference::store_buffer_top(isolate()))); 3751 Mov(Tmp0(), Operand(ExternalReference::store_buffer_top(isolate())));
3772 Ldr(scratch, MemOperand(Tmp0())); 3752 Ldr(scratch, MemOperand(Tmp0()));
3773 // Store pointer to buffer and increment buffer top. 3753 // Store pointer to buffer and increment buffer top.
3774 Str(address, MemOperand(scratch, kPointerSize, PostIndex)); 3754 Str(address, MemOperand(scratch, kPointerSize, PostIndex));
3775 // Write back new top of buffer. 3755 // Write back new top of buffer.
3776 Str(scratch, MemOperand(Tmp0())); 3756 Str(scratch, MemOperand(Tmp0()));
3777 // Call stub on end of buffer. 3757 // Call stub on end of buffer.
(...skipping 111 matching lines...)
3889 3869
3890 // Although the object register is tagged, the offset is relative to the start 3870 // Although the object register is tagged, the offset is relative to the start
3891 // of the object, so offset must be a multiple of kPointerSize. 3871 // of the object, so offset must be a multiple of kPointerSize.
3892 ASSERT(IsAligned(offset, kPointerSize)); 3872 ASSERT(IsAligned(offset, kPointerSize));
3893 3873
3894 Add(scratch, object, offset - kHeapObjectTag); 3874 Add(scratch, object, offset - kHeapObjectTag);
3895 if (emit_debug_code()) { 3875 if (emit_debug_code()) {
3896 Label ok; 3876 Label ok;
3897 Tst(scratch, (1 << kPointerSizeLog2) - 1); 3877 Tst(scratch, (1 << kPointerSizeLog2) - 1);
3898 B(eq, &ok); 3878 B(eq, &ok);
3899 Abort("Unaligned cell in write barrier"); 3879 // TODO(all): Add this error code to objects.h.
3880 // Abort(kUnalignedCellInWriteBarrier);
3881 Abort(kUnknown);
3900 Bind(&ok); 3882 Bind(&ok);
3901 } 3883 }
3902 3884
3903 RecordWrite(object, 3885 RecordWrite(object,
3904 scratch, 3886 scratch,
3905 value, 3887 value,
3906 lr_status, 3888 lr_status,
3907 save_fp, 3889 save_fp,
3908 remembered_set_action, 3890 remembered_set_action,
3909 OMIT_SMI_CHECK, 3891 OMIT_SMI_CHECK,
(...skipping 30 matching lines...)
3940 SmiCheck smi_check, 3922 SmiCheck smi_check,
3941 PregenExpectation pregen_expectation) { 3923 PregenExpectation pregen_expectation) {
3942 // The compiled code assumes that record write doesn't change the 3924 // The compiled code assumes that record write doesn't change the
3943 // context register, so we check that none of the clobbered 3925 // context register, so we check that none of the clobbered
3944 // registers are cp. 3926 // registers are cp.
3945 ASSERT(!address.is(cp) && !value.is(cp)); 3927 ASSERT(!address.is(cp) && !value.is(cp));
3946 3928
3947 if (emit_debug_code()) { 3929 if (emit_debug_code()) {
3948 Ldr(Tmp0(), MemOperand(address)); 3930 Ldr(Tmp0(), MemOperand(address));
3949 Cmp(Tmp0(), value); 3931 Cmp(Tmp0(), value);
3950 Check(eq, "Wrong address or value passed to RecordWrite."); 3932 Check(eq, kWrongAddressOrValuePassedToRecordWrite);
3951 } 3933 }
3952 3934
3953 Label done; 3935 Label done;
3954 3936
3955 if (smi_check == INLINE_SMI_CHECK) { 3937 if (smi_check == INLINE_SMI_CHECK) {
3956 ASSERT_EQ(0, kSmiTag); 3938 ASSERT_EQ(0, kSmiTag);
3957 JumpIfSmi(value, &done); 3939 JumpIfSmi(value, &done);
3958 } 3940 }
3959 3941
3960 CheckPageFlagClear(value, 3942 CheckPageFlagClear(value,
(...skipping 34 matching lines...)
3995 3977
3996 void MacroAssembler::AssertHasValidColor(const Register& reg) { 3978 void MacroAssembler::AssertHasValidColor(const Register& reg) {
3997 if (emit_debug_code()) { 3979 if (emit_debug_code()) {
3998 // The bit sequence is backward. The first character in the string 3980 // The bit sequence is backward. The first character in the string
3999 // represents the least significant bit. 3981 // represents the least significant bit.
4000 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); 3982 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
4001 3983
4002 Label color_is_valid; 3984 Label color_is_valid;
4003 Tbnz(reg, 0, &color_is_valid); 3985 Tbnz(reg, 0, &color_is_valid);
4004 Tbz(reg, 1, &color_is_valid); 3986 Tbz(reg, 1, &color_is_valid);
4005 Abort("Impossible color bit pattern found."); 3987 // TODO(all): Add this error code to objects.h.
3988 // Abort(kImpossibleColorBitPatternFound);
3989 Abort(kUnknown);
4006 Bind(&color_is_valid); 3990 Bind(&color_is_valid);
4007 } 3991 }
4008 } 3992 }
4009 3993
4010 3994
4011 void MacroAssembler::GetMarkBits(Register addr_reg, 3995 void MacroAssembler::GetMarkBits(Register addr_reg,
4012 Register bitmap_reg, 3996 Register bitmap_reg,
4013 Register shift_reg) { 3997 Register shift_reg) {
4014 ASSERT(!AreAliased(addr_reg, bitmap_reg, shift_reg, no_reg)); 3998 ASSERT(!AreAliased(addr_reg, bitmap_reg, shift_reg, no_reg));
4015 // addr_reg is divided into fields: 3999 // addr_reg is divided into fields:
(...skipping 76 matching lines...)
4092 4076
4093 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, 4077 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
4094 Register result) { 4078 Register result) {
4095 ASSERT(!result.Is(ldr_location)); 4079 ASSERT(!result.Is(ldr_location));
4096 const uint32_t kLdrLitOffset_lsb = 5; 4080 const uint32_t kLdrLitOffset_lsb = 5;
4097 const uint32_t kLdrLitOffset_width = 19; 4081 const uint32_t kLdrLitOffset_width = 19;
4098 Ldr(result, MemOperand(ldr_location)); 4082 Ldr(result, MemOperand(ldr_location));
4099 if (emit_debug_code()) { 4083 if (emit_debug_code()) {
4100 And(result, result, LoadLiteralFMask); 4084 And(result, result, LoadLiteralFMask);
4101 Cmp(result, LoadLiteralFixed); 4085 Cmp(result, LoadLiteralFixed);
4102 Check(eq, "The instruction to patch should be a load literal."); 4086 // TODO(all): Add this error code to objects.h.
4087 // Check(eq, kTheInstructionToPatchShouldBeALoadLiteral);
4088 Check(eq, kUnknown);
4103 // The instruction was clobbered. Reload it. 4089 // The instruction was clobbered. Reload it.
4104 Ldr(result, MemOperand(ldr_location)); 4090 Ldr(result, MemOperand(ldr_location));
4105 } 4091 }
4106 Sbfx(result, result, kLdrLitOffset_lsb, kLdrLitOffset_width); 4092 Sbfx(result, result, kLdrLitOffset_lsb, kLdrLitOffset_width);
4107 Add(result, ldr_location, Operand(result, LSL, kWordSizeInBytesLog2)); 4093 Add(result, ldr_location, Operand(result, LSL, kWordSizeInBytesLog2));
4108 } 4094 }
4109 4095
4110 4096
4111 void MacroAssembler::EnsureNotWhite( 4097 void MacroAssembler::EnsureNotWhite(
4112 Register value, 4098 Register value,
(...skipping 81 matching lines...)
4194 4180
4195 Bic(bitmap_scratch, bitmap_scratch, Page::kPageAlignmentMask); 4181 Bic(bitmap_scratch, bitmap_scratch, Page::kPageAlignmentMask);
4196 Ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); 4182 Ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
4197 Add(load_scratch, load_scratch, length_scratch); 4183 Add(load_scratch, load_scratch, length_scratch);
4198 Str(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); 4184 Str(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
4199 4185
4200 Bind(&done); 4186 Bind(&done);
4201 } 4187 }
4202 4188
4203 4189
4204 void MacroAssembler::Assert(Condition cond, const char* msg) { 4190 void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
4205 if (emit_debug_code()) { 4191 if (emit_debug_code()) {
4206 Check(cond, msg); 4192 Check(cond, reason);
4207 } 4193 }
4208 } 4194 }
4209 4195
4210 4196
4211 4197
4212 void MacroAssembler::AssertRegisterIsClear(Register reg, const char* msg) { 4198 void MacroAssembler::AssertRegisterIsClear(Register reg, BailoutReason reason) {
4213 if (emit_debug_code()) { 4199 if (emit_debug_code()) {
4214 CheckRegisterIsClear(reg, msg); 4200 CheckRegisterIsClear(reg, reason);
4215 } 4201 }
4216 } 4202 }
4217 4203
4218 4204
4219 void MacroAssembler::AssertRegisterIsRoot(Register reg, 4205 void MacroAssembler::AssertRegisterIsRoot(Register reg,
4220 Heap::RootListIndex index) { 4206 Heap::RootListIndex index) {
4221 // CompareRoot uses Tmp0(). 4207 // CompareRoot uses Tmp0().
4222 ASSERT(!reg.Is(Tmp0())); 4208 ASSERT(!reg.Is(Tmp0()));
4223 if (emit_debug_code()) { 4209 if (emit_debug_code()) {
4224 CompareRoot(reg, index); 4210 CompareRoot(reg, index);
4225 Check(eq, "Register did not match expected root"); 4211 Check(eq, kRegisterDidNotMatchExpectedRoot);
4226 } 4212 }
4227 } 4213 }
4228 4214
4229 4215
4230 void MacroAssembler::AssertFastElements(Register elements) { 4216 void MacroAssembler::AssertFastElements(Register elements) {
4231 if (emit_debug_code()) { 4217 if (emit_debug_code()) {
4232 Register temp = Tmp1(); 4218 Register temp = Tmp1();
4233 Label ok; 4219 Label ok;
4234 Ldr(temp, FieldMemOperand(elements, HeapObject::kMapOffset)); 4220 Ldr(temp, FieldMemOperand(elements, HeapObject::kMapOffset));
4235 JumpIfRoot(temp, Heap::kFixedArrayMapRootIndex, &ok); 4221 JumpIfRoot(temp, Heap::kFixedArrayMapRootIndex, &ok);
4236 JumpIfRoot(temp, Heap::kFixedDoubleArrayMapRootIndex, &ok); 4222 JumpIfRoot(temp, Heap::kFixedDoubleArrayMapRootIndex, &ok);
4237 JumpIfRoot(temp, Heap::kFixedCOWArrayMapRootIndex, &ok); 4223 JumpIfRoot(temp, Heap::kFixedCOWArrayMapRootIndex, &ok);
4238 Abort("JSObject with fast elements map has slow elements"); 4224 Abort(kJSObjectWithFastElementsMapHasSlowElements);
4239 Bind(&ok); 4225 Bind(&ok);
4240 } 4226 }
4241 } 4227 }
4242 4228
4243 4229
4244 void MacroAssembler::AssertIsString(const Register& object) { 4230 void MacroAssembler::AssertIsString(const Register& object) {
4245 if (emit_debug_code()) { 4231 if (emit_debug_code()) {
4246 Register temp = Tmp1(); 4232 Register temp = Tmp1();
4247 STATIC_ASSERT(kSmiTag == 0); 4233 STATIC_ASSERT(kSmiTag == 0);
4248 Tst(object, Operand(kSmiTagMask)); 4234 Tst(object, Operand(kSmiTagMask));
4249 Check(ne, "Operand is not a string"); 4235 Check(ne, kOperandIsNotAString);
4250 Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset)); 4236 Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset));
4251 CompareInstanceType(temp, temp, FIRST_NONSTRING_TYPE); 4237 CompareInstanceType(temp, temp, FIRST_NONSTRING_TYPE);
4252 Check(lo, "Operand is not a string"); 4238 Check(lo, kOperandIsNotAString);
4253 } 4239 }
4254 } 4240 }
4255 4241
4256 4242
4257 void MacroAssembler::Check(Condition cond, const char* msg) { 4243 void MacroAssembler::Check(Condition cond, BailoutReason reason) {
4258 Label ok; 4244 Label ok;
4259 B(cond, &ok); 4245 B(cond, &ok);
4260 Abort(msg); 4246 Abort(reason);
4261 // Will not return here. 4247 // Will not return here.
4262 Bind(&ok); 4248 Bind(&ok);
4263 } 4249 }
4264 4250
4265 4251
4266 void MacroAssembler::CheckRegisterIsClear(Register reg, const char* msg) { 4252 void MacroAssembler::CheckRegisterIsClear(Register reg, BailoutReason reason) {
4267 Label ok; 4253 Label ok;
4268 Cbz(reg, &ok); 4254 Cbz(reg, &ok);
4269 Abort(msg); 4255 Abort(reason);
4270 // Will not return here. 4256 // Will not return here.
4271 Bind(&ok); 4257 Bind(&ok);
4272 } 4258 }
4273 4259
4274 4260
4275 void MacroAssembler::Abort(const char* msg) { 4261 void MacroAssembler::Abort(BailoutReason reason) {
4276 #ifdef DEBUG 4262 #ifdef DEBUG
4277 if (msg != NULL) { 4263 RecordComment("Abort message: ");
4278 RecordComment("Abort message: "); 4264 RecordComment(GetBailoutReason(reason));
4279 RecordComment(msg);
4280 }
4281 #endif 4265 #endif
4282 4266
4283 Label msg_address; 4267 Label msg_address;
4284 Adr(x0, &msg_address); 4268 Adr(x0, &msg_address);
4285 4269
4286 if (use_real_aborts()) { 4270 if (use_real_aborts()) {
4287 // Split the message pointer into two SMI to avoid the GC 4271 // Split the message pointer into two SMI to avoid the GC
4288 // trying to scan the string. 4272 // trying to scan the string.
4289 STATIC_ASSERT((kSmiShift == 32) && (kSmiTag == 0)); 4273 STATIC_ASSERT((kSmiShift == 32) && (kSmiTag == 0));
4290 SmiTag(x1, x0); 4274 SmiTag(x1, x0);
(...skipping 23 matching lines...)
4314 // valid code). 4298 // valid code).
4315 // - We need a way to stop execution on both the simulator and real 4299 // - We need a way to stop execution on both the simulator and real
4316 // hardware, and Unreachable() is the best option. 4300 // hardware, and Unreachable() is the best option.
4317 Unreachable(); 4301 Unreachable();
4318 } 4302 }
4319 4303
4320 // Emit the message string directly in the instruction stream. 4304 // Emit the message string directly in the instruction stream.
4321 { 4305 {
4322 BlockConstPoolScope scope(this); 4306 BlockConstPoolScope scope(this);
4323 Bind(&msg_address); 4307 Bind(&msg_address);
4324 EmitStringData(msg); 4308 // TODO(jbramley): Since the reason is an enum, why do we still encode the
4309 // string (and a pointer to it) in the instruction stream?
4310 EmitStringData(GetBailoutReason(reason));
4325 } 4311 }
4326 } 4312 }
4327 4313
4328 4314
4329 void MacroAssembler::LoadTransitionedArrayMapConditional( 4315 void MacroAssembler::LoadTransitionedArrayMapConditional(
4330 ElementsKind expected_kind, 4316 ElementsKind expected_kind,
4331 ElementsKind transitioned_kind, 4317 ElementsKind transitioned_kind,
4332 Register map_in_out, 4318 Register map_in_out,
4333 Register scratch, 4319 Register scratch,
4334 Label* no_map_match) { 4320 Label* no_map_match) {
(...skipping 62 matching lines...)
4397 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, 4383 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4398 Register map, 4384 Register map,
4399 Register scratch) { 4385 Register scratch) {
4400 // Load the initial map. The global functions all have initial maps. 4386 // Load the initial map. The global functions all have initial maps.
4401 Ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 4387 Ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
4402 if (emit_debug_code()) { 4388 if (emit_debug_code()) {
4403 Label ok, fail; 4389 Label ok, fail;
4404 CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK); 4390 CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
4405 B(&ok); 4391 B(&ok);
4406 Bind(&fail); 4392 Bind(&fail);
4407 Abort("Global function must have initial map"); 4393 // TODO(all): Add this error code to objects.h.
4394 // Abort(kGlobalFunctionMustHaveInitialMap);
4395 Abort(kUnknown);
4408 Bind(&ok); 4396 Bind(&ok);
4409 } 4397 }
4410 } 4398 }
4411 4399
4412 4400
4413 // This is the main Printf implementation. All other Printf variants call 4401 // This is the main Printf implementation. All other Printf variants call
4414 // PrintfNoPreserve after setting up one or more PreserveRegisterScopes. 4402 // PrintfNoPreserve after setting up one or more PreserveRegisterScopes.
4415 void MacroAssembler::PrintfNoPreserve(const char * format, 4403 void MacroAssembler::PrintfNoPreserve(const char * format,
4416 const CPURegister& arg0, 4404 const CPURegister& arg0,
4417 const CPURegister& arg1, 4405 const CPURegister& arg1,
(...skipping 309 matching lines...)
4727 } 4715 }
4728 } 4716 }
4729 4717
4730 4718
4731 #undef __ 4719 #undef __
4732 4720
4733 4721
4734 } } // namespace v8::internal 4722 } } // namespace v8::internal
4735 4723
4736 #endif // V8_TARGET_ARCH_A64 4724 #endif // V8_TARGET_ARCH_A64
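
The recurring change in this patch replaces free-form abort strings with BailoutReason enum values: Check(), Assert(), CheckRegisterIsClear() and Abort() now take a BailoutReason, and reasons that do not yet exist in objects.h are marked with TODO comments and fall back to kUnknown. The snippet below is a minimal, self-contained sketch of that pattern under simplified, hypothetical signatures; it is not V8's MacroAssembler, and the message table here is invented for illustration.

    // Sketch of the string-message -> BailoutReason pattern (not V8 code).
    #include <cstdio>
    #include <cstdlib>

    enum BailoutReason {
      kUnknown,
      kUnexpectedAllocationTop,
      kOperandIsNotAString
    };

    // Stand-in for V8's GetBailoutReason(); the real mapping lives elsewhere.
    static const char* GetBailoutReason(BailoutReason reason) {
      switch (reason) {
        case kUnexpectedAllocationTop: return "Unexpected allocation top";
        case kOperandIsNotAString:     return "Operand is not a string";
        default:                       return "unknown";
      }
    }

    // Before the patch: Check(cond, "Unexpected allocation top.");
    // After the patch:  Check(cond, kUnexpectedAllocationTop);
    static void Check(bool condition, BailoutReason reason) {
      if (!condition) {
        std::fprintf(stderr, "Abort: %s\n", GetBailoutReason(reason));
        std::abort();
      }
    }

    int main() {
      Check(true, kUnexpectedAllocationTop);  // Condition holds; no abort.
      return 0;
    }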