Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(865)

Side by Side Diff: runtime/vm/intermediate_language_arm.cc

Issue 14476009: Enable api tests on ARM. (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « runtime/vm/flow_graph_compiler_arm.cc ('k') | runtime/vm/stub_code_arm.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM.
6 #if defined(TARGET_ARCH_ARM) 6 #if defined(TARGET_ARCH_ARM)
7 7
8 #include "vm/intermediate_language.h" 8 #include "vm/intermediate_language.h"
9 9
10 #include "lib/error.h" 10 #include "lib/error.h"
(...skipping 823 matching lines...) Expand 10 before | Expand all | Expand 10 after
834 return NULL; 834 return NULL;
835 } 835 }
836 836
837 837
838 void LoadUntaggedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 838 void LoadUntaggedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
839 UNIMPLEMENTED(); 839 UNIMPLEMENTED();
840 } 840 }
841 841
842 842
843 CompileType LoadIndexedInstr::ComputeType() const { 843 CompileType LoadIndexedInstr::ComputeType() const {
844 UNIMPLEMENTED(); 844 switch (class_id_) {
845 return CompileType::Dynamic(); 845 case kArrayCid:
846 case kImmutableArrayCid:
847 return CompileType::Dynamic();
848
849 case kTypedDataFloat32ArrayCid:
850 case kTypedDataFloat64ArrayCid:
851 return CompileType::FromCid(kDoubleCid);
852 case kTypedDataFloat32x4ArrayCid:
853 return CompileType::FromCid(kFloat32x4Cid);
854
855 case kTypedDataInt8ArrayCid:
856 case kTypedDataUint8ArrayCid:
857 case kTypedDataUint8ClampedArrayCid:
858 case kExternalTypedDataUint8ArrayCid:
859 case kExternalTypedDataUint8ClampedArrayCid:
860 case kTypedDataInt16ArrayCid:
861 case kTypedDataUint16ArrayCid:
862 case kOneByteStringCid:
863 case kTwoByteStringCid:
864 return CompileType::FromCid(kSmiCid);
865
866 case kTypedDataInt32ArrayCid:
867 case kTypedDataUint32ArrayCid:
868 // Result can be Smi or Mint when boxed.
869 // Instruction can deoptimize if we optimistically assumed that the result
870 // fits into Smi.
871 return CanDeoptimize() ? CompileType::FromCid(kSmiCid)
872 : CompileType::Int();
873
874 default:
875 UNIMPLEMENTED();
876 return CompileType::Dynamic();
877 }
846 } 878 }
847 879
848 880
849 Representation LoadIndexedInstr::representation() const { 881 Representation LoadIndexedInstr::representation() const {
850 UNIMPLEMENTED(); 882 switch (class_id_) {
851 return kTagged; 883 case kArrayCid:
884 case kImmutableArrayCid:
885 case kTypedDataInt8ArrayCid:
886 case kTypedDataUint8ArrayCid:
887 case kTypedDataUint8ClampedArrayCid:
888 case kExternalTypedDataUint8ArrayCid:
889 case kExternalTypedDataUint8ClampedArrayCid:
890 case kTypedDataInt16ArrayCid:
891 case kTypedDataUint16ArrayCid:
892 case kOneByteStringCid:
893 case kTwoByteStringCid:
894 return kTagged;
895 case kTypedDataInt32ArrayCid:
896 case kTypedDataUint32ArrayCid:
897 // Instruction can deoptimize if we optimistically assumed that the result
898 // fits into Smi.
899 return CanDeoptimize() ? kTagged : kUnboxedMint;
900 case kTypedDataFloat32ArrayCid:
901 case kTypedDataFloat64ArrayCid:
902 return kUnboxedDouble;
903 case kTypedDataFloat32x4ArrayCid:
904 return kUnboxedFloat32x4;
905 default:
906 UNIMPLEMENTED();
907 return kTagged;
908 }
852 } 909 }
853 910
854 911
855 LocationSummary* LoadIndexedInstr::MakeLocationSummary() const { 912 LocationSummary* LoadIndexedInstr::MakeLocationSummary() const {
856 UNIMPLEMENTED(); 913 const intptr_t kNumInputs = 2;
857 return NULL; 914 const intptr_t kNumTemps = 0;
915 LocationSummary* locs =
916 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall);
917 locs->set_in(0, Location::RequiresRegister());
918 // The smi index is either untagged (element size == 1), or it is left smi
919 // tagged (for all element sizes > 1).
920 // TODO(regis): Revisit and see if the index can be immediate.
921 locs->set_in(1, Location::WritableRegister());
922 if (representation() == kUnboxedDouble) {
923 locs->set_out(Location::RequiresFpuRegister());
924 } else {
925 locs->set_out(Location::RequiresRegister());
926 }
927 return locs;
858 } 928 }
859 929
860 930
861 void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 931 void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
862 UNIMPLEMENTED(); 932 Register array = locs()->in(0).reg();
933 Location index = locs()->in(1);
934
935 Address element_address(kNoRegister, 0);
936 if (IsExternal()) {
937 UNIMPLEMENTED();
938 } else {
939 ASSERT(this->array()->definition()->representation() == kTagged);
940 ASSERT(index.IsRegister()); // TODO(regis): Revisit.
 941 // Note that index is expected smi-tagged (i.e., times 2) for all arrays
942 // with index scale factor > 1. E.g., for Uint8Array and OneByteString the
943 // index is expected to be untagged before accessing.
944 ASSERT(kSmiTagShift == 1);
945 switch (index_scale()) {
946 case 1: {
947 __ SmiUntag(index.reg());
948 break;
949 }
950 case 2: {
951 break;
952 }
953 case 4: {
954 __ mov(index.reg(), ShifterOperand(index.reg(), LSL, 1));
955 break;
956 }
957 case 8: {
958 __ mov(index.reg(), ShifterOperand(index.reg(), LSL, 2));
959 break;
960 }
961 case 16: {
962 __ mov(index.reg(), ShifterOperand(index.reg(), LSL, 3));
963 break;
964 }
965 default:
966 UNREACHABLE();
967 }
968 __ AddImmediate(index.reg(),
969 FlowGraphCompiler::DataOffsetFor(class_id()) - kHeapObjectTag);
970 element_address = Address(array, index.reg(), LSL, 0);
971 }
972
973 if ((representation() == kUnboxedDouble) ||
974 (representation() == kUnboxedMint) ||
975 (representation() == kUnboxedFloat32x4)) {
976 UNIMPLEMENTED();
977 }
978
979 Register result = locs()->out().reg();
980 if ((index_scale() == 1) && index.IsRegister()) {
981 __ SmiUntag(index.reg());
982 }
983 switch (class_id()) {
984 case kTypedDataInt8ArrayCid:
985 ASSERT(index_scale() == 1);
986 __ ldrsb(result, element_address);
987 __ SmiTag(result);
988 break;
989 case kTypedDataUint8ArrayCid:
990 case kTypedDataUint8ClampedArrayCid:
991 case kExternalTypedDataUint8ArrayCid:
992 case kExternalTypedDataUint8ClampedArrayCid:
993 case kOneByteStringCid:
994 ASSERT(index_scale() == 1);
995 __ ldrb(result, element_address);
996 __ SmiTag(result);
997 break;
998 case kTypedDataInt16ArrayCid:
999 __ ldrsh(result, element_address);
1000 __ SmiTag(result);
1001 break;
1002 case kTypedDataUint16ArrayCid:
1003 case kTwoByteStringCid:
1004 __ ldrh(result, element_address);
1005 __ SmiTag(result);
1006 break;
1007 case kTypedDataInt32ArrayCid: {
1008 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptInt32Load);
1009 __ ldr(result, element_address);
1010 // Verify that the signed value in 'result' can fit inside a Smi.
1011 __ CompareImmediate(result, 0xC0000000);
1012 __ b(deopt, MI);
1013 __ SmiTag(result);
1014 }
1015 break;
1016 case kTypedDataUint32ArrayCid: {
1017 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptUint32Load);
1018 __ ldr(result, element_address);
1019 // Verify that the unsigned value in 'result' can fit inside a Smi.
1020 __ tst(result, ShifterOperand(0xC0000000));
1021 __ b(deopt, NE);
1022 __ SmiTag(result);
1023 }
1024 break;
1025 default:
1026 ASSERT((class_id() == kArrayCid) || (class_id() == kImmutableArrayCid));
1027 __ ldr(result, element_address);
1028 break;
1029 }
863 } 1030 }
864 1031
865 1032
866 Representation StoreIndexedInstr::RequiredInputRepresentation( 1033 Representation StoreIndexedInstr::RequiredInputRepresentation(
867 intptr_t idx) const { 1034 intptr_t idx) const {
868 UNIMPLEMENTED(); 1035 UNIMPLEMENTED();
869 return kTagged; 1036 return kTagged;
870 } 1037 }
871 1038
872 1039
(...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after
1036 field_reg = R2; 1203 field_reg = R2;
1037 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg)); 1204 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg));
1038 } 1205 }
1039 1206
1040 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); 1207 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()));
1041 1208
1042 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); 1209 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset());
1043 FieldAddress field_nullability_operand( 1210 FieldAddress field_nullability_operand(
1044 field_reg, Field::is_nullable_offset()); 1211 field_reg, Field::is_nullable_offset());
1045 1212
1213 if (value_cid_reg == kNoRegister) {
1214 ASSERT(!compiler->is_optimizing());
1215 value_cid_reg = R3;
1216 ASSERT((value_cid_reg != value_reg) && (field_reg != value_cid_reg));
1217 }
1218
1046 if (value_cid == kDynamicCid) { 1219 if (value_cid == kDynamicCid) {
1047 if (value_cid_reg == kNoRegister) {
1048 ASSERT(!compiler->is_optimizing());
1049 value_cid_reg = R3;
1050 ASSERT((value_cid_reg != value_reg) && (field_reg != value_cid_reg));
1051 }
1052
1053 LoadValueCid(compiler, value_cid_reg, value_reg); 1220 LoadValueCid(compiler, value_cid_reg, value_reg);
1054
1055 __ ldr(IP, field_cid_operand); 1221 __ ldr(IP, field_cid_operand);
1056 __ cmp(value_cid_reg, ShifterOperand(IP)); 1222 __ cmp(value_cid_reg, ShifterOperand(IP));
1057 __ b(&ok, EQ); 1223 __ b(&ok, EQ);
1058 __ ldr(IP, field_nullability_operand); 1224 __ ldr(IP, field_nullability_operand);
1059 __ cmp(value_cid_reg, ShifterOperand(IP)); 1225 __ cmp(value_cid_reg, ShifterOperand(IP));
1060 } else if (value_cid == kNullCid) { 1226 } else if (value_cid == kNullCid) {
1061 // TODO(regis): IP may conflict. Revisit. 1227 __ ldr(value_cid_reg, field_nullability_operand);
1062 __ ldr(IP, field_nullability_operand); 1228 __ CompareImmediate(value_cid_reg, value_cid);
1063 __ CompareImmediate(IP, value_cid);
1064 } else { 1229 } else {
1065 // TODO(regis): IP may conflict. Revisit. 1230 __ ldr(value_cid_reg, field_cid_operand);
1066 __ ldr(IP, field_cid_operand); 1231 __ CompareImmediate(value_cid_reg, value_cid);
1067 __ CompareImmediate(IP, value_cid);
1068 } 1232 }
1069 __ b(&ok, EQ); 1233 __ b(&ok, EQ);
1070 1234
1071 __ ldr(IP, field_cid_operand); 1235 __ ldr(IP, field_cid_operand);
1072 __ CompareImmediate(IP, kIllegalCid); 1236 __ CompareImmediate(IP, kIllegalCid);
1073 __ b(fail, NE); 1237 __ b(fail, NE);
1074 1238
1075 if (value_cid == kDynamicCid) { 1239 if (value_cid == kDynamicCid) {
1076 __ str(value_cid_reg, field_cid_operand); 1240 __ str(value_cid_reg, field_cid_operand);
1077 __ str(value_cid_reg, field_nullability_operand); 1241 __ str(value_cid_reg, field_nullability_operand);
(...skipping 911 matching lines...) Expand 10 before | Expand all | Expand 10 after
1989 return NULL; 2153 return NULL;
1990 } 2154 }
1991 2155
1992 2156
1993 void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2157 void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
1994 comparison()->EmitBranchCode(compiler, this); 2158 comparison()->EmitBranchCode(compiler, this);
1995 } 2159 }
1996 2160
1997 2161
1998 LocationSummary* CheckClassInstr::MakeLocationSummary() const { 2162 LocationSummary* CheckClassInstr::MakeLocationSummary() const {
1999 UNIMPLEMENTED(); 2163 const intptr_t kNumInputs = 1;
2000 return NULL; 2164 const intptr_t kNumTemps = 0;
2165 LocationSummary* summary =
2166 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall);
2167 summary->set_in(0, Location::RequiresRegister());
2168 if (!null_check()) {
2169 summary->AddTemp(Location::RequiresRegister());
2170 }
2171 return summary;
2001 } 2172 }
2002 2173
2003 2174
2004 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2175 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2005 UNIMPLEMENTED(); 2176 if (null_check()) {
2177 Label* deopt = compiler->AddDeoptStub(deopt_id(),
2178 kDeoptCheckClass);
2179 __ CompareImmediate(locs()->in(0).reg(),
2180 reinterpret_cast<intptr_t>(Object::null()));
2181 __ b(deopt, EQ);
2182 return;
2183 }
2184
2185 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) ||
2186 (unary_checks().NumberOfChecks() > 1));
2187 Register value = locs()->in(0).reg();
2188 Register temp = locs()->temp(0).reg();
2189 Label* deopt = compiler->AddDeoptStub(deopt_id(),
2190 kDeoptCheckClass);
2191 Label is_ok;
2192 intptr_t cix = 0;
2193 if (unary_checks().GetReceiverClassIdAt(cix) == kSmiCid) {
2194 __ tst(value, ShifterOperand(kSmiTagMask));
2195 __ b(&is_ok, EQ);
2196 cix++; // Skip first check.
2197 } else {
2198 __ tst(value, ShifterOperand(kSmiTagMask));
2199 __ b(deopt, EQ);
2200 }
2201 __ LoadClassId(temp, value);
2202 const intptr_t num_checks = unary_checks().NumberOfChecks();
2203 for (intptr_t i = cix; i < num_checks; i++) {
2204 ASSERT(unary_checks().GetReceiverClassIdAt(i) != kSmiCid);
2205 __ CompareImmediate(temp, unary_checks().GetReceiverClassIdAt(i));
2206 if (i == (num_checks - 1)) {
2207 __ b(deopt, NE);
2208 } else {
2209 __ b(&is_ok, EQ);
2210 }
2211 }
2212 __ Bind(&is_ok);
2006 } 2213 }
2007 2214
2008 2215
2009 LocationSummary* CheckSmiInstr::MakeLocationSummary() const { 2216 LocationSummary* CheckSmiInstr::MakeLocationSummary() const {
2010 const intptr_t kNumInputs = 1; 2217 const intptr_t kNumInputs = 1;
2011 const intptr_t kNumTemps = 0; 2218 const intptr_t kNumTemps = 0;
2012 LocationSummary* summary = 2219 LocationSummary* summary =
2013 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); 2220 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall);
2014 summary->set_in(0, Location::RequiresRegister()); 2221 summary->set_in(0, Location::RequiresRegister());
2015 return summary; 2222 return summary;
2016 } 2223 }
2017 2224
2018 2225
2019 void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2226 void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2020 Register value = locs()->in(0).reg(); 2227 Register value = locs()->in(0).reg();
2021 Label* deopt = compiler->AddDeoptStub(deopt_id(), 2228 Label* deopt = compiler->AddDeoptStub(deopt_id(),
2022 kDeoptCheckSmi); 2229 kDeoptCheckSmi);
2023 __ tst(value, ShifterOperand(kSmiTagMask)); 2230 __ tst(value, ShifterOperand(kSmiTagMask));
2024 __ b(deopt, NE); 2231 __ b(deopt, NE);
2025 } 2232 }
2026 2233
2027 2234
2028 LocationSummary* CheckArrayBoundInstr::MakeLocationSummary() const { 2235 LocationSummary* CheckArrayBoundInstr::MakeLocationSummary() const {
2029 UNIMPLEMENTED(); 2236 const intptr_t kNumInputs = 2;
2030 return NULL; 2237 const intptr_t kNumTemps = 0;
2238 LocationSummary* locs =
2239 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall);
2240 locs->set_in(0, Location::RegisterOrSmiConstant(length()));
2241 locs->set_in(1, Location::RegisterOrSmiConstant(index()));
2242 return locs;
2031 } 2243 }
2032 2244
2033 2245
2034 void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2246 void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2035 UNIMPLEMENTED(); 2247 Label* deopt = compiler->AddDeoptStub(deopt_id(),
2248 kDeoptCheckArrayBound);
2249 if (locs()->in(0).IsConstant() && locs()->in(1).IsConstant()) {
2250 // Unconditionally deoptimize for constant bounds checks because they
 2251 // occur only when index is out-of-bounds.
2252 __ b(deopt);
2253 return;
2254 }
2255
2256 if (locs()->in(1).IsConstant()) {
2257 Register length = locs()->in(0).reg();
2258 const Object& constant = locs()->in(1).constant();
2259 ASSERT(constant.IsSmi());
2260 __ CompareImmediate(length, reinterpret_cast<int32_t>(constant.raw()));
2261 __ b(deopt, LS);
2262 } else if (locs()->in(0).IsConstant()) {
2263 ASSERT(locs()->in(0).constant().IsSmi());
2264 const Smi& smi_const = Smi::Cast(locs()->in(0).constant());
2265 Register index = locs()->in(1).reg();
2266 __ CompareImmediate(index, reinterpret_cast<int32_t>(smi_const.raw()));
2267 __ b(deopt, CS);
2268 } else {
2269 Register length = locs()->in(0).reg();
2270 Register index = locs()->in(1).reg();
2271 __ cmp(index, ShifterOperand(length));
2272 __ b(deopt, CS);
2273 }
2036 } 2274 }
2037 2275
2038 2276
2039 LocationSummary* UnboxIntegerInstr::MakeLocationSummary() const { 2277 LocationSummary* UnboxIntegerInstr::MakeLocationSummary() const {
2040 UNIMPLEMENTED(); 2278 UNIMPLEMENTED();
2041 return NULL; 2279 return NULL;
2042 } 2280 }
2043 2281
2044 2282
2045 void UnboxIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2283 void UnboxIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
(...skipping 312 matching lines...) Expand 10 before | Expand all | Expand 10 after
2358 &label, 2596 &label,
2359 PcDescriptors::kOther, 2597 PcDescriptors::kOther,
2360 locs()); 2598 locs());
2361 __ Drop(2); // Discard type arguments and receiver. 2599 __ Drop(2); // Discard type arguments and receiver.
2362 } 2600 }
2363 2601
2364 } // namespace dart 2602 } // namespace dart
2365 2603
2366 #endif // defined TARGET_ARCH_ARM 2604 #endif // defined TARGET_ARCH_ARM
2367 2605
OLDNEW
« no previous file with comments | « runtime/vm/flow_graph_compiler_arm.cc ('k') | runtime/vm/stub_code_arm.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698