OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 157 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
168 } | 168 } |
169 | 169 |
170 | 170 |
171 void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( | 171 void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( |
172 Isolate* isolate, | 172 Isolate* isolate, |
173 CodeStubInterfaceDescriptor* descriptor) { | 173 CodeStubInterfaceDescriptor* descriptor) { |
174 InitializeArrayConstructorDescriptor(isolate, descriptor, -1); | 174 InitializeArrayConstructorDescriptor(isolate, descriptor, -1); |
175 } | 175 } |
176 | 176 |
177 | 177 |
| 178 void ToBooleanStub::InitializeInterfaceDescriptor( |
| 179 Isolate* isolate, |
| 180 CodeStubInterfaceDescriptor* descriptor) { |
| 181 static Register registers[] = { r0 }; |
| 182 descriptor->register_param_count_ = 1; |
| 183 descriptor->register_params_ = registers; |
| 184 descriptor->deoptimization_handler_ = |
| 185 FUNCTION_ADDR(ToBooleanIC_Miss); |
| 186 descriptor->SetMissHandler( |
| 187 ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); |
| 188 } |
| 189 |
| 190 |
178 #define __ ACCESS_MASM(masm) | 191 #define __ ACCESS_MASM(masm) |
179 | 192 |
180 static void EmitIdenticalObjectComparison(MacroAssembler* masm, | 193 static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
181 Label* slow, | 194 Label* slow, |
182 Condition cond); | 195 Condition cond); |
183 static void EmitSmiNonsmiComparison(MacroAssembler* masm, | 196 static void EmitSmiNonsmiComparison(MacroAssembler* masm, |
184 Register lhs, | 197 Register lhs, |
185 Register rhs, | 198 Register rhs, |
186 Label* lhs_not_nan, | 199 Label* lhs_not_nan, |
187 Label* slow, | 200 Label* slow, |
(...skipping 1012 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1200 | 1213 |
1201 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 1214 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
1202 // tagged as a small integer. | 1215 // tagged as a small integer. |
1203 __ InvokeBuiltin(native, JUMP_FUNCTION); | 1216 __ InvokeBuiltin(native, JUMP_FUNCTION); |
1204 | 1217 |
1205 __ bind(&miss); | 1218 __ bind(&miss); |
1206 GenerateMiss(masm); | 1219 GenerateMiss(masm); |
1207 } | 1220 } |
1208 | 1221 |
1209 | 1222 |
// The stub expects its argument in the tos_ register and returns its result in
// it, too: zero for false, and a non-zero value for true.
// Emits one check per type recorded in types_; each check returns directly on
// a match, and any value of an unexpected type falls through to the patch
// path, which specializes the stub via the runtime.
void ToBooleanStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  Label patch;
  // Pick a scratch register for the map that cannot alias tos_.
  const Register map = r9.is(tos_) ? r7 : r9;

  // undefined -> false.
  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

  // Boolean -> its value.
  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);

  // 'null' -> false.
  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);

  if (types_.Contains(SMI)) {
    // Smis: 0 -> false, all other -> true
    __ SmiTst(tos_);
    // tos_ contains the correct return value already
    __ Ret(eq);
  } else if (types_.NeedsMap()) {
    // If we need a map later and have a Smi -> patch.
    __ JumpIfSmi(tos_, &patch);
  }

  if (types_.NeedsMap()) {
    __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset));

    if (types_.CanBeUndetectable()) {
      // Test the undetectable bit in the map's bit field.
      __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
      __ tst(ip, Operand(1 << Map::kIsUndetectable));
      // Undetectable -> false.
      __ mov(tos_, Operand::Zero(), LeaveCC, ne);
      __ Ret(ne);
    }
  }

  if (types_.Contains(SPEC_OBJECT)) {
    // Spec object -> true.
    __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
    // tos_ contains the correct non-zero return value already.
    __ Ret(ge);
  }

  if (types_.Contains(STRING)) {
    // String value -> false iff empty.
    __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
    // The length is a Smi, so zero length yields the zero (false) result.
    __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset), lt);
    __ Ret(lt);  // the string length is OK as the return value
  }

  if (types_.Contains(HEAP_NUMBER)) {
    // Heap number -> false iff +0, -0, or NaN.
    Label not_heap_number;
    __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
    __ b(ne, &not_heap_number);

    __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
    __ VFPCompareAndSetFlags(d1, 0.0);
    // "tos_" is a register, and contains a non zero value by default.
    // Hence we only need to overwrite "tos_" with zero to return false for
    // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
    __ mov(tos_, Operand::Zero(), LeaveCC, eq);  // for FP_ZERO
    __ mov(tos_, Operand::Zero(), LeaveCC, vs);  // for FP_NAN
    __ Ret();
    __ bind(&not_heap_number);
  }

  // A type we were not compiled for: go specialize the stub.
  __ bind(&patch);
  GenerateTypeTransition(masm);
}
1284 | |
1285 | |
// Emits a comparison of tos_ against the oddball root |value| (undefined,
// true, false, or null). If |type| is not in the expected type set, emits
// nothing. On a match, returns from the stub with the boolean |result|
// encoded in tos_ (zero for false, unchanged non-null oddball for true).
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
                                 Type type,
                                 Heap::RootListIndex value,
                                 bool result) {
  if (types_.Contains(type)) {
    // If we see an expected oddball, return its ToBoolean value tos_.
    __ LoadRoot(ip, value);
    __ cmp(tos_, ip);
    // The value of a root is never NULL, so we can avoid loading a non-null
    // value into tos_ when we want to return 'true'.
    if (!result) {
      // Conditional move: only clobber tos_ when the comparison matched.
      __ mov(tos_, Operand::Zero(), LeaveCC, eq);
    }
    __ Ret(eq);
  }
}
1302 | |
1303 | |
// Tail-calls the ToBoolean patch runtime function, passing the value, the
// register it lives in (as a Smi), and the set of types seen so far (as a
// Smi bit field), so the caller can be patched to a specialized stub.
void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
  // The runtime call expects the value in r3; move it there if needed.
  if (!tos_.is(r3)) {
    __ mov(r3, Operand(tos_));
  }
  __ mov(r2, Operand(Smi::FromInt(tos_.code())));
  __ mov(r1, Operand(Smi::FromInt(types_.ToByte())));
  __ Push(r3, r2, r1);
  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
      3,
      1);
}
1318 | |
1319 | |
1320 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 1223 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
1321 // We don't allow a GC during a store buffer overflow so there is no need to | 1224 // We don't allow a GC during a store buffer overflow so there is no need to |
1322 // store the registers in any particular way, but we do have to store and | 1225 // store the registers in any particular way, but we do have to store and |
1323 // restore them. | 1226 // restore them. |
1324 __ stm(db_w, sp, kCallerSaved | lr.bit()); | 1227 __ stm(db_w, sp, kCallerSaved | lr.bit()); |
1325 | 1228 |
1326 const Register scratch = r1; | 1229 const Register scratch = r1; |
1327 | 1230 |
1328 if (save_doubles_ == kSaveFPRegs) { | 1231 if (save_doubles_ == kSaveFPRegs) { |
1329 __ SaveFPRegs(sp, scratch); | 1232 __ SaveFPRegs(sp, scratch); |
(...skipping 6104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7434 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | 7337 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); |
7435 } | 7338 } |
7436 } | 7339 } |
7437 | 7340 |
7438 | 7341 |
7439 #undef __ | 7342 #undef __ |
7440 | 7343 |
7441 } } // namespace v8::internal | 7344 } } // namespace v8::internal |
7442 | 7345 |
7443 #endif // V8_TARGET_ARCH_ARM | 7346 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |