OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1269 matching lines...) | |
1280 } | 1280 } |
1281 #endif | 1281 #endif |
1282 | 1282 |
1283 // Restore callee-saved vfp registers. | 1283 // Restore callee-saved vfp registers. |
1284 __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg); | 1284 __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg); |
1285 | 1285 |
1286 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); | 1286 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); |
1287 } | 1287 } |
1288 | 1288 |
1289 | 1289 |
1290 // Uses registers r0 to r4. | 1290 void InstanceOfStub::Generate(MacroAssembler* masm) { |
1291 // Expected input (depending on whether args are in registers or on the stack): | 1291 Register const object = r1; // Object (lhs). |
1292 // * object: r0 or at sp + 1 * kPointerSize. | 1292 Register const function = r0; // Function (rhs). |
1293 // * function: r1 or at sp. | 1293 Register const object_map = r2; // Map of {object}. |
1294 // | 1294 Register const function_map = r3; // Map of {function}. |
1295 // An inlined call site may have been generated before calling this stub. | 1295 Register const function_prototype = r4; // Prototype of {function}. |
1296 // In this case the offset to the inline sites to patch are passed in r5 and r6. | 1296 Register const scratch = r5; |
1297 // (See LCodeGen::DoInstanceOfKnownGlobal) | |
1298 void InstanceofStub::Generate(MacroAssembler* masm) { | |
1299 // Call site inlining and patching implies arguments in registers. | |
1300 DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck()); | |
1301 | 1297 |
1302 // Fixed register usage throughout the stub: | 1298 DCHECK(object.is(InstanceOfDescriptor::LeftRegister())); |
1303 const Register object = r0; // Object (lhs). | 1299 DCHECK(function.is(InstanceOfDescriptor::RightRegister())); |
1304 Register map = r3; // Map of the object. | |
1305 const Register function = r1; // Function (rhs). | |
1306 const Register prototype = r4; // Prototype of the function. | |
1307 const Register scratch = r2; | |
1308 | 1300 |
1309 Label slow, loop, is_instance, is_not_instance, not_js_object; | 1301 // Check if {object} is a smi. |
1302 Label object_is_smi; | |
1303 __ JumpIfSmi(object, &object_is_smi); | |
1310 | 1304 |
1311 if (!HasArgsInRegisters()) { | 1305 // Lookup the {function} and the {object} map in the global instanceof cache. |
Yang 2015/08/24 07:31:06: Please add a comment here or somewhere appropriate
Benedikt Meurer 2015/08/24 07:37:46: Done.
1312 __ ldr(object, MemOperand(sp, 1 * kPointerSize)); | 1306 Label fast_case, slow_case; |
1313 __ ldr(function, MemOperand(sp, 0)); | 1307 __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset)); |
1314 } | 1308 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex); |
1309 __ b(ne, &fast_case); | |
1310 __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex); | |
1311 __ b(ne, &fast_case); | |
1312 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | |
1313 __ Ret(); | |
1315 | 1314 |
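The lookup above consults V8's one-entry global instanceof cache: three heap roots (InstanceofCacheFunction, InstanceofCacheMap, InstanceofCacheAnswer) memoize the answer for the most recent (function, receiver map) pair. A minimal standalone sketch of that fast path, using stand-in C++ types rather than the real heap roots:

```cpp
#include <cstdio>

// Stand-in for the three Heap roots; not real V8 types.
struct Cache {
  const void* function = nullptr;
  const void* map = nullptr;
  bool answer = false;
};
Cache instanceof_cache;

bool CachedInstanceOf(const void* function, const void* map,
                      bool (*compute)(const void*, const void*)) {
  if (instanceof_cache.function == function && instanceof_cache.map == map) {
    return instanceof_cache.answer;            // LoadRoot(CacheAnswer); Ret
  }
  bool answer = compute(function, map);        // fall through to fast_case
  instanceof_cache = {function, map, answer};  // the two StoreRoot calls
  return answer;
}

int main() {
  int function = 0, map = 0;
  auto compute = [](const void*, const void*) { return true; };
  std::printf("%d\n", CachedInstanceOf(&function, &map, compute));  // computed
  std::printf("%d\n", CachedInstanceOf(&function, &map, compute));  // cache hit
}
```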
1316 // Check that the left hand is a JS object and load map. | 1315 // If {object} is a smi we can safely return false if {function} is a JS |
1317 __ JumpIfSmi(object, ¬_js_object); | 1316 // function, otherwise we have to miss to the runtime and throw an exception. |
1318 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); | 1317 __ bind(&object_is_smi); |
1318 __ JumpIfSmi(function, &slow_case); | |
1319 __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE); | |
1320 __ b(ne, &slow_case); | |
1321 __ LoadRoot(r0, Heap::kFalseValueRootIndex); | |
1322 __ Ret(); | |
1319 | 1323 |
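A smi receiver is an instance of nothing, but the stub may only return false outright when the right-hand side is a plain JSFunction; any other right-hand side has to miss to the runtime, which throws for non-callables (e.g. `1 instanceof {}` is a TypeError). A sketch of that decision, with a stand-in enum instead of the stub's real type checks:

```cpp
#include <cstdio>
#include <optional>

enum class RhsKind { kJSFunction, kOther };

// nullopt means "miss to the slow case"; the runtime then throws a
// TypeError for a non-callable right-hand side.
std::optional<bool> SmiLhsInstanceOf(RhsKind rhs) {
  if (rhs == RhsKind::kJSFunction) return false;  // LoadRoot(kFalseValue); Ret
  return std::nullopt;                            // b(ne, &slow_case)
}

int main() {
  std::printf("%d\n", *SmiLhsInstanceOf(RhsKind::kJSFunction));        // 0
  std::printf("%d\n", SmiLhsInstanceOf(RhsKind::kOther).has_value());  // 0
}
```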
1320 // If there is a call site cache don't look in the global cache, but do the | 1324 // Fast-case: The {function} must be a valid JSFunction. |
1321 // real lookup and update the call site cache. | 1325 __ bind(&fast_case); |
1322 if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) { | 1326 __ JumpIfSmi(function, &slow_case); |
1323 Label miss; | 1327 __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE); |
1324 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex); | 1328 __ b(ne, &slow_case); |
1325 __ b(ne, &miss); | |
1326 __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex); | |
1327 __ b(ne, &miss); | |
1328 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | |
1329 __ Ret(HasArgsInRegisters() ? 0 : 2); | |
1330 | 1329 |
1331 __ bind(&miss); | 1330 // Ensure that {function} has an instance prototype. |
1332 } | 1331 __ ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset)); |
1332 __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype)); | |
1333 __ b(ne, &slow_case); | |
1333 | 1334 |
1334 // Get the prototype of the function. | 1335 // Ensure that {function} is not bound. |
1335 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); | 1336 Register const shared_info = scratch; |
1337 __ ldr(shared_info, | |
1338 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); | |
1339 __ ldr(scratch, FieldMemOperand(shared_info, | |
1340 SharedFunctionInfo::kCompilerHintsOffset)); | |
1341 __ tst(scratch, | |
1342 Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction))); | |
1343 __ b(ne, &slow_case); | |
1336 | 1344 |
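The bit test above operates on a smi-tagged word: SharedFunctionInfo's compiler hints are stored as a smi, so the untagged mask `1 << kBoundFunction` must itself be smi-tagged before the `tst`. Bound functions miss to the runtime because instanceof has to unwrap them to their bound target first. A sketch of the tagged bit test; the one-bit tag matches 32-bit V8, while the bit index here is an illustrative assumption, not the real constant:

```cpp
#include <cstdint>
#include <cstdio>

// kSmiTagSize matches 32-bit V8 (one tag bit); the bit index is an
// illustrative assumption, not the real kBoundFunction value.
constexpr int kSmiTagSize = 1;
constexpr int kBoundFunction = 4;

constexpr int32_t SmiFromInt(int32_t value) { return value << kSmiTagSize; }

bool IsBoundFunction(int32_t tagged_compiler_hints) {
  // Mirrors __ tst(scratch, Operand(Smi::FromInt(1 << kBoundFunction))):
  // the mask is tagged the same way as the stored hints word.
  return (tagged_compiler_hints & SmiFromInt(1 << kBoundFunction)) != 0;
}

int main() {
  std::printf("%d\n", IsBoundFunction(SmiFromInt(1 << kBoundFunction)));  // 1
  std::printf("%d\n", IsBoundFunction(SmiFromInt(0)));                    // 0
}
```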
1337 // Check that the function prototype is a JS object. | 1345 // Get the "prototype" (or initial map) of the {function}. |
1338 __ JumpIfSmi(prototype, &slow); | 1346 __ ldr(function_prototype, |
1339 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 1347 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
1348 __ AssertNotSmi(function_prototype); | |
1340 | 1349 |
1341 // Update the global instanceof or call site inlined cache with the current | 1350 // Resolve the prototype if the {function} has an initial map. Afterwards the |
1342 // map and function. The cached answer will be set when it is known below. | 1351 // {function_prototype} will be either the JSReceiver prototype object or the |
1343 if (!HasCallSiteInlineCheck()) { | 1352 // hole value, which means that no instances of the {function} were created so |
1344 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); | 1353 // far and hence we should return false. |
1345 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); | 1354 Label function_prototype_valid; |
1346 } else { | 1355 __ CompareObjectType(function_prototype, scratch, scratch, MAP_TYPE); |
1347 DCHECK(HasArgsInRegisters()); | 1356 __ b(ne, &function_prototype_valid); |
1348 // Patch the (relocated) inlined map check. | 1357 __ ldr(function_prototype, |
1358 FieldMemOperand(function_prototype, Map::kPrototypeOffset)); | |
1359 __ bind(&function_prototype_valid); | |
1360 __ AssertNotSmi(function_prototype); | |
1349 | 1361 |
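The `kPrototypeOrInitialMapOffset` slot holds either the prototype object directly or, once instances of the function exist, the function's initial map, in which case the prototype is one more load away (and may be the hole value if no instances were ever created). A stand-in model of that resolution step, with `std::variant` playing the role of the stub's dynamic MAP_TYPE check:

```cpp
#include <cassert>
#include <variant>

struct JSObject {};
struct Map { JSObject* prototype; };

// std::variant stands in for the stub's CompareObjectType(..., MAP_TYPE)
// dynamic check; these are simplified stand-ins, not real V8 types.
using PrototypeOrInitialMap = std::variant<JSObject*, Map*>;

JSObject* ResolvePrototype(const PrototypeOrInitialMap& slot) {
  if (auto* initial_map = std::get_if<Map*>(&slot)) {
    return (*initial_map)->prototype;  // extra ldr via Map::kPrototypeOffset
  }
  return std::get<JSObject*>(slot);    // slot already holds the prototype
}

int main() {
  JSObject prototype;
  Map initial_map{&prototype};
  assert(ResolvePrototype(PrototypeOrInitialMap{&prototype}) == &prototype);
  assert(ResolvePrototype(PrototypeOrInitialMap{&initial_map}) == &prototype);
}
```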
1350 // The map_load_offset was stored in r5 | 1362 // Update the global instanceof cache with the current {object} map and |
1351 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). | 1363 // {function}. The cached answer will be set when it is known below. |
1352 const Register map_load_offset = r5; | 1364 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); |
1353 __ sub(r9, lr, map_load_offset); | 1365 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex); |
1354 // Get the map location in r5 and patch it. | |
1355 __ GetRelocatedValueLocation(r9, map_load_offset, scratch); | |
1356 __ ldr(map_load_offset, MemOperand(map_load_offset)); | |
1357 __ str(map, FieldMemOperand(map_load_offset, Cell::kValueOffset)); | |
1358 | 1366 |
1359 __ mov(scratch, map); | 1367 // Loop through the prototype chain looking for the {function} prototype. |
1360 // |map_load_offset| points at the beginning of the cell. Calculate the | 1368 // Assume true, and change to false if not found. |
1361 // field containing the map. | 1369 Register const object_prototype = object_map; |
1362 __ add(function, map_load_offset, Operand(Cell::kValueOffset - 1)); | 1370 Register const null = scratch; |
1363 __ RecordWriteField(map_load_offset, Cell::kValueOffset, scratch, function, | 1371 Label done, loop; |
1364 kLRHasNotBeenSaved, kDontSaveFPRegs, | 1372 __ LoadRoot(r0, Heap::kTrueValueRootIndex); |
1365 OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 1373 __ LoadRoot(null, Heap::kNullValueRootIndex); |
1366 } | 1374 __ bind(&loop); |
1375 __ ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset)); | |
1376 __ cmp(object_prototype, function_prototype); | |
1377 __ b(eq, &done); | |
1378 __ cmp(object_prototype, null); | |
1379 __ ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset)); | |
1380 __ b(ne, &loop); | |
1381 __ LoadRoot(r0, Heap::kFalseValueRootIndex); | |
1382 __ bind(&done); | |
1383 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | |
1384 __ Ret(); | |
1367 | 1385 |
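The loop above is the heart of the stub: starting from the receiver's map, follow `Map::kPrototypeOffset` links and answer true if `function_prototype` shows up before the null value terminates the chain. A standalone model with simplified stand-in types, `nullptr` playing the role of the null root:

```cpp
#include <cstdio>

struct Map;
struct Object { Map* map; };
struct Map { Object* prototype; };  // nullptr stands in for the null root

bool HasInPrototypeChain(Object* object, Object* function_prototype) {
  for (Map* map = object->map;;) {
    Object* object_prototype = map->prototype;  // ldr via Map::kPrototypeOffset
    if (object_prototype == function_prototype) return true;  // b(eq, &done)
    if (object_prototype == nullptr) return false;  // reached the null root
    map = object_prototype->map;                    // reload the map and loop
  }
}

int main() {
  Map prototype_map{nullptr};
  Object prototype{&prototype_map};
  Map object_map{&prototype};
  Object object{&object_map};
  std::printf("%d %d\n", HasInPrototypeChain(&object, &prototype),
              HasInPrototypeChain(&prototype, &object));  // prints "1 0"
}
```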
1368 // Register mapping: r3 is object map and r4 is function prototype. | 1386 // Slow-case: Call the runtime function. |
1369 // Get prototype of object into r2. | 1387 __ bind(&slow_case); |
1370 __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); | 1388 __ Push(object, function); |
1371 | 1389 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1); |
1372 // We don't need map any more. Use it as a scratch register. | |
1373 Register scratch2 = map; | |
1374 map = no_reg; | |
1375 | |
1376 // Loop through the prototype chain looking for the function prototype. | |
1377 __ LoadRoot(scratch2, Heap::kNullValueRootIndex); | |
1378 __ bind(&loop); | |
1379 __ cmp(scratch, Operand(prototype)); | |
1380 __ b(eq, &is_instance); | |
1381 __ cmp(scratch, scratch2); | |
1382 __ b(eq, &is_not_instance); | |
1383 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
1384 __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); | |
1385 __ jmp(&loop); | |
1386 Factory* factory = isolate()->factory(); | |
1387 | |
1388 __ bind(&is_instance); | |
1389 if (!HasCallSiteInlineCheck()) { | |
1390 __ mov(r0, Operand(Smi::FromInt(0))); | |
1391 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | |
1392 if (ReturnTrueFalseObject()) { | |
1393 __ Move(r0, factory->true_value()); | |
1394 } | |
1395 } else { | |
1396 // Patch the call site to return true. | |
1397 __ LoadRoot(r0, Heap::kTrueValueRootIndex); | |
1398 // The bool_load_offset was stored in r6 | |
1399 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). | |
1400 const Register bool_load_offset = r6; | |
1401 __ sub(r9, lr, bool_load_offset); | |
1402 // Get the boolean result location in scratch and patch it. | |
1403 __ GetRelocatedValueLocation(r9, scratch, scratch2); | |
1404 __ str(r0, MemOperand(scratch)); | |
1405 | |
1406 if (!ReturnTrueFalseObject()) { | |
1407 __ mov(r0, Operand(Smi::FromInt(0))); | |
1408 } | |
1409 } | |
1410 __ Ret(HasArgsInRegisters() ? 0 : 2); | |
1411 | |
1412 __ bind(&is_not_instance); | |
1413 if (!HasCallSiteInlineCheck()) { | |
1414 __ mov(r0, Operand(Smi::FromInt(1))); | |
1415 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | |
1416 if (ReturnTrueFalseObject()) { | |
1417 __ Move(r0, factory->false_value()); | |
1418 } | |
1419 } else { | |
1420 // Patch the call site to return false. | |
1421 __ LoadRoot(r0, Heap::kFalseValueRootIndex); | |
1422 // The bool_load_offset was stored in r6 | |
1423 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). | |
1424 const Register bool_load_offset = r6; | |
1425 __ sub(r9, lr, bool_load_offset); | |
1426 ; | |
1427 // Get the boolean result location in scratch and patch it. | |
1428 __ GetRelocatedValueLocation(r9, scratch, scratch2); | |
1429 __ str(r0, MemOperand(scratch)); | |
1430 | |
1431 if (!ReturnTrueFalseObject()) { | |
1432 __ mov(r0, Operand(Smi::FromInt(1))); | |
1433 } | |
1434 } | |
1435 __ Ret(HasArgsInRegisters() ? 0 : 2); | |
1436 | |
1437 Label object_not_null, object_not_null_or_smi; | |
1438 __ bind(¬_js_object); | |
1439 // Before null, smi and string value checks, check that the rhs is a function | |
1440 // as for a non-function rhs an exception needs to be thrown. | |
1441 __ JumpIfSmi(function, &slow); | |
1442 __ CompareObjectType(function, scratch2, scratch, JS_FUNCTION_TYPE); | |
1443 __ b(ne, &slow); | |
1444 | |
1445 // Null is not instance of anything. | |
1446 __ cmp(object, Operand(isolate()->factory()->null_value())); | |
1447 __ b(ne, &object_not_null); | |
1448 if (ReturnTrueFalseObject()) { | |
1449 __ Move(r0, factory->false_value()); | |
1450 } else { | |
1451 __ mov(r0, Operand(Smi::FromInt(1))); | |
1452 } | |
1453 __ Ret(HasArgsInRegisters() ? 0 : 2); | |
1454 | |
1455 __ bind(&object_not_null); | |
1456 // Smi values are not instances of anything. | |
1457 __ JumpIfNotSmi(object, &object_not_null_or_smi); | |
1458 if (ReturnTrueFalseObject()) { | |
1459 __ Move(r0, factory->false_value()); | |
1460 } else { | |
1461 __ mov(r0, Operand(Smi::FromInt(1))); | |
1462 } | |
1463 __ Ret(HasArgsInRegisters() ? 0 : 2); | |
1464 | |
1465 __ bind(&object_not_null_or_smi); | |
1466 // String values are not instances of anything. | |
1467 __ IsObjectJSStringType(object, scratch, &slow); | |
1468 if (ReturnTrueFalseObject()) { | |
1469 __ Move(r0, factory->false_value()); | |
1470 } else { | |
1471 __ mov(r0, Operand(Smi::FromInt(1))); | |
1472 } | |
1473 __ Ret(HasArgsInRegisters() ? 0 : 2); | |
1474 | |
1475 // Slow-case. Tail call builtin. | |
1476 __ bind(&slow); | |
1477 if (!ReturnTrueFalseObject()) { | |
1478 if (HasArgsInRegisters()) { | |
1479 __ Push(r0, r1); | |
1480 } | |
1481 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | |
1482 } else { | |
1483 { | |
1484 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
1485 __ Push(r0, r1); | |
1486 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | |
1487 } | |
1488 __ cmp(r0, Operand::Zero()); | |
1489 __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); | |
1490 __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); | |
1491 __ Ret(HasArgsInRegisters() ? 0 : 2); | |
1492 } | |
1493 } | 1390 } |
1494 | 1391 |
1495 | 1392 |
1496 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 1393 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
1497 Label miss; | 1394 Label miss; |
1498 Register receiver = LoadDescriptor::ReceiverRegister(); | 1395 Register receiver = LoadDescriptor::ReceiverRegister(); |
1499 // Ensure that the vector and slot registers won't be clobbered before | 1396 // Ensure that the vector and slot registers won't be clobbered before |
1500 // calling the miss handler. | 1397 // calling the miss handler. |
1501 DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(), | 1398 DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(), |
1502 LoadWithVectorDescriptor::SlotRegister())); | 1399 LoadWithVectorDescriptor::SlotRegister())); |
(...skipping 4025 matching lines...) | |
5528 MemOperand(fp, 6 * kPointerSize), NULL); | 5425 MemOperand(fp, 6 * kPointerSize), NULL); |
5529 } | 5426 } |
5530 | 5427 |
5531 | 5428 |
5532 #undef __ | 5429 #undef __ |
5533 | 5430 |
5534 } // namespace internal | 5431 } // namespace internal |
5535 } // namespace v8 | 5432 } // namespace v8 |
5536 | 5433 |
5537 #endif // V8_TARGET_ARCH_ARM | 5434 #endif // V8_TARGET_ARCH_ARM |