Chromium Code Reviews
Unified Diff: src/arm/code-stubs-arm.cc

Issue 1304633002: Correctify instanceof and make it optimizable. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: REBASE. Add MIPS/MIPS64 ports. Created 5 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #if V8_TARGET_ARCH_ARM
 
 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
(...skipping 1269 matching lines...)
   }
 #endif
 
   // Restore callee-saved vfp registers.
   __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
 
   __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
 }
 
 
-// Uses registers r0 to r4.
-// Expected input (depending on whether args are in registers or on the stack):
-// * object: r0 or at sp + 1 * kPointerSize.
-// * function: r1 or at sp.
-//
-// An inlined call site may have been generated before calling this stub.
-// In this case the offset to the inline sites to patch are passed in r5 and r6.
-// (See LCodeGen::DoInstanceOfKnownGlobal)
-void InstanceofStub::Generate(MacroAssembler* masm) {
-  // Call site inlining and patching implies arguments in registers.
-  DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
-
-  // Fixed register usage throughout the stub:
-  const Register object = r0;  // Object (lhs).
-  Register map = r3;  // Map of the object.
-  const Register function = r1;  // Function (rhs).
-  const Register prototype = r4;  // Prototype of the function.
-  const Register scratch = r2;
-
-  Label slow, loop, is_instance, is_not_instance, not_js_object;
-
-  if (!HasArgsInRegisters()) {
-    __ ldr(object, MemOperand(sp, 1 * kPointerSize));
-    __ ldr(function, MemOperand(sp, 0));
-  }
-
-  // Check that the left hand is a JS object and load map.
-  __ JumpIfSmi(object, &not_js_object);
-  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
-
-  // If there is a call site cache don't look in the global cache, but do the
-  // real lookup and update the call site cache.
-  if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
-    Label miss;
-    __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
-    __ b(ne, &miss);
-    __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
-    __ b(ne, &miss);
-    __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
-    __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-    __ bind(&miss);
-  }
-
-  // Get the prototype of the function.
-  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
-
-  // Check that the function prototype is a JS object.
-  __ JumpIfSmi(prototype, &slow);
-  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
-
-  // Update the global instanceof or call site inlined cache with the current
-  // map and function. The cached answer will be set when it is known below.
-  if (!HasCallSiteInlineCheck()) {
-    __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
-    __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
-  } else {
-    DCHECK(HasArgsInRegisters());
-    // Patch the (relocated) inlined map check.
-
-    // The map_load_offset was stored in r5
-    // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
-    const Register map_load_offset = r5;
-    __ sub(r9, lr, map_load_offset);
-    // Get the map location in r5 and patch it.
-    __ GetRelocatedValueLocation(r9, map_load_offset, scratch);
-    __ ldr(map_load_offset, MemOperand(map_load_offset));
-    __ str(map, FieldMemOperand(map_load_offset, Cell::kValueOffset));
-
-    __ mov(scratch, map);
-    // |map_load_offset| points at the beginning of the cell. Calculate the
-    // field containing the map.
-    __ add(function, map_load_offset, Operand(Cell::kValueOffset - 1));
-    __ RecordWriteField(map_load_offset, Cell::kValueOffset, scratch, function,
-                        kLRHasNotBeenSaved, kDontSaveFPRegs,
-                        OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-  }
-
-  // Register mapping: r3 is object map and r4 is function prototype.
-  // Get prototype of object into r2.
-  __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
-
-  // We don't need map any more. Use it as a scratch register.
-  Register scratch2 = map;
-  map = no_reg;
-
-  // Loop through the prototype chain looking for the function prototype.
-  __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
-  __ bind(&loop);
-  __ cmp(scratch, Operand(prototype));
-  __ b(eq, &is_instance);
-  __ cmp(scratch, scratch2);
-  __ b(eq, &is_not_instance);
-  __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
-  __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
-  __ jmp(&loop);
-  Factory* factory = isolate()->factory();
-
-  __ bind(&is_instance);
-  if (!HasCallSiteInlineCheck()) {
-    __ mov(r0, Operand(Smi::FromInt(0)));
-    __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
-    if (ReturnTrueFalseObject()) {
-      __ Move(r0, factory->true_value());
-    }
-  } else {
-    // Patch the call site to return true.
-    __ LoadRoot(r0, Heap::kTrueValueRootIndex);
-    // The bool_load_offset was stored in r6
-    // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
-    const Register bool_load_offset = r6;
-    __ sub(r9, lr, bool_load_offset);
-    // Get the boolean result location in scratch and patch it.
-    __ GetRelocatedValueLocation(r9, scratch, scratch2);
-    __ str(r0, MemOperand(scratch));
-
-    if (!ReturnTrueFalseObject()) {
-      __ mov(r0, Operand(Smi::FromInt(0)));
-    }
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  __ bind(&is_not_instance);
-  if (!HasCallSiteInlineCheck()) {
-    __ mov(r0, Operand(Smi::FromInt(1)));
-    __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
-    if (ReturnTrueFalseObject()) {
-      __ Move(r0, factory->false_value());
-    }
-  } else {
-    // Patch the call site to return false.
-    __ LoadRoot(r0, Heap::kFalseValueRootIndex);
-    // The bool_load_offset was stored in r6
-    // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
-    const Register bool_load_offset = r6;
-    __ sub(r9, lr, bool_load_offset);
-    ;
-    // Get the boolean result location in scratch and patch it.
-    __ GetRelocatedValueLocation(r9, scratch, scratch2);
-    __ str(r0, MemOperand(scratch));
-
-    if (!ReturnTrueFalseObject()) {
-      __ mov(r0, Operand(Smi::FromInt(1)));
-    }
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  Label object_not_null, object_not_null_or_smi;
-  __ bind(&not_js_object);
-  // Before null, smi and string value checks, check that the rhs is a function
-  // as for a non-function rhs an exception needs to be thrown.
-  __ JumpIfSmi(function, &slow);
-  __ CompareObjectType(function, scratch2, scratch, JS_FUNCTION_TYPE);
-  __ b(ne, &slow);
-
-  // Null is not instance of anything.
-  __ cmp(object, Operand(isolate()->factory()->null_value()));
-  __ b(ne, &object_not_null);
-  if (ReturnTrueFalseObject()) {
-    __ Move(r0, factory->false_value());
-  } else {
-    __ mov(r0, Operand(Smi::FromInt(1)));
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  __ bind(&object_not_null);
-  // Smi values are not instances of anything.
-  __ JumpIfNotSmi(object, &object_not_null_or_smi);
-  if (ReturnTrueFalseObject()) {
-    __ Move(r0, factory->false_value());
-  } else {
-    __ mov(r0, Operand(Smi::FromInt(1)));
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  __ bind(&object_not_null_or_smi);
-  // String values are not instances of anything.
-  __ IsObjectJSStringType(object, scratch, &slow);
-  if (ReturnTrueFalseObject()) {
-    __ Move(r0, factory->false_value());
-  } else {
-    __ mov(r0, Operand(Smi::FromInt(1)));
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  // Slow-case. Tail call builtin.
-  __ bind(&slow);
-  if (!ReturnTrueFalseObject()) {
-    if (HasArgsInRegisters()) {
-      __ Push(r0, r1);
-    }
-    __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
-  } else {
-    {
-      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-      __ Push(r0, r1);
-      __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
-    }
-    __ cmp(r0, Operand::Zero());
-    __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
-    __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
-    __ Ret(HasArgsInRegisters() ? 0 : 2);
-  }
-}
+void InstanceOfStub::Generate(MacroAssembler* masm) {
+  Register const object = r1;              // Object (lhs).
+  Register const function = r0;            // Function (rhs).
+  Register const object_map = r2;          // Map of {object}.
+  Register const function_map = r3;        // Map of {function}.
+  Register const function_prototype = r4;  // Prototype of {function}.
+  Register const scratch = r5;
+
+  DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
+  DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
+
+  // Check if {object} is a smi.
+  Label object_is_smi;
+  __ JumpIfSmi(object, &object_is_smi);
+
+  // Lookup the {function} and the {object} map in the global instanceof cache.
+  // Note: This is safe because we clear the global instanceof cache whenever
+  // we change the prototype of any object.
+  Label fast_case, slow_case;
+  __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
+  __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
+  __ b(ne, &fast_case);
+  __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
+  __ b(ne, &fast_case);
+  __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+  __ Ret();
+
+  // If {object} is a smi we can safely return false if {function} is a JS
+  // function, otherwise we have to miss to the runtime and throw an exception.
+  __ bind(&object_is_smi);
+  __ JumpIfSmi(function, &slow_case);
+  __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
+  __ b(ne, &slow_case);
+  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
+  __ Ret();
+
+  // Fast-case: The {function} must be a valid JSFunction.
+  __ bind(&fast_case);
+  __ JumpIfSmi(function, &slow_case);
+  __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
+  __ b(ne, &slow_case);
+
+  // Ensure that {function} has an instance prototype.
+  __ ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
+  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
+  __ b(ne, &slow_case);
+
+  // Ensure that {function} is not bound.
+  Register const shared_info = scratch;
+  __ ldr(shared_info,
+         FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
+  __ ldr(scratch, FieldMemOperand(shared_info,
+                                  SharedFunctionInfo::kCompilerHintsOffset));
+  __ tst(scratch,
+         Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
+  __ b(ne, &slow_case);
+
+  // Get the "prototype" (or initial map) of the {function}.
+  __ ldr(function_prototype,
+         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
+  __ AssertNotSmi(function_prototype);
+
+  // Resolve the prototype if the {function} has an initial map. Afterwards the
+  // {function_prototype} will be either the JSReceiver prototype object or the
+  // hole value, which means that no instances of the {function} were created so
+  // far and hence we should return false.
+  Label function_prototype_valid;
+  __ CompareObjectType(function_prototype, scratch, scratch, MAP_TYPE);
+  __ b(ne, &function_prototype_valid);
+  __ ldr(function_prototype,
+         FieldMemOperand(function_prototype, Map::kPrototypeOffset));
+  __ bind(&function_prototype_valid);
+  __ AssertNotSmi(function_prototype);
+
+  // Update the global instanceof cache with the current {object} map and
+  // {function}. The cached answer will be set when it is known below.
+  __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
+  __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
+
+  // Loop through the prototype chain looking for the {function} prototype.
+  // Assume true, and change to false if not found.
+  Register const object_prototype = object_map;
+  Register const null = scratch;
+  Label done, loop;
+  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
+  __ LoadRoot(null, Heap::kNullValueRootIndex);
+  __ bind(&loop);
+  __ ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
+  __ cmp(object_prototype, function_prototype);
+  __ b(eq, &done);
+  __ cmp(object_prototype, null);
+  __ ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
+  __ b(ne, &loop);
+  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
+  __ bind(&done);
+  __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+  __ Ret();
+
+  // Slow-case: Call the runtime function.
+  __ bind(&slow_case);
+  __ Push(object, function);
+  __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
 }
 
 
 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
   Label miss;
   Register receiver = LoadDescriptor::ReceiverRegister();
   // Ensure that the vector and slot registers won't be clobbered before
   // calling the miss handler.
   DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(),
                      LoadWithVectorDescriptor::SlotRegister()));
(...skipping 4025 matching lines...)
                            MemOperand(fp, 6 * kPointerSize), NULL);
 }
 
 
 #undef __
 
 }  // namespace internal
 }  // namespace v8
 
 #endif  // V8_TARGET_ARCH_ARM
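
The new InstanceOfStub reduces to two ideas: a one-entry global (map, function) -> answer cache held in heap roots, and a plain walk of the receiver's prototype chain. Below is a minimal C++ sketch of the control flow the stub emits; Object, Map, Function and InstanceofCache here are hypothetical stand-ins for illustration, not V8's actual classes.

// Sketch only: models the control flow of the new stub, not V8's real types.
struct Map;

struct Object {
  Map* map;  // Every heap object points at its map (hidden class).
};

struct Map {
  Object* prototype;  // nullptr models JavaScript's null prototype.
};

struct Function {
  Object* prototype;  // The already-resolved "prototype" property.
};

// Models the Heap::kInstanceofCache{Function,Map,Answer} roots: a single
// global entry keyed by (map, function). It stays sound only because the
// runtime clears it whenever any object's prototype is mutated.
struct InstanceofCache {
  Map* map = nullptr;
  Function* function = nullptr;
  bool answer = false;
};

static InstanceofCache cache;

bool InstanceOf(Object* object, Function* function) {
  // Cache hit: same receiver shape and same constructor as last time.
  if (cache.map == object->map && cache.function == function) {
    return cache.answer;
  }

  // Store the key up front; the answer is filled in once it is known,
  // mirroring the StoreRoot calls the stub emits before its loop.
  cache.map = object->map;
  cache.function = function;

  // Walk the prototype chain. Assume true; flip to false at the chain end.
  bool answer = true;
  Object* prototype = object->map->prototype;
  while (prototype != function->prototype) {
    if (prototype == nullptr) {  // Hit null without finding it.
      answer = false;
      break;
    }
    prototype = prototype->map->prototype;
  }
  cache.answer = answer;
  return answer;
}

With an unchanged receiver shape, a repeated "x instanceof C" is therefore answered by the three root loads at the top of the stub, without walking the chain again.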
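The fast path applies only to an ordinary, unbound JSFunction whose map does not have the kHasNonInstancePrototype bit set; any other right-hand side reaches the slow case, where Runtime::kInstanceOf either computes the answer or throws the TypeError for a non-callable value. A sketch of that guard, again with illustrative field names rather than V8's real accessors:

// Sketch only: the conditions under which the stub bails out to the runtime.
struct FunctionOperand {
  bool is_smi;                      // Fails the initial JumpIfSmi guard.
  bool is_js_function;              // CompareObjectType(..., JS_FUNCTION_TYPE).
  bool has_non_instance_prototype;  // Map::kHasNonInstancePrototype bit.
  bool is_bound;                    // SharedFunctionInfo::kBoundFunction hint.
};

// Returns true when the inline prototype walk above is valid; otherwise the
// stub pushes {object, function} and tail-calls Runtime::kInstanceOf.
bool CanUseFastPath(const FunctionOperand& function) {
  if (function.is_smi || !function.is_js_function) return false;
  if (function.has_non_instance_prototype) return false;
  // Bound functions delegate instanceof to their target, so let the runtime
  // unwrap the bound chain.
  if (function.is_bound) return false;
  return true;
}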
