Chromium Code Reviews

Unified Diff: src/ppc/code-stubs-ppc.cc

Issue 1314263002: PPC: Correctify instanceof and make it optimizable. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Rebase | Created 5 years, 3 months ago
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
(...skipping 1349 matching lines...)
  // Restore callee-saved registers.
  __ MultiPop(kCalleeSaved);

  // Return
  __ LoadP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize));
  __ mtlr(r0);
  __ blr();
}

-// Uses registers r3 to r7.
-// Expected input (depending on whether args are in registers or on the stack):
-// * object: r3 or at sp + 1 * kPointerSize.
-// * function: r4 or at sp.
-//
-// An inlined call site may have been generated before calling this stub.
-// In this case the offset to the inline site to patch is passed in r8.
-// (See LCodeGen::DoInstanceOfKnownGlobal)
-void InstanceofStub::Generate(MacroAssembler* masm) {
-  // Call site inlining and patching implies arguments in registers.
-  DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
-
-  // Fixed register usage throughout the stub:
-  const Register object = r3;     // Object (lhs).
-  Register map = r6;              // Map of the object.
-  const Register function = r4;   // Function (rhs).
-  const Register prototype = r7;  // Prototype of the function.
-  // The map_check_delta was stored in r8
-  // The bool_load_delta was stored in r9
-  //   (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
-  const Register map_check_delta = r8;
-  const Register bool_load_delta = r9;
-  const Register inline_site = r10;
-  const Register scratch = r5;
-  Register scratch3 = no_reg;
-  Label slow, loop, is_instance, is_not_instance, not_js_object;
-
-  if (!HasArgsInRegisters()) {
-    __ LoadP(object, MemOperand(sp, 1 * kPointerSize));
-    __ LoadP(function, MemOperand(sp, 0));
-  }
-
-  // Check that the left hand is a JS object and load map.
-  __ JumpIfSmi(object, &not_js_object);
-  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
-
-  // If there is a call site cache don't look in the global cache, but do the
-  // real lookup and update the call site cache.
-  if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
-    Label miss;
-    __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
-    __ bne(&miss);
-    __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
-    __ bne(&miss);
-    __ LoadRoot(r3, Heap::kInstanceofCacheAnswerRootIndex);
-    __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-    __ bind(&miss);
-  }
-
-  // Get the prototype of the function.
-  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
-
-  // Check that the function prototype is a JS object.
-  __ JumpIfSmi(prototype, &slow);
-  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
-
-  // Update the global instanceof or call site inlined cache with the current
-  // map and function. The cached answer will be set when it is known below.
-  if (!HasCallSiteInlineCheck()) {
-    __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
-    __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
-  } else {
-    DCHECK(HasArgsInRegisters());
-    // Patch the (relocated) inlined map check.
-
-    const Register offset = map_check_delta;
-    __ mflr(inline_site);
-    __ sub(inline_site, inline_site, offset);
-    // Get the map location in offset and patch it.
-    __ GetRelocatedValue(inline_site, offset, scratch);
-    __ StoreP(map, FieldMemOperand(offset, Cell::kValueOffset), r0);
-
-    __ mr(r11, map);
-    __ RecordWriteField(offset, Cell::kValueOffset, r11, function,
-                        kLRHasNotBeenSaved, kDontSaveFPRegs,
-                        OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-  }
-
-  // Register mapping: r6 is object map and r7 is function prototype.
-  // Get prototype of object into r5.
-  __ LoadP(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
-
-  // We don't need map any more. Use it as a scratch register.
-  scratch3 = map;
-  map = no_reg;
-
-  // Loop through the prototype chain looking for the function prototype.
-  __ LoadRoot(scratch3, Heap::kNullValueRootIndex);
-  __ bind(&loop);
-  __ cmp(scratch, prototype);
-  __ beq(&is_instance);
-  __ cmp(scratch, scratch3);
-  __ beq(&is_not_instance);
-  __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
-  __ LoadP(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
-  __ b(&loop);
-  Factory* factory = isolate()->factory();
-
-  __ bind(&is_instance);
-  if (!HasCallSiteInlineCheck()) {
-    __ LoadSmiLiteral(r3, Smi::FromInt(0));
-    __ StoreRoot(r3, Heap::kInstanceofCacheAnswerRootIndex);
-    if (ReturnTrueFalseObject()) {
-      __ Move(r3, factory->true_value());
-    }
-  } else {
-    // Patch the call site to return true.
-    __ LoadRoot(r3, Heap::kTrueValueRootIndex);
-    __ add(inline_site, inline_site, bool_load_delta);
-    // Get the boolean result location in scratch and patch it.
-    __ SetRelocatedValue(inline_site, scratch, r3);
-
-    if (!ReturnTrueFalseObject()) {
-      __ LoadSmiLiteral(r3, Smi::FromInt(0));
-    }
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  __ bind(&is_not_instance);
-  if (!HasCallSiteInlineCheck()) {
-    __ LoadSmiLiteral(r3, Smi::FromInt(1));
-    __ StoreRoot(r3, Heap::kInstanceofCacheAnswerRootIndex);
-    if (ReturnTrueFalseObject()) {
-      __ Move(r3, factory->false_value());
-    }
-  } else {
-    // Patch the call site to return false.
-    __ LoadRoot(r3, Heap::kFalseValueRootIndex);
-    __ add(inline_site, inline_site, bool_load_delta);
-    // Get the boolean result location in scratch and patch it.
-    __ SetRelocatedValue(inline_site, scratch, r3);
-
-    if (!ReturnTrueFalseObject()) {
-      __ LoadSmiLiteral(r3, Smi::FromInt(1));
-    }
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  Label object_not_null, object_not_null_or_smi;
-  __ bind(&not_js_object);
-  // Before null, smi and string value checks, check that the rhs is a function
-  // as for a non-function rhs an exception needs to be thrown.
-  __ JumpIfSmi(function, &slow);
-  __ CompareObjectType(function, scratch3, scratch, JS_FUNCTION_TYPE);
-  __ bne(&slow);
-
-  // Null is not instance of anything.
-  __ Cmpi(object, Operand(isolate()->factory()->null_value()), r0);
-  __ bne(&object_not_null);
-  if (ReturnTrueFalseObject()) {
-    __ Move(r3, factory->false_value());
-  } else {
-    __ LoadSmiLiteral(r3, Smi::FromInt(1));
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  __ bind(&object_not_null);
-  // Smi values are not instances of anything.
-  __ JumpIfNotSmi(object, &object_not_null_or_smi);
-  if (ReturnTrueFalseObject()) {
-    __ Move(r3, factory->false_value());
-  } else {
-    __ LoadSmiLiteral(r3, Smi::FromInt(1));
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  __ bind(&object_not_null_or_smi);
-  // String values are not instances of anything.
-  __ IsObjectJSStringType(object, scratch, &slow);
-  if (ReturnTrueFalseObject()) {
-    __ Move(r3, factory->false_value());
-  } else {
-    __ LoadSmiLiteral(r3, Smi::FromInt(1));
-  }
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  // Slow-case. Tail call builtin.
-  __ bind(&slow);
-  if (!ReturnTrueFalseObject()) {
-    if (HasArgsInRegisters()) {
-      __ Push(r3, r4);
-    }
-    __ InvokeBuiltin(Context::INSTANCE_OF_BUILTIN_INDEX, JUMP_FUNCTION);
-  } else {
-    {
-      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-      __ Push(r3, r4);
-      __ InvokeBuiltin(Context::INSTANCE_OF_BUILTIN_INDEX, CALL_FUNCTION);
-    }
-    if (CpuFeatures::IsSupported(ISELECT)) {
-      __ cmpi(r3, Operand::Zero());
-      __ LoadRoot(r3, Heap::kTrueValueRootIndex);
-      __ LoadRoot(r4, Heap::kFalseValueRootIndex);
-      __ isel(eq, r3, r3, r4);
-    } else {
-      Label true_value, done;
-      __ cmpi(r3, Operand::Zero());
-      __ beq(&true_value);
-
-      __ LoadRoot(r3, Heap::kFalseValueRootIndex);
-      __ b(&done);
-
-      __ bind(&true_value);
-      __ LoadRoot(r3, Heap::kTrueValueRootIndex);
-
-      __ bind(&done);
-    }
-    __ Ret(HasArgsInRegisters() ? 0 : 2);
-  }
+void InstanceOfStub::Generate(MacroAssembler* masm) {
+  Register const object = r4;              // Object (lhs).
+  Register const function = r3;            // Function (rhs).
+  Register const object_map = r5;          // Map of {object}.
+  Register const function_map = r6;        // Map of {function}.
+  Register const function_prototype = r7;  // Prototype of {function}.
+  Register const scratch = r8;
+
+  DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
+  DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
+
+  // Check if {object} is a smi.
+  Label object_is_smi;
+  __ JumpIfSmi(object, &object_is_smi);
+
+  // Lookup the {function} and the {object} map in the global instanceof cache.
+  // Note: This is safe because we clear the global instanceof cache whenever
+  // we change the prototype of any object.
+  Label fast_case, slow_case;
+  __ LoadP(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
+  __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
+  __ bne(&fast_case);
+  __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
+  __ bne(&fast_case);
+  __ LoadRoot(r3, Heap::kInstanceofCacheAnswerRootIndex);
+  __ Ret();
+
+  // If {object} is a smi we can safely return false if {function} is a JS
+  // function, otherwise we have to miss to the runtime and throw an exception.
+  __ bind(&object_is_smi);
+  __ JumpIfSmi(function, &slow_case);
+  __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
+  __ bne(&slow_case);
+  __ LoadRoot(r3, Heap::kFalseValueRootIndex);
+  __ Ret();
+
+  // Fast-case: The {function} must be a valid JSFunction.
+  __ bind(&fast_case);
+  __ JumpIfSmi(function, &slow_case);
+  __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
+  __ bne(&slow_case);
+
+  // Ensure that {function} has an instance prototype.
+  __ lbz(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
+  __ TestBit(scratch, Map::HasNonInstancePrototype, r0);
+  __ bne(&slow_case, cr0);
+
+  // Ensure that {function} is not bound.
+  Register const shared_info = scratch;
+  __ LoadP(shared_info,
+           FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
+  __ lwz(scratch, FieldMemOperand(shared_info,
+                                  SharedFunctionInfo::kCompilerHintsOffset));
+  __ TestBit(scratch,
+#if V8_TARGET_ARCH_PPC64
+             SharedFunctionInfo::kBoundFunction,
+#else
+             SharedFunctionInfo::kBoundFunction + kSmiTagSize,
+#endif
+             r0);
+  __ bne(&slow_case, cr0);
+
+  // Get the "prototype" (or initial map) of the {function}.
+  __ LoadP(function_prototype,
+           FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
+  __ AssertNotSmi(function_prototype);
+
+  // Resolve the prototype if the {function} has an initial map. Afterwards the
+  // {function_prototype} will be either the JSReceiver prototype object or the
+  // hole value, which means that no instances of the {function} were created so
+  // far and hence we should return false.
+  Label function_prototype_valid;
+  __ CompareObjectType(function_prototype, scratch, scratch, MAP_TYPE);
+  __ bne(&function_prototype_valid);
+  __ LoadP(function_prototype,
+           FieldMemOperand(function_prototype, Map::kPrototypeOffset));
+  __ bind(&function_prototype_valid);
+  __ AssertNotSmi(function_prototype);
+
+  // Update the global instanceof cache with the current {object} map and
+  // {function}. The cached answer will be set when it is known below.
+  __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
+  __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
+
+  // Loop through the prototype chain looking for the {function} prototype.
+  // Assume true, and change to false if not found.
+  Register const object_prototype = object_map;
+  Register const null = scratch;
+  Label done, loop;
+  __ LoadRoot(r3, Heap::kTrueValueRootIndex);
+  __ LoadRoot(null, Heap::kNullValueRootIndex);
+  __ bind(&loop);
+  __ LoadP(object_prototype,
+           FieldMemOperand(object_map, Map::kPrototypeOffset));
+  __ cmp(object_prototype, function_prototype);
+  __ beq(&done);
+  __ cmp(object_prototype, null);
+  __ LoadP(object_map,
+           FieldMemOperand(object_prototype, HeapObject::kMapOffset));
+  __ bne(&loop);
+  __ LoadRoot(r3, Heap::kFalseValueRootIndex);
+  __ bind(&done);
+  __ StoreRoot(r3, Heap::kInstanceofCacheAnswerRootIndex);
+  __ Ret();
+
+  // Slow-case: Call the runtime function.
+  __ bind(&slow_case);
+  __ Push(object, function);
+  __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
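Note (not part of the patch): the new stub's fast path amounts to a prototype-chain walk, with a global cache consulted and updated around it and a runtime fallback for the tricky cases. The following is a minimal C++ sketch of that logic under a toy object model; Object, Map, Function, Result and InstanceOf here are illustrative names, not V8 types, the cache is not modeled, and kRuntime stands in for the tail call to Runtime::kInstanceOf.

// Toy model of the InstanceOfStub fast path; all types and fields are
// hypothetical and exist only to mirror the control flow of the stub.
struct Map;

struct Object {
  bool is_smi = false;
  Map* map = nullptr;  // Unused for smis.
};

struct Map {
  Object* prototype = nullptr;  // nullptr plays the role of the null value.
};

struct Function : Object {
  bool is_bound = false;
  bool has_non_instance_prototype = false;
  Object* instance_prototype = nullptr;  // Resolved "prototype" property.
};

// kRuntime marks the cases the stub defers to the runtime.
enum class Result { kTrue, kFalse, kRuntime };

Result InstanceOf(Object* object, Function* function) {
  // Bound functions and functions without an instance prototype are handled
  // by the runtime (the stub branches to slow_case for them).
  if (function->is_bound || function->has_non_instance_prototype) {
    return Result::kRuntime;
  }
  // A smi left-hand side is never an instance of a JSFunction.
  if (object->is_smi) {
    return Result::kFalse;
  }
  // Walk the prototype chain of {object}, comparing each prototype against
  // the resolved prototype of {function}; stop when the null sentinel is hit.
  Object* target = function->instance_prototype;
  for (Map* map = object->map;;) {
    Object* object_prototype = map->prototype;
    if (object_prototype == target) return Result::kTrue;
    if (object_prototype == nullptr) return Result::kFalse;
    map = object_prototype->map;
  }
}

In the actual stub the walk operates on maps and Heap roots (true/false/null values) directly, and the computed answer is stored in the instanceof cache roots before returning, which is what makes the first cache check above the fast path.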


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!AreAliased(r7, r8, LoadWithVectorDescriptor::VectorRegister(),
                     LoadWithVectorDescriptor::SlotRegister()));
(...skipping 4238 matching lines...)
                              kStackUnwindSpace, NULL,
                              MemOperand(fp, 6 * kPointerSize), NULL);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC
