Chromium Code Reviews

Side by Side Diff: src/x87/code-stubs-x87.cc

Issue 1318663003: X87: Correctify instanceof and make it optimizable. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 3 months ago
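The patch deletes the old InstanceofStub, whose fast path relied on patching an inlined cache at the call site, and replaces it with a new InstanceOfStub that takes its arguments in fixed registers, consults the global instanceof cache, and walks the object's prototype chain directly, deferring bound functions and other tricky cases to Runtime::kInstanceOf. As an orientation aid, here is a minimal C++ sketch of the decision logic the new stub encodes; the object model and every name in it (HypObject, InstanceOfFastPath, and so on) are hypothetical stand-ins, not V8 types or APIs.

// Hypothetical sketch of the fast path generated by the new
// InstanceOfStub::Generate; all types and names here are illustrative
// stand-ins, not real V8 interfaces.
struct HypObject {
  bool is_smi = false;                      // tagged small integer
  bool is_js_function = false;              // JS_FUNCTION_TYPE
  bool has_non_instance_prototype = false;  // Map::kHasNonInstancePrototype
  bool is_bound = false;                    // SharedFunctionInfo::kBoundFunction
  const HypObject* prototype = nullptr;           // [[Prototype]] via the map
  const HypObject* instance_prototype = nullptr;  // resolved "prototype" slot
};

enum class FastResult { kTrue, kFalse, kGoSlow /* Runtime::kInstanceOf */ };

FastResult InstanceOfFastPath(const HypObject* object, const HypObject* fn) {
  if (object->is_smi) {
    // A smi can never be an instance, but if the rhs is not a function the
    // runtime must throw, so only return false when fn is a JSFunction.
    return fn->is_js_function ? FastResult::kFalse : FastResult::kGoSlow;
  }
  if (!fn->is_js_function) return FastResult::kGoSlow;
  if (fn->has_non_instance_prototype || fn->is_bound) return FastResult::kGoSlow;
  // Walk {object}'s prototype chain looking for fn's instance prototype;
  // the chain terminates at null, in which case the answer is false.
  for (const HypObject* p = object->prototype; p != nullptr; p = p->prototype) {
    if (p == fn->instance_prototype) return FastResult::kTrue;
  }
  return FastResult::kFalse;
}

The kGoSlow bailouts correspond to the stub's slow_case label, which tail-calls the runtime so that, among other things, a proper exception can be thrown for a non-function right-hand side.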
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_X87 5 #if V8_TARGET_ARCH_X87
6 6
7 #include "src/base/bits.h" 7 #include "src/base/bits.h"
8 #include "src/bootstrapper.h" 8 #include "src/bootstrapper.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/codegen.h" 10 #include "src/codegen.h"
(...skipping 2412 matching lines...)
2423 __ pop(esi); 2423 __ pop(esi);
2424 __ pop(edi); 2424 __ pop(edi);
2425 __ add(esp, Immediate(2 * kPointerSize)); // remove markers 2425 __ add(esp, Immediate(2 * kPointerSize)); // remove markers
2426 2426
2427 // Restore frame pointer and return. 2427 // Restore frame pointer and return.
2428 __ pop(ebp); 2428 __ pop(ebp);
2429 __ ret(0); 2429 __ ret(0);
2430 } 2430 }
2431 2431
2432 2432
2433 // Generate stub code for instanceof. 2433 void InstanceOfStub::Generate(MacroAssembler* masm) {
2434 // This code can patch a call site inlined cache of the instance of check, 2434 Register const object = edx; // Object (lhs).
2435 // which looks like this. 2435 Register const function = eax; // Function (rhs).
2436 // 2436 Register const object_map = ecx; // Map of {object}.
2437 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map> 2437 Register const function_map = ebx; // Map of {function}.
2438 // 75 0a jne <some near label> 2438 Register const function_prototype = function_map; // Prototype of {function}.
2439 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false> 2439 Register const scratch = edi;
2440 //
2441 // If call site patching is requested the stack will have the delta from the
2442 // return address to the cmp instruction just below the return address. This
2443 // also means that call site patching can only take place with arguments in
2444 // registers. TOS looks like this when call site patching is requested
2445 //
2446 // esp[0] : return address
2447 // esp[4] : delta from return address to cmp instruction
2448 //
2449 void InstanceofStub::Generate(MacroAssembler* masm) {
2450 // Call site inlining and patching implies arguments in registers.
2451 DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
2452 2440
2453 // Fixed register usage throughout the stub. 2441 DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
2454 Register object = eax; // Object (lhs). 2442 DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
2455 Register map = ebx; // Map of the object.
2456 Register function = edx; // Function (rhs).
2457 Register prototype = edi; // Prototype of the function.
2458 Register scratch = ecx;
2459 2443
2460 // Constants describing the call site code to patch. 2444 // Check if {object} is a smi.
2461 static const int kDeltaToCmpImmediate = 2; 2445 Label object_is_smi;
2462 static const int kDeltaToMov = 8; 2446 __ JumpIfSmi(object, &object_is_smi, Label::kNear);
2463 static const int kDeltaToMovImmediate = 9;
2464 static const int8_t kCmpEdiOperandByte1 = bit_cast<int8_t, uint8_t>(0x3b);
2465 static const int8_t kCmpEdiOperandByte2 = bit_cast<int8_t, uint8_t>(0x3d);
2466 static const int8_t kMovEaxImmediateByte = bit_cast<int8_t, uint8_t>(0xb8);
2467 2447
2468 DCHECK_EQ(object.code(), InstanceofStub::left().code()); 2448 // Lookup the {function} and the {object} map in the global instanceof cache.
2469 DCHECK_EQ(function.code(), InstanceofStub::right().code()); 2449 // Note: This is safe because we clear the global instanceof cache whenever
2450 // we change the prototype of any object.
2451 Label fast_case, slow_case;
2452 __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
2453 __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2454 __ j(not_equal, &fast_case, Label::kNear);
2455 __ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
2456 __ j(not_equal, &fast_case, Label::kNear);
2457 __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
2458 __ ret(0);
2470 2459
2471 // Get the object and function - they are always both needed. 2460 // If {object} is a smi we can safely return false if {function} is a JS
2472 Label slow, not_js_object; 2461 // function, otherwise we have to miss to the runtime and throw an exception.
2473 if (!HasArgsInRegisters()) { 2462 __ bind(&object_is_smi);
2474 __ mov(object, Operand(esp, 2 * kPointerSize)); 2463 __ JumpIfSmi(function, &slow_case);
2475 __ mov(function, Operand(esp, 1 * kPointerSize)); 2464 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2476 } 2465 __ j(not_equal, &slow_case);
2466 __ LoadRoot(eax, Heap::kFalseValueRootIndex);
2467 __ ret(0);
2477 2468
2478 // Check that the left hand is a JS object. 2469 // Fast-case: The {function} must be a valid JSFunction.
2479 __ JumpIfSmi(object, &not_js_object); 2470 __ bind(&fast_case);
2480 __ IsObjectJSObjectType(object, map, scratch, &not_js_object); 2471 __ JumpIfSmi(function, &slow_case);
2472 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2473 __ j(not_equal, &slow_case);
2481 2474
2482 // If there is a call site cache don't look in the global cache, but do the 2475 // Ensure that {function} has an instance prototype.
2483 // real lookup and update the call site cache. 2476 __ test_b(FieldOperand(function_map, Map::kBitFieldOffset),
2484 if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) { 2477 static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
2485 // Look up the function and the map in the instanceof cache. 2478 __ j(not_zero, &slow_case);
2486 Label miss;
2487 __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2488 __ j(not_equal, &miss, Label::kNear);
2489 __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2490 __ j(not_equal, &miss, Label::kNear);
2491 __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
2492 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2493 __ bind(&miss);
2494 }
2495 2479
2496 // Get the prototype of the function. 2480 // Ensure that {function} is not bound.
2497 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); 2481 Register const shared_info = scratch;
2482 __ mov(shared_info,
2483 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2484 __ BooleanBitTest(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
2485 SharedFunctionInfo::kBoundFunction);
2486 __ j(not_zero, &slow_case);
2498 2487
2499 // Check that the function prototype is a JS object. 2488 // Get the "prototype" (or initial map) of the {function}.
2500 __ JumpIfSmi(prototype, &slow); 2489 __ mov(function_prototype,
2501 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); 2490 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2491 __ AssertNotSmi(function_prototype);
2502 2492
2503 // Update the global instanceof or call site inlined cache with the current 2493 // Resolve the prototype if the {function} has an initial map. Afterwards the
2504 // map and function. The cached answer will be set when it is known below. 2494 // {function_prototype} will be either the JSReceiver prototype object or the
2505 if (!HasCallSiteInlineCheck()) { 2495 // hole value, which means that no instances of the {function} were created so
2506 __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex); 2496 // far and hence we should return false.
2507 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); 2497 Label function_prototype_valid;
2508 } else { 2498 Register const function_prototype_map = scratch;
2509 // The constants for the code patching are based on no push instructions 2499 __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
2510 // at the call site. 2500 __ j(not_equal, &function_prototype_valid, Label::kNear);
2511 DCHECK(HasArgsInRegisters()); 2501 __ mov(function_prototype,
2512 // Get return address and delta to inlined map check. 2502 FieldOperand(function_prototype, Map::kPrototypeOffset));
2513 __ mov(scratch, Operand(esp, 0 * kPointerSize)); 2503 __ bind(&function_prototype_valid);
2514 __ sub(scratch, Operand(esp, 1 * kPointerSize)); 2504 __ AssertNotSmi(function_prototype);
2515 if (FLAG_debug_code) {
2516 __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
2517 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
2518 __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
2519 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
2520 }
2521 __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
2522 __ mov(Operand(scratch, 0), map);
2523 __ push(map);
2524 // Scratch points at the cell payload. Calculate the start of the object.
2525 __ sub(scratch, Immediate(Cell::kValueOffset - 1));
2526 __ RecordWriteField(scratch, Cell::kValueOffset, map, function,
2527 kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2528 __ pop(map);
2529 }
2530 2505
2531 // Loop through the prototype chain of the object looking for the function 2506 // Update the global instanceof cache with the current {object} map and
2532 // prototype. 2507 // {function}. The cached answer will be set when it is known below.
2533 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); 2508 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2534 Label loop, is_instance, is_not_instance; 2509 __ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
2510
2511 // Loop through the prototype chain looking for the {function} prototype.
2512 // Assume true, and change to false if not found.
2513 Register const object_prototype = object_map;
2514 Label done, loop;
2515 __ mov(eax, isolate()->factory()->true_value());
2535 __ bind(&loop); 2516 __ bind(&loop);
2536 __ cmp(scratch, prototype); 2517 __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
2537 __ j(equal, &is_instance, Label::kNear); 2518 __ cmp(object_prototype, function_prototype);
2538 Factory* factory = isolate()->factory(); 2519 __ j(equal, &done, Label::kNear);
2539 __ cmp(scratch, Immediate(factory->null_value())); 2520 __ cmp(object_prototype, isolate()->factory()->null_value());
2540 __ j(equal, &is_not_instance, Label::kNear); 2521 __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
2541 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); 2522 __ j(not_equal, &loop);
2542 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); 2523 __ mov(eax, isolate()->factory()->false_value());
2543 __ jmp(&loop); 2524 __ bind(&done);
2525 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2526 __ ret(0);
2544 2527
2545 __ bind(&is_instance); 2528 // Slow-case: Call the runtime function.
2546 if (!HasCallSiteInlineCheck()) { 2529 __ bind(&slow_case);
2547 __ mov(eax, Immediate(0)); 2530 __ pop(scratch); // Pop return address.
2548 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex); 2531 __ push(object); // Push {object}.
2549 if (ReturnTrueFalseObject()) { 2532 __ push(function); // Push {function}.
2550 __ mov(eax, factory->true_value()); 2533 __ push(scratch); // Push return address.
2551 } 2534 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
2552 } else {
2553 // Get return address and delta to inlined map check.
2554 __ mov(eax, factory->true_value());
2555 __ mov(scratch, Operand(esp, 0 * kPointerSize));
2556 __ sub(scratch, Operand(esp, 1 * kPointerSize));
2557 if (FLAG_debug_code) {
2558 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2559 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2560 }
2561 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2562 if (!ReturnTrueFalseObject()) {
2563 __ Move(eax, Immediate(0));
2564 }
2565 }
2566 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2567
2568 __ bind(&is_not_instance);
2569 if (!HasCallSiteInlineCheck()) {
2570 __ mov(eax, Immediate(Smi::FromInt(1)));
2571 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2572 if (ReturnTrueFalseObject()) {
2573 __ mov(eax, factory->false_value());
2574 }
2575 } else {
2576 // Get return address and delta to inlined map check.
2577 __ mov(eax, factory->false_value());
2578 __ mov(scratch, Operand(esp, 0 * kPointerSize));
2579 __ sub(scratch, Operand(esp, 1 * kPointerSize));
2580 if (FLAG_debug_code) {
2581 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2582 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2583 }
2584 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2585 if (!ReturnTrueFalseObject()) {
2586 __ Move(eax, Immediate(Smi::FromInt(1)));
2587 }
2588 }
2589 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2590
2591 Label object_not_null, object_not_null_or_smi;
2592 __ bind(&not_js_object);
2593 // Before null, smi and string value checks, check that the rhs is a function
2594 // as for a non-function rhs an exception needs to be thrown.
2595 __ JumpIfSmi(function, &slow, Label::kNear);
2596 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
2597 __ j(not_equal, &slow, Label::kNear);
2598
2599 // Null is not instance of anything.
2600 __ cmp(object, factory->null_value());
2601 __ j(not_equal, &object_not_null, Label::kNear);
2602 if (ReturnTrueFalseObject()) {
2603 __ mov(eax, factory->false_value());
2604 } else {
2605 __ Move(eax, Immediate(Smi::FromInt(1)));
2606 }
2607 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2608
2609 __ bind(&object_not_null);
2610 // Smi values is not instance of anything.
2611 __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
2612 if (ReturnTrueFalseObject()) {
2613 __ mov(eax, factory->false_value());
2614 } else {
2615 __ Move(eax, Immediate(Smi::FromInt(1)));
2616 }
2617 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2618
2619 __ bind(&object_not_null_or_smi);
2620 // String values is not instance of anything.
2621 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
2622 __ j(NegateCondition(is_string), &slow, Label::kNear);
2623 if (ReturnTrueFalseObject()) {
2624 __ mov(eax, factory->false_value());
2625 } else {
2626 __ Move(eax, Immediate(Smi::FromInt(1)));
2627 }
2628 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2629
2630 // Slow-case: Go through the JavaScript implementation.
2631 __ bind(&slow);
2632 if (!ReturnTrueFalseObject()) {
2633 // Tail call the builtin which returns 0 or 1.
2634 if (HasArgsInRegisters()) {
2635 // Push arguments below return address.
2636 __ pop(scratch);
2637 __ push(object);
2638 __ push(function);
2639 __ push(scratch);
2640 }
2641 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2642 } else {
2643 // Call the builtin and convert 0/1 to true/false.
2644 {
2645 FrameScope scope(masm, StackFrame::INTERNAL);
2646 __ push(object);
2647 __ push(function);
2648 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
2649 }
2650 Label true_value, done;
2651 __ test(eax, eax);
2652 __ j(zero, &true_value, Label::kNear);
2653 __ mov(eax, factory->false_value());
2654 __ jmp(&done, Label::kNear);
2655 __ bind(&true_value);
2656 __ mov(eax, factory->true_value());
2657 __ bind(&done);
2658 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2659 }
2660 } 2535 }
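The fast path sketched above is wrapped by the global instanceof cache visible in the new code: before the prototype walk the stub stores the current {object} map and {function} into two root slots, and the computed answer into a third afterwards, so a repeat check with the same pair returns immediately from the lookup at the top of the stub. The patch's comment notes this is safe because the cache is cleared whenever the prototype of any object changes. A tiny self-contained sketch of that contract, again with hypothetical names rather than V8's actual root-list machinery:

// Hypothetical model of the three-slot global instanceof cache
// (kInstanceofCacheFunctionRootIndex, kInstanceofCacheMapRootIndex,
// kInstanceofCacheAnswerRootIndex); names and layout are illustrative only.
struct InstanceofCacheSketch {
  const void* function = nullptr;  // last right-hand side checked
  const void* map = nullptr;       // map of the last left-hand side checked
  bool answer = false;             // cached result for that (map, function) pair

  // Fast-path probe performed at the top of the stub.
  bool Lookup(const void* fn, const void* object_map, bool* result) const {
    if (fn != function || object_map != map) return false;
    *result = answer;
    return true;
  }

  // The stub stores map/function before the walk and the answer after it.
  void Store(const void* fn, const void* object_map, bool result) {
    function = fn;
    map = object_map;
    answer = result;
  }

  // Cleared whenever any object's prototype changes, which is what makes the
  // caching safe (per the comment in the patch).
  void Clear() { function = nullptr; map = nullptr; answer = false; }
};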
2661 2536
2662 2537
2663 // ------------------------------------------------------------------------- 2538 // -------------------------------------------------------------------------
2664 // StringCharCodeAtGenerator 2539 // StringCharCodeAtGenerator
2665 2540
2666 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 2541 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2667 // If the receiver is a smi trigger the non-string case. 2542 // If the receiver is a smi trigger the non-string case.
2668 if (check_mode_ == RECEIVER_IS_UNKNOWN) { 2543 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2669 __ JumpIfSmi(object_, receiver_not_string_); 2544 __ JumpIfSmi(object_, receiver_not_string_);
(...skipping 2668 matching lines...)
5338 Operand(ebp, 7 * kPointerSize), NULL); 5213 Operand(ebp, 7 * kPointerSize), NULL);
5339 } 5214 }
5340 5215
5341 5216
5342 #undef __ 5217 #undef __
5343 5218
5344 } // namespace internal 5219 } // namespace internal
5345 } // namespace v8 5220 } // namespace v8
5346 5221
5347 #endif // V8_TARGET_ARCH_X87 5222 #endif // V8_TARGET_ARCH_X87