Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 6577036: [Isolates] Merge from bleeding_edge to isolates, revisions 6100-6300. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/isolates/
Patch Set: '' Created 9 years, 9 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 87 matching lines...)
98 98
99 // Get the function from the stack. 99 // Get the function from the stack.
100 __ mov(ecx, Operand(esp, 1 * kPointerSize)); 100 __ mov(ecx, Operand(esp, 1 * kPointerSize));
101 101
102 // Setup the object header. 102 // Setup the object header.
103 __ mov(FieldOperand(eax, HeapObject::kMapOffset), FACTORY->context_map()); 103 __ mov(FieldOperand(eax, HeapObject::kMapOffset), FACTORY->context_map());
104 __ mov(FieldOperand(eax, Context::kLengthOffset), 104 __ mov(FieldOperand(eax, Context::kLengthOffset),
105 Immediate(Smi::FromInt(length))); 105 Immediate(Smi::FromInt(length)));
106 106
107 // Setup the fixed slots. 107 // Setup the fixed slots.
108 __ xor_(ebx, Operand(ebx)); // Set to NULL. 108 __ Set(ebx, Immediate(0)); // Set to NULL.
109 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx); 109 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
110 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax); 110 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
111 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx); 111 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
112 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx); 112 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
113 113
114 // Copy the global object from the surrounding context. We go through the 114 // Copy the global object from the surrounding context. We go through the
115 // context in the function (ecx) to match the allocation behavior we have 115 // context in the function (ecx) to match the allocation behavior we have
116 // in the runtime system (see Heap::AllocateFunctionContext). 116 // in the runtime system (see Heap::AllocateFunctionContext).
117 __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset)); 117 __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
118 __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX))); 118 __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
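Note: the only functional change in this chunk is at line 108, where zeroing ebx now goes through the macro assembler's Set() helper instead of a hand-written xor. A hedged sketch of what such a helper plausibly looks like on ia32 (assuming the MacroAssembler/Immediate declarations from macro-assembler-ia32.h, including an Immediate::is_zero() predicate), not the actual V8 implementation:

    void MacroAssembler::Set(Register dst, const Immediate& x) {
      // Sketch only: keep the short two-byte xor encoding for zero,
      // fall back to a plain mov for any other immediate.
      if (x.is_zero()) {
        xor_(dst, Operand(dst));
      } else {
        mov(dst, x);
      }
    }

Routing through one helper keeps the "set register to a constant" intent readable at call sites like the one above while preserving the compact encoding for the common zero case.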
(...skipping 1649 matching lines...)
1768 case Token::SHL: 1768 case Token::SHL:
1769 case Token::SHR: 1769 case Token::SHR:
1770 GenerateTypeTransitionWithSavedArgs(masm); 1770 GenerateTypeTransitionWithSavedArgs(masm);
1771 break; 1771 break;
1772 default: 1772 default:
1773 UNREACHABLE(); 1773 UNREACHABLE();
1774 } 1774 }
1775 } 1775 }
1776 1776
1777 1777
1778
1779 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { 1778 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
1780 Label call_runtime; 1779 Label call_runtime;
1781 ASSERT(operands_type_ == TRBinaryOpIC::STRING); 1780 ASSERT(operands_type_ == TRBinaryOpIC::STRING);
1782 ASSERT(op_ == Token::ADD); 1781 ASSERT(op_ == Token::ADD);
1783 // If one of the arguments is a string, call the string add stub. 1782 // If one of the arguments is a string, call the string add stub.
1784 // Otherwise, transition to the generic TRBinaryOpIC type. 1783 // Otherwise, transition to the generic TRBinaryOpIC type.
1785 1784
1786 // Registers containing left and right operands respectively. 1785 // Registers containing left and right operands respectively.
1787 Register left = edx; 1786 Register left = edx;
1788 Register right = eax; 1787 Register right = eax;
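The comment above spells out the stub's whole job for the STRING type: only Token::ADD reaches it, a string on either side routes to the string add stub, and anything else transitions the IC to the generic type. A standalone sketch of that dispatch (not V8 code; the bool parameters stand in for the type checks the stub emits on edx and eax):

    enum class StringStubAction { kCallStringAddStub, kTransitionToGeneric };

    StringStubAction DispatchStringAdd(bool left_is_string, bool right_is_string) {
      // A string operand on either side means concatenation; otherwise the
      // type-recording IC falls back to its generic state.
      return (left_is_string || right_is_string)
                 ? StringStubAction::kCallStringAddStub
                 : StringStubAction::kTransitionToGeneric;
    }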
(...skipping 223 matching lines...)
2012 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 2011 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2013 break; 2012 break;
2014 default: 2013 default:
2015 UNREACHABLE(); 2014 UNREACHABLE();
2016 } 2015 }
2017 } 2016 }
2018 2017
2019 2018
2020 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 2019 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
2021 Label call_runtime; 2020 Label call_runtime;
2022 ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER || 2021 ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER);
2023 operands_type_ == TRBinaryOpIC::INT32);
2024 2022
2025 // Floating point case. 2023 // Floating point case.
2026 switch (op_) { 2024 switch (op_) {
2027 case Token::ADD: 2025 case Token::ADD:
2028 case Token::SUB: 2026 case Token::SUB:
2029 case Token::MUL: 2027 case Token::MUL:
2030 case Token::DIV: { 2028 case Token::DIV: {
2031 Label not_floats; 2029 Label not_floats;
2032 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 2030 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
2033 CpuFeatures::Scope use_sse2(SSE2); 2031 CpuFeatures::Scope use_sse2(SSE2);
(...skipping 2268 matching lines...)
4302 // The representation of NaN values has all exponent bits (52..62) set, 4300 // The representation of NaN values has all exponent bits (52..62) set,
4303 // and not all mantissa bits (0..51) clear. 4301 // and not all mantissa bits (0..51) clear.
4304 // We only accept QNaNs, which have bit 51 set. 4302 // We only accept QNaNs, which have bit 51 set.
4305 // Read top bits of double representation (second word of value). 4303 // Read top bits of double representation (second word of value).
4306 4304
4307 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e., 4305 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
4308 // all bits in the mask are set. We only need to check the word 4306 // all bits in the mask are set. We only need to check the word
4309 // that contains the exponent and high bit of the mantissa. 4307 // that contains the exponent and high bit of the mantissa.
4310 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0); 4308 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0);
4311 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset)); 4309 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
4312 __ xor_(eax, Operand(eax)); 4310 __ Set(eax, Immediate(0));
4313 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost 4311 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
4314 // bits. 4312 // bits.
4315 __ add(edx, Operand(edx)); 4313 __ add(edx, Operand(edx));
4316 __ cmp(edx, kQuietNaNHighBitsMask << 1); 4314 __ cmp(edx, kQuietNaNHighBitsMask << 1);
4317 if (cc_ == equal) { 4315 if (cc_ == equal) {
4318 STATIC_ASSERT(EQUAL != 1); 4316 STATIC_ASSERT(EQUAL != 1);
4319 __ setcc(above_equal, eax); 4317 __ setcc(above_equal, eax);
4320 __ ret(0); 4318 __ ret(0);
4321 } else { 4319 } else {
4322 NearLabel nan; 4320 NearLabel nan;
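The QNaN test here leans on the IEEE 754 layout described in the comment: a NaN has all exponent bits (52..62) set, and a quiet NaN additionally has mantissa bit 51 set, so only the high 32-bit word needs to be inspected. The add edx, edx shifts the sign bit out, after which a single unsigned compare against the shifted mask decides the answer. A standalone sketch of the predicate (not V8 code; the mask value is an assumption mirroring what kQuietNaNHighBitsMask describes):

    #include <cstdint>
    #include <cstring>

    bool IsQuietNaN(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      // High word: sign, exponent bits 52..62, and mantissa bit 51.
      uint32_t high = static_cast<uint32_t>(bits >> 32);
      // Assumed mask: exponent bits plus mantissa bit 51, i.e. bits 19..30
      // of the high word.
      const uint32_t quiet_nan_high_bits_mask = 0x7FF80000u;
      // Drop the sign bit (the stub's `add edx, edx`); the mask then occupies
      // the topmost bits, so >= holds exactly when every mask bit is set.
      return (high << 1) >= (quiet_nan_high_bits_mask << 1);
    }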
(...skipping 109 matching lines...)
4432 __ FCmp(); 4430 __ FCmp();
4433 4431
4434 // Don't base result on EFLAGS when a NaN is involved. 4432 // Don't base result on EFLAGS when a NaN is involved.
4435 __ j(parity_even, &unordered, not_taken); 4433 __ j(parity_even, &unordered, not_taken);
4436 4434
4437 NearLabel below_label, above_label; 4435 NearLabel below_label, above_label;
4438 // Return a result of -1, 0, or 1, based on EFLAGS. 4436 // Return a result of -1, 0, or 1, based on EFLAGS.
4439 __ j(below, &below_label, not_taken); 4437 __ j(below, &below_label, not_taken);
4440 __ j(above, &above_label, not_taken); 4438 __ j(above, &above_label, not_taken);
4441 4439
4442 __ xor_(eax, Operand(eax)); 4440 __ Set(eax, Immediate(0));
4443 __ ret(0); 4441 __ ret(0);
4444 4442
4445 __ bind(&below_label); 4443 __ bind(&below_label);
4446 __ mov(eax, Immediate(Smi::FromInt(-1))); 4444 __ mov(eax, Immediate(Smi::FromInt(-1)));
4447 __ ret(0); 4445 __ ret(0);
4448 4446
4449 __ bind(&above_label); 4447 __ bind(&above_label);
4450 __ mov(eax, Immediate(Smi::FromInt(1))); 4448 __ mov(eax, Immediate(Smi::FromInt(1)));
4451 __ ret(0); 4449 __ ret(0);
4452 } 4450 }
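This block materialises the usual three-way comparison result as a Smi in eax, choosing -1, 0 or 1 from EFLAGS after FCmp, with the unordered (NaN) case already diverted through the parity check above. A standalone sketch of the ordered part (not V8 code):

    int ThreeWayCompare(double lhs, double rhs) {
      // NaN operands are assumed to have been filtered out already
      // (the parity_even jump to &unordered).
      if (lhs < rhs) return -1;  // below_label
      if (lhs > rhs) return 1;   // above_label
      return 0;                  // equal: eax stays zero
    }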
(...skipping 193 matching lines...)
4646 // Restore next handler and frame pointer, discard handler state. 4644 // Restore next handler and frame pointer, discard handler state.
4647 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 4645 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4648 __ pop(Operand::StaticVariable(handler_address)); 4646 __ pop(Operand::StaticVariable(handler_address));
4649 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); 4647 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
4650 __ pop(ebp); 4648 __ pop(ebp);
4651 __ pop(edx); // Remove state. 4649 __ pop(edx); // Remove state.
4652 4650
4653 // Before returning we restore the context from the frame pointer if 4651 // Before returning we restore the context from the frame pointer if
4654 // not NULL. The frame pointer is NULL in the exception handler of 4652 // not NULL. The frame pointer is NULL in the exception handler of
4655 // a JS entry frame. 4653 // a JS entry frame.
4656 __ xor_(esi, Operand(esi)); // Tentatively set context pointer to NULL. 4654 __ Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
4657 NearLabel skip; 4655 NearLabel skip;
4658 __ cmp(ebp, 0); 4656 __ cmp(ebp, 0);
4659 __ j(equal, &skip, not_taken); 4657 __ j(equal, &skip, not_taken);
4660 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 4658 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4661 __ bind(&skip); 4659 __ bind(&skip);
4662 4660
4663 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); 4661 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
4664 __ ret(0); 4662 __ ret(0);
4665 } 4663 }
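The tail of the throw path tentatively clears the context register and only reloads it from the frame when the frame pointer is non-NULL, because the handler installed by the JS entry frame runs with a NULL frame pointer. A standalone sketch of that conditional restore (not V8 code; Frame is a hypothetical stand-in for the ebp-relative slots):

    struct Frame {
      void* context;  // slot at StandardFrameConstants::kContextOffset
    };

    void* RestoreContextAfterThrow(const Frame* frame_pointer) {
      void* context = nullptr;             // tentatively NULL (JS entry handler)
      if (frame_pointer != nullptr) {
        context = frame_pointer->context;  // reload from the frame
      }
      return context;
    }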
4666 4664
(...skipping 136 matching lines...)
4803 __ mov(eax, false); 4801 __ mov(eax, false);
4804 __ mov(Operand::StaticVariable(external_caught), eax); 4802 __ mov(Operand::StaticVariable(external_caught), eax);
4805 4803
4806 // Set pending exception and eax to out of memory exception. 4804 // Set pending exception and eax to out of memory exception.
4807 ExternalReference pending_exception(Isolate::k_pending_exception_address); 4805 ExternalReference pending_exception(Isolate::k_pending_exception_address);
4808 __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); 4806 __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4809 __ mov(Operand::StaticVariable(pending_exception), eax); 4807 __ mov(Operand::StaticVariable(pending_exception), eax);
4810 } 4808 }
4811 4809
4812 // Clear the context pointer. 4810 // Clear the context pointer.
4813 __ xor_(esi, Operand(esi)); 4811 __ Set(esi, Immediate(0));
4814 4812
4815 // Restore fp from handler and discard handler state. 4813 // Restore fp from handler and discard handler state.
4816 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); 4814 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
4817 __ pop(ebp); 4815 __ pop(ebp);
4818 __ pop(edx); // State. 4816 __ pop(edx); // State.
4819 4817
4820 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); 4818 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
4821 __ ret(0); 4819 __ ret(0);
4822 } 4820 }
4823 4821
(...skipping 155 matching lines...)
4979 __ pop(esi); 4977 __ pop(esi);
4980 __ pop(edi); 4978 __ pop(edi);
4981 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers 4979 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
4982 4980
4983 // Restore frame pointer and return. 4981 // Restore frame pointer and return.
4984 __ pop(ebp); 4982 __ pop(ebp);
4985 __ ret(0); 4983 __ ret(0);
4986 } 4984 }
4987 4985
4988 4986
4987 // Generate stub code for instanceof.
4988 // This code can patch a call site inlined cache of the instance of check,
4989 // which looks like this.
4990 //
4991 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
4992 // 75 0a jne <some near label>
4993 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
4994 //
4995 // If call site patching is requested the stack will have the delta from the
4996 // return address to the cmp instruction just below the return address. This
4997 // also means that call site patching can only take place with arguments in
4998 // registers. TOS looks like this when call site patching is requested
4999 //
5000 // esp[0] : return address
5001 // esp[4] : delta from return address to cmp instruction
5002 //
4989 void InstanceofStub::Generate(MacroAssembler* masm) { 5003 void InstanceofStub::Generate(MacroAssembler* masm) {
5004 // Call site inlining and patching implies arguments in registers.
5005 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
5006
4990 // Fixed register usage throughout the stub. 5007 // Fixed register usage throughout the stub.
4991 Register object = eax; // Object (lhs). 5008 Register object = eax; // Object (lhs).
4992 Register map = ebx; // Map of the object. 5009 Register map = ebx; // Map of the object.
4993 Register function = edx; // Function (rhs). 5010 Register function = edx; // Function (rhs).
4994 Register prototype = edi; // Prototype of the function. 5011 Register prototype = edi; // Prototype of the function.
4995 Register scratch = ecx; 5012 Register scratch = ecx;
4996 5013
5014 // Constants describing the call site code to patch.
5015 static const int kDeltaToCmpImmediate = 2;
5016 static const int kDeltaToMov = 8;
5017 static const int kDeltaToMovImmediate = 9;
5018 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
5019 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
5020 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
5021
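The three delta constants follow directly from the byte layout in the comment before the function: cmp edi, imm32 encodes as the two opcode bytes 81 ff followed by a 4-byte immediate (so the immediate sits at offset 2), jne rel8 takes 2 bytes, and mov eax, imm32 is the single opcode byte b8 plus a 4-byte immediate, putting the mov at offset 8 and its immediate at offset 9. A standalone sketch of that arithmetic (not V8 code):

    #include <cassert>

    int main() {
      const int kCmpOpcodeBytes = 2;  // 81 ff
      const int kImm32Bytes = 4;      // XX XX XX XX
      const int kJneRel8Bytes = 2;    // 75 0a
      const int kMovOpcodeBytes = 1;  // b8

      const int delta_to_cmp_immediate = kCmpOpcodeBytes;                      // 2
      const int delta_to_mov = kCmpOpcodeBytes + kImm32Bytes + kJneRel8Bytes;  // 8
      const int delta_to_mov_immediate = delta_to_mov + kMovOpcodeBytes;       // 9

      assert(delta_to_cmp_immediate == 2);
      assert(delta_to_mov == 8);
      assert(delta_to_mov_immediate == 9);
      return 0;
    }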
5022 ExternalReference roots_address = ExternalReference::roots_address();
5023
5024 ASSERT_EQ(object.code(), InstanceofStub::left().code());
5025 ASSERT_EQ(function.code(), InstanceofStub::right().code());
5026
4997 // Get the object and function - they are always both needed. 5027 // Get the object and function - they are always both needed.
4998 Label slow, not_js_object; 5028 Label slow, not_js_object;
4999 if (!args_in_registers()) { 5029 if (!HasArgsInRegisters()) {
5000 __ mov(object, Operand(esp, 2 * kPointerSize)); 5030 __ mov(object, Operand(esp, 2 * kPointerSize));
5001 __ mov(function, Operand(esp, 1 * kPointerSize)); 5031 __ mov(function, Operand(esp, 1 * kPointerSize));
5002 } 5032 }
5003 5033
5004 // Check that the left hand is a JS object. 5034 // Check that the left hand is a JS object.
5005 __ test(object, Immediate(kSmiTagMask)); 5035 __ test(object, Immediate(kSmiTagMask));
5006 __ j(zero, &not_js_object, not_taken); 5036 __ j(zero, &not_js_object, not_taken);
5007 __ IsObjectJSObjectType(object, map, scratch, &not_js_object); 5037 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
5008 5038
5009 // Look up the function and the map in the instanceof cache. 5039 // If there is a call site cache don't look in the global cache, but do the
5010 NearLabel miss; 5040 // real lookup and update the call site cache.
5011 ExternalReference roots_address = ExternalReference::roots_address(); 5041 if (!HasCallSiteInlineCheck()) {
5012 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); 5042 // Look up the function and the map in the instanceof cache.
5013 __ cmp(function, 5043 NearLabel miss;
5014 Operand::StaticArray(scratch, times_pointer_size, roots_address)); 5044 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
5015 __ j(not_equal, &miss); 5045 __ cmp(function,
5016 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); 5046 Operand::StaticArray(scratch, times_pointer_size, roots_address));
5017 __ cmp(map, Operand::StaticArray(scratch, times_pointer_size, roots_address)); 5047 __ j(not_equal, &miss);
5018 __ j(not_equal, &miss); 5048 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
5019 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); 5049 __ cmp(map, Operand::StaticArray(
5020 __ mov(eax, Operand::StaticArray(scratch, times_pointer_size, roots_address)); 5050 scratch, times_pointer_size, roots_address));
5021 __ IncrementCounter(COUNTERS->instance_of_cache(), 1); 5051 __ j(not_equal, &miss);
5022 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); 5052 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5053 __ mov(eax, Operand::StaticArray(
5054 scratch, times_pointer_size, roots_address));
5055 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5056 __ bind(&miss);
5057 }
5023 5058
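When no call-site inline check is requested, the stub first consults a single-entry global cache held in the roots array: a hit on both the cached function and the cached map short-circuits the prototype walk and returns the cached answer. A standalone sketch of that lookup (not V8 code; the three fields correspond to the kInstanceofCacheFunction/Map/Answer roots):

    struct InstanceofCache {
      const void* function = nullptr;
      const void* map = nullptr;
      int answer = 0;  // Smi 0 = is an instance, Smi 1 = is not (stub convention)
    };

    bool LookupInstanceofCache(const InstanceofCache& cache, const void* function,
                               const void* map, int* answer_out) {
      if (cache.function != function) return false;  // miss
      if (cache.map != map) return false;            // miss
      *answer_out = cache.answer;
      return true;                                   // hit: use cached answer
    }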
5024 __ bind(&miss);
5025 // Get the prototype of the function. 5059 // Get the prototype of the function.
5026 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); 5060 __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
5027 5061
5028 // Check that the function prototype is a JS object. 5062 // Check that the function prototype is a JS object.
5029 __ test(prototype, Immediate(kSmiTagMask)); 5063 __ test(prototype, Immediate(kSmiTagMask));
5030 __ j(zero, &slow, not_taken); 5064 __ j(zero, &slow, not_taken);
5031 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); 5065 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
5032 5066
5033 // Update the golbal instanceof cache with the current map and function. The 5067 // Update the global instanceof or call site inlined cache with the current
5034 // cached answer will be set when it is known. 5068 // map and function. The cached answer will be set when it is known below.
5069 if (!HasCallSiteInlineCheck()) {
5035 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); 5070 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
5036 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); 5071 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
5037 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); 5072 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
5038 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), 5073 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
5039 function); 5074 function);
5075 } else {
5076 // The constants for the code patching are based on no push instructions
5077 // at the call site.
5078 ASSERT(HasArgsInRegisters());
5079 // Get return address and delta to inlined map check.
5080 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5081 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5082 if (FLAG_debug_code) {
5083 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
5084 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
5085 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
5086 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
5087 }
5088 __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
5089 }
5040 5090
5041 // Loop through the prototype chain of the object looking for the function 5091 // Loop through the prototype chain of the object looking for the function
5042 // prototype. 5092 // prototype.
5043 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); 5093 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
5044 NearLabel loop, is_instance, is_not_instance; 5094 NearLabel loop, is_instance, is_not_instance;
5045 __ bind(&loop); 5095 __ bind(&loop);
5046 __ cmp(scratch, Operand(prototype)); 5096 __ cmp(scratch, Operand(prototype));
5047 __ j(equal, &is_instance); 5097 __ j(equal, &is_instance);
5048 __ cmp(Operand(scratch), Immediate(FACTORY->null_value())); 5098 __ cmp(Operand(scratch), Immediate(FACTORY->null_value()));
5049 __ j(equal, &is_not_instance); 5099 __ j(equal, &is_not_instance);
5050 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); 5100 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
5051 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); 5101 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
5052 __ jmp(&loop); 5102 __ jmp(&loop);
5053 5103
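The loop above is the core instanceof semantics: starting from the prototype held in the object's map, keep following each prototype's own map to the next prototype until it either equals the function's prototype (an instance) or reaches null (not an instance). A standalone sketch (not V8 code; Obj is a hypothetical stand-in for a heap object whose map records its prototype):

    struct Obj {
      const Obj* prototype;  // what the stub loads via Map::kPrototypeOffset
    };

    bool IsInstanceOf(const Obj* object_prototype, const Obj* function_prototype) {
      for (const Obj* p = object_prototype; p != nullptr; p = p->prototype) {
        if (p == function_prototype) return true;  // is_instance
      }
      return false;                                // hit null: is_not_instance
    }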
5054 __ bind(&is_instance); 5104 __ bind(&is_instance);
5055 __ IncrementCounter(COUNTERS->instance_of_stub_true(), 1); 5105 if (!HasCallSiteInlineCheck()) {
5056 __ Set(eax, Immediate(0)); 5106 __ Set(eax, Immediate(0));
5057 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); 5107 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5058 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax); 5108 __ mov(Operand::StaticArray(scratch,
5059 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); 5109 times_pointer_size, roots_address), eax);
5110 } else {
5111 // Get return address and delta to inlined map check.
5112 __ mov(eax, FACTORY->true_value());
5113 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5114 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5115 if (FLAG_debug_code) {
5116 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
5117 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
5118 }
5119 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
5120 if (!ReturnTrueFalseObject()) {
5121 __ Set(eax, Immediate(0));
5122 }
5123 }
5124 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5060 5125
5061 __ bind(&is_not_instance); 5126 __ bind(&is_not_instance);
5062 __ IncrementCounter(COUNTERS->instance_of_stub_false(), 1); 5127 if (!HasCallSiteInlineCheck()) {
5063 __ Set(eax, Immediate(Smi::FromInt(1))); 5128 __ Set(eax, Immediate(Smi::FromInt(1)));
5064 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); 5129 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5065 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax); 5130 __ mov(Operand::StaticArray(
5066 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); 5131 scratch, times_pointer_size, roots_address), eax);
5132 } else {
5133 // Get return address and delta to inlined map check.
5134 __ mov(eax, FACTORY->false_value());
5135 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5136 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5137 if (FLAG_debug_code) {
5138 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
5139 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
5140 }
5141 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
5142 if (!ReturnTrueFalseObject()) {
5143 __ Set(eax, Immediate(Smi::FromInt(1)));
5144 }
5145 }
5146 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5067 5147
5068 Label object_not_null, object_not_null_or_smi; 5148 Label object_not_null, object_not_null_or_smi;
5069 __ bind(&not_js_object); 5149 __ bind(&not_js_object);
5070 // Before null, smi and string value checks, check that the rhs is a function 5150 // Before null, smi and string value checks, check that the rhs is a function
5071 // as for a non-function rhs an exception needs to be thrown. 5151 // as for a non-function rhs an exception needs to be thrown.
5072 __ test(function, Immediate(kSmiTagMask)); 5152 __ test(function, Immediate(kSmiTagMask));
5073 __ j(zero, &slow, not_taken); 5153 __ j(zero, &slow, not_taken);
5074 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); 5154 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
5075 __ j(not_equal, &slow, not_taken); 5155 __ j(not_equal, &slow, not_taken);
5076 5156
5077 // Null is not instance of anything. 5157 // Null is not instance of anything.
5078 __ cmp(object, FACTORY->null_value()); 5158 __ cmp(object, FACTORY->null_value());
5079 __ j(not_equal, &object_not_null); 5159 __ j(not_equal, &object_not_null);
5080 __ IncrementCounter(COUNTERS->instance_of_stub_false_null(), 1);
5081 __ Set(eax, Immediate(Smi::FromInt(1))); 5160 __ Set(eax, Immediate(Smi::FromInt(1)));
5082 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); 5161 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5083 5162
5084 __ bind(&object_not_null); 5163 __ bind(&object_not_null);
5085 // Smi values is not instance of anything. 5164 // Smi values is not instance of anything.
5086 __ test(object, Immediate(kSmiTagMask)); 5165 __ test(object, Immediate(kSmiTagMask));
5087 __ j(not_zero, &object_not_null_or_smi, not_taken); 5166 __ j(not_zero, &object_not_null_or_smi, not_taken);
5088 __ Set(eax, Immediate(Smi::FromInt(1))); 5167 __ Set(eax, Immediate(Smi::FromInt(1)));
5089 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); 5168 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5090 5169
5091 __ bind(&object_not_null_or_smi); 5170 __ bind(&object_not_null_or_smi);
5092 // String values is not instance of anything. 5171 // String values is not instance of anything.
5093 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); 5172 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
5094 __ j(NegateCondition(is_string), &slow); 5173 __ j(NegateCondition(is_string), &slow);
5095 __ IncrementCounter(COUNTERS->instance_of_stub_false_string(), 1);
5096 __ Set(eax, Immediate(Smi::FromInt(1))); 5174 __ Set(eax, Immediate(Smi::FromInt(1)));
5097 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); 5175 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5098 5176
5099 // Slow-case: Go through the JavaScript implementation. 5177 // Slow-case: Go through the JavaScript implementation.
5100 __ bind(&slow); 5178 __ bind(&slow);
5101 if (args_in_registers()) { 5179 if (!ReturnTrueFalseObject()) {
5102 // Push arguments below return address. 5180 // Tail call the builtin which returns 0 or 1.
5103 __ pop(scratch); 5181 if (HasArgsInRegisters()) {
5182 // Push arguments below return address.
5183 __ pop(scratch);
5184 __ push(object);
5185 __ push(function);
5186 __ push(scratch);
5187 }
5188 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
5189 } else {
5190 // Call the builtin and convert 0/1 to true/false.
5191 __ EnterInternalFrame();
5104 __ push(object); 5192 __ push(object);
5105 __ push(function); 5193 __ push(function);
5106 __ push(scratch); 5194 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
5195 __ LeaveInternalFrame();
5196 NearLabel true_value, done;
5197 __ test(eax, Operand(eax));
5198 __ j(zero, &true_value);
5199 __ mov(eax, FACTORY->false_value());
5200 __ jmp(&done);
5201 __ bind(&true_value);
5202 __ mov(eax, FACTORY->true_value());
5203 __ bind(&done);
5204 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5107 } 5205 }
5108 __ IncrementCounter(COUNTERS->instance_of_slow(), 1);
5109 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
5110 } 5206 }
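Throughout the stub the raw result convention is inverted with respect to booleans: eax holds 0 when the object is an instance and Smi(1) when it is not, which is why the ReturnTrueFalseObject() path above jumps to true_value on a zero result from the builtin. A minimal standalone sketch of that mapping (not V8 code):

    bool InstanceofResultToBoolean(int stub_result) {
      return stub_result == 0;  // 0 means "is an instance", so it maps to true
    }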
5111 5207
5112 5208
5209 Register InstanceofStub::left() { return eax; }
5210
5211
5212 Register InstanceofStub::right() { return edx; }
5213
5214
5113 int CompareStub::MinorKey() { 5215 int CompareStub::MinorKey() {
5114 // Encode the three parameters in a unique 16 bit value. To avoid duplicate 5216 // Encode the three parameters in a unique 16 bit value. To avoid duplicate
5115 // stubs the never NaN NaN condition is only taken into account if the 5217 // stubs the never NaN NaN condition is only taken into account if the
5116 // condition is equals. 5218 // condition is equals.
5117 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); 5219 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
5118 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); 5220 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
5119 return ConditionField::encode(static_cast<unsigned>(cc_)) 5221 return ConditionField::encode(static_cast<unsigned>(cc_))
5120 | RegisterField::encode(false) // lhs_ and rhs_ are not used 5222 | RegisterField::encode(false) // lhs_ and rhs_ are not used
5121 | StrictField::encode(strict_) 5223 | StrictField::encode(strict_)
5122 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) 5224 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
(...skipping 1297 matching lines...)
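MinorKey packs the compare stub's parameters into a single small integer via bit fields; the lines shown just above assert that the condition fits in 12 bits and record that the explicit lhs/rhs registers are unused. A standalone sketch of that style of packing (not V8 code; the flag bit positions are illustrative assumptions, not the actual BitField layout from code-stubs.h):

    #include <cstdint>

    uint32_t EncodeCompareMinorKey(unsigned condition, bool registers_used,
                                   bool strict, bool never_nan_nan) {
      uint32_t key = condition & 0xFFFu;                   // ConditionField, 12 bits
      key |= static_cast<uint32_t>(registers_used) << 12;  // RegisterField
      key |= static_cast<uint32_t>(strict) << 13;          // StrictField
      key |= static_cast<uint32_t>(never_nan_nan) << 14;   // NeverNanNanField
      return key;
    }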
6420 // Do a tail call to the rewritten stub. 6522 // Do a tail call to the rewritten stub.
6421 __ jmp(Operand(edi)); 6523 __ jmp(Operand(edi));
6422 } 6524 }
6423 6525
6424 6526
6425 #undef __ 6527 #undef __
6426 6528
6427 } } // namespace v8::internal 6529 } } // namespace v8::internal
6428 6530
6429 #endif // V8_TARGET_ARCH_IA32 6531 #endif // V8_TARGET_ARCH_IA32