OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4955 matching lines...)
4966 __ pop(esi); | 4966 __ pop(esi); |
4967 __ pop(edi); | 4967 __ pop(edi); |
4968 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers | 4968 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers |
4969 | 4969 |
4970 // Restore frame pointer and return. | 4970 // Restore frame pointer and return. |
4971 __ pop(ebp); | 4971 __ pop(ebp); |
4972 __ ret(0); | 4972 __ ret(0); |
4973 } | 4973 } |
4974 | 4974 |
4975 | 4975 |
| 4976 // Generate stub code for instanceof. |
| 4977 // This code can patch a call site inline cache of the instanceof check, |
| 4978 // which looks like this: |
| 4979 // |
| 4980 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map> |
| 4981 // 75 0a jne <some near label> |
| 4982 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false> |
| 4983 // |
| 4984 // If call site patching is requested, the stack will have the delta from the |
| 4985 // return address to the cmp instruction just below the return address. This |
| 4986 // also means that call site patching can only take place with arguments in |
| 4987 // registers. TOS looks like this when call site patching is requested: |
| 4988 // |
| 4989 // esp[0] : return address |
| 4990 // esp[4] : delta from return address to cmp instruction |
| 4991 // |
4976 void InstanceofStub::Generate(MacroAssembler* masm) { | 4992 void InstanceofStub::Generate(MacroAssembler* masm) { |
| 4993 // Call site inlining and patching implies arguments in registers. |
| 4994 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck()); |
| 4995 |
4977 // Fixed register usage throughout the stub. | 4996 // Fixed register usage throughout the stub. |
4978 Register object = eax; // Object (lhs). | 4997 Register object = eax; // Object (lhs). |
4979 Register map = ebx; // Map of the object. | 4998 Register map = ebx; // Map of the object. |
4980 Register function = edx; // Function (rhs). | 4999 Register function = edx; // Function (rhs). |
4981 Register prototype = edi; // Prototype of the function. | 5000 Register prototype = edi; // Prototype of the function. |
4982 Register scratch = ecx; | 5001 Register scratch = ecx; |
4983 | 5002 |
| 5003 // Constants describing the call site code to patch. |
| 5004 static const int kDeltaToCmpImmediate = 2; |
| 5005 static const int kDeltaToMov = 8; |
| 5006 static const int kDeltaToMovImmediate = 9; |
| 5007 static const int8_t kCmpEdiImmediateByte1 = static_cast<int8_t>(0x81); |
| 5008 static const int8_t kCmpEdiImmediateByte2 = static_cast<int8_t>(0xff); |
| 5009 static const int8_t kMovEaxImmediateByte = static_cast<int8_t>(0xb8); |
| 5010 |
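The three kDelta* constants above encode the byte layout of the patchable sequence quoted in the header comment. A standalone sketch follows (plain C++, not V8 code; the buffer contents and the 0x12345678 stand-in value are made up for illustration) that derives the same offsets from the instruction lengths and shows how a patcher reaches the two immediates from the start of the cmp instruction.

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  // Patchable sequence from the comment above InstanceofStub::Generate:
  //   81 ff XX XX XX XX   cmp edi, imm32   (2 opcode/modrm bytes + 4 imm bytes)
  //   75 0a               jne <near label> (2 bytes)
  //   b8 XX XX XX XX      mov eax, imm32   (1 opcode byte + 4 imm bytes)
  const int kCmpLength = 6;
  const int kJneLength = 2;

  // Offsets relative to the start of the cmp instruction, which the stub
  // recovers as return_address - delta (the delta sits at esp[4]).
  const int kDeltaToCmpImmediate = 2;                // skip 81 ff
  const int kDeltaToMov = kCmpLength + kJneLength;   // == 8
  const int kDeltaToMovImmediate = kDeltaToMov + 1;  // skip b8, == 9
  assert(kDeltaToMov == 8 && kDeltaToMovImmediate == 9);

  // Fake call site with both immediates still unpatched (zeroed here).
  uint8_t site[13] = {0x81, 0xff, 0, 0, 0, 0,   // cmp edi, imm32
                      0x75, 0x0a,               // jne
                      0xb8, 0, 0, 0, 0};        // mov eax, imm32
  uint32_t map_bits = 0x12345678;               // stand-in for a map pointer
  std::memcpy(site + kDeltaToCmpImmediate, &map_bits, sizeof(map_bits));
  assert(site[kDeltaToMov] == 0xb8);            // mov opcode where expected
  return 0;
}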
| 5011 ExternalReference roots_address = ExternalReference::roots_address(); |
| 5012 |
| 5013 ASSERT_EQ(object.code(), InstanceofStub::left().code()); |
| 5014 ASSERT_EQ(function.code(), InstanceofStub::right().code()); |
| 5015 |
4984 // Get the object and function - they are always both needed. | 5016 // Get the object and function - they are always both needed. |
4985 Label slow, not_js_object; | 5017 Label slow, not_js_object; |
4986 if (!args_in_registers()) { | 5018 if (!HasArgsInRegisters()) { |
4987 __ mov(object, Operand(esp, 2 * kPointerSize)); | 5019 __ mov(object, Operand(esp, 2 * kPointerSize)); |
4988 __ mov(function, Operand(esp, 1 * kPointerSize)); | 5020 __ mov(function, Operand(esp, 1 * kPointerSize)); |
4989 } | 5021 } |
4990 | 5022 |
4991 // Check that the left hand side is a JS object. | 5023 // Check that the left hand side is a JS object. |
4992 __ test(object, Immediate(kSmiTagMask)); | 5024 __ test(object, Immediate(kSmiTagMask)); |
4993 __ j(zero, ¬_js_object, not_taken); | 5025 __ j(zero, ¬_js_object, not_taken); |
4994 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); | 5026 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); |
4995 | 5027 |
4996 // Look up the function and the map in the instanceof cache. | 5028 // If there is a call site cache don't look in the global cache, but do the |
4997 NearLabel miss; | 5029 // real lookup and update the call site cache. |
4998 ExternalReference roots_address = ExternalReference::roots_address(); | 5030 if (!HasCallSiteInlineCheck()) { |
4999 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); | 5031 // Look up the function and the map in the instanceof cache. |
5000 __ cmp(function, | 5032 NearLabel miss; |
5001 Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5033 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); |
5002 __ j(not_equal, &miss); | 5034 __ cmp(function, |
5003 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); | 5035 Operand::StaticArray(scratch, times_pointer_size, roots_address)); |
5004 __ cmp(map, Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5036 __ j(not_equal, &miss); |
5005 __ j(not_equal, &miss); | 5037 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); |
5006 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5038 __ cmp(map, Operand::StaticArray( |
5007 __ mov(eax, Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5039 scratch, times_pointer_size, roots_address)); |
5008 __ IncrementCounter(&Counters::instance_of_cache, 1); | 5040 __ j(not_equal, &miss); |
5009 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5041 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
| 5042 __ mov(eax, Operand::StaticArray( |
| 5043 scratch, times_pointer_size, roots_address)); |
| 5044 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5045 __ bind(&miss); |
| 5046 } |
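The block above consults V8's one-entry global instanceof cache through the roots array. As a rough illustration only (a toy structure and field names, not the real roots-array layout or root indices), the lookup amounts to:

#include <cstdio>

// Toy one-entry cache keyed on (function, map); 0 means "is an instance",
// a non-zero smi means "is not", matching the answers stored further below.
struct ToyInstanceofCache {
  const void* function = nullptr;
  const void* map = nullptr;
  int answer = 1;
};

bool Lookup(const ToyInstanceofCache& cache, const void* function,
            const void* map, int* answer) {
  if (cache.function != function || cache.map != map) return false;  // miss
  *answer = cache.answer;                                            // hit
  return true;
}

int main() {
  ToyInstanceofCache cache;
  int dummy_function = 0, dummy_map = 0, answer = -1;
  // Empty cache, so this lookup misses and prints 0.
  std::printf("%d\n", Lookup(cache, &dummy_function, &dummy_map, &answer));
  return 0;
}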
5010 | 5047 |
5011 __ bind(&miss); | |
5012 // Get the prototype of the function. | 5048 // Get the prototype of the function. |
5013 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); | 5049 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); |
5014 | 5050 |
5015 // Check that the function prototype is a JS object. | 5051 // Check that the function prototype is a JS object. |
5016 __ test(prototype, Immediate(kSmiTagMask)); | 5052 __ test(prototype, Immediate(kSmiTagMask)); |
5017 __ j(zero, &slow, not_taken); | 5053 __ j(zero, &slow, not_taken); |
5018 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 5054 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); |
5019 | 5055 |
5020 // Update the golbal instanceof cache with the current map and function. The | 5056 // Update the global instanceof or call site inlined cache with the current |
5021 // cached answer will be set when it is known. | 5057 // map and function. The cached answer will be set when it is known below. |
| 5058 if (!HasCallSiteInlineCheck()) { |
5022 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); | 5059 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); |
5023 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); | 5060 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); |
5024 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); | 5061 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); |
5025 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), | 5062 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), |
5026 function); | 5063 function); |
| 5064 } else { |
| 5065 // The constants for the code patching assume there are no push |
| 5066 // instructions at the call site. |
| 5067 ASSERT(HasArgsInRegisters()); |
| 5068 // Get return address and delta to inlined map check. |
| 5069 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5070 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5071 if (FLAG_debug_code) { |
| 5072 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1); |
| 5073 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)"); |
| 5074 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2); |
| 5075 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)"); |
| 5076 } |
| 5077 __ mov(Operand(scratch, kDeltaToCmpImmediate), map); |
| 5078 } |
5027 | 5079 |
5028 // Loop through the prototype chain of the object looking for the function | 5080 // Loop through the prototype chain of the object looking for the function |
5029 // prototype. | 5081 // prototype. |
5030 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); | 5082 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); |
5031 NearLabel loop, is_instance, is_not_instance; | 5083 NearLabel loop, is_instance, is_not_instance; |
5032 __ bind(&loop); | 5084 __ bind(&loop); |
5033 __ cmp(scratch, Operand(prototype)); | 5085 __ cmp(scratch, Operand(prototype)); |
5034 __ j(equal, &is_instance); | 5086 __ j(equal, &is_instance); |
5035 __ cmp(Operand(scratch), Immediate(Factory::null_value())); | 5087 __ cmp(Operand(scratch), Immediate(Factory::null_value())); |
5036 __ j(equal, &is_not_instance); | 5088 __ j(equal, &is_not_instance); |
5037 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 5089 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
5038 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); | 5090 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); |
5039 __ jmp(&loop); | 5091 __ jmp(&loop); |
5040 | 5092 |
5041 __ bind(&is_instance); | 5093 __ bind(&is_instance); |
5042 __ IncrementCounter(&Counters::instance_of_stub_true, 1); | 5094 if (!HasCallSiteInlineCheck()) { |
5043 __ Set(eax, Immediate(0)); | 5095 __ Set(eax, Immediate(0)); |
5044 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5096 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
5045 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax); | 5097 __ mov(Operand::StaticArray(scratch, |
5046 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5098 times_pointer_size, roots_address), eax); |
| 5099 } else { |
| 5100 // Get return address and delta to inlined map check. |
| 5101 __ mov(eax, Factory::true_value()); |
| 5102 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5103 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5104 if (FLAG_debug_code) { |
| 5105 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); |
| 5106 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 5107 } |
| 5108 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); |
| 5109 if (!ReturnTrueFalseObject()) { |
| 5110 __ Set(eax, Immediate(0)); |
| 5111 } |
| 5112 } |
| 5113 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5047 | 5114 |
5048 __ bind(&is_not_instance); | 5115 __ bind(&is_not_instance); |
5049 __ IncrementCounter(&Counters::instance_of_stub_false, 1); | 5116 if (!HasCallSiteInlineCheck()) { |
5050 __ Set(eax, Immediate(Smi::FromInt(1))); | 5117 __ Set(eax, Immediate(Smi::FromInt(1))); |
5051 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5118 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
5052 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax); | 5119 __ mov(Operand::StaticArray( |
5053 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5120 scratch, times_pointer_size, roots_address), eax); |
| 5121 } else { |
| 5122 // Get return address and delta to inlined map check. |
| 5123 __ mov(eax, Factory::false_value()); |
| 5124 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5125 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5126 if (FLAG_debug_code) { |
| 5127 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); |
| 5128 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 5129 } |
| 5130 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); |
| 5131 if (!ReturnTrueFalseObject()) { |
| 5132 __ Set(eax, Immediate(Smi::FromInt(1))); |
| 5133 } |
| 5134 } |
| 5135 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5054 | 5136 |
5055 Label object_not_null, object_not_null_or_smi; | 5137 Label object_not_null, object_not_null_or_smi; |
5056 __ bind(¬_js_object); | 5138 __ bind(¬_js_object); |
5057 // Before null, smi and string value checks, check that the rhs is a function | 5139 // Before null, smi and string value checks, check that the rhs is a function |
5058 // as for a non-function rhs an exception needs to be thrown. | 5140 // as for a non-function rhs an exception needs to be thrown. |
5059 __ test(function, Immediate(kSmiTagMask)); | 5141 __ test(function, Immediate(kSmiTagMask)); |
5060 __ j(zero, &slow, not_taken); | 5142 __ j(zero, &slow, not_taken); |
5061 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); | 5143 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); |
5062 __ j(not_equal, &slow, not_taken); | 5144 __ j(not_equal, &slow, not_taken); |
5063 | 5145 |
5064 // Null is not instance of anything. | 5146 // Null is not instance of anything. |
5065 __ cmp(object, Factory::null_value()); | 5147 __ cmp(object, Factory::null_value()); |
5066 __ j(not_equal, &object_not_null); | 5148 __ j(not_equal, &object_not_null); |
5067 __ IncrementCounter(&Counters::instance_of_stub_false_null, 1); | |
5068 __ Set(eax, Immediate(Smi::FromInt(1))); | 5149 __ Set(eax, Immediate(Smi::FromInt(1))); |
5069 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5150 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5070 | 5151 |
5071 __ bind(&object_not_null); | 5152 __ bind(&object_not_null); |
5072 // Smi values are not instances of anything. | 5153 // Smi values are not instances of anything. |
5073 __ test(object, Immediate(kSmiTagMask)); | 5154 __ test(object, Immediate(kSmiTagMask)); |
5074 __ j(not_zero, &object_not_null_or_smi, not_taken); | 5155 __ j(not_zero, &object_not_null_or_smi, not_taken); |
5075 __ Set(eax, Immediate(Smi::FromInt(1))); | 5156 __ Set(eax, Immediate(Smi::FromInt(1))); |
5076 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5157 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5077 | 5158 |
5078 __ bind(&object_not_null_or_smi); | 5159 __ bind(&object_not_null_or_smi); |
5079 // String values are not instances of anything. | 5160 // String values are not instances of anything. |
5080 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); | 5161 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); |
5081 __ j(NegateCondition(is_string), &slow); | 5162 __ j(NegateCondition(is_string), &slow); |
5082 __ IncrementCounter(&Counters::instance_of_stub_false_string, 1); | |
5083 __ Set(eax, Immediate(Smi::FromInt(1))); | 5163 __ Set(eax, Immediate(Smi::FromInt(1))); |
5084 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5164 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5085 | 5165 |
5086 // Slow-case: Go through the JavaScript implementation. | 5166 // Slow-case: Go through the JavaScript implementation. |
5087 __ bind(&slow); | 5167 __ bind(&slow); |
5088 if (args_in_registers()) { | 5168 if (HasArgsInRegisters()) { |
5089 // Push arguments below return address. | 5169 // Push arguments below return address. |
5090 __ pop(scratch); | 5170 __ pop(scratch); |
5091 __ push(object); | 5171 __ push(object); |
5092 __ push(function); | 5172 __ push(function); |
5093 __ push(scratch); | 5173 __ push(scratch); |
5094 } | 5174 } |
5095 __ IncrementCounter(&Counters::instance_of_slow, 1); | |
5096 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 5175 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
5097 } | 5176 } |
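The heart of the stub's fast path is the prototype-chain walk between the loop, is_instance and is_not_instance labels above. A minimal standalone sketch of that logic, using a made-up toy object model rather than V8's real Map/HeapObject layout:

#include <cstdio>

struct ToyObject {
  const ToyObject* prototype;  // stands in for the map's prototype slot
};

// Mirrors the stub's loop: walk the object's prototype chain and report
// whether the function's prototype object appears on it.
bool IsInstance(const ToyObject* object, const ToyObject* prototype) {
  for (const ToyObject* p = object->prototype; p != nullptr; p = p->prototype) {
    if (p == prototype) return true;   // reached the function's prototype
  }
  return false;                        // hit the end of the chain
}

int main() {
  ToyObject base = {nullptr};
  ToyObject derived = {&base};
  ToyObject instance = {&derived};
  std::printf("%d %d\n",
              IsInstance(&instance, &base),    // 1
              IsInstance(&base, &derived));    // 0
  return 0;
}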
5098 | 5177 |
5099 | 5178 |
| 5179 Register InstanceofStub::left() { return eax; } |
| 5180 |
| 5181 |
| 5182 Register InstanceofStub::right() { return edx; } |
| 5183 |
| 5184 |
| 5185 const char* InstanceofStub::GetName() { |
| 5186 if (name_ != NULL) return name_; |
| 5187 const int kMaxNameLength = 100; |
| 5188 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength); |
| 5189 if (name_ == NULL) return "OOM"; |
| 5190 |
| 5191 const char* args = ""; |
| 5192 if (HasArgsInRegisters()) { |
| 5193 args = "_REGS"; |
| 5194 } |
| 5195 |
| 5196 const char* inline_check = ""; |
| 5197 if (HasCallSiteInlineCheck()) { |
| 5198 inline_check = "_INLINE"; |
| 5199 } |
| 5200 |
| 5201 const char* return_true_false_object = ""; |
| 5202 if (ReturnTrueFalseObject()) { |
| 5203 return_true_false_object = "_TRUEFALSE"; |
| 5204 } |
| 5205 |
| 5206 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
| 5207 "InstanceofStub%s%s%s", |
| 5208 args, |
| 5209 inline_check, |
| 5210 return_true_false_object); |
| 5211 return name_; |
| 5212 } |
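For reference, the three flags combine into names like "InstanceofStub_REGS_INLINE". A standalone illustration of the same formatting, using the standard snprintf instead of V8's OS::SNPrintF:

#include <cstdio>

int main() {
  const bool args_in_registers = true;
  const bool call_site_inline_check = true;
  const bool return_true_false_object = false;
  char name[100];
  std::snprintf(name, sizeof(name), "InstanceofStub%s%s%s",
                args_in_registers ? "_REGS" : "",
                call_site_inline_check ? "_INLINE" : "",
                return_true_false_object ? "_TRUEFALSE" : "");
  std::printf("%s\n", name);  // prints: InstanceofStub_REGS_INLINE
  return 0;
}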
| 5213 |
| 5214 |
5100 int CompareStub::MinorKey() { | 5215 int CompareStub::MinorKey() { |
5101 // Encode the three parameters in a unique 16-bit value. To avoid duplicate | 5216 // Encode the three parameters in a unique 16-bit value. To avoid duplicate |
5102 // stubs, the never-NaN-NaN condition is only taken into account if the | 5217 // stubs, the never-NaN-NaN condition is only taken into account if the |
5103 // condition is equal. | 5218 // condition is equal. |
5104 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); | 5219 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); |
5105 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | 5220 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); |
5106 return ConditionField::encode(static_cast<unsigned>(cc_)) | 5221 return ConditionField::encode(static_cast<unsigned>(cc_)) |
5107 | RegisterField::encode(false) // lhs_ and rhs_ are not used | 5222 | RegisterField::encode(false) // lhs_ and rhs_ are not used |
5108 | StrictField::encode(strict_) | 5223 | StrictField::encode(strict_) |
5109 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) | 5224 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) |
(...skipping 1295 matching lines...)
6405 // Do a tail call to the rewritten stub. | 6520 // Do a tail call to the rewritten stub. |
6406 __ jmp(Operand(edi)); | 6521 __ jmp(Operand(edi)); |
6407 } | 6522 } |
6408 | 6523 |
6409 | 6524 |
6410 #undef __ | 6525 #undef __ |
6411 | 6526 |
6412 } } // namespace v8::internal | 6527 } } // namespace v8::internal |
6413 | 6528 |
6414 #endif // V8_TARGET_ARCH_IA32 | 6529 #endif // V8_TARGET_ARCH_IA32 |