OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1754 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1765 case Token::SHL: | 1765 case Token::SHL: |
1766 case Token::SHR: | 1766 case Token::SHR: |
1767 GenerateTypeTransitionWithSavedArgs(masm); | 1767 GenerateTypeTransitionWithSavedArgs(masm); |
1768 break; | 1768 break; |
1769 default: | 1769 default: |
1770 UNREACHABLE(); | 1770 UNREACHABLE(); |
1771 } | 1771 } |
1772 } | 1772 } |
1773 | 1773 |
1774 | 1774 |
1775 | |
1776 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { | 1775 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
1777 Label call_runtime; | 1776 Label call_runtime; |
1778 ASSERT(operands_type_ == TRBinaryOpIC::STRING); | 1777 ASSERT(operands_type_ == TRBinaryOpIC::STRING); |
1779 ASSERT(op_ == Token::ADD); | 1778 ASSERT(op_ == Token::ADD); |
1780 // If one of the arguments is a string, call the string add stub. | 1779 // If one of the arguments is a string, call the string add stub. |
1781 // Otherwise, transition to the generic TRBinaryOpIC type. | 1780 // Otherwise, transition to the generic TRBinaryOpIC type. |
1782 | 1781 |
1783 // Registers containing left and right operands respectively. | 1782 // Registers containing left and right operands respectively. |
1784 Register left = edx; | 1783 Register left = edx; |
1785 Register right = eax; | 1784 Register right = eax; |
(...skipping 3180 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4966 __ pop(esi); | 4965 __ pop(esi); |
4967 __ pop(edi); | 4966 __ pop(edi); |
4968 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers | 4967 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers |
4969 | 4968 |
4970 // Restore frame pointer and return. | 4969 // Restore frame pointer and return. |
4971 __ pop(ebp); | 4970 __ pop(ebp); |
4972 __ ret(0); | 4971 __ ret(0); |
4973 } | 4972 } |
4974 | 4973 |
4975 | 4974 |
| 4975 // Generate stub code for instanceof. |
 | 4976 // This code can patch a call site inline cache of the instanceof check, |
| 4977 // which looks like this. |
| 4978 // |
| 4979 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map> |
| 4980 // 75 0a jne <some near label> |
| 4981 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false> |
| 4982 // |
| 4983 // If call site patching is requested the stack will have the delta from the |
| 4984 // return address to the cmp instruction just below the return address. This |
| 4985 // also means that call site patching can only take place with arguments in |
| 4986 // registers. TOS looks like this when call site patching is requested |
| 4987 // |
| 4988 // esp[0] : return address |
| 4989 // esp[4] : delta from return address to cmp instruction |
| 4990 // |
4976 void InstanceofStub::Generate(MacroAssembler* masm) { | 4991 void InstanceofStub::Generate(MacroAssembler* masm) { |
| 4992 // Call site inlining and patching implies arguments in registers. |
| 4993 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck()); |
| 4994 |
4977 // Fixed register usage throughout the stub. | 4995 // Fixed register usage throughout the stub. |
4978 Register object = eax; // Object (lhs). | 4996 Register object = eax; // Object (lhs). |
4979 Register map = ebx; // Map of the object. | 4997 Register map = ebx; // Map of the object. |
4980 Register function = edx; // Function (rhs). | 4998 Register function = edx; // Function (rhs). |
4981 Register prototype = edi; // Prototype of the function. | 4999 Register prototype = edi; // Prototype of the function. |
4982 Register scratch = ecx; | 5000 Register scratch = ecx; |
4983 | 5001 |
| 5002 // Constants describing the call site code to patch. |
| 5003 static const int kDeltaToCmpImmediate = 2; |
| 5004 static const int kDeltaToMov = 8; |
| 5005 static const int kDeltaToMovImmediate = 9; |
| 5006 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81); |
| 5007 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff); |
| 5008 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8); |
| 5009 |
| 5010 ExternalReference roots_address = ExternalReference::roots_address(); |
| 5011 |
| 5012 ASSERT_EQ(object.code(), InstanceofStub::left().code()); |
| 5013 ASSERT_EQ(function.code(), InstanceofStub::right().code()); |
| 5014 |
4984 // Get the object and function - they are always both needed. | 5015 // Get the object and function - they are always both needed. |
4985 Label slow, not_js_object; | 5016 Label slow, not_js_object; |
4986 if (!args_in_registers()) { | 5017 if (!HasArgsInRegisters()) { |
4987 __ mov(object, Operand(esp, 2 * kPointerSize)); | 5018 __ mov(object, Operand(esp, 2 * kPointerSize)); |
4988 __ mov(function, Operand(esp, 1 * kPointerSize)); | 5019 __ mov(function, Operand(esp, 1 * kPointerSize)); |
4989 } | 5020 } |
4990 | 5021 |
4991 // Check that the left hand side is a JS object. | 5022 // Check that the left hand side is a JS object. |
4992 __ test(object, Immediate(kSmiTagMask)); | 5023 __ test(object, Immediate(kSmiTagMask)); |
4993 __ j(zero, ¬_js_object, not_taken); | 5024 __ j(zero, ¬_js_object, not_taken); |
4994 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); | 5025 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); |
4995 | 5026 |
4996 // Look up the function and the map in the instanceof cache. | 5027 // If there is a call site cache don't look in the global cache, but do the |
4997 NearLabel miss; | 5028 // real lookup and update the call site cache. |
4998 ExternalReference roots_address = ExternalReference::roots_address(); | 5029 if (!HasCallSiteInlineCheck()) { |
4999 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); | 5030 // Look up the function and the map in the instanceof cache. |
5000 __ cmp(function, | 5031 NearLabel miss; |
5001 Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5032 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); |
5002 __ j(not_equal, &miss); | 5033 __ cmp(function, |
5003 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); | 5034 Operand::StaticArray(scratch, times_pointer_size, roots_address)); |
5004 __ cmp(map, Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5035 __ j(not_equal, &miss); |
5005 __ j(not_equal, &miss); | 5036 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); |
5006 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5037 __ cmp(map, Operand::StaticArray( |
5007 __ mov(eax, Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5038 scratch, times_pointer_size, roots_address)); |
5008 __ IncrementCounter(&Counters::instance_of_cache, 1); | 5039 __ j(not_equal, &miss); |
5009 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5040 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
| 5041 __ mov(eax, Operand::StaticArray( |
| 5042 scratch, times_pointer_size, roots_address)); |
| 5043 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5044 __ bind(&miss); |
| 5045 } |
5010 | 5046 |
5011 __ bind(&miss); | |
5012 // Get the prototype of the function. | 5047 // Get the prototype of the function. |
5013 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); | 5048 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); |
5014 | 5049 |
5015 // Check that the function prototype is a JS object. | 5050 // Check that the function prototype is a JS object. |
5016 __ test(prototype, Immediate(kSmiTagMask)); | 5051 __ test(prototype, Immediate(kSmiTagMask)); |
5017 __ j(zero, &slow, not_taken); | 5052 __ j(zero, &slow, not_taken); |
5018 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 5053 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); |
5019 | 5054 |
5020 // Update the golbal instanceof cache with the current map and function. The | 5055 // Update the global instanceof or call site inlined cache with the current |
5021 // cached answer will be set when it is known. | 5056 // map and function. The cached answer will be set when it is known below. |
| 5057 if (!HasCallSiteInlineCheck()) { |
5022 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); | 5058 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); |
5023 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); | 5059 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); |
5024 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); | 5060 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); |
5025 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), | 5061 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), |
5026 function); | 5062 function); |
| 5063 } else { |
| 5064 // The constants for the code patching are based on no push instructions |
| 5065 // at the call site. |
| 5066 ASSERT(HasArgsInRegisters()); |
| 5067 // Get return address and delta to inlined map check. |
| 5068 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5069 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5070 if (FLAG_debug_code) { |
| 5071 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1); |
| 5072 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)"); |
| 5073 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2); |
| 5074 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)"); |
| 5075 } |
| 5076 __ mov(Operand(scratch, kDeltaToCmpImmediate), map); |
| 5077 } |
5027 | 5078 |
5028 // Loop through the prototype chain of the object looking for the function | 5079 // Loop through the prototype chain of the object looking for the function |
5029 // prototype. | 5080 // prototype. |
5030 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); | 5081 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); |
5031 NearLabel loop, is_instance, is_not_instance; | 5082 NearLabel loop, is_instance, is_not_instance; |
5032 __ bind(&loop); | 5083 __ bind(&loop); |
5033 __ cmp(scratch, Operand(prototype)); | 5084 __ cmp(scratch, Operand(prototype)); |
5034 __ j(equal, &is_instance); | 5085 __ j(equal, &is_instance); |
5035 __ cmp(Operand(scratch), Immediate(Factory::null_value())); | 5086 __ cmp(Operand(scratch), Immediate(Factory::null_value())); |
5036 __ j(equal, &is_not_instance); | 5087 __ j(equal, &is_not_instance); |
5037 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 5088 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
5038 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); | 5089 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); |
5039 __ jmp(&loop); | 5090 __ jmp(&loop); |
5040 | 5091 |
5041 __ bind(&is_instance); | 5092 __ bind(&is_instance); |
5042 __ IncrementCounter(&Counters::instance_of_stub_true, 1); | 5093 if (!HasCallSiteInlineCheck()) { |
5043 __ Set(eax, Immediate(0)); | 5094 __ Set(eax, Immediate(0)); |
5044 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5095 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
5045 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax); | 5096 __ mov(Operand::StaticArray(scratch, |
5046 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5097 times_pointer_size, roots_address), eax); |
| 5098 } else { |
| 5099 // Get return address and delta to inlined map check. |
| 5100 __ mov(eax, Factory::true_value()); |
| 5101 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5102 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5103 if (FLAG_debug_code) { |
| 5104 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); |
| 5105 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 5106 } |
| 5107 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); |
| 5108 if (!ReturnTrueFalseObject()) { |
| 5109 __ Set(eax, Immediate(0)); |
| 5110 } |
| 5111 } |
| 5112 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5047 | 5113 |
5048 __ bind(&is_not_instance); | 5114 __ bind(&is_not_instance); |
5049 __ IncrementCounter(&Counters::instance_of_stub_false, 1); | 5115 if (!HasCallSiteInlineCheck()) { |
5050 __ Set(eax, Immediate(Smi::FromInt(1))); | 5116 __ Set(eax, Immediate(Smi::FromInt(1))); |
5051 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5117 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
5052 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax); | 5118 __ mov(Operand::StaticArray( |
5053 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5119 scratch, times_pointer_size, roots_address), eax); |
| 5120 } else { |
| 5121 // Get return address and delta to inlined map check. |
| 5122 __ mov(eax, Factory::false_value()); |
| 5123 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5124 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5125 if (FLAG_debug_code) { |
| 5126 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); |
| 5127 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 5128 } |
| 5129 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); |
| 5130 if (!ReturnTrueFalseObject()) { |
| 5131 __ Set(eax, Immediate(Smi::FromInt(1))); |
| 5132 } |
| 5133 } |
| 5134 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5054 | 5135 |
5055 Label object_not_null, object_not_null_or_smi; | 5136 Label object_not_null, object_not_null_or_smi; |
5056 __ bind(¬_js_object); | 5137 __ bind(¬_js_object); |
5057 // Before null, smi and string value checks, check that the rhs is a function | 5138 // Before null, smi and string value checks, check that the rhs is a function |
5058 // as for a non-function rhs an exception needs to be thrown. | 5139 // as for a non-function rhs an exception needs to be thrown. |
5059 __ test(function, Immediate(kSmiTagMask)); | 5140 __ test(function, Immediate(kSmiTagMask)); |
5060 __ j(zero, &slow, not_taken); | 5141 __ j(zero, &slow, not_taken); |
5061 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); | 5142 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); |
5062 __ j(not_equal, &slow, not_taken); | 5143 __ j(not_equal, &slow, not_taken); |
5063 | 5144 |
5064 // Null is not instance of anything. | 5145 // Null is not instance of anything. |
5065 __ cmp(object, Factory::null_value()); | 5146 __ cmp(object, Factory::null_value()); |
5066 __ j(not_equal, &object_not_null); | 5147 __ j(not_equal, &object_not_null); |
5067 __ IncrementCounter(&Counters::instance_of_stub_false_null, 1); | |
5068 __ Set(eax, Immediate(Smi::FromInt(1))); | 5148 __ Set(eax, Immediate(Smi::FromInt(1))); |
5069 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5149 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5070 | 5150 |
5071 __ bind(&object_not_null); | 5151 __ bind(&object_not_null); |
5072 // Smi values are not instances of anything. | 5152 // Smi values are not instances of anything. |
5073 __ test(object, Immediate(kSmiTagMask)); | 5153 __ test(object, Immediate(kSmiTagMask)); |
5074 __ j(not_zero, &object_not_null_or_smi, not_taken); | 5154 __ j(not_zero, &object_not_null_or_smi, not_taken); |
5075 __ Set(eax, Immediate(Smi::FromInt(1))); | 5155 __ Set(eax, Immediate(Smi::FromInt(1))); |
5076 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5156 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5077 | 5157 |
5078 __ bind(&object_not_null_or_smi); | 5158 __ bind(&object_not_null_or_smi); |
5079 // String values are not instances of anything. | 5159 // String values are not instances of anything. |
5080 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); | 5160 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); |
5081 __ j(NegateCondition(is_string), &slow); | 5161 __ j(NegateCondition(is_string), &slow); |
5082 __ IncrementCounter(&Counters::instance_of_stub_false_string, 1); | |
5083 __ Set(eax, Immediate(Smi::FromInt(1))); | 5162 __ Set(eax, Immediate(Smi::FromInt(1))); |
5084 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5163 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5085 | 5164 |
5086 // Slow-case: Go through the JavaScript implementation. | 5165 // Slow-case: Go through the JavaScript implementation. |
5087 __ bind(&slow); | 5166 __ bind(&slow); |
5088 if (args_in_registers()) { | 5167 if (!ReturnTrueFalseObject()) { |
5089 // Push arguments below return address. | 5168 // Tail call the builtin which returns 0 or 1. |
5090 __ pop(scratch); | 5169 if (HasArgsInRegisters()) { |
| 5170 // Push arguments below return address. |
| 5171 __ pop(scratch); |
| 5172 __ push(object); |
| 5173 __ push(function); |
| 5174 __ push(scratch); |
| 5175 } |
| 5176 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
| 5177 } else { |
| 5178 // Call the builtin and convert 0/1 to true/false. |
| 5179 __ EnterInternalFrame(); |
5091 __ push(object); | 5180 __ push(object); |
5092 __ push(function); | 5181 __ push(function); |
5093 __ push(scratch); | 5182 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); |
| 5183 __ LeaveInternalFrame(); |
| 5184 NearLabel true_value, done; |
| 5185 __ test(eax, Operand(eax)); |
| 5186 __ j(zero, &true_value); |
| 5187 __ mov(eax, Factory::false_value()); |
| 5188 __ jmp(&done); |
| 5189 __ bind(&true_value); |
| 5190 __ mov(eax, Factory::true_value()); |
| 5191 __ bind(&done); |
| 5192 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
5094 } | 5193 } |
5095 __ IncrementCounter(&Counters::instance_of_slow, 1); | |
5096 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | |
5097 } | 5194 } |
5098 | 5195 |
5099 | 5196 |
| 5197 Register InstanceofStub::left() { return eax; } |
| 5198 |
| 5199 |
| 5200 Register InstanceofStub::right() { return edx; } |
| 5201 |
| 5202 |
5100 int CompareStub::MinorKey() { | 5203 int CompareStub::MinorKey() { |
5101 // Encode the three parameters in a unique 16 bit value. To avoid duplicate | 5204 // Encode the three parameters in a unique 16 bit value. To avoid duplicate |
5102 // stubs the never NaN NaN condition is only taken into account if the | 5205 // stubs the never NaN NaN condition is only taken into account if the |
5103 // condition is equals. | 5206 // condition is equals. |
5104 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); | 5207 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); |
5105 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | 5208 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); |
5106 return ConditionField::encode(static_cast<unsigned>(cc_)) | 5209 return ConditionField::encode(static_cast<unsigned>(cc_)) |
5107 | RegisterField::encode(false) // lhs_ and rhs_ are not used | 5210 | RegisterField::encode(false) // lhs_ and rhs_ are not used |
5108 | StrictField::encode(strict_) | 5211 | StrictField::encode(strict_) |
5109 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) | 5212 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) |
(...skipping 1295 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6405 // Do a tail call to the rewritten stub. | 6508 // Do a tail call to the rewritten stub. |
6406 __ jmp(Operand(edi)); | 6509 __ jmp(Operand(edi)); |
6407 } | 6510 } |
6408 | 6511 |
6409 | 6512 |
6410 #undef __ | 6513 #undef __ |
6411 | 6514 |
6412 } } // namespace v8::internal | 6515 } } // namespace v8::internal |
6413 | 6516 |
6414 #endif // V8_TARGET_ARCH_IA32 | 6517 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |